@databricks/appkit 0.18.0 → 0.19.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +8 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/index.js +2 -2
- package/dist/cache/index.js.map +1 -1
- package/dist/cli/commands/plugin/create/scaffold.js +2 -8
- package/dist/cli/commands/plugin/create/scaffold.js.map +1 -1
- package/dist/connectors/files/client.js +223 -0
- package/dist/connectors/files/client.js.map +1 -0
- package/dist/connectors/files/defaults.js +131 -0
- package/dist/connectors/files/defaults.js.map +1 -0
- package/dist/connectors/files/index.js +4 -0
- package/dist/connectors/genie/client.js +18 -6
- package/dist/connectors/genie/client.js.map +1 -1
- package/dist/connectors/index.js +3 -0
- package/dist/context/execution-context.js +7 -1
- package/dist/context/execution-context.js.map +1 -1
- package/dist/context/index.js +1 -1
- package/dist/core/appkit.d.ts.map +1 -1
- package/dist/core/appkit.js +24 -4
- package/dist/core/appkit.js.map +1 -1
- package/dist/index.d.ts +3 -2
- package/dist/index.js +2 -1
- package/dist/index.js.map +1 -1
- package/dist/plugin/plugin.d.ts +24 -5
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +43 -10
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/plugin/to-plugin.d.ts +5 -2
- package/dist/plugin/to-plugin.d.ts.map +1 -1
- package/dist/plugin/to-plugin.js +5 -2
- package/dist/plugin/to-plugin.js.map +1 -1
- package/dist/plugins/analytics/analytics.d.ts +1 -2
- package/dist/plugins/analytics/analytics.d.ts.map +1 -1
- package/dist/plugins/analytics/analytics.js +1 -2
- package/dist/plugins/analytics/analytics.js.map +1 -1
- package/dist/plugins/files/defaults.d.ts +1 -0
- package/dist/plugins/files/defaults.js +56 -0
- package/dist/plugins/files/defaults.js.map +1 -0
- package/dist/plugins/files/helpers.js +30 -0
- package/dist/plugins/files/helpers.js.map +1 -0
- package/dist/plugins/files/index.d.ts +3 -0
- package/dist/plugins/files/index.js +5 -0
- package/dist/plugins/files/manifest.js +40 -0
- package/dist/plugins/files/manifest.js.map +1 -0
- package/dist/plugins/files/plugin.d.ts +105 -0
- package/dist/plugins/files/plugin.d.ts.map +1 -0
- package/dist/plugins/files/plugin.js +714 -0
- package/dist/plugins/files/plugin.js.map +1 -0
- package/dist/plugins/files/types.d.ts +105 -0
- package/dist/plugins/files/types.d.ts.map +1 -0
- package/dist/plugins/genie/genie.d.ts +1 -2
- package/dist/plugins/genie/genie.d.ts.map +1 -1
- package/dist/plugins/genie/genie.js +1 -2
- package/dist/plugins/genie/genie.js.map +1 -1
- package/dist/plugins/index.d.ts +3 -0
- package/dist/plugins/index.js +4 -0
- package/dist/plugins/lakebase/lakebase.d.ts +1 -2
- package/dist/plugins/lakebase/lakebase.d.ts.map +1 -1
- package/dist/plugins/lakebase/lakebase.js +1 -2
- package/dist/plugins/lakebase/lakebase.js.map +1 -1
- package/dist/plugins/server/index.d.ts +2 -2
- package/dist/plugins/server/index.d.ts.map +1 -1
- package/dist/plugins/server/index.js +9 -4
- package/dist/plugins/server/index.js.map +1 -1
- package/dist/registry/manifest-loader.js +1 -1
- package/dist/registry/manifest-loader.js.map +1 -1
- package/dist/registry/types.d.ts +3 -3
- package/dist/registry/types.d.ts.map +1 -1
- package/dist/registry/types.js.map +1 -1
- package/dist/shared/src/plugin.d.ts +12 -4
- package/dist/shared/src/plugin.d.ts.map +1 -1
- package/docs/api/appkit/Class.Plugin.md +60 -12
- package/docs/api/appkit/Class.ResourceRegistry.md +3 -3
- package/docs/api/appkit/Function.createApp.md +3 -3
- package/docs/api/appkit/Interface.PluginManifest.md +9 -3
- package/docs/api/appkit/TypeAlias.PluginData.md +45 -0
- package/docs/api/appkit/TypeAlias.ToPlugin.md +1 -1
- package/docs/api/appkit-ui/files/DirectoryList.md +36 -0
- package/docs/api/appkit-ui/files/FileBreadcrumb.md +27 -0
- package/docs/api/appkit-ui/files/FileEntry.md +27 -0
- package/docs/api/appkit-ui/files/FilePreviewPanel.md +32 -0
- package/docs/api/appkit-ui/files/NewFolderInput.md +30 -0
- package/docs/api/appkit.md +1 -0
- package/docs/configuration.md +15 -0
- package/docs/plugins/custom-plugins.md +4 -13
- package/docs/plugins/files.md +350 -0
- package/docs/plugins.md +2 -1
- package/llms.txt +8 -1
- package/package.json +1 -1
- package/dist/plugins/server/remote-tunnel/denied.html/denied.html +0 -68
- package/dist/plugins/server/remote-tunnel/index.html/index.html +0 -165
- package/dist/plugins/server/remote-tunnel/wait.html/wait.html +0 -158
package/CLAUDE.md
CHANGED
|
@@ -44,6 +44,7 @@ npx @databricks/appkit docs <query>
|
|
|
44
44
|
- [Caching](./docs/plugins/caching.md): AppKit provides both global and plugin-level caching capabilities.
|
|
45
45
|
- [Creating custom plugins](./docs/plugins/custom-plugins.md): If you need custom API routes or background logic, implement an AppKit plugin. The fastest way is to use the CLI:
|
|
46
46
|
- [Execution context](./docs/plugins/execution-context.md): AppKit manages Databricks authentication via two contexts:
|
|
47
|
+
- [Files plugin](./docs/plugins/files.md): File operations against Databricks Unity Catalog Volumes. Supports listing, reading, downloading, uploading, deleting, and previewing files with built-in caching, retry, and timeout handling via the execution interceptor pipeline.
|
|
47
48
|
- [Genie plugin](./docs/plugins/genie.md): Integrates Databricks AI/BI Genie spaces into your AppKit application, enabling natural language data queries via a conversational interface.
|
|
48
49
|
- [Lakebase plugin](./docs/plugins/lakebase.md): Currently, the Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming future release.
|
|
49
50
|
- [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
|
|
@@ -83,7 +84,7 @@ npx @databricks/appkit docs <query>
|
|
|
83
84
|
- [Interface: GenerateDatabaseCredentialRequest](./docs/api/appkit/Interface.GenerateDatabaseCredentialRequest.md): Request parameters for generating database OAuth credentials
|
|
84
85
|
- [Interface: ITelemetry](./docs/api/appkit/Interface.ITelemetry.md): Plugin-facing interface for OpenTelemetry instrumentation.
|
|
85
86
|
- [Interface: LakebasePoolConfig](./docs/api/appkit/Interface.LakebasePoolConfig.md): Configuration for creating a Lakebase connection pool
|
|
86
|
-
- [Interface: PluginManifest](./docs/api/appkit/Interface.PluginManifest.md): Plugin manifest that declares metadata and resource requirements.
|
|
87
|
+
- [Interface: PluginManifest<TName>](./docs/api/appkit/Interface.PluginManifest.md): Plugin manifest that declares metadata and resource requirements.
|
|
87
88
|
- [Interface: RequestedClaims](./docs/api/appkit/Interface.RequestedClaims.md): Optional claims for fine-grained Unity Catalog table permissions
|
|
88
89
|
- [Interface: RequestedResource](./docs/api/appkit/Interface.RequestedResource.md): Resource to request permissions for in Unity Catalog
|
|
89
90
|
- [Interface: ResourceEntry](./docs/api/appkit/Interface.ResourceEntry.md): Internal representation of a resource in the registry.
|
|
@@ -94,6 +95,7 @@ npx @databricks/appkit docs <query>
|
|
|
94
95
|
- [Interface: ValidationResult](./docs/api/appkit/Interface.ValidationResult.md): Result of validating all registered resources against the environment.
|
|
95
96
|
- [Type Alias: ConfigSchema](./docs/api/appkit/TypeAlias.ConfigSchema.md): Configuration schema definition for plugin config.
|
|
96
97
|
- [Type Alias: IAppRouter](./docs/api/appkit/TypeAlias.IAppRouter.md): Express router type for plugin route registration
|
|
98
|
+
- [Type Alias: PluginData<T, U, N>](./docs/api/appkit/TypeAlias.PluginData.md): Type Parameters
|
|
97
99
|
- [Type Alias: ResourcePermission](./docs/api/appkit/TypeAlias.ResourcePermission.md): Union of all possible permission levels across all resource types.
|
|
98
100
|
- [Type Alias: ToPlugin()<T, U, N>](./docs/api/appkit/TypeAlias.ToPlugin.md): Type Parameters
|
|
99
101
|
- [Variable: sql](./docs/api/appkit/Variable.sql.md): SQL helper namespace
|
|
@@ -110,6 +112,11 @@ npx @databricks/appkit docs <query>
|
|
|
110
112
|
- [PieChart](./docs/api/appkit-ui/data/PieChart.md): Pie Chart component for proportional data visualization.
|
|
111
113
|
- [RadarChart](./docs/api/appkit-ui/data/RadarChart.md): Radar Chart component for multi-dimensional data comparison.
|
|
112
114
|
- [ScatterChart](./docs/api/appkit-ui/data/ScatterChart.md): Scatter Chart component for correlation and distribution visualization.
|
|
115
|
+
- [DirectoryList](./docs/api/appkit-ui/files/DirectoryList.md): Card-wrapped directory listing with loading, error, and empty states
|
|
116
|
+
- [FileBreadcrumb](./docs/api/appkit-ui/files/FileBreadcrumb.md): Path-aware breadcrumb navigation built on top of Breadcrumb primitives
|
|
117
|
+
- [FileEntry](./docs/api/appkit-ui/files/FileEntry.md): Single file or directory row with icon, name, size, and selection state
|
|
118
|
+
- [FilePreviewPanel](./docs/api/appkit-ui/files/FilePreviewPanel.md): Preview panel displaying file metadata, image/text preview, and download/delete actions
|
|
119
|
+
- [NewFolderInput](./docs/api/appkit-ui/files/NewFolderInput.md): Inline folder-name input with create/cancel actions
|
|
113
120
|
- [GenieChat](./docs/api/appkit-ui/genie/GenieChat.md): Full-featured chat interface for a single Databricks AI/BI Genie space. Handles message streaming, conversation history, and auto-reconnection via SSE.
|
|
114
121
|
- [GenieChatInput](./docs/api/appkit-ui/genie/GenieChatInput.md): Auto-expanding textarea input with a send button for chat messages. Submits on Enter (Shift+Enter for newline).
|
|
115
122
|
- [GenieChatMessage](./docs/api/appkit-ui/genie/GenieChatMessage.md): Renders a single Genie message bubble with optional expandable SQL query attachments.
|
package/dist/appkit/package.js
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"mappings":";;;;;;;AA4BA;;;;;;;;;;;cAAa,YAAA;EAAA,wBACa,uBAAA;EAAA,iBACP,IAAA;EAAA,eACF,QAAA;EAAA,eACA,WAAA;EAAA,QAEP,OAAA;EAAA,QACA,MAAA;EAAA,QACA,gBAAA;EAAA,QACA,iBAAA;EAAA,QACA,kBAAA;EAAA,QAEA,SAAA;EAAA,QACA,gBAAA;EAAA,QAKD,WAAA,CAAA;EAhBU;;;;;;EAAA,OA6CV,eAAA,CAAA,GAAmB,YAAA;EArClB;;;;;;;EAAA,OAuDK,WAAA,CACX,UAAA,GAAa,OAAA,CAAQ,WAAA,IACpB,OAAA,CAAQ,YAAA;EADY;;;;;;;;;;;;EAAA,eA8BF,MAAA;EA+DnB;;;;;;;;EAJI,YAAA,GAAA,CACJ,GAAA,gCACA,EAAA,QAAU,OAAA,CAAQ,CAAA,GAClB,OAAA,UACA,OAAA;IAAY,GAAA;EAAA,IACX,OAAA,CAAQ,CAAA;
|
|
1
|
+
{"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"mappings":";;;;;;;AA4BA;;;;;;;;;;;cAAa,YAAA;EAAA,wBACa,uBAAA;EAAA,iBACP,IAAA;EAAA,eACF,QAAA;EAAA,eACA,WAAA;EAAA,QAEP,OAAA;EAAA,QACA,MAAA;EAAA,QACA,gBAAA;EAAA,QACA,iBAAA;EAAA,QACA,kBAAA;EAAA,QAEA,SAAA;EAAA,QACA,gBAAA;EAAA,QAKD,WAAA,CAAA;EAhBU;;;;;;EAAA,OA6CV,eAAA,CAAA,GAAmB,YAAA;EArClB;;;;;;;EAAA,OAuDK,WAAA,CACX,UAAA,GAAa,OAAA,CAAQ,WAAA,IACpB,OAAA,CAAQ,YAAA;EADY;;;;;;;;;;;;EAAA,eA8BF,MAAA;EA+DnB;;;;;;;;EAJI,YAAA,GAAA,CACJ,GAAA,gCACA,EAAA,QAAU,OAAA,CAAQ,CAAA,GAClB,OAAA,UACA,OAAA;IAAY,GAAA;EAAA,IACX,OAAA,CAAQ,CAAA;EAqKT;;;;;EAjDI,GAAA,GAAA,CAAO,GAAA,WAAc,OAAA,CAAQ,CAAA;EAiE7B;EAAA,QAhDE,YAAA;EAgDmB;;;;;;;EAjBrB,GAAA,GAAA,CACJ,GAAA,UACA,KAAA,EAAO,CAAA,EACP,OAAA;IAAY,GAAA;EAAA,IACX,OAAA;EAuDY;;;;;EA1CT,MAAA,CAAO,GAAA,WAAc,OAAA;;EAMrB,KAAA,CAAA,GAAS,OAAA;;;;;;EAUT,GAAA,CAAI,GAAA,WAAc,OAAA;;;;;;;EAmBxB,WAAA,CAAY,KAAA,gCAAqC,OAAA;;EAO3C,KAAA,CAAA,GAAS,OAAA;;;;;EAQT,gBAAA,CAAA,GAAoB,OAAA;AAAA"}
|
package/dist/cache/index.js
CHANGED
|
@@ -12,7 +12,7 @@ import { InMemoryStorage } from "./storage/memory.js";
|
|
|
12
12
|
import { PersistentStorage } from "./storage/persistent.js";
|
|
13
13
|
import "./storage/index.js";
|
|
14
14
|
import { createHash } from "node:crypto";
|
|
15
|
-
import { WorkspaceClient } from "@databricks/sdk-experimental";
|
|
15
|
+
import { ApiError, WorkspaceClient } from "@databricks/sdk-experimental";
|
|
16
16
|
|
|
17
17
|
//#region src/cache/index.ts
|
|
18
18
|
init_errors();
|
|
@@ -192,7 +192,7 @@ var CacheManager = class CacheManager {
|
|
|
192
192
|
}).catch((error) => {
|
|
193
193
|
span.recordException(error);
|
|
194
194
|
span.setStatus({ code: SpanStatusCode.ERROR });
|
|
195
|
-
if (error instanceof AppKitError) throw error;
|
|
195
|
+
if (error instanceof AppKitError || error instanceof ApiError) throw error;
|
|
196
196
|
throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
|
|
197
197
|
}).finally(() => {
|
|
198
198
|
this.inFlightRequests.delete(cacheKey);
|
package/dist/cache/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { createLakebasePool } from \"@/connectors/lakebase\";\nimport { AppKitError, ExecutionError, InitializationError } from \"../errors\";\nimport { createLogger } from \"../logging/logger\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\nconst logger = createLogger(\"cache\");\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @internal\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n this.config = 
config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw InitializationError.notInitialized(\n \"CacheManager\",\n \"Ensure AppKit.create() has completed before accessing the cache\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. 
If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const pool = createLakebasePool({ workspaceClient });\n const persistentStorage = new PersistentStorage(config, pool);\n\n const isHealthy = await persistentStorage.healthCheck();\n if (isHealthy) {\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n\n // Health check failed, close the pool and fallback\n await pool.end();\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) 
return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n });\n\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n\n logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n cache_deduplication: true,\n });\n\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: false,\n cache_key: cacheKey,\n });\n\n const promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 
3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ExecutionError.statementFailed(\n error instanceof Error ? error.message : String(error),\n );\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 
0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n logger.debug(\"Error cleaning up expired entries: %O\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): 
Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;aAI6E;AAQ7E,MAAM,SAAS,aAAa,QAAQ;;;;;;;;;;;;;;;AAgBpC,IAAa,eAAb,MAAa,aAAa;CACxB,OAAwB,0BAA0B;CAClD,AAAiB,OAAe;CAChC,OAAe,WAAgC;CAC/C,OAAe,cAA4C;CAE3D,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,AAAQ;CACR,AAAQ;CAKR,AAAQ,YAAY,SAAuB,QAAqB;AAC9D,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,oBAAoB,eACxB,gBACA,kEACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,OAAO,mBAAmB,EAAE,iBADV,IAAI,gBAAgB,EAAE,CAAC,EACI,CAAC;GACpD,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,KAAK;AAG7D,OADkB,MAAM,kBAAkB,aAAa,EACxC;AACb,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;AAIpD,SAAM,KAAK,KAAK;UACV;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,Y
AAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACZ,CAAC;AAEF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,SAAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACX,qBAAqB;MACtB,CAAC;AAEF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;AAEF,WAAO,OAAO,EAAE,aAAa;KAC3B,WAAW;KACX,WAAW;KACZ,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,WAAW;AACtB,WAAM,KAAK,IAAI,UAAU,QAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAO;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,SAAI,iBAAiB,YACnB,OAAM;AAER,WAAM,eAAe,gBACnB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;MACD,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBA
AoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,UAAO,MAAM,yCAAyC,MAAM;IAC5D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
|
|
1
|
+
{"version":3,"file":"index.js","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { ApiError, WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { createLakebasePool } from \"@/connectors/lakebase\";\nimport { AppKitError, ExecutionError, InitializationError } from \"../errors\";\nimport { createLogger } from \"../logging/logger\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\nconst logger = createLogger(\"cache\");\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @internal\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n 
this.config = config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw InitializationError.notInitialized(\n \"CacheManager\",\n \"Ensure AppKit.create() has completed before accessing the cache\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. 
If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const pool = createLakebasePool({ workspaceClient });\n const persistentStorage = new PersistentStorage(config, pool);\n\n const isHealthy = await persistentStorage.healthCheck();\n if (isHealthy) {\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n\n // Health check failed, close the pool and fallback\n await pool.end();\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) 
return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n });\n\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n\n logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n cache_deduplication: true,\n });\n\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: false,\n cache_key: cacheKey,\n });\n\n const promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 
3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n // Preserve AppKit errors and Databricks API errors (with status codes)\n // so route handlers can map them to proper HTTP responses.\n if (error instanceof AppKitError || error instanceof ApiError) {\n throw error;\n }\n throw ExecutionError.statementFailed(\n error instanceof Error ? error.message : String(error),\n );\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 
0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n logger.debug(\"Error cleaning up expired entries: %O\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): 
Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;aAI6E;AAQ7E,MAAM,SAAS,aAAa,QAAQ;;;;;;;;;;;;;;;AAgBpC,IAAa,eAAb,MAAa,aAAa;CACxB,OAAwB,0BAA0B;CAClD,AAAiB,OAAe;CAChC,OAAe,WAAgC;CAC/C,OAAe,cAA4C;CAE3D,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ;CAER,AAAQ;CACR,AAAQ;CAKR,AAAQ,YAAY,SAAuB,QAAqB;AAC9D,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,oBAAoB,eACxB,gBACA,kEACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,OAAO,mBAAmB,EAAE,iBADV,IAAI,gBAAgB,EAAE,CAAC,EACI,CAAC;GACpD,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,KAAK;AAG7D,OADkB,MAAM,kBAAkB,aAAa,EACxC;AACb,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;AAIpD,SAAM,KAAK,KAAK;UACV;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,Y
AAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACZ,CAAC;AAEF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,SAAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACX,qBAAqB;MACtB,CAAC;AAEF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;AAEF,WAAO,OAAO,EAAE,aAAa;KAC3B,WAAW;KACX,WAAW;KACZ,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,WAAW;AACtB,WAAM,KAAK,IAAI,UAAU,QAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAO;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAG9C,SAAI,iBAAiB,eAAe,iBAAiB,SACnD,OAAM;AAER,WAAM,eAAe,gBACnB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;MACD,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AA
E1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,UAAO,MAAM,yCAAyC,MAAM;IAC5D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
|
|
@@ -92,9 +92,7 @@ function scaffoldPlugin(targetDir, answers, options) {
|
|
|
92
92
|
import manifest from "./manifest.json";
|
|
93
93
|
|
|
94
94
|
export class ${className} extends Plugin {
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
static manifest = manifest as PluginManifest;
|
|
95
|
+
static manifest = manifest as PluginManifest<"${answers.name}">;
|
|
98
96
|
|
|
99
97
|
injectRoutes(router: IAppRouter): void {
|
|
100
98
|
// Add your routes here, e.g.:
|
|
@@ -109,11 +107,7 @@ export class ${className} extends Plugin {
|
|
|
109
107
|
}
|
|
110
108
|
}
|
|
111
109
|
|
|
112
|
-
export const ${exportName} = toPlugin
|
|
113
|
-
typeof ${className},
|
|
114
|
-
Record<string, never>,
|
|
115
|
-
"${answers.name}"
|
|
116
|
-
>(${className}, "${answers.name}");
|
|
110
|
+
export const ${exportName} = toPlugin(${className});
|
|
117
111
|
`;
|
|
118
112
|
writeTracked(path.join(targetDir, `${answers.name}.ts`), pluginTs, written);
|
|
119
113
|
const indexTs = `export { ${className}, ${exportName} } from "./${answers.name}";
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"scaffold.js","names":[],"sources":["../../../../../src/cli/commands/plugin/create/scaffold.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { humanizeResourceType, MANIFEST_SCHEMA_ID } from \"./resource-defaults\";\nimport type { CreateAnswers } from \"./types\";\n\n/** Convert kebab-name to PascalCase (e.g. my-plugin -> MyPlugin). */\nfunction toPascalCase(name: string): string {\n return name\n .split(\"-\")\n .map((s) => s.charAt(0).toUpperCase() + s.slice(1).toLowerCase())\n .join(\"\");\n}\n\n/** Convert kebab-name to camelCase (e.g. my-plugin -> myPlugin). */\nfunction toCamelCase(name: string): string {\n const pascal = toPascalCase(name);\n return pascal.charAt(0).toLowerCase() + pascal.slice(1);\n}\n\n/** Build manifest.json resources from selected resources. */\nfunction buildManifestResources(answers: CreateAnswers) {\n const required: unknown[] = [];\n const optional: unknown[] = [];\n\n for (const r of answers.resources) {\n const alias = humanizeResourceType(r.type);\n const entry = {\n type: r.type,\n alias,\n resourceKey: r.resourceKey,\n description: r.description || `Required for ${alias} functionality.`,\n permission: r.permission,\n fields: r.fields,\n };\n if (r.required) {\n required.push(entry);\n } else {\n optional.push(entry);\n }\n }\n\n return { required, optional };\n}\n\n/** Build full manifest object for manifest.json. 
*/\nfunction buildManifest(answers: CreateAnswers): Record<string, unknown> {\n const { required, optional } = buildManifestResources(answers);\n const manifest: Record<string, unknown> = {\n $schema: MANIFEST_SCHEMA_ID,\n name: answers.name,\n displayName: answers.displayName,\n description: answers.description,\n resources: { required, optional },\n };\n if (answers.author) manifest.author = answers.author;\n manifest.version = answers.version || \"0.1.0\";\n if (answers.license) manifest.license = answers.license;\n return manifest;\n}\n\n/** Resolve absolute target directory from cwd and answers. */\nexport function resolveTargetDir(cwd: string, answers: CreateAnswers): string {\n return path.resolve(cwd, answers.targetPath);\n}\n\n/** Track files written during scaffolding for rollback on failure. */\nfunction writeTracked(\n filePath: string,\n content: string,\n written: string[],\n): void {\n fs.writeFileSync(filePath, content);\n written.push(filePath);\n}\n\n/** Remove files written during a failed scaffold attempt. */\nfunction rollback(written: string[], targetDir: string): void {\n for (const filePath of written.reverse()) {\n try {\n fs.unlinkSync(filePath);\n } catch {\n // best-effort cleanup\n }\n }\n try {\n const remaining = fs.readdirSync(targetDir);\n if (remaining.length === 0) fs.rmdirSync(targetDir);\n } catch {\n // directory may not be empty or may have been removed already\n }\n}\n\n/**\n * Scaffold plugin files into targetDir. 
Pure: no interactive I/O.\n * Writes manifest.json, {name}.ts, index.ts; for isolated also package.json, tsconfig.json, README.md.\n * On failure, rolls back any files already written.\n */\nexport function scaffoldPlugin(\n targetDir: string,\n answers: CreateAnswers,\n options: { isolated: boolean },\n): void {\n fs.mkdirSync(targetDir, { recursive: true });\n\n const written: string[] = [];\n\n try {\n const manifest = buildManifest(answers);\n const className = toPascalCase(answers.name);\n const exportName = toCamelCase(answers.name);\n\n writeTracked(\n path.join(targetDir, \"manifest.json\"),\n `${JSON.stringify(manifest, null, 2)}\\n`,\n written,\n );\n\n const pluginTs = `import {\n Plugin,\n toPlugin,\n type IAppRouter,\n type PluginManifest,\n} from \"@databricks/appkit\";\nimport manifest from \"./manifest.json\";\n\nexport class ${className} extends Plugin {\n
|
|
1
|
+
{"version":3,"file":"scaffold.js","names":[],"sources":["../../../../../src/cli/commands/plugin/create/scaffold.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { humanizeResourceType, MANIFEST_SCHEMA_ID } from \"./resource-defaults\";\nimport type { CreateAnswers } from \"./types\";\n\n/** Convert kebab-name to PascalCase (e.g. my-plugin -> MyPlugin). */\nfunction toPascalCase(name: string): string {\n return name\n .split(\"-\")\n .map((s) => s.charAt(0).toUpperCase() + s.slice(1).toLowerCase())\n .join(\"\");\n}\n\n/** Convert kebab-name to camelCase (e.g. my-plugin -> myPlugin). */\nfunction toCamelCase(name: string): string {\n const pascal = toPascalCase(name);\n return pascal.charAt(0).toLowerCase() + pascal.slice(1);\n}\n\n/** Build manifest.json resources from selected resources. */\nfunction buildManifestResources(answers: CreateAnswers) {\n const required: unknown[] = [];\n const optional: unknown[] = [];\n\n for (const r of answers.resources) {\n const alias = humanizeResourceType(r.type);\n const entry = {\n type: r.type,\n alias,\n resourceKey: r.resourceKey,\n description: r.description || `Required for ${alias} functionality.`,\n permission: r.permission,\n fields: r.fields,\n };\n if (r.required) {\n required.push(entry);\n } else {\n optional.push(entry);\n }\n }\n\n return { required, optional };\n}\n\n/** Build full manifest object for manifest.json. 
*/\nfunction buildManifest(answers: CreateAnswers): Record<string, unknown> {\n const { required, optional } = buildManifestResources(answers);\n const manifest: Record<string, unknown> = {\n $schema: MANIFEST_SCHEMA_ID,\n name: answers.name,\n displayName: answers.displayName,\n description: answers.description,\n resources: { required, optional },\n };\n if (answers.author) manifest.author = answers.author;\n manifest.version = answers.version || \"0.1.0\";\n if (answers.license) manifest.license = answers.license;\n return manifest;\n}\n\n/** Resolve absolute target directory from cwd and answers. */\nexport function resolveTargetDir(cwd: string, answers: CreateAnswers): string {\n return path.resolve(cwd, answers.targetPath);\n}\n\n/** Track files written during scaffolding for rollback on failure. */\nfunction writeTracked(\n filePath: string,\n content: string,\n written: string[],\n): void {\n fs.writeFileSync(filePath, content);\n written.push(filePath);\n}\n\n/** Remove files written during a failed scaffold attempt. */\nfunction rollback(written: string[], targetDir: string): void {\n for (const filePath of written.reverse()) {\n try {\n fs.unlinkSync(filePath);\n } catch {\n // best-effort cleanup\n }\n }\n try {\n const remaining = fs.readdirSync(targetDir);\n if (remaining.length === 0) fs.rmdirSync(targetDir);\n } catch {\n // directory may not be empty or may have been removed already\n }\n}\n\n/**\n * Scaffold plugin files into targetDir. 
Pure: no interactive I/O.\n * Writes manifest.json, {name}.ts, index.ts; for isolated also package.json, tsconfig.json, README.md.\n * On failure, rolls back any files already written.\n */\nexport function scaffoldPlugin(\n targetDir: string,\n answers: CreateAnswers,\n options: { isolated: boolean },\n): void {\n fs.mkdirSync(targetDir, { recursive: true });\n\n const written: string[] = [];\n\n try {\n const manifest = buildManifest(answers);\n const className = toPascalCase(answers.name);\n const exportName = toCamelCase(answers.name);\n\n writeTracked(\n path.join(targetDir, \"manifest.json\"),\n `${JSON.stringify(manifest, null, 2)}\\n`,\n written,\n );\n\n const pluginTs = `import {\n Plugin,\n toPlugin,\n type IAppRouter,\n type PluginManifest,\n} from \"@databricks/appkit\";\nimport manifest from \"./manifest.json\";\n\nexport class ${className} extends Plugin {\n static manifest = manifest as PluginManifest<\"${answers.name}\">;\n\n injectRoutes(router: IAppRouter): void {\n // Add your routes here, e.g.:\n // this.route(router, {\n // name: \"example\",\n // method: \"get\",\n // path: \"/\",\n // handler: async (_req, res) => {\n // res.json({ message: \"Hello from ${answers.name}\" });\n // },\n // });\n }\n}\n\nexport const ${exportName} = toPlugin(${className});\n`;\n\n writeTracked(path.join(targetDir, `${answers.name}.ts`), pluginTs, written);\n\n const indexTs = `export { ${className}, ${exportName} } from \"./${answers.name}\";\n`;\n\n writeTracked(path.join(targetDir, \"index.ts\"), indexTs, written);\n\n if (options.isolated) {\n const packageName =\n answers.name.includes(\"/\") || answers.name.startsWith(\"@\")\n ? 
answers.name\n : `appkit-plugin-${answers.name}`;\n\n const packageJson = {\n name: packageName,\n version: answers.version || \"0.1.0\",\n type: \"module\",\n main: \"./dist/index.js\",\n types: \"./dist/index.d.ts\",\n files: [\"dist\"],\n scripts: {\n build: \"tsc\",\n typecheck: \"tsc --noEmit\",\n },\n peerDependencies: {\n \"@databricks/appkit\": \">=0.5.0\",\n },\n devDependencies: {\n typescript: \"^5.0.0\",\n },\n };\n\n writeTracked(\n path.join(targetDir, \"package.json\"),\n `${JSON.stringify(packageJson, null, 2)}\\n`,\n written,\n );\n\n const tsconfigJson = {\n compilerOptions: {\n target: \"ES2022\",\n module: \"NodeNext\",\n moduleResolution: \"NodeNext\",\n outDir: \"dist\",\n rootDir: \".\",\n declaration: true,\n strict: true,\n skipLibCheck: true,\n },\n include: [\"*.ts\"],\n exclude: [\"node_modules\", \"dist\"],\n };\n\n writeTracked(\n path.join(targetDir, \"tsconfig.json\"),\n `${JSON.stringify(tsconfigJson, null, 2)}\\n`,\n written,\n );\n\n const readme = `# ${answers.displayName}\n\n${answers.description}\n\n## Installation\n\n\\`\\`\\`bash\npnpm add ${packageName} @databricks/appkit\n\\`\\`\\`\n\n## Usage\n\nRegister the plugin in your AppKit app:\n\n\\`\\`\\`ts\nimport { createApp } from \"@databricks/appkit\";\nimport { ${exportName} } from \"${packageName}\";\n\ncreateApp({\n plugins: [\n ${exportName}(),\n // ... other plugins\n ],\n}).then((app) => { /* ... 
*/ });\n\\`\\`\\`\n`;\n\n writeTracked(path.join(targetDir, \"README.md\"), readme, written);\n }\n } catch (err) {\n rollback(written, targetDir);\n throw err;\n }\n}\n"],"mappings":";;;;;;AAMA,SAAS,aAAa,MAAsB;AAC1C,QAAO,KACJ,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,OAAO,EAAE,CAAC,aAAa,GAAG,EAAE,MAAM,EAAE,CAAC,aAAa,CAAC,CAChE,KAAK,GAAG;;;AAIb,SAAS,YAAY,MAAsB;CACzC,MAAM,SAAS,aAAa,KAAK;AACjC,QAAO,OAAO,OAAO,EAAE,CAAC,aAAa,GAAG,OAAO,MAAM,EAAE;;;AAIzD,SAAS,uBAAuB,SAAwB;CACtD,MAAM,WAAsB,EAAE;CAC9B,MAAM,WAAsB,EAAE;AAE9B,MAAK,MAAM,KAAK,QAAQ,WAAW;EACjC,MAAM,QAAQ,qBAAqB,EAAE,KAAK;EAC1C,MAAM,QAAQ;GACZ,MAAM,EAAE;GACR;GACA,aAAa,EAAE;GACf,aAAa,EAAE,eAAe,gBAAgB,MAAM;GACpD,YAAY,EAAE;GACd,QAAQ,EAAE;GACX;AACD,MAAI,EAAE,SACJ,UAAS,KAAK,MAAM;MAEpB,UAAS,KAAK,MAAM;;AAIxB,QAAO;EAAE;EAAU;EAAU;;;AAI/B,SAAS,cAAc,SAAiD;CACtE,MAAM,EAAE,UAAU,aAAa,uBAAuB,QAAQ;CAC9D,MAAM,WAAoC;EACxC,SAAS;EACT,MAAM,QAAQ;EACd,aAAa,QAAQ;EACrB,aAAa,QAAQ;EACrB,WAAW;GAAE;GAAU;GAAU;EAClC;AACD,KAAI,QAAQ,OAAQ,UAAS,SAAS,QAAQ;AAC9C,UAAS,UAAU,QAAQ,WAAW;AACtC,KAAI,QAAQ,QAAS,UAAS,UAAU,QAAQ;AAChD,QAAO;;;AAIT,SAAgB,iBAAiB,KAAa,SAAgC;AAC5E,QAAO,KAAK,QAAQ,KAAK,QAAQ,WAAW;;;AAI9C,SAAS,aACP,UACA,SACA,SACM;AACN,IAAG,cAAc,UAAU,QAAQ;AACnC,SAAQ,KAAK,SAAS;;;AAIxB,SAAS,SAAS,SAAmB,WAAyB;AAC5D,MAAK,MAAM,YAAY,QAAQ,SAAS,CACtC,KAAI;AACF,KAAG,WAAW,SAAS;SACjB;AAIV,KAAI;AAEF,MADkB,GAAG,YAAY,UAAU,CAC7B,WAAW,EAAG,IAAG,UAAU,UAAU;SAC7C;;;;;;;AAUV,SAAgB,eACd,WACA,SACA,SACM;AACN,IAAG,UAAU,WAAW,EAAE,WAAW,MAAM,CAAC;CAE5C,MAAM,UAAoB,EAAE;AAE5B,KAAI;EACF,MAAM,WAAW,cAAc,QAAQ;EACvC,MAAM,YAAY,aAAa,QAAQ,KAAK;EAC5C,MAAM,aAAa,YAAY,QAAQ,KAAK;AAE5C,eACE,KAAK,KAAK,WAAW,gBAAgB,EACrC,GAAG,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC,KACrC,QACD;EAED,MAAM,WAAW;;;;;;;;eAQN,UAAU;kDACyB,QAAQ,KAAK;;;;;;;;;6CASlB,QAAQ,KAAK;;;;;;eAM3C,WAAW,cAAc,UAAU;;AAG9C,eAAa,KAAK,KAAK,WAAW,GAAG,QAAQ,KAAK,KAAK,EAAE,UAAU,QAAQ;EAE3E,MAAM,UAAU,YAAY,UAAU,IAAI,WAAW,aAAa,QAAQ,KAAK;;AAG/E,eAAa,KAAK,KAAK,WAAW,WAAW,EAAE,SAAS,QAAQ;AAEhE,MAAI,QAAQ,UAAU;GACpB,MAAM,cACJ,QAAQ,KAAK,SAAS,IAAI,IAAI,QAAQ,KAAK,WAAW,IAAI,GACtD,QAAQ,OAC
R,iBAAiB,QAAQ;GAE/B,MAAM,cAAc;IAClB,MAAM;IACN,SAAS,QAAQ,WAAW;IAC5B,MAAM;IACN,MAAM;IACN,OAAO;IACP,OAAO,CAAC,OAAO;IACf,SAAS;KACP,OAAO;KACP,WAAW;KACZ;IACD,kBAAkB,EAChB,sBAAsB,WACvB;IACD,iBAAiB,EACf,YAAY,UACb;IACF;AAED,gBACE,KAAK,KAAK,WAAW,eAAe,EACpC,GAAG,KAAK,UAAU,aAAa,MAAM,EAAE,CAAC,KACxC,QACD;AAiBD,gBACE,KAAK,KAAK,WAAW,gBAAgB,EACrC,GAAG,KAAK,UAjBW;IACnB,iBAAiB;KACf,QAAQ;KACR,QAAQ;KACR,kBAAkB;KAClB,QAAQ;KACR,SAAS;KACT,aAAa;KACb,QAAQ;KACR,cAAc;KACf;IACD,SAAS,CAAC,OAAO;IACjB,SAAS,CAAC,gBAAgB,OAAO;IAClC,EAIiC,MAAM,EAAE,CAAC,KACzC,QACD;GAED,MAAM,SAAS,KAAK,QAAQ,YAAY;;EAE5C,QAAQ,YAAY;;;;;WAKX,YAAY;;;;;;;;;WASZ,WAAW,WAAW,YAAY;;;;MAIvC,WAAW;;;;;;AAOX,gBAAa,KAAK,KAAK,WAAW,YAAY,EAAE,QAAQ,QAAQ;;UAE3D,KAAK;AACZ,WAAS,SAAS,UAAU;AAC5B,QAAM"}
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
import { createLogger } from "../../logging/logger.js";
|
|
2
|
+
import { TelemetryManager } from "../../telemetry/telemetry-manager.js";
|
|
3
|
+
import { SpanKind, SpanStatusCode } from "../../telemetry/index.js";
|
|
4
|
+
import { FILES_MAX_READ_SIZE, contentTypeFromPath, isTextContentType } from "./defaults.js";
|
|
5
|
+
import { ApiError } from "@databricks/sdk-experimental";
|
|
6
|
+
|
|
7
|
+
//#region src/connectors/files/client.ts
|
|
8
|
+
const logger = createLogger("connectors:files");
|
|
9
|
+
var FilesConnector = class {
|
|
10
|
+
name = "files";
|
|
11
|
+
defaultVolume;
|
|
12
|
+
customContentTypes;
|
|
13
|
+
telemetry;
|
|
14
|
+
telemetryMetrics;
|
|
15
|
+
constructor(config) {
|
|
16
|
+
this.defaultVolume = config.defaultVolume;
|
|
17
|
+
this.customContentTypes = config.customContentTypes;
|
|
18
|
+
this.telemetry = TelemetryManager.getProvider(this.name, config.telemetry);
|
|
19
|
+
this.telemetryMetrics = {
|
|
20
|
+
operationCount: this.telemetry.getMeter().createCounter("files.operation.count", {
|
|
21
|
+
description: "Total number of file operations",
|
|
22
|
+
unit: "1"
|
|
23
|
+
}),
|
|
24
|
+
operationDuration: this.telemetry.getMeter().createHistogram("files.operation.duration", {
|
|
25
|
+
description: "Duration of file operations",
|
|
26
|
+
unit: "ms"
|
|
27
|
+
})
|
|
28
|
+
};
|
|
29
|
+
}
|
|
30
|
+
resolvePath(filePath) {
|
|
31
|
+
if (filePath.length > 4096) throw new Error(`Path exceeds maximum length of 4096 characters (got ${filePath.length}).`);
|
|
32
|
+
if (filePath.includes("\0")) throw new Error("Path must not contain null bytes.");
|
|
33
|
+
if (filePath.split("/").some((s) => s === "..")) throw new Error("Path traversal (\"../\") is not allowed.");
|
|
34
|
+
if (filePath.startsWith("/")) {
|
|
35
|
+
if (!filePath.startsWith("/Volumes/")) throw new Error("Absolute paths must start with \"/Volumes/\". Unity Catalog volume paths follow the format: /Volumes/<catalog>/<schema>/<volume>/");
|
|
36
|
+
return filePath;
|
|
37
|
+
}
|
|
38
|
+
if (!this.defaultVolume) throw new Error("Cannot resolve relative path: no default volume set. Use an absolute path or set a default volume.");
|
|
39
|
+
return `${this.defaultVolume}/${filePath}`;
|
|
40
|
+
}
|
|
41
|
+
async traced(operation, attributes, fn) {
|
|
42
|
+
const startTime = Date.now();
|
|
43
|
+
let success = false;
|
|
44
|
+
return this.telemetry.startActiveSpan(`files.${operation}`, {
|
|
45
|
+
kind: SpanKind.CLIENT,
|
|
46
|
+
attributes: {
|
|
47
|
+
"files.operation": operation,
|
|
48
|
+
...attributes
|
|
49
|
+
}
|
|
50
|
+
}, async (span) => {
|
|
51
|
+
try {
|
|
52
|
+
const result = await fn(span);
|
|
53
|
+
success = true;
|
|
54
|
+
span.setStatus({ code: SpanStatusCode.OK });
|
|
55
|
+
return result;
|
|
56
|
+
} catch (error) {
|
|
57
|
+
span.recordException(error);
|
|
58
|
+
span.setStatus({
|
|
59
|
+
code: SpanStatusCode.ERROR,
|
|
60
|
+
message: error instanceof Error ? error.message : String(error)
|
|
61
|
+
});
|
|
62
|
+
throw error;
|
|
63
|
+
} finally {
|
|
64
|
+
span.end();
|
|
65
|
+
const duration = Date.now() - startTime;
|
|
66
|
+
const metricAttrs = {
|
|
67
|
+
"files.operation": operation,
|
|
68
|
+
success: String(success)
|
|
69
|
+
};
|
|
70
|
+
this.telemetryMetrics.operationCount.add(1, metricAttrs);
|
|
71
|
+
this.telemetryMetrics.operationDuration.record(duration, metricAttrs);
|
|
72
|
+
}
|
|
73
|
+
}, {
|
|
74
|
+
name: this.name,
|
|
75
|
+
includePrefix: true
|
|
76
|
+
});
|
|
77
|
+
}
|
|
78
|
+
async list(client, directoryPath) {
|
|
79
|
+
const resolvedPath = directoryPath ? this.resolvePath(directoryPath) : this.defaultVolume;
|
|
80
|
+
if (!resolvedPath) throw new Error("No directory path provided and no default volume set.");
|
|
81
|
+
return this.traced("list", { "files.path": resolvedPath }, async () => {
|
|
82
|
+
const entries = [];
|
|
83
|
+
for await (const entry of client.files.listDirectoryContents({ directory_path: resolvedPath })) entries.push(entry);
|
|
84
|
+
return entries;
|
|
85
|
+
});
|
|
86
|
+
}
|
|
87
|
+
async read(client, filePath, options) {
|
|
88
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
89
|
+
const maxSize = options?.maxSize ?? FILES_MAX_READ_SIZE;
|
|
90
|
+
return this.traced("read", { "files.path": resolvedPath }, async () => {
|
|
91
|
+
const response = await this.download(client, filePath);
|
|
92
|
+
if (!response.contents) return "";
|
|
93
|
+
const reader = response.contents.getReader();
|
|
94
|
+
const decoder = new TextDecoder();
|
|
95
|
+
let result = "";
|
|
96
|
+
let bytesRead = 0;
|
|
97
|
+
while (true) {
|
|
98
|
+
const { done, value } = await reader.read();
|
|
99
|
+
if (done) break;
|
|
100
|
+
bytesRead += value.byteLength;
|
|
101
|
+
if (bytesRead > maxSize) {
|
|
102
|
+
await reader.cancel();
|
|
103
|
+
throw new Error(`File exceeds maximum read size (${maxSize} bytes). Use download() for large files.`);
|
|
104
|
+
}
|
|
105
|
+
result += decoder.decode(value, { stream: true });
|
|
106
|
+
}
|
|
107
|
+
result += decoder.decode();
|
|
108
|
+
return result;
|
|
109
|
+
});
|
|
110
|
+
}
|
|
111
|
+
async download(client, filePath) {
|
|
112
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
113
|
+
return this.traced("download", { "files.path": resolvedPath }, async () => {
|
|
114
|
+
return client.files.download({ file_path: resolvedPath });
|
|
115
|
+
});
|
|
116
|
+
}
|
|
117
|
+
async exists(client, filePath) {
|
|
118
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
119
|
+
return this.traced("exists", { "files.path": resolvedPath }, async () => {
|
|
120
|
+
try {
|
|
121
|
+
await this.metadata(client, filePath);
|
|
122
|
+
return true;
|
|
123
|
+
} catch (error) {
|
|
124
|
+
if (error instanceof ApiError && error.statusCode === 404) return false;
|
|
125
|
+
throw error;
|
|
126
|
+
}
|
|
127
|
+
});
|
|
128
|
+
}
|
|
129
|
+
async metadata(client, filePath) {
|
|
130
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
131
|
+
return this.traced("metadata", { "files.path": resolvedPath }, async () => {
|
|
132
|
+
const response = await client.files.getMetadata({ file_path: resolvedPath });
|
|
133
|
+
return {
|
|
134
|
+
contentLength: response["content-length"],
|
|
135
|
+
contentType: contentTypeFromPath(filePath, response["content-type"], this.customContentTypes),
|
|
136
|
+
lastModified: response["last-modified"]
|
|
137
|
+
};
|
|
138
|
+
});
|
|
139
|
+
}
|
|
140
|
+
async upload(client, filePath, contents, options) {
|
|
141
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
142
|
+
return this.traced("upload", { "files.path": resolvedPath }, async () => {
|
|
143
|
+
const body = contents;
|
|
144
|
+
const overwrite = options?.overwrite ?? true;
|
|
145
|
+
const hostValue = client.config.host;
|
|
146
|
+
if (!hostValue) throw new Error("Databricks host is not configured. Set DATABRICKS_HOST or configure client.config.host.");
|
|
147
|
+
const host = hostValue.startsWith("http") ? hostValue : `https://${hostValue}`;
|
|
148
|
+
const url = new URL(`/api/2.0/fs/files${resolvedPath}`, host);
|
|
149
|
+
url.searchParams.set("overwrite", String(overwrite));
|
|
150
|
+
const headers = new Headers({ "Content-Type": "application/octet-stream" });
|
|
151
|
+
const fetchOptions = {
|
|
152
|
+
method: "PUT",
|
|
153
|
+
headers,
|
|
154
|
+
body
|
|
155
|
+
};
|
|
156
|
+
if (body instanceof ReadableStream) fetchOptions.duplex = "half";
|
|
157
|
+
else if (body instanceof Buffer) headers.set("Content-Length", String(body.length));
|
|
158
|
+
else if (typeof body === "string") headers.set("Content-Length", String(Buffer.byteLength(body)));
|
|
159
|
+
await client.config.authenticate(headers);
|
|
160
|
+
const res = await fetch(url.toString(), fetchOptions);
|
|
161
|
+
if (!res.ok) {
|
|
162
|
+
const text = await res.text();
|
|
163
|
+
logger.error(`Upload failed (${res.status}): ${text}`);
|
|
164
|
+
throw new ApiError(`Upload failed: ${text.length > 200 ? `${text.slice(0, 200)}…` : text}`, "UPLOAD_FAILED", res.status, void 0, []);
|
|
165
|
+
}
|
|
166
|
+
});
|
|
167
|
+
}
|
|
168
|
+
async createDirectory(client, directoryPath) {
|
|
169
|
+
const resolvedPath = this.resolvePath(directoryPath);
|
|
170
|
+
return this.traced("createDirectory", { "files.path": resolvedPath }, async () => {
|
|
171
|
+
await client.files.createDirectory({ directory_path: resolvedPath });
|
|
172
|
+
});
|
|
173
|
+
}
|
|
174
|
+
async delete(client, filePath) {
|
|
175
|
+
const resolvedPath = this.resolvePath(filePath);
|
|
176
|
+
return this.traced("delete", { "files.path": resolvedPath }, async () => {
|
|
177
|
+
await client.files.delete({ file_path: resolvedPath });
|
|
178
|
+
});
|
|
179
|
+
}
|
|
180
|
+
  /**
   * Produce a lightweight preview of a file: its metadata plus, for text-like
   * content types, roughly the first `maxChars` characters of the body.
   *
   * @param client - Databricks workspace client.
   * @param filePath - File path; resolved against the default volume when relative.
   * @param options - Optional `{ maxChars }` preview cap (default 1024 characters).
   * @returns Metadata spread together with `textPreview`, `isText`, `isImage`.
   */
  async preview(client, filePath, options) {
    const resolvedPath = this.resolvePath(filePath);
    return this.traced("preview", { "files.path": resolvedPath }, async () => {
      const meta = await this.metadata(client, filePath);
      const isText = isTextContentType(meta.contentType);
      const isImage = meta.contentType?.startsWith("image/") || false;
      // Non-text files return metadata only — the body is never downloaded.
      if (!isText) return {
        ...meta,
        textPreview: null,
        isText: false,
        isImage
      };
      const response = await client.files.download({ file_path: resolvedPath });
      // Text file with no stream body: empty preview.
      if (!response.contents) return {
        ...meta,
        textPreview: "",
        isText: true,
        isImage: false
      };
      const reader = response.contents.getReader();
      const decoder = new TextDecoder();
      let preview = "";
      const maxChars = options?.maxChars ?? 1024;
      // Read only until enough characters are buffered; this can overshoot by
      // up to one chunk, which is trimmed below.
      while (preview.length < maxChars) {
        const { done, value } = await reader.read();
        if (done) break;
        preview += decoder.decode(value, { stream: true });
      }
      // Flush any bytes the streaming decoder buffered (e.g. a split multi-byte char).
      preview += decoder.decode();
      // Abort the remainder of the download — the rest of the file is not needed.
      await reader.cancel();
      if (preview.length > maxChars) preview = preview.slice(0, maxChars);
      return {
        ...meta,
        textPreview: preview,
        isText: true,
        isImage: false
      };
    });
  }
|
|
219
|
+
};
|
|
220
|
+
|
|
221
|
+
//#endregion
|
|
222
|
+
export { FilesConnector };
|
|
223
|
+
//# sourceMappingURL=client.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/files/client.ts"],"sourcesContent":["import { ApiError, type WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { TelemetryOptions } from \"shared\";\nimport { createLogger } from \"../../logging/logger\";\nimport type {\n DirectoryEntry,\n DownloadResponse,\n FileMetadata,\n FilePreview,\n} from \"../../plugins/files/types\";\nimport type { TelemetryProvider } from \"../../telemetry\";\nimport {\n type Counter,\n type Histogram,\n type Span,\n SpanKind,\n SpanStatusCode,\n TelemetryManager,\n} from \"../../telemetry\";\nimport {\n contentTypeFromPath,\n FILES_MAX_READ_SIZE,\n isTextContentType,\n} from \"./defaults\";\n\nconst logger = createLogger(\"connectors:files\");\n\nexport interface FilesConnectorConfig {\n defaultVolume?: string;\n timeout?: number;\n telemetry?: TelemetryOptions;\n customContentTypes?: Record<string, string>;\n}\n\nexport class FilesConnector {\n private readonly name = \"files\";\n private defaultVolume: string | undefined;\n private readonly customContentTypes: Record<string, string> | undefined;\n\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n operationCount: Counter;\n operationDuration: Histogram;\n };\n\n constructor(config: FilesConnectorConfig) {\n this.defaultVolume = config.defaultVolume;\n this.customContentTypes = config.customContentTypes;\n\n this.telemetry = TelemetryManager.getProvider(this.name, config.telemetry);\n this.telemetryMetrics = {\n operationCount: this.telemetry\n .getMeter()\n .createCounter(\"files.operation.count\", {\n description: \"Total number of file operations\",\n unit: \"1\",\n }),\n operationDuration: this.telemetry\n .getMeter()\n .createHistogram(\"files.operation.duration\", {\n description: \"Duration of file operations\",\n unit: \"ms\",\n }),\n };\n }\n\n resolvePath(filePath: string): string {\n if (filePath.length > 4096) {\n throw new Error(\n `Path 
exceeds maximum length of 4096 characters (got ${filePath.length}).`,\n );\n }\n if (filePath.includes(\"\\0\")) {\n throw new Error(\"Path must not contain null bytes.\");\n }\n\n const segments = filePath.split(\"/\");\n if (segments.some((s) => s === \"..\")) {\n throw new Error('Path traversal (\"../\") is not allowed.');\n }\n if (filePath.startsWith(\"/\")) {\n if (!filePath.startsWith(\"/Volumes/\")) {\n throw new Error(\n 'Absolute paths must start with \"/Volumes/\". ' +\n \"Unity Catalog volume paths follow the format: /Volumes/<catalog>/<schema>/<volume>/\",\n );\n }\n return filePath;\n }\n if (!this.defaultVolume) {\n throw new Error(\n \"Cannot resolve relative path: no default volume set. Use an absolute path or set a default volume.\",\n );\n }\n return `${this.defaultVolume}/${filePath}`;\n }\n\n private async traced<T>(\n operation: string,\n attributes: Record<string, string>,\n fn: (span: Span) => Promise<T>,\n ): Promise<T> {\n const startTime = Date.now();\n let success = false;\n\n return this.telemetry.startActiveSpan(\n `files.${operation}`,\n {\n kind: SpanKind.CLIENT,\n attributes: {\n \"files.operation\": operation,\n ...attributes,\n },\n },\n async (span: Span) => {\n try {\n const result = await fn(span);\n success = true;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: error instanceof Error ? 
error.message : String(error),\n });\n throw error;\n } finally {\n span.end();\n const duration = Date.now() - startTime;\n const metricAttrs = {\n \"files.operation\": operation,\n success: String(success),\n };\n this.telemetryMetrics.operationCount.add(1, metricAttrs);\n this.telemetryMetrics.operationDuration.record(duration, metricAttrs);\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n async list(\n client: WorkspaceClient,\n directoryPath?: string,\n ): Promise<DirectoryEntry[]> {\n const resolvedPath = directoryPath\n ? this.resolvePath(directoryPath)\n : this.defaultVolume;\n if (!resolvedPath) {\n throw new Error(\"No directory path provided and no default volume set.\");\n }\n\n return this.traced(\"list\", { \"files.path\": resolvedPath }, async () => {\n const entries: DirectoryEntry[] = [];\n for await (const entry of client.files.listDirectoryContents({\n directory_path: resolvedPath,\n })) {\n entries.push(entry);\n }\n return entries;\n });\n }\n\n async read(\n client: WorkspaceClient,\n filePath: string,\n options?: { maxSize?: number },\n ): Promise<string> {\n const resolvedPath = this.resolvePath(filePath);\n const maxSize = options?.maxSize ?? FILES_MAX_READ_SIZE;\n return this.traced(\"read\", { \"files.path\": resolvedPath }, async () => {\n const response = await this.download(client, filePath);\n if (!response.contents) {\n return \"\";\n }\n const reader = response.contents.getReader();\n const decoder = new TextDecoder();\n let result = \"\";\n let bytesRead = 0;\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n bytesRead += value.byteLength;\n if (bytesRead > maxSize) {\n await reader.cancel();\n throw new Error(\n `File exceeds maximum read size (${maxSize} bytes). 
Use download() for large files.`,\n );\n }\n result += decoder.decode(value, { stream: true });\n }\n result += decoder.decode();\n return result;\n });\n }\n\n async download(\n client: WorkspaceClient,\n filePath: string,\n ): Promise<DownloadResponse> {\n const resolvedPath = this.resolvePath(filePath);\n return this.traced(\"download\", { \"files.path\": resolvedPath }, async () => {\n return client.files.download({\n file_path: resolvedPath,\n });\n });\n }\n\n async exists(client: WorkspaceClient, filePath: string): Promise<boolean> {\n const resolvedPath = this.resolvePath(filePath);\n return this.traced(\"exists\", { \"files.path\": resolvedPath }, async () => {\n try {\n await this.metadata(client, filePath);\n return true;\n } catch (error) {\n if (error instanceof ApiError && error.statusCode === 404) {\n return false;\n }\n throw error;\n }\n });\n }\n\n async metadata(\n client: WorkspaceClient,\n filePath: string,\n ): Promise<FileMetadata> {\n const resolvedPath = this.resolvePath(filePath);\n return this.traced(\"metadata\", { \"files.path\": resolvedPath }, async () => {\n const response = await client.files.getMetadata({\n file_path: resolvedPath,\n });\n return {\n contentLength: response[\"content-length\"],\n contentType: contentTypeFromPath(\n filePath,\n response[\"content-type\"],\n this.customContentTypes,\n ),\n lastModified: response[\"last-modified\"],\n };\n });\n }\n\n async upload(\n client: WorkspaceClient,\n filePath: string,\n contents: ReadableStream | Buffer | string,\n options?: { overwrite?: boolean },\n ): Promise<void> {\n const resolvedPath = this.resolvePath(filePath);\n\n return this.traced(\"upload\", { \"files.path\": resolvedPath }, async () => {\n const body = contents;\n const overwrite = options?.overwrite ?? true;\n\n // Workaround: The SDK's files.upload() has two bugs:\n // 1. It ignores the `contents` field (sets body to undefined)\n // 2. 
apiClient.request() checks `instanceof` against its own ReadableStream\n // subclass, so standard ReadableStream instances get JSON.stringified to \"{}\"\n // Bypass both by calling the REST API directly with SDK-provided auth.\n const hostValue = client.config.host;\n if (!hostValue) {\n throw new Error(\n \"Databricks host is not configured. Set DATABRICKS_HOST or configure client.config.host.\",\n );\n }\n const host = hostValue.startsWith(\"http\")\n ? hostValue\n : `https://${hostValue}`;\n const url = new URL(`/api/2.0/fs/files${resolvedPath}`, host);\n url.searchParams.set(\"overwrite\", String(overwrite));\n\n const headers = new Headers({\n \"Content-Type\": \"application/octet-stream\",\n });\n const fetchOptions: RequestInit = { method: \"PUT\", headers, body };\n\n if (body instanceof ReadableStream) {\n fetchOptions.duplex = \"half\";\n } else if (body instanceof Buffer) {\n headers.set(\"Content-Length\", String(body.length));\n } else if (typeof body === \"string\") {\n headers.set(\"Content-Length\", String(Buffer.byteLength(body)));\n }\n\n await client.config.authenticate(headers);\n\n const res = await fetch(url.toString(), fetchOptions);\n\n if (!res.ok) {\n const text = await res.text();\n logger.error(`Upload failed (${res.status}): ${text}`);\n const safeMessage = text.length > 200 ? 
`${text.slice(0, 200)}…` : text;\n throw new ApiError(\n `Upload failed: ${safeMessage}`,\n \"UPLOAD_FAILED\",\n res.status,\n undefined,\n [],\n );\n }\n });\n }\n\n async createDirectory(\n client: WorkspaceClient,\n directoryPath: string,\n ): Promise<void> {\n const resolvedPath = this.resolvePath(directoryPath);\n return this.traced(\n \"createDirectory\",\n { \"files.path\": resolvedPath },\n async () => {\n await client.files.createDirectory({\n directory_path: resolvedPath,\n });\n },\n );\n }\n\n async delete(client: WorkspaceClient, filePath: string): Promise<void> {\n const resolvedPath = this.resolvePath(filePath);\n return this.traced(\"delete\", { \"files.path\": resolvedPath }, async () => {\n await client.files.delete({\n file_path: resolvedPath,\n });\n });\n }\n\n async preview(\n client: WorkspaceClient,\n filePath: string,\n options?: { maxChars?: number },\n ): Promise<FilePreview> {\n const resolvedPath = this.resolvePath(filePath);\n return this.traced(\"preview\", { \"files.path\": resolvedPath }, async () => {\n const meta = await this.metadata(client, filePath);\n const isText = isTextContentType(meta.contentType);\n const isImage = meta.contentType?.startsWith(\"image/\") || false;\n\n if (!isText) {\n return { ...meta, textPreview: null, isText: false, isImage };\n }\n\n const response = await client.files.download({\n file_path: resolvedPath,\n });\n if (!response.contents) {\n return { ...meta, textPreview: \"\", isText: true, isImage: false };\n }\n\n const reader = response.contents.getReader();\n const decoder = new TextDecoder();\n let preview = \"\";\n const maxChars = options?.maxChars ?? 
1024;\n\n while (preview.length < maxChars) {\n const { done, value } = await reader.read();\n if (done) break;\n preview += decoder.decode(value, { stream: true });\n }\n preview += decoder.decode();\n await reader.cancel();\n\n if (preview.length > maxChars) {\n preview = preview.slice(0, maxChars);\n }\n\n return { ...meta, textPreview: preview, isText: true, isImage: false };\n });\n }\n}\n"],"mappings":";;;;;;;AAwBA,MAAM,SAAS,aAAa,mBAAmB;AAS/C,IAAa,iBAAb,MAA4B;CAC1B,AAAiB,OAAO;CACxB,AAAQ;CACR,AAAiB;CAEjB,AAAiB;CACjB,AAAiB;CAKjB,YAAY,QAA8B;AACxC,OAAK,gBAAgB,OAAO;AAC5B,OAAK,qBAAqB,OAAO;AAEjC,OAAK,YAAY,iBAAiB,YAAY,KAAK,MAAM,OAAO,UAAU;AAC1E,OAAK,mBAAmB;GACtB,gBAAgB,KAAK,UAClB,UAAU,CACV,cAAc,yBAAyB;IACtC,aAAa;IACb,MAAM;IACP,CAAC;GACJ,mBAAmB,KAAK,UACrB,UAAU,CACV,gBAAgB,4BAA4B;IAC3C,aAAa;IACb,MAAM;IACP,CAAC;GACL;;CAGH,YAAY,UAA0B;AACpC,MAAI,SAAS,SAAS,KACpB,OAAM,IAAI,MACR,uDAAuD,SAAS,OAAO,IACxE;AAEH,MAAI,SAAS,SAAS,KAAK,CACzB,OAAM,IAAI,MAAM,oCAAoC;AAItD,MADiB,SAAS,MAAM,IAAI,CACvB,MAAM,MAAM,MAAM,KAAK,CAClC,OAAM,IAAI,MAAM,2CAAyC;AAE3D,MAAI,SAAS,WAAW,IAAI,EAAE;AAC5B,OAAI,CAAC,SAAS,WAAW,YAAY,CACnC,OAAM,IAAI,MACR,oIAED;AAEH,UAAO;;AAET,MAAI,CAAC,KAAK,cACR,OAAM,IAAI,MACR,qGACD;AAEH,SAAO,GAAG,KAAK,cAAc,GAAG;;CAGlC,MAAc,OACZ,WACA,YACA,IACY;EACZ,MAAM,YAAY,KAAK,KAAK;EAC5B,IAAI,UAAU;AAEd,SAAO,KAAK,UAAU,gBACpB,SAAS,aACT;GACE,MAAM,SAAS;GACf,YAAY;IACV,mBAAmB;IACnB,GAAG;IACJ;GACF,EACD,OAAO,SAAe;AACpB,OAAI;IACF,MAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,cAAU;AACV,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU;KACb,MAAM,eAAe;KACrB,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;KAChE,CAAC;AACF,UAAM;aACE;AACR,SAAK,KAAK;IACV,MAAM,WAAW,KAAK,KAAK,GAAG;IAC9B,MAAM,cAAc;KAClB,mBAAmB;KACnB,SAAS,OAAO,QAAQ;KACzB;AACD,SAAK,iBAAiB,eAAe,IAAI,GAAG,YAAY;AACxD,SAAK,iBAAiB,kBAAkB,OAAO,UAAU,YAAY;;KAGzE;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;CAGH,MAAM,KACJ,QACA,eAC2B;EAC3B,MAAM,eAAe,gBACjB,KAAK,YAAY,cAAc,GAC/B,KAAK;AACT,MAAI,CAAC,aACH,OAAM,IAAI,MAAM,wDAAwD;AAG1E,SAAO,KAAK,OAAO,QAAQ,EAA
E,cAAc,cAAc,EAAE,YAAY;GACrE,MAAM,UAA4B,EAAE;AACpC,cAAW,MAAM,SAAS,OAAO,MAAM,sBAAsB,EAC3D,gBAAgB,cACjB,CAAC,CACA,SAAQ,KAAK,MAAM;AAErB,UAAO;IACP;;CAGJ,MAAM,KACJ,QACA,UACA,SACiB;EACjB,MAAM,eAAe,KAAK,YAAY,SAAS;EAC/C,MAAM,UAAU,SAAS,WAAW;AACpC,SAAO,KAAK,OAAO,QAAQ,EAAE,cAAc,cAAc,EAAE,YAAY;GACrE,MAAM,WAAW,MAAM,KAAK,SAAS,QAAQ,SAAS;AACtD,OAAI,CAAC,SAAS,SACZ,QAAO;GAET,MAAM,SAAS,SAAS,SAAS,WAAW;GAC5C,MAAM,UAAU,IAAI,aAAa;GACjC,IAAI,SAAS;GACb,IAAI,YAAY;AAChB,UAAO,MAAM;IACX,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,QAAI,KAAM;AACV,iBAAa,MAAM;AACnB,QAAI,YAAY,SAAS;AACvB,WAAM,OAAO,QAAQ;AACrB,WAAM,IAAI,MACR,mCAAmC,QAAQ,0CAC5C;;AAEH,cAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;;AAEnD,aAAU,QAAQ,QAAQ;AAC1B,UAAO;IACP;;CAGJ,MAAM,SACJ,QACA,UAC2B;EAC3B,MAAM,eAAe,KAAK,YAAY,SAAS;AAC/C,SAAO,KAAK,OAAO,YAAY,EAAE,cAAc,cAAc,EAAE,YAAY;AACzE,UAAO,OAAO,MAAM,SAAS,EAC3B,WAAW,cACZ,CAAC;IACF;;CAGJ,MAAM,OAAO,QAAyB,UAAoC;EACxE,MAAM,eAAe,KAAK,YAAY,SAAS;AAC/C,SAAO,KAAK,OAAO,UAAU,EAAE,cAAc,cAAc,EAAE,YAAY;AACvE,OAAI;AACF,UAAM,KAAK,SAAS,QAAQ,SAAS;AACrC,WAAO;YACA,OAAO;AACd,QAAI,iBAAiB,YAAY,MAAM,eAAe,IACpD,QAAO;AAET,UAAM;;IAER;;CAGJ,MAAM,SACJ,QACA,UACuB;EACvB,MAAM,eAAe,KAAK,YAAY,SAAS;AAC/C,SAAO,KAAK,OAAO,YAAY,EAAE,cAAc,cAAc,EAAE,YAAY;GACzE,MAAM,WAAW,MAAM,OAAO,MAAM,YAAY,EAC9C,WAAW,cACZ,CAAC;AACF,UAAO;IACL,eAAe,SAAS;IACxB,aAAa,oBACX,UACA,SAAS,iBACT,KAAK,mBACN;IACD,cAAc,SAAS;IACxB;IACD;;CAGJ,MAAM,OACJ,QACA,UACA,UACA,SACe;EACf,MAAM,eAAe,KAAK,YAAY,SAAS;AAE/C,SAAO,KAAK,OAAO,UAAU,EAAE,cAAc,cAAc,EAAE,YAAY;GACvE,MAAM,OAAO;GACb,MAAM,YAAY,SAAS,aAAa;GAOxC,MAAM,YAAY,OAAO,OAAO;AAChC,OAAI,CAAC,UACH,OAAM,IAAI,MACR,0FACD;GAEH,MAAM,OAAO,UAAU,WAAW,OAAO,GACrC,YACA,WAAW;GACf,MAAM,MAAM,IAAI,IAAI,oBAAoB,gBAAgB,KAAK;AAC7D,OAAI,aAAa,IAAI,aAAa,OAAO,UAAU,CAAC;GAEpD,MAAM,UAAU,IAAI,QAAQ,EAC1B,gBAAgB,4BACjB,CAAC;GACF,MAAM,eAA4B;IAAE,QAAQ;IAAO;IAAS;IAAM;AAElE,OAAI,gBAAgB,eAClB,cAAa,SAAS;YACb,gBAAgB,OACzB,SAAQ,IAAI,kBAAkB,OAAO,KAAK,OAAO,CAAC;YACzC,OAAO,SAAS,SACzB,SAAQ,IAAI,kBAAkB,OAAO,OAAO,WAAW,KAAK,CAAC,CAAC;AAGhE,SAAM,OAAO,OAAO,aAAa,QAAQ;GAEzC,MAAM,MAA
M,MAAM,MAAM,IAAI,UAAU,EAAE,aAAa;AAErD,OAAI,CAAC,IAAI,IAAI;IACX,MAAM,OAAO,MAAM,IAAI,MAAM;AAC7B,WAAO,MAAM,kBAAkB,IAAI,OAAO,KAAK,OAAO;AAEtD,UAAM,IAAI,SACR,kBAFkB,KAAK,SAAS,MAAM,GAAG,KAAK,MAAM,GAAG,IAAI,CAAC,KAAK,QAGjE,iBACA,IAAI,QACJ,QACA,EAAE,CACH;;IAEH;;CAGJ,MAAM,gBACJ,QACA,eACe;EACf,MAAM,eAAe,KAAK,YAAY,cAAc;AACpD,SAAO,KAAK,OACV,mBACA,EAAE,cAAc,cAAc,EAC9B,YAAY;AACV,SAAM,OAAO,MAAM,gBAAgB,EACjC,gBAAgB,cACjB,CAAC;IAEL;;CAGH,MAAM,OAAO,QAAyB,UAAiC;EACrE,MAAM,eAAe,KAAK,YAAY,SAAS;AAC/C,SAAO,KAAK,OAAO,UAAU,EAAE,cAAc,cAAc,EAAE,YAAY;AACvE,SAAM,OAAO,MAAM,OAAO,EACxB,WAAW,cACZ,CAAC;IACF;;CAGJ,MAAM,QACJ,QACA,UACA,SACsB;EACtB,MAAM,eAAe,KAAK,YAAY,SAAS;AAC/C,SAAO,KAAK,OAAO,WAAW,EAAE,cAAc,cAAc,EAAE,YAAY;GACxE,MAAM,OAAO,MAAM,KAAK,SAAS,QAAQ,SAAS;GAClD,MAAM,SAAS,kBAAkB,KAAK,YAAY;GAClD,MAAM,UAAU,KAAK,aAAa,WAAW,SAAS,IAAI;AAE1D,OAAI,CAAC,OACH,QAAO;IAAE,GAAG;IAAM,aAAa;IAAM,QAAQ;IAAO;IAAS;GAG/D,MAAM,WAAW,MAAM,OAAO,MAAM,SAAS,EAC3C,WAAW,cACZ,CAAC;AACF,OAAI,CAAC,SAAS,SACZ,QAAO;IAAE,GAAG;IAAM,aAAa;IAAI,QAAQ;IAAM,SAAS;IAAO;GAGnE,MAAM,SAAS,SAAS,SAAS,WAAW;GAC5C,MAAM,UAAU,IAAI,aAAa;GACjC,IAAI,UAAU;GACd,MAAM,WAAW,SAAS,YAAY;AAEtC,UAAO,QAAQ,SAAS,UAAU;IAChC,MAAM,EAAE,MAAM,UAAU,MAAM,OAAO,MAAM;AAC3C,QAAI,KAAM;AACV,eAAW,QAAQ,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;;AAEpD,cAAW,QAAQ,QAAQ;AAC3B,SAAM,OAAO,QAAQ;AAErB,OAAI,QAAQ,SAAS,SACnB,WAAU,QAAQ,MAAM,GAAG,SAAS;AAGtC,UAAO;IAAE,GAAG;IAAM,aAAa;IAAS,QAAQ;IAAM,SAAS;IAAO;IACtE"}
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
//#region src/connectors/files/defaults.ts
|
|
2
|
+
/**
 * Default maximum size for `read()` in bytes (10 MB).
 * Prevents loading very large files into memory as a string.
 * Use `download()` for files larger than this limit.
 */
const FILES_MAX_READ_SIZE = 10 * 1024 * 1024;
/**
 * Built-in map of lowercase file extensions (with leading dot) to MIME
 * content types. Consulted by `contentTypeFromPath` after any
 * `customContentTypes` overrides; frozen so the table cannot be mutated
 * at runtime.
 */
const EXTENSION_CONTENT_TYPES = Object.freeze({
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".webp": "image/webp",
  ".svg": "image/svg+xml",
  ".bmp": "image/bmp",
  ".ico": "image/vnd.microsoft.icon",
  ".html": "text/html",
  ".css": "text/css",
  ".js": "text/javascript",
  ".ts": "text/typescript",
  ".py": "text/x-python",
  ".txt": "text/plain",
  ".md": "text/markdown",
  ".csv": "text/csv",
  ".json": "application/json",
  ".jsonl": "application/x-ndjson",
  ".xml": "application/xml",
  ".yaml": "application/x-yaml",
  ".yml": "application/x-yaml",
  ".sql": "application/sql",
  ".pdf": "application/pdf",
  ".ipynb": "application/x-ipynb+json",
  ".parquet": "application/vnd.apache.parquet",
  ".zip": "application/zip",
  ".gz": "application/gzip"
});
|
|
37
|
+
// Substrings that mark a non-`text/*` MIME type as structured text.
const TEXT_KEYWORDS = [
  "json",
  "xml",
  "yaml",
  "sql",
  "javascript"
];
/**
 * Decide whether a MIME content type should be treated as text.
 *
 * Any `text/*` type qualifies, as does any type whose name contains one of
 * the structured-text keywords (JSON, XML, YAML, SQL, JavaScript).
 *
 * @param contentType - MIME content type string to check (may be undefined).
 * @returns `true` when the content type is text-based.
 */
function isTextContentType(contentType) {
  if (!contentType) return false;
  return contentType.startsWith("text/") || TEXT_KEYWORDS.some((kw) => contentType.includes(kw));
}
|
|
58
|
+
/**
 * MIME types that are safe to serve inline (i.e. browsers cannot execute
 * scripts from them). Any type **not** in this set should be forced to
 * download via `Content-Disposition: attachment` when served by the `/raw`
 * endpoint to prevent stored-XSS attacks.
 */
const SAFE_INLINE_CONTENT_TYPES = new Set([
  // Raster images cannot carry executable script.
  "image/png",
  "image/jpeg",
  "image/gif",
  "image/webp",
  "image/bmp",
  "image/vnd.microsoft.icon",
  // Plain-text formats render inert in browsers.
  "text/plain",
  "text/csv",
  "text/markdown",
  "application/json",
  "application/pdf"
]);
/**
 * Check whether a content type is safe to serve inline.
 *
 * @param contentType - MIME content type string.
 * @returns `true` if the type is in the safe-inline allowlist.
 */
function isSafeInlineContentType(contentType) {
  return SAFE_INLINE_CONTENT_TYPES.has(contentType);
}
|
|
86
|
+
/**
 * MIME types that must never be allowed in `customContentTypes` because
 * browsers can execute scripts from them. Allowing these in custom mappings
 * would bypass the `/raw` endpoint's forced-download protection for unsafe types.
 */
const DANGEROUS_CONTENT_TYPES = new Set([
  "text/html",
  "text/javascript",
  "application/javascript",
  "application/xhtml+xml",
  "image/svg+xml"
]);
/**
 * Validate that a `customContentTypes` map does not contain any MIME types
 * that would bypass XSS protections when served inline.
 *
 * The comparison is made against the media-type "essence" (the part before
 * any `;`-separated parameters), so `"text/html; charset=utf-8"` is rejected
 * just like `"text/html"` — browsers ignore parameters when deciding whether
 * a response is scriptable HTML.
 *
 * @param customTypes - Map of extension → MIME type overrides.
 * @throws {Error} if any mapping resolves to a dangerous MIME type.
 */
function validateCustomContentTypes(customTypes) {
  for (const [ext, mimeType] of Object.entries(customTypes)) {
    // FIX: strip MIME parameters before the blocklist check; the previous
    // exact-match comparison let "text/html; charset=utf-8" through.
    const essence = mimeType.toLowerCase().trim().split(";")[0].trim();
    if (DANGEROUS_CONTENT_TYPES.has(essence)) throw new Error(`Unsafe customContentTypes mapping: "${ext}" → "${mimeType}". MIME type "${essence}" can execute scripts in browsers and is not allowed. Remove this mapping or use a safe content type.`);
  }
}
|
|
111
|
+
/**
 * Resolve the MIME content type for a file path.
 *
 * Resolution order: caller-supplied `customTypes` override, then the built-in
 * extension table, then the `reported` type, then `application/octet-stream`.
 *
 * @param filePath - Path to the file (only the extension is inspected).
 * @param reported - Optional MIME type reported by the caller; used as fallback when the extension is unknown.
 * @param customTypes - Optional map of extension → MIME type overrides (e.g. `{ ".csv": "text/csv" }`).
 * @returns The resolved MIME content type string.
 */
function contentTypeFromPath(filePath, reported, customTypes) {
  // dot > 0 excludes dotfiles like ".gitignore" from extension matching.
  const dot = filePath.lastIndexOf(".");
  const extension = dot > 0 ? filePath.slice(dot).toLowerCase() : "";
  const override = customTypes?.[extension];
  if (override) return override;
  const builtin = EXTENSION_CONTENT_TYPES[extension];
  if (builtin) return builtin;
  return reported ?? "application/octet-stream";
}
|
|
128
|
+
|
|
129
|
+
//#endregion
|
|
130
|
+
export { DANGEROUS_CONTENT_TYPES, EXTENSION_CONTENT_TYPES, FILES_MAX_READ_SIZE, SAFE_INLINE_CONTENT_TYPES, contentTypeFromPath, isSafeInlineContentType, isTextContentType, validateCustomContentTypes };
|
|
131
|
+
//# sourceMappingURL=defaults.js.map
|