@databricks/appkit 0.4.0 → 0.5.0
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- package/CLAUDE.md +1 -0
- package/dist/analytics/analytics.d.ts +11 -1
- package/dist/analytics/analytics.d.ts.map +1 -1
- package/dist/analytics/analytics.js +12 -5
- package/dist/analytics/analytics.js.map +1 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/index.js +11 -10
- package/dist/cache/index.js.map +1 -1
- package/dist/cache/storage/memory.js +4 -2
- package/dist/cache/storage/memory.js.map +1 -1
- package/dist/cache/storage/persistent.js +7 -0
- package/dist/cache/storage/persistent.js.map +1 -1
- package/dist/connectors/lakebase/client.js +8 -4
- package/dist/connectors/lakebase/client.js.map +1 -1
- package/dist/connectors/sql-warehouse/client.js +5 -2
- package/dist/connectors/sql-warehouse/client.js.map +1 -1
- package/dist/context/execution-context.d.ts +17 -0
- package/dist/context/execution-context.d.ts.map +1 -0
- package/dist/context/service-context.d.ts +21 -0
- package/dist/context/service-context.d.ts.map +1 -0
- package/dist/context/service-context.js +2 -6
- package/dist/context/service-context.js.map +1 -1
- package/dist/context/user-context.d.ts +29 -0
- package/dist/context/user-context.d.ts.map +1 -0
- package/dist/core/appkit.d.ts.map +1 -1
- package/dist/core/appkit.js +35 -12
- package/dist/core/appkit.js.map +1 -1
- package/dist/errors/authentication.js +3 -6
- package/dist/errors/authentication.js.map +1 -1
- package/dist/errors/base.js +4 -0
- package/dist/errors/base.js.map +1 -1
- package/dist/errors/configuration.js +3 -6
- package/dist/errors/configuration.js.map +1 -1
- package/dist/errors/connection.js +3 -6
- package/dist/errors/connection.js.map +1 -1
- package/dist/errors/execution.js +3 -6
- package/dist/errors/execution.js.map +1 -1
- package/dist/errors/initialization.js +3 -6
- package/dist/errors/initialization.js.map +1 -1
- package/dist/errors/server.js +3 -6
- package/dist/errors/server.js.map +1 -1
- package/dist/errors/tunnel.js +3 -6
- package/dist/errors/tunnel.js.map +1 -1
- package/dist/errors/validation.js +3 -6
- package/dist/errors/validation.js.map +1 -1
- package/dist/index.d.ts +2 -1
- package/dist/index.js +4 -1
- package/dist/index.js.map +1 -1
- package/dist/logging/wide-event-emitter.js +1 -3
- package/dist/logging/wide-event-emitter.js.map +1 -1
- package/dist/logging/wide-event.js +2 -0
- package/dist/logging/wide-event.js.map +1 -1
- package/dist/plugin/dev-reader.js +3 -6
- package/dist/plugin/dev-reader.js.map +1 -1
- package/dist/plugin/interceptors/retry.js +3 -0
- package/dist/plugin/interceptors/retry.js.map +1 -1
- package/dist/plugin/plugin.d.ts +10 -24
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +23 -30
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/server/base-server.js +2 -0
- package/dist/server/base-server.js.map +1 -1
- package/dist/server/index.d.ts +23 -1
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +29 -13
- package/dist/server/index.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-controller.js +30 -15
- package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-manager.js +5 -1
- package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
- package/dist/server/static-server.js +1 -0
- package/dist/server/static-server.js.map +1 -1
- package/dist/server/vite-dev-server.js +1 -0
- package/dist/server/vite-dev-server.js.map +1 -1
- package/dist/shared/src/plugin.d.ts +25 -1
- package/dist/shared/src/plugin.d.ts.map +1 -1
- package/dist/stream/arrow-stream-processor.js +5 -10
- package/dist/stream/arrow-stream-processor.js.map +1 -1
- package/dist/stream/buffers.js +7 -0
- package/dist/stream/buffers.js.map +1 -1
- package/dist/stream/stream-manager.js +5 -0
- package/dist/stream/stream-manager.js.map +1 -1
- package/dist/stream/stream-registry.js +1 -0
- package/dist/stream/stream-registry.js.map +1 -1
- package/dist/stream/validator.js +2 -6
- package/dist/stream/validator.js.map +1 -1
- package/dist/telemetry/noop.js +1 -0
- package/dist/telemetry/noop.js.map +1 -1
- package/dist/telemetry/telemetry-manager.js +4 -6
- package/dist/telemetry/telemetry-manager.js.map +1 -1
- package/dist/telemetry/telemetry-provider.js +3 -0
- package/dist/telemetry/telemetry-provider.js.map +1 -1
- package/dist/type-generator/spinner.js +9 -11
- package/dist/type-generator/spinner.js.map +1 -1
- package/docs/docs/api/appkit/Class.AppKitError/index.html +5 -3
- package/docs/docs/api/appkit/Class.AppKitError.md +7 -0
- package/docs/docs/api/appkit/Class.AuthenticationError/index.html +3 -3
- package/docs/docs/api/appkit/Class.ConfigurationError/index.html +3 -3
- package/docs/docs/api/appkit/Class.ConnectionError/index.html +3 -3
- package/docs/docs/api/appkit/Class.ExecutionError/index.html +3 -3
- package/docs/docs/api/appkit/Class.InitializationError/index.html +3 -3
- package/docs/docs/api/appkit/Class.Plugin/index.html +28 -21
- package/docs/docs/api/appkit/Class.Plugin.md +34 -34
- package/docs/docs/api/appkit/Class.ServerError/index.html +3 -3
- package/docs/docs/api/appkit/Class.TunnelError/index.html +3 -3
- package/docs/docs/api/appkit/Class.ValidationError/index.html +3 -3
- package/docs/docs/api/appkit/Function.appKitTypesPlugin/index.html +3 -3
- package/docs/docs/api/appkit/Function.createApp/index.html +4 -4
- package/docs/docs/api/appkit/Function.getExecutionContext/index.html +26 -0
- package/docs/docs/api/appkit/Function.getExecutionContext.md +19 -0
- package/docs/docs/api/appkit/Function.isSQLTypeMarker/index.html +4 -4
- package/docs/docs/api/appkit/Interface.BasePluginConfig/index.html +3 -3
- package/docs/docs/api/appkit/Interface.CacheConfig/index.html +3 -3
- package/docs/docs/api/appkit/Interface.ITelemetry/index.html +3 -3
- package/docs/docs/api/appkit/Interface.StreamExecutionSettings/index.html +3 -3
- package/docs/docs/api/appkit/Interface.TelemetryConfig/index.html +3 -3
- package/docs/docs/api/appkit/TypeAlias.IAppRouter/index.html +3 -3
- package/docs/docs/api/appkit/Variable.sql/index.html +3 -3
- package/docs/docs/api/appkit/index.html +4 -4
- package/docs/docs/api/appkit-ui/data/AreaChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/BarChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/DataTable/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/DonutChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/HeatmapChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/LineChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/PieChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/RadarChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/data/ScatterChart/index.html +2 -2
- package/docs/docs/api/appkit-ui/index.html +2 -2
- package/docs/docs/api/appkit-ui/styling/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Accordion/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Alert/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/AlertDialog/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/AspectRatio/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Avatar/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Badge/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Breadcrumb/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Button/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ButtonGroup/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Calendar/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Card/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Carousel/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ChartContainer/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Checkbox/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Collapsible/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Command/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ContextMenu/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Dialog/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Drawer/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/DropdownMenu/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Empty/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Field/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/FormControl/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/HoverCard/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Input/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/InputGroup/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/InputOTP/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Item/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Kbd/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Label/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Menubar/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/NavigationMenu/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Pagination/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Popover/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Progress/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/RadioGroup/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ResizableHandle/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ScrollArea/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Select/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Separator/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Sheet/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Sidebar/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Skeleton/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Slider/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Spinner/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Switch/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Table/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Tabs/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Textarea/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Toaster/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Toggle/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/ToggleGroup/index.html +2 -2
- package/docs/docs/api/appkit-ui/ui/Tooltip/index.html +2 -2
- package/docs/docs/api/appkit.md +6 -5
- package/docs/docs/api/index.html +2 -2
- package/docs/docs/app-management/index.html +2 -2
- package/docs/docs/architecture/index.html +2 -2
- package/docs/docs/category/development/index.html +2 -2
- package/docs/docs/configuration/index.html +2 -2
- package/docs/docs/core-principles/index.html +2 -2
- package/docs/docs/development/index.html +2 -2
- package/docs/docs/development/llm-guide/index.html +2 -2
- package/docs/docs/development/local-development/index.html +2 -2
- package/docs/docs/development/project-setup/index.html +2 -2
- package/docs/docs/development/remote-bridge/index.html +2 -2
- package/docs/docs/development/type-generation/index.html +2 -2
- package/docs/docs/index.html +2 -2
- package/docs/docs/plugins/index.html +7 -3
- package/docs/docs/plugins.md +36 -11
- package/llms.txt +1 -0
- package/package.json +1 -1
package/CLAUDE.md
CHANGED
@@ -38,6 +38,7 @@ The CLI will display the documentation content directly in the terminal.
 - [Class: ValidationError](./docs/docs/api./docs/Class.ValidationError.md): Error thrown when input validation fails.
 - [Function: appKitTypesPlugin()](./docs/docs/api./docs/Function.appKitTypesPlugin.md): Vite plugin to generate types for AppKit queries.
 - [Function: createApp()](./docs/docs/api./docs/Function.createApp.md): Bootstraps AppKit with the provided configuration.
+- [Function: getExecutionContext()](./docs/docs/api./docs/Function.getExecutionContext.md): Get the current execution context.
 - [Function: isSQLTypeMarker()](./docs/docs/api./docs/Function.isSQLTypeMarker.md): Type guard to check if a value is a SQL type marker
 - [Interface: BasePluginConfig](./docs/docs/api./docs/Interface.BasePluginConfig.md): Base configuration interface for AppKit plugins
 - [Interface: CacheConfig](./docs/docs/api./docs/Interface.CacheConfig.md): Configuration for caching
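The new getExecutionContext() entry above points at a helper whose concrete signature lives in the new dist/context/execution-context.d.ts, which this diff does not render. The following is therefore only a hedged sketch, assuming the function is re-exported from the package root as the updated index.d.ts and the new docs page suggest:

```typescript
// Hypothetical usage sketch; the concrete return type is declared in
// dist/context/execution-context.d.ts and is not shown in this diff.
import { getExecutionContext } from "@databricks/appkit";

const ctx = getExecutionContext(); // execution context for the running request
console.log(ctx);
```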
package/dist/analytics/analytics.d.ts
CHANGED
@@ -8,7 +8,7 @@ import express from "express";
 //#region src/analytics/analytics.d.ts
 declare class AnalyticsPlugin extends Plugin {
   name: string;
-  envVars:
+  protected envVars: string[];
   protected static description: string;
   protected config: IAnalyticsConfig;
   private SQLClient;
@@ -46,6 +46,16 @@ declare class AnalyticsPlugin extends Plugin {
   */
  protected getArrowData(workspaceClient: WorkspaceClient, jobId: string, signal?: AbortSignal): Promise<ReturnType<typeof this.SQLClient.getArrowData>>;
  shutdown(): Promise<void>;
+  /**
+   * Returns the public exports for the analytics plugin.
+   * Note: `asUser()` is automatically added by AppKit.
+   */
+  exports(): {
+    /**
+     * Execute a SQL query using service principal credentials.
+     */
+    query: (query: string, parameters?: Record<string, SQLTypeMarker | null | undefined>, formatParameters?: Record<string, any>, signal?: AbortSignal) => Promise<any>;
+  };
 }
 /**
  * @internal
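In consumer terms, the exports() surface added above matches the @example already embedded in the plugin's own JSDoc (it ships in the source map further down): query() runs with service principal credentials, while the automatically added asUser() wrapper switches to the calling user's credentials. A short sketch, where `analytics` stands in for an initialized analytics plugin instance and `req` for the current Express request:

```typescript
// Names are illustrative, adapted from the @example in analytics.ts.

// Service principal execution
const rows = await analytics.query("SELECT * FROM table");

// User-scoped execution inside a route handler, via the auto-added asUser()
const userRows = await analytics.asUser(req).query("SELECT * FROM table");
```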
package/dist/analytics/analytics.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map; minified mappings not reproduced)
package/dist/analytics/analytics.js
CHANGED
@@ -13,13 +13,13 @@ import { QueryProcessor } from "./query.js";
 init_context();
 const logger = createLogger("analytics");
 var AnalyticsPlugin = class extends Plugin {
-
-
-
+  name = "analytics";
+  envVars = [];
+  static description = "Analytics plugin for data analysis";
+  SQLClient;
+  queryProcessor;
   constructor(config) {
     super(config);
-    this.name = "analytics";
-    this.envVars = [];
     this.config = config;
     this.queryProcessor = new QueryProcessor();
     this.SQLClient = new SQLWarehouseConnector({
@@ -165,6 +165,13 @@ var AnalyticsPlugin = class extends Plugin {
   async shutdown() {
     this.streamManager.abortAll();
   }
+  /**
+   * Returns the public exports for the analytics plugin.
+   * Note: `asUser()` is automatically added by AppKit.
+   */
+  exports() {
+    return { query: this.query };
+  }
 };
 /**
  * @internal
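For plugin authors, the hunk above shows the pattern: a plugin returns its public methods from exports(), and AppKit layers asUser() on top automatically. A minimal sketch of that pattern, assuming Plugin is importable from the package root (it has a docs page under api/appkit) and glossing over any further members the base class may require:

```typescript
import { Plugin } from "@databricks/appkit"; // assumption: Plugin is re-exported from the package root

class MyPlugin extends Plugin {
  name = "my-plugin";

  async query(statement: string): Promise<unknown> {
    // Illustrative only: a real plugin would run `statement` against the
    // current execution context (SQL warehouse, Lakebase, etc.).
    return { statement, rows: [] };
  }

  // Only what is returned here becomes the plugin's public surface;
  // per the new JSDoc, asUser() is added automatically by AppKit.
  exports() {
    return { query: this.query };
  }
}
```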
package/dist/analytics/analytics.js.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map; minified mappings and embedded sourcesContent not reproduced)
package/dist/appkit/package.js
CHANGED
package/dist/cache/index.js
CHANGED
@@ -33,17 +33,18 @@ const logger = createLogger("cache");
  * ```
  */
 var CacheManager = class CacheManager {
-  static
-
-
-  static
-
-
-
-
-
+  static MIN_CLEANUP_INTERVAL_MS = 6e4;
+  name = "cache-manager";
+  static instance = null;
+  static initPromise = null;
+  storage;
+  config;
+  inFlightRequests;
+  cleanupInProgress;
+  lastCleanupAttempt;
+  telemetry;
+  telemetryMetrics;
   constructor(storage, config) {
-    this.name = "cache-manager";
     this.storage = storage;
     this.config = config;
     this.inFlightRequests = /* @__PURE__ */ new Map();
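The change in this hunk, like the analytics one above, is mechanical: initializers that 0.4.0 assigned inside the constructor are now emitted as native class fields. A minimal before/after sketch of the pattern, with names shortened for illustration:

```typescript
// 0.4.0-style output: the field is declared, then assigned in the constructor.
class Before {
  name: string;
  constructor() {
    this.name = "cache-manager";
  }
}

// 0.5.0-style output: the initializer sits on the class field itself.
class After {
  name = "cache-manager";
}
```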
package/dist/cache/index.js.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map; minified mappings and embedded sourcesContent not reproduced)
a,UACd,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACZ,CAAC;AAEF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,SAAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACX,qBAAqB;MACtB,CAAC;AAEF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;AAEF,WAAO,OAAO,EAAE,aAAa;KAC3B,WAAW;KACX,WAAW;KACZ,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,WAAW;AACtB,WAAM,KAAK,IAAI,UAAU,QAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAO;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,SAAI,iBAAiB,YACnB,OAAM;AAER,WAAM,eAAe,gBACnB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;MACD,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,UAAO,MAAM,yCAAyC,MAAM;IAC5D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
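The sourcesContent embedded in the cache/index.js source map above includes CacheManager.getOrExecute, which parks concurrent callers on a shared promise via an inFlightRequests map. A minimal standalone sketch of that deduplication pattern (simplified names, no telemetry or storage; not the packaged implementation):

// Rough sketch of the in-flight promise deduplication used by
// CacheManager.getOrExecute in the embedded source above (simplified).
const inFlight = new Map<string, Promise<unknown>>();

async function dedupe<T>(key: string, fn: () => Promise<T>): Promise<T> {
  const pending = inFlight.get(key);
  if (pending) return pending as Promise<T>; // concurrent caller shares the same promise

  const promise = fn().finally(() => {
    // clear the slot once the call settles so later callers trigger a fresh execution
    inFlight.delete(key);
  });
  inFlight.set(key, promise);
  return promise;
}

Two overlapping calls with the same key run fn once; both awaiters see the same resolution or rejection, which matches the cache-hit/deduplication path recorded in the span attributes above.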
@@ -6,9 +6,11 @@ import { inMemoryStorageDefaults } from "./defaults.js";
  * to manage memory usage and ensure efficient cache operations.
  */
 var InMemoryStorage = class {
+  cache = /* @__PURE__ */ new Map();
+  accessOrder = /* @__PURE__ */ new Map();
+  accessCounter;
+  maxSize;
   constructor(config) {
-    this.cache = /* @__PURE__ */ new Map();
-    this.accessOrder = /* @__PURE__ */ new Map();
     this.cache = /* @__PURE__ */ new Map();
     this.accessOrder = /* @__PURE__ */ new Map();
     this.maxSize = config.maxSize ?? inMemoryStorageDefaults.maxSize;
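This hunk, like the persistent.js and Lakebase client hunks further down, hoists per-instance state from constructor assignments into class field declarations in the emitted output. As an illustrative TypeScript-flavored before/after (not the package's actual source, which follows in the source map below):

// Illustrative only: the shape of the change, using one field from the hunk above.
class BeforeShape {
  private cache: Map<string, unknown>;
  constructor() {
    this.cache = new Map(); // property exists only once the constructor assigns it
  }
}

class AfterShape {
  private cache: Map<string, unknown> = new Map(); // declared as a native class field
  constructor() {
    this.cache = new Map(); // the emitted constructor still re-assigns, as the hunk shows
  }
}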
@@ -1 +1 @@
-
{"version":3,"file":"memory.js","names":[],"sources":["../../../src/cache/storage/memory.ts"],"sourcesContent":["import type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { inMemoryStorageDefaults } from \"./defaults\";\n\n/**\n * In-memory cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n */\nexport class InMemoryStorage implements CacheStorage {\n private cache: Map<string, CacheEntry> = new Map();\n private accessOrder: Map<string, number> = new Map();\n private accessCounter: number;\n private maxSize: number;\n\n constructor(config: CacheConfig) {\n this.cache = new Map();\n this.accessOrder = new Map();\n this.maxSize = config.maxSize ?? inMemoryStorageDefaults.maxSize;\n this.accessCounter = 0;\n }\n\n /** Get an entry from the cache */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n const entry = this.cache.get(key);\n if (!entry) return null;\n\n this.accessOrder.set(key, ++this.accessCounter);\n return entry as CacheEntry<T>;\n }\n\n /** Set an entry in the cache */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n if (this.cache.size >= this.maxSize && !this.cache.has(key)) {\n this.evictLRU();\n }\n\n this.cache.set(key, entry);\n this.accessOrder.set(key, ++this.accessCounter);\n }\n\n /** Delete an entry from the cache */\n async delete(key: string): Promise<void> {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n }\n\n /** Clean in-memory cache */\n async clear(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Check if the cache has an entry */\n async has(key: string): Promise<boolean> {\n const entry = this.cache.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n return false;\n }\n\n return true;\n }\n\n /** Get the size of the cache */\n async size(): Promise<number> {\n return this.cache.size;\n }\n\n /** Check if the cache is persistent */\n isPersistent(): boolean {\n return false;\n }\n\n /** Check the health of the cache */\n async healthCheck(): Promise<boolean> {\n return true;\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Evict the least recently used entry (LRU) */\n private evictLRU(): void {\n let oldestKey: string | null = null;\n let oldestAccess = Infinity;\n\n for (const [key, accessTime] of this.accessOrder) {\n if (accessTime < oldestAccess) {\n oldestAccess = accessTime;\n oldestKey = key;\n }\n }\n\n if (oldestKey) {\n this.cache.delete(oldestKey);\n this.accessOrder.delete(oldestKey);\n }\n }\n}\n"],"mappings":";;;;;;;AAOA,IAAa,kBAAb,MAAqD;
+
{"version":3,"file":"memory.js","names":[],"sources":["../../../src/cache/storage/memory.ts"],"sourcesContent":["import type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { inMemoryStorageDefaults } from \"./defaults\";\n\n/**\n * In-memory cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n */\nexport class InMemoryStorage implements CacheStorage {\n private cache: Map<string, CacheEntry> = new Map();\n private accessOrder: Map<string, number> = new Map();\n private accessCounter: number;\n private maxSize: number;\n\n constructor(config: CacheConfig) {\n this.cache = new Map();\n this.accessOrder = new Map();\n this.maxSize = config.maxSize ?? inMemoryStorageDefaults.maxSize;\n this.accessCounter = 0;\n }\n\n /** Get an entry from the cache */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n const entry = this.cache.get(key);\n if (!entry) return null;\n\n this.accessOrder.set(key, ++this.accessCounter);\n return entry as CacheEntry<T>;\n }\n\n /** Set an entry in the cache */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n if (this.cache.size >= this.maxSize && !this.cache.has(key)) {\n this.evictLRU();\n }\n\n this.cache.set(key, entry);\n this.accessOrder.set(key, ++this.accessCounter);\n }\n\n /** Delete an entry from the cache */\n async delete(key: string): Promise<void> {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n }\n\n /** Clean in-memory cache */\n async clear(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Check if the cache has an entry */\n async has(key: string): Promise<boolean> {\n const entry = this.cache.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n return false;\n }\n\n return true;\n }\n\n /** Get the size of the cache */\n async size(): Promise<number> {\n return this.cache.size;\n }\n\n /** Check if the cache is persistent */\n isPersistent(): boolean {\n return false;\n }\n\n /** Check the health of the cache */\n async healthCheck(): Promise<boolean> {\n return true;\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Evict the least recently used entry (LRU) */\n private evictLRU(): void {\n let oldestKey: string | null = null;\n let oldestAccess = Infinity;\n\n for (const [key, accessTime] of this.accessOrder) {\n if (accessTime < oldestAccess) {\n oldestAccess = accessTime;\n oldestKey = key;\n }\n }\n\n if (oldestKey) {\n this.cache.delete(oldestKey);\n this.accessOrder.delete(oldestKey);\n }\n 
}\n}\n"],"mappings":";;;;;;;AAOA,IAAa,kBAAb,MAAqD;CACnD,AAAQ,wBAAiC,IAAI,KAAK;CAClD,AAAQ,8BAAmC,IAAI,KAAK;CACpD,AAAQ;CACR,AAAQ;CAER,YAAY,QAAqB;AAC/B,OAAK,wBAAQ,IAAI,KAAK;AACtB,OAAK,8BAAc,IAAI,KAAK;AAC5B,OAAK,UAAU,OAAO,WAAW,wBAAwB;AACzD,OAAK,gBAAgB;;;CAIvB,MAAM,IAAO,KAA4C;EACvD,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;AAC/C,SAAO;;;CAIT,MAAM,IAAO,KAAa,OAAqC;AAC7D,MAAI,KAAK,MAAM,QAAQ,KAAK,WAAW,CAAC,KAAK,MAAM,IAAI,IAAI,CACzD,MAAK,UAAU;AAGjB,OAAK,MAAM,IAAI,KAAK,MAAM;AAC1B,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;;;CAIjD,MAAM,OAAO,KAA4B;AACvC,OAAK,MAAM,OAAO,IAAI;AACtB,OAAK,YAAY,OAAO,IAAI;;;CAI9B,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,MAAM,IAAI,KAA+B;EACvC,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,QAAK,MAAM,OAAO,IAAI;AACtB,QAAK,YAAY,OAAO,IAAI;AAC5B,UAAO;;AAGT,SAAO;;;CAIT,MAAM,OAAwB;AAC5B,SAAO,KAAK,MAAM;;;CAIpB,eAAwB;AACtB,SAAO;;;CAIT,MAAM,cAAgC;AACpC,SAAO;;;CAIT,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,AAAQ,WAAiB;EACvB,IAAI,YAA2B;EAC/B,IAAI,eAAe;AAEnB,OAAK,MAAM,CAAC,KAAK,eAAe,KAAK,YACnC,KAAI,aAAa,cAAc;AAC7B,kBAAe;AACf,eAAY;;AAIhB,MAAI,WAAW;AACb,QAAK,MAAM,OAAO,UAAU;AAC5B,QAAK,YAAY,OAAO,UAAU"}
@@ -23,6 +23,13 @@ const logger = createLogger("cache:persistent");
  *
  */
 var PersistentStorage = class {
+  connector;
+  tableName;
+  maxBytes;
+  maxEntryBytes;
+  evictionBatchSize;
+  evictionCheckProbability;
+  initialized;
   constructor(config, connector) {
     this.connector = connector;
     this.maxBytes = config.maxBytes ?? lakebaseStorageDefaults.maxBytes;
@@ -1 +1 @@
-
{"version":3,"file":"persistent.js","names":[],"sources":["../../../src/cache/storage/persistent.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport type { LakebaseConnector } from \"../../connectors\";\nimport { InitializationError, ValidationError } from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { lakebaseStorageDefaults } from \"./defaults\";\n\nconst logger = createLogger(\"cache:persistent\");\n\n/**\n * Persistent cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n *\n * @example\n * const persistentStorage = new PersistentStorage(config, connector);\n * await persistentStorage.initialize();\n * await persistentStorage.get(\"my-key\");\n * await persistentStorage.set(\"my-key\", \"my-value\");\n * await persistentStorage.delete(\"my-key\");\n * await persistentStorage.clear();\n * await persistentStorage.has(\"my-key\");\n *\n */\nexport class PersistentStorage implements CacheStorage {\n private readonly connector: LakebaseConnector;\n private readonly tableName: string;\n private readonly maxBytes: number;\n private readonly maxEntryBytes: number;\n private readonly evictionBatchSize: number;\n private readonly evictionCheckProbability: number;\n private initialized: boolean;\n\n constructor(config: CacheConfig, connector: LakebaseConnector) {\n this.connector = connector;\n this.maxBytes = config.maxBytes ?? lakebaseStorageDefaults.maxBytes;\n this.maxEntryBytes =\n config.maxEntryBytes ?? lakebaseStorageDefaults.maxEntryBytes;\n this.evictionBatchSize = lakebaseStorageDefaults.evictionBatchSize;\n this.evictionCheckProbability =\n config.evictionCheckProbability ??\n lakebaseStorageDefaults.evictionCheckProbability;\n this.tableName = lakebaseStorageDefaults.tableName; // hardcoded, safe for now\n this.initialized = false;\n }\n\n /** Initialize the persistent storage and run migrations if necessary */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n try {\n await this.runMigrations();\n this.initialized = true;\n } catch (error) {\n logger.error(\"Error in persistent storage initialization: %O\", error);\n throw error;\n }\n }\n\n /**\n * Get a cached value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the cached value or null if not found\n */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n\n const result = await this.connector.query<{\n value: Buffer;\n expiry: string;\n }>(`SELECT value, expiry FROM ${this.tableName} WHERE key_hash = $1`, [\n keyHash,\n ]);\n\n if (result.rows.length === 0) return null;\n\n const entry = result.rows[0];\n\n // fire-and-forget update\n this.connector\n .query(\n `UPDATE ${this.tableName} SET last_accessed = NOW() WHERE key_hash = $1`,\n [keyHash],\n )\n .catch(() => {\n logger.debug(\"Error updating last_accessed time for key: %s\", key);\n });\n\n return {\n value: this.deserializeValue<T>(entry.value),\n expiry: Number(entry.expiry),\n };\n }\n\n /**\n * Set a value in the persistent storage\n * @param key - Cache key\n * @param entry - Cache entry\n * @returns Promise of the result\n */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n const keyBytes = Buffer.from(key, 
\"utf-8\");\n const valueBytes = this.serializeValue(entry.value);\n const byteSize = keyBytes.length + valueBytes.length;\n\n if (byteSize > this.maxEntryBytes) {\n throw ValidationError.invalidValue(\n \"cache entry size\",\n byteSize,\n `maximum ${this.maxEntryBytes} bytes`,\n );\n }\n\n // probabilistic eviction check\n if (Math.random() < this.evictionCheckProbability) {\n const totalBytes = await this.totalBytes();\n if (totalBytes + byteSize > this.maxBytes) {\n await this.evictBySize(byteSize);\n }\n }\n\n await this.connector.query(\n `INSERT INTO ${this.tableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)\n VALUES ($1, $2, $3, $4, $5, NOW(), NOW())\n ON CONFLICT (key_hash)\n DO UPDATE SET value = $3, byte_size = $4, expiry = $5, last_accessed = NOW()\n `,\n [keyHash, keyBytes, valueBytes, byteSize, entry.expiry],\n );\n }\n\n /**\n * Delete a value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n await this.connector.query(\n `DELETE FROM ${this.tableName} WHERE key_hash = $1`,\n [keyHash],\n );\n }\n\n /** Clear the persistent storage */\n async clear(): Promise<void> {\n await this.ensureInitialized();\n await this.connector.query(`TRUNCATE TABLE ${this.tableName}`);\n }\n\n /**\n * Check if a value exists in the persistent storage\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n\n const result = await this.connector.query<{ exists: boolean }>(\n `SELECT EXISTS(SELECT 1 FROM ${this.tableName} WHERE key_hash = $1) as exists`,\n [keyHash],\n );\n\n return result.rows[0]?.exists ?? false;\n }\n\n /**\n * Get the size of the persistent storage\n * @returns Promise of the size of the storage\n */\n async size(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.connector.query<{ count: string }>(\n `SELECT COUNT(*) as count FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Get the total number of bytes in the persistent storage */\n async totalBytes(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.connector.query<{ total: string }>(\n `SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.total ?? 
\"0\", 10);\n }\n\n /**\n * Check if the persistent storage is persistent\n * @returns true if the storage is persistent, false otherwise\n */\n isPersistent(): boolean {\n return true;\n }\n\n /**\n * Check if the persistent storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async healthCheck(): Promise<boolean> {\n try {\n return await this.connector.healthCheck();\n } catch {\n return false;\n }\n }\n\n /** Close the persistent storage */\n async close(): Promise<void> {\n await this.connector.close();\n }\n\n /**\n * Cleanup expired entries from the persistent storage\n * @returns Promise of the number of expired entries\n */\n async cleanupExpired(): Promise<number> {\n await this.ensureInitialized();\n const result = await this.connector.query<{ count: string }>(\n `WITH deleted as (DELETE FROM ${this.tableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`,\n [Date.now()],\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Evict entries from the persistent storage by size */\n private async evictBySize(requiredBytes: number): Promise<void> {\n const freedByExpiry = await this.cleanupExpired();\n if (freedByExpiry > 0) {\n const currentBytes = await this.totalBytes();\n if (currentBytes + requiredBytes <= this.maxBytes) {\n return;\n }\n }\n\n await this.connector.query(\n `DELETE FROM ${this.tableName} WHERE key_hash IN\n (SELECT key_hash FROM ${this.tableName} ORDER BY last_accessed ASC LIMIT $1)`,\n [this.evictionBatchSize],\n );\n }\n\n /** Ensure the persistent storage is initialized */\n private async ensureInitialized(): Promise<void> {\n if (!this.initialized) {\n await this.initialize();\n }\n }\n\n /** Generate a 64-bit hash for the cache key using SHA256 */\n private hashKey(key: string): bigint {\n if (!key) throw ValidationError.missingField(\"key\");\n const hash = createHash(\"sha256\").update(key).digest();\n return hash.readBigInt64BE(0);\n }\n\n /** Serialize a value to a buffer */\n private serializeValue<T>(value: T): Buffer {\n return Buffer.from(JSON.stringify(value), \"utf-8\");\n }\n\n /** Deserialize a value from a buffer */\n private deserializeValue<T>(buffer: Buffer): T {\n return JSON.parse(buffer.toString(\"utf-8\")) as T;\n }\n\n /** Run migrations for the persistent storage */\n private async runMigrations(): Promise<void> {\n try {\n await this.connector.query(`\n CREATE TABLE IF NOT EXISTS ${this.tableName} (\n id BIGSERIAL PRIMARY KEY,\n key_hash BIGINT NOT NULL,\n key BYTEA NOT NULL,\n value BYTEA NOT NULL,\n byte_size INTEGER NOT NULL,\n expiry BIGINT NOT NULL,\n created_at TIMESTAMP NOT NULL DEFAULT NOW(),\n last_accessed TIMESTAMP NOT NULL DEFAULT NOW()\n )\n `);\n\n // unique index on key_hash for fast lookups\n await this.connector.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS idx_${this.tableName}_key_hash ON ${this.tableName} (key_hash);`,\n );\n\n // index on expiry for cleanup queries\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_expiry ON ${this.tableName} (expiry); `,\n );\n\n // index on last_accessed for LRU eviction\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_last_accessed ON ${this.tableName} (last_accessed); `,\n );\n\n // index on byte_size for monitoring\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.tableName} (byte_size); `,\n );\n } catch (error) {\n logger.error(\n \"Error in running migrations for persistent 
storage: %O\",\n error,\n );\n throw InitializationError.migrationFailed(error as Error);\n }\n }\n}\n"],"mappings":";;;;;;;;aAGoE;AAIpE,MAAM,SAAS,aAAa,mBAAmB;;;;;;;;;;;;;;;AAgB/C,IAAa,oBAAb,MAAuD;CASrD,YAAY,QAAqB,WAA8B;AAC7D,OAAK,YAAY;AACjB,OAAK,WAAW,OAAO,YAAY,wBAAwB;AAC3D,OAAK,gBACH,OAAO,iBAAiB,wBAAwB;AAClD,OAAK,oBAAoB,wBAAwB;AACjD,OAAK,2BACH,OAAO,4BACP,wBAAwB;AAC1B,OAAK,YAAY,wBAAwB;AACzC,OAAK,cAAc;;;CAIrB,MAAM,aAA4B;AAChC,MAAI,KAAK,YAAa;AAEtB,MAAI;AACF,SAAM,KAAK,eAAe;AAC1B,QAAK,cAAc;WACZ,OAAO;AACd,UAAO,MAAM,kDAAkD,MAAM;AACrE,SAAM;;;;;;;;CASV,MAAM,IAAO,KAA4C;AACvD,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EAEjC,MAAM,SAAS,MAAM,KAAK,UAAU,MAGjC,6BAA6B,KAAK,UAAU,uBAAuB,CACpE,QACD,CAAC;AAEF,MAAI,OAAO,KAAK,WAAW,EAAG,QAAO;EAErC,MAAM,QAAQ,OAAO,KAAK;AAG1B,OAAK,UACF,MACC,UAAU,KAAK,UAAU,iDACzB,CAAC,QAAQ,CACV,CACA,YAAY;AACX,UAAO,MAAM,iDAAiD,IAAI;IAClE;AAEJ,SAAO;GACL,OAAO,KAAK,iBAAoB,MAAM,MAAM;GAC5C,QAAQ,OAAO,MAAM,OAAO;GAC7B;;;;;;;;CASH,MAAM,IAAO,KAAa,OAAqC;AAC7D,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EACjC,MAAM,WAAW,OAAO,KAAK,KAAK,QAAQ;EAC1C,MAAM,aAAa,KAAK,eAAe,MAAM,MAAM;EACnD,MAAM,WAAW,SAAS,SAAS,WAAW;AAE9C,MAAI,WAAW,KAAK,cAClB,OAAM,gBAAgB,aACpB,oBACA,UACA,WAAW,KAAK,cAAc,QAC/B;AAIH,MAAI,KAAK,QAAQ,GAAG,KAAK,0BAEvB;OADmB,MAAM,KAAK,YAAY,GACzB,WAAW,KAAK,SAC/B,OAAM,KAAK,YAAY,SAAS;;AAIpC,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU;;;;SAK9B;GAAC;GAAS;GAAU;GAAY;GAAU,MAAM;GAAO,CACxD;;;;;;;CAQH,MAAM,OAAO,KAA4B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AACjC,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU,uBAC9B,CAAC,QAAQ,CACV;;;CAIH,MAAM,QAAuB;AAC3B,QAAM,KAAK,mBAAmB;AAC9B,QAAM,KAAK,UAAU,MAAM,kBAAkB,KAAK,YAAY;;;;;;;CAQhE,MAAM,IAAI,KAA+B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AAOjC,UALe,MAAM,KAAK,UAAU,MAClC,+BAA+B,KAAK,UAAU,kCAC9C,CAAC,QAAQ,CACV,EAEa,KAAK,IAAI,UAAU;;;;;;CAOnC,MAAM,OAAwB;AAC5B,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,iCAAiC,KAAK,YACvC;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAM,aAA8B;AAClC,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,oDAAoD,KAAK,YAC1D;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;;;;CAOnD,eAAwB;AACtB,SAAO;;;;;;CAOT,MAAM,cAAgC;AACpC,MAAI;AACF,UAAO,MAAM,KAAK,UAAU,aAAa;UACnC;AACN,UAAO;;;;CAKX,MAAM,QAAuB;AAC3B,QAAM,KAAK,UAAU,OAAO;;;;;;CAO9B,MAAM,iBAAkC;AACtC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,gCAAgC,KAAK,UAAU,wEAC/C,CAAC,KAAK,KAAK,CAAC,CACb;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAc,YAAY,eAAsC;AAE9D,MADsB,MAAM,KAAK,gBAAgB,GAC7B,GAElB;OADqB,MAAM,KAAK,YAAY,GACzB,iBAAiB,KAAK,SACvC;;AAIJ,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU;8BACN,KAAK,UAAU,wCACvC,CAAC,KAAK,kBAAkB,CACzB;;;CAIH,MAAc,oBAAmC;AAC/C,MAAI,CAAC,KAAK,YACR,OAAM,KAAK,YAAY;;;CAK3B,AAAQ,QAAQ,KAAqB;AACnC,MAAI,CAAC,IAAK,OAAM,gBAAgB,aAAa,MAAM;AAEnD,SADa,WAAW,SAAS,CAAC,OAAO,IAAI,CAAC,QAAQ,CAC1C,eAAe,EAAE;;;CAI/B,AAAQ,eAAkB,OAAkB;AAC1C,SAAO,OAAO,KAAK,KAAK,UAAU,MAAM,EAAE,QAAQ;;;CAIpD,AAAQ,iBAAoB,QAAmB;AAC7C,SAAO,KAAK,MAAM,OAAO,SAAS,QAAQ,CAAC;;;CAI7C,MAAc,gBAA+B;AAC3C,MAAI;AACF,SAAM,KAAK,UAAU,MAAM;yCACQ,KAAK,UAAU;;;;;;;;;;cAU1C;AAGR,SAAM,KAAK,UAAU,MACnB,yCAAyC,KAAK,UAAU,eAAe,KAAK,UAAU,cACvF;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,aAAa,KAAK,UAAU,aAC9E;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,oBAAoB,KAAK,UAAU,oBACrF;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,gBAAgB,KAAK,UAAU,gBACjF;WACM,OAAO;AACd,UAAO,MACL,0DACA,MACD;AACD,SAAM,oBAAoB,gBAAgB,MAAe"}
+
{"version":3,"file":"persistent.js","names":[],"sources":["../../../src/cache/storage/persistent.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport type { LakebaseConnector } from \"../../connectors\";\nimport { InitializationError, ValidationError } from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { lakebaseStorageDefaults } from \"./defaults\";\n\nconst logger = createLogger(\"cache:persistent\");\n\n/**\n * Persistent cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n *\n * @example\n * const persistentStorage = new PersistentStorage(config, connector);\n * await persistentStorage.initialize();\n * await persistentStorage.get(\"my-key\");\n * await persistentStorage.set(\"my-key\", \"my-value\");\n * await persistentStorage.delete(\"my-key\");\n * await persistentStorage.clear();\n * await persistentStorage.has(\"my-key\");\n *\n */\nexport class PersistentStorage implements CacheStorage {\n private readonly connector: LakebaseConnector;\n private readonly tableName: string;\n private readonly maxBytes: number;\n private readonly maxEntryBytes: number;\n private readonly evictionBatchSize: number;\n private readonly evictionCheckProbability: number;\n private initialized: boolean;\n\n constructor(config: CacheConfig, connector: LakebaseConnector) {\n this.connector = connector;\n this.maxBytes = config.maxBytes ?? lakebaseStorageDefaults.maxBytes;\n this.maxEntryBytes =\n config.maxEntryBytes ?? lakebaseStorageDefaults.maxEntryBytes;\n this.evictionBatchSize = lakebaseStorageDefaults.evictionBatchSize;\n this.evictionCheckProbability =\n config.evictionCheckProbability ??\n lakebaseStorageDefaults.evictionCheckProbability;\n this.tableName = lakebaseStorageDefaults.tableName; // hardcoded, safe for now\n this.initialized = false;\n }\n\n /** Initialize the persistent storage and run migrations if necessary */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n try {\n await this.runMigrations();\n this.initialized = true;\n } catch (error) {\n logger.error(\"Error in persistent storage initialization: %O\", error);\n throw error;\n }\n }\n\n /**\n * Get a cached value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the cached value or null if not found\n */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n\n const result = await this.connector.query<{\n value: Buffer;\n expiry: string;\n }>(`SELECT value, expiry FROM ${this.tableName} WHERE key_hash = $1`, [\n keyHash,\n ]);\n\n if (result.rows.length === 0) return null;\n\n const entry = result.rows[0];\n\n // fire-and-forget update\n this.connector\n .query(\n `UPDATE ${this.tableName} SET last_accessed = NOW() WHERE key_hash = $1`,\n [keyHash],\n )\n .catch(() => {\n logger.debug(\"Error updating last_accessed time for key: %s\", key);\n });\n\n return {\n value: this.deserializeValue<T>(entry.value),\n expiry: Number(entry.expiry),\n };\n }\n\n /**\n * Set a value in the persistent storage\n * @param key - Cache key\n * @param entry - Cache entry\n * @returns Promise of the result\n */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n const keyBytes = Buffer.from(key, 
\"utf-8\");\n const valueBytes = this.serializeValue(entry.value);\n const byteSize = keyBytes.length + valueBytes.length;\n\n if (byteSize > this.maxEntryBytes) {\n throw ValidationError.invalidValue(\n \"cache entry size\",\n byteSize,\n `maximum ${this.maxEntryBytes} bytes`,\n );\n }\n\n // probabilistic eviction check\n if (Math.random() < this.evictionCheckProbability) {\n const totalBytes = await this.totalBytes();\n if (totalBytes + byteSize > this.maxBytes) {\n await this.evictBySize(byteSize);\n }\n }\n\n await this.connector.query(\n `INSERT INTO ${this.tableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)\n VALUES ($1, $2, $3, $4, $5, NOW(), NOW())\n ON CONFLICT (key_hash)\n DO UPDATE SET value = $3, byte_size = $4, expiry = $5, last_accessed = NOW()\n `,\n [keyHash, keyBytes, valueBytes, byteSize, entry.expiry],\n );\n }\n\n /**\n * Delete a value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n await this.connector.query(\n `DELETE FROM ${this.tableName} WHERE key_hash = $1`,\n [keyHash],\n );\n }\n\n /** Clear the persistent storage */\n async clear(): Promise<void> {\n await this.ensureInitialized();\n await this.connector.query(`TRUNCATE TABLE ${this.tableName}`);\n }\n\n /**\n * Check if a value exists in the persistent storage\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n\n const result = await this.connector.query<{ exists: boolean }>(\n `SELECT EXISTS(SELECT 1 FROM ${this.tableName} WHERE key_hash = $1) as exists`,\n [keyHash],\n );\n\n return result.rows[0]?.exists ?? false;\n }\n\n /**\n * Get the size of the persistent storage\n * @returns Promise of the size of the storage\n */\n async size(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.connector.query<{ count: string }>(\n `SELECT COUNT(*) as count FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Get the total number of bytes in the persistent storage */\n async totalBytes(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.connector.query<{ total: string }>(\n `SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.total ?? 
\"0\", 10);\n }\n\n /**\n * Check if the persistent storage is persistent\n * @returns true if the storage is persistent, false otherwise\n */\n isPersistent(): boolean {\n return true;\n }\n\n /**\n * Check if the persistent storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async healthCheck(): Promise<boolean> {\n try {\n return await this.connector.healthCheck();\n } catch {\n return false;\n }\n }\n\n /** Close the persistent storage */\n async close(): Promise<void> {\n await this.connector.close();\n }\n\n /**\n * Cleanup expired entries from the persistent storage\n * @returns Promise of the number of expired entries\n */\n async cleanupExpired(): Promise<number> {\n await this.ensureInitialized();\n const result = await this.connector.query<{ count: string }>(\n `WITH deleted as (DELETE FROM ${this.tableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`,\n [Date.now()],\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Evict entries from the persistent storage by size */\n private async evictBySize(requiredBytes: number): Promise<void> {\n const freedByExpiry = await this.cleanupExpired();\n if (freedByExpiry > 0) {\n const currentBytes = await this.totalBytes();\n if (currentBytes + requiredBytes <= this.maxBytes) {\n return;\n }\n }\n\n await this.connector.query(\n `DELETE FROM ${this.tableName} WHERE key_hash IN\n (SELECT key_hash FROM ${this.tableName} ORDER BY last_accessed ASC LIMIT $1)`,\n [this.evictionBatchSize],\n );\n }\n\n /** Ensure the persistent storage is initialized */\n private async ensureInitialized(): Promise<void> {\n if (!this.initialized) {\n await this.initialize();\n }\n }\n\n /** Generate a 64-bit hash for the cache key using SHA256 */\n private hashKey(key: string): bigint {\n if (!key) throw ValidationError.missingField(\"key\");\n const hash = createHash(\"sha256\").update(key).digest();\n return hash.readBigInt64BE(0);\n }\n\n /** Serialize a value to a buffer */\n private serializeValue<T>(value: T): Buffer {\n return Buffer.from(JSON.stringify(value), \"utf-8\");\n }\n\n /** Deserialize a value from a buffer */\n private deserializeValue<T>(buffer: Buffer): T {\n return JSON.parse(buffer.toString(\"utf-8\")) as T;\n }\n\n /** Run migrations for the persistent storage */\n private async runMigrations(): Promise<void> {\n try {\n await this.connector.query(`\n CREATE TABLE IF NOT EXISTS ${this.tableName} (\n id BIGSERIAL PRIMARY KEY,\n key_hash BIGINT NOT NULL,\n key BYTEA NOT NULL,\n value BYTEA NOT NULL,\n byte_size INTEGER NOT NULL,\n expiry BIGINT NOT NULL,\n created_at TIMESTAMP NOT NULL DEFAULT NOW(),\n last_accessed TIMESTAMP NOT NULL DEFAULT NOW()\n )\n `);\n\n // unique index on key_hash for fast lookups\n await this.connector.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS idx_${this.tableName}_key_hash ON ${this.tableName} (key_hash);`,\n );\n\n // index on expiry for cleanup queries\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_expiry ON ${this.tableName} (expiry); `,\n );\n\n // index on last_accessed for LRU eviction\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_last_accessed ON ${this.tableName} (last_accessed); `,\n );\n\n // index on byte_size for monitoring\n await this.connector.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.tableName} (byte_size); `,\n );\n } catch (error) {\n logger.error(\n \"Error in running migrations for persistent 
storage: %O\",\n error,\n );\n throw InitializationError.migrationFailed(error as Error);\n }\n }\n}\n"],"mappings":";;;;;;;;aAGoE;AAIpE,MAAM,SAAS,aAAa,mBAAmB;;;;;;;;;;;;;;;AAgB/C,IAAa,oBAAb,MAAuD;CACrD,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAQ;CAER,YAAY,QAAqB,WAA8B;AAC7D,OAAK,YAAY;AACjB,OAAK,WAAW,OAAO,YAAY,wBAAwB;AAC3D,OAAK,gBACH,OAAO,iBAAiB,wBAAwB;AAClD,OAAK,oBAAoB,wBAAwB;AACjD,OAAK,2BACH,OAAO,4BACP,wBAAwB;AAC1B,OAAK,YAAY,wBAAwB;AACzC,OAAK,cAAc;;;CAIrB,MAAM,aAA4B;AAChC,MAAI,KAAK,YAAa;AAEtB,MAAI;AACF,SAAM,KAAK,eAAe;AAC1B,QAAK,cAAc;WACZ,OAAO;AACd,UAAO,MAAM,kDAAkD,MAAM;AACrE,SAAM;;;;;;;;CASV,MAAM,IAAO,KAA4C;AACvD,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EAEjC,MAAM,SAAS,MAAM,KAAK,UAAU,MAGjC,6BAA6B,KAAK,UAAU,uBAAuB,CACpE,QACD,CAAC;AAEF,MAAI,OAAO,KAAK,WAAW,EAAG,QAAO;EAErC,MAAM,QAAQ,OAAO,KAAK;AAG1B,OAAK,UACF,MACC,UAAU,KAAK,UAAU,iDACzB,CAAC,QAAQ,CACV,CACA,YAAY;AACX,UAAO,MAAM,iDAAiD,IAAI;IAClE;AAEJ,SAAO;GACL,OAAO,KAAK,iBAAoB,MAAM,MAAM;GAC5C,QAAQ,OAAO,MAAM,OAAO;GAC7B;;;;;;;;CASH,MAAM,IAAO,KAAa,OAAqC;AAC7D,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EACjC,MAAM,WAAW,OAAO,KAAK,KAAK,QAAQ;EAC1C,MAAM,aAAa,KAAK,eAAe,MAAM,MAAM;EACnD,MAAM,WAAW,SAAS,SAAS,WAAW;AAE9C,MAAI,WAAW,KAAK,cAClB,OAAM,gBAAgB,aACpB,oBACA,UACA,WAAW,KAAK,cAAc,QAC/B;AAIH,MAAI,KAAK,QAAQ,GAAG,KAAK,0BAEvB;OADmB,MAAM,KAAK,YAAY,GACzB,WAAW,KAAK,SAC/B,OAAM,KAAK,YAAY,SAAS;;AAIpC,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU;;;;SAK9B;GAAC;GAAS;GAAU;GAAY;GAAU,MAAM;GAAO,CACxD;;;;;;;CAQH,MAAM,OAAO,KAA4B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AACjC,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU,uBAC9B,CAAC,QAAQ,CACV;;;CAIH,MAAM,QAAuB;AAC3B,QAAM,KAAK,mBAAmB;AAC9B,QAAM,KAAK,UAAU,MAAM,kBAAkB,KAAK,YAAY;;;;;;;CAQhE,MAAM,IAAI,KAA+B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AAOjC,UALe,MAAM,KAAK,UAAU,MAClC,+BAA+B,KAAK,UAAU,kCAC9C,CAAC,QAAQ,CACV,EAEa,KAAK,IAAI,UAAU;;;;;;CAOnC,MAAM,OAAwB;AAC5B,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,iCAAiC,KAAK,YACvC;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAM,aAA8B;AAClC,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,oDAAoD,KAAK,YAC1D;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;;;;CAOnD,eAAwB;AACtB,SAAO;;;;;;CAOT,MAAM,cAAgC;AACpC,MAAI;AACF,UAAO,MAAM,KAAK,UAAU,aAAa;UACnC;AACN,UAAO;;;;CAKX,MAAM,QAAuB;AAC3B,QAAM,KAAK,UAAU,OAAO;;;;;;CAO9B,MAAM,iBAAkC;AACtC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,SAAS,MAAM,KAAK,UAAU,MAClC,gCAAgC,KAAK,UAAU,wEAC/C,CAAC,KAAK,KAAK,CAAC,CACb;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAc,YAAY,eAAsC;AAE9D,MADsB,MAAM,KAAK,gBAAgB,GAC7B,GAElB;OADqB,MAAM,KAAK,YAAY,GACzB,iBAAiB,KAAK,SACvC;;AAIJ,QAAM,KAAK,UAAU,MACnB,eAAe,KAAK,UAAU;8BACN,KAAK,UAAU,wCACvC,CAAC,KAAK,kBAAkB,CACzB;;;CAIH,MAAc,oBAAmC;AAC/C,MAAI,CAAC,KAAK,YACR,OAAM,KAAK,YAAY;;;CAK3B,AAAQ,QAAQ,KAAqB;AACnC,MAAI,CAAC,IAAK,OAAM,gBAAgB,aAAa,MAAM;AAEnD,SADa,WAAW,SAAS,CAAC,OAAO,IAAI,CAAC,QAAQ,CAC1C,eAAe,EAAE;;;CAI/B,AAAQ,eAAkB,OAAkB;AAC1C,SAAO,OAAO,KAAK,KAAK,UAAU,MAAM,EAAE,QAAQ;;;CAIpD,AAAQ,iBAAoB,QAAmB;AAC7C,SAAO,KAAK,MAAM,OAAO,SAAS,QAAQ,CAAC;;;CAI7C,MAAc,gBAA+B;AAC3C,MAAI;AACF,SAAM,KAAK,UAAU,MAAM;yCACQ,KAAK,UAAU;;;;;;;;;;cAU1C;AAGR,SAAM,KAAK,UAAU,MACnB,yCAAyC,KAAK,UAAU,eAAe,KAAK,UAAU,cACvF;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,aAAa,KAAK,UAAU,aAC9E;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,oBAAoB,KAAK,UAAU,oBACrF;AAGD,SAAM,KAAK,UAAU,MACnB,kCAAkC,KAAK,UAAU,gBAAgB,KAAK,UAAU,gBACjF;WACM,OAAO;AACd,UAAO,MACL,0DACA,MACD;AACD,SAAM,oBAAoB,gBAAgB,MAAe"}
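The persistent.ts source above guards writes with a probabilistic size check: only a fraction of set calls (governed by evictionCheckProbability) pay for the SUM(byte_size) query, and eviction first reclaims expired rows before falling back to least-recently-accessed rows. A schematic of that decision flow (connector and SQL details elided; the helper names are stand-ins, not the package's API):

// Schematic of the probabilistic eviction check around PersistentStorage.set,
// based on the embedded persistent.ts above. The async helpers are hypothetical.
interface SizedStore {
  totalBytes(): Promise<number>;
  cleanupExpired(): Promise<number>; // returns the number of expired rows removed
  evictLeastRecentlyAccessed(batch: number): Promise<void>;
}

async function maybeEvict(
  store: SizedStore,
  incomingBytes: number,
  maxBytes: number,
  evictionCheckProbability: number,
  evictionBatchSize: number,
): Promise<void> {
  // most writes skip the (comparatively expensive) size check entirely
  if (Math.random() >= evictionCheckProbability) return;

  if ((await store.totalBytes()) + incomingBytes <= maxBytes) return;

  // prefer reclaiming expired rows; only fall back to LRU eviction if that wasn't enough
  if ((await store.cleanupExpired()) > 0) {
    if ((await store.totalBytes()) + incomingBytes <= maxBytes) return;
  }
  await store.evictLeastRecentlyAccessed(evictionBatchSize);
}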
@@ -34,11 +34,15 @@ const logger = createLogger("connectors:lakebase");
  * ```
  */
 var LakebaseConnector = class {
+  name = "lakebase";
+  CACHE_BUFFER_MS = 120 * 1e3;
+  config;
+  connectionConfig;
+  pool = null;
+  credentials = null;
+  telemetry;
+  telemetryMetrics;
   constructor(userConfig) {
-    this.name = "lakebase";
-    this.CACHE_BUFFER_MS = 120 * 1e3;
-    this.pool = null;
-    this.credentials = null;
     this.config = deepMerge(lakebaseDefaults, userConfig);
     this.connectionConfig = this.parseConnectionConfig();
     this.telemetry = TelemetryManager.getProvider(this.name, this.config.telemetry);
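The CACHE_BUFFER_MS = 120 * 1e3 field added above backs the credential caching in getCredentials (visible in the client.ts source embedded in the map that follows): cached OAuth credentials are reused until two minutes before expiry, after which a fresh username/token pair is fetched. A reduced sketch of that refresh-with-buffer logic (the fetchers are hypothetical placeholders, not the Databricks SDK calls):

// Reduced sketch of credential caching with an expiry buffer, modeled on
// LakebaseConnector.getCredentials in the embedded client.ts.
interface Credentials {
  username: string;
  password: string;
  expiresAt: number; // epoch ms
}

const CACHE_BUFFER_MS = 2 * 60 * 1000; // refresh two minutes before expiry

let cached: Credentials | null = null;

async function getCredentials(
  fetchUsername: () => Promise<string>,
  fetchToken: () => Promise<{ token: string; expiresAt: number }>,
): Promise<Credentials> {
  const now = Date.now();
  if (cached && now < cached.expiresAt - CACHE_BUFFER_MS) {
    return cached; // still comfortably inside the token's lifetime
  }
  const username = await fetchUsername();
  const { token, expiresAt } = await fetchToken();
  cached = { username, password: token, expiresAt };
  return cached;
}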
@@ -1 +1 @@
-
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"@/telemetry\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseDefaults } from \"./defaults\";\nimport type {\n LakebaseConfig,\n LakebaseConnectionConfig,\n LakebaseCredentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseConnector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseConnector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseConnector {\n private readonly name: string = \"lakebase\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseConfig;\n private readonly connectionConfig: LakebaseConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseCredentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseConfig>) {\n this.config = deepMerge(lakebaseDefaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.query\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 
0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check 
if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in 
credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;aAiBsB;AAUtB,MAAM,SAAS,aAAa,sBAAsB;;;;;;;;;;;;;;;;AAiBlD,IAAa,oBAAb,MAA+B;CAe7B,YAAY,YAAsC;cAdlB;yBACG,MAAS;cAGb;qBACmB;AAUhD,OAAK,SAAS,UAAU,kBAAkB,WAAW;AACrD,OAAK,mBAAmB,KAAK,uBAAuB;AAEpD,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,YAAY,KAAK,UACd,UAAU,CACV,cAAc,wBAAwB;IACrC,aAAa;IACb,MAAM;IACP,CAAC;GACJ,eAAe,KAAK,UACjB,UAAU,CACV,gBAAgB,2BAA2B;IAC1C,aAAa;IACb,MAAM;IACP,CAAC;GACL;AAGD,MAAI,KAAK,OAAO,cAAc,EAC5B,OAAM,gBAAgB,aACpB,eACA,KAAK,OAAO,aACZ,aACD;;;;;;;;;;;CAaL,MAAM,MACJ,KACA,QACA,aAAqB,GACO;EAC5B,MAAM,YAAY,KAAK,KAAK;AAE5B,SAAO,KAAK,UAAU,gBACpB,kBACA,EACE,YAAY;GACV,aAAa;GACb,gBAAgB,IAAI,UAAU,GAAG,IAAI;GACrC,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAC/C,SAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AAEd,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,WAAM,KAAK,mBAAmB;KAE9B,MAAM,SAAS,OADC,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAClD,UAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,YAAO;;AAIT,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,wBAAwB;AACtC,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,MAAS,KAAK,QAAQ,aAAa,EAAE;;AAGzD,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,YAAY,MAAe;aACzC;IACR,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;;;;;;;;;;;;;;CAiBH,MAAM,YACJ,UACA,aAAqB,GACT;EACZ,MAAM,YAAY,KAAK,KAAK;AAC5B,SAAO,KAAK,UAAU,gBACpB,wBACA,EACE,YAAY;GACV,aAAa;GACb,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;GAEd,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,SAAS;AACnC,OAAI;AACF,UAAM,OAAO,MAAM,QAAQ;IAC3B,MAAM,SAAS,MAAM,SAAS,OAAO;AACrC,UAAM,OAAO,MAAM,SAAS;AAC5B,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,QAAI;AACF,WAAM,OAAO,MAAM,WAAW;YACxB;AAER,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,YAAO,SAAS;AAChB,WAAM,KAAK,mBAAmB;KAE9B,MAAM,cAAc,OADJ,MAAM,KAAK,SAAS,EACF,SAAS;AAC3C,SAAI;AACF,YAAM,OAAO,MAAM,QAAQ;MAC3B,MAAM,SAAS,MAAM,SAAS,YAAY;AAC1C,YAAM,OAAO,MAAM,SAAS;AAC5B,WAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,aAAO;cACA,YAAY;AACnB,UAAI;AACF,aAAM,YAAY,MAAM,WAAW;cAC7B;AACR,YAAM;eACE;AACR,kBAAY,SAAS;;;AAKzB,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,0BAA0B;AACxC,YAAO,SAAS;AAChB,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,YAAe,UAAU,aAAa,EAAE;;AAE5D,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,kBAAkB,MAAe;aAC/C;AACR,WAAO,SAAS;IAChB,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;CAIH,MAAM,cAAgC;AACpC,SAAO,KAAK,UAAU,gBACpB,wBACA,EAAE,EACF,OAAO,SAAS;AACd,OAAI;IAIF,MAAM,WAHS,MAAM,KAAK,MACxB,qBACD,EACsB,KAAK,IAAI,WAAW;AAC3C,SAAK,aAAa,cAAc,QAAQ;AACxC,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;WACD;AACN,SAAK,aAAa,cAAc,MAAM;AACtC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAO;aACC;AACR,SAAK,KAAK;;IAGf;;;CAIH,MAAM,QAAuB;AAC3B,MAAI,KAAK,MAAM;AACb,SAAM,KAAK,KAAK,KAAK,CAAC,OAAO,UAAmB;AAC9C,WAAO,MAAM,qCAAqC,MAAM;KACxD;AACF,QAAK,OAAO;;AAEd,OAAK,cAAc;;;CAIrB,WAAiB;AACf,UAAQ,GAAG,iBAAiB,KAAK,OAAO,CAAC;AACzC,UAAQ,GAAG,gBAAgB,KAAK,OAAO,CAAC;AACxC,OAAK,OAAO;;;CAId,AAAQ,qBAAsC;AAC5C,MAAI,KAAK,OAAO,gBACd,QAAO,KAAK,OAAO;AAGrB,MAAI;GACF,MAAM,EAAE,oBAAoB;GAC5
B,MAAM,SAAS,WAAW;AAG1B,QAAK,OAAO,kBAAkB;AAC9B,UAAO;WACA,QAAQ;AACf,SAAM,gBAAgB,kBACpB,+BACA,mEACD;;;;CAKL,MAAc,UAA4B;AACxC,MAAI,CAAC,KAAK,iBACR,OAAM,mBAAmB,kBACvB,YACA,kGACD;AAGH,MAAI,CAAC,KAAK,MAAM;GACd,MAAM,QAAQ,MAAM,KAAK,gBAAgB;AACzC,QAAK,OAAO,KAAK,WAAW,MAAM;;AAEpC,SAAO,KAAK;;;CAId,AAAQ,WAAW,aAGP;EACV,MAAM,EAAE,MAAM,UAAU,MAAM,YAAY,KAAK;EAE/C,MAAM,OAAO,IAAI,GAAG,KAAK;GACvB;GACA;GACA;GACA,MAAM,YAAY;GAClB,UAAU,YAAY;GACtB,KAAK,KAAK,OAAO;GACjB,mBAAmB,KAAK,OAAO;GAC/B,yBAAyB,KAAK,OAAO;GACrC,KAAK,YAAY,YAAY,EAAE,oBAAoB,MAAM,GAAG;GAC7D,CAAC;AAEF,OAAK,GAAG,UAAU,UAAqC;AACrD,UAAO,MACL,wCACA,MAAM,SACN,MAAM,KACP;IACD;AAEF,SAAO;;;CAIT,MAAc,iBAGX;EACD,MAAM,MAAM,KAAK,KAAK;AAGtB,MACE,KAAK,eACL,MAAM,KAAK,YAAY,YAAY,KAAK,gBAExC,QAAO,KAAK;EAId,MAAM,WAAW,MAAM,KAAK,eAAe;EAC3C,MAAM,EAAE,OAAO,cAAc,MAAM,KAAK,eAAe;AAEvD,OAAK,cAAc;GACjB;GACA,UAAU;GACV;GACD;AAED,SAAO;GAAE;GAAU,UAAU;GAAO;;;CAItC,MAAc,oBAAmC;AAE/C,OAAK,cAAc;AAEnB,MAAI,KAAK,MAAM;GACb,MAAM,UAAU,KAAK;AACrB,QAAK,OAAO;AACZ,WAAQ,KAAK,CAAC,OAAO,UAAmB;AACtC,WAAO,MACL,yDACA,MACD;KACD;;;;CAKN,MAAc,gBAAiC;EAE7C,MAAM,OAAO,MADW,KAAK,oBAAoB,CACd,YAAY,IAAI;AACnD,MAAI,CAAC,KAAK,SACR,OAAM,oBAAoB,kBAAkB;AAE9C,SAAO,KAAK;;;CAId,MAAc,gBAA+D;EAG3E,MAAM,YAAY,IAAI,UADP,IAAI,OAAO,EAAE,MADJ,KAAK,oBAAoB,CACC,OAAO,MAAM,CAAC,CACzB;AAEvC,MAAI,CAAC,KAAK,iBAAiB,QACzB,OAAM,mBAAmB,iBAAiB,oBAAoB;EAGhE,MAAM,cAAc,MAAM,UAAU,QAAQ;GAC1C,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,SAAS;IACP,gBAAgB,CAAC,KAAK,iBAAiB,QAAQ;IAC/C,YAAY,YAAY;IACzB;GACF,CAAC;AAEF,MAAI,CAAC,KAAK,oBAAoB,YAAY,CACxC,OAAM,oBAAoB,kBACxB,KAAK,iBAAiB,QACvB;EAGH,MAAM,YAAY,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS;AAEjE,SAAO;GAAE,OAAO,YAAY;GAAO;GAAW;;;CAIhD,AAAQ,YAAY,OAAyB;AAC3C,SACE,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAAc,SAAS;;;CAK5B,AAAQ,iBAAiB,OAAyB;AAChD,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,UAAU,OAC7D,QAAO;EAGT,MAAM,OAAQ,MAAc;AAC5B,SACE,SAAS,gBACT,SAAS,kBACT,SAAS,eACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS;;;CAKb,AAAQ,oBACN,OACqD;AACrD,MAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;EAGT,MAAM,cAAc;AACpB,SACE,WAAW,eACX,OAAO,YAAY,UAAU,YAC7B,qBAAqB,eACrB,OAAO,YAAY,oBAAoB,YACvC,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS,GAAG,KAAK,KAAK;;;CAKhE,AAAQ,wBAAkD;AACxD,MAAI,KAAK,OAAO,iBACd,QAAO,KAAK,sBAAsB,KAAK,OAAO,iBAAiB;AAIjE,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,YAAY,KAAK,OAAO,QAC1D,QAAO;GACL,MAAM,KAAK,OAAO;GAClB,UAAU,KAAK,OAAO;GACtB,MAAM,KAAK,OAAO,QAAQ;GAC1B,SAAS,KAAK,OAAO,WAAW;GAChC,SAAS,KAAK,OAAO;GACtB;EAIH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,aAAa,QAAQ,IAAI;EAC/B,MAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,CAAC,UAAU,CAAC,cAAc,CAAC,UAC7B,OAAM,mBAAmB,kBACvB,YACA,mHACD;EAEH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,OAAO,SAAS,SAAS,QAAQ,GAAG,GAAG;AAE7C,MAAI,OAAO,MAAM,KAAK,CACpB,OAAM,gBAAgB,aAAa,QAAQ,QAAQ,WAAW;AAOhE,SAAO;GACL,MAAM;GACN,UAAU;GACV;GACA,SARgB,QAAQ,IAAI,aAEuB;GAOnD,SAAS;GACV;;CAGH,AAAQ,sBACN,kBAC0B;EAC1B,MAAM,MAAM,IAAI,IAAI,iBAAiB;EACrC,MAAM,UAAU,IAAI,aAAa,IAAI,UAAU;AAC/C,MAAI,CAAC,QACH,OAAM,mBAAmB,uBAAuB,UAAU;AAG5D,SAAO;GACL,MAAM,IAAI;GACV,UAAU,IAAI,SAAS,MAAM,EAAE;GAC/B,MAAM,IAAI,OAAO,SAAS,IAAI,MAAM,GAAG,GAAG;GAC1C,SACG,IAAI,aAAa,IAAI,UAAU,IAChC;GACO;GACV"}
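The tail of the embedded source above shows how the lakebase connector resolves its connection settings: an explicit `connectionString` wins, then explicit `host`/`database`/`appName` config, then the `PGHOST`/`PGDATABASE`/`PGAPPNAME` environment variables, with `PGPORT` defaulting to 5432 and `PGSSLMODE` to `require`. Below is a minimal sketch of the connection-string branch only, assuming a hypothetical standalone `parseLakebaseUrl` helper rather than the class's private method.

```typescript
// Hedged sketch of the connection-string parsing visible in the embedded source.
// parseLakebaseUrl and LakebaseUrlParts are illustrative names, not part of @databricks/appkit.
type SslMode = "require" | "disable" | "prefer";

interface LakebaseUrlParts {
  host: string;
  database: string;
  port: number;
  sslMode: SslMode;
  appName: string;
}

function parseLakebaseUrl(connectionString: string): LakebaseUrlParts {
  const url = new URL(connectionString);

  // appName is mandatory: the connector uses it as the database instance name
  // when requesting credentials.
  const appName = url.searchParams.get("appName");
  if (!appName) {
    throw new Error("Lakebase connection string must include an appName query parameter");
  }

  return {
    host: url.hostname,
    database: url.pathname.slice(1), // strip the leading "/"
    port: url.port ? parseInt(url.port, 10) : 5432,
    sslMode: (url.searchParams.get("sslmode") as SslMode | null) ?? "require",
    appName,
  };
}

// Example:
// parseLakebaseUrl("postgresql://instance.example.com/appdb?appName=my-app&sslmode=require")
// => { host: "instance.example.com", database: "appdb", port: 5432, sslMode: "require", appName: "my-app" }
```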
{"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"@/telemetry\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseDefaults } from \"./defaults\";\nimport type {\n LakebaseConfig,\n LakebaseConnectionConfig,\n LakebaseCredentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseConnector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseConnector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseConnector {\n private readonly name: string = \"lakebase\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseConfig;\n private readonly connectionConfig: LakebaseConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseCredentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseConfig>) {\n this.config = deepMerge(lakebaseDefaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.query\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 
0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check 
if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? 
{ rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in 
credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n }\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;aAiBsB;AAUtB,MAAM,SAAS,aAAa,sBAAsB;;;;;;;;;;;;;;;;AAiBlD,IAAa,oBAAb,MAA+B;CAC7B,AAAiB,OAAe;CAChC,AAAiB,kBAAkB,MAAS;CAC5C,AAAiB;CACjB,AAAiB;CACjB,AAAQ,OAAuB;CAC/B,AAAQ,cAA0C;CAGlD,AAAiB;CACjB,AAAiB;CAKjB,YAAY,YAAsC;AAChD,OAAK,SAAS,UAAU,kBAAkB,WAAW;AACrD,OAAK,mBAAmB,KAAK,uBAAuB;AAEpD,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,YAAY,KAAK,UACd,UAAU,CACV,cAAc,wBAAwB;IACrC,aAAa;IACb,MAAM;IACP,CAAC;GACJ,eAAe,KAAK,UACjB,UAAU,CACV,gBAAgB,2BAA2B;IAC1C,aAAa;IACb,MAAM;IACP,CAAC;GACL;AAGD,MAAI,KAAK,OAAO,cAAc,EAC5B,OAAM,gBAAgB,aACpB,eACA,KAAK,OAAO,aACZ,aACD;;;;;;;;;;;CAaL,MAAM,MACJ,KACA,QACA,aAAqB,GACO;EAC5B,MAAM,YAAY,KAAK,KAAK;AAE5B,SAAO,KAAK,UAAU,gBACpB,kBACA,EACE,YAAY;GACV,aAAa;GACb,gBAAgB,IAAI,UAAU,GAAG,IAAI;GACrC,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAC/C,SAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AAEd,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,WAAM,KAAK,mBAAmB;KAE9B,MAAM,SAAS,OADC,MAAM,KAAK,SAAS,EACP,MAAS,KAAK,OAAO;AAClD,UAAK,aAAa,oBAAoB,OAAO,YAAY,EAAE;AAC3D,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,YAAO;;AAIT,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,wBAAwB;AACtC,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,MAAS,KAAK,QAAQ,aAAa,EAAE;;AAGzD,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,YAAY,MAAe;aACzC;IACR,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;;;;;;;;;;;;;;CAiBH,MAAM,YACJ,UACA,aAAqB,GACT;EACZ,MAAM,YAAY,KAAK,KAAK;AAC5B,SAAO,KAAK,UAAU,gBACpB,wBACA,EACE,YAAY;GACV,aAAa;GACb,kBAAkB;GACnB,EACF,EACD,OAAO,SAAS;GAEd,MAAM,SAAS,OADF,MAAM,KAAK,SAAS,EACP,SAAS;AACnC,OAAI;AACF,UAAM,OAAO,MAAM,QAAQ;IAC3B,MAAM,SAAS,MAAM,SAAS,OAAO;AACrC,UAAM,OAAO,MAAM,SAAS;AAC5B,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,QAAI;AACF,WAAM,OAAO,MAAM,WAAW;YACxB;AAER,QAAI,KAAK,YAAY,MAAM,EAAE;AAC3B,UAAK,SAAS,mBAAmB;AACjC,YAAO,SAAS;AAChB,WAAM,KAAK,mBAAmB;KAE9B,MAAM,cAAc,OADJ,MAAM,KAAK,SAAS,EACF,SAAS;AAC3C,SAAI;AACF,YAAM,OAAO,MAAM,QAAQ;MAC3B,MAAM,SAAS,MAAM,SAAS,YAAY;AAC1C,YAAM,OAAO,MAAM,SAAS;AAC5B,WAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,aAAO;cACA,YAAY;AACnB,UAAI;AACF,aAAM,YAAY,MAAM,WAAW;cAC7B;AACR,YAAM;eACE;AACR,kBAAY,SAAS;;;AAKzB,QAAI,KAAK,iBAAiB,MAAM,IAAI,aAAa,GAAG;AAClD,UAAK,SAAS,0BAA0B;AACxC,YAAO,SAAS;AAChB,WAAM,IAAI,SAAS,YAAY,WAAW,SAAS,IAAI,CAAC;AACxD,YAAO,MAAM,KAAK,YAAe,UAAU,aAAa,EAAE;;AAE5D,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAE9C,QAAI,iBAAiB,YACnB,OAAM;AAER,UAAM,gBAAgB,kBAAkB,MAAe;aAC/C;AACR,WAAO,SAAS;IAChB,MAAM,WAAW,KAAK,KAAK,GAAG;AAC9B,SAAK,iBAAiB,WAAW,IAAI,EAAE;AACvC,SAAK,iBAAiB,cAAc,OAAO,SAAS;AACpD,SAAK,KAAK;;IAGf;;;CAIH,MAAM,cAAgC;AACpC,SAAO,KAAK,UAAU,gBACpB,wBACA,EAAE,EACF,OAAO,SAAS;AACd,OAAI;IAIF,MAAM,WAHS,MAAM,KAAK,MACxB,qBACD,EACsB,KAAK,IAAI,WAAW;AAC3C,SAAK,aAAa,cAAc,QAAQ;AACxC,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;WACD;AACN,SAAK,aAAa,cAAc,MAAM;AACtC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAO;aACC;AACR,SAAK,KAAK;;IAGf;;;CAIH,MAAM,QAAuB;AAC3B,MAAI,KAAK,MAAM;AACb,SAAM,KAAK,KAAK,KAAK,CAAC,OAAO,UAAmB;AAC9C,WAAO,MAAM,qCAAqC,MAAM;KACxD;AACF,QAAK,OAAO;;AAEd,OAAK,cAAc;;;CAIrB,WAAiB;AACf,UAAQ,GAAG,iBAAiB,KAAK,OAAO,CAAC;AACzC,UAAQ,GAAG,gBAAgB,KAAK,OAAO,CAAC;AACxC,OAAK,OAAO;;;CA
Id,AAAQ,qBAAsC;AAC5C,MAAI,KAAK,OAAO,gBACd,QAAO,KAAK,OAAO;AAGrB,MAAI;GACF,MAAM,EAAE,oBAAoB;GAC5B,MAAM,SAAS,WAAW;AAG1B,QAAK,OAAO,kBAAkB;AAC9B,UAAO;WACA,QAAQ;AACf,SAAM,gBAAgB,kBACpB,+BACA,mEACD;;;;CAKL,MAAc,UAA4B;AACxC,MAAI,CAAC,KAAK,iBACR,OAAM,mBAAmB,kBACvB,YACA,kGACD;AAGH,MAAI,CAAC,KAAK,MAAM;GACd,MAAM,QAAQ,MAAM,KAAK,gBAAgB;AACzC,QAAK,OAAO,KAAK,WAAW,MAAM;;AAEpC,SAAO,KAAK;;;CAId,AAAQ,WAAW,aAGP;EACV,MAAM,EAAE,MAAM,UAAU,MAAM,YAAY,KAAK;EAE/C,MAAM,OAAO,IAAI,GAAG,KAAK;GACvB;GACA;GACA;GACA,MAAM,YAAY;GAClB,UAAU,YAAY;GACtB,KAAK,KAAK,OAAO;GACjB,mBAAmB,KAAK,OAAO;GAC/B,yBAAyB,KAAK,OAAO;GACrC,KAAK,YAAY,YAAY,EAAE,oBAAoB,MAAM,GAAG;GAC7D,CAAC;AAEF,OAAK,GAAG,UAAU,UAAqC;AACrD,UAAO,MACL,wCACA,MAAM,SACN,MAAM,KACP;IACD;AAEF,SAAO;;;CAIT,MAAc,iBAGX;EACD,MAAM,MAAM,KAAK,KAAK;AAGtB,MACE,KAAK,eACL,MAAM,KAAK,YAAY,YAAY,KAAK,gBAExC,QAAO,KAAK;EAId,MAAM,WAAW,MAAM,KAAK,eAAe;EAC3C,MAAM,EAAE,OAAO,cAAc,MAAM,KAAK,eAAe;AAEvD,OAAK,cAAc;GACjB;GACA,UAAU;GACV;GACD;AAED,SAAO;GAAE;GAAU,UAAU;GAAO;;;CAItC,MAAc,oBAAmC;AAE/C,OAAK,cAAc;AAEnB,MAAI,KAAK,MAAM;GACb,MAAM,UAAU,KAAK;AACrB,QAAK,OAAO;AACZ,WAAQ,KAAK,CAAC,OAAO,UAAmB;AACtC,WAAO,MACL,yDACA,MACD;KACD;;;;CAKN,MAAc,gBAAiC;EAE7C,MAAM,OAAO,MADW,KAAK,oBAAoB,CACd,YAAY,IAAI;AACnD,MAAI,CAAC,KAAK,SACR,OAAM,oBAAoB,kBAAkB;AAE9C,SAAO,KAAK;;;CAId,MAAc,gBAA+D;EAG3E,MAAM,YAAY,IAAI,UADP,IAAI,OAAO,EAAE,MADJ,KAAK,oBAAoB,CACC,OAAO,MAAM,CAAC,CACzB;AAEvC,MAAI,CAAC,KAAK,iBAAiB,QACzB,OAAM,mBAAmB,iBAAiB,oBAAoB;EAGhE,MAAM,cAAc,MAAM,UAAU,QAAQ;GAC1C,MAAM;GACN,QAAQ;GACR,SAAS,IAAI,SAAS;GACtB,KAAK;GACL,SAAS;IACP,gBAAgB,CAAC,KAAK,iBAAiB,QAAQ;IAC/C,YAAY,YAAY;IACzB;GACF,CAAC;AAEF,MAAI,CAAC,KAAK,oBAAoB,YAAY,CACxC,OAAM,oBAAoB,kBACxB,KAAK,iBAAiB,QACvB;EAGH,MAAM,YAAY,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS;AAEjE,SAAO;GAAE,OAAO,YAAY;GAAO;GAAW;;;CAIhD,AAAQ,YAAY,OAAyB;AAC3C,SACE,OAAO,UAAU,YACjB,UAAU,QACV,UAAU,SACT,MAAc,SAAS;;;CAK5B,AAAQ,iBAAiB,OAAyB;AAChD,MAAI,OAAO,UAAU,YAAY,UAAU,QAAQ,EAAE,UAAU,OAC7D,QAAO;EAGT,MAAM,OAAQ,MAAc;AAC5B,SACE,SAAS,gBACT,SAAS,kBACT,SAAS,eACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS,WACT,SAAS;;;CAKb,AAAQ,oBACN,OACqD;AACrD,MAAI,OAAO,UAAU,YAAY,UAAU,KACzC,QAAO;EAGT,MAAM,cAAc;AACpB,SACE,WAAW,eACX,OAAO,YAAY,UAAU,YAC7B,qBAAqB,eACrB,OAAO,YAAY,oBAAoB,YACvC,IAAI,KAAK,YAAY,gBAAgB,CAAC,SAAS,GAAG,KAAK,KAAK;;;CAKhE,AAAQ,wBAAkD;AACxD,MAAI,KAAK,OAAO,iBACd,QAAO,KAAK,sBAAsB,KAAK,OAAO,iBAAiB;AAIjE,MAAI,KAAK,OAAO,QAAQ,KAAK,OAAO,YAAY,KAAK,OAAO,QAC1D,QAAO;GACL,MAAM,KAAK,OAAO;GAClB,UAAU,KAAK,OAAO;GACtB,MAAM,KAAK,OAAO,QAAQ;GAC1B,SAAS,KAAK,OAAO,WAAW;GAChC,SAAS,KAAK,OAAO;GACtB;EAIH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,aAAa,QAAQ,IAAI;EAC/B,MAAM,YAAY,QAAQ,IAAI;AAC9B,MAAI,CAAC,UAAU,CAAC,cAAc,CAAC,UAC7B,OAAM,mBAAmB,kBACvB,YACA,mHACD;EAEH,MAAM,SAAS,QAAQ,IAAI;EAC3B,MAAM,OAAO,SAAS,SAAS,QAAQ,GAAG,GAAG;AAE7C,MAAI,OAAO,MAAM,KAAK,CACpB,OAAM,gBAAgB,aAAa,QAAQ,QAAQ,WAAW;AAOhE,SAAO;GACL,MAAM;GACN,UAAU;GACV;GACA,SARgB,QAAQ,IAAI,aAEuB;GAOnD,SAAS;GACV;;CAGH,AAAQ,sBACN,kBAC0B;EAC1B,MAAM,MAAM,IAAI,IAAI,iBAAiB;EACrC,MAAM,UAAU,IAAI,aAAa,IAAI,UAAU;AAC/C,MAAI,CAAC,QACH,OAAM,mBAAmB,uBAAuB,UAAU;AAG5D,SAAO;GACL,MAAM,IAAI;GACV,UAAU,IAAI,SAAS,MAAM,EAAE;GAC/B,MAAM,IAAI,OAAO,SAAS,IAAI,MAAM,GAAG,GAAG;GAC1C,SACG,IAAI,aAAa,IAAI,UAAU,IAChC;GACO;GACV"}
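Both map blobs embed the same lakebase TypeScript; the change appears to be confined to the mappings, which now cover class-field declarations in the rebuilt bundle. That embedded source also documents the connector's query retry: a Postgres authentication failure (code 28P01) triggers a credential rotation and one immediate retry, while a recognized transient connection error triggers a single retry after a 100 ms pause. A minimal sketch of that retry shape follows; `queryWithRetry`, `runQuery`, and `rotateCredentials` are stand-in names, not the connector's API.

```typescript
// Sketch of the retry behavior described above, not the connector's actual implementation.
// runQuery and rotateCredentials are hypothetical callbacks supplied by the caller.
type PgLikeError = { code?: string };

const TRANSIENT_CODES = new Set([
  "ECONNRESET",
  "ECONNREFUSED",
  "ETIMEDOUT",
  "57P01", // admin_shutdown
  "57P03", // cannot_connect_now
  "08006", // connection_failure
  "08003", // connection_does_not_exist
  "08000", // connection_exception
]);

async function queryWithRetry<T>(
  runQuery: () => Promise<T>,
  rotateCredentials: () => Promise<void>,
  retryCount = 0,
): Promise<T> {
  try {
    return await runQuery();
  } catch (error) {
    const code =
      typeof error === "object" && error !== null && "code" in error
        ? (error as PgLikeError).code
        : undefined;

    if (code === "28P01") {
      // invalid_password: the cached OAuth token has likely expired; rotate and retry once
      await rotateCredentials();
      return runQuery();
    }

    if (code !== undefined && TRANSIENT_CODES.has(code) && retryCount < 1) {
      // transient connection failure: brief backoff, then a single retry
      await new Promise((resolve) => setTimeout(resolve, 100));
      return queryWithRetry(runQuery, rotateCredentials, retryCount + 1);
    }

    throw error;
  }
}
```

In the package itself this logic lives inline in the connector's query method, wrapped in a telemetry span; the sketch only isolates the error-classification and retry shape.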
package/dist/connectors/sql-warehouse/client.js
@@ -14,9 +14,12 @@ import { Context } from "@databricks/sdk-experimental";
 init_errors();
 const logger = createLogger("connectors:sql-warehouse");
 var SQLWarehouseConnector = class {
+  name = "sql-warehouse";
+  config;
+  _arrowProcessor = null;
+  telemetry;
+  telemetryMetrics;
   constructor(config) {
-    this.name = "sql-warehouse";
-    this._arrowProcessor = null;
     this.config = config;
     this.telemetry = TelemetryManager.getProvider(this.name, this.config.telemetry);
     this.telemetryMetrics = {
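The hunk above shows the sql-warehouse connector's JS change in this release: state that 0.4.0 assigned inside the constructor is now emitted as class-field declarations, the same build-level refactor reflected in the lakebase map remapping. Below is a minimal before/after sketch of that pattern; `ConnectorAssignedInConstructor` and `ConnectorWithClassFields` are illustrative names, not the package's classes.

```typescript
// Illustrative only: contrasts the constructor-assignment and class-field shapes
// that the compiled output switches between; not code from @databricks/appkit.

// 0.4.0-style output: fields exist only because the constructor assigns them.
class ConnectorAssignedInConstructor {
  private name: string;
  private arrowProcessor: unknown;

  constructor() {
    this.name = "sql-warehouse";
    this.arrowProcessor = null;
  }
}

// 0.5.0-style output: fields are declared (and initialized) on the class body,
// so per-instance state is visible without reading the constructor.
class ConnectorWithClassFields {
  private readonly name = "sql-warehouse";
  private arrowProcessor: unknown = null;
}
```

For plain data fields like these the two forms behave the same at runtime; the field-declaration form simply matches modern ES2022 class-fields output.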