@databricks/appkit 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +37 -12
- package/CLAUDE.md +37 -12
- package/NOTICE.md +2 -0
- package/dist/analytics/analytics.d.ts +33 -8
- package/dist/analytics/analytics.d.ts.map +1 -1
- package/dist/analytics/analytics.js +51 -24
- package/dist/analytics/analytics.js.map +1 -1
- package/dist/analytics/defaults.js.map +1 -1
- package/dist/analytics/query.js +4 -4
- package/dist/analytics/query.js.map +1 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/defaults.js.map +1 -1
- package/dist/cache/index.d.ts +1 -0
- package/dist/cache/index.d.ts.map +1 -1
- package/dist/cache/index.js +1 -2
- package/dist/cache/index.js.map +1 -1
- package/dist/cache/storage/memory.js.map +1 -1
- package/dist/connectors/lakebase/client.js +7 -8
- package/dist/connectors/lakebase/client.js.map +1 -1
- package/dist/connectors/lakebase/defaults.js.map +1 -1
- package/dist/connectors/sql-warehouse/client.js.map +1 -1
- package/dist/connectors/sql-warehouse/defaults.js.map +1 -1
- package/dist/context/execution-context.js +75 -0
- package/dist/context/execution-context.js.map +1 -0
- package/dist/context/index.js +27 -0
- package/dist/context/index.js.map +1 -0
- package/dist/context/service-context.js +149 -0
- package/dist/context/service-context.js.map +1 -0
- package/dist/context/user-context.js +15 -0
- package/dist/context/user-context.js.map +1 -0
- package/dist/core/appkit.d.ts +3 -0
- package/dist/core/appkit.d.ts.map +1 -1
- package/dist/core/appkit.js +7 -0
- package/dist/core/appkit.js.map +1 -1
- package/dist/index.d.ts +5 -6
- package/dist/index.js +3 -10
- package/dist/plugin/interceptors/cache.js.map +1 -1
- package/dist/plugin/interceptors/retry.js.map +1 -1
- package/dist/plugin/interceptors/telemetry.js.map +1 -1
- package/dist/plugin/interceptors/timeout.js.map +1 -1
- package/dist/plugin/plugin.d.ts +39 -5
- package/dist/plugin/plugin.d.ts.map +1 -1
- package/dist/plugin/plugin.js +82 -6
- package/dist/plugin/plugin.js.map +1 -1
- package/dist/plugin/to-plugin.d.ts +4 -0
- package/dist/plugin/to-plugin.d.ts.map +1 -1
- package/dist/plugin/to-plugin.js +3 -0
- package/dist/plugin/to-plugin.js.map +1 -1
- package/dist/server/index.d.ts +3 -0
- package/dist/server/index.d.ts.map +1 -1
- package/dist/server/index.js +3 -4
- package/dist/server/index.js.map +1 -1
- package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
- package/dist/server/utils.js.map +1 -1
- package/dist/server/vite-dev-server.js +0 -2
- package/dist/server/vite-dev-server.js.map +1 -1
- package/dist/shared/src/sql/helpers.js.map +1 -1
- package/dist/stream/arrow-stream-processor.js.map +1 -1
- package/dist/stream/buffers.js.map +1 -1
- package/dist/stream/sse-writer.js.map +1 -1
- package/dist/stream/stream-manager.js.map +1 -1
- package/dist/stream/stream-registry.js.map +1 -1
- package/dist/telemetry/instrumentations.js.map +1 -1
- package/dist/type-generator/index.js.map +1 -1
- package/dist/type-generator/query-registry.js.map +1 -1
- package/dist/type-generator/types.js.map +1 -1
- package/dist/type-generator/vite-plugin.js.map +1 -1
- package/dist/utils/env-validator.js +1 -5
- package/dist/utils/env-validator.js.map +1 -1
- package/dist/utils/merge.js +1 -5
- package/dist/utils/merge.js.map +1 -1
- package/dist/utils/vite-config-merge.js +1 -5
- package/dist/utils/vite-config-merge.js.map +1 -1
- package/llms.txt +37 -12
- package/package.json +3 -1
- package/dist/index.js.map +0 -1
- package/dist/utils/databricks-client-middleware.d.ts +0 -17
- package/dist/utils/databricks-client-middleware.d.ts.map +0 -1
- package/dist/utils/databricks-client-middleware.js +0 -117
- package/dist/utils/databricks-client-middleware.js.map +0 -1
- package/dist/utils/index.js +0 -26
- package/dist/utils/index.js.map +0 -1
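The shape of this release is visible in the file list above: the request-scoped client middleware under `dist/utils/databricks-client-middleware` is removed, and a new `dist/context` module (execution-context, service-context, user-context) takes over. A rough migration sketch, assuming app code previously used the old `getRequestContext()` helper referenced in the removed docs and now uses the context helpers that AGENTS.md says are exported from `@databricks/appkit` (the package-root import is an assumption based on that doc):

```ts
// Sketch only - 0.1.4 names below come from the removed documentation.
// 0.1.4: request context came from the Databricks client middleware.
// const { serviceUserId, warehouseId } = getRequestContext();

// 0.1.5: ambient execution-context helpers.
import {
  getCurrentUserId,
  getWarehouseId,
  getWorkspaceClient,
  isInUserContext,
} from "@databricks/appkit";

async function describeContext() {
  const userId = getCurrentUserId(); // user ID in user context, service user ID otherwise
  const warehouseId = await getWarehouseId(); // DATABRICKS_WAREHOUSE_ID, or auto-selected in dev
  const client = getWorkspaceClient(); // WorkspaceClient for the current execution context
  return { userId, warehouseId, userScoped: isInUserContext(), client };
}
```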
package/AGENTS.md
CHANGED
@@ -440,23 +440,49 @@ Formats:
 - `format: "JSON"` (default) returns JSON rows
 - `format: "ARROW"` returns an Arrow “statement_id” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
 
-###
+### Execution context and `asUser(req)`
 
-
+AppKit manages Databricks authentication via two contexts:
 
-
+- **ServiceContext** (singleton): Initialized at app startup with service principal credentials
+- **ExecutionContext**: Determined at runtime - either service principal or user context
+
+**Headers used for user context:**
 
 - `x-forwarded-user`: required in production; identifies the user
-- `x-forwarded-access-token`:
+- `x-forwarded-access-token`: required for user token passthrough
+
+**Using `asUser(req)` for user-scoped operations:**
+
+The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials:
+
+```ts
+// In a custom plugin route handler
+router.post("/users/me/data", async (req, res) => {
+  // Execute as the user (uses their Databricks permissions)
+  const result = await this.asUser(req).query("SELECT ...");
+  res.json(result);
+});
+
+// Service principal execution (default)
+router.post("/system/data", async (req, res) => {
+  const result = await this.query("SELECT ...");
+  res.json(result);
+});
+```
+
+**Context helper functions (exported from `@databricks/appkit`):**
+
+- `getExecutionContext()`: Returns current context (user or service)
+- `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise
+- `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context
+- `getWarehouseId()`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev)
+- `getWorkspaceId()`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
+- `isInUserContext()`: Returns `true` if currently executing in user context
 
-
+**Development mode behavior:**
 
-
-- `serviceUserId`: service principal/user ID
-- `warehouseId`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID`, or auto-selected in development)
-- `workspaceId`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
-- `userDatabricksClient`: present only when passthrough is available (or in dev it equals service client)
-- `serviceDatabricksClient`: always present
+In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal.
 
 ### Custom plugins (backend)
 
@@ -469,7 +495,6 @@ import type express from "express";
 class MyPlugin extends Plugin {
   name = "my-plugin";
   envVars = []; // list required env vars here
-  requiresDatabricksClient = false; // set true if you need getRequestContext()
 
   injectRoutes(router: express.Router) {
     this.route(router, {
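Putting the two documented pieces together - `asUser(req)` plus the context helpers - a custom plugin route might look like the sketch below. The plugin name, route path, and response shape are placeholders, and the `Plugin` import from the package root follows the custom-plugin skeleton shown in the same doc (its exact import path is not visible in this diff):

```ts
import type express from "express";
import { Plugin, getCurrentUserId, isInUserContext } from "@databricks/appkit";

class ReportsPlugin extends Plugin {
  name = "reports";
  envVars = [];

  injectRoutes(router: express.Router) {
    this.route(router, {
      name: "myReport",
      method: "get",
      path: "/users/me/report",
      handler: async (req: express.Request, res: express.Response) => {
        // Re-enter this plugin under the caller's identity; inside, the context
        // helpers resolve to the requesting user instead of the service principal.
        await this.asUser(req).buildReport(req, res);
      },
    });
  }

  async buildReport(_req: express.Request, res: express.Response) {
    res.json({ owner: getCurrentUserId(), userScoped: isInUserContext() });
  }
}
```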
package/CLAUDE.md
CHANGED
@@ -440,23 +440,49 @@ Formats:
 - `format: "JSON"` (default) returns JSON rows
 - `format: "ARROW"` returns an Arrow “statement_id” payload over SSE, then the client fetches binary Arrow from `/api/analytics/arrow-result/:jobId`
 
-###
+### Execution context and `asUser(req)`
 
-
+AppKit manages Databricks authentication via two contexts:
 
-
+- **ServiceContext** (singleton): Initialized at app startup with service principal credentials
+- **ExecutionContext**: Determined at runtime - either service principal or user context
+
+**Headers used for user context:**
 
 - `x-forwarded-user`: required in production; identifies the user
-- `x-forwarded-access-token`:
+- `x-forwarded-access-token`: required for user token passthrough
+
+**Using `asUser(req)` for user-scoped operations:**
+
+The `asUser(req)` pattern allows plugins to execute operations using the requesting user's credentials:
+
+```ts
+// In a custom plugin route handler
+router.post("/users/me/data", async (req, res) => {
+  // Execute as the user (uses their Databricks permissions)
+  const result = await this.asUser(req).query("SELECT ...");
+  res.json(result);
+});
+
+// Service principal execution (default)
+router.post("/system/data", async (req, res) => {
+  const result = await this.query("SELECT ...");
+  res.json(result);
+});
+```
+
+**Context helper functions (exported from `@databricks/appkit`):**
+
+- `getExecutionContext()`: Returns current context (user or service)
+- `getCurrentUserId()`: Returns user ID in user context, service user ID otherwise
+- `getWorkspaceClient()`: Returns the appropriate WorkspaceClient for current context
+- `getWarehouseId()`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID` or auto-selected in dev)
+- `getWorkspaceId()`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
+- `isInUserContext()`: Returns `true` if currently executing in user context
 
-
+**Development mode behavior:**
 
-
-- `serviceUserId`: service principal/user ID
-- `warehouseId`: `Promise<string>` (from `DATABRICKS_WAREHOUSE_ID`, or auto-selected in development)
-- `workspaceId`: `Promise<string>` (from `DATABRICKS_WORKSPACE_ID` or fetched)
-- `userDatabricksClient`: present only when passthrough is available (or in dev it equals service client)
-- `serviceDatabricksClient`: always present
+In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and falls back to the service principal.
 
 ### Custom plugins (backend)
 
@@ -469,7 +495,6 @@ import type express from "express";
 class MyPlugin extends Plugin {
   name = "my-plugin";
   envVars = []; // list required env vars here
-  requiresDatabricksClient = false; // set true if you need getRequestContext()
 
   injectRoutes(router: express.Router) {
     this.route(router, {
package/NOTICE.md
CHANGED
@@ -49,6 +49,7 @@ This Software contains code from the following open source projects:
 | [@radix-ui/react-tooltip](https://www.npmjs.com/package/@radix-ui/react-tooltip) | 1.2.8 | MIT | https://radix-ui.com/primitives |
 | [@tanstack/react-table](https://www.npmjs.com/package/@tanstack/react-table) | 8.21.3 | MIT | https://tanstack.com/table |
 | [@tanstack/react-virtual](https://www.npmjs.com/package/@tanstack/react-virtual) | 3.13.12 | MIT | https://tanstack.com/virtual |
+| [@types/semver](https://www.npmjs.com/package/@types/semver) | 7.7.1 | MIT | https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/semver |
 | [apache-arrow](https://www.npmjs.com/package/apache-arrow) | 21.1.0 | Apache-2.0 | https://arrow.apache.org/js/ |
 | [class-variance-authority](https://www.npmjs.com/package/class-variance-authority) | 0.7.1 | Apache-2.0 | https://github.com/joe-bell/cva#readme |
 | [clsx](https://www.npmjs.com/package/clsx) | 2.1.1 | MIT | https://github.com/lukeed/clsx#readme |
@@ -66,6 +67,7 @@ This Software contains code from the following open source projects:
 | [react-day-picker](https://www.npmjs.com/package/react-day-picker) | 9.12.0 | MIT | https://daypicker.dev |
 | [react-hook-form](https://www.npmjs.com/package/react-hook-form) | 7.68.0 | MIT | https://react-hook-form.com |
 | [react-resizable-panels](https://www.npmjs.com/package/react-resizable-panels) | 3.0.6 | MIT | https://github.com/bvaughn/react-resizable-panels#readme |
+| [semver](https://www.npmjs.com/package/semver) | 6.3.1, 7.7.3 | ISC | https://github.com/npm/node-semver#readme |
 | [sonner](https://www.npmjs.com/package/sonner) | 2.0.7 | MIT | https://sonner.emilkowal.ski/ |
 | [tailwind-merge](https://www.npmjs.com/package/tailwind-merge) | 3.4.0 | MIT | https://github.com/dcastil/tailwind-merge |
 | [vaul](https://www.npmjs.com/package/vaul) | 1.1.2 | MIT | https://vaul.emilkowal.ski/ |
package/dist/analytics/analytics.d.ts
CHANGED
@@ -3,28 +3,53 @@ import { SQLTypeMarker } from "../shared/src/sql/types.js";
 import { Plugin } from "../plugin/plugin.js";
 import { IAnalyticsConfig } from "./types.js";
 import { WorkspaceClient } from "@databricks/sdk-experimental";
+import express from "express";
 
 //#region src/analytics/analytics.d.ts
 declare class AnalyticsPlugin extends Plugin {
   name: string;
   envVars: never[];
-  requiresDatabricksClient: boolean;
   protected static description: string;
   protected config: IAnalyticsConfig;
   private SQLClient;
   private queryProcessor;
   constructor(config: IAnalyticsConfig);
   injectRoutes(router: IAppRouter): void;
-
-
-
-
-
-
-
+  /**
+   * Handle Arrow data download requests.
+   * When called via asUser(req), uses the user's Databricks credentials.
+   */
+  _handleArrowRoute(req: express.Request, res: express.Response): Promise<void>;
+  /**
+   * Handle SQL query execution requests.
+   * When called via asUser(req), uses the user's Databricks credentials.
+   */
+  _handleQueryRoute(req: express.Request, res: express.Response): Promise<void>;
+  /**
+   * Execute a SQL query using the current execution context.
+   *
+   * When called directly: uses service principal credentials.
+   * When called via asUser(req).query(...): uses user's credentials.
+   *
+   * @example
+   * ```typescript
+   * // Service principal execution
+   * const result = await analytics.query("SELECT * FROM table")
+   *
+   * // User context execution (in route handler)
+   * const result = await this.asUser(req).query("SELECT * FROM table")
+   * ```
+   */
+  query(query: string, parameters?: Record<string, SQLTypeMarker | null | undefined>, formatParameters?: Record<string, any>, signal?: AbortSignal): Promise<any>;
+  /**
+   * Get Arrow-formatted data for a completed query job.
+   */
   protected getArrowData(workspaceClient: WorkspaceClient, jobId: string, signal?: AbortSignal): Promise<ReturnType<typeof this.SQLClient.getArrowData>>;
   shutdown(): Promise<void>;
 }
+/**
+ * @internal
+ */
 declare const analytics: ToPlugin<typeof AnalyticsPlugin, IAnalyticsConfig, "analytics">;
 //#endregion
 export { analytics };
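The typings above make `_handleQueryRoute` and `_handleArrowRoute` public, which is what backs the new `/users/me/...` routes registered in `analytics.js` below. From the browser side, a call against the user-scoped query route would look roughly like this (the `/api/analytics` mount prefix is inferred from the arrow-result URL documented in AGENTS.md, `daily_active_users` is a made-up query key, and the response is streamed as SSE, so a real client parses events rather than plain JSON):

```ts
// Sketch of the request shape accepted by _handleQueryRoute (see the req.body
// destructuring in analytics.js): { parameters, format = "JSON" }.
const res = await fetch("/api/analytics/users/me/query/daily_active_users", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    parameters: {}, // optional SQL parameters for the registered query
    format: "JSON", // or "ARROW" to get a statement_id payload for arrow-result
  }),
});
console.log(res.status, res.headers.get("content-type"));
```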
package/dist/analytics/analytics.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"analytics.d.ts","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":[],"mappings":"
+
{"version":3,"file":"analytics.d.ts","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":[],"mappings":";;;;;;;;cAuBa,eAAA,SAAwB,MAAA;;;EAAxB,iBAAA,WAAgB,EAAA,MAAA;EAAA,UAAA,MAAA,EAKD,gBALC;UAKD,SAAA;UAMN,cAAA;aAWC,CAAA,MAAA,EAXD,gBAWC;cA6CN,CAAA,MAAA,EA7CM,UA6CN,CAAA,EAAA,IAAA;;;;;mBAqCZ,CAAA,GAAA,EArCI,OAAA,CAAQ,OAqCZ,EAAA,GAAA,EApCI,OAAA,CAAQ,QAoCZ,CAAA,EAnCA,OAmCA,CAAA,IAAA,CAAA;;;;;mBAkGA,CAAA,GAAA,EApGI,OAAA,CAAQ,OAoGZ,EAAA,GAAA,EAnGI,OAAA,CAAQ,QAmGZ,CAAA,EAlGA,OAkGA,CAAA,IAAA,CAAA;;;;;;;;AAwCL;;;;;;;;oCA3CiB,eAAe,sDACT,8BACV,cACR;;;;0CAyBgB,yCAER,cACR,QAAQ;cAIO;;;;;cAQP,WAAS,gBAAA,iBAAA"}
package/dist/analytics/analytics.js
CHANGED
@@ -1,5 +1,5 @@
-import {
-import {
+import { getCurrentUserId, getWarehouseId, getWorkspaceClient } from "../context/execution-context.js";
+import { init_context } from "../context/index.js";
 import { SQLWarehouseConnector } from "../connectors/sql-warehouse/client.js";
 import "../connectors/index.js";
 import { Plugin } from "../plugin/plugin.js";
@@ -9,7 +9,7 @@ import { queryDefaults } from "./defaults.js";
 import { QueryProcessor } from "./query.js";
 
 //#region src/analytics/analytics.ts
-
+init_context();
 var AnalyticsPlugin = class extends Plugin {
   static {
     this.description = "Analytics plugin for data analysis";
@@ -18,7 +18,6 @@ var AnalyticsPlugin = class extends Plugin {
     super(config);
     this.name = "analytics";
     this.envVars = [];
-    this.requiresDatabricksClient = true;
     this.config = config;
     this.queryProcessor = new QueryProcessor();
     this.SQLClient = new SQLWarehouseConnector({
@@ -35,12 +34,20 @@ var AnalyticsPlugin = class extends Plugin {
         await this._handleArrowRoute(req, res);
       }
     });
+    this.route(router, {
+      name: "query",
+      method: "post",
+      path: "/query/:query_key",
+      handler: async (req, res) => {
+        await this._handleQueryRoute(req, res);
+      }
+    });
     this.route(router, {
       name: "arrowAsUser",
       method: "get",
       path: "/users/me/arrow-result/:jobId",
       handler: async (req, res) => {
-        await this._handleArrowRoute(req, res
+        await this.asUser(req)._handleArrowRoute(req, res);
       }
     });
     this.route(router, {
@@ -48,22 +55,18 @@ var AnalyticsPlugin = class extends Plugin {
       method: "post",
      path: "/users/me/query/:query_key",
      handler: async (req, res) => {
-        await this._handleQueryRoute(req, res
-      }
-    });
-    this.route(router, {
-      name: "query",
-      method: "post",
-      path: "/query/:query_key",
-      handler: async (req, res) => {
-        await this._handleQueryRoute(req, res, { asUser: false });
+        await this.asUser(req)._handleQueryRoute(req, res);
       }
     });
   }
-
+  /**
+   * Handle Arrow data download requests.
+   * When called via asUser(req), uses the user's Databricks credentials.
+   */
+  async _handleArrowRoute(req, res) {
     try {
       const { jobId } = req.params;
-      const workspaceClient = getWorkspaceClient(
+      const workspaceClient = getWorkspaceClient();
       console.log(`Processing Arrow job request: ${jobId} for plugin: ${this.name}`);
       const result = await this.getArrowData(workspaceClient, jobId);
       res.setHeader("Content-Type", "application/octet-stream");
@@ -79,7 +82,11 @@ var AnalyticsPlugin = class extends Plugin {
       });
     }
   }
-
+  /**
+   * Handle SQL query execution requests.
+   * When called via asUser(req), uses the user's Databricks credentials.
+   */
+  async _handleQueryRoute(req, res) {
     const { query_key } = req.params;
     const { parameters, format = "JSON" } = req.body;
     const queryParameters = format === "ARROW" ? {
@@ -89,8 +96,7 @@ var AnalyticsPlugin = class extends Plugin {
       },
       type: "arrow"
     } : { type: "result" };
-    const
-    const userKey = asUser ? requestContext.userId : requestContext.serviceUserId;
+    const userKey = getCurrentUserId();
     if (!query_key) {
       res.status(400).json({ error: "query_key is required" });
       return;
@@ -117,24 +123,42 @@ var AnalyticsPlugin = class extends Plugin {
     } };
     await this.executeStream(res, async (signal) => {
       const processedParams = await this.queryProcessor.processQueryParams(query, parameters);
-      const result = await this.query(query, processedParams, queryParameters.formatParameters, signal
+      const result = await this.query(query, processedParams, queryParameters.formatParameters, signal);
       return {
         type: queryParameters.type,
         ...result
       };
     }, streamExecutionSettings, userKey);
   }
-
-
-
+  /**
+   * Execute a SQL query using the current execution context.
+   *
+   * When called directly: uses service principal credentials.
+   * When called via asUser(req).query(...): uses user's credentials.
+   *
+   * @example
+   * ```typescript
+   * // Service principal execution
+   * const result = await analytics.query("SELECT * FROM table")
+   *
+   * // User context execution (in route handler)
+   * const result = await this.asUser(req).query("SELECT * FROM table")
+   * ```
+   */
+  async query(query, parameters, formatParameters, signal) {
+    const workspaceClient = getWorkspaceClient();
+    const warehouseId = await getWarehouseId();
     const { statement, parameters: sqlParameters } = this.queryProcessor.convertToSQLParameters(query, parameters);
     return (await this.SQLClient.executeStatement(workspaceClient, {
       statement,
-      warehouse_id:
+      warehouse_id: warehouseId,
       parameters: sqlParameters,
       ...formatParameters
     }, signal)).result;
   }
+  /**
+   * Get Arrow-formatted data for a completed query job.
+   */
   async getArrowData(workspaceClient, jobId, signal) {
     return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);
   }
@@ -142,6 +166,9 @@ var AnalyticsPlugin = class extends Plugin {
     this.streamManager.abortAll();
   }
 };
+/**
+ * @internal
+ */
 const analytics = toPlugin(AnalyticsPlugin, "analytics");
 
 //#endregion
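The compiled plugin above only shows the call sites - `init_context()`, `getWorkspaceClient()`, `getCurrentUserId()` - while the ExecutionContext itself lives in the new `dist/context` files, which this diff does not expand. For intuition only: ambient per-request state like this is typically carried with Node's `AsyncLocalStorage`, along the lines of the sketch below (this is not the appkit implementation, just the common pattern the API suggests):

```ts
import { AsyncLocalStorage } from "node:async_hooks";

interface Ctx {
  userId: string;
  userScoped: boolean;
}

const contextStore = new AsyncLocalStorage<Ctx>();
const serviceContext: Ctx = { userId: "service-principal", userScoped: false };

// Helpers read whatever context the current async call chain was started with.
const current = () => contextStore.getStore() ?? serviceContext;
const getCurrentUserId = () => current().userId;
const isInUserContext = () => current().userScoped;

// Roughly what asUser(req) has to achieve: run the wrapped call with user-scoped
// state (derived in appkit from x-forwarded-user / x-forwarded-access-token).
function runAsUser<T>(userId: string, fn: () => Promise<T>): Promise<T> {
  return contextStore.run({ userId, userScoped: true }, fn);
}

// Outside runAsUser the helpers see the service principal; inside, the user.
void runAsUser("someone@example.com", async () => {
  console.log(getCurrentUserId(), isInUserContext()); // "someone@example.com" true
});
console.log(getCurrentUserId(), isInUserContext()); // "service-principal" false
```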
package/dist/analytics/analytics.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"analytics.js","names":["streamExecutionSettings: StreamExecutionSettings"],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport type { Request, Response } from \"../utils\";\nimport { getRequestContext, getWorkspaceClient } from \"../utils\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n requiresDatabricksClient = true;\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: Request, res: Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route(router, {\n name: \"arrowAsUser\",\n method: \"get\",\n path: \"/users/me/arrow-result/:jobId\",\n handler: async (req: Request, res: Response) => {\n await this._handleArrowRoute(req, res, { asUser: true });\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"queryAsUser\",\n method: \"post\",\n path: \"/users/me/query/:query_key\",\n handler: async (req: Request, res: Response) => {\n await this._handleQueryRoute(req, res, { asUser: true });\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: Request, res: Response) => {\n await this._handleQueryRoute(req, res, { asUser: false });\n },\n });\n }\n\n private async _handleArrowRoute(\n req: Request,\n res: Response,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n\n const workspaceClient = getWorkspaceClient(asUser);\n\n console.log(\n `Processing Arrow job request: ${jobId} for plugin: ${this.name}`,\n );\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n console.log(\n `Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n console.error(`Arrow job error for ${this.name}:`, error);\n res.status(404).json({\n error: error instanceof Error ? 
error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n private async _handleQueryRoute(\n req: Request,\n res: Response,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n const requestContext = getRequestContext();\n const userKey = asUser\n ? requestContext.userId\n : requestContext.serviceUserId;\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const query = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!query) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n userKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await this.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await this.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n {\n asUser,\n },\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n userKey,\n );\n }\n\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<any> {\n const requestContext = getRequestContext();\n const workspaceClient = getWorkspaceClient(asUser);\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: await requestContext.warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n // If we need arrow stream in more plugins we can define this as a base method in the core plugin class\n // and have a generic endpoint for each plugin that consumes this arrow data.\n protected async getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, 
\"analytics\");\n"],"mappings":";;;;;;;;;;;YAUiE;AASjE,IAAa,kBAAb,cAAqC,OAAO;;qBAKX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAZR;iBACG,EAAE;kCACe;AAWzB,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAC/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,MAAM,CAAC;;GAE3D,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,MAAM,CAAC;;GAE3D,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,OAAO,CAAC;;GAE5D,CAAC;;CAGJ,MAAc,kBACZ,KACA,KACA,EAAE,SAAS,UAAgC,EAAE,EAC9B;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GAEtB,MAAM,kBAAkB,mBAAmB,OAAO;AAElD,WAAQ,IACN,iCAAiC,MAAM,eAAe,KAAK,OAC5D;GAED,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,WAAQ,IACN,yBAAyB,OAAO,KAAK,OAAO,iBAAiB,QAC9D;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,WAAQ,MAAM,uBAAuB,KAAK,KAAK,IAAI,MAAM;AACzD,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;CAIN,MAAc,kBACZ,KACA,KACA,EAAE,SAAS,UAAgC,EAAE,EAC9B;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;EAC5C,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAEP,MAAM,iBAAiB,mBAAmB;EAC1C,MAAM,UAAU,SACZ,eAAe,SACf,eAAe;AAEnB,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,QAAQ,MAAM,KAAK,IAAI,YAC3B,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAMA,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,KAAK,cACT,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,KAAK,MACxB,OACA,iBACA,gBAAgB,kBAChB,QACA,EACE,QACD,CACF;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,QACD;;CAGH,MAAM,MACJ,OACA,YACA,kBACA,QACA,EAAE,SAAS,UAAgC,EAAE,EAC/B;EACd,MAAM,iBAAiB,mBAAmB;EAC1C,MAAM,kBAAkB,mBAAmB,OAAO;EAElD,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc,MAAM,eAAe;GACnC,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;CAKlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;AAIjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
+
{"version":3,"file":"analytics.js","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport {\n getCurrentUserId,\n getWarehouseId,\n getWorkspaceClient,\n} from \"../context\";\nimport type express from \"express\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n // Service principal endpoints\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleQueryRoute(req, res);\n },\n });\n\n // User context endpoints - use asUser(req) to execute with user's identity\n this.route(router, {\n name: \"arrowAsUser\",\n method: \"get\",\n path: \"/users/me/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"queryAsUser\",\n method: \"post\",\n path: \"/users/me/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleQueryRoute(req, res);\n },\n });\n }\n\n /**\n * Handle Arrow data download requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleArrowRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n const workspaceClient = getWorkspaceClient();\n\n console.log(\n `Processing Arrow job request: ${jobId} for plugin: ${this.name}`,\n );\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n console.log(\n `Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n console.error(`Arrow job error for ${this.name}:`, error);\n res.status(404).json({\n error: error instanceof Error ? 
error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n /**\n * Handle SQL query execution requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleQueryRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n // Get user key from current context (automatically includes user ID when in user context)\n const userKey = getCurrentUserId();\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const query = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!query) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n userKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await this.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await this.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n userKey,\n );\n }\n\n /**\n * Execute a SQL query using the current execution context.\n *\n * When called directly: uses service principal credentials.\n * When called via asUser(req).query(...): uses user's credentials.\n *\n * @example\n * ```typescript\n * // Service principal execution\n * const result = await analytics.query(\"SELECT * FROM table\")\n *\n * // User context execution (in route handler)\n * const result = await this.asUser(req).query(\"SELECT * FROM table\")\n * ```\n */\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n ): Promise<any> {\n const workspaceClient = getWorkspaceClient();\n const warehouseId = await getWarehouseId();\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n /**\n * Get Arrow-formatted data for a completed query job.\n */\n protected async getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\n/**\n * @internal\n */\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, 
\"analytics\");\n"],"mappings":";;;;;;;;;;;cAYoB;AAWpB,IAAa,kBAAb,cAAqC,OAAO;;qBAIX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAXR;iBACG,EAAE;AAWV,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAE/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;;;;;;CAOJ,MAAM,kBACJ,KACA,KACe;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GACtB,MAAM,kBAAkB,oBAAoB;AAE5C,WAAQ,IACN,iCAAiC,MAAM,eAAe,KAAK,OAC5D;GAED,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,WAAQ,IACN,yBAAyB,OAAO,KAAK,OAAO,iBAAiB,QAC9D;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,WAAQ,MAAM,uBAAuB,KAAK,KAAK,IAAI,MAAM;AACzD,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;;;;;CAQN,MAAM,kBACJ,KACA,KACe;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;EAC5C,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAGP,MAAM,UAAU,kBAAkB;AAElC,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,QAAQ,MAAM,KAAK,IAAI,YAC3B,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAM,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,KAAK,cACT,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,KAAK,MACxB,OACA,iBACA,gBAAgB,kBAChB,OACD;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,QACD;;;;;;;;;;;;;;;;;CAkBH,MAAM,MACJ,OACA,YACA,kBACA,QACc;EACd,MAAM,kBAAkB,oBAAoB;EAC5C,MAAM,cAAc,MAAM,gBAAgB;EAE1C,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc;GACd,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;;;;CAMlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;;;;AAOjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
package/dist/analytics/defaults.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"defaults.js","names":[
+
{"version":3,"file":"defaults.js","names":[],"sources":["../../src/analytics/defaults.ts"],"sourcesContent":["import type { PluginExecuteConfig } from \"shared\";\n\nexport const queryDefaults: PluginExecuteConfig = {\n cache: {\n enabled: true,\n ttl: 3600,\n },\n retry: {\n enabled: true,\n initialDelay: 1500,\n attempts: 3,\n },\n timeout: 18000,\n};\n"],"mappings":";AAEA,MAAa,gBAAqC;CAChD,OAAO;EACL,SAAS;EACT,KAAK;EACN;CACD,OAAO;EACL,SAAS;EACT,cAAc;EACd,UAAU;EACX;CACD,SAAS;CACV"}
package/dist/analytics/query.js
CHANGED
@@ -1,16 +1,16 @@
 import { isSQLTypeMarker, sql } from "../shared/src/sql/helpers.js";
-import {
-import {
+import { getWorkspaceId } from "../context/execution-context.js";
+import { init_context } from "../context/index.js";
 import { createHash } from "node:crypto";
 
 //#region src/analytics/query.ts
-
+init_context();
 var QueryProcessor = class {
   async processQueryParams(query, parameters) {
     const processed = { ...parameters };
     const paramMatches = query.matchAll(/:([a-zA-Z_]\w*)/g);
     if (new Set(Array.from(paramMatches, (m) => m[1])).has("workspaceId") && !processed.workspaceId) {
-      const workspaceId = await
+      const workspaceId = await getWorkspaceId();
       if (workspaceId) processed.workspaceId = sql.string(workspaceId);
     }
     return processed;
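The `processQueryParams` change above is what lets a registered query reference `:workspaceId` without the caller supplying it: the placeholder scan finds it and `getWorkspaceId()` fills it in (wrapped with `sql.string`). A standalone illustration of the placeholder scan (the query text is a made-up example):

```ts
// Same regex as processQueryParams uses to collect :named parameters.
function namedParameters(query: string): Set<string> {
  return new Set(Array.from(query.matchAll(/:([a-zA-Z_]\w*)/g), (m) => m[1]));
}

const query =
  "SELECT * FROM system.billing.usage WHERE workspace_id = :workspaceId AND usage_date >= :start";

const params = namedParameters(query);
console.log(params.has("workspaceId")); // true -> auto-injected via getWorkspaceId()
console.log([...params]);               // ["workspaceId", "start"]
```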
package/dist/analytics/query.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"query.js","names":["sqlHelpers"
+
{"version":3,"file":"query.js","names":["sqlHelpers"],"sources":["../../src/analytics/query.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { sql } from \"@databricks/sdk-experimental\";\nimport { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from \"shared\";\nimport { getWorkspaceId } from \"../context\";\n\ntype SQLParameterValue = SQLTypeMarker | null | undefined;\n\nexport class QueryProcessor {\n async processQueryParams(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): Promise<Record<string, SQLParameterValue>> {\n const processed = { ...parameters };\n\n // extract all params from the query\n const paramMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(paramMatches, (m) => m[1]));\n\n // auto-inject workspaceId if needed and not provided\n if (queryParams.has(\"workspaceId\") && !processed.workspaceId) {\n const workspaceId = await getWorkspaceId();\n if (workspaceId) {\n processed.workspaceId = sqlHelpers.string(workspaceId);\n }\n }\n\n return processed;\n }\n\n hashQuery(query: string): string {\n return createHash(\"md5\").update(query).digest(\"hex\");\n }\n\n convertToSQLParameters(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): { statement: string; parameters: sql.StatementParameterListItem[] } {\n const sqlParameters: sql.StatementParameterListItem[] = [];\n\n if (parameters) {\n // extract all params from the query\n const queryParamMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));\n\n // only allow parameters that exist in the query\n for (const key of Object.keys(parameters)) {\n if (!queryParams.has(key)) {\n throw new Error(\n `Parameter \"${key}\" not found in query. Valid parameters: ${\n Array.from(queryParams).join(\", \") || \"none\"\n }`,\n );\n }\n }\n\n // convert parameters to SQL parameters\n for (const [key, value] of Object.entries(parameters)) {\n const parameter = this._createParameter(key, value);\n if (parameter) {\n sqlParameters.push(parameter);\n }\n }\n }\n\n return { statement: query, parameters: sqlParameters };\n }\n\n private _createParameter(\n key: string,\n value: SQLParameterValue,\n ): sql.StatementParameterListItem | null {\n if (value === null || value === undefined) {\n return null;\n }\n\n if (!isSQLTypeMarker(value)) {\n throw new Error(\n `Parameter \"${key}\" must be a SQL type. 
Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`,\n );\n }\n\n return {\n name: key,\n value: value.value,\n type: value.__sql_type,\n };\n }\n}\n"],"mappings":";;;;;;cAG4C;AAI5C,IAAa,iBAAb,MAA4B;CAC1B,MAAM,mBACJ,OACA,YAC4C;EAC5C,MAAM,YAAY,EAAE,GAAG,YAAY;EAGnC,MAAM,eAAe,MAAM,SAAS,mBAAmB;AAIvD,MAHoB,IAAI,IAAI,MAAM,KAAK,eAAe,MAAM,EAAE,GAAG,CAAC,CAGlD,IAAI,cAAc,IAAI,CAAC,UAAU,aAAa;GAC5D,MAAM,cAAc,MAAM,gBAAgB;AAC1C,OAAI,YACF,WAAU,cAAcA,IAAW,OAAO,YAAY;;AAI1D,SAAO;;CAGT,UAAU,OAAuB;AAC/B,SAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;CAGtD,uBACE,OACA,YACqE;EACrE,MAAM,gBAAkD,EAAE;AAE1D,MAAI,YAAY;GAEd,MAAM,oBAAoB,MAAM,SAAS,mBAAmB;GAC5D,MAAM,cAAc,IAAI,IAAI,MAAM,KAAK,oBAAoB,MAAM,EAAE,GAAG,CAAC;AAGvE,QAAK,MAAM,OAAO,OAAO,KAAK,WAAW,CACvC,KAAI,CAAC,YAAY,IAAI,IAAI,CACvB,OAAM,IAAI,MACR,cAAc,IAAI,0CAChB,MAAM,KAAK,YAAY,CAAC,KAAK,KAAK,IAAI,SAEzC;AAKL,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,EAAE;IACrD,MAAM,YAAY,KAAK,iBAAiB,KAAK,MAAM;AACnD,QAAI,UACF,eAAc,KAAK,UAAU;;;AAKnC,SAAO;GAAE,WAAW;GAAO,YAAY;GAAe;;CAGxD,AAAQ,iBACN,KACA,OACuC;AACvC,MAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO;AAGT,MAAI,CAAC,gBAAgB,MAAM,CACzB,OAAM,IAAI,MACR,cAAc,IAAI,sGACnB;AAGH,SAAO;GACL,MAAM;GACN,OAAO,MAAM;GACb,MAAM,MAAM;GACb"}
package/dist/appkit/package.js
CHANGED
package/dist/cache/defaults.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"defaults.js","names":[
+
{"version":3,"file":"defaults.js","names":[],"sources":["../../src/cache/defaults.ts"],"sourcesContent":["import type { CacheConfig } from \"shared\";\n\n/** Default configuration for cache */\nexport const cacheDefaults: CacheConfig = {\n enabled: true,\n ttl: 3600, // 1 hour\n maxSize: 1000, // 1000 entries\n cacheKey: [], // no cache key by default\n cleanupProbability: 0.01, // 1% probability of triggering cleanup on each get operation\n strictPersistence: false, // if false, use in-memory storage if lakebase is unavailable\n};\n"],"mappings":";;AAGA,MAAa,gBAA6B;CACxC,SAAS;CACT,KAAK;CACL,SAAS;CACT,UAAU,EAAE;CACZ,oBAAoB;CACpB,mBAAmB;CACpB"}
package/dist/cache/index.d.ts
CHANGED
@@ -9,6 +9,7 @@ import { CacheConfig } from "../shared/src/cache.js";
  * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access
  * the singleton instance after initialization.
  *
+ * @internal
  * @example
  * ```typescript
  * const cache = CacheManager.getInstanceSync();
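`CacheManager` is now explicitly tagged `@internal`, but its surface is unchanged; the docstring example above expands to roughly the following (the package-root import is an assumption, `fetchUser` is a placeholder, and the `getOrExecute` signature is taken from the source embedded in `cache/index.js.map`):

```ts
import { CacheManager } from "@databricks/appkit"; // assumption: exported; marked @internal in 0.1.5

declare function fetchUser(id: string): Promise<{ id: string; name: string }>;

const cache = CacheManager.getInstanceSync(); // throws until AppKit.create() has completed
const userId = "1234";

const user = await cache.getOrExecute(
  ["users", userId],       // key parts, hashed together with the user key
  () => fetchUser(userId), // executed only on a cache miss; concurrent calls are deduplicated
  userId,                  // per-user key segment
  { ttl: 600 },            // optional TTL in seconds (cacheDefaults.ttl is 3600)
);
console.log(user.name);
```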
package/dist/cache/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":[],"mappings":";;;;;;
+
{"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":[],"mappings":";;;;;;AAwBA;;;;;;;;;;;;AAwP6B,cAxPhB,YAAA,CAwPgB;0BAkDlB,uBAAA;mBAEN,IAAA;iBAawB,QAAA;iBAMZ,WAAA;UAUS,OAAA;UA0BT,MAAA;UAQW,gBAAA;EAAO,QAAA,iBAAA;;;;;;;;;;;4BA3TP;;;;;;;;kCAkBX,QAAQ,eACpB,QAAQ;;;;;;;;;;;;;;;;;;;;;;+DAuFC,QAAQ;;MAGjB,QAAQ;;;;;;uBA2FgB,QAAQ;;;;;;;;;;6BAkD1B;;MAEN;;;;;;uBAawB;;WAMZ;;;;;;oBAUS;;;;;;;;;WA0BT;;;;;sBAQW"}
package/dist/cache/index.js
CHANGED
@@ -1,7 +1,6 @@
 import { TelemetryManager } from "../telemetry/telemetry-manager.js";
 import { SpanStatusCode } from "../telemetry/index.js";
 import { deepMerge } from "../utils/merge.js";
-import { init_utils } from "../utils/index.js";
 import { LakebaseConnector } from "../connectors/lakebase/client.js";
 import "../connectors/index.js";
 import { cacheDefaults } from "./defaults.js";
@@ -12,7 +11,6 @@ import { createHash } from "node:crypto";
 import { WorkspaceClient } from "@databricks/sdk-experimental";
 
 //#region src/cache/index.ts
-init_utils();
 /**
  * Cache manager class to handle cache operations.
  * Can be used with in-memory storage or persistent storage (Lakebase).
@@ -20,6 +18,7 @@ init_utils();
  * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access
  * the singleton instance after initialization.
  *
+ * @internal
  * @example
  * ```typescript
  * const cache = CacheManager.getInstanceSync();
package/dist/cache/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","names":["result"],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { LakebaseConnector } from \"@/connectors\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n // Telemetry\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n this.config = config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw new Error(\n \"CacheManager not initialized. Ensure AppKit.create() has completed before accessing the cache.\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. 
If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const connector = new LakebaseConnector({ workspaceClient });\n const isHealthy = await connector.healthCheck();\n\n if (isHealthy) {\n const persistentStorage = new PersistentStorage(config, connector);\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n const 
promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n console.debug(\"Error cleaning up expired entries:\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 
3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;YAMqC;;;;;;;;;;;;;;AAiBrC,IAAa,eAAb,MAAa,aAAa;;iCAC0B;;;kBAEH;;;qBACY;;CAe3D,AAAQ,YAAY,SAAuB,QAAqB;cAjBhC;AAkB9B,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,IAAI,MACR,iGACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,YAAY,IAAI,kBAAkB,EAAE,iBADlB,IAAI,gBAAgB,EAAE,CAAC,EACY,CAAC;AAG5D,OAFkB,MAAM,UAAU,aAAa,EAEhC;IACb,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,UAAU;AAClE,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;UAE9C;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,YAAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AACF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,S
AAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AACF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,aAAW;AACtB,WAAM,KAAK,IAAI,UAAUA,UAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAOA;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAM;MACN,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,WAAQ,MAAM,sCAAsC,MAAM;IAC1D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
+
{"version":3,"file":"index.js","names":["result"],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { LakebaseConnector } from \"@/connectors\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @internal\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n // Telemetry\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n this.config = config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw new Error(\n \"CacheManager not initialized. Ensure AppKit.create() has completed before accessing the cache.\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. 
If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const connector = new LakebaseConnector({ workspaceClient });\n const isHealthy = await connector.healthCheck();\n\n if (isHealthy) {\n const persistentStorage = new PersistentStorage(config, connector);\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n const 
promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n console.debug(\"Error cleaning up expired entries:\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 
3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;AAwBA,IAAa,eAAb,MAAa,aAAa;;iCAC0B;;;kBAEH;;;qBACY;;CAe3D,AAAQ,YAAY,SAAuB,QAAqB;cAjBhC;AAkB9B,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,IAAI,MACR,iGACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,YAAY,IAAI,kBAAkB,EAAE,iBADlB,IAAI,gBAAgB,EAAE,CAAC,EACY,CAAC;AAG5D,OAFkB,MAAM,UAAU,aAAa,EAEhC;IACb,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,UAAU;AAClE,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;UAE9C;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,YAAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AACF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,SAAS,4BA
A4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AACF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,aAAW;AACtB,WAAM,KAAK,IAAI,UAAUA,UAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAOA;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAM;MACN,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,WAAQ,MAAM,sCAAsC,MAAM;IAC1D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
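The only source change in this hunk is the `@internal` JSDoc tag added to `CacheManager`; the remainder of the delta is regenerated mappings. For orientation, a minimal usage sketch based on the JSDoc example embedded in `sourcesContent` above — the import path and the `fetchUser` helper are assumptions for illustration, not part of the package's documented public surface (the class is now tagged `@internal`):

```typescript
// Illustrative only: CacheManager is tagged @internal in 0.1.5, and this
// import path is an assumption, not a documented entry point.
import { CacheManager } from "@databricks/appkit";

// Hypothetical fetcher standing in for real work (e.g. a warehouse query).
async function fetchUser(userId: string): Promise<{ id: string; name: string }> {
  return { id: userId, name: "example-user" };
}

export async function getUserCached(userId: string, userKey: string) {
  // Throws if AppKit.create() has not completed yet (see getInstanceSync above).
  const cache = CacheManager.getInstanceSync();

  // A cache hit returns the stored value; concurrent misses for the same key
  // are deduplicated via inFlightRequests; otherwise fetchUser runs and the
  // result is stored. ttl is in seconds (falls back to config.ttl ?? 3600).
  return cache.getOrExecute(["users", userId], () => fetchUser(userId), userKey, {
    ttl: 600,
  });
}
```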