@databricks/appkit 0.1.4 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (163)
  1. package/AGENTS.md +89 -12
  2. package/CLAUDE.md +89 -12
  3. package/NOTICE.md +4 -0
  4. package/README.md +21 -15
  5. package/bin/appkit-lint.js +129 -0
  6. package/dist/analytics/analytics.d.ts +33 -8
  7. package/dist/analytics/analytics.d.ts.map +1 -1
  8. package/dist/analytics/analytics.js +67 -27
  9. package/dist/analytics/analytics.js.map +1 -1
  10. package/dist/analytics/defaults.js.map +1 -1
  11. package/dist/analytics/query.js +12 -6
  12. package/dist/analytics/query.js.map +1 -1
  13. package/dist/app/index.d.ts.map +1 -1
  14. package/dist/app/index.js +7 -5
  15. package/dist/app/index.js.map +1 -1
  16. package/dist/appkit/package.js +1 -1
  17. package/dist/cache/defaults.js.map +1 -1
  18. package/dist/cache/index.d.ts +1 -0
  19. package/dist/cache/index.d.ts.map +1 -1
  20. package/dist/cache/index.js +25 -5
  21. package/dist/cache/index.js.map +1 -1
  22. package/dist/cache/storage/memory.js.map +1 -1
  23. package/dist/cache/storage/persistent.js +12 -6
  24. package/dist/cache/storage/persistent.js.map +1 -1
  25. package/dist/connectors/lakebase/client.js +31 -21
  26. package/dist/connectors/lakebase/client.js.map +1 -1
  27. package/dist/connectors/lakebase/defaults.js.map +1 -1
  28. package/dist/connectors/sql-warehouse/client.js +68 -28
  29. package/dist/connectors/sql-warehouse/client.js.map +1 -1
  30. package/dist/connectors/sql-warehouse/defaults.js.map +1 -1
  31. package/dist/context/execution-context.js +75 -0
  32. package/dist/context/execution-context.js.map +1 -0
  33. package/dist/context/index.js +27 -0
  34. package/dist/context/index.js.map +1 -0
  35. package/dist/context/service-context.js +154 -0
  36. package/dist/context/service-context.js.map +1 -0
  37. package/dist/context/user-context.js +15 -0
  38. package/dist/context/user-context.js.map +1 -0
  39. package/dist/core/appkit.d.ts +3 -0
  40. package/dist/core/appkit.d.ts.map +1 -1
  41. package/dist/core/appkit.js +7 -0
  42. package/dist/core/appkit.js.map +1 -1
  43. package/dist/errors/authentication.d.ts +38 -0
  44. package/dist/errors/authentication.d.ts.map +1 -0
  45. package/dist/errors/authentication.js +48 -0
  46. package/dist/errors/authentication.js.map +1 -0
  47. package/dist/errors/base.d.ts +58 -0
  48. package/dist/errors/base.d.ts.map +1 -0
  49. package/dist/errors/base.js +70 -0
  50. package/dist/errors/base.js.map +1 -0
  51. package/dist/errors/configuration.d.ts +38 -0
  52. package/dist/errors/configuration.d.ts.map +1 -0
  53. package/dist/errors/configuration.js +45 -0
  54. package/dist/errors/configuration.js.map +1 -0
  55. package/dist/errors/connection.d.ts +42 -0
  56. package/dist/errors/connection.d.ts.map +1 -0
  57. package/dist/errors/connection.js +54 -0
  58. package/dist/errors/connection.js.map +1 -0
  59. package/dist/errors/execution.d.ts +42 -0
  60. package/dist/errors/execution.d.ts.map +1 -0
  61. package/dist/errors/execution.js +51 -0
  62. package/dist/errors/execution.js.map +1 -0
  63. package/dist/errors/index.js +28 -0
  64. package/dist/errors/index.js.map +1 -0
  65. package/dist/errors/initialization.d.ts +34 -0
  66. package/dist/errors/initialization.d.ts.map +1 -0
  67. package/dist/errors/initialization.js +42 -0
  68. package/dist/errors/initialization.js.map +1 -0
  69. package/dist/errors/server.d.ts +38 -0
  70. package/dist/errors/server.d.ts.map +1 -0
  71. package/dist/errors/server.js +45 -0
  72. package/dist/errors/server.js.map +1 -0
  73. package/dist/errors/tunnel.d.ts +38 -0
  74. package/dist/errors/tunnel.d.ts.map +1 -0
  75. package/dist/errors/tunnel.js +51 -0
  76. package/dist/errors/tunnel.js.map +1 -0
  77. package/dist/errors/validation.d.ts +36 -0
  78. package/dist/errors/validation.d.ts.map +1 -0
  79. package/dist/errors/validation.js +45 -0
  80. package/dist/errors/validation.js.map +1 -0
  81. package/dist/index.d.ts +12 -4
  82. package/dist/index.js +12 -4
  83. package/dist/index.js.map +1 -1
  84. package/dist/logging/logger.js +179 -0
  85. package/dist/logging/logger.js.map +1 -0
  86. package/dist/logging/sampling.js +56 -0
  87. package/dist/logging/sampling.js.map +1 -0
  88. package/dist/logging/wide-event-emitter.js +108 -0
  89. package/dist/logging/wide-event-emitter.js.map +1 -0
  90. package/dist/logging/wide-event.js +167 -0
  91. package/dist/logging/wide-event.js.map +1 -0
  92. package/dist/plugin/dev-reader.d.ts.map +1 -1
  93. package/dist/plugin/dev-reader.js +8 -3
  94. package/dist/plugin/dev-reader.js.map +1 -1
  95. package/dist/plugin/interceptors/cache.js.map +1 -1
  96. package/dist/plugin/interceptors/retry.js +10 -2
  97. package/dist/plugin/interceptors/retry.js.map +1 -1
  98. package/dist/plugin/interceptors/telemetry.js +24 -9
  99. package/dist/plugin/interceptors/telemetry.js.map +1 -1
  100. package/dist/plugin/interceptors/timeout.js +4 -0
  101. package/dist/plugin/interceptors/timeout.js.map +1 -1
  102. package/dist/plugin/plugin.d.ts +38 -4
  103. package/dist/plugin/plugin.d.ts.map +1 -1
  104. package/dist/plugin/plugin.js +86 -5
  105. package/dist/plugin/plugin.js.map +1 -1
  106. package/dist/plugin/to-plugin.d.ts +4 -0
  107. package/dist/plugin/to-plugin.d.ts.map +1 -1
  108. package/dist/plugin/to-plugin.js +3 -0
  109. package/dist/plugin/to-plugin.js.map +1 -1
  110. package/dist/server/index.d.ts +3 -0
  111. package/dist/server/index.d.ts.map +1 -1
  112. package/dist/server/index.js +25 -21
  113. package/dist/server/index.js.map +1 -1
  114. package/dist/server/remote-tunnel/remote-tunnel-controller.js +4 -2
  115. package/dist/server/remote-tunnel/remote-tunnel-controller.js.map +1 -1
  116. package/dist/server/remote-tunnel/remote-tunnel-manager.js +10 -8
  117. package/dist/server/remote-tunnel/remote-tunnel-manager.js.map +1 -1
  118. package/dist/server/utils.js.map +1 -1
  119. package/dist/server/vite-dev-server.js +8 -5
  120. package/dist/server/vite-dev-server.js.map +1 -1
  121. package/dist/shared/src/sql/helpers.js.map +1 -1
  122. package/dist/stream/arrow-stream-processor.js +13 -6
  123. package/dist/stream/arrow-stream-processor.js.map +1 -1
  124. package/dist/stream/buffers.js +5 -1
  125. package/dist/stream/buffers.js.map +1 -1
  126. package/dist/stream/sse-writer.js.map +1 -1
  127. package/dist/stream/stream-manager.d.ts.map +1 -1
  128. package/dist/stream/stream-manager.js +47 -36
  129. package/dist/stream/stream-manager.js.map +1 -1
  130. package/dist/stream/stream-registry.js.map +1 -1
  131. package/dist/stream/types.js.map +1 -1
  132. package/dist/telemetry/index.d.ts +2 -2
  133. package/dist/telemetry/index.js +2 -2
  134. package/dist/telemetry/instrumentations.js +14 -10
  135. package/dist/telemetry/instrumentations.js.map +1 -1
  136. package/dist/telemetry/telemetry-manager.js +8 -6
  137. package/dist/telemetry/telemetry-manager.js.map +1 -1
  138. package/dist/telemetry/trace-sampler.js +33 -0
  139. package/dist/telemetry/trace-sampler.js.map +1 -0
  140. package/dist/type-generator/index.js +4 -2
  141. package/dist/type-generator/index.js.map +1 -1
  142. package/dist/type-generator/query-registry.js +4 -2
  143. package/dist/type-generator/query-registry.js.map +1 -1
  144. package/dist/type-generator/types.js.map +1 -1
  145. package/dist/type-generator/vite-plugin.d.ts.map +1 -1
  146. package/dist/type-generator/vite-plugin.js +5 -3
  147. package/dist/type-generator/vite-plugin.js.map +1 -1
  148. package/dist/utils/env-validator.js +5 -5
  149. package/dist/utils/env-validator.js.map +1 -1
  150. package/dist/utils/merge.js +1 -5
  151. package/dist/utils/merge.js.map +1 -1
  152. package/dist/utils/path-exclusions.js +66 -0
  153. package/dist/utils/path-exclusions.js.map +1 -0
  154. package/dist/utils/vite-config-merge.js +1 -5
  155. package/dist/utils/vite-config-merge.js.map +1 -1
  156. package/llms.txt +89 -12
  157. package/package.json +6 -1
  158. package/dist/utils/databricks-client-middleware.d.ts +0 -17
  159. package/dist/utils/databricks-client-middleware.d.ts.map +0 -1
  160. package/dist/utils/databricks-client-middleware.js +0 -117
  161. package/dist/utils/databricks-client-middleware.js.map +0 -1
  162. package/dist/utils/index.js +0 -26
  163. package/dist/utils/index.js.map +0 -1
package/dist/analytics/analytics.js CHANGED
@@ -1,5 +1,6 @@
- import { getRequestContext, getWorkspaceClient } from "../utils/databricks-client-middleware.js";
- import { init_utils } from "../utils/index.js";
+ import { createLogger } from "../logging/logger.js";
+ import { getCurrentUserId, getWarehouseId, getWorkspaceClient } from "../context/execution-context.js";
+ import { init_context } from "../context/index.js";
  import { SQLWarehouseConnector } from "../connectors/sql-warehouse/client.js";
  import "../connectors/index.js";
  import { Plugin } from "../plugin/plugin.js";
@@ -9,7 +10,8 @@ import { queryDefaults } from "./defaults.js";
  import { QueryProcessor } from "./query.js";

  //#region src/analytics/analytics.ts
- init_utils();
+ init_context();
+ const logger = createLogger("analytics");
  var AnalyticsPlugin = class extends Plugin {
  static {
  this.description = "Analytics plugin for data analysis";
@@ -18,7 +20,6 @@ var AnalyticsPlugin = class extends Plugin {
  super(config);
  this.name = "analytics";
  this.envVars = [];
- this.requiresDatabricksClient = true;
  this.config = config;
  this.queryProcessor = new QueryProcessor();
  this.SQLClient = new SQLWarehouseConnector({
@@ -35,12 +36,20 @@ var AnalyticsPlugin = class extends Plugin {
  await this._handleArrowRoute(req, res);
  }
  });
+ this.route(router, {
+ name: "query",
+ method: "post",
+ path: "/query/:query_key",
+ handler: async (req, res) => {
+ await this._handleQueryRoute(req, res);
+ }
+ });
  this.route(router, {
  name: "arrowAsUser",
  method: "get",
  path: "/users/me/arrow-result/:jobId",
  handler: async (req, res) => {
- await this._handleArrowRoute(req, res, { asUser: true });
+ await this.asUser(req)._handleArrowRoute(req, res);
  }
  });
  this.route(router, {
@@ -48,40 +57,51 @@
  method: "post",
  path: "/users/me/query/:query_key",
  handler: async (req, res) => {
- await this._handleQueryRoute(req, res, { asUser: true });
- }
- });
- this.route(router, {
- name: "query",
- method: "post",
- path: "/query/:query_key",
- handler: async (req, res) => {
- await this._handleQueryRoute(req, res, { asUser: false });
+ await this.asUser(req)._handleQueryRoute(req, res);
  }
  });
  }
- async _handleArrowRoute(req, res, { asUser = false } = {}) {
+ /**
+ * Handle Arrow data download requests.
+ * When called via asUser(req), uses the user's Databricks credentials.
+ */
+ async _handleArrowRoute(req, res) {
  try {
  const { jobId } = req.params;
- const workspaceClient = getWorkspaceClient(asUser);
- console.log(`Processing Arrow job request: ${jobId} for plugin: ${this.name}`);
+ const workspaceClient = getWorkspaceClient();
+ logger.debug("Processing Arrow job request for jobId=%s", jobId);
+ logger.event(req)?.setComponent("analytics", "getArrowData").setContext("analytics", {
+ job_id: jobId,
+ plugin: this.name
+ });
  const result = await this.getArrowData(workspaceClient, jobId);
  res.setHeader("Content-Type", "application/octet-stream");
  res.setHeader("Content-Length", result.data.length.toString());
  res.setHeader("Cache-Control", "public, max-age=3600");
- console.log(`Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`);
+ logger.debug("Sending Arrow buffer: %d bytes for job %s", result.data.length, jobId);
  res.send(Buffer.from(result.data));
  } catch (error) {
- console.error(`Arrow job error for ${this.name}:`, error);
+ logger.error("Arrow job error: %O", error);
  res.status(404).json({
  error: error instanceof Error ? error.message : "Arrow job not found",
  plugin: this.name
  });
  }
  }
- async _handleQueryRoute(req, res, { asUser = false } = {}) {
+ /**
+ * Handle SQL query execution requests.
+ * When called via asUser(req), uses the user's Databricks credentials.
+ */
+ async _handleQueryRoute(req, res) {
  const { query_key } = req.params;
  const { parameters, format = "JSON" } = req.body;
+ logger.debug(req, "Executing query: %s (format=%s)", query_key, format);
+ logger.event(req)?.setComponent("analytics", "executeQuery").setContext("analytics", {
+ query_key,
+ format,
+ parameter_count: parameters ? Object.keys(parameters).length : 0,
+ plugin: this.name
+ });
  const queryParameters = format === "ARROW" ? {
  formatParameters: {
  disposition: "EXTERNAL_LINKS",
@@ -89,8 +109,7 @@ var AnalyticsPlugin = class extends Plugin {
  },
  type: "arrow"
  } : { type: "result" };
- const requestContext = getRequestContext();
- const userKey = asUser ? requestContext.userId : requestContext.serviceUserId;
+ const userKey = getCurrentUserId();
  if (!query_key) {
  res.status(400).json({ error: "query_key is required" });
  return;
@@ -117,24 +136,42 @@ var AnalyticsPlugin = class extends Plugin {
  } };
  await this.executeStream(res, async (signal) => {
  const processedParams = await this.queryProcessor.processQueryParams(query, parameters);
- const result = await this.query(query, processedParams, queryParameters.formatParameters, signal, { asUser });
+ const result = await this.query(query, processedParams, queryParameters.formatParameters, signal);
  return {
  type: queryParameters.type,
  ...result
  };
  }, streamExecutionSettings, userKey);
  }
- async query(query, parameters, formatParameters, signal, { asUser = false } = {}) {
- const requestContext = getRequestContext();
- const workspaceClient = getWorkspaceClient(asUser);
+ /**
+ * Execute a SQL query using the current execution context.
+ *
+ * When called directly: uses service principal credentials.
+ * When called via asUser(req).query(...): uses user's credentials.
+ *
+ * @example
+ * ```typescript
+ * // Service principal execution
+ * const result = await analytics.query("SELECT * FROM table")
+ *
+ * // User context execution (in route handler)
+ * const result = await this.asUser(req).query("SELECT * FROM table")
+ * ```
+ */
+ async query(query, parameters, formatParameters, signal) {
+ const workspaceClient = getWorkspaceClient();
+ const warehouseId = await getWarehouseId();
  const { statement, parameters: sqlParameters } = this.queryProcessor.convertToSQLParameters(query, parameters);
  return (await this.SQLClient.executeStatement(workspaceClient, {
  statement,
- warehouse_id: await requestContext.warehouseId,
+ warehouse_id: warehouseId,
  parameters: sqlParameters,
  ...formatParameters
  }, signal)).result;
  }
+ /**
+ * Get Arrow-formatted data for a completed query job.
+ */
  async getArrowData(workspaceClient, jobId, signal) {
  return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);
  }
@@ -142,6 +179,9 @@ var AnalyticsPlugin = class extends Plugin {
  this.streamManager.abortAll();
  }
  };
+ /**
+ * @internal
+ */
  const analytics = toPlugin(AnalyticsPlugin, "analytics");

  //#endregion
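The analytics changes above replace the per-call `{ asUser }` option with an execution-context model: handlers obtain credentials from `getWorkspaceClient()`, `getCurrentUserId()`, and `getWarehouseId()`, and user-scoped routes switch context by wrapping the call in `this.asUser(req)`. A minimal sketch of a plugin using this pattern, assembled only from the calls visible in this diff (the plugin name, route paths, and response shape are illustrative assumptions):

```js
import { getCurrentUserId, getWarehouseId, getWorkspaceClient } from "../context/execution-context.js";
import { Plugin } from "../plugin/plugin.js";

// Hypothetical plugin modeled on AnalyticsPlugin above.
class ReportsPlugin extends Plugin {
  injectRoutes(router) {
    // Service-principal route: context helpers resolve to the app's own identity.
    this.route(router, {
      name: "report",
      method: "get",
      path: "/report",
      handler: async (req, res) => {
        await this._handleReport(req, res);
      }
    });
    // User route: asUser(req) switches the execution context, so the same
    // handler now runs with the caller's Databricks credentials.
    this.route(router, {
      name: "reportAsUser",
      method: "get",
      path: "/users/me/report",
      handler: async (req, res) => {
        await this.asUser(req)._handleReport(req, res);
      }
    });
  }
  async _handleReport(req, res) {
    const workspaceClient = getWorkspaceClient(); // whichever identity is active
    const warehouseId = await getWarehouseId();
    const userKey = getCurrentUserId(); // e.g. for per-user cache keys, as in _handleQueryRoute
    res.json({ userKey, warehouseId, hasClient: Boolean(workspaceClient) });
  }
}
```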
package/dist/analytics/analytics.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"analytics.js","names":["streamExecutionSettings: StreamExecutionSettings"],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport type { Request, Response } from \"../utils\";\nimport { getRequestContext, getWorkspaceClient } from \"../utils\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n requiresDatabricksClient = true;\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: Request, res: Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route(router, {\n name: \"arrowAsUser\",\n method: \"get\",\n path: \"/users/me/arrow-result/:jobId\",\n handler: async (req: Request, res: Response) => {\n await this._handleArrowRoute(req, res, { asUser: true });\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"queryAsUser\",\n method: \"post\",\n path: \"/users/me/query/:query_key\",\n handler: async (req: Request, res: Response) => {\n await this._handleQueryRoute(req, res, { asUser: true });\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: Request, res: Response) => {\n await this._handleQueryRoute(req, res, { asUser: false });\n },\n });\n }\n\n private async _handleArrowRoute(\n req: Request,\n res: Response,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n\n const workspaceClient = getWorkspaceClient(asUser);\n\n console.log(\n `Processing Arrow job request: ${jobId} for plugin: ${this.name}`,\n );\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n console.log(\n `Sending Arrow buffer: ${result.data.length} bytes for job ${jobId}`,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n console.error(`Arrow job error for ${this.name}:`, error);\n res.status(404).json({\n error: error instanceof Error ? 
error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n private async _handleQueryRoute(\n req: Request,\n res: Response,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n const requestContext = getRequestContext();\n const userKey = asUser\n ? requestContext.userId\n : requestContext.serviceUserId;\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const query = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!query) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n userKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await this.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await this.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n {\n asUser,\n },\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n userKey,\n );\n }\n\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n { asUser = false }: { asUser?: boolean } = {},\n ): Promise<any> {\n const requestContext = getRequestContext();\n const workspaceClient = getWorkspaceClient(asUser);\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: await requestContext.warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n // If we need arrow stream in more plugins we can define this as a base method in the core plugin class\n // and have a generic endpoint for each plugin that consumes this arrow data.\n protected async getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, 
\"analytics\");\n"],"mappings":";;;;;;;;;;;YAUiE;AASjE,IAAa,kBAAb,cAAqC,OAAO;;qBAKX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAZR;iBACG,EAAE;kCACe;AAWzB,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAC/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,MAAM,CAAC;;GAE3D,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,MAAM,CAAC;;GAE3D,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAc,QAAkB;AAC9C,UAAM,KAAK,kBAAkB,KAAK,KAAK,EAAE,QAAQ,OAAO,CAAC;;GAE5D,CAAC;;CAGJ,MAAc,kBACZ,KACA,KACA,EAAE,SAAS,UAAgC,EAAE,EAC9B;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GAEtB,MAAM,kBAAkB,mBAAmB,OAAO;AAElD,WAAQ,IACN,iCAAiC,MAAM,eAAe,KAAK,OAC5D;GAED,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,WAAQ,IACN,yBAAyB,OAAO,KAAK,OAAO,iBAAiB,QAC9D;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,WAAQ,MAAM,uBAAuB,KAAK,KAAK,IAAI,MAAM;AACzD,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;CAIN,MAAc,kBACZ,KACA,KACA,EAAE,SAAS,UAAgC,EAAE,EAC9B;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;EAC5C,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAEP,MAAM,iBAAiB,mBAAmB;EAC1C,MAAM,UAAU,SACZ,eAAe,SACf,eAAe;AAEnB,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,QAAQ,MAAM,KAAK,IAAI,YAC3B,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAMA,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,KAAK,cACT,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,KAAK,MACxB,OACA,iBACA,gBAAgB,kBAChB,QACA,EACE,QACD,CACF;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,QACD;;CAGH,MAAM,MACJ,OACA,YACA,kBACA,QACA,EAAE,SAAS,UAAgC,EAAE,EAC/B;EACd,MAAM,iBAAiB,mBAAmB;EAC1C,MAAM,kBAAkB,mBAAmB,OAAO;EAElD,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc,MAAM,eAAe;GACnC,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;CAKlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;AAIjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
+ {"version":3,"file":"analytics.js","names":[],"sources":["../../src/analytics/analytics.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type express from \"express\";\nimport type {\n IAppRouter,\n PluginExecuteConfig,\n SQLTypeMarker,\n StreamExecutionSettings,\n} from \"shared\";\nimport { SQLWarehouseConnector } from \"../connectors\";\nimport {\n getCurrentUserId,\n getWarehouseId,\n getWorkspaceClient,\n} from \"../context\";\nimport { createLogger } from \"../logging/logger\";\nimport { Plugin, toPlugin } from \"../plugin\";\nimport { queryDefaults } from \"./defaults\";\nimport { QueryProcessor } from \"./query\";\nimport type {\n AnalyticsQueryResponse,\n IAnalyticsConfig,\n IAnalyticsQueryRequest,\n} from \"./types\";\n\nconst logger = createLogger(\"analytics\");\n\nexport class AnalyticsPlugin extends Plugin {\n name = \"analytics\";\n envVars = [];\n\n protected static description = \"Analytics plugin for data analysis\";\n protected declare config: IAnalyticsConfig;\n\n // analytics services\n private SQLClient: SQLWarehouseConnector;\n private queryProcessor: QueryProcessor;\n\n constructor(config: IAnalyticsConfig) {\n super(config);\n this.config = config;\n this.queryProcessor = new QueryProcessor();\n\n this.SQLClient = new SQLWarehouseConnector({\n timeout: config.timeout,\n telemetry: config.telemetry,\n });\n }\n\n injectRoutes(router: IAppRouter) {\n // Service principal endpoints\n this.route(router, {\n name: \"arrow\",\n method: \"get\",\n path: \"/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"query\",\n method: \"post\",\n path: \"/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this._handleQueryRoute(req, res);\n },\n });\n\n // User context endpoints - use asUser(req) to execute with user's identity\n this.route(router, {\n name: \"arrowAsUser\",\n method: \"get\",\n path: \"/users/me/arrow-result/:jobId\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleArrowRoute(req, res);\n },\n });\n\n this.route<AnalyticsQueryResponse>(router, {\n name: \"queryAsUser\",\n method: \"post\",\n path: \"/users/me/query/:query_key\",\n handler: async (req: express.Request, res: express.Response) => {\n await this.asUser(req)._handleQueryRoute(req, res);\n },\n });\n }\n\n /**\n * Handle Arrow data download requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleArrowRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n try {\n const { jobId } = req.params;\n const workspaceClient = getWorkspaceClient();\n\n logger.debug(\"Processing Arrow job request for jobId=%s\", jobId);\n\n const event = logger.event(req);\n event?.setComponent(\"analytics\", \"getArrowData\").setContext(\"analytics\", {\n job_id: jobId,\n plugin: this.name,\n });\n\n const result = await this.getArrowData(workspaceClient, jobId);\n\n res.setHeader(\"Content-Type\", \"application/octet-stream\");\n res.setHeader(\"Content-Length\", result.data.length.toString());\n res.setHeader(\"Cache-Control\", \"public, max-age=3600\");\n\n logger.debug(\n \"Sending Arrow buffer: %d bytes for job %s\",\n result.data.length,\n jobId,\n );\n res.send(Buffer.from(result.data));\n } catch (error) {\n logger.error(\"Arrow job error: %O\", 
error);\n res.status(404).json({\n error: error instanceof Error ? error.message : \"Arrow job not found\",\n plugin: this.name,\n });\n }\n }\n\n /**\n * Handle SQL query execution requests.\n * When called via asUser(req), uses the user's Databricks credentials.\n */\n async _handleQueryRoute(\n req: express.Request,\n res: express.Response,\n ): Promise<void> {\n const { query_key } = req.params;\n const { parameters, format = \"JSON\" } = req.body as IAnalyticsQueryRequest;\n\n // Request-scoped logging with WideEvent tracking\n logger.debug(req, \"Executing query: %s (format=%s)\", query_key, format);\n\n const event = logger.event(req);\n event?.setComponent(\"analytics\", \"executeQuery\").setContext(\"analytics\", {\n query_key,\n format,\n parameter_count: parameters ? Object.keys(parameters).length : 0,\n plugin: this.name,\n });\n\n const queryParameters =\n format === \"ARROW\"\n ? {\n formatParameters: {\n disposition: \"EXTERNAL_LINKS\",\n format: \"ARROW_STREAM\",\n },\n type: \"arrow\",\n }\n : {\n type: \"result\",\n };\n\n // Get user key from current context (automatically includes user ID when in user context)\n const userKey = getCurrentUserId();\n\n if (!query_key) {\n res.status(400).json({ error: \"query_key is required\" });\n return;\n }\n\n const query = await this.app.getAppQuery(\n query_key,\n req,\n this.devFileReader,\n );\n\n if (!query) {\n res.status(404).json({ error: \"Query not found\" });\n return;\n }\n\n const hashedQuery = this.queryProcessor.hashQuery(query);\n\n const defaultConfig: PluginExecuteConfig = {\n ...queryDefaults,\n cache: {\n ...queryDefaults.cache,\n cacheKey: [\n \"analytics:query\",\n query_key,\n JSON.stringify(parameters),\n JSON.stringify(format),\n hashedQuery,\n userKey,\n ],\n },\n };\n\n const streamExecutionSettings: StreamExecutionSettings = {\n default: defaultConfig,\n };\n\n await this.executeStream(\n res,\n async (signal) => {\n const processedParams = await this.queryProcessor.processQueryParams(\n query,\n parameters,\n );\n\n const result = await this.query(\n query,\n processedParams,\n queryParameters.formatParameters,\n signal,\n );\n\n return { type: queryParameters.type, ...result };\n },\n streamExecutionSettings,\n userKey,\n );\n }\n\n /**\n * Execute a SQL query using the current execution context.\n *\n * When called directly: uses service principal credentials.\n * When called via asUser(req).query(...): uses user's credentials.\n *\n * @example\n * ```typescript\n * // Service principal execution\n * const result = await analytics.query(\"SELECT * FROM table\")\n *\n * // User context execution (in route handler)\n * const result = await this.asUser(req).query(\"SELECT * FROM table\")\n * ```\n */\n async query(\n query: string,\n parameters?: Record<string, SQLTypeMarker | null | undefined>,\n formatParameters?: Record<string, any>,\n signal?: AbortSignal,\n ): Promise<any> {\n const workspaceClient = getWorkspaceClient();\n const warehouseId = await getWarehouseId();\n\n const { statement, parameters: sqlParameters } =\n this.queryProcessor.convertToSQLParameters(query, parameters);\n\n const response = await this.SQLClient.executeStatement(\n workspaceClient,\n {\n statement,\n warehouse_id: warehouseId,\n parameters: sqlParameters,\n ...formatParameters,\n },\n signal,\n );\n\n return response.result;\n }\n\n /**\n * Get Arrow-formatted data for a completed query job.\n */\n protected async getArrowData(\n workspaceClient: WorkspaceClient,\n jobId: string,\n signal?: AbortSignal,\n ): 
Promise<ReturnType<typeof this.SQLClient.getArrowData>> {\n return await this.SQLClient.getArrowData(workspaceClient, jobId, signal);\n }\n\n async shutdown(): Promise<void> {\n this.streamManager.abortAll();\n }\n}\n\n/**\n * @internal\n */\nexport const analytics = toPlugin<\n typeof AnalyticsPlugin,\n IAnalyticsConfig,\n \"analytics\"\n>(AnalyticsPlugin, \"analytics\");\n"],"mappings":";;;;;;;;;;;;cAaoB;AAWpB,MAAM,SAAS,aAAa,YAAY;AAExC,IAAa,kBAAb,cAAqC,OAAO;;qBAIX;;CAO/B,YAAY,QAA0B;AACpC,QAAM,OAAO;cAXR;iBACG,EAAE;AAWV,OAAK,SAAS;AACd,OAAK,iBAAiB,IAAI,gBAAgB;AAE1C,OAAK,YAAY,IAAI,sBAAsB;GACzC,SAAS,OAAO;GAChB,WAAW,OAAO;GACnB,CAAC;;CAGJ,aAAa,QAAoB;AAE/B,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,kBAAkB,KAAK,IAAI;;GAEzC,CAAC;AAGF,OAAK,MAAM,QAAQ;GACjB,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;AAEF,OAAK,MAA8B,QAAQ;GACzC,MAAM;GACN,QAAQ;GACR,MAAM;GACN,SAAS,OAAO,KAAsB,QAA0B;AAC9D,UAAM,KAAK,OAAO,IAAI,CAAC,kBAAkB,KAAK,IAAI;;GAErD,CAAC;;;;;;CAOJ,MAAM,kBACJ,KACA,KACe;AACf,MAAI;GACF,MAAM,EAAE,UAAU,IAAI;GACtB,MAAM,kBAAkB,oBAAoB;AAE5C,UAAO,MAAM,6CAA6C,MAAM;AAGhE,GADc,OAAO,MAAM,IAAI,EACxB,aAAa,aAAa,eAAe,CAAC,WAAW,aAAa;IACvE,QAAQ;IACR,QAAQ,KAAK;IACd,CAAC;GAEF,MAAM,SAAS,MAAM,KAAK,aAAa,iBAAiB,MAAM;AAE9D,OAAI,UAAU,gBAAgB,2BAA2B;AACzD,OAAI,UAAU,kBAAkB,OAAO,KAAK,OAAO,UAAU,CAAC;AAC9D,OAAI,UAAU,iBAAiB,uBAAuB;AAEtD,UAAO,MACL,6CACA,OAAO,KAAK,QACZ,MACD;AACD,OAAI,KAAK,OAAO,KAAK,OAAO,KAAK,CAAC;WAC3B,OAAO;AACd,UAAO,MAAM,uBAAuB,MAAM;AAC1C,OAAI,OAAO,IAAI,CAAC,KAAK;IACnB,OAAO,iBAAiB,QAAQ,MAAM,UAAU;IAChD,QAAQ,KAAK;IACd,CAAC;;;;;;;CAQN,MAAM,kBACJ,KACA,KACe;EACf,MAAM,EAAE,cAAc,IAAI;EAC1B,MAAM,EAAE,YAAY,SAAS,WAAW,IAAI;AAG5C,SAAO,MAAM,KAAK,mCAAmC,WAAW,OAAO;AAGvE,EADc,OAAO,MAAM,IAAI,EACxB,aAAa,aAAa,eAAe,CAAC,WAAW,aAAa;GACvE;GACA;GACA,iBAAiB,aAAa,OAAO,KAAK,WAAW,CAAC,SAAS;GAC/D,QAAQ,KAAK;GACd,CAAC;EAEF,MAAM,kBACJ,WAAW,UACP;GACE,kBAAkB;IAChB,aAAa;IACb,QAAQ;IACT;GACD,MAAM;GACP,GACD,EACE,MAAM,UACP;EAGP,MAAM,UAAU,kBAAkB;AAElC,MAAI,CAAC,WAAW;AACd,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,yBAAyB,CAAC;AACxD;;EAGF,MAAM,QAAQ,MAAM,KAAK,IAAI,YAC3B,WACA,KACA,KAAK,cACN;AAED,MAAI,CAAC,OAAO;AACV,OAAI,OAAO,IAAI,CAAC,KAAK,EAAE,OAAO,mBAAmB,CAAC;AAClD;;EAGF,MAAM,cAAc,KAAK,eAAe,UAAU,MAAM;EAiBxD,MAAM,0BAAmD,EACvD,SAhByC;GACzC,GAAG;GACH,OAAO;IACL,GAAG,cAAc;IACjB,UAAU;KACR;KACA;KACA,KAAK,UAAU,WAAW;KAC1B,KAAK,UAAU,OAAO;KACtB;KACA;KACD;IACF;GACF,EAIA;AAED,QAAM,KAAK,cACT,KACA,OAAO,WAAW;GAChB,MAAM,kBAAkB,MAAM,KAAK,eAAe,mBAChD,OACA,WACD;GAED,MAAM,SAAS,MAAM,KAAK,MACxB,OACA,iBACA,gBAAgB,kBAChB,OACD;AAED,UAAO;IAAE,MAAM,gBAAgB;IAAM,GAAG;IAAQ;KAElD,yBACA,QACD;;;;;;;;;;;;;;;;;CAkBH,MAAM,MACJ,OACA,YACA,kBACA,QACc;EACd,MAAM,kBAAkB,oBAAoB;EAC5C,MAAM,cAAc,MAAM,gBAAgB;EAE1C,MAAM,EAAE,WAAW,YAAY,kBAC7B,KAAK,eAAe,uBAAuB,OAAO,WAAW;AAa/D,UAXiB,MAAM,KAAK,UAAU,iBACpC,iBACA;GACE;GACA,cAAc;GACd,YAAY;GACZ,GAAG;GACJ,EACD,OACD,EAEe;;;;;CAMlB,MAAgB,aACd,iBACA,OACA,QACyD;AACzD,SAAO,MAAM,KAAK,UAAU,aAAa,iBAAiB,OAAO,OAAO;;CAG1E,MAAM,WAA0B;AAC9B,OAAK,cAAc,UAAU;;;;;;AAOjC,MAAa,YAAY,SAIvB,iBAAiB,YAAY"}
package/dist/analytics/defaults.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"defaults.js","names":["queryDefaults: PluginExecuteConfig"],"sources":["../../src/analytics/defaults.ts"],"sourcesContent":["import type { PluginExecuteConfig } from \"shared\";\n\nexport const queryDefaults: PluginExecuteConfig = {\n cache: {\n enabled: true,\n ttl: 3600,\n },\n retry: {\n enabled: true,\n initialDelay: 1500,\n attempts: 3,\n },\n timeout: 18000,\n};\n"],"mappings":";AAEA,MAAaA,gBAAqC;CAChD,OAAO;EACL,SAAS;EACT,KAAK;EACN;CACD,OAAO;EACL,SAAS;EACT,cAAc;EACd,UAAU;EACX;CACD,SAAS;CACV"}
+ {"version":3,"file":"defaults.js","names":[],"sources":["../../src/analytics/defaults.ts"],"sourcesContent":["import type { PluginExecuteConfig } from \"shared\";\n\nexport const queryDefaults: PluginExecuteConfig = {\n cache: {\n enabled: true,\n ttl: 3600,\n },\n retry: {\n enabled: true,\n initialDelay: 1500,\n attempts: 3,\n },\n timeout: 18000,\n};\n"],"mappings":";AAEA,MAAa,gBAAqC;CAChD,OAAO;EACL,SAAS;EACT,KAAK;EACN;CACD,OAAO;EACL,SAAS;EACT,cAAc;EACd,UAAU;EACX;CACD,SAAS;CACV"}
package/dist/analytics/query.js CHANGED
@@ -1,16 +1,19 @@
  import { isSQLTypeMarker, sql } from "../shared/src/sql/helpers.js";
- import { getRequestContext } from "../utils/databricks-client-middleware.js";
- import { init_utils } from "../utils/index.js";
+ import { ValidationError } from "../errors/validation.js";
+ import { init_errors } from "../errors/index.js";
+ import { getWorkspaceId } from "../context/execution-context.js";
+ import { init_context } from "../context/index.js";
  import { createHash } from "node:crypto";

  //#region src/analytics/query.ts
- init_utils();
+ init_context();
+ init_errors();
  var QueryProcessor = class {
  async processQueryParams(query, parameters) {
  const processed = { ...parameters };
  const paramMatches = query.matchAll(/:([a-zA-Z_]\w*)/g);
  if (new Set(Array.from(paramMatches, (m) => m[1])).has("workspaceId") && !processed.workspaceId) {
- const workspaceId = await getRequestContext().workspaceId;
+ const workspaceId = await getWorkspaceId();
  if (workspaceId) processed.workspaceId = sql.string(workspaceId);
  }
  return processed;
@@ -23,7 +26,10 @@ var QueryProcessor = class {
  if (parameters) {
  const queryParamMatches = query.matchAll(/:([a-zA-Z_]\w*)/g);
  const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));
- for (const key of Object.keys(parameters)) if (!queryParams.has(key)) throw new Error(`Parameter "${key}" not found in query. Valid parameters: ${Array.from(queryParams).join(", ") || "none"}`);
+ for (const key of Object.keys(parameters)) if (!queryParams.has(key)) {
+ const validParams = Array.from(queryParams).join(", ") || "none";
+ throw ValidationError.invalidValue(key, parameters[key], `a parameter defined in the query (valid: ${validParams})`);
+ }
  for (const [key, value] of Object.entries(parameters)) {
  const parameter = this._createParameter(key, value);
  if (parameter) sqlParameters.push(parameter);
@@ -36,7 +42,7 @@
  }
  _createParameter(key, value) {
  if (value === null || value === void 0) return null;
- if (!isSQLTypeMarker(value)) throw new Error(`Parameter "${key}" must be a SQL type. Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`);
+ if (!isSQLTypeMarker(value)) throw ValidationError.invalidValue(key, value, "SQL type (use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean())");
  return {
  name: key,
  value: value.value,
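Both rejection paths in `QueryProcessor` now throw `ValidationError.invalidValue(key, value, expectation)` from the new errors module instead of a plain `Error`. A minimal sketch of what a caller sees, using only the imports and factory call visible in this diff (the fields available on the thrown error beyond `message` are an assumption):

```js
import { sql } from "../shared/src/sql/helpers.js";
import { ValidationError } from "../errors/validation.js";
import { QueryProcessor } from "./query.js";

const processor = new QueryProcessor();

try {
  // "limit" is not a :named parameter of the statement, so
  // convertToSQLParameters rejects it via ValidationError.invalidValue(...).
  processor.convertToSQLParameters("SELECT * FROM t WHERE region = :region", {
    region: sql.string("EMEA"),
    limit: sql.number(10)
  });
} catch (error) {
  if (error instanceof ValidationError) {
    // Assumed: the error carries a readable message; other fields are not shown in this diff.
    console.warn("Rejected query parameters:", error.message);
  } else {
    throw error;
  }
}
```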
package/dist/analytics/query.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"query.js","names":["sqlHelpers","sqlParameters: sql.StatementParameterListItem[]"],"sources":["../../src/analytics/query.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { sql } from \"@databricks/sdk-experimental\";\nimport { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from \"shared\";\nimport { getRequestContext } from \"../utils\";\n\ntype SQLParameterValue = SQLTypeMarker | null | undefined;\n\nexport class QueryProcessor {\n async processQueryParams(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): Promise<Record<string, SQLParameterValue>> {\n const processed = { ...parameters };\n\n // extract all params from the query\n const paramMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(paramMatches, (m) => m[1]));\n\n // auto-inject workspaceId if needed and not provided\n if (queryParams.has(\"workspaceId\") && !processed.workspaceId) {\n const requestContext = getRequestContext();\n const workspaceId = await requestContext.workspaceId;\n if (workspaceId) {\n processed.workspaceId = sqlHelpers.string(workspaceId);\n }\n }\n\n return processed;\n }\n\n hashQuery(query: string): string {\n return createHash(\"md5\").update(query).digest(\"hex\");\n }\n\n convertToSQLParameters(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): { statement: string; parameters: sql.StatementParameterListItem[] } {\n const sqlParameters: sql.StatementParameterListItem[] = [];\n\n if (parameters) {\n // extract all params from the query\n const queryParamMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));\n\n // only allow parameters that exist in the query\n for (const key of Object.keys(parameters)) {\n if (!queryParams.has(key)) {\n throw new Error(\n `Parameter \"${key}\" not found in query. Valid parameters: ${\n Array.from(queryParams).join(\", \") || \"none\"\n }`,\n );\n }\n }\n\n // convert parameters to SQL parameters\n for (const [key, value] of Object.entries(parameters)) {\n const parameter = this._createParameter(key, value);\n if (parameter) {\n sqlParameters.push(parameter);\n }\n }\n }\n\n return { statement: query, parameters: sqlParameters };\n }\n\n private _createParameter(\n key: string,\n value: SQLParameterValue,\n ): sql.StatementParameterListItem | null {\n if (value === null || value === undefined) {\n return null;\n }\n\n if (!isSQLTypeMarker(value)) {\n throw new Error(\n `Parameter \"${key}\" must be a SQL type. 
Use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean().`,\n );\n }\n\n return {\n name: key,\n value: value.value,\n type: value.__sql_type,\n };\n }\n}\n"],"mappings":";;;;;;YAG6C;AAI7C,IAAa,iBAAb,MAA4B;CAC1B,MAAM,mBACJ,OACA,YAC4C;EAC5C,MAAM,YAAY,EAAE,GAAG,YAAY;EAGnC,MAAM,eAAe,MAAM,SAAS,mBAAmB;AAIvD,MAHoB,IAAI,IAAI,MAAM,KAAK,eAAe,MAAM,EAAE,GAAG,CAAC,CAGlD,IAAI,cAAc,IAAI,CAAC,UAAU,aAAa;GAE5D,MAAM,cAAc,MADG,mBAAmB,CACD;AACzC,OAAI,YACF,WAAU,cAAcA,IAAW,OAAO,YAAY;;AAI1D,SAAO;;CAGT,UAAU,OAAuB;AAC/B,SAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;CAGtD,uBACE,OACA,YACqE;EACrE,MAAMC,gBAAkD,EAAE;AAE1D,MAAI,YAAY;GAEd,MAAM,oBAAoB,MAAM,SAAS,mBAAmB;GAC5D,MAAM,cAAc,IAAI,IAAI,MAAM,KAAK,oBAAoB,MAAM,EAAE,GAAG,CAAC;AAGvE,QAAK,MAAM,OAAO,OAAO,KAAK,WAAW,CACvC,KAAI,CAAC,YAAY,IAAI,IAAI,CACvB,OAAM,IAAI,MACR,cAAc,IAAI,0CAChB,MAAM,KAAK,YAAY,CAAC,KAAK,KAAK,IAAI,SAEzC;AAKL,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,EAAE;IACrD,MAAM,YAAY,KAAK,iBAAiB,KAAK,MAAM;AACnD,QAAI,UACF,eAAc,KAAK,UAAU;;;AAKnC,SAAO;GAAE,WAAW;GAAO,YAAY;GAAe;;CAGxD,AAAQ,iBACN,KACA,OACuC;AACvC,MAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO;AAGT,MAAI,CAAC,gBAAgB,MAAM,CACzB,OAAM,IAAI,MACR,cAAc,IAAI,sGACnB;AAGH,SAAO;GACL,MAAM;GACN,OAAO,MAAM;GACb,MAAM,MAAM;GACb"}
+ {"version":3,"file":"query.js","names":["sqlHelpers"],"sources":["../../src/analytics/query.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type { sql } from \"@databricks/sdk-experimental\";\nimport { isSQLTypeMarker, type SQLTypeMarker, sql as sqlHelpers } from \"shared\";\nimport { getWorkspaceId } from \"../context\";\nimport { ValidationError } from \"../errors\";\n\ntype SQLParameterValue = SQLTypeMarker | null | undefined;\n\nexport class QueryProcessor {\n async processQueryParams(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): Promise<Record<string, SQLParameterValue>> {\n const processed = { ...parameters };\n\n // extract all params from the query\n const paramMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(paramMatches, (m) => m[1]));\n\n // auto-inject workspaceId if needed and not provided\n if (queryParams.has(\"workspaceId\") && !processed.workspaceId) {\n const workspaceId = await getWorkspaceId();\n if (workspaceId) {\n processed.workspaceId = sqlHelpers.string(workspaceId);\n }\n }\n\n return processed;\n }\n\n hashQuery(query: string): string {\n return createHash(\"md5\").update(query).digest(\"hex\");\n }\n\n convertToSQLParameters(\n query: string,\n parameters?: Record<string, SQLParameterValue>,\n ): { statement: string; parameters: sql.StatementParameterListItem[] } {\n const sqlParameters: sql.StatementParameterListItem[] = [];\n\n if (parameters) {\n // extract all params from the query\n const queryParamMatches = query.matchAll(/:([a-zA-Z_]\\w*)/g);\n const queryParams = new Set(Array.from(queryParamMatches, (m) => m[1]));\n\n // only allow parameters that exist in the query\n for (const key of Object.keys(parameters)) {\n if (!queryParams.has(key)) {\n const validParams = Array.from(queryParams).join(\", \") || \"none\";\n throw ValidationError.invalidValue(\n key,\n parameters[key],\n `a parameter defined in the query (valid: ${validParams})`,\n );\n }\n }\n\n // convert parameters to SQL parameters\n for (const [key, value] of Object.entries(parameters)) {\n const parameter = this._createParameter(key, value);\n if (parameter) {\n sqlParameters.push(parameter);\n }\n }\n }\n\n return { statement: query, parameters: sqlParameters };\n }\n\n private _createParameter(\n key: string,\n value: SQLParameterValue,\n ): sql.StatementParameterListItem | null {\n if (value === null || value === undefined) {\n return null;\n }\n\n if (!isSQLTypeMarker(value)) {\n throw ValidationError.invalidValue(\n key,\n value,\n \"SQL type (use sql.string(), sql.number(), sql.date(), sql.timestamp(), or sql.boolean())\",\n );\n }\n\n return {\n name: key,\n value: value.value,\n type: value.__sql_type,\n };\n 
}\n}\n"],"mappings":";;;;;;;;cAG4C;aACA;AAI5C,IAAa,iBAAb,MAA4B;CAC1B,MAAM,mBACJ,OACA,YAC4C;EAC5C,MAAM,YAAY,EAAE,GAAG,YAAY;EAGnC,MAAM,eAAe,MAAM,SAAS,mBAAmB;AAIvD,MAHoB,IAAI,IAAI,MAAM,KAAK,eAAe,MAAM,EAAE,GAAG,CAAC,CAGlD,IAAI,cAAc,IAAI,CAAC,UAAU,aAAa;GAC5D,MAAM,cAAc,MAAM,gBAAgB;AAC1C,OAAI,YACF,WAAU,cAAcA,IAAW,OAAO,YAAY;;AAI1D,SAAO;;CAGT,UAAU,OAAuB;AAC/B,SAAO,WAAW,MAAM,CAAC,OAAO,MAAM,CAAC,OAAO,MAAM;;CAGtD,uBACE,OACA,YACqE;EACrE,MAAM,gBAAkD,EAAE;AAE1D,MAAI,YAAY;GAEd,MAAM,oBAAoB,MAAM,SAAS,mBAAmB;GAC5D,MAAM,cAAc,IAAI,IAAI,MAAM,KAAK,oBAAoB,MAAM,EAAE,GAAG,CAAC;AAGvE,QAAK,MAAM,OAAO,OAAO,KAAK,WAAW,CACvC,KAAI,CAAC,YAAY,IAAI,IAAI,EAAE;IACzB,MAAM,cAAc,MAAM,KAAK,YAAY,CAAC,KAAK,KAAK,IAAI;AAC1D,UAAM,gBAAgB,aACpB,KACA,WAAW,MACX,4CAA4C,YAAY,GACzD;;AAKL,QAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,WAAW,EAAE;IACrD,MAAM,YAAY,KAAK,iBAAiB,KAAK,MAAM;AACnD,QAAI,UACF,eAAc,KAAK,UAAU;;;AAKnC,SAAO;GAAE,WAAW;GAAO,YAAY;GAAe;;CAGxD,AAAQ,iBACN,KACA,OACuC;AACvC,MAAI,UAAU,QAAQ,UAAU,OAC9B,QAAO;AAGT,MAAI,CAAC,gBAAgB,MAAM,CACzB,OAAM,gBAAgB,aACpB,KACA,OACA,2FACD;AAGH,SAAO;GACL,MAAM;GACN,OAAO,MAAM;GACb,MAAM,MAAM;GACb"}
package/dist/app/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":[],"mappings":";UAGU,WAAA;EAAA,KAAA,CAAA,EACA,MADW,CAAA,MAAA,EAAA,GAAA,CAAA;EAAA,OAAA,EAEV,MAFU,CAAA,MAAA,EAAA,MAAA,GAAA,MAAA,EAAA,GAAA,SAAA,CAAA;;UAKX,aAAA,CAHC;EAAM,QAAA,CAAA,QAAA,EAAA,MAAA,EAAA,GAAA,EAIiB,WAJjB,CAAA,EAI+B,OAJ/B,CAAA,MAAA,CAAA;AAAA;AAGM,cAIV,UAAA,CAJU;;;;AAIvB;;;;;;sCAYU,6BACU,gBACf"}
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":[],"mappings":";UAMU,WAAA;EAAA,KAAA,CAAA,EACA,MADW,CAAA,MAAA,EAAA,GAAA,CAAA;EAAA,OAAA,EAEV,MAFU,CAAA,MAAA,EAAA,MAAA,GAAA,MAAA,EAAA,GAAA,SAAA,CAAA;;UAKX,aAAA,CAHC;EAAM,QAAA,CAAA,QAAA,EAAA,MAAA,EAAA,GAAA,EAIiB,WAJjB,CAAA,EAI+B,OAJ/B,CAAA,MAAA,CAAA;AAAA;AAGM,cAIV,UAAA,CAJU;;;;AAIvB;;;;;;sCAYU,6BACU,gBACf"}
package/dist/app/index.js CHANGED
@@ -1,7 +1,9 @@
+ import { createLogger } from "../logging/logger.js";
  import fs from "node:fs/promises";
  import path from "node:path";

  //#region src/app/index.ts
+ const logger = createLogger("app");
  var AppManager = class {
  /**
  * Retrieves a query file by key from the queries directory
@@ -14,31 +16,31 @@ var AppManager = class {
  */
  async getAppQuery(queryKey, req, devFileReader) {
  if (!queryKey || !/^[a-zA-Z0-9_-]+$/.test(queryKey)) {
- console.error(`Invalid query key format: "${queryKey}". Only alphanumeric characters, underscores, and hyphens are allowed.`);
+ logger.error("Invalid query key format: %s. Only alphanumeric characters, underscores, and hyphens are allowed.", queryKey);
  return null;
  }
  const queryFilePath = path.join(process.cwd(), "config/queries", `${queryKey}.sql`);
  const resolvedPath = path.resolve(queryFilePath);
  const queriesDir = path.resolve(process.cwd(), "config/queries");
  if (!resolvedPath.startsWith(queriesDir)) {
- console.error(`Invalid query path: path traversal detected`);
+ logger.error("Invalid query path: path traversal detected");
  return null;
  }
  if (req?.query?.dev !== void 0 && devFileReader && req) try {
  const relativePath = path.relative(process.cwd(), resolvedPath);
  return await devFileReader.readFile(relativePath, req);
  } catch (error) {
- console.error(`Failed to read query "${queryKey}" from dev tunnel: ${error.message}`);
+ logger.error("Failed to read query %s from dev tunnel: %s", queryKey, error.message);
  return null;
  }
  try {
  return await fs.readFile(resolvedPath, "utf8");
  } catch (error) {
  if (error.code === "ENOENT") {
- console.error(`Query "${queryKey}" not found at path: ${resolvedPath}`);
+ logger.debug("Query %s not found at path: %s", queryKey, resolvedPath);
  return null;
  }
- console.error(`Failed to read query "${queryKey}" from server filesystem: ${error.message}`);
+ logger.error("Failed to read query %s from server filesystem: %s", queryKey, error.message);
  return null;
  }
  }
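`console.*` logging is replaced throughout by module-scoped loggers from the new logging module, using printf-style placeholders (`%s`, `%d`, `%O`), an optional leading request argument for request-scoped logs, and a `logger.event(req)` wide-event builder (see the analytics handlers earlier in this diff). A minimal sketch of that surface as it appears at the call sites above; the import path mirrors the package-internal one, and nothing beyond these calls is implied:

```js
import fs from "node:fs/promises";
import { createLogger } from "../logging/logger.js";

const logger = createLogger("my-module"); // namespace, e.g. "analytics" or "app"

export async function loadQuery(queryKey, req) {
  // Plain printf-style logging, as in AppManager.getAppQuery above.
  logger.debug("Query %s requested", queryKey);

  if (req) {
    // Request-scoped variant: the request is passed first, as _handleQueryRoute does.
    logger.debug(req, "Loading query: %s", queryKey);

    // Wide-event enrichment; logger.event(req) may return undefined, hence the ?. chain.
    logger
      .event(req)
      ?.setComponent("my-module", "loadQuery")
      .setContext("my-module", { query_key: queryKey });
  }

  try {
    return await fs.readFile(`config/queries/${queryKey}.sql`, "utf8");
  } catch (error) {
    logger.error("Failed to read query %s: %O", queryKey, error);
    return null;
  }
}
```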
package/dist/app/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\n\ninterface RequestLike {\n query?: Record<string, any>;\n headers: Record<string, string | string[] | undefined>;\n}\n\ninterface DevFileReader {\n readFile(filePath: string, req: RequestLike): Promise<string>;\n}\n\nexport class AppManager {\n /**\n * Retrieves a query file by key from the queries directory\n * In dev mode with a request context, reads from local filesystem via WebSocket\n * @param queryKey - The query file name (without extension)\n * @param req - Optional request object to detect dev mode\n * @param devFileReader - Optional DevFileReader instance to read files from local filesystem\n * @returns The query content as a string\n * @throws Error if query key is invalid or file not found\n */\n async getAppQuery(\n queryKey: string,\n req?: RequestLike,\n devFileReader?: DevFileReader,\n ): Promise<string | null> {\n // Security: Sanitize query key to prevent path traversal\n if (!queryKey || !/^[a-zA-Z0-9_-]+$/.test(queryKey)) {\n console.error(\n `Invalid query key format: \"${queryKey}\". Only alphanumeric characters, underscores, and hyphens are allowed.`,\n );\n return null;\n }\n\n const queryFilePath = path.join(\n process.cwd(),\n \"config/queries\",\n `${queryKey}.sql`,\n );\n\n // Security: Validate resolved path is within queries directory\n const resolvedPath = path.resolve(queryFilePath);\n const queriesDir = path.resolve(process.cwd(), \"config/queries\");\n\n if (!resolvedPath.startsWith(queriesDir)) {\n console.error(`Invalid query path: path traversal detected`);\n return null;\n }\n\n // Check if we're in dev mode and should use WebSocket\n const isDevMode = req?.query?.dev !== undefined;\n\n if (isDevMode && devFileReader && req) {\n try {\n // Read from local filesystem via WebSocket tunnel\n const relativePath = path.relative(process.cwd(), resolvedPath);\n return await devFileReader.readFile(relativePath, req);\n } catch (error) {\n console.error(\n `Failed to read query \"${queryKey}\" from dev tunnel: ${(error as Error).message}`,\n );\n return null;\n }\n }\n\n // Production mode: read from server filesystem\n try {\n const query = await fs.readFile(resolvedPath, \"utf8\");\n return query;\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === \"ENOENT\") {\n console.error(`Query \"${queryKey}\" not found at path: ${resolvedPath}`);\n return null;\n }\n console.error(\n `Failed to read query \"${queryKey}\" from server filesystem: ${(error as Error).message}`,\n );\n return null;\n }\n }\n}\n\nexport type { DevFileReader, RequestLike };\n"],"mappings":";;;;AAYA,IAAa,aAAb,MAAwB;;;;;;;;;;CAUtB,MAAM,YACJ,UACA,KACA,eACwB;AAExB,MAAI,CAAC,YAAY,CAAC,mBAAmB,KAAK,SAAS,EAAE;AACnD,WAAQ,MACN,8BAA8B,SAAS,wEACxC;AACD,UAAO;;EAGT,MAAM,gBAAgB,KAAK,KACzB,QAAQ,KAAK,EACb,kBACA,GAAG,SAAS,MACb;EAGD,MAAM,eAAe,KAAK,QAAQ,cAAc;EAChD,MAAM,aAAa,KAAK,QAAQ,QAAQ,KAAK,EAAE,iBAAiB;AAEhE,MAAI,CAAC,aAAa,WAAW,WAAW,EAAE;AACxC,WAAQ,MAAM,8CAA8C;AAC5D,UAAO;;AAMT,MAFkB,KAAK,OAAO,QAAQ,UAErB,iBAAiB,IAChC,KAAI;GAEF,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,aAAa;AAC/D,UAAO,MAAM,cAAc,SAAS,cAAc,IAAI;WAC/C,OAAO;AACd,WAAQ,MACN,yBAAyB,SAAS,qBAAsB,MAAgB,UACzE;AACD,UAAO;;AAKX,MAAI;AAEF,UADc,MAAM,GAAG,SAAS,cAAc,OAAO;WAE9C,OAAO;AACd,OAAK,MAAgC,SAAS,UAAU;AACtD,YAAQ,MAAM,UAAU,SAAS,uBAAuB,eAAe;AACvE,WAAO;;AAET,WAAQ,MACN,yBAAyB,SAAS,4BAA6B,MAAgB,UAChF;AACD,UAAO"}
+ {"version":3,"file":"index.js","names":[],"sources":["../../src/app/index.ts"],"sourcesContent":["import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { createLogger } from \"../logging/logger\";\n\nconst logger = createLogger(\"app\");\n\ninterface RequestLike {\n query?: Record<string, any>;\n headers: Record<string, string | string[] | undefined>;\n}\n\ninterface DevFileReader {\n readFile(filePath: string, req: RequestLike): Promise<string>;\n}\n\nexport class AppManager {\n /**\n * Retrieves a query file by key from the queries directory\n * In dev mode with a request context, reads from local filesystem via WebSocket\n * @param queryKey - The query file name (without extension)\n * @param req - Optional request object to detect dev mode\n * @param devFileReader - Optional DevFileReader instance to read files from local filesystem\n * @returns The query content as a string\n * @throws Error if query key is invalid or file not found\n */\n async getAppQuery(\n queryKey: string,\n req?: RequestLike,\n devFileReader?: DevFileReader,\n ): Promise<string | null> {\n // Security: Sanitize query key to prevent path traversal\n if (!queryKey || !/^[a-zA-Z0-9_-]+$/.test(queryKey)) {\n logger.error(\n \"Invalid query key format: %s. Only alphanumeric characters, underscores, and hyphens are allowed.\",\n queryKey,\n );\n return null;\n }\n\n const queryFilePath = path.join(\n process.cwd(),\n \"config/queries\",\n `${queryKey}.sql`,\n );\n\n // Security: Validate resolved path is within queries directory\n const resolvedPath = path.resolve(queryFilePath);\n const queriesDir = path.resolve(process.cwd(), \"config/queries\");\n\n if (!resolvedPath.startsWith(queriesDir)) {\n logger.error(\"Invalid query path: path traversal detected\");\n return null;\n }\n\n // Check if we're in dev mode and should use WebSocket\n const isDevMode = req?.query?.dev !== undefined;\n\n if (isDevMode && devFileReader && req) {\n try {\n // Read from local filesystem via WebSocket tunnel\n const relativePath = path.relative(process.cwd(), resolvedPath);\n return await devFileReader.readFile(relativePath, req);\n } catch (error) {\n logger.error(\n \"Failed to read query %s from dev tunnel: %s\",\n queryKey,\n (error as Error).message,\n );\n return null;\n }\n }\n\n // Production mode: read from server filesystem\n try {\n const query = await fs.readFile(resolvedPath, \"utf8\");\n return query;\n } catch (error) {\n if ((error as NodeJS.ErrnoException).code === \"ENOENT\") {\n logger.debug(\"Query %s not found at path: %s\", queryKey, resolvedPath);\n return null;\n }\n logger.error(\n \"Failed to read query %s from server filesystem: %s\",\n queryKey,\n (error as Error).message,\n );\n return null;\n }\n }\n}\n\nexport type { DevFileReader, RequestLike 
};\n"],"mappings":";;;;;AAIA,MAAM,SAAS,aAAa,MAAM;AAWlC,IAAa,aAAb,MAAwB;;;;;;;;;;CAUtB,MAAM,YACJ,UACA,KACA,eACwB;AAExB,MAAI,CAAC,YAAY,CAAC,mBAAmB,KAAK,SAAS,EAAE;AACnD,UAAO,MACL,qGACA,SACD;AACD,UAAO;;EAGT,MAAM,gBAAgB,KAAK,KACzB,QAAQ,KAAK,EACb,kBACA,GAAG,SAAS,MACb;EAGD,MAAM,eAAe,KAAK,QAAQ,cAAc;EAChD,MAAM,aAAa,KAAK,QAAQ,QAAQ,KAAK,EAAE,iBAAiB;AAEhE,MAAI,CAAC,aAAa,WAAW,WAAW,EAAE;AACxC,UAAO,MAAM,8CAA8C;AAC3D,UAAO;;AAMT,MAFkB,KAAK,OAAO,QAAQ,UAErB,iBAAiB,IAChC,KAAI;GAEF,MAAM,eAAe,KAAK,SAAS,QAAQ,KAAK,EAAE,aAAa;AAC/D,UAAO,MAAM,cAAc,SAAS,cAAc,IAAI;WAC/C,OAAO;AACd,UAAO,MACL,+CACA,UACC,MAAgB,QAClB;AACD,UAAO;;AAKX,MAAI;AAEF,UADc,MAAM,GAAG,SAAS,cAAc,OAAO;WAE9C,OAAO;AACd,OAAK,MAAgC,SAAS,UAAU;AACtD,WAAO,MAAM,kCAAkC,UAAU,aAAa;AACtE,WAAO;;AAET,UAAO,MACL,sDACA,UACC,MAAgB,QAClB;AACD,UAAO"}
@@ -1,6 +1,6 @@
  //#region package.json
  var name = "@databricks/appkit";
- var version = "0.1.4";
+ var version = "0.2.0";
 
  //#endregion
  export { name, version };
@@ -1 +1 @@
- {"version":3,"file":"defaults.js","names":["cacheDefaults: CacheConfig"],"sources":["../../src/cache/defaults.ts"],"sourcesContent":["import type { CacheConfig } from \"shared\";\n\n/** Default configuration for cache */\nexport const cacheDefaults: CacheConfig = {\n enabled: true,\n ttl: 3600, // 1 hour\n maxSize: 1000, // 1000 entries\n cacheKey: [], // no cache key by default\n cleanupProbability: 0.01, // 1% probability of triggering cleanup on each get operation\n strictPersistence: false, // if false, use in-memory storage if lakebase is unavailable\n};\n"],"mappings":";;AAGA,MAAaA,gBAA6B;CACxC,SAAS;CACT,KAAK;CACL,SAAS;CACT,UAAU,EAAE;CACZ,oBAAoB;CACpB,mBAAmB;CACpB"}
+ {"version":3,"file":"defaults.js","names":[],"sources":["../../src/cache/defaults.ts"],"sourcesContent":["import type { CacheConfig } from \"shared\";\n\n/** Default configuration for cache */\nexport const cacheDefaults: CacheConfig = {\n enabled: true,\n ttl: 3600, // 1 hour\n maxSize: 1000, // 1000 entries\n cacheKey: [], // no cache key by default\n cleanupProbability: 0.01, // 1% probability of triggering cleanup on each get operation\n strictPersistence: false, // if false, use in-memory storage if lakebase is unavailable\n};\n"],"mappings":";;AAGA,MAAa,gBAA6B;CACxC,SAAS;CACT,KAAK;CACL,SAAS;CACT,UAAU,EAAE;CACZ,oBAAoB;CACpB,mBAAmB;CACpB"}
@@ -9,6 +9,7 @@ import { CacheConfig } from "../shared/src/cache.js";
  * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access
  * the singleton instance after initialization.
  *
+ * @internal
  * @example
  * ```typescript
  * const cache = CacheManager.getInstanceSync();
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":[],"mappings":";;;;;;AAuBA;;;;;;;;;;;AAwPqC,cAxPxB,YAAA,CAwPwB;0BAAR,uBAAA;mBAkDlB,IAAA;iBAEN,QAAA;iBAawB,WAAA;UAMZ,OAAA;UAUS,MAAA;UA0BT,gBAAA;UAQW,iBAAA;EAAO,QAAA,kBAAA;;;;;;;;;;4BA3TP;;;;;;;;kCAkBX,QAAQ,eACpB,QAAQ;;;;;;;;;;;;;;;;;;;;;;+DAuFC,QAAQ;;MAGjB,QAAQ;;;;;;uBA2FgB,QAAQ;;;;;;;;;;6BAkD1B;;MAEN;;;;;;uBAawB;;WAMZ;;;;;;oBAUS;;;;;;;;;WA0BT;;;;;sBAQW"}
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../../src/cache/index.ts"],"sourcesContent":[],"mappings":";;;;;;AA4BA;;;;;;;;;;;;AA+Q6B,cA/QhB,YAAA,CA+QgB;0BAkDlB,uBAAA;mBAEN,IAAA;iBAawB,QAAA;iBAMZ,WAAA;UAUS,OAAA;UA0BT,MAAA;UAQW,gBAAA;EAAO,QAAA,iBAAA;;;;;;;;;;;4BAnVP;;;;;;;;kCAmBX,QAAQ,eACpB,QAAQ;;;;;;;;;;;;;;;;;;;;;;+DAuFC,QAAQ;;MAGjB,QAAQ;;;;;;uBAkHgB,QAAQ;;;;;;;;;;6BAkD1B;;MAEN;;;;;;uBAawB;;WAMZ;;;;;;oBAUS;;;;;;;;;WA0BT;;;;;sBAQW"}
@@ -1,7 +1,11 @@
+ import { createLogger } from "../logging/logger.js";
  import { TelemetryManager } from "../telemetry/telemetry-manager.js";
  import { SpanStatusCode } from "../telemetry/index.js";
+ import { AppKitError } from "../errors/base.js";
+ import { ExecutionError } from "../errors/execution.js";
+ import { InitializationError } from "../errors/initialization.js";
+ import { init_errors } from "../errors/index.js";
  import { deepMerge } from "../utils/merge.js";
- import { init_utils } from "../utils/index.js";
  import { LakebaseConnector } from "../connectors/lakebase/client.js";
  import "../connectors/index.js";
  import { cacheDefaults } from "./defaults.js";
@@ -12,7 +16,8 @@ import { createHash } from "node:crypto";
  import { WorkspaceClient } from "@databricks/sdk-experimental";
 
  //#region src/cache/index.ts
- init_utils();
+ init_errors();
+ const logger = createLogger("cache");
  /**
  * Cache manager class to handle cache operations.
  * Can be used with in-memory storage or persistent storage (Lakebase).
@@ -20,6 +25,7 @@ init_utils();
  * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access
  * the singleton instance after initialization.
  *
+ * @internal
  * @example
  * ```typescript
  * const cache = CacheManager.getInstanceSync();
@@ -62,7 +68,7 @@ var CacheManager = class CacheManager {
  * @returns CacheManager instance
  */
  static getInstanceSync() {
- if (!CacheManager.instance) throw new Error("CacheManager not initialized. Ensure AppKit.create() has completed before accessing the cache.");
+ if (!CacheManager.instance) throw InitializationError.notInitialized("CacheManager", "Ensure AppKit.create() has completed before accessing the cache");
  return CacheManager.instance;
  }
  /**
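Context for the hunk above: `getInstanceSync()` now throws the typed `InitializationError` rather than a plain `Error`. A minimal consumer-side sketch of branching on that class; the `@databricks/appkit` root import path for `CacheManager` and `InitializationError` is an assumption, not confirmed by this diff:

```typescript
// Sketch only: the root-level exports used here are assumptions.
import { CacheManager, InitializationError } from "@databricks/appkit";

function getCacheIfReady(): CacheManager | null {
  try {
    return CacheManager.getInstanceSync();
  } catch (error) {
    // As of 0.2.0, a missing singleton surfaces as InitializationError,
    // so callers can branch on the error class instead of parsing the message.
    if (error instanceof InitializationError) return null;
    throw error;
  }
}
```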
@@ -144,6 +150,10 @@ var CacheManager = class CacheManager {
  span.setAttribute("cache.hit", true);
  span.setStatus({ code: SpanStatusCode.OK });
  this.telemetryMetrics.cacheHitCount.add(1, { "cache.key": cacheKey });
+ logger.event()?.setExecution({
+ cache_hit: true,
+ cache_key: cacheKey
+ });
  return cached.value;
  }
  const inFlight = this.inFlightRequests.get(cacheKey);
@@ -156,12 +166,21 @@ var CacheManager = class CacheManager {
  "cache.key": cacheKey,
  "cache.deduplication": "true"
  });
+ logger.event()?.setExecution({
+ cache_hit: true,
+ cache_key: cacheKey,
+ cache_deduplication: true
+ });
  span.end();
  return inFlight;
  }
  span.setAttribute("cache.hit", false);
  span.addEvent("cache.miss", { "cache.key": cacheKey });
  this.telemetryMetrics.cacheMissCount.add(1, { "cache.key": cacheKey });
+ logger.event()?.setExecution({
+ cache_hit: false,
+ cache_key: cacheKey
+ });
  const promise = fn().then(async (result$1) => {
  await this.set(cacheKey, result$1, options);
  span.addEvent("cache.value_stored", {
@@ -172,7 +191,8 @@ var CacheManager = class CacheManager {
  }).catch((error) => {
  span.recordException(error);
  span.setStatus({ code: SpanStatusCode.ERROR });
- throw error;
+ if (error instanceof AppKitError) throw error;
+ throw ExecutionError.statementFailed(error instanceof Error ? error.message : String(error));
  }).finally(() => {
  this.inFlightRequests.delete(cacheKey);
  });
@@ -219,7 +239,7 @@ var CacheManager = class CacheManager {
  this.lastCleanupAttempt = now;
  this.cleanupInProgress = true;
  this.storage.cleanupExpired().catch((error) => {
- console.debug("Error cleaning up expired entries:", error);
+ logger.debug("Error cleaning up expired entries: %O", error);
  }).finally(() => {
  this.cleanupInProgress = false;
  });
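The hunks above also change how failures propagate out of `getOrExecute`: a non-AppKit error thrown by the wrapped function is re-thrown as `ExecutionError.statementFailed(...)`, preserving the original message while normalizing the class. A hedged usage sketch, again assuming root-level exports and using `fetchReport` as a hypothetical helper:

```typescript
// Sketch only: the import path and fetchReport are assumptions for illustration.
import { CacheManager, ExecutionError } from "@databricks/appkit";

async function fetchReport(id: number): Promise<unknown> {
  return { id }; // placeholder for a real data fetch
}

async function loadReport(userKey: string): Promise<unknown | null> {
  const cache = CacheManager.getInstanceSync();
  try {
    return await cache.getOrExecute(["report", 42], () => fetchReport(42), userKey);
  } catch (error) {
    // With 0.2.0 the failure keeps its message but arrives as ExecutionError,
    // unless the underlying error was already an AppKitError.
    if (error instanceof ExecutionError) return null;
    throw error;
  }
}
```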
@@ -1 +1 @@
- {"version":3,"file":"index.js","names":["result"],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { LakebaseConnector } from \"@/connectors\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n // Telemetry\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n this.config = config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw new Error(\n \"CacheManager not initialized. Ensure AppKit.create() has completed before accessing the cache.\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. 
If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const connector = new LakebaseConnector({ workspaceClient });\n const isHealthy = await connector.healthCheck();\n\n if (isHealthy) {\n const persistentStorage = new PersistentStorage(config, connector);\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n const 
promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n console.debug(\"Error cleaning up expired entries:\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 
3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;YAMqC;;;;;;;;;;;;;;AAiBrC,IAAa,eAAb,MAAa,aAAa;;iCAC0B;;;kBAEH;;;qBACY;;CAe3D,AAAQ,YAAY,SAAuB,QAAqB;cAjBhC;AAkB9B,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,IAAI,MACR,iGACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,YAAY,IAAI,kBAAkB,EAAE,iBADlB,IAAI,gBAAgB,EAAE,CAAC,EACY,CAAC;AAG5D,OAFkB,MAAM,UAAU,aAAa,EAEhC;IACb,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,UAAU;AAClE,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;UAE9C;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,YAAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AACF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iBAAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,S
AAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AACF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,aAAW;AACtB,WAAM,KAAK,IAAI,UAAUA,UAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAOA;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,WAAM;MACN,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,WAAQ,MAAM,sCAAsC,MAAM;IAC1D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
+ {"version":3,"file":"index.js","names":["result"],"sources":["../../src/cache/index.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport type { CacheConfig, CacheStorage } from \"shared\";\nimport { LakebaseConnector } from \"@/connectors\";\nimport { AppKitError, ExecutionError, InitializationError } from \"../errors\";\nimport { createLogger } from \"../logging/logger\";\nimport type { Counter, TelemetryProvider } from \"../telemetry\";\nimport { SpanStatusCode, TelemetryManager } from \"../telemetry\";\nimport { deepMerge } from \"../utils\";\nimport { cacheDefaults } from \"./defaults\";\nimport { InMemoryStorage, PersistentStorage } from \"./storage\";\n\nconst logger = createLogger(\"cache\");\n\n/**\n * Cache manager class to handle cache operations.\n * Can be used with in-memory storage or persistent storage (Lakebase).\n *\n * The cache is automatically initialized by AppKit. Use `getInstanceSync()` to access\n * the singleton instance after initialization.\n *\n * @internal\n * @example\n * ```typescript\n * const cache = CacheManager.getInstanceSync();\n * const result = await cache.getOrExecute([\"users\", userId], () => fetchUser(userId), userKey);\n * ```\n */\nexport class CacheManager {\n private static readonly MIN_CLEANUP_INTERVAL_MS = 60_000;\n private readonly name: string = \"cache-manager\";\n private static instance: CacheManager | null = null;\n private static initPromise: Promise<CacheManager> | null = null;\n\n private storage: CacheStorage;\n private config: CacheConfig;\n private inFlightRequests: Map<string, Promise<unknown>>;\n private cleanupInProgress: boolean;\n private lastCleanupAttempt: number;\n\n private telemetry: TelemetryProvider;\n private telemetryMetrics: {\n cacheHitCount: Counter;\n cacheMissCount: Counter;\n };\n\n private constructor(storage: CacheStorage, config: CacheConfig) {\n this.storage = storage;\n this.config = config;\n this.inFlightRequests = new Map();\n this.cleanupInProgress = false;\n this.lastCleanupAttempt = 0;\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n cacheHitCount: this.telemetry.getMeter().createCounter(\"cache.hit\", {\n description: \"Total number of cache hits\",\n unit: \"1\",\n }),\n cacheMissCount: this.telemetry.getMeter().createCounter(\"cache.miss\", {\n description: \"Total number of cache misses\",\n unit: \"1\",\n }),\n };\n }\n\n /**\n * Get the singleton instance of the cache manager (sync version).\n *\n * Throws if not initialized - ensure AppKit.create() has completed first.\n * @returns CacheManager instance\n */\n static getInstanceSync(): CacheManager {\n if (!CacheManager.instance) {\n throw InitializationError.notInitialized(\n \"CacheManager\",\n \"Ensure AppKit.create() has completed before accessing the cache\",\n );\n }\n\n return CacheManager.instance;\n }\n\n /**\n * Initialize and get the singleton instance of the cache manager.\n * Called internally by AppKit - prefer `getInstanceSync()` for plugin access.\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n * @internal\n */\n static async getInstance(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n if (CacheManager.instance) {\n return CacheManager.instance;\n }\n\n if (!CacheManager.initPromise) {\n CacheManager.initPromise = CacheManager.create(userConfig).then(\n (instance) => {\n 
CacheManager.instance = instance;\n return instance;\n },\n );\n }\n\n return CacheManager.initPromise;\n }\n\n /**\n * Create a new cache manager instance\n *\n * Storage selection logic:\n * 1. If `storage` provided and healthy → use provided storage\n * 2. If `storage` provided but unhealthy → fallback to InMemory (or disable if strictPersistence)\n * 3. If no `storage` provided and Lakebase available → use Lakebase\n * 4. If no `storage` provided and Lakebase unavailable → fallback to InMemory (or disable if strictPersistence)\n *\n * @param userConfig - User configuration for the cache manager\n * @returns CacheManager instance\n */\n private static async create(\n userConfig?: Partial<CacheConfig>,\n ): Promise<CacheManager> {\n const config = deepMerge(cacheDefaults, userConfig);\n\n if (config.storage) {\n const isHealthy = await config.storage.healthCheck();\n if (isHealthy) {\n return new CacheManager(config.storage, config);\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n // try to use lakebase storage\n try {\n const workspaceClient = new WorkspaceClient({});\n const connector = new LakebaseConnector({ workspaceClient });\n const isHealthy = await connector.healthCheck();\n\n if (isHealthy) {\n const persistentStorage = new PersistentStorage(config, connector);\n await persistentStorage.initialize();\n return new CacheManager(persistentStorage, config);\n }\n } catch {\n // lakebase unavailable, continue with in-memory storage\n }\n\n if (config.strictPersistence) {\n const disabledConfig = { ...config, enabled: false };\n return new CacheManager(\n new InMemoryStorage(disabledConfig),\n disabledConfig,\n );\n }\n\n return new CacheManager(new InMemoryStorage(config), config);\n }\n\n /**\n * Get or execute a function and cache the result\n * @param key - Cache key\n * @param fn - Function to execute\n * @param userKey - User key\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async getOrExecute<T>(\n key: (string | number | object)[],\n fn: () => Promise<T>,\n userKey: string,\n options?: { ttl?: number },\n ): Promise<T> {\n if (!this.config.enabled) return fn();\n\n const cacheKey = this.generateKey(key, userKey);\n\n return this.telemetry.startActiveSpan(\n \"cache.getOrExecute\",\n {\n attributes: {\n \"cache.key\": cacheKey,\n \"cache.enabled\": this.config.enabled,\n \"cache.persistent\": this.storage.isPersistent(),\n },\n },\n async (span) => {\n try {\n // check if the value is in the cache\n const cached = await this.storage.get<T>(cacheKey);\n if (cached !== null) {\n span.setAttribute(\"cache.hit\", true);\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n });\n\n return cached.value as T;\n }\n\n // check if the value is being processed by another request\n const inFlight = this.inFlightRequests.get(cacheKey);\n if (inFlight) {\n span.setAttribute(\"cache.hit\", true);\n span.setAttribute(\"cache.deduplication\", true);\n span.addEvent(\"cache.deduplication_used\", {\n \"cache.key\": cacheKey,\n });\n span.setStatus({ code: SpanStatusCode.OK });\n this.telemetryMetrics.cacheHitCount.add(1, {\n \"cache.key\": cacheKey,\n \"cache.deduplication\": \"true\",\n });\n\n 
logger.event()?.setExecution({\n cache_hit: true,\n cache_key: cacheKey,\n cache_deduplication: true,\n });\n\n span.end();\n return inFlight as Promise<T>;\n }\n\n // cache miss - execute function\n span.setAttribute(\"cache.hit\", false);\n span.addEvent(\"cache.miss\", { \"cache.key\": cacheKey });\n this.telemetryMetrics.cacheMissCount.add(1, {\n \"cache.key\": cacheKey,\n });\n\n logger.event()?.setExecution({\n cache_hit: false,\n cache_key: cacheKey,\n });\n\n const promise = fn()\n .then(async (result) => {\n await this.set(cacheKey, result, options);\n span.addEvent(\"cache.value_stored\", {\n \"cache.key\": cacheKey,\n \"cache.ttl\": options?.ttl ?? this.config.ttl ?? 3600,\n });\n return result;\n })\n .catch((error) => {\n span.recordException(error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ExecutionError.statementFailed(\n error instanceof Error ? error.message : String(error),\n );\n })\n .finally(() => {\n this.inFlightRequests.delete(cacheKey);\n });\n\n this.inFlightRequests.set(cacheKey, promise);\n\n const result = await promise;\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n throw error;\n } finally {\n span.end();\n }\n },\n { name: this.name, includePrefix: true },\n );\n }\n\n /**\n * Get a cached value\n * @param key - Cache key\n * @returns Promise of the value or null if not found or expired\n */\n async get<T>(key: string): Promise<T | null> {\n if (!this.config.enabled) return null;\n\n // probabilistic cleanup trigger\n this.maybeCleanup();\n\n const entry = await this.storage.get<T>(key);\n if (!entry) return null;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return null;\n }\n return entry.value as T;\n }\n\n /** Probabilistically trigger cleanup of expired entries (fire-and-forget) */\n private maybeCleanup(): void {\n if (this.cleanupInProgress) return;\n if (!this.storage.isPersistent()) return;\n const now = Date.now();\n if (now - this.lastCleanupAttempt < CacheManager.MIN_CLEANUP_INTERVAL_MS)\n return;\n\n const probability = this.config.cleanupProbability ?? 0.01;\n\n if (Math.random() > probability) return;\n\n this.lastCleanupAttempt = now;\n\n this.cleanupInProgress = true;\n (this.storage as PersistentStorage)\n .cleanupExpired()\n .catch((error) => {\n logger.debug(\"Error cleaning up expired entries: %O\", error);\n })\n .finally(() => {\n this.cleanupInProgress = false;\n });\n }\n\n /**\n * Set a value in the cache\n * @param key - Cache key\n * @param value - Value to set\n * @param options - Options for the cache\n * @returns Promise of the result\n */\n async set<T>(\n key: string,\n value: T,\n options?: { ttl?: number },\n ): Promise<void> {\n if (!this.config.enabled) return;\n\n const ttl = options?.ttl ?? this.config.ttl ?? 
3600;\n const expiryTime = Date.now() + ttl * 1000;\n await this.storage.set(key, { value, expiry: expiryTime });\n }\n\n /**\n * Delete a value from the cache\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n if (!this.config.enabled) return;\n await this.storage.delete(key);\n }\n\n /** Clear the cache */\n async clear(): Promise<void> {\n await this.storage.clear();\n this.inFlightRequests.clear();\n }\n\n /**\n * Check if a value exists in the cache\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n if (!this.config.enabled) return false;\n\n const entry = await this.storage.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n await this.storage.delete(key);\n return false;\n }\n return true;\n }\n\n /**\n * Generate a cache key\n * @param parts - Parts of the key\n * @param userKey - User key\n * @returns Cache key\n */\n generateKey(parts: (string | number | object)[], userKey: string): string {\n const allParts = [userKey, ...parts];\n const serialized = JSON.stringify(allParts);\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n await this.storage.close();\n }\n\n /**\n * Check if the storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async isStorageHealthy(): Promise<boolean> {\n return this.storage.healthCheck();\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;aAI6E;AAQ7E,MAAM,SAAS,aAAa,QAAQ;;;;;;;;;;;;;;;AAgBpC,IAAa,eAAb,MAAa,aAAa;;iCAC0B;;;kBAEH;;;qBACY;;CAc3D,AAAQ,YAAY,SAAuB,QAAqB;cAhBhC;AAiB9B,OAAK,UAAU;AACf,OAAK,SAAS;AACd,OAAK,mCAAmB,IAAI,KAAK;AACjC,OAAK,oBAAoB;AACzB,OAAK,qBAAqB;AAE1B,OAAK,YAAY,iBAAiB,YAChC,KAAK,MACL,KAAK,OAAO,UACb;AACD,OAAK,mBAAmB;GACtB,eAAe,KAAK,UAAU,UAAU,CAAC,cAAc,aAAa;IAClE,aAAa;IACb,MAAM;IACP,CAAC;GACF,gBAAgB,KAAK,UAAU,UAAU,CAAC,cAAc,cAAc;IACpE,aAAa;IACb,MAAM;IACP,CAAC;GACH;;;;;;;;CASH,OAAO,kBAAgC;AACrC,MAAI,CAAC,aAAa,SAChB,OAAM,oBAAoB,eACxB,gBACA,kEACD;AAGH,SAAO,aAAa;;;;;;;;;CAUtB,aAAa,YACX,YACuB;AACvB,MAAI,aAAa,SACf,QAAO,aAAa;AAGtB,MAAI,CAAC,aAAa,YAChB,cAAa,cAAc,aAAa,OAAO,WAAW,CAAC,MACxD,aAAa;AACZ,gBAAa,WAAW;AACxB,UAAO;IAEV;AAGH,SAAO,aAAa;;;;;;;;;;;;;;CAetB,aAAqB,OACnB,YACuB;EACvB,MAAM,SAAS,UAAU,eAAe,WAAW;AAEnD,MAAI,OAAO,SAAS;AAElB,OADkB,MAAM,OAAO,QAAQ,aAAa,CAElD,QAAO,IAAI,aAAa,OAAO,SAAS,OAAO;AAGjD,OAAI,OAAO,mBAAmB;IAC5B,MAAM,iBAAiB;KAAE,GAAG;KAAQ,SAAS;KAAO;AACpD,WAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,UAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;AAI9D,MAAI;GAEF,MAAM,YAAY,IAAI,kBAAkB,EAAE,iBADlB,IAAI,gBAAgB,EAAE,CAAC,EACY,CAAC;AAG5D,OAFkB,MAAM,UAAU,aAAa,EAEhC;IACb,MAAM,oBAAoB,IAAI,kBAAkB,QAAQ,UAAU;AAClE,UAAM,kBAAkB,YAAY;AACpC,WAAO,IAAI,aAAa,mBAAmB,OAAO;;UAE9C;AAIR,MAAI,OAAO,mBAAmB;GAC5B,MAAM,iBAAiB;IAAE,GAAG;IAAQ,SAAS;IAAO;AACpD,UAAO,IAAI,aACT,IAAI,gBAAgB,eAAe,EACnC,eACD;;AAGH,SAAO,IAAI,aAAa,IAAI,gBAAgB,OAAO,EAAE,OAAO;;;;;;;;;;CAW9D,MAAM,aACJ,KACA,IACA,SACA,SACY;AACZ,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO,IAAI;EAErC,MAAM,WAAW,KAAK,YAAY,KAAK,QAAQ;AAE/C,SAAO,KAAK,UAAU,gBACpB,sBACA,EACE,YAAY;GACV,aAAa;GACb,iBAAiB,KAAK,OAAO;GAC7B,oBAAoB,KAAK,QAAQ,cAAc;GAChD,EACF,EACD,OAAO,SAAS;AACd,OAAI;IAEF,MAAM,SAAS,MAAM,KAAK,QAAQ,IAAO,SAAS;AAClD,QAAI,WAAW,MAAM;AACnB,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG,EACzC,aAAa,UACd,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACZ,CAAC;AAEF,YAAO,OAAO;;IAIhB,MAAM,WAAW,KAAK,iB
AAiB,IAAI,SAAS;AACpD,QAAI,UAAU;AACZ,UAAK,aAAa,aAAa,KAAK;AACpC,UAAK,aAAa,uBAAuB,KAAK;AAC9C,UAAK,SAAS,4BAA4B,EACxC,aAAa,UACd,CAAC;AACF,UAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,UAAK,iBAAiB,cAAc,IAAI,GAAG;MACzC,aAAa;MACb,uBAAuB;MACxB,CAAC;AAEF,YAAO,OAAO,EAAE,aAAa;MAC3B,WAAW;MACX,WAAW;MACX,qBAAqB;MACtB,CAAC;AAEF,UAAK,KAAK;AACV,YAAO;;AAIT,SAAK,aAAa,aAAa,MAAM;AACrC,SAAK,SAAS,cAAc,EAAE,aAAa,UAAU,CAAC;AACtD,SAAK,iBAAiB,eAAe,IAAI,GAAG,EAC1C,aAAa,UACd,CAAC;AAEF,WAAO,OAAO,EAAE,aAAa;KAC3B,WAAW;KACX,WAAW;KACZ,CAAC;IAEF,MAAM,UAAU,IAAI,CACjB,KAAK,OAAO,aAAW;AACtB,WAAM,KAAK,IAAI,UAAUA,UAAQ,QAAQ;AACzC,UAAK,SAAS,sBAAsB;MAClC,aAAa;MACb,aAAa,SAAS,OAAO,KAAK,OAAO,OAAO;MACjD,CAAC;AACF,YAAOA;MACP,CACD,OAAO,UAAU;AAChB,UAAK,gBAAgB,MAAM;AAC3B,UAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,SAAI,iBAAiB,YACnB,OAAM;AAER,WAAM,eAAe,gBACnB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,CACvD;MACD,CACD,cAAc;AACb,UAAK,iBAAiB,OAAO,SAAS;MACtC;AAEJ,SAAK,iBAAiB,IAAI,UAAU,QAAQ;IAE5C,MAAM,SAAS,MAAM;AACrB,SAAK,UAAU,EAAE,MAAM,eAAe,IAAI,CAAC;AAC3C,WAAO;YACA,OAAO;AACd,SAAK,gBAAgB,MAAe;AACpC,SAAK,UAAU,EAAE,MAAM,eAAe,OAAO,CAAC;AAC9C,UAAM;aACE;AACR,SAAK,KAAK;;KAGd;GAAE,MAAM,KAAK;GAAM,eAAe;GAAM,CACzC;;;;;;;CAQH,MAAM,IAAO,KAAgC;AAC3C,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;AAGjC,OAAK,cAAc;EAEnB,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAO,IAAI;AAC5C,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO,MAAM;;;CAIf,AAAQ,eAAqB;AAC3B,MAAI,KAAK,kBAAmB;AAC5B,MAAI,CAAC,KAAK,QAAQ,cAAc,CAAE;EAClC,MAAM,MAAM,KAAK,KAAK;AACtB,MAAI,MAAM,KAAK,qBAAqB,aAAa,wBAC/C;EAEF,MAAM,cAAc,KAAK,OAAO,sBAAsB;AAEtD,MAAI,KAAK,QAAQ,GAAG,YAAa;AAEjC,OAAK,qBAAqB;AAE1B,OAAK,oBAAoB;AACzB,EAAC,KAAK,QACH,gBAAgB,CAChB,OAAO,UAAU;AAChB,UAAO,MAAM,yCAAyC,MAAM;IAC5D,CACD,cAAc;AACb,QAAK,oBAAoB;IACzB;;;;;;;;;CAUN,MAAM,IACJ,KACA,OACA,SACe;AACf,MAAI,CAAC,KAAK,OAAO,QAAS;EAE1B,MAAM,MAAM,SAAS,OAAO,KAAK,OAAO,OAAO;EAC/C,MAAM,aAAa,KAAK,KAAK,GAAG,MAAM;AACtC,QAAM,KAAK,QAAQ,IAAI,KAAK;GAAE;GAAO,QAAQ;GAAY,CAAC;;;;;;;CAQ5D,MAAM,OAAO,KAA4B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS;AAC1B,QAAM,KAAK,QAAQ,OAAO,IAAI;;;CAIhC,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;AAC1B,OAAK,iBAAiB,OAAO;;;;;;;CAQ/B,MAAM,IAAI,KAA+B;AACvC,MAAI,CAAC,KAAK,OAAO,QAAS,QAAO;EAEjC,MAAM,QAAQ,MAAM,KAAK,QAAQ,IAAI,IAAI;AACzC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,SAAM,KAAK,QAAQ,OAAO,IAAI;AAC9B,UAAO;;AAET,SAAO;;;;;;;;CAST,YAAY,OAAqC,SAAyB;EACxE,MAAM,WAAW,CAAC,SAAS,GAAG,MAAM;EACpC,MAAM,aAAa,KAAK,UAAU,SAAS;AAC3C,SAAO,WAAW,SAAS,CAAC,OAAO,WAAW,CAAC,OAAO,MAAM;;;CAI9D,MAAM,QAAuB;AAC3B,QAAM,KAAK,QAAQ,OAAO;;;;;;CAO5B,MAAM,mBAAqC;AACzC,SAAO,KAAK,QAAQ,aAAa"}
@@ -1 +1 @@
- {"version":3,"file":"memory.js","names":["oldestKey: string | null"],"sources":["../../../src/cache/storage/memory.ts"],"sourcesContent":["import type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { inMemoryStorageDefaults } from \"./defaults\";\n\n/**\n * In-memory cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n */\nexport class InMemoryStorage implements CacheStorage {\n private cache: Map<string, CacheEntry> = new Map();\n private accessOrder: Map<string, number> = new Map();\n private accessCounter: number;\n private maxSize: number;\n\n constructor(config: CacheConfig) {\n this.cache = new Map();\n this.accessOrder = new Map();\n this.maxSize = config.maxSize ?? inMemoryStorageDefaults.maxSize;\n this.accessCounter = 0;\n }\n\n /** Get an entry from the cache */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n const entry = this.cache.get(key);\n if (!entry) return null;\n\n this.accessOrder.set(key, ++this.accessCounter);\n return entry as CacheEntry<T>;\n }\n\n /** Set an entry in the cache */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n if (this.cache.size >= this.maxSize && !this.cache.has(key)) {\n this.evictLRU();\n }\n\n this.cache.set(key, entry);\n this.accessOrder.set(key, ++this.accessCounter);\n }\n\n /** Delete an entry from the cache */\n async delete(key: string): Promise<void> {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n }\n\n /** Clean in-memory cache */\n async clear(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Check if the cache has an entry */\n async has(key: string): Promise<boolean> {\n const entry = this.cache.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n return false;\n }\n\n return true;\n }\n\n /** Get the size of the cache */\n async size(): Promise<number> {\n return this.cache.size;\n }\n\n /** Check if the cache is persistent */\n isPersistent(): boolean {\n return false;\n }\n\n /** Check the health of the cache */\n async healthCheck(): Promise<boolean> {\n return true;\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Evict the least recently used entry (LRU) */\n private evictLRU(): void {\n let oldestKey: string | null = null;\n let oldestAccess = Infinity;\n\n for (const [key, accessTime] of this.accessOrder) {\n if (accessTime < oldestAccess) {\n oldestAccess = accessTime;\n oldestKey = key;\n }\n }\n\n if (oldestKey) {\n this.cache.delete(oldestKey);\n this.accessOrder.delete(oldestKey);\n }\n 
}\n}\n"],"mappings":";;;;;;;AAOA,IAAa,kBAAb,MAAqD;CAMnD,YAAY,QAAqB;+BALQ,IAAI,KAAK;qCACP,IAAI,KAAK;AAKlD,OAAK,wBAAQ,IAAI,KAAK;AACtB,OAAK,8BAAc,IAAI,KAAK;AAC5B,OAAK,UAAU,OAAO,WAAW,wBAAwB;AACzD,OAAK,gBAAgB;;;CAIvB,MAAM,IAAO,KAA4C;EACvD,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;AAC/C,SAAO;;;CAIT,MAAM,IAAO,KAAa,OAAqC;AAC7D,MAAI,KAAK,MAAM,QAAQ,KAAK,WAAW,CAAC,KAAK,MAAM,IAAI,IAAI,CACzD,MAAK,UAAU;AAGjB,OAAK,MAAM,IAAI,KAAK,MAAM;AAC1B,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;;;CAIjD,MAAM,OAAO,KAA4B;AACvC,OAAK,MAAM,OAAO,IAAI;AACtB,OAAK,YAAY,OAAO,IAAI;;;CAI9B,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,MAAM,IAAI,KAA+B;EACvC,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,QAAK,MAAM,OAAO,IAAI;AACtB,QAAK,YAAY,OAAO,IAAI;AAC5B,UAAO;;AAGT,SAAO;;;CAIT,MAAM,OAAwB;AAC5B,SAAO,KAAK,MAAM;;;CAIpB,eAAwB;AACtB,SAAO;;;CAIT,MAAM,cAAgC;AACpC,SAAO;;;CAIT,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,AAAQ,WAAiB;EACvB,IAAIA,YAA2B;EAC/B,IAAI,eAAe;AAEnB,OAAK,MAAM,CAAC,KAAK,eAAe,KAAK,YACnC,KAAI,aAAa,cAAc;AAC7B,kBAAe;AACf,eAAY;;AAIhB,MAAI,WAAW;AACb,QAAK,MAAM,OAAO,UAAU;AAC5B,QAAK,YAAY,OAAO,UAAU"}
+ {"version":3,"file":"memory.js","names":[],"sources":["../../../src/cache/storage/memory.ts"],"sourcesContent":["import type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { inMemoryStorageDefaults } from \"./defaults\";\n\n/**\n * In-memory cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n */\nexport class InMemoryStorage implements CacheStorage {\n private cache: Map<string, CacheEntry> = new Map();\n private accessOrder: Map<string, number> = new Map();\n private accessCounter: number;\n private maxSize: number;\n\n constructor(config: CacheConfig) {\n this.cache = new Map();\n this.accessOrder = new Map();\n this.maxSize = config.maxSize ?? inMemoryStorageDefaults.maxSize;\n this.accessCounter = 0;\n }\n\n /** Get an entry from the cache */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n const entry = this.cache.get(key);\n if (!entry) return null;\n\n this.accessOrder.set(key, ++this.accessCounter);\n return entry as CacheEntry<T>;\n }\n\n /** Set an entry in the cache */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n if (this.cache.size >= this.maxSize && !this.cache.has(key)) {\n this.evictLRU();\n }\n\n this.cache.set(key, entry);\n this.accessOrder.set(key, ++this.accessCounter);\n }\n\n /** Delete an entry from the cache */\n async delete(key: string): Promise<void> {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n }\n\n /** Clean in-memory cache */\n async clear(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Check if the cache has an entry */\n async has(key: string): Promise<boolean> {\n const entry = this.cache.get(key);\n if (!entry) return false;\n\n if (Date.now() > entry.expiry) {\n this.cache.delete(key);\n this.accessOrder.delete(key);\n return false;\n }\n\n return true;\n }\n\n /** Get the size of the cache */\n async size(): Promise<number> {\n return this.cache.size;\n }\n\n /** Check if the cache is persistent */\n isPersistent(): boolean {\n return false;\n }\n\n /** Check the health of the cache */\n async healthCheck(): Promise<boolean> {\n return true;\n }\n\n /** Close the cache */\n async close(): Promise<void> {\n this.cache.clear();\n this.accessOrder.clear();\n this.accessCounter = 0;\n }\n\n /** Evict the least recently used entry (LRU) */\n private evictLRU(): void {\n let oldestKey: string | null = null;\n let oldestAccess = Infinity;\n\n for (const [key, accessTime] of this.accessOrder) {\n if (accessTime < oldestAccess) {\n oldestAccess = accessTime;\n oldestKey = key;\n }\n }\n\n if (oldestKey) {\n this.cache.delete(oldestKey);\n this.accessOrder.delete(oldestKey);\n }\n 
}\n}\n"],"mappings":";;;;;;;AAOA,IAAa,kBAAb,MAAqD;CAMnD,YAAY,QAAqB;+BALQ,IAAI,KAAK;qCACP,IAAI,KAAK;AAKlD,OAAK,wBAAQ,IAAI,KAAK;AACtB,OAAK,8BAAc,IAAI,KAAK;AAC5B,OAAK,UAAU,OAAO,WAAW,wBAAwB;AACzD,OAAK,gBAAgB;;;CAIvB,MAAM,IAAO,KAA4C;EACvD,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;AAC/C,SAAO;;;CAIT,MAAM,IAAO,KAAa,OAAqC;AAC7D,MAAI,KAAK,MAAM,QAAQ,KAAK,WAAW,CAAC,KAAK,MAAM,IAAI,IAAI,CACzD,MAAK,UAAU;AAGjB,OAAK,MAAM,IAAI,KAAK,MAAM;AAC1B,OAAK,YAAY,IAAI,KAAK,EAAE,KAAK,cAAc;;;CAIjD,MAAM,OAAO,KAA4B;AACvC,OAAK,MAAM,OAAO,IAAI;AACtB,OAAK,YAAY,OAAO,IAAI;;;CAI9B,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,MAAM,IAAI,KAA+B;EACvC,MAAM,QAAQ,KAAK,MAAM,IAAI,IAAI;AACjC,MAAI,CAAC,MAAO,QAAO;AAEnB,MAAI,KAAK,KAAK,GAAG,MAAM,QAAQ;AAC7B,QAAK,MAAM,OAAO,IAAI;AACtB,QAAK,YAAY,OAAO,IAAI;AAC5B,UAAO;;AAGT,SAAO;;;CAIT,MAAM,OAAwB;AAC5B,SAAO,KAAK,MAAM;;;CAIpB,eAAwB;AACtB,SAAO;;;CAIT,MAAM,cAAgC;AACpC,SAAO;;;CAIT,MAAM,QAAuB;AAC3B,OAAK,MAAM,OAAO;AAClB,OAAK,YAAY,OAAO;AACxB,OAAK,gBAAgB;;;CAIvB,AAAQ,WAAiB;EACvB,IAAI,YAA2B;EAC/B,IAAI,eAAe;AAEnB,OAAK,MAAM,CAAC,KAAK,eAAe,KAAK,YACnC,KAAI,aAAa,cAAc;AAC7B,kBAAe;AACf,eAAY;;AAIhB,MAAI,WAAW;AACb,QAAK,MAAM,OAAO,UAAU;AAC5B,QAAK,YAAY,OAAO,UAAU"}