@databricks/appkit 0.34.1 → 0.35.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. package/CLAUDE.md +3 -0
  2. package/README.md +3 -3
  3. package/dist/appkit/package.js +1 -1
  4. package/dist/cache/index.js +1 -1
  5. package/dist/connectors/index.js +2 -0
  6. package/dist/connectors/lakebase/index.d.ts +2 -0
  7. package/dist/connectors/lakebase/index.d.ts.map +1 -1
  8. package/dist/connectors/lakebase/index.js +2 -0
  9. package/dist/connectors/lakebase/index.js.map +1 -1
  10. package/dist/connectors/lakebase/pool-manager.d.ts +54 -0
  11. package/dist/connectors/lakebase/pool-manager.d.ts.map +1 -0
  12. package/dist/connectors/lakebase/pool-manager.js +77 -0
  13. package/dist/connectors/lakebase/pool-manager.js.map +1 -0
  14. package/dist/connectors/lakebase/routing-pool.d.ts +22 -0
  15. package/dist/connectors/lakebase/routing-pool.d.ts.map +1 -0
  16. package/dist/connectors/lakebase/routing-pool.js +48 -0
  17. package/dist/connectors/lakebase/routing-pool.js.map +1 -0
  18. package/dist/context/execution-context.js +9 -1
  19. package/dist/context/execution-context.js.map +1 -1
  20. package/dist/context/service-context.d.ts.map +1 -1
  21. package/dist/context/service-context.js +4 -1
  22. package/dist/context/service-context.js.map +1 -1
  23. package/dist/context/user-context.d.ts +4 -0
  24. package/dist/context/user-context.d.ts.map +1 -1
  25. package/dist/context/user-context.js.map +1 -1
  26. package/dist/core/appkit.d.ts.map +1 -1
  27. package/dist/core/appkit.js +24 -4
  28. package/dist/core/appkit.js.map +1 -1
  29. package/dist/index.d.ts +3 -1
  30. package/dist/index.js +5 -4
  31. package/dist/index.js.map +1 -1
  32. package/dist/plugin/interceptors/telemetry.js +1 -1
  33. package/dist/plugin/plugin.d.ts.map +1 -1
  34. package/dist/plugin/plugin.js +12 -4
  35. package/dist/plugin/plugin.js.map +1 -1
  36. package/dist/plugins/files/plugin.js +1 -1
  37. package/dist/plugins/jobs/plugin.js +1 -1
  38. package/dist/plugins/lakebase/lakebase.d.ts +40 -14
  39. package/dist/plugins/lakebase/lakebase.d.ts.map +1 -1
  40. package/dist/plugins/lakebase/lakebase.js +91 -21
  41. package/dist/plugins/lakebase/lakebase.js.map +1 -1
  42. package/dist/plugins/serving/serving.js +1 -1
  43. package/docs/api/appkit/Function.createLakebasePoolManager.md +36 -0
  44. package/docs/api/appkit/Interface.LakebasePool.md +84 -0
  45. package/docs/api/appkit/Interface.LakebasePoolManager.md +101 -0
  46. package/docs/api/appkit.md +3 -0
  47. package/docs/development/llm-guide.md +0 -1
  48. package/docs/plugins/execution-context.md +6 -0
  49. package/docs/plugins/lakebase.md +112 -6
  50. package/llms.txt +3 -0
  51. package/package.json +1 -1
  52. package/sbom.cdx.json +1 -1
@@ -1,4 +1,7 @@
1
1
  import { createLogger } from "../../logging/logger.js";
2
+ import { createLakebasePoolManager } from "../../connectors/lakebase/pool-manager.js";
3
+ import { getUserContext, init_execution_context } from "../../context/execution-context.js";
4
+ import { RoutingPool } from "../../connectors/lakebase/routing-pool.js";
2
5
  import { createLakebasePool, getLakebaseOrmConfig, getLakebasePgConfig, getUsernameWithApiLookup } from "../../connectors/lakebase/index.js";
3
6
  import { Plugin } from "../../plugin/plugin.js";
4
7
  import { toPlugin } from "../../plugin/to-plugin.js";
@@ -10,13 +13,26 @@ import manifest_default from "./manifest.js";
10
13
  import { z } from "zod";
11
14
 
12
15
  //#region src/plugins/lakebase/lakebase.ts
16
+ init_execution_context();
13
17
  const logger = createLogger("lakebase");
18
+ /** Default pool settings for per-user OBO pools. */
19
+ const OBO_POOL_DEFAULTS = {
20
+ max: 3,
21
+ allowExitOnIdle: true,
22
+ idleTimeoutMillis: 3e4
23
+ };
14
24
  /**
15
25
  * AppKit plugin for Databricks Lakebase Autoscaling.
16
26
  *
17
27
  * Wraps `@databricks/lakebase` to provide a standard `pg.Pool` with automatic
18
28
  * OAuth token refresh, integrated with AppKit's logger and OpenTelemetry setup.
19
29
  *
30
+ * Supports On-Behalf-Of (OBO) via `asUser(req)` — each user gets a separate
31
+ * `pg.Pool` authenticated with their Databricks identity, enabling features
32
+ * like Row-Level Security (RLS). Routing is handled transparently by
33
+ * {@link RoutingPool}, which reads the execution context set by the base
34
+ * class `asUser()`.
35
+ *
20
36
  * @example
21
37
  * ```ts
22
38
  * import { createApp, lakebase, server } from "@databricks/appkit";
@@ -25,32 +41,58 @@ const logger = createLogger("lakebase");
25
41
  * plugins: [server(), lakebase()],
26
42
  * });
27
43
  *
44
+ * // Service principal query
28
45
  * const result = await AppKit.lakebase.query("SELECT * FROM users WHERE id = $1", [userId]);
46
+ *
47
+ * // User-scoped query (per-user pool, RLS enforced)
48
+ * const mine = await AppKit.lakebase.asUser(req).query("SELECT * FROM my_data");
29
49
  * ```
30
50
  */
31
51
  var LakebasePlugin = class extends Plugin {
32
52
  /** Plugin manifest declaring metadata and resource requirements */
33
53
  static manifest = manifest_default;
34
54
  pool = null;
55
+ oboPoolManager = null;
35
56
  /**
36
- * Initializes the Lakebase connection pool.
57
+ * Initializes the Lakebase connection pool and OBO pool manager.
37
58
  * Called automatically by AppKit during the plugin setup phase.
38
59
  *
39
- * Resolves the PostgreSQL username via {@link getUsernameWithApiLookup},
40
- * which tries config, env vars, and finally the Databricks workspace API.
60
+ * Creates a {@link RoutingPool} that automatically routes queries to either
61
+ * the service-principal pool or a per-user pool based on the execution
62
+ * context (set by `Plugin.asUser(req)` via AsyncLocalStorage).
41
63
  */
42
64
  async setup() {
43
65
  const poolConfig = this.config.pool;
44
66
  const user = await getUsernameWithApiLookup(poolConfig);
45
- this.pool = createLakebasePool({
67
+ const spPool = createLakebasePool({
46
68
  ...poolConfig,
47
69
  user
48
70
  });
49
- logger.info("Lakebase pool initialized");
71
+ logger.info("Lakebase SP pool initialized");
72
+ this.oboPoolManager = createLakebasePoolManager({
73
+ ...poolConfig,
74
+ ...OBO_POOL_DEFAULTS
75
+ });
76
+ logger.info("Lakebase OBO pool manager initialized");
77
+ const oboManager = this.oboPoolManager;
78
+ this.pool = new RoutingPool(spPool, (ctx) => {
79
+ if (!oboManager) throw new Error("OBO pool manager not initialized");
80
+ const userKey = ctx.userEmail ?? ctx.userId;
81
+ const isNew = !oboManager.hasPool(userKey);
82
+ const pool = oboManager.getPool(userKey, {
83
+ workspaceClient: ctx.client,
84
+ user: userKey
85
+ }, ctx.tokenFingerprint);
86
+ if (isNew) logger.debug("Created OBO pool for user (total: %d)", oboManager.size);
87
+ return pool;
88
+ });
50
89
  }
51
90
  /**
52
91
  * Executes a parameterized SQL query against the Lakebase pool.
53
92
  *
93
+ * When called inside `asUser(req)`, the query automatically routes to
94
+ * the per-user pool via {@link RoutingPool}.
95
+ *
54
96
  * @param text - SQL query string, using `$1`, `$2`, ... placeholders
55
97
  * @param values - Parameter values corresponding to placeholders
56
98
  * @returns Query result with typed rows
@@ -94,28 +136,27 @@ var LakebasePlugin = class extends Plugin {
94
136
  }
95
137
  }
96
138
  /**
97
- * Gracefully drains and closes the connection pool.
139
+ * Gracefully drains and closes all connection pools (SP + OBO).
98
140
  * Called automatically by AppKit during shutdown.
99
141
  */
100
142
  abortActiveOperations() {
101
143
  super.abortActiveOperations();
102
144
  if (this.pool) {
103
- logger.info("Closing Lakebase pool");
145
+ logger.info("Closing Lakebase SP pool");
104
146
  this.pool.end().catch((err) => {
105
- logger.error("Error closing Lakebase pool: %O", err);
147
+ logger.error("Error closing Lakebase SP pool: %O", err);
106
148
  });
107
149
  this.pool = null;
108
150
  }
151
+ if (this.oboPoolManager) {
152
+ logger.info("Closing all Lakebase OBO pools (%d)", this.oboPoolManager.size);
153
+ this.oboPoolManager.closeAll().catch((err) => {
154
+ logger.error("Error closing Lakebase OBO pools: %O", err);
155
+ });
156
+ this.oboPoolManager = null;
157
+ }
109
158
  }
110
159
  /**
111
- * Returns the plugin's public API, accessible via `AppKit.lakebase`.
112
- *
113
- * - `pool` — The raw `pg.Pool` instance, for use with ORMs or advanced scenarios
114
- * - `query` — Convenience method for executing parameterized SQL queries
115
- * - `getOrmConfig()` — Returns a config object compatible with Drizzle, TypeORM, Sequelize, etc.
116
- * - `getPgConfig()` — Returns a `pg.PoolConfig` object for manual pool construction
117
- */
118
- /**
119
160
  * Agent tool registry. Empty by default — the Lakebase plugin does NOT
120
161
  * expose its SQL connection to LLM agents unless the developer explicitly
121
162
  * opts in via `config.exposeAsAgentTool`. See {@link buildQueryTool}.
@@ -126,20 +167,21 @@ var LakebasePlugin = class extends Plugin {
126
167
  this.config = config;
127
168
  if (config.exposeAsAgentTool) {
128
169
  this.tools = { query: this.buildQueryTool(config.exposeAsAgentTool) };
129
- logger.warn("Lakebase agent tool is enabled (readOnly=%s). Every agent with access to this plugin can execute SQL against the Lakebase database as the service principal.", config.exposeAsAgentTool.readOnly !== false);
170
+ logger.warn("Lakebase agent tool is enabled (readOnly=%s). Every agent with access to this plugin can execute SQL against the Lakebase database as the requesting user's identity.", config.exposeAsAgentTool.readOnly !== false);
130
171
  }
131
172
  }
132
173
  buildQueryTool(opt) {
133
174
  const readOnly = opt.readOnly !== false;
134
175
  return defineTool({
135
- description: readOnly ? "Execute a read-only SQL query against the Lakebase PostgreSQL database. Only SELECT, WITH, SHOW, EXPLAIN, and DESCRIBE statements are accepted. Use $1, $2, etc. as placeholders and pass values separately. Runs as the application's service principal." : "Execute a parameterized SQL statement against the Lakebase PostgreSQL database. Use $1, $2, etc. as placeholders and pass values separately. Runs as the application's service principal. This tool can modify data; every invocation requires explicit human approval.",
176
+ description: readOnly ? "Execute a read-only SQL query against the Lakebase PostgreSQL database. Only SELECT, WITH, SHOW, EXPLAIN, and DESCRIBE statements are accepted. Use $1, $2, etc. as placeholders and pass values separately." : "Execute a parameterized SQL statement against the Lakebase PostgreSQL database. Use $1, $2, etc. as placeholders and pass values separately. This tool can modify data; every invocation requires explicit human approval.",
136
177
  schema: z.object({
137
178
  text: z.string().describe("SQL statement with $1, $2, ... placeholders for parameters"),
138
179
  values: z.array(z.unknown()).optional().describe("Parameter values corresponding to placeholders")
139
180
  }),
140
181
  annotations: {
141
182
  effect: readOnly ? "read" : "destructive",
142
- idempotent: false
183
+ idempotent: false,
184
+ requiresUserContext: true
143
185
  },
144
186
  execute: async (args, signal) => {
145
187
  signal?.throwIfAborted();
@@ -160,12 +202,40 @@ var LakebasePlugin = class extends Plugin {
160
202
  toolkit(opts) {
161
203
  return buildToolkitEntries(this.name, this.tools, opts);
162
204
  }
205
+ /**
206
+ * Returns the pool config for the current execution context.
207
+ * Inside `asUser(req)`, returns user-scoped config; otherwise SP config.
208
+ */
209
+ activePoolConfig() {
210
+ const ctx = getUserContext();
211
+ if (ctx) {
212
+ const user = ctx.userEmail ?? ctx.userId;
213
+ return {
214
+ ...this.config.pool,
215
+ workspaceClient: ctx.client,
216
+ user
217
+ };
218
+ }
219
+ return this.config.pool;
220
+ }
221
+ /**
222
+ * Returns the plugin's public API, accessible via `AppKit.lakebase`.
223
+ *
224
+ * - `pool` — The connection pool (routes to per-user pool when inside `asUser(req)`)
225
+ * - `query` — Convenience method for executing parameterized SQL queries
226
+ * - `getOrmConfig()` — Returns a config object compatible with Drizzle, TypeORM, Sequelize, etc.
227
+ * Inside `asUser(req)`, returns user-scoped config.
228
+ * - `getPgConfig()` — Returns a `pg.PoolConfig` object for manual pool construction.
229
+ * Inside `asUser(req)`, returns user-scoped config.
230
+ *
231
+ * Use `AppKit.lakebase.asUser(req)` to get the same API backed by a per-user pool.
232
+ */
163
233
  exports() {
164
234
  return {
165
235
  pool: this.pool,
166
236
  query: this.query.bind(this),
167
- getOrmConfig: () => getLakebaseOrmConfig(this.config.pool),
168
- getPgConfig: () => getLakebasePgConfig(this.config.pool)
237
+ getOrmConfig: () => getLakebaseOrmConfig(this.activePoolConfig()),
238
+ getPgConfig: () => getLakebasePgConfig(this.activePoolConfig())
169
239
  };
170
240
  }
171
241
  };
@@ -1 +1 @@
1
- {"version":3,"file":"lakebase.js","names":["manifest"],"sources":["../../../src/plugins/lakebase/lakebase.ts"],"sourcesContent":["import type { Pool, QueryResult, QueryResultRow } from \"pg\";\nimport type { AgentToolDefinition, ToolProvider } from \"shared\";\nimport { z } from \"zod\";\nimport {\n createLakebasePool,\n getLakebaseOrmConfig,\n getLakebasePgConfig,\n getUsernameWithApiLookup,\n} from \"../../connectors/lakebase\";\nimport { buildToolkitEntries } from \"../../core/agent/build-toolkit\";\nimport {\n defineTool,\n executeFromRegistry,\n toolsFromRegistry,\n} from \"../../core/agent/tools/define-tool\";\nimport { assertReadOnlySql } from \"../../core/agent/tools/sql-policy\";\nimport { createLogger } from \"../../logging/logger\";\nimport { Plugin, toPlugin } from \"../../plugin\";\nimport type { PluginManifest } from \"../../registry\";\nimport manifest from \"./manifest.json\";\nimport type { ILakebaseConfig } from \"./types\";\n\nconst logger = createLogger(\"lakebase\");\n\n/**\n * AppKit plugin for Databricks Lakebase Autoscaling.\n *\n * Wraps `@databricks/lakebase` to provide a standard `pg.Pool` with automatic\n * OAuth token refresh, integrated with AppKit's logger and OpenTelemetry setup.\n *\n * @example\n * ```ts\n * import { createApp, lakebase, server } from \"@databricks/appkit\";\n *\n * const AppKit = await createApp({\n * plugins: [server(), lakebase()],\n * });\n *\n * const result = await AppKit.lakebase.query(\"SELECT * FROM users WHERE id = $1\", [userId]);\n * ```\n */\nexport class LakebasePlugin extends Plugin implements ToolProvider {\n /** Plugin manifest declaring metadata and resource requirements */\n static manifest = manifest as PluginManifest<\"lakebase\">;\n\n protected declare config: ILakebaseConfig;\n private pool: Pool | null = null;\n\n /**\n * Initializes the Lakebase connection pool.\n * Called automatically by AppKit during the plugin setup phase.\n *\n * Resolves the PostgreSQL username via {@link 
getUsernameWithApiLookup},\n * which tries config, env vars, and finally the Databricks workspace API.\n */\n async setup() {\n const poolConfig = this.config.pool;\n const user = await getUsernameWithApiLookup(poolConfig);\n this.pool = createLakebasePool({ ...poolConfig, user });\n logger.info(\"Lakebase pool initialized\");\n }\n\n /**\n * Executes a parameterized SQL query against the Lakebase pool.\n *\n * @param text - SQL query string, using `$1`, `$2`, ... placeholders\n * @param values - Parameter values corresponding to placeholders\n * @returns Query result with typed rows\n *\n * @example\n * ```ts\n * const result = await AppKit.lakebase.query<{ id: number; name: string }>(\n * \"SELECT id, name FROM users WHERE active = $1\",\n * [true],\n * );\n * ```\n */\n async query<T extends QueryResultRow = any>(\n text: string,\n values?: unknown[],\n ): Promise<QueryResult<T>> {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API\n return this.pool!.query<T>(text, values);\n }\n\n /**\n * Execute a single statement inside a `BEGIN READ ONLY … ROLLBACK`\n * transaction on a dedicated client.\n *\n * The three commands MUST share a connection — a naive\n * `pool.query(\"BEGIN READ ONLY; <stmt>; ROLLBACK\")` batch cannot accept\n * parameter values (PostgreSQL's Extended Query protocol rejects multi-\n * statement prepared queries), which would silently break every\n * parameterized query the agent tool issues.\n *\n * Returns the raw `rows` array for the user's statement. 
Side effects the\n * statement may attempt (writes, writable-function side effects) are\n * rejected by PostgreSQL under the read-only transaction posture.\n */\n private async runReadOnlyStatement(\n text: string,\n values?: unknown[],\n ): Promise<unknown[]> {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup()\n const client = await this.pool!.connect();\n try {\n await client.query(\"BEGIN READ ONLY\");\n const result = await client.query(text, values);\n return result.rows;\n } finally {\n try {\n await client.query(\"ROLLBACK\");\n } finally {\n client.release();\n }\n }\n }\n\n /**\n * Gracefully drains and closes the connection pool.\n * Called automatically by AppKit during shutdown.\n */\n abortActiveOperations(): void {\n super.abortActiveOperations();\n if (this.pool) {\n logger.info(\"Closing Lakebase pool\");\n this.pool.end().catch((err) => {\n logger.error(\"Error closing Lakebase pool: %O\", err);\n });\n this.pool = null;\n }\n }\n\n /**\n * Returns the plugin's public API, accessible via `AppKit.lakebase`.\n *\n * - `pool` — The raw `pg.Pool` instance, for use with ORMs or advanced scenarios\n * - `query` — Convenience method for executing parameterized SQL queries\n * - `getOrmConfig()` — Returns a config object compatible with Drizzle, TypeORM, Sequelize, etc.\n * - `getPgConfig()` — Returns a `pg.PoolConfig` object for manual pool construction\n */\n\n /**\n * Agent tool registry. Empty by default — the Lakebase plugin does NOT\n * expose its SQL connection to LLM agents unless the developer explicitly\n * opts in via `config.exposeAsAgentTool`. See {@link buildQueryTool}.\n */\n private tools: Record<string, ReturnType<typeof this.buildQueryTool>> = {};\n\n constructor(config: ILakebaseConfig) {\n super(config);\n this.config = config;\n if (config.exposeAsAgentTool) {\n this.tools = { query: this.buildQueryTool(config.exposeAsAgentTool) };\n logger.warn(\n \"Lakebase agent tool is enabled (readOnly=%s). 
Every agent with access to this plugin can execute SQL against the Lakebase database as the service principal.\",\n config.exposeAsAgentTool.readOnly !== false,\n );\n }\n }\n\n private buildQueryTool(\n opt: NonNullable<ILakebaseConfig[\"exposeAsAgentTool\"]>,\n ) {\n const readOnly = opt.readOnly !== false;\n return defineTool({\n description: readOnly\n ? \"Execute a read-only SQL query against the Lakebase PostgreSQL database. Only SELECT, WITH, SHOW, EXPLAIN, and DESCRIBE statements are accepted. Use $1, $2, etc. as placeholders and pass values separately. Runs as the application's service principal.\"\n : \"Execute a parameterized SQL statement against the Lakebase PostgreSQL database. Use $1, $2, etc. as placeholders and pass values separately. Runs as the application's service principal. This tool can modify data; every invocation requires explicit human approval.\",\n schema: z.object({\n text: z\n .string()\n .describe(\n \"SQL statement with $1, $2, ... placeholders for parameters\",\n ),\n values: z\n .array(z.unknown())\n .optional()\n .describe(\"Parameter values corresponding to placeholders\"),\n }),\n annotations: {\n effect: readOnly ? \"read\" : \"destructive\",\n idempotent: false,\n },\n execute: async (args, signal) => {\n // Matches the files plugin pattern: the pg connection API\n // doesn't accept AbortSignal in its current shape, so deeper\n // mid-call cancellation needs a separate plumbing pass on the\n // connector. 
This entry check still catches the common case —\n // a tool dispatched after the user already cancelled the\n // stream — and unwinds cleanly instead of running to\n // completion against the SQL warehouse.\n signal?.throwIfAborted();\n if (readOnly) {\n assertReadOnlySql(args.text);\n return this.runReadOnlyStatement(args.text, args.values);\n }\n const result = await this.query(args.text, args.values);\n return result.rows;\n },\n });\n }\n\n getAgentTools(): AgentToolDefinition[] {\n return toolsFromRegistry(this.tools);\n }\n\n async executeAgentTool(\n name: string,\n args: unknown,\n signal?: AbortSignal,\n ): Promise<unknown> {\n return executeFromRegistry(this.tools, name, args, signal);\n }\n\n toolkit(opts?: import(\"../../core/agent/types\").ToolkitOptions) {\n return buildToolkitEntries(this.name, this.tools, opts);\n }\n\n exports() {\n return {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API\n pool: this.pool!,\n query: this.query.bind(this),\n getOrmConfig: () => getLakebaseOrmConfig(this.config.pool),\n getPgConfig: () => getLakebasePgConfig(this.config.pool),\n };\n }\n}\n\n/**\n * @internal\n */\nexport const lakebase = 
toPlugin(LakebasePlugin);\n"],"mappings":";;;;;;;;;;;;AAsBA,MAAM,SAAS,aAAa,WAAW;;;;;;;;;;;;;;;;;;AAmBvC,IAAa,iBAAb,cAAoC,OAA+B;;CAEjE,OAAO,WAAWA;CAGlB,AAAQ,OAAoB;;;;;;;;CAS5B,MAAM,QAAQ;EACZ,MAAM,aAAa,KAAK,OAAO;EAC/B,MAAM,OAAO,MAAM,yBAAyB,WAAW;AACvD,OAAK,OAAO,mBAAmB;GAAE,GAAG;GAAY;GAAM,CAAC;AACvD,SAAO,KAAK,4BAA4B;;;;;;;;;;;;;;;;;CAkB1C,MAAM,MACJ,MACA,QACyB;AAEzB,SAAO,KAAK,KAAM,MAAS,MAAM,OAAO;;;;;;;;;;;;;;;;CAiB1C,MAAc,qBACZ,MACA,QACoB;EAEpB,MAAM,SAAS,MAAM,KAAK,KAAM,SAAS;AACzC,MAAI;AACF,SAAM,OAAO,MAAM,kBAAkB;AAErC,WADe,MAAM,OAAO,MAAM,MAAM,OAAO,EACjC;YACN;AACR,OAAI;AACF,UAAM,OAAO,MAAM,WAAW;aACtB;AACR,WAAO,SAAS;;;;;;;;CAStB,wBAA8B;AAC5B,QAAM,uBAAuB;AAC7B,MAAI,KAAK,MAAM;AACb,UAAO,KAAK,wBAAwB;AACpC,QAAK,KAAK,KAAK,CAAC,OAAO,QAAQ;AAC7B,WAAO,MAAM,mCAAmC,IAAI;KACpD;AACF,QAAK,OAAO;;;;;;;;;;;;;;;;CAkBhB,AAAQ,QAAgE,EAAE;CAE1E,YAAY,QAAyB;AACnC,QAAM,OAAO;AACb,OAAK,SAAS;AACd,MAAI,OAAO,mBAAmB;AAC5B,QAAK,QAAQ,EAAE,OAAO,KAAK,eAAe,OAAO,kBAAkB,EAAE;AACrE,UAAO,KACL,gKACA,OAAO,kBAAkB,aAAa,MACvC;;;CAIL,AAAQ,eACN,KACA;EACA,MAAM,WAAW,IAAI,aAAa;AAClC,SAAO,WAAW;GAChB,aAAa,WACT,8PACA;GACJ,QAAQ,EAAE,OAAO;IACf,MAAM,EACH,QAAQ,CACR,SACC,6DACD;IACH,QAAQ,EACL,MAAM,EAAE,SAAS,CAAC,CAClB,UAAU,CACV,SAAS,iDAAiD;IAC9D,CAAC;GACF,aAAa;IACX,QAAQ,WAAW,SAAS;IAC5B,YAAY;IACb;GACD,SAAS,OAAO,MAAM,WAAW;AAQ/B,YAAQ,gBAAgB;AACxB,QAAI,UAAU;AACZ,uBAAkB,KAAK,KAAK;AAC5B,YAAO,KAAK,qBAAqB,KAAK,MAAM,KAAK,OAAO;;AAG1D,YADe,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,OAAO,EACzC;;GAEjB,CAAC;;CAGJ,gBAAuC;AACrC,SAAO,kBAAkB,KAAK,MAAM;;CAGtC,MAAM,iBACJ,MACA,MACA,QACkB;AAClB,SAAO,oBAAoB,KAAK,OAAO,MAAM,MAAM,OAAO;;CAG5D,QAAQ,MAAwD;AAC9D,SAAO,oBAAoB,KAAK,MAAM,KAAK,OAAO,KAAK;;CAGzD,UAAU;AACR,SAAO;GAEL,MAAM,KAAK;GACX,OAAO,KAAK,MAAM,KAAK,KAAK;GAC5B,oBAAoB,qBAAqB,KAAK,OAAO,KAAK;GAC1D,mBAAmB,oBAAoB,KAAK,OAAO,KAAK;GACzD;;;;;;AAOL,MAAa,WAAW,SAAS,eAAe"}
1
+ {"version":3,"file":"lakebase.js","names":["manifest"],"sources":["../../../src/plugins/lakebase/lakebase.ts"],"sourcesContent":["import type { QueryResult, QueryResultRow } from \"pg\";\nimport type { AgentToolDefinition, ToolProvider } from \"shared\";\nimport { z } from \"zod\";\nimport {\n createLakebasePool,\n createLakebasePoolManager,\n getLakebaseOrmConfig,\n getLakebasePgConfig,\n getUsernameWithApiLookup,\n type LakebasePool,\n type LakebasePoolManager,\n RoutingPool,\n} from \"../../connectors/lakebase\";\nimport { getUserContext } from \"../../context/execution-context\";\nimport { buildToolkitEntries } from \"../../core/agent/build-toolkit\";\nimport {\n defineTool,\n executeFromRegistry,\n toolsFromRegistry,\n} from \"../../core/agent/tools/define-tool\";\nimport { assertReadOnlySql } from \"../../core/agent/tools/sql-policy\";\nimport { createLogger } from \"../../logging/logger\";\nimport { Plugin, toPlugin } from \"../../plugin\";\nimport type { PluginManifest } from \"../../registry\";\nimport manifest from \"./manifest.json\";\nimport type { ILakebaseConfig } from \"./types\";\n\nconst logger = createLogger(\"lakebase\");\n\n/** Default pool settings for per-user OBO pools. */\nconst OBO_POOL_DEFAULTS = {\n max: 3,\n allowExitOnIdle: true,\n idleTimeoutMillis: 30_000,\n};\n\n/**\n * AppKit plugin for Databricks Lakebase Autoscaling.\n *\n * Wraps `@databricks/lakebase` to provide a standard `pg.Pool` with automatic\n * OAuth token refresh, integrated with AppKit's logger and OpenTelemetry setup.\n *\n * Supports On-Behalf-Of (OBO) via `asUser(req)` — each user gets a separate\n * `pg.Pool` authenticated with their Databricks identity, enabling features\n * like Row-Level Security (RLS). 
Routing is handled transparently by\n * {@link RoutingPool}, which reads the execution context set by the base\n * class `asUser()`.\n *\n * @example\n * ```ts\n * import { createApp, lakebase, server } from \"@databricks/appkit\";\n *\n * const AppKit = await createApp({\n * plugins: [server(), lakebase()],\n * });\n *\n * // Service principal query\n * const result = await AppKit.lakebase.query(\"SELECT * FROM users WHERE id = $1\", [userId]);\n *\n * // User-scoped query (per-user pool, RLS enforced)\n * const mine = await AppKit.lakebase.asUser(req).query(\"SELECT * FROM my_data\");\n * ```\n */\nexport class LakebasePlugin extends Plugin implements ToolProvider {\n /** Plugin manifest declaring metadata and resource requirements */\n static manifest = manifest as PluginManifest<\"lakebase\">;\n\n protected declare config: ILakebaseConfig;\n private pool: RoutingPool | null = null;\n private oboPoolManager: LakebasePoolManager | null = null;\n\n /**\n * Initializes the Lakebase connection pool and OBO pool manager.\n * Called automatically by AppKit during the plugin setup phase.\n *\n * Creates a {@link RoutingPool} that automatically routes queries to either\n * the service-principal pool or a per-user pool based on the execution\n * context (set by `Plugin.asUser(req)` via AsyncLocalStorage).\n */\n async setup() {\n const poolConfig = this.config.pool;\n const user = await getUsernameWithApiLookup(poolConfig);\n\n const spPool = createLakebasePool({ ...poolConfig, user });\n logger.info(\"Lakebase SP pool initialized\");\n\n this.oboPoolManager = createLakebasePoolManager({\n ...poolConfig,\n ...OBO_POOL_DEFAULTS,\n });\n logger.info(\"Lakebase OBO pool manager initialized\");\n\n const oboManager = this.oboPoolManager;\n this.pool = new RoutingPool(spPool, (ctx) => {\n if (!oboManager) throw new Error(\"OBO pool manager not initialized\");\n // Lakebase OAuth roles use email as the postgres role when available\n const userKey = ctx.userEmail ?? 
ctx.userId;\n const isNew = !oboManager.hasPool(userKey);\n const pool = oboManager.getPool(\n userKey,\n { workspaceClient: ctx.client, user: userKey },\n ctx.tokenFingerprint,\n );\n if (isNew) {\n logger.debug(\"Created OBO pool for user (total: %d)\", oboManager.size);\n }\n return pool;\n });\n }\n\n /**\n * Executes a parameterized SQL query against the Lakebase pool.\n *\n * When called inside `asUser(req)`, the query automatically routes to\n * the per-user pool via {@link RoutingPool}.\n *\n * @param text - SQL query string, using `$1`, `$2`, ... placeholders\n * @param values - Parameter values corresponding to placeholders\n * @returns Query result with typed rows\n *\n * @example\n * ```ts\n * const result = await AppKit.lakebase.query<{ id: number; name: string }>(\n * \"SELECT id, name FROM users WHERE active = $1\",\n * [true],\n * );\n * ```\n */\n async query<T extends QueryResultRow = any>(\n text: string,\n values?: unknown[],\n ): Promise<QueryResult<T>> {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API\n return this.pool!.query<T>(text, values);\n }\n\n /**\n * Execute a single statement inside a `BEGIN READ ONLY … ROLLBACK`\n * transaction on a dedicated client.\n *\n * The three commands MUST share a connection — a naive\n * `pool.query(\"BEGIN READ ONLY; <stmt>; ROLLBACK\")` batch cannot accept\n * parameter values (PostgreSQL's Extended Query protocol rejects multi-\n * statement prepared queries), which would silently break every\n * parameterized query the agent tool issues.\n *\n * Returns the raw `rows` array for the user's statement. 
Side effects the\n * statement may attempt (writes, writable-function side effects) are\n * rejected by PostgreSQL under the read-only transaction posture.\n */\n private async runReadOnlyStatement(\n text: string,\n values?: unknown[],\n ): Promise<unknown[]> {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup()\n const client = await this.pool!.connect();\n try {\n await client.query(\"BEGIN READ ONLY\");\n const result = await client.query(text, values);\n return result.rows;\n } finally {\n try {\n await client.query(\"ROLLBACK\");\n } finally {\n client.release();\n }\n }\n }\n\n /**\n * Gracefully drains and closes all connection pools (SP + OBO).\n * Called automatically by AppKit during shutdown.\n */\n abortActiveOperations(): void {\n super.abortActiveOperations();\n if (this.pool) {\n logger.info(\"Closing Lakebase SP pool\");\n this.pool.end().catch((err) => {\n logger.error(\"Error closing Lakebase SP pool: %O\", err);\n });\n this.pool = null;\n }\n if (this.oboPoolManager) {\n logger.info(\n \"Closing all Lakebase OBO pools (%d)\",\n this.oboPoolManager.size,\n );\n this.oboPoolManager.closeAll().catch((err) => {\n logger.error(\"Error closing Lakebase OBO pools: %O\", err);\n });\n this.oboPoolManager = null;\n }\n }\n\n /**\n * Agent tool registry. Empty by default — the Lakebase plugin does NOT\n * expose its SQL connection to LLM agents unless the developer explicitly\n * opts in via `config.exposeAsAgentTool`. See {@link buildQueryTool}.\n */\n private tools: Record<string, ReturnType<typeof this.buildQueryTool>> = {};\n\n constructor(config: ILakebaseConfig) {\n super(config);\n this.config = config;\n if (config.exposeAsAgentTool) {\n this.tools = { query: this.buildQueryTool(config.exposeAsAgentTool) };\n logger.warn(\n \"Lakebase agent tool is enabled (readOnly=%s). 
Every agent with access to this plugin can execute SQL against the Lakebase database as the requesting user's identity.\",\n config.exposeAsAgentTool.readOnly !== false,\n );\n }\n }\n\n private buildQueryTool(\n opt: NonNullable<ILakebaseConfig[\"exposeAsAgentTool\"]>,\n ) {\n const readOnly = opt.readOnly !== false;\n return defineTool({\n description: readOnly\n ? \"Execute a read-only SQL query against the Lakebase PostgreSQL database. Only SELECT, WITH, SHOW, EXPLAIN, and DESCRIBE statements are accepted. Use $1, $2, etc. as placeholders and pass values separately.\"\n : \"Execute a parameterized SQL statement against the Lakebase PostgreSQL database. Use $1, $2, etc. as placeholders and pass values separately. This tool can modify data; every invocation requires explicit human approval.\",\n schema: z.object({\n text: z\n .string()\n .describe(\n \"SQL statement with $1, $2, ... placeholders for parameters\",\n ),\n values: z\n .array(z.unknown())\n .optional()\n .describe(\"Parameter values corresponding to placeholders\"),\n }),\n annotations: {\n effect: readOnly ? \"read\" : \"destructive\",\n idempotent: false,\n requiresUserContext: true,\n },\n execute: async (args, signal) => {\n // Matches the files plugin pattern: the pg connection API\n // doesn't accept AbortSignal in its current shape, so deeper\n // mid-call cancellation needs a separate plumbing pass on the\n // connector. 
This entry check still catches the common case —\n // a tool dispatched after the user already cancelled the\n // stream — and unwinds cleanly instead of running to\n // completion against the SQL warehouse.\n signal?.throwIfAborted();\n if (readOnly) {\n assertReadOnlySql(args.text);\n return this.runReadOnlyStatement(args.text, args.values);\n }\n const result = await this.query(args.text, args.values);\n return result.rows;\n },\n });\n }\n\n getAgentTools(): AgentToolDefinition[] {\n return toolsFromRegistry(this.tools);\n }\n\n async executeAgentTool(\n name: string,\n args: unknown,\n signal?: AbortSignal,\n ): Promise<unknown> {\n return executeFromRegistry(this.tools, name, args, signal);\n }\n\n toolkit(opts?: import(\"../../core/agent/types\").ToolkitOptions) {\n return buildToolkitEntries(this.name, this.tools, opts);\n }\n\n /**\n * Returns the pool config for the current execution context.\n * Inside `asUser(req)`, returns user-scoped config; otherwise SP config.\n */\n private activePoolConfig() {\n const ctx = getUserContext();\n if (ctx) {\n const user = ctx.userEmail ?? 
ctx.userId;\n return { ...this.config.pool, workspaceClient: ctx.client, user };\n }\n return this.config.pool;\n }\n\n /**\n * Returns the plugin's public API, accessible via `AppKit.lakebase`.\n *\n * - `pool` — The connection pool (routes to per-user pool when inside `asUser(req)`)\n * - `query` — Convenience method for executing parameterized SQL queries\n * - `getOrmConfig()` — Returns a config object compatible with Drizzle, TypeORM, Sequelize, etc.\n * Inside `asUser(req)`, returns user-scoped config.\n * - `getPgConfig()` — Returns a `pg.PoolConfig` object for manual pool construction.\n * Inside `asUser(req)`, returns user-scoped config.\n *\n * Use `AppKit.lakebase.asUser(req)` to get the same API backed by a per-user pool.\n */\n exports() {\n return {\n // biome-ignore lint/style/noNonNullAssertion: pool is guaranteed non-null after setup(), which AppKit always awaits before exposing the plugin API\n pool: this.pool! as LakebasePool,\n query: this.query.bind(this),\n getOrmConfig: () => getLakebaseOrmConfig(this.activePoolConfig()),\n getPgConfig: () => getLakebasePgConfig(this.activePoolConfig()),\n };\n }\n}\n\n/**\n * @internal\n */\nexport const lakebase = 
toPlugin(LakebasePlugin);\n"],"mappings":";;;;;;;;;;;;;;;wBAaiE;AAcjE,MAAM,SAAS,aAAa,WAAW;;AAGvC,MAAM,oBAAoB;CACxB,KAAK;CACL,iBAAiB;CACjB,mBAAmB;CACpB;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BD,IAAa,iBAAb,cAAoC,OAA+B;;CAEjE,OAAO,WAAWA;CAGlB,AAAQ,OAA2B;CACnC,AAAQ,iBAA6C;;;;;;;;;CAUrD,MAAM,QAAQ;EACZ,MAAM,aAAa,KAAK,OAAO;EAC/B,MAAM,OAAO,MAAM,yBAAyB,WAAW;EAEvD,MAAM,SAAS,mBAAmB;GAAE,GAAG;GAAY;GAAM,CAAC;AAC1D,SAAO,KAAK,+BAA+B;AAE3C,OAAK,iBAAiB,0BAA0B;GAC9C,GAAG;GACH,GAAG;GACJ,CAAC;AACF,SAAO,KAAK,wCAAwC;EAEpD,MAAM,aAAa,KAAK;AACxB,OAAK,OAAO,IAAI,YAAY,SAAS,QAAQ;AAC3C,OAAI,CAAC,WAAY,OAAM,IAAI,MAAM,mCAAmC;GAEpE,MAAM,UAAU,IAAI,aAAa,IAAI;GACrC,MAAM,QAAQ,CAAC,WAAW,QAAQ,QAAQ;GAC1C,MAAM,OAAO,WAAW,QACtB,SACA;IAAE,iBAAiB,IAAI;IAAQ,MAAM;IAAS,EAC9C,IAAI,iBACL;AACD,OAAI,MACF,QAAO,MAAM,yCAAyC,WAAW,KAAK;AAExE,UAAO;IACP;;;;;;;;;;;;;;;;;;;;CAqBJ,MAAM,MACJ,MACA,QACyB;AAEzB,SAAO,KAAK,KAAM,MAAS,MAAM,OAAO;;;;;;;;;;;;;;;;CAiB1C,MAAc,qBACZ,MACA,QACoB;EAEpB,MAAM,SAAS,MAAM,KAAK,KAAM,SAAS;AACzC,MAAI;AACF,SAAM,OAAO,MAAM,kBAAkB;AAErC,WADe,MAAM,OAAO,MAAM,MAAM,OAAO,EACjC;YACN;AACR,OAAI;AACF,UAAM,OAAO,MAAM,WAAW;aACtB;AACR,WAAO,SAAS;;;;;;;;CAStB,wBAA8B;AAC5B,QAAM,uBAAuB;AAC7B,MAAI,KAAK,MAAM;AACb,UAAO,KAAK,2BAA2B;AACvC,QAAK,KAAK,KAAK,CAAC,OAAO,QAAQ;AAC7B,WAAO,MAAM,sCAAsC,IAAI;KACvD;AACF,QAAK,OAAO;;AAEd,MAAI,KAAK,gBAAgB;AACvB,UAAO,KACL,uCACA,KAAK,eAAe,KACrB;AACD,QAAK,eAAe,UAAU,CAAC,OAAO,QAAQ;AAC5C,WAAO,MAAM,wCAAwC,IAAI;KACzD;AACF,QAAK,iBAAiB;;;;;;;;CAS1B,AAAQ,QAAgE,EAAE;CAE1E,YAAY,QAAyB;AACnC,QAAM,OAAO;AACb,OAAK,SAAS;AACd,MAAI,OAAO,mBAAmB;AAC5B,QAAK,QAAQ,EAAE,OAAO,KAAK,eAAe,OAAO,kBAAkB,EAAE;AACrE,UAAO,KACL,yKACA,OAAO,kBAAkB,aAAa,MACvC;;;CAIL,AAAQ,eACN,KACA;EACA,MAAM,WAAW,IAAI,aAAa;AAClC,SAAO,WAAW;GAChB,aAAa,WACT,iNACA;GACJ,QAAQ,EAAE,OAAO;IACf,MAAM,EACH,QAAQ,CACR,SACC,6DACD;IACH,QAAQ,EACL,MAAM,EAAE,SAAS,CAAC,CAClB,UAAU,CACV,SAAS,iDAAiD;IAC9D,CAAC;GACF,aAAa;IACX,QAAQ,WAAW,SAAS;IAC5B,YAAY;IACZ,qBAAqB;IACtB;GACD,SAAS,OAAO,MAAM,WAAW;AAQ/B,YAAQ,gBAAgB;AACxB,QAAI,UAAU;AACZ,uBAAkB,KAAK,KAAK;AAC5B,YAAO,KAAK,qBAAqB,KAAK,MAAM,KA
AK,OAAO;;AAG1D,YADe,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,OAAO,EACzC;;GAEjB,CAAC;;CAGJ,gBAAuC;AACrC,SAAO,kBAAkB,KAAK,MAAM;;CAGtC,MAAM,iBACJ,MACA,MACA,QACkB;AAClB,SAAO,oBAAoB,KAAK,OAAO,MAAM,MAAM,OAAO;;CAG5D,QAAQ,MAAwD;AAC9D,SAAO,oBAAoB,KAAK,MAAM,KAAK,OAAO,KAAK;;;;;;CAOzD,AAAQ,mBAAmB;EACzB,MAAM,MAAM,gBAAgB;AAC5B,MAAI,KAAK;GACP,MAAM,OAAO,IAAI,aAAa,IAAI;AAClC,UAAO;IAAE,GAAG,KAAK,OAAO;IAAM,iBAAiB,IAAI;IAAQ;IAAM;;AAEnE,SAAO,KAAK,OAAO;;;;;;;;;;;;;;CAerB,UAAU;AACR,SAAO;GAEL,MAAM,KAAK;GACX,OAAO,KAAK,MAAM,KAAK,KAAK;GAC5B,oBAAoB,qBAAqB,KAAK,kBAAkB,CAAC;GACjE,mBAAmB,oBAAoB,KAAK,kBAAkB,CAAC;GAChE;;;;;;AAOL,MAAa,WAAW,SAAS,eAAe"}
@@ -1,9 +1,9 @@
1
1
  import { createLogger } from "../../logging/logger.js";
2
2
  import { getWorkspaceClient } from "../../context/execution-context.js";
3
3
  import { init_context } from "../../context/index.js";
4
+ import { Plugin } from "../../plugin/plugin.js";
4
5
  import { ResourceType } from "../../registry/types.generated.js";
5
6
  import "../../registry/index.js";
6
- import { Plugin } from "../../plugin/plugin.js";
7
7
  import { toPlugin } from "../../plugin/to-plugin.js";
8
8
  import "../../plugin/index.js";
9
9
  import "../../logging/index.js";
@@ -0,0 +1,36 @@
1
+ # Function: createLakebasePoolManager()
2
+
3
+ ```ts
4
+ function createLakebasePoolManager(baseConfig?: Partial<LakebasePoolConfig>): LakebasePoolManager;
5
+
6
+ ```
7
+
8
+ Create a pool manager that maintains per-key Lakebase connection pools.
9
+
10
+ Each pool is created via `createLakebasePool` with the base config merged with per-pool overrides (e.g. a user's `workspaceClient` and `user`).
11
+
12
+ A periodic cleanup removes empty Pool objects (pools whose connections have all been closed by pg's built-in idle timeout, `idleTimeoutMillis`) from the internal Map.
13
+
14
+ ## Parameters[​](#parameters "Direct link to Parameters")
15
+
16
+ | Parameter | Type |
17
+ | ------------- | ------------------------------------------------------------------------------------------ |
18
+ | `baseConfig?` | `Partial`<[`LakebasePoolConfig`](./docs/api/appkit/Interface.LakebasePoolConfig.md)> |
19
+
20
+ ## Returns[​](#returns "Direct link to Returns")
21
+
22
+ [`LakebasePoolManager`](./docs/api/appkit/Interface.LakebasePoolManager.md)
23
+
24
+ ## Example[​](#example "Direct link to Example")
25
+
26
+ ```typescript
27
+ const poolManager = createLakebasePoolManager();
28
+
29
+ // In a route handler:
30
+ const userPool = poolManager.getPool(userName, {
31
+ workspaceClient: new WorkspaceClient({ token: userToken, host, authType: "pat" }),
32
+ user: userName,
33
+ });
34
+ const result = await userPool.query("SELECT * FROM products");
35
+
36
+ ```
@@ -0,0 +1,84 @@
1
+ # Interface: LakebasePool
2
+
3
+ Subset of `pg.Pool` exposed by the Lakebase plugin.
4
+
5
+ The underlying `RoutingPool` implementation does not extend `EventEmitter` — event listener methods such as `on('error', ...)` are not available. Use `query()`, `connect()`, and `end()` for all pool operations.
6
+
7
+ ## Properties[​](#properties "Direct link to Properties")
8
+
9
+ ### idleCount[​](#idlecount "Direct link to idleCount")
10
+
11
+ ```ts
12
+ readonly idleCount: number;
13
+
14
+ ```
15
+
16
+ ***
17
+
18
+ ### totalCount[​](#totalcount "Direct link to totalCount")
19
+
20
+ ```ts
21
+ readonly totalCount: number;
22
+
23
+ ```
24
+
25
+ ***
26
+
27
+ ### waitingCount[​](#waitingcount "Direct link to waitingCount")
28
+
29
+ ```ts
30
+ readonly waitingCount: number;
31
+
32
+ ```
33
+
34
+ ## Methods[​](#methods "Direct link to Methods")
35
+
36
+ ### connect()[​](#connect "Direct link to connect()")
37
+
38
+ ```ts
39
+ connect(): Promise<PoolClient>;
40
+
41
+ ```
42
+
43
+ #### Returns[​](#returns "Direct link to Returns")
44
+
45
+ `Promise`<`PoolClient`>
46
+
47
+ ***
48
+
49
+ ### end()[​](#end "Direct link to end()")
50
+
51
+ ```ts
52
+ end(): Promise<void>;
53
+
54
+ ```
55
+
56
+ #### Returns[​](#returns-1 "Direct link to Returns")
57
+
58
+ `Promise`<`void`>
59
+
60
+ ***
61
+
62
+ ### query()[​](#query "Direct link to query()")
63
+
64
+ ```ts
65
+ query<T>(text: string, values?: unknown[]): Promise<QueryResult<T>>;
66
+
67
+ ```
68
+
69
+ #### Type Parameters[​](#type-parameters "Direct link to Type Parameters")
70
+
71
+ | Type Parameter | Default type |
72
+ | ------------------------------ | ------------ |
73
+ | `T` *extends* `QueryResultRow` | `any` |
74
+
75
+ #### Parameters[​](#parameters "Direct link to Parameters")
76
+
77
+ | Parameter | Type |
78
+ | --------- | ------------ |
79
+ | `text` | `string` |
80
+ | `values?` | `unknown`\[] |
81
+
82
+ #### Returns[​](#returns-2 "Direct link to Returns")
83
+
84
+ `Promise`<`QueryResult`<`T`>>
@@ -0,0 +1,101 @@
1
+ # Interface: LakebasePoolManager
2
+
3
+ Manages multiple Lakebase connection pools keyed by an identifier (e.g. userId).
4
+
5
+ Used for On-Behalf-Of (OBO) scenarios where each user needs their own pool with their own OAuth token refresh, enabling features like Row-Level Security.
6
+
7
+ ## Properties[​](#properties "Direct link to Properties")
8
+
9
+ ### size[​](#size "Direct link to size")
10
+
11
+ ```ts
12
+ readonly size: number;
13
+
14
+ ```
15
+
16
+ Number of active pools.
17
+
18
+ ## Methods[​](#methods "Direct link to Methods")
19
+
20
+ ### closeAll()[​](#closeall "Direct link to closeAll()")
21
+
22
+ ```ts
23
+ closeAll(): Promise<void>;
24
+
25
+ ```
26
+
27
+ Close all managed pools and stop the periodic cleanup timer (for graceful shutdown).
28
+
29
+ #### Returns[​](#returns "Direct link to Returns")
30
+
31
+ `Promise`<`void`>
32
+
33
+ ***
34
+
35
+ ### closePool()[​](#closepool "Direct link to closePool()")
36
+
37
+ ```ts
38
+ closePool(key: string): Promise<void>;
39
+
40
+ ```
41
+
42
+ Close and remove a specific pool.
43
+
44
+ #### Parameters[​](#parameters "Direct link to Parameters")
45
+
46
+ | Parameter | Type |
47
+ | --------- | -------- |
48
+ | `key` | `string` |
49
+
50
+ #### Returns[​](#returns-1 "Direct link to Returns")
51
+
52
+ `Promise`<`void`>
53
+
54
+ ***
55
+
56
+ ### getPool()[​](#getpool "Direct link to getPool()")
57
+
58
+ ```ts
59
+ getPool(
60
+ key: string,
61
+ perPoolConfig: Partial<LakebasePoolConfig>,
62
+ tokenFingerprint?: string): Pool;
63
+
64
+ ```
65
+
66
+ Get an existing pool or create a new one for the given key. When creating, merges `perPoolConfig` with the base config passed to the factory.
67
+
68
+ If `tokenFingerprint` is provided and differs from the cached pool's fingerprint, the stale pool is closed and a fresh one is created with the new config (including the updated `workspaceClient`).
69
+
70
+ #### Parameters[​](#parameters-1 "Direct link to Parameters")
71
+
72
+ | Parameter | Type |
73
+ | ------------------- | ------------------------------------------------------------------------------------------ |
74
+ | `key` | `string` |
75
+ | `perPoolConfig` | `Partial`<[`LakebasePoolConfig`](./docs/api/appkit/Interface.LakebasePoolConfig.md)> |
76
+ | `tokenFingerprint?` | `string` |
77
+
78
+ #### Returns[​](#returns-2 "Direct link to Returns")
79
+
80
+ `Pool`
81
+
82
+ ***
83
+
84
+ ### hasPool()[​](#haspool "Direct link to hasPool()")
85
+
86
+ ```ts
87
+ hasPool(key: string): boolean;
88
+
89
+ ```
90
+
91
+ Check whether a pool exists for the given key.
92
+
93
+ #### Parameters[​](#parameters-2 "Direct link to Parameters")
94
+
95
+ | Parameter | Type |
96
+ | --------- | -------- |
97
+ | `key` | `string` |
98
+
99
+ #### Returns[​](#returns-3 "Direct link to Returns")
100
+
101
+ `boolean`
@@ -52,7 +52,9 @@ Documentation merge entry for Typedoc — combines the stable `@databricks/appki
52
52
  | [JobAPI](./docs/api/appkit/Interface.JobAPI.md) | User-facing API for a single configured job. |
53
53
  | [JobConfig](./docs/api/appkit/Interface.JobConfig.md) | Per-job configuration options. |
54
54
  | [JobsConnectorConfig](./docs/api/appkit/Interface.JobsConnectorConfig.md) | - |
55
+ | [LakebasePool](./docs/api/appkit/Interface.LakebasePool.md) | Subset of `pg.Pool` exposed by the Lakebase plugin. |
55
56
  | [LakebasePoolConfig](./docs/api/appkit/Interface.LakebasePoolConfig.md) | Configuration for creating a Lakebase connection pool |
57
+ | [LakebasePoolManager](./docs/api/appkit/Interface.LakebasePoolManager.md) | Manages multiple Lakebase connection pools keyed by an identifier (e.g. userId). |
56
58
  | [McpConnectAllResult](./docs/api/appkit/Interface.McpConnectAllResult.md) | Per-endpoint outcome of [AppKitMcpClient.connectAll](./docs/api/appkit/Class.AppKitMcpClient.md#connectall). Callers (the agents plugin in particular) use the split to warn at startup when some MCP servers are unreachable without aborting boot for the rest. |
57
59
  | [Message](./docs/api/appkit/Interface.Message.md) | - |
58
60
  | [PluginManifest](./docs/api/appkit/Interface.PluginManifest.md) | Plugin manifest that declares metadata and resource requirements. Attached to plugin classes as a static property. Extends the shared PluginManifest with strict resource types. |
@@ -124,6 +126,7 @@ Documentation merge entry for Typedoc — combines the stable `@databricks/appki
124
126
  | [createAgent](./docs/api/appkit/Function.createAgent.md) | Pure factory for agent definitions. Returns the passed-in definition after cycle-detecting the sub-agent graph. Accepts the full `AgentDefinition` shape and is safe to call at module top-level. |
125
127
  | [createApp](./docs/api/appkit/Function.createApp.md) | Bootstraps AppKit with the provided configuration. |
126
128
  | [createLakebasePool](./docs/api/appkit/Function.createLakebasePool.md) | Create a Lakebase pool with appkit's logger integration. Telemetry automatically uses appkit's OpenTelemetry configuration via global registry. |
129
+ | [createLakebasePoolManager](./docs/api/appkit/Function.createLakebasePoolManager.md) | Create a pool manager that maintains per-key Lakebase connection pools. |
127
130
  | [defineTool](./docs/api/appkit/Function.defineTool.md) | Defines a single tool entry for a plugin's internal registry. |
128
131
  | [executeFromRegistry](./docs/api/appkit/Function.executeFromRegistry.md) | Validates tool-call arguments against the entry's schema and invokes its handler. On validation failure, returns an LLM-friendly error string (matching the behavior of `tool()`) rather than throwing, so the model can self-correct on its next turn. |
129
132
  | [extractServingEndpoints](./docs/api/appkit/Function.extractServingEndpoints.md) | Extract serving endpoint config from a server file by AST-parsing it. Looks for `serving({ endpoints: { alias: { env: "..." }, ... } })` calls and extracts the endpoint alias names and their environment variable mappings. |
@@ -24,7 +24,6 @@ This guide is designed to work even when you *do not* have access to the AppKit
24
24
 
25
25
  * **Do not invent APIs**. If unsure, stick to the patterns shown in the documentation and only use documented exports from `@databricks/appkit` and `@databricks/appkit-ui`.
26
26
  * **`createApp()` is async**. Prefer **top-level `await createApp(...)`**. If you can't, use `void createApp(...)` and do not ignore promise rejection.
27
- * **Always memoize query parameters** passed to `useAnalyticsQuery` / charts to avoid refetch loops.
28
27
  * **Always handle loading/error/empty states** in UI (use `Skeleton`, error text, empty state).
29
28
  * **Always use `sql.*` helpers** for query parameters (do not pass raw strings/numbers unless the query expects none).
30
29
  * **Never construct SQL strings dynamically**. Use parameterized queries with `:paramName`.
@@ -51,6 +51,12 @@ The `plugin.execute` span created by the execution interceptor chain includes th
51
51
 
52
52
  These attributes are automatically added when your plugin uses `execute()` or `executeStream()`. All built-in plugins use these methods for their OBO operations. Custom plugins should do the same to get automatic telemetry instrumentation.
53
53
 
54
+ ## Lakebase per-user connections[​](#lakebase-per-user-connections "Direct link to Lakebase per-user connections")
55
+
56
+ The Lakebase plugin uses a different mechanism for `asUser(req)`: instead of swapping the `WorkspaceClient` via AsyncLocalStorage, it creates a **separate `pg.Pool` per user**, each with its own OAuth token refresh. This is necessary because PostgreSQL connections are authenticated at connection time — the pool itself is the authentication boundary.
57
+
58
+ See [Lakebase plugin — per-user connections](./docs/plugins/lakebase.md#on-behalf-of-obo--per-user-connections) for details.
59
+
54
60
  ## Development mode behavior[​](#development-mode-behavior "Direct link to Development mode behavior")
55
61
 
56
62
  In local development (`NODE_ENV=development`), if `asUser(req)` is called without a user token, it logs a warning and skips user impersonation — the operation runs with the default credentials configured for the app instead. The telemetry span will show `execution.context: "service"` with `execution.obo_dev_fallback: true` to distinguish these from regular service principal calls.