@databricks/appkit-ui 0.19.0 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CLAUDE.md CHANGED
@@ -46,7 +46,7 @@ npx @databricks/appkit docs <query>
46
46
  - [Execution context](./docs/plugins/execution-context.md): AppKit manages Databricks authentication via two contexts:
47
47
  - [Files plugin](./docs/plugins/files.md): File operations against Databricks Unity Catalog Volumes. Supports listing, reading, downloading, uploading, deleting, and previewing files with built-in caching, retry, and timeout handling via the execution interceptor pipeline.
48
48
  - [Genie plugin](./docs/plugins/genie.md): Integrates Databricks AI/BI Genie spaces into your AppKit application, enabling natural language data queries via a conversational interface.
49
- - [Lakebase plugin](./docs/plugins/lakebase.md): Currently, the Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming future release.
49
+ - [Lakebase plugin](./docs/plugins/lakebase.md): Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
50
50
  - [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
51
51
  - [Server plugin](./docs/plugins/server.md): Provides HTTP server capabilities with development and production modes.
52
52
 
@@ -111,6 +111,7 @@
111
111
  "uc_function",
112
112
  "uc_connection",
113
113
  "database",
114
+ "postgres",
114
115
  "genie_space",
115
116
  "experiment",
116
117
  "app"
@@ -162,6 +163,11 @@
162
163
  "enum": ["CAN_CONNECT_AND_CREATE"],
163
164
  "description": "Permission for database resources"
164
165
  },
166
+ "postgresPermission": {
167
+ "type": "string",
168
+ "enum": ["CAN_CONNECT_AND_CREATE"],
169
+ "description": "Permission for Postgres resources"
170
+ },
165
171
  "genieSpacePermission": {
166
172
  "type": "string",
167
173
  "enum": ["CAN_VIEW", "CAN_RUN", "CAN_EDIT", "CAN_MANAGE"],
@@ -179,7 +185,6 @@
179
185
  },
180
186
  "resourceFieldEntry": {
181
187
  "type": "object",
182
- "required": ["env"],
183
188
  "properties": {
184
189
  "env": {
185
190
  "type": "string",
@@ -190,20 +195,37 @@
190
195
  "description": {
191
196
  "type": "string",
192
197
  "description": "Human-readable description for this field"
198
+ },
199
+ "bundleIgnore": {
200
+ "type": "boolean",
201
+ "default": false,
202
+ "description": "When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation."
203
+ },
204
+ "examples": {
205
+ "type": "array",
206
+ "items": { "type": "string" },
207
+ "description": "Example values showing the expected format for this field"
208
+ },
209
+ "localOnly": {
210
+ "type": "boolean",
211
+ "default": false,
212
+ "description": "When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time."
213
+ },
214
+ "value": {
215
+ "type": "string",
216
+ "description": "Static value for this field. Used when no prompted or resolved value exists."
217
+ },
218
+ "resolve": {
219
+ "type": "string",
220
+ "pattern": "^[a-z_]+:[a-zA-Z]+$",
221
+ "description": "Named resolver prefixed by resource type (e.g., 'postgres:host'). The CLI resolves this value during the init prompt flow."
193
222
  }
194
223
  },
195
224
  "additionalProperties": false
196
225
  },
197
226
  "resourceRequirement": {
198
227
  "type": "object",
199
- "required": [
200
- "type",
201
- "alias",
202
- "resourceKey",
203
- "description",
204
- "permission",
205
- "fields"
206
- ],
228
+ "required": ["type", "alias", "resourceKey", "description", "permission"],
207
229
  "properties": {
208
230
  "type": {
209
231
  "$ref": "#/$defs/resourceType"
@@ -337,6 +359,17 @@
337
359
  }
338
360
  }
339
361
  },
362
+ {
363
+ "if": {
364
+ "properties": { "type": { "const": "postgres" } },
365
+ "required": ["type"]
366
+ },
367
+ "then": {
368
+ "properties": {
369
+ "permission": { "$ref": "#/$defs/postgresPermission" }
370
+ }
371
+ }
372
+ },
340
373
  {
341
374
  "if": {
342
375
  "properties": { "type": { "const": "genie_space" } },
@@ -49,6 +49,15 @@ JOB: "job";
49
49
 
50
50
  ***
51
51
 
52
+ ### POSTGRES[​](#postgres "Direct link to POSTGRES")
53
+
54
+ ```ts
55
+ POSTGRES: "postgres";
56
+
57
+ ```
58
+
59
+ ***
60
+
52
61
  ### SECRET[​](#secret "Direct link to SECRET")
53
62
 
54
63
  ```ts
@@ -4,6 +4,17 @@ Defines a single field for a resource. Each field has its own environment variab
4
4
 
5
5
  ## Properties[​](#properties "Direct link to Properties")
6
6
 
7
+ ### bundleIgnore?[​](#bundleignore "Direct link to bundleIgnore?")
8
+
9
+ ```ts
10
+ optional bundleIgnore: boolean;
11
+
12
+ ```
13
+
14
+ When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation.
15
+
16
+ ***
17
+
7
18
  ### description?[​](#description "Direct link to description?")
8
19
 
9
20
  ```ts
@@ -15,11 +26,55 @@ Human-readable description for this field
15
26
 
16
27
  ***
17
28
 
18
- ### env[​](#env "Direct link to env")
29
+ ### env?[​](#env "Direct link to env?")
19
30
 
20
31
  ```ts
21
- env: string;
32
+ optional env: string;
22
33
 
23
34
  ```
24
35
 
25
36
  Environment variable name for this field
37
+
38
+ ***
39
+
40
+ ### examples?[​](#examples "Direct link to examples?")
41
+
42
+ ```ts
43
+ optional examples: string[];
44
+
45
+ ```
46
+
47
+ Example values showing the expected format for this field
48
+
49
+ ***
50
+
51
+ ### localOnly?[​](#localonly "Direct link to localOnly?")
52
+
53
+ ```ts
54
+ optional localOnly: boolean;
55
+
56
+ ```
57
+
58
+ When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time.
59
+
60
+ ***
61
+
62
+ ### resolve?[​](#resolve "Direct link to resolve?")
63
+
64
+ ```ts
65
+ optional resolve: string;
66
+
67
+ ```
68
+
69
+ Named resolver prefixed by resource type (e.g., 'postgres<!-- -->:host<!-- -->'). The CLI resolves this value during the init prompt flow.
70
+
71
+ ***
72
+
73
+ ### value?[​](#value "Direct link to value?")
74
+
75
+ ```ts
76
+ optional value: string;
77
+
78
+ ```
79
+
80
+ Static value for this field. Used when no prompted or resolved value exists.
@@ -11,6 +11,7 @@ type ResourcePermission =
11
11
  | UcFunctionPermission
12
12
  | UcConnectionPermission
13
13
  | DatabasePermission
14
+ | PostgresPermission
14
15
  | GenieSpacePermission
15
16
  | ExperimentPermission
16
17
  | AppPermission;
@@ -1,9 +1,5 @@
1
1
  # Lakebase plugin
2
2
 
3
- info
4
-
5
- Currently, the Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming future release.
6
-
7
3
  Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
8
4
 
9
5
  **Key features:**
@@ -12,90 +8,24 @@ Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with a
12
8
  * Automatic OAuth token refresh (1-hour tokens, 2-minute refresh buffer)
13
9
  * Token caching to minimize API calls
14
10
  * Built-in OpenTelemetry instrumentation (query duration, pool connections, token refresh)
11
+ * AppKit logger configured by default for query and connection events
15
12
 
16
- ## Setting up Lakebase[​](#setting-up-lakebase "Direct link to Setting up Lakebase")
17
-
18
- Before using the plugin, you need to connect your Databricks App's service principal to your Lakebase database.
19
-
20
- ### 1. Find your app's service principal[​](#1-find-your-apps-service-principal "Direct link to 1. Find your app's service principal")
21
-
22
- Create a Databricks App from the UI (`Compute > Apps > Create App > Create a custom app`). Navigate to the **Environment** tab and note the `DATABRICKS_CLIENT_ID` value — this is the service principal that will connect to your Lakebase database.
23
-
24
- ![App environment tab](/appkit/assets/images/step-1-073320f925a3961838afa0842c727307.png)
25
-
26
- ### 2. Find your Project ID and Branch ID[​](#2-find-your-project-id-and-branch-id "Direct link to 2. Find your Project ID and Branch ID")
13
+ ## Getting started with the Lakebase[​](#getting-started-with-the-lakebase "Direct link to Getting started with the Lakebase")
27
14
 
28
- Create a new Lakebase Postgres Autoscaling project. Navigate to your Lakebase project's branch details and switch to the **Compute** tab. Note the **Project ID** and **Branch ID** from the URL.
15
+ The easiest way to get started with the Lakebase plugin is to use the Databricks CLI to create a new Databricks app with AppKit installed and the Lakebase plugin.
29
16
 
30
- ![Branch details](/appkit/assets/images/step-2-25954a56aecd4dafe4966f7cecc6e8f4.png)
17
+ ### Prerequisites[​](#prerequisites "Direct link to Prerequisites")
31
18
 
32
- ### 3. Find your endpoint[​](#3-find-your-endpoint "Direct link to 3. Find your endpoint")
19
+ * [Node.js](https://nodejs.org) v22+ environment with `npm`
20
+ * Databricks CLI (v0.287.0 or higher): install and configure it according to the [official tutorial](https://docs.databricks.com/aws/en/dev-tools/cli/tutorial).
21
+ * A new Databricks app with AppKit installed. See [Bootstrap a new Databricks app](./docs.md#quick-start-options) for more details.
33
22
 
34
- Use the Databricks CLI to list endpoints for the branch. Note the `name` field from the output — this is your `LAKEBASE_ENDPOINT` value.
23
+ ### Steps[​](#steps "Direct link to Steps")
35
24
 
36
- ```bash
37
- databricks postgres list-endpoints projects/{project-id}/branches/{branch-id}
38
-
39
- ```
40
-
41
- Example output:
42
-
43
- ```json
44
- [
45
- {
46
- "create_time": "2026-02-19T12:13:02Z",
47
- "name": "projects/{project-id}/branches/{branch-id}/endpoints/primary"
48
- }
49
- ]
50
-
51
- ```
52
-
53
- ### 4. Get connection parameters[​](#4-get-connection-parameters "Direct link to 4. Get connection parameters")
54
-
55
- Click the **Connect** button on your Lakebase branch and copy the `PGHOST` and `PGDATABASE` values for later.
56
-
57
- ![Connect dialog](/appkit/assets/images/step-4-78b906d125c2c130f6e14984a9f89a62.png)
58
-
59
- ### 5. Grant access to the service principal[​](#5-grant-access-to-the-service-principal "Direct link to 5. Grant access to the service principal")
60
-
61
- Navigate to the **SQL Editor** tab on your Lakebase branch. Run the following SQL against the `databricks_postgres` database, replacing the service principal ID in the `DECLARE` block with the `DATABRICKS_CLIENT_ID` value from step 1:
62
-
63
- ```sql
64
- CREATE EXTENSION IF NOT EXISTS databricks_auth;
65
-
66
- DO $$
67
- DECLARE
68
- sp TEXT := 'your-service-principal-id'; -- Replace with DATABRICKS_CLIENT_ID from Step 1
69
- BEGIN
70
- -- Create service principal role
71
- PERFORM databricks_create_role(sp, 'SERVICE_PRINCIPAL');
72
-
73
- -- Connection and schema access
74
- EXECUTE format('GRANT CONNECT ON DATABASE "databricks_postgres" TO %I', sp);
75
- EXECUTE format('GRANT ALL ON SCHEMA public TO %I', sp);
76
-
77
- -- Privileges on existing objects
78
- EXECUTE format('GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO %I', sp);
79
- EXECUTE format('GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO %I', sp);
80
- EXECUTE format('GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public TO %I', sp);
81
- EXECUTE format('GRANT ALL PRIVILEGES ON ALL PROCEDURES IN SCHEMA public TO %I', sp);
82
-
83
- -- Default privileges on future objects you create
84
- EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO %I', sp);
85
- EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO %I', sp);
86
- EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON FUNCTIONS TO %I', sp);
87
- EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON ROUTINES TO %I', sp);
88
- END $$;
89
-
90
- ```
91
-
92
- ![SQL Editor](/appkit/assets/images/step-5-38bdf3e3ac8aadf2c0cd57aa5f0ba090.png)
93
-
94
- ### 6. Verify the role[​](#6-verify-the-role "Direct link to 6. Verify the role")
95
-
96
- Navigate to the **Roles & Databases** tab and confirm the role is visible. You may need to fully refresh the page.
97
-
98
- ![Roles \&amp; Databases tab](/appkit/assets/images/step-6-edbb462e89a66c46d58424768c163e4e.png)
25
+ 1. First, create a new Lakebase Postgres Autoscaling project according to the [Get started documentation](https://docs.databricks.com/aws/en/oltp/projects/get-started).
26
+ 2. To add the Lakebase plugin to your project, run the `databricks apps init` command and interactively select the **Lakebase** plugin. The CLI will guide you through picking a Lakebase project, branch, and database.
27
+ <!-- -->
28
+ * When asked, select **Yes** to deploy the app to Databricks Apps right after its creation.
99
29
 
100
30
  ## Basic usage[​](#basic-usage "Direct link to Basic usage")
101
31
 
@@ -108,34 +38,6 @@ await createApp({
108
38
 
109
39
  ```
110
40
 
111
- ## Environment variables[​](#environment-variables "Direct link to Environment variables")
112
-
113
- The required environment variables:
114
-
115
- | Variable | Description |
116
- | ------------------- | ----------------------------------------------------------------------- |
117
- | `PGHOST` | Lakebase host |
118
- | `PGDATABASE` | Database name |
119
- | `LAKEBASE_ENDPOINT` | Endpoint resource path (e.g. `projects/.../branches/.../endpoints/...`) |
120
- | `PGSSLMODE` | TLS mode — set to `require` |
121
-
122
- Ensure that those environment variables are set both for local development (`.env` file) and for deployment (`app.yaml` file):
123
-
124
- ```yaml
125
- env:
126
- - name: LAKEBASE_ENDPOINT
127
- value: projects/{project-id}/branches/{branch-id}/endpoints/primary
128
- - name: PGHOST
129
- value: {your-lakebase-host}
130
- - name: PGDATABASE
131
- value: databricks_postgres
132
- - name: PGSSLMODE
133
- value: require
134
-
135
- ```
136
-
137
- For the full configuration reference (SSL, pool size, timeouts, logging, ORM examples), see the [`@databricks/lakebase` README](https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md).
138
-
139
41
  ## Accessing the pool[​](#accessing-the-pool "Direct link to Accessing the pool")
140
42
 
141
43
  After initialization, access Lakebase through the `AppKit.lakebase` object:
@@ -145,9 +47,17 @@ const AppKit = await createApp({
145
47
  plugins: [server(), lakebase()],
146
48
  });
147
49
 
148
- // Direct query (parameterized)
50
+ await AppKit.lakebase.query(`CREATE SCHEMA IF NOT EXISTS app`);
51
+
52
+ await AppKit.lakebase.query(`CREATE TABLE IF NOT EXISTS app.orders (
53
+ id SERIAL PRIMARY KEY,
54
+ user_id VARCHAR(255) NOT NULL,
55
+ amount DECIMAL(10, 2) NOT NULL,
56
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
57
+ )`);
58
+
149
59
  const result = await AppKit.lakebase.query(
150
- "SELECT * FROM orders WHERE user_id = $1",
60
+ "SELECT * FROM app.orders WHERE user_id = $1",
151
61
  [userId],
152
62
  );
153
63
 
@@ -160,7 +70,33 @@ const pgConfig = AppKit.lakebase.getPgConfig(); // pg.PoolConfig
160
70
 
161
71
  ```
162
72
 
163
- ## Configuration options[​](#configuration-options "Direct link to Configuration options")
73
+ ## Configuration[​](#configuration "Direct link to Configuration")
74
+
75
+ ### Environment variables[​](#environment-variables "Direct link to Environment variables")
76
+
77
+ The required environment variables are:
78
+
79
+ | Variable | Description |
80
+ | ------------------- | ---------------------------------------------------------------------------------------------------- |
81
+ | `LAKEBASE_ENDPOINT` | Endpoint resource path (e.g. `projects/.../branches/.../endpoints/...`) |
82
+ | `PGHOST` | Lakebase host (auto-injected in production by the `postgres` Databricks Apps resource) |
83
+ | `PGDATABASE` | Database name (auto-injected in production by the `postgres` Databricks Apps resource) |
84
+ | `PGSSLMODE` | TLS mode - set to `require` (auto-injected in production by the `postgres` Databricks Apps resource) |
85
+
86
+ When deployed to Databricks Apps with a `postgres` database resource configured, `PGHOST`, `PGDATABASE`, `PGSSLMODE`, `PGUSER`, `PGPORT`, and `PGAPPNAME` are automatically injected by the platform. Only `LAKEBASE_ENDPOINT` must be set explicitly:
87
+
88
+ ```yaml
89
+ env:
90
+ - name: LAKEBASE_ENDPOINT
91
+ valueFrom: postgres
92
+
93
+ ```
94
+
95
+ For local development, the `.env` file is automatically generated by `databricks apps init` with the correct values for your Lakebase project.
96
+
97
+ For the full configuration reference (SSL, pool size, timeouts, logging, ORM examples), see the [`@databricks/lakebase` README](https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md).
98
+
99
+ ### Pool configuration[​](#pool-configuration "Direct link to Pool configuration")
164
100
 
165
101
  Pass a `pool` object to override any defaults:
166
102
 
@@ -178,3 +114,74 @@ await createApp({
178
114
  });
179
115
 
180
116
  ```
117
+
118
+ ## Database Permissions[​](#database-permissions "Direct link to Database Permissions")
119
+
120
+ When you create the app with the Lakebase resource using the [Getting started](#getting-started-with-the-lakebase) guide, the Service Principal is automatically granted `CAN_CONNECT_AND_CREATE` permission on the `postgres` resource. This lets the Service Principal connect to the database and create new objects, but **not access any existing schemas or tables.**
121
+
122
+ ### Local development[​](#local-development "Direct link to Local development")
123
+
124
+ To develop locally against a deployed Lakebase database:
125
+
126
+ 1. **Deploy the app first.** The Service Principal creates the database schema and tables on first deploy. Apps generated from `databricks apps init` handle this automatically - they check if tables exist on startup and skip creation if they do.
127
+
128
+ 2. **Grant `databricks_superuser` via the Lakebase UI:**
129
+
130
+ 1. Open the Lakebase Autoscaling UI and navigate to your project's **Branch Overview** page.
131
+ 2. Click **Add role** (or **Edit role** if your OAuth role already exists).
132
+ 3. Select your Databricks identity as the principal and check the **`databricks_superuser`** system role.
133
+
134
+ 3. **Run locally** - your Databricks user identity (email) is used for OAuth authentication. The `databricks_superuser` role gives full **DML access** (read/write data) but **not DDL** (creating schemas or tables) - that's why deploying first matters (see note below).
135
+
136
+ For other users, use the same **Add role** flow in the Lakebase UI to create an OAuth role with `databricks_superuser` for each user.
137
+
138
+ tip
139
+
140
+ [Postgres password authentication](https://docs.databricks.com/aws/en/oltp/projects/authentication#overview) is a simpler alternative that avoids OAuth role permission complexity. However, it requires you to set up a password for the user in the **Branch Overview** page in the Lakebase Autoscaling UI.
141
+
142
+ Why deploy first?
143
+
144
+ When the app is deployed, the Service Principal creates schemas and tables and becomes their owner. A `databricks_superuser` has full **DML access** (SELECT, INSERT, UPDATE, DELETE) to these objects, but **cannot run DDL** (CREATE SCHEMA, CREATE TABLE) on schemas owned by the Service Principal. Deploying first ensures all objects exist before local development begins.
145
+
146
+ ### Fine-grained permissions[​](#fine-grained-permissions "Direct link to Fine-grained permissions")
147
+
148
+ For most use cases, `databricks_superuser` is sufficient. If you need schema-level grants instead, refer to the official documentation:
149
+
150
+ * [Manage database permissions](https://docs.databricks.com/aws/en/oltp/projects/manage-roles-permissions)
151
+ * [Postgres roles](https://docs.databricks.com/aws/en/oltp/projects/postgres-roles)
152
+
153
+ SQL script for fine-grained grants
154
+
155
+ Deploy and run the app at least once before executing these grants so the Service Principal initializes the database schema first.
156
+
157
+ Replace `subject` with the user email and `schema` with your schema name:
158
+
159
+ ```sql
160
+ CREATE EXTENSION IF NOT EXISTS databricks_auth;
161
+
162
+ DO $$
163
+ DECLARE
164
+ subject TEXT := 'your-subject'; -- User email like name@databricks.com
165
+ schema TEXT := 'your_schema'; -- Replace 'your_schema' with your schema name
166
+ BEGIN
167
+ -- Create OAuth role for the Databricks identity
168
+ PERFORM databricks_create_role(subject, 'USER');
169
+
170
+ -- Connection and schema access
171
+ EXECUTE format('GRANT CONNECT ON DATABASE "databricks_postgres" TO %I', subject);
172
+ EXECUTE format('GRANT ALL ON SCHEMA %I TO %I', schema, subject);
173
+
174
+ -- Privileges on existing objects
175
+ EXECUTE format('GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA %I TO %I', schema, subject);
176
+ EXECUTE format('GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA %I TO %I', schema, subject);
177
+ EXECUTE format('GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA %I TO %I', schema, subject);
178
+ EXECUTE format('GRANT ALL PRIVILEGES ON ALL PROCEDURES IN SCHEMA %I TO %I', schema, subject);
179
+
180
+ -- Default privileges on future objects
181
+ EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %I GRANT ALL ON TABLES TO %I', schema, subject);
182
+ EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %I GRANT ALL ON SEQUENCES TO %I', schema, subject);
183
+ EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %I GRANT ALL ON FUNCTIONS TO %I', schema, subject);
184
+ EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %I GRANT ALL ON ROUTINES TO %I', schema, subject);
185
+ END $$;
186
+
187
+ ```
package/llms.txt CHANGED
@@ -46,7 +46,7 @@ npx @databricks/appkit docs <query>
46
46
  - [Execution context](./docs/plugins/execution-context.md): AppKit manages Databricks authentication via two contexts:
47
47
  - [Files plugin](./docs/plugins/files.md): File operations against Databricks Unity Catalog Volumes. Supports listing, reading, downloading, uploading, deleting, and previewing files with built-in caching, retry, and timeout handling via the execution interceptor pipeline.
48
48
  - [Genie plugin](./docs/plugins/genie.md): Integrates Databricks AI/BI Genie spaces into your AppKit application, enabling natural language data queries via a conversational interface.
49
- - [Lakebase plugin](./docs/plugins/lakebase.md): Currently, the Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming future release.
49
+ - [Lakebase plugin](./docs/plugins/lakebase.md): Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
50
50
  - [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
51
51
  - [Server plugin](./docs/plugins/server.md): Provides HTTP server capabilities with development and production modes.
52
52
 
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@databricks/appkit-ui",
3
3
  "type": "module",
4
- "version": "0.19.0",
4
+ "version": "0.20.0",
5
5
  "license": "Apache-2.0",
6
6
  "repository": {
7
7
  "type": "git",