@databricks/appkit 0.19.1 → 0.20.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CLAUDE.md +1 -1
- package/dist/appkit/package.js +1 -1
- package/dist/cache/storage/defaults.js +1 -0
- package/dist/cache/storage/defaults.js.map +1 -1
- package/dist/cache/storage/persistent.js +45 -21
- package/dist/cache/storage/persistent.js.map +1 -1
- package/dist/plugins/lakebase/manifest.js +48 -2
- package/dist/registry/resource-registry.d.ts.map +1 -1
- package/dist/registry/resource-registry.js +1 -0
- package/dist/registry/resource-registry.js.map +1 -1
- package/dist/registry/types.d.ts +11 -1
- package/dist/registry/types.d.ts.map +1 -1
- package/dist/registry/types.generated.d.ts +4 -1
- package/dist/registry/types.generated.d.ts.map +1 -1
- package/dist/registry/types.generated.js +2 -0
- package/dist/registry/types.generated.js.map +1 -1
- package/dist/registry/types.js.map +1 -1
- package/dist/schemas/plugin-manifest.schema.json +42 -9
- package/dist/shared/src/plugin.d.ts +11 -1
- package/dist/shared/src/plugin.d.ts.map +1 -1
- package/docs/api/appkit/Enumeration.ResourceType.md +9 -0
- package/docs/api/appkit/Interface.ResourceFieldEntry.md +57 -2
- package/docs/api/appkit/TypeAlias.ResourcePermission.md +1 -0
- package/docs/plugins/lakebase.md +120 -113
- package/llms.txt +1 -1
- package/package.json +1 -1
package/CLAUDE.md
CHANGED
@@ -46,7 +46,7 @@ npx @databricks/appkit docs <query>
 - [Execution context](./docs/plugins/execution-context.md): AppKit manages Databricks authentication via two contexts:
 - [Files plugin](./docs/plugins/files.md): File operations against Databricks Unity Catalog Volumes. Supports listing, reading, downloading, uploading, deleting, and previewing files with built-in caching, retry, and timeout handling via the execution interceptor pipeline.
 - [Genie plugin](./docs/plugins/genie.md): Integrates Databricks AI/BI Genie spaces into your AppKit application, enabling natural language data queries via a conversational interface.
-- [Lakebase plugin](./docs/plugins/lakebase.md):
+- [Lakebase plugin](./docs/plugins/lakebase.md): Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
 - [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
 - [Server plugin](./docs/plugins/server.md): Provides HTTP server capabilities with development and production modes.
package/dist/appkit/package.js
CHANGED

package/dist/cache/storage/defaults.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"defaults.js","names":[],"sources":["../../../src/cache/storage/defaults.ts"],"sourcesContent":["/** Default configuration for in-memory storage */\nexport const inMemoryStorageDefaults = {\n /** Maximum number of entries in the cache */\n maxSize: 1000,\n};\n\n/** Default configuration for Lakebase storage */\nexport const lakebaseStorageDefaults = {\n /** Table name for the cache */\n tableName: \"appkit_cache_entries\",\n /** Maximum number of bytes in the cache */\n maxBytes: 256 * 1024 * 1024, // 256MB\n /** Maximum number of bytes per entry in the cache */\n maxEntryBytes: 10 * 1024 * 1024, // 10MB\n /** Maximum number of entries in the cache */\n maxSize: 1000,\n /** Number of entries to evict when cache is full */\n evictionBatchSize: 100,\n /** Probability (0-1) of checking total bytes on each write operation */\n evictionCheckProbability: 0.1,\n};\n"],"mappings":";;AACA,MAAa,0BAA0B,EAErC,SAAS,KACV;;AAGD,MAAa,0BAA0B;CAErC,WAAW;CAEX,UAAU,MAAM,OAAO;CAEvB,eAAe,KAAK,OAAO;CAE3B,SAAS;CAET,mBAAmB;CAEnB,0BAA0B;CAC3B"}
+
{"version":3,"file":"defaults.js","names":[],"sources":["../../../src/cache/storage/defaults.ts"],"sourcesContent":["/** Default configuration for in-memory storage */\nexport const inMemoryStorageDefaults = {\n /** Maximum number of entries in the cache */\n maxSize: 1000,\n};\n\n/** Default configuration for Lakebase storage */\nexport const lakebaseStorageDefaults = {\n /** Schema name for the cache tables */\n schemaName: \"appkit\",\n /** Table name for the cache */\n tableName: \"appkit_cache_entries\",\n /** Maximum number of bytes in the cache */\n maxBytes: 256 * 1024 * 1024, // 256MB\n /** Maximum number of bytes per entry in the cache */\n maxEntryBytes: 10 * 1024 * 1024, // 10MB\n /** Maximum number of entries in the cache */\n maxSize: 1000,\n /** Number of entries to evict when cache is full */\n evictionBatchSize: 100,\n /** Probability (0-1) of checking total bytes on each write operation */\n evictionCheckProbability: 0.1,\n};\n"],"mappings":";;AACA,MAAa,0BAA0B,EAErC,SAAS,KACV;;AAGD,MAAa,0BAA0B;CAErC,YAAY;CAEZ,WAAW;CAEX,UAAU,MAAM,OAAO;CAEvB,eAAe,KAAK,OAAO;CAE3B,SAAS;CAET,mBAAmB;CAEnB,0BAA0B;CAC3B"}
package/dist/cache/storage/persistent.js
CHANGED
@@ -25,7 +25,9 @@ const logger = createLogger("cache:persistent");
 */
 var PersistentStorage = class {
 pool;
+schemaName;
 tableName;
+qualifiedTableName;
 maxBytes;
 maxEntryBytes;
 evictionBatchSize;
@@ -37,7 +39,9 @@ var PersistentStorage = class {
 this.maxEntryBytes = config.maxEntryBytes ?? lakebaseStorageDefaults.maxEntryBytes;
 this.evictionBatchSize = lakebaseStorageDefaults.evictionBatchSize;
 this.evictionCheckProbability = config.evictionCheckProbability ?? lakebaseStorageDefaults.evictionCheckProbability;
+this.schemaName = lakebaseStorageDefaults.schemaName;
 this.tableName = lakebaseStorageDefaults.tableName;
+this.qualifiedTableName = `${this.schemaName}.${this.tableName}`;
 this.initialized = false;
 }
 /** Initialize the persistent storage and run migrations if necessary */
@@ -59,10 +63,10 @@
 async get(key) {
 await this.ensureInitialized();
 const keyHash = this.hashKey(key);
-const result = await this.pool.query(`SELECT value, expiry FROM ${this.tableName} WHERE key_hash = $1`, [keyHash]);
+const result = await this.pool.query(`SELECT value, expiry FROM ${this.qualifiedTableName} WHERE key_hash = $1`, [keyHash]);
 if (result.rows.length === 0) return null;
 const entry = result.rows[0];
-this.pool.query(`UPDATE ${this.tableName} SET last_accessed = NOW() WHERE key_hash = $1`, [keyHash]).catch(() => {
+this.pool.query(`UPDATE ${this.qualifiedTableName} SET last_accessed = NOW() WHERE key_hash = $1`, [keyHash]).catch(() => {
 logger.debug("Error updating last_accessed time for key: %s", key);
 });
 return {
@@ -86,7 +90,7 @@
 if (Math.random() < this.evictionCheckProbability) {
 if (await this.totalBytes() + byteSize > this.maxBytes) await this.evictBySize(byteSize);
 }
-await this.pool.query(`INSERT INTO ${this.tableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)
+await this.pool.query(`INSERT INTO ${this.qualifiedTableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)
 VALUES ($1, $2, $3, $4, $5, NOW(), NOW())
 ON CONFLICT (key_hash)
 DO UPDATE SET value = $3, byte_size = $4, expiry = $5, last_accessed = NOW()
@@ -106,12 +110,12 @@
 async delete(key) {
 await this.ensureInitialized();
 const keyHash = this.hashKey(key);
-await this.pool.query(`DELETE FROM ${this.tableName} WHERE key_hash = $1`, [keyHash]);
+await this.pool.query(`DELETE FROM ${this.qualifiedTableName} WHERE key_hash = $1`, [keyHash]);
 }
 /** Clear the persistent storage */
 async clear() {
 await this.ensureInitialized();
-await this.pool.query(`TRUNCATE TABLE ${this.tableName}`);
+await this.pool.query(`TRUNCATE TABLE ${this.qualifiedTableName}`);
 }
 /**
 * Check if a value exists in the persistent storage
@@ -121,7 +125,7 @@
 async has(key) {
 await this.ensureInitialized();
 const keyHash = this.hashKey(key);
-return (await this.pool.query(`SELECT EXISTS(SELECT 1 FROM ${this.tableName} WHERE key_hash = $1) as exists`, [keyHash])).rows[0]?.exists ?? false;
+return (await this.pool.query(`SELECT EXISTS(SELECT 1 FROM ${this.qualifiedTableName} WHERE key_hash = $1) as exists`, [keyHash])).rows[0]?.exists ?? false;
 }
 /**
 * Get the size of the persistent storage
@@ -129,13 +133,13 @@
 */
 async size() {
 await this.ensureInitialized();
-const result = await this.pool.query(`SELECT COUNT(*) as count FROM ${this.tableName}`);
+const result = await this.pool.query(`SELECT COUNT(*) as count FROM ${this.qualifiedTableName}`);
 return parseInt(result.rows[0]?.count ?? "0", 10);
 }
 /** Get the total number of bytes in the persistent storage */
 async totalBytes() {
 await this.ensureInitialized();
-const result = await this.pool.query(`SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.tableName}`);
+const result = await this.pool.query(`SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.qualifiedTableName}`);
 return parseInt(result.rows[0]?.total ?? "0", 10);
 }
 /**
@@ -167,7 +171,7 @@
 */
 async cleanupExpired() {
 await this.ensureInitialized();
-const result = await this.pool.query(`WITH deleted as (DELETE FROM ${this.tableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`, [Date.now()]);
+const result = await this.pool.query(`WITH deleted as (DELETE FROM ${this.qualifiedTableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`, [Date.now()]);
 return parseInt(result.rows[0]?.count ?? "0", 10);
 }
 /** Evict entries from the persistent storage by size */
@@ -175,8 +179,8 @@
 if (await this.cleanupExpired() > 0) {
 if (await this.totalBytes() + requiredBytes <= this.maxBytes) return;
 }
-await this.pool.query(`DELETE FROM ${this.tableName} WHERE key_hash IN
-(SELECT key_hash FROM ${this.tableName} ORDER BY last_accessed ASC LIMIT $1)`, [this.evictionBatchSize]);
+await this.pool.query(`DELETE FROM ${this.qualifiedTableName} WHERE key_hash IN
+(SELECT key_hash FROM ${this.qualifiedTableName} ORDER BY last_accessed ASC LIMIT $1)`, [this.evictionBatchSize]);
 }
 /** Ensure the persistent storage is initialized */
 async ensureInitialized() {
@@ -197,9 +201,14 @@
 }
 /** Run migrations for the persistent storage */
 async runMigrations() {
-
-
-
+const steps = [
+{
+name: "create schema",
+query: `CREATE SCHEMA IF NOT EXISTS ${this.schemaName}`
+},
+{
+name: "create table",
+query: `CREATE TABLE IF NOT EXISTS ${this.qualifiedTableName} (
 id BIGSERIAL PRIMARY KEY,
 key_hash BIGINT NOT NULL,
 key BYTEA NOT NULL,
@@ -208,14 +217,29 @@
 expiry BIGINT NOT NULL,
 created_at TIMESTAMP NOT NULL DEFAULT NOW(),
 last_accessed TIMESTAMP NOT NULL DEFAULT NOW()
-)
-
-
-
-
-
+)`
+},
+{
+name: "create index (key_hash)",
+query: `CREATE UNIQUE INDEX IF NOT EXISTS idx_${this.tableName}_key_hash ON ${this.qualifiedTableName} (key_hash)`
+},
+{
+name: "create index (expiry)",
+query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_expiry ON ${this.qualifiedTableName} (expiry)`
+},
+{
+name: "create index (last_accessed)",
+query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_last_accessed ON ${this.qualifiedTableName} (last_accessed)`
+},
+{
+name: "create index (byte_size)",
+query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.qualifiedTableName} (byte_size)`
+}
+];
+for (const step of steps) try {
+await this.pool.query(step.query);
 } catch (error) {
-logger.error("Error in running migrations for persistent storage: %O", error);
+logger.error("Migration step '%s' failed: %O", step.name, error);
 throw InitializationError.migrationFailed(error);
 }
 }
package/dist/cache/storage/persistent.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"persistent.js","names":[],"sources":["../../../src/cache/storage/persistent.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type pg from \"pg\";\nimport type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { InitializationError, ValidationError } from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { lakebaseStorageDefaults } from \"./defaults\";\n\nconst logger = createLogger(\"cache:persistent\");\n\n/**\n * Persistent cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n *\n * @example\n * const pool = createLakebasePool({ workspaceClient });\n * const persistentStorage = new PersistentStorage(config, pool);\n * await persistentStorage.initialize();\n * await persistentStorage.get(\"my-key\");\n * await persistentStorage.set(\"my-key\", \"my-value\");\n * await persistentStorage.delete(\"my-key\");\n * await persistentStorage.clear();\n * await persistentStorage.has(\"my-key\");\n *\n */\nexport class PersistentStorage implements CacheStorage {\n private readonly pool: pg.Pool;\n private readonly tableName: string;\n private readonly maxBytes: number;\n private readonly maxEntryBytes: number;\n private readonly evictionBatchSize: number;\n private readonly evictionCheckProbability: number;\n private initialized: boolean;\n\n constructor(config: CacheConfig, pool: pg.Pool) {\n this.pool = pool;\n this.maxBytes = config.maxBytes ?? lakebaseStorageDefaults.maxBytes;\n this.maxEntryBytes =\n config.maxEntryBytes ?? lakebaseStorageDefaults.maxEntryBytes;\n this.evictionBatchSize = lakebaseStorageDefaults.evictionBatchSize;\n this.evictionCheckProbability =\n config.evictionCheckProbability ??\n lakebaseStorageDefaults.evictionCheckProbability;\n this.tableName = lakebaseStorageDefaults.tableName; // hardcoded, safe for now\n this.initialized = false;\n }\n\n /** Initialize the persistent storage and run migrations if necessary */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n try {\n await this.runMigrations();\n this.initialized = true;\n } catch (error) {\n logger.error(\"Error in persistent storage initialization: %O\", error);\n throw error;\n }\n }\n\n /**\n * Get a cached value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the cached value or null if not found\n */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n\n const result = await this.pool.query<{\n value: Buffer;\n expiry: string;\n }>(`SELECT value, expiry FROM ${this.tableName} WHERE key_hash = $1`, [\n keyHash,\n ]);\n\n if (result.rows.length === 0) return null;\n\n const entry = result.rows[0];\n\n // fire-and-forget update\n this.pool\n .query(\n `UPDATE ${this.tableName} SET last_accessed = NOW() WHERE key_hash = $1`,\n [keyHash],\n )\n .catch(() => {\n logger.debug(\"Error updating last_accessed time for key: %s\", key);\n });\n\n return {\n value: this.deserializeValue<T>(entry.value),\n expiry: Number(entry.expiry),\n };\n }\n\n /**\n * Set a value in the persistent storage\n * @param key - Cache key\n * @param entry - Cache entry\n * @returns Promise of the result\n */\n async set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n const keyBytes = Buffer.from(key, \"utf-8\");\n const valueBytes = 
this.serializeValue(entry.value);\n const byteSize = keyBytes.length + valueBytes.length;\n\n if (byteSize > this.maxEntryBytes) {\n throw ValidationError.invalidValue(\n \"cache entry size\",\n byteSize,\n `maximum ${this.maxEntryBytes} bytes`,\n );\n }\n\n // probabilistic eviction check\n if (Math.random() < this.evictionCheckProbability) {\n const totalBytes = await this.totalBytes();\n if (totalBytes + byteSize > this.maxBytes) {\n await this.evictBySize(byteSize);\n }\n }\n\n await this.pool.query(\n `INSERT INTO ${this.tableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)\n VALUES ($1, $2, $3, $4, $5, NOW(), NOW())\n ON CONFLICT (key_hash)\n DO UPDATE SET value = $3, byte_size = $4, expiry = $5, last_accessed = NOW()\n `,\n [keyHash, keyBytes, valueBytes, byteSize, entry.expiry],\n );\n }\n\n /**\n * Delete a value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n await this.pool.query(`DELETE FROM ${this.tableName} WHERE key_hash = $1`, [\n keyHash,\n ]);\n }\n\n /** Clear the persistent storage */\n async clear(): Promise<void> {\n await this.ensureInitialized();\n await this.pool.query(`TRUNCATE TABLE ${this.tableName}`);\n }\n\n /**\n * Check if a value exists in the persistent storage\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n\n const result = await this.pool.query<{ exists: boolean }>(\n `SELECT EXISTS(SELECT 1 FROM ${this.tableName} WHERE key_hash = $1) as exists`,\n [keyHash],\n );\n\n return result.rows[0]?.exists ?? false;\n }\n\n /**\n * Get the size of the persistent storage\n * @returns Promise of the size of the storage\n */\n async size(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.pool.query<{ count: string }>(\n `SELECT COUNT(*) as count FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Get the total number of bytes in the persistent storage */\n async totalBytes(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.pool.query<{ total: string }>(\n `SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.tableName}`,\n );\n return parseInt(result.rows[0]?.total ?? \"0\", 10);\n }\n\n /**\n * Check if the persistent storage is persistent\n * @returns true if the storage is persistent, false otherwise\n */\n isPersistent(): boolean {\n return true;\n }\n\n /**\n * Check if the persistent storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async healthCheck(): Promise<boolean> {\n try {\n await this.pool.query(\"SELECT 1\");\n return true;\n } catch {\n return false;\n }\n }\n\n /** Close the persistent storage */\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Cleanup expired entries from the persistent storage\n * @returns Promise of the number of expired entries\n */\n async cleanupExpired(): Promise<number> {\n await this.ensureInitialized();\n const result = await this.pool.query<{ count: string }>(\n `WITH deleted as (DELETE FROM ${this.tableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`,\n [Date.now()],\n );\n return parseInt(result.rows[0]?.count ?? 
\"0\", 10);\n }\n\n /** Evict entries from the persistent storage by size */\n private async evictBySize(requiredBytes: number): Promise<void> {\n const freedByExpiry = await this.cleanupExpired();\n if (freedByExpiry > 0) {\n const currentBytes = await this.totalBytes();\n if (currentBytes + requiredBytes <= this.maxBytes) {\n return;\n }\n }\n\n await this.pool.query(\n `DELETE FROM ${this.tableName} WHERE key_hash IN\n (SELECT key_hash FROM ${this.tableName} ORDER BY last_accessed ASC LIMIT $1)`,\n [this.evictionBatchSize],\n );\n }\n\n /** Ensure the persistent storage is initialized */\n private async ensureInitialized(): Promise<void> {\n if (!this.initialized) {\n await this.initialize();\n }\n }\n\n /** Generate a 64-bit hash for the cache key using SHA256 */\n private hashKey(key: string): bigint {\n if (!key) throw ValidationError.missingField(\"key\");\n const hash = createHash(\"sha256\").update(key).digest();\n return hash.readBigInt64BE(0);\n }\n\n /** Serialize a value to a buffer */\n private serializeValue<T>(value: T): Buffer {\n return Buffer.from(JSON.stringify(value), \"utf-8\");\n }\n\n /** Deserialize a value from a buffer */\n private deserializeValue<T>(buffer: Buffer): T {\n return JSON.parse(buffer.toString(\"utf-8\")) as T;\n }\n\n /** Run migrations for the persistent storage */\n private async runMigrations(): Promise<void> {\n try {\n await this.pool.query(`\n CREATE TABLE IF NOT EXISTS ${this.tableName} (\n id BIGSERIAL PRIMARY KEY,\n key_hash BIGINT NOT NULL,\n key BYTEA NOT NULL,\n value BYTEA NOT NULL,\n byte_size INTEGER NOT NULL,\n expiry BIGINT NOT NULL,\n created_at TIMESTAMP NOT NULL DEFAULT NOW(),\n last_accessed TIMESTAMP NOT NULL DEFAULT NOW()\n )\n `);\n\n // unique index on key_hash for fast lookups\n await this.pool.query(\n `CREATE UNIQUE INDEX IF NOT EXISTS idx_${this.tableName}_key_hash ON ${this.tableName} (key_hash);`,\n );\n\n // index on expiry for cleanup queries\n await this.pool.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_expiry ON ${this.tableName} (expiry); `,\n );\n\n // index on last_accessed for LRU eviction\n await this.pool.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_last_accessed ON ${this.tableName} (last_accessed); `,\n );\n\n // index on byte_size for monitoring\n await this.pool.query(\n `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.tableName} (byte_size); `,\n );\n } catch (error) {\n logger.error(\n \"Error in running migrations for persistent storage: %O\",\n error,\n );\n throw InitializationError.migrationFailed(error as Error);\n }\n 
}\n}\n"],"mappings":";;;;;;;;aAGoE;AAIpE,MAAM,SAAS,aAAa,mBAAmB;;;;;;;;;;;;;;;;AAiB/C,IAAa,oBAAb,MAAuD;CACrD,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAQ;CAER,YAAY,QAAqB,MAAe;AAC9C,OAAK,OAAO;AACZ,OAAK,WAAW,OAAO,YAAY,wBAAwB;AAC3D,OAAK,gBACH,OAAO,iBAAiB,wBAAwB;AAClD,OAAK,oBAAoB,wBAAwB;AACjD,OAAK,2BACH,OAAO,4BACP,wBAAwB;AAC1B,OAAK,YAAY,wBAAwB;AACzC,OAAK,cAAc;;;CAIrB,MAAM,aAA4B;AAChC,MAAI,KAAK,YAAa;AAEtB,MAAI;AACF,SAAM,KAAK,eAAe;AAC1B,QAAK,cAAc;WACZ,OAAO;AACd,UAAO,MAAM,kDAAkD,MAAM;AACrE,SAAM;;;;;;;;CASV,MAAM,IAAO,KAA4C;AACvD,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EAEjC,MAAM,SAAS,MAAM,KAAK,KAAK,MAG5B,6BAA6B,KAAK,UAAU,uBAAuB,CACpE,QACD,CAAC;AAEF,MAAI,OAAO,KAAK,WAAW,EAAG,QAAO;EAErC,MAAM,QAAQ,OAAO,KAAK;AAG1B,OAAK,KACF,MACC,UAAU,KAAK,UAAU,iDACzB,CAAC,QAAQ,CACV,CACA,YAAY;AACX,UAAO,MAAM,iDAAiD,IAAI;IAClE;AAEJ,SAAO;GACL,OAAO,KAAK,iBAAoB,MAAM,MAAM;GAC5C,QAAQ,OAAO,MAAM,OAAO;GAC7B;;;;;;;;CASH,MAAM,IAAO,KAAa,OAAqC;AAC7D,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EACjC,MAAM,WAAW,OAAO,KAAK,KAAK,QAAQ;EAC1C,MAAM,aAAa,KAAK,eAAe,MAAM,MAAM;EACnD,MAAM,WAAW,SAAS,SAAS,WAAW;AAE9C,MAAI,WAAW,KAAK,cAClB,OAAM,gBAAgB,aACpB,oBACA,UACA,WAAW,KAAK,cAAc,QAC/B;AAIH,MAAI,KAAK,QAAQ,GAAG,KAAK,0BAEvB;OADmB,MAAM,KAAK,YAAY,GACzB,WAAW,KAAK,SAC/B,OAAM,KAAK,YAAY,SAAS;;AAIpC,QAAM,KAAK,KAAK,MACd,eAAe,KAAK,UAAU;;;;SAK9B;GAAC;GAAS;GAAU;GAAY;GAAU,MAAM;GAAO,CACxD;;;;;;;CAQH,MAAM,OAAO,KAA4B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AACjC,QAAM,KAAK,KAAK,MAAM,eAAe,KAAK,UAAU,uBAAuB,CACzE,QACD,CAAC;;;CAIJ,MAAM,QAAuB;AAC3B,QAAM,KAAK,mBAAmB;AAC9B,QAAM,KAAK,KAAK,MAAM,kBAAkB,KAAK,YAAY;;;;;;;CAQ3D,MAAM,IAAI,KAA+B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AAOjC,UALe,MAAM,KAAK,KAAK,MAC7B,+BAA+B,KAAK,UAAU,kCAC9C,CAAC,QAAQ,CACV,EAEa,KAAK,IAAI,UAAU;;;;;;CAOnC,MAAM,OAAwB;AAC5B,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,iCAAiC,KAAK,YACvC;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAM,aAA8B;AAClC,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,oDAAoD,KAAK,YAC1D;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;;;;CAOnD,eAAwB;AACtB,SAAO;;;;;;CAOT,MAAM,cAAgC;AACpC,MAAI;AACF,SAAM,KAAK,KAAK,MAAM,WAAW;AACjC,UAAO;UACD;AACN,UAAO;;;;CAKX,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,MAAM,iBAAkC;AACtC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,gCAAgC,KAAK,UAAU,wEAC/C,CAAC,KAAK,KAAK,CAAC,CACb;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAc,YAAY,eAAsC;AAE9D,MADsB,MAAM,KAAK,gBAAgB,GAC7B,GAElB;OADqB,MAAM,KAAK,YAAY,GACzB,iBAAiB,KAAK,SACvC;;AAIJ,QAAM,KAAK,KAAK,MACd,eAAe,KAAK,UAAU;8BACN,KAAK,UAAU,wCACvC,CAAC,KAAK,kBAAkB,CACzB;;;CAIH,MAAc,oBAAmC;AAC/C,MAAI,CAAC,KAAK,YACR,OAAM,KAAK,YAAY;;;CAK3B,AAAQ,QAAQ,KAAqB;AACnC,MAAI,CAAC,IAAK,OAAM,gBAAgB,aAAa,MAAM;AAEnD,SADa,WAAW,SAAS,CAAC,OAAO,IAAI,CAAC,QAAQ,CAC1C,eAAe,EAAE;;;CAI/B,AAAQ,eAAkB,OAAkB;AAC1C,SAAO,OAAO,KAAK,KAAK,UAAU,MAAM,EAAE,QAAQ;;;CAIpD,AAAQ,iBAAoB,QAAmB;AAC7C,SAAO,KAAK,MAAM,OAAO,SAAS,QAAQ,CAAC;;;CAI7C,MAAc,gBAA+B;AAC3C,MAAI;AACF,SAAM,KAAK,KAAK,MAAM;yCACa,KAAK,UAAU;;;;;;;;;;cAU1C;AAGR,SAAM,KAAK,KAAK,MACd,yCAAyC,KAAK,UAAU,eAAe,KAAK,UAAU,cACvF;AAGD,SAAM,KAAK,KAAK,MACd,kCAAkC,KAAK,UAAU,aAAa,KAAK,UAAU,aAC9E;AAGD,SAAM,KAAK,KAAK,MACd,kCAAkC,KAAK,UAAU,oBAAoB,KAAK,UAAU,oBACrF;AAGD,SAAM,KAAK,KAAK,MACd,kCAAkC,KAAK,UAAU,gBAAgB,KAAK,UAAU,gBACjF;WACM,OAAO;AACd,UAAO,MACL,0DACA,MACD;AACD,SAAM,oBAAoB,gBAAgB,MAAe"}
+
{"version":3,"file":"persistent.js","names":[],"sources":["../../../src/cache/storage/persistent.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport type pg from \"pg\";\nimport type { CacheConfig, CacheEntry, CacheStorage } from \"shared\";\nimport { InitializationError, ValidationError } from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { lakebaseStorageDefaults } from \"./defaults\";\n\nconst logger = createLogger(\"cache:persistent\");\n\n/**\n * Persistent cache storage implementation. Uses a least recently used (LRU) eviction policy\n * to manage memory usage and ensure efficient cache operations.\n *\n * @example\n * const pool = createLakebasePool({ workspaceClient });\n * const persistentStorage = new PersistentStorage(config, pool);\n * await persistentStorage.initialize();\n * await persistentStorage.get(\"my-key\");\n * await persistentStorage.set(\"my-key\", \"my-value\");\n * await persistentStorage.delete(\"my-key\");\n * await persistentStorage.clear();\n * await persistentStorage.has(\"my-key\");\n *\n */\nexport class PersistentStorage implements CacheStorage {\n private readonly pool: pg.Pool;\n private readonly schemaName: string;\n private readonly tableName: string;\n private readonly qualifiedTableName: string;\n private readonly maxBytes: number;\n private readonly maxEntryBytes: number;\n private readonly evictionBatchSize: number;\n private readonly evictionCheckProbability: number;\n private initialized: boolean;\n\n constructor(config: CacheConfig, pool: pg.Pool) {\n this.pool = pool;\n this.maxBytes = config.maxBytes ?? lakebaseStorageDefaults.maxBytes;\n this.maxEntryBytes =\n config.maxEntryBytes ?? lakebaseStorageDefaults.maxEntryBytes;\n this.evictionBatchSize = lakebaseStorageDefaults.evictionBatchSize;\n this.evictionCheckProbability =\n config.evictionCheckProbability ??\n lakebaseStorageDefaults.evictionCheckProbability;\n this.schemaName = lakebaseStorageDefaults.schemaName;\n this.tableName = lakebaseStorageDefaults.tableName;\n this.qualifiedTableName = `${this.schemaName}.${this.tableName}`;\n this.initialized = false;\n }\n\n /** Initialize the persistent storage and run migrations if necessary */\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n try {\n await this.runMigrations();\n this.initialized = true;\n } catch (error) {\n logger.error(\"Error in persistent storage initialization: %O\", error);\n throw error;\n }\n }\n\n /**\n * Get a cached value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the cached value or null if not found\n */\n async get<T>(key: string): Promise<CacheEntry<T> | null> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n\n const result = await this.pool.query<{\n value: Buffer;\n expiry: string;\n }>(\n `SELECT value, expiry FROM ${this.qualifiedTableName} WHERE key_hash = $1`,\n [keyHash],\n );\n\n if (result.rows.length === 0) return null;\n\n const entry = result.rows[0];\n\n // fire-and-forget update\n this.pool\n .query(\n `UPDATE ${this.qualifiedTableName} SET last_accessed = NOW() WHERE key_hash = $1`,\n [keyHash],\n )\n .catch(() => {\n logger.debug(\"Error updating last_accessed time for key: %s\", key);\n });\n\n return {\n value: this.deserializeValue<T>(entry.value),\n expiry: Number(entry.expiry),\n };\n }\n\n /**\n * Set a value in the persistent storage\n * @param key - Cache key\n * @param entry - Cache entry\n * @returns Promise of the result\n */\n async 
set<T>(key: string, entry: CacheEntry<T>): Promise<void> {\n await this.ensureInitialized();\n\n const keyHash = this.hashKey(key);\n const keyBytes = Buffer.from(key, \"utf-8\");\n const valueBytes = this.serializeValue(entry.value);\n const byteSize = keyBytes.length + valueBytes.length;\n\n if (byteSize > this.maxEntryBytes) {\n throw ValidationError.invalidValue(\n \"cache entry size\",\n byteSize,\n `maximum ${this.maxEntryBytes} bytes`,\n );\n }\n\n // probabilistic eviction check\n if (Math.random() < this.evictionCheckProbability) {\n const totalBytes = await this.totalBytes();\n if (totalBytes + byteSize > this.maxBytes) {\n await this.evictBySize(byteSize);\n }\n }\n\n await this.pool.query(\n `INSERT INTO ${this.qualifiedTableName} (key_hash, key, value, byte_size, expiry, created_at, last_accessed)\n VALUES ($1, $2, $3, $4, $5, NOW(), NOW())\n ON CONFLICT (key_hash)\n DO UPDATE SET value = $3, byte_size = $4, expiry = $5, last_accessed = NOW()\n `,\n [keyHash, keyBytes, valueBytes, byteSize, entry.expiry],\n );\n }\n\n /**\n * Delete a value from the persistent storage\n * @param key - Cache key\n * @returns Promise of the result\n */\n async delete(key: string): Promise<void> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n await this.pool.query(\n `DELETE FROM ${this.qualifiedTableName} WHERE key_hash = $1`,\n [keyHash],\n );\n }\n\n /** Clear the persistent storage */\n async clear(): Promise<void> {\n await this.ensureInitialized();\n await this.pool.query(`TRUNCATE TABLE ${this.qualifiedTableName}`);\n }\n\n /**\n * Check if a value exists in the persistent storage\n * @param key - Cache key\n * @returns Promise of true if the value exists, false otherwise\n */\n async has(key: string): Promise<boolean> {\n await this.ensureInitialized();\n const keyHash = this.hashKey(key);\n\n const result = await this.pool.query<{ exists: boolean }>(\n `SELECT EXISTS(SELECT 1 FROM ${this.qualifiedTableName} WHERE key_hash = $1) as exists`,\n [keyHash],\n );\n\n return result.rows[0]?.exists ?? false;\n }\n\n /**\n * Get the size of the persistent storage\n * @returns Promise of the size of the storage\n */\n async size(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.pool.query<{ count: string }>(\n `SELECT COUNT(*) as count FROM ${this.qualifiedTableName}`,\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Get the total number of bytes in the persistent storage */\n async totalBytes(): Promise<number> {\n await this.ensureInitialized();\n\n const result = await this.pool.query<{ total: string }>(\n `SELECT COALESCE(SUM(byte_size), 0) as total FROM ${this.qualifiedTableName}`,\n );\n return parseInt(result.rows[0]?.total ?? 
\"0\", 10);\n }\n\n /**\n * Check if the persistent storage is persistent\n * @returns true if the storage is persistent, false otherwise\n */\n isPersistent(): boolean {\n return true;\n }\n\n /**\n * Check if the persistent storage is healthy\n * @returns Promise of true if the storage is healthy, false otherwise\n */\n async healthCheck(): Promise<boolean> {\n try {\n await this.pool.query(\"SELECT 1\");\n return true;\n } catch {\n return false;\n }\n }\n\n /** Close the persistent storage */\n async close(): Promise<void> {\n await this.pool.end();\n }\n\n /**\n * Cleanup expired entries from the persistent storage\n * @returns Promise of the number of expired entries\n */\n async cleanupExpired(): Promise<number> {\n await this.ensureInitialized();\n const result = await this.pool.query<{ count: string }>(\n `WITH deleted as (DELETE FROM ${this.qualifiedTableName} WHERE expiry < $1 RETURNING *) SELECT COUNT(*) as count FROM deleted`,\n [Date.now()],\n );\n return parseInt(result.rows[0]?.count ?? \"0\", 10);\n }\n\n /** Evict entries from the persistent storage by size */\n private async evictBySize(requiredBytes: number): Promise<void> {\n const freedByExpiry = await this.cleanupExpired();\n if (freedByExpiry > 0) {\n const currentBytes = await this.totalBytes();\n if (currentBytes + requiredBytes <= this.maxBytes) {\n return;\n }\n }\n\n await this.pool.query(\n `DELETE FROM ${this.qualifiedTableName} WHERE key_hash IN\n (SELECT key_hash FROM ${this.qualifiedTableName} ORDER BY last_accessed ASC LIMIT $1)`,\n [this.evictionBatchSize],\n );\n }\n\n /** Ensure the persistent storage is initialized */\n private async ensureInitialized(): Promise<void> {\n if (!this.initialized) {\n await this.initialize();\n }\n }\n\n /** Generate a 64-bit hash for the cache key using SHA256 */\n private hashKey(key: string): bigint {\n if (!key) throw ValidationError.missingField(\"key\");\n const hash = createHash(\"sha256\").update(key).digest();\n return hash.readBigInt64BE(0);\n }\n\n /** Serialize a value to a buffer */\n private serializeValue<T>(value: T): Buffer {\n return Buffer.from(JSON.stringify(value), \"utf-8\");\n }\n\n /** Deserialize a value from a buffer */\n private deserializeValue<T>(buffer: Buffer): T {\n return JSON.parse(buffer.toString(\"utf-8\")) as T;\n }\n\n /** Run migrations for the persistent storage */\n private async runMigrations(): Promise<void> {\n const steps = [\n {\n name: \"create schema\",\n query: `CREATE SCHEMA IF NOT EXISTS ${this.schemaName}`,\n },\n {\n name: \"create table\",\n query: `CREATE TABLE IF NOT EXISTS ${this.qualifiedTableName} (\n id BIGSERIAL PRIMARY KEY,\n key_hash BIGINT NOT NULL,\n key BYTEA NOT NULL,\n value BYTEA NOT NULL,\n byte_size INTEGER NOT NULL,\n expiry BIGINT NOT NULL,\n created_at TIMESTAMP NOT NULL DEFAULT NOW(),\n last_accessed TIMESTAMP NOT NULL DEFAULT NOW()\n )`,\n },\n {\n name: \"create index (key_hash)\",\n query: `CREATE UNIQUE INDEX IF NOT EXISTS idx_${this.tableName}_key_hash ON ${this.qualifiedTableName} (key_hash)`,\n },\n {\n name: \"create index (expiry)\",\n query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_expiry ON ${this.qualifiedTableName} (expiry)`,\n },\n {\n name: \"create index (last_accessed)\",\n query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_last_accessed ON ${this.qualifiedTableName} (last_accessed)`,\n },\n {\n name: \"create index (byte_size)\",\n query: `CREATE INDEX IF NOT EXISTS idx_${this.tableName}_byte_size ON ${this.qualifiedTableName} (byte_size)`,\n },\n ];\n\n for (const 
step of steps) {\n try {\n await this.pool.query(step.query);\n } catch (error) {\n logger.error(\"Migration step '%s' failed: %O\", step.name, error);\n throw InitializationError.migrationFailed(error as Error);\n }\n }\n }\n}\n"],"mappings":";;;;;;;;aAGoE;AAIpE,MAAM,SAAS,aAAa,mBAAmB;;;;;;;;;;;;;;;;AAiB/C,IAAa,oBAAb,MAAuD;CACrD,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAiB;CACjB,AAAQ;CAER,YAAY,QAAqB,MAAe;AAC9C,OAAK,OAAO;AACZ,OAAK,WAAW,OAAO,YAAY,wBAAwB;AAC3D,OAAK,gBACH,OAAO,iBAAiB,wBAAwB;AAClD,OAAK,oBAAoB,wBAAwB;AACjD,OAAK,2BACH,OAAO,4BACP,wBAAwB;AAC1B,OAAK,aAAa,wBAAwB;AAC1C,OAAK,YAAY,wBAAwB;AACzC,OAAK,qBAAqB,GAAG,KAAK,WAAW,GAAG,KAAK;AACrD,OAAK,cAAc;;;CAIrB,MAAM,aAA4B;AAChC,MAAI,KAAK,YAAa;AAEtB,MAAI;AACF,SAAM,KAAK,eAAe;AAC1B,QAAK,cAAc;WACZ,OAAO;AACd,UAAO,MAAM,kDAAkD,MAAM;AACrE,SAAM;;;;;;;;CASV,MAAM,IAAO,KAA4C;AACvD,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EAEjC,MAAM,SAAS,MAAM,KAAK,KAAK,MAI7B,6BAA6B,KAAK,mBAAmB,uBACrD,CAAC,QAAQ,CACV;AAED,MAAI,OAAO,KAAK,WAAW,EAAG,QAAO;EAErC,MAAM,QAAQ,OAAO,KAAK;AAG1B,OAAK,KACF,MACC,UAAU,KAAK,mBAAmB,iDAClC,CAAC,QAAQ,CACV,CACA,YAAY;AACX,UAAO,MAAM,iDAAiD,IAAI;IAClE;AAEJ,SAAO;GACL,OAAO,KAAK,iBAAoB,MAAM,MAAM;GAC5C,QAAQ,OAAO,MAAM,OAAO;GAC7B;;;;;;;;CASH,MAAM,IAAO,KAAa,OAAqC;AAC7D,QAAM,KAAK,mBAAmB;EAE9B,MAAM,UAAU,KAAK,QAAQ,IAAI;EACjC,MAAM,WAAW,OAAO,KAAK,KAAK,QAAQ;EAC1C,MAAM,aAAa,KAAK,eAAe,MAAM,MAAM;EACnD,MAAM,WAAW,SAAS,SAAS,WAAW;AAE9C,MAAI,WAAW,KAAK,cAClB,OAAM,gBAAgB,aACpB,oBACA,UACA,WAAW,KAAK,cAAc,QAC/B;AAIH,MAAI,KAAK,QAAQ,GAAG,KAAK,0BAEvB;OADmB,MAAM,KAAK,YAAY,GACzB,WAAW,KAAK,SAC/B,OAAM,KAAK,YAAY,SAAS;;AAIpC,QAAM,KAAK,KAAK,MACd,eAAe,KAAK,mBAAmB;;;;SAKvC;GAAC;GAAS;GAAU;GAAY;GAAU,MAAM;GAAO,CACxD;;;;;;;CAQH,MAAM,OAAO,KAA4B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AACjC,QAAM,KAAK,KAAK,MACd,eAAe,KAAK,mBAAmB,uBACvC,CAAC,QAAQ,CACV;;;CAIH,MAAM,QAAuB;AAC3B,QAAM,KAAK,mBAAmB;AAC9B,QAAM,KAAK,KAAK,MAAM,kBAAkB,KAAK,qBAAqB;;;;;;;CAQpE,MAAM,IAAI,KAA+B;AACvC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,UAAU,KAAK,QAAQ,IAAI;AAOjC,UALe,MAAM,KAAK,KAAK,MAC7B,+BAA+B,KAAK,mBAAmB,kCACvD,CAAC,QAAQ,CACV,EAEa,KAAK,IAAI,UAAU;;;;;;CAOnC,MAAM,OAAwB;AAC5B,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,iCAAiC,KAAK,qBACvC;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAM,aAA8B;AAClC,QAAM,KAAK,mBAAmB;EAE9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,oDAAoD,KAAK,qBAC1D;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;;;;CAOnD,eAAwB;AACtB,SAAO;;;;;;CAOT,MAAM,cAAgC;AACpC,MAAI;AACF,SAAM,KAAK,KAAK,MAAM,WAAW;AACjC,UAAO;UACD;AACN,UAAO;;;;CAKX,MAAM,QAAuB;AAC3B,QAAM,KAAK,KAAK,KAAK;;;;;;CAOvB,MAAM,iBAAkC;AACtC,QAAM,KAAK,mBAAmB;EAC9B,MAAM,SAAS,MAAM,KAAK,KAAK,MAC7B,gCAAgC,KAAK,mBAAmB,wEACxD,CAAC,KAAK,KAAK,CAAC,CACb;AACD,SAAO,SAAS,OAAO,KAAK,IAAI,SAAS,KAAK,GAAG;;;CAInD,MAAc,YAAY,eAAsC;AAE9D,MADsB,MAAM,KAAK,gBAAgB,GAC7B,GAElB;OADqB,MAAM,KAAK,YAAY,GACzB,iBAAiB,KAAK,SACvC;;AAIJ,QAAM,KAAK,KAAK,MACd,eAAe,KAAK,mBAAmB;8BACf,KAAK,mBAAmB,wCAChD,CAAC,KAAK,kBAAkB,CACzB;;;CAIH,MAAc,oBAAmC;AAC/C,MAAI,CAAC,KAAK,YACR,OAAM,KAAK,YAAY;;;CAK3B,AAAQ,QAAQ,KAAqB;AACnC,MAAI,CAAC,IAAK,OAAM,gBAAgB,aAAa,MAAM;AAEnD,SADa,WAAW,SAAS,CAAC,OAAO,IAAI,CAAC,QAAQ,CAC1C,eAAe,EAAE;;;CAI/B,AAAQ,eAAkB,OAAkB;AAC1C,SAAO,OAAO,KAAK,KAAK,UAAU,MAAM,EAAE,QAAQ;;;CAIpD,AAAQ,iBAAoB,QAAmB;AAC7C,SAAO,KAAK,MAAM,OAAO,SAAS,QAAQ,CAAC;;;CAI7C,MAAc,gBAA+B;EAC3C,MAAM,QAAQ;GACZ;IACE,MAAM;IACN,OAAO,+BAA+B,KAAK;IAC5C;GACD;IACE,MAAM;IACN,OAAO,8BAA8B,KAAK,mBAAmB;;;;;;;;;;IAU9D;GACD;IACE,MAAM;IACN,OAAO,yCAAyC,KAAK,UAAU,eAAe,KAAK,mBAAmB;IACvG;GACD;IACE,MAAM;IACN,OAAO,kCAAkC,KAAK,UAAU,aAAa,KAAK,mBAAmB;IAC9F;GACD;IACE,MAAM;IACN,OAAO,kC
AAkC,KAAK,UAAU,oBAAoB,KAAK,mBAAmB;IACrG;GACD;IACE,MAAM;IACN,OAAO,kCAAkC,KAAK,UAAU,gBAAgB,KAAK,mBAAmB;IACjG;GACF;AAED,OAAK,MAAM,QAAQ,MACjB,KAAI;AACF,SAAM,KAAK,KAAK,MAAM,KAAK,MAAM;WAC1B,OAAO;AACd,UAAO,MAAM,kCAAkC,KAAK,MAAM,MAAM;AAChE,SAAM,oBAAoB,gBAAgB,MAAe"}
package/dist/plugins/lakebase/manifest.js
CHANGED
@@ -4,10 +4,56 @@ var manifest_default = {
 name: "lakebase",
 displayName: "Lakebase",
 description: "SQL query execution against Databricks Lakebase Autoscaling",
-onSetupMessage: "Configure environment variables before running or deploying the app.\nSee: https://databricks.github.io/appkit/docs/plugins/lakebase",
 hidden: false,
 resources: {
-"required": [
+"required": [{
+"type": "postgres",
+"alias": "Postgres",
+"resourceKey": "postgres",
+"description": "Lakebase Postgres database for persistent storage",
+"permission": "CAN_CONNECT_AND_CREATE",
+"fields": {
+"branch": {
+"description": "Full Lakebase Postgres branch resource name. Obtain by running `databricks postgres list-branches projects/{project-id}`, select the desired item from the output array and use its .name value.",
+"examples": ["projects/{project-id}/branches/{branch-id}"]
+},
+"database": {
+"description": "Full Lakebase Postgres database resource name. Obtain by running `databricks postgres list-databases {branch-name}`, select the desired item from the output array and use its .name value. Requires the branch resource name.",
+"examples": ["projects/{project-id}/branches/{branch-id}/databases/{database-id}"]
+},
+"host": {
+"env": "PGHOST",
+"localOnly": true,
+"resolve": "postgres:host",
+"description": "Postgres host for local development. Auto-injected by the platform at deploy time."
+},
+"databaseName": {
+"env": "PGDATABASE",
+"localOnly": true,
+"resolve": "postgres:databaseName",
+"description": "Postgres database name for local development. Auto-injected by the platform at deploy time."
+},
+"endpointPath": {
+"env": "LAKEBASE_ENDPOINT",
+"bundleIgnore": true,
+"resolve": "postgres:endpointPath",
+"description": "Lakebase endpoint resource name. Auto-injected at runtime via app.yaml valueFrom: postgres. For local development, obtain by running `databricks postgres list-endpoints {branch-name}`, select the desired item from the output array and use its .name value.",
+"examples": ["projects/{project-id}/branches/{branch-id}/endpoints/{endpoint-id}"]
+},
+"port": {
+"env": "PGPORT",
+"localOnly": true,
+"value": "5432",
+"description": "Postgres port. Auto-injected by the platform at deploy time."
+},
+"sslmode": {
+"env": "PGSSLMODE",
+"localOnly": true,
+"value": "require",
+"description": "Postgres SSL mode. Auto-injected by the platform at deploy time."
+}
+}
+}],
 "optional": []
 }
 };
package/dist/registry/resource-registry.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"resource-registry.d.ts","names":[],"sources":["../../src/registry/resource-registry.ts"],"mappings":";;;;;;;;;cAqDa,gBAAA;EAAA,QACH,SAAA;
+
{"version":3,"file":"resource-registry.d.ts","names":[],"sources":["../../src/registry/resource-registry.ts"],"mappings":";;;;;;;;;cAqDa,gBAAA;EAAA,QACH,SAAA;EAiYsC;;;;;;;;;;;;EAnXvC,QAAA,CAAS,MAAA,UAAgB,QAAA,EAAU,mBAAA;EAiKnC;;;;;;;EAtIA,gBAAA,CACL,UAAA,EAAY,UAAA,CAAW,iBAAA;EAyKlB;;;;EAAA,QAjIC,cAAA;EAqJD;;;;;;EAxDA,MAAA,CAAA,GAAU,aAAA;EA6L6B;;;;;;;EAlLvC,GAAA,CAAI,IAAA,UAAc,WAAA,WAAsB,aAAA;;;;;EAQxC,KAAA,CAAA;;;;EAOA,IAAA,CAAA;;;;;;;EAUA,WAAA,CAAY,UAAA,WAAqB,aAAA;;;;;;EAWjC,WAAA,CAAA,GAAe,aAAA;;;;;;EASf,WAAA,CAAA,GAAe,aAAA;;;;;;;;;;;;;;;;;;;;;;EAyBf,QAAA,CAAA,GAAY,gBAAA;;;;;;;;;;;;EA+DZ,iBAAA,CAAA,GAAqB,gBAAA;;;;;;;SA6Cd,sBAAA,CAAuB,OAAA,EAAS,aAAA;;;;;;;;SAqBhC,sBAAA,CAAuB,OAAA,EAAS,aAAA;AAAA"}
package/dist/registry/resource-registry.js
CHANGED
@@ -203,6 +203,7 @@ var ResourceRegistry = class ResourceRegistry {
 const values = {};
 let allSet = true;
 for (const [fieldName, fieldDef] of Object.entries(entry.fields)) {
+if (!fieldDef.env) continue;
 const val = process.env[fieldDef.env];
 if (val !== void 0 && val !== "") values[fieldName] = val;
 else allSet = false;
package/dist/registry/resource-registry.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"resource-registry.js","names":[],"sources":["../../src/registry/resource-registry.ts"],"sourcesContent":["/**\n * Resource Registry\n *\n * Central registry that tracks all resource requirements across all plugins.\n * Provides visibility into Databricks resources needed by the application\n * and handles deduplication when multiple plugins require the same resource\n * (dedup key: type + resourceKey).\n *\n * Use `new ResourceRegistry()` for instance-scoped usage (e.g. createApp).\n * getInstance() / resetInstance() remain for backward compatibility in tests.\n */\n\nimport type { BasePluginConfig, PluginConstructor, PluginData } from \"shared\";\nimport { ConfigurationError } from \"../errors\";\nimport { createLogger } from \"../logging/logger\";\nimport { getPluginManifest } from \"./manifest-loader\";\nimport type {\n ResourceEntry,\n ResourcePermission,\n ResourceRequirement,\n ValidationResult,\n} from \"./types\";\nimport { PERMISSION_HIERARCHY_BY_TYPE, type ResourceType } from \"./types\";\n\nconst logger = createLogger(\"resource-registry\");\n\n/**\n * Dedup key for registry: type + resourceKey (machine-stable).\n * alias is for UI/display only.\n */\nfunction getDedupKey(type: string, resourceKey: string): string {\n return `${type}:${resourceKey}`;\n}\n\n/**\n * Returns the most permissive permission for a given resource type.\n * Uses per-type hierarchy; unknown permissions are treated as least permissive.\n */\nfunction getMostPermissivePermission(\n resourceType: ResourceType,\n p1: ResourcePermission,\n p2: ResourcePermission,\n): ResourcePermission {\n const hierarchy = PERMISSION_HIERARCHY_BY_TYPE[resourceType as ResourceType];\n const index1 = hierarchy?.indexOf(p1) ?? -1;\n const index2 = hierarchy?.indexOf(p2) ?? -1;\n return index1 > index2 ? 
p1 : p2;\n}\n\n/**\n * Central registry for tracking plugin resource requirements.\n * Deduplication uses type + resourceKey (machine-stable); alias is for display only.\n */\nexport class ResourceRegistry {\n private resources: Map<string, ResourceEntry> = new Map();\n\n /**\n * Registers a resource requirement for a plugin.\n * If a resource with the same type+resourceKey already exists, merges them:\n * - Combines plugin names (comma-separated)\n * - Uses the most permissive permission (per-type hierarchy)\n * - Marks as required if any plugin requires it\n * - Combines descriptions if they differ\n * - Merges fields; warns when same field name uses different env vars\n *\n * @param plugin - Name of the plugin registering the resource\n * @param resource - Resource requirement specification\n */\n public register(plugin: string, resource: ResourceRequirement): void {\n const key = getDedupKey(resource.type, resource.resourceKey);\n const existing = this.resources.get(key);\n\n if (existing) {\n // Merge with existing resource\n const merged = this.mergeResources(existing, plugin, resource);\n this.resources.set(key, merged);\n } else {\n // Create new resource entry with permission source tracking\n const entry: ResourceEntry = {\n ...resource,\n plugin,\n resolved: false,\n permissionSources: { [plugin]: resource.permission },\n };\n this.resources.set(key, entry);\n }\n }\n\n /**\n * Collects and registers resource requirements from an array of plugins.\n * For each plugin, loads its manifest (required) and runtime resource requirements.\n *\n * @param rawPlugins - Array of plugin data entries from createApp configuration\n * @throws {ConfigurationError} If any plugin is missing a manifest or manifest is invalid\n */\n public collectResources(\n rawPlugins: PluginData<PluginConstructor, unknown, string>[],\n ): void {\n for (const pluginData of rawPlugins) {\n if (!pluginData?.plugin) continue;\n\n const pluginName = pluginData.name;\n const manifest = getPluginManifest(pluginData.plugin);\n\n // Register required resources\n for (const resource of manifest.resources.required) {\n this.register(pluginName, { ...resource, required: true });\n }\n\n // Register optional resources\n for (const resource of manifest.resources.optional || []) {\n this.register(pluginName, { ...resource, required: false });\n }\n\n // Check for runtime resource requirements\n if (typeof pluginData.plugin.getResourceRequirements === \"function\") {\n const runtimeResources = pluginData.plugin.getResourceRequirements(\n pluginData.config as BasePluginConfig,\n );\n for (const resource of runtimeResources) {\n this.register(pluginName, resource as ResourceRequirement);\n }\n }\n\n logger.debug(\n \"Collected resources from plugin %s: %d total\",\n pluginName,\n this.getByPlugin(pluginName).length,\n );\n }\n }\n\n /**\n * Merges a new resource requirement with an existing entry.\n * Applies intelligent merging logic for conflicting properties.\n */\n private mergeResources(\n existing: ResourceEntry,\n newPlugin: string,\n newResource: ResourceRequirement,\n ): ResourceEntry {\n // Combine plugin names if not already included\n const plugins = existing.plugin.split(\", \");\n if (!plugins.includes(newPlugin)) {\n plugins.push(newPlugin);\n }\n\n // Track per-plugin permission sources\n const permissionSources: Record<string, ResourcePermission> = {\n ...(existing.permissionSources ?? 
{}),\n [newPlugin]: newResource.permission,\n };\n\n // Use the most permissive permission for this resource type; warn when escalating\n const permission = getMostPermissivePermission(\n existing.type as ResourceType,\n existing.permission,\n newResource.permission,\n );\n\n if (permission !== existing.permission) {\n logger.warn(\n 'Resource %s:%s permission escalated from \"%s\" to \"%s\" due to plugin \"%s\" ' +\n \"(previously requested by: %s). Review plugin permissions to ensure least-privilege.\",\n existing.type,\n existing.resourceKey,\n existing.permission,\n permission,\n newPlugin,\n existing.plugin,\n );\n }\n\n // Mark as required if any plugin requires it\n const required = existing.required || newResource.required;\n\n // Combine descriptions if they differ\n let description = existing.description;\n if (\n newResource.description &&\n newResource.description !== existing.description\n ) {\n if (!existing.description.includes(newResource.description)) {\n description = `${existing.description}; ${newResource.description}`;\n }\n }\n\n // Merge fields: union of field names; warn when same field name uses different env\n const fields = { ...(existing.fields ?? {}) };\n for (const [fieldName, newField] of Object.entries(\n newResource.fields ?? {},\n )) {\n const existingField = fields[fieldName];\n if (existingField) {\n if (existingField.env !== newField.env) {\n logger.warn(\n 'Resource %s:%s field \"%s\": conflicting env vars \"%s\" (from %s) vs \"%s\" (from %s). Using first.',\n existing.type,\n existing.resourceKey,\n fieldName,\n existingField.env,\n existing.plugin,\n newField.env,\n newPlugin,\n );\n }\n // keep existing\n } else {\n fields[fieldName] = newField;\n }\n }\n\n return {\n ...existing,\n plugin: plugins.join(\", \"),\n permission,\n permissionSources,\n required,\n description,\n fields,\n };\n }\n\n /**\n * Retrieves all registered resources.\n * Returns a copy of the array to prevent external mutations.\n *\n * @returns Array of all registered resource entries\n */\n public getAll(): ResourceEntry[] {\n return Array.from(this.resources.values());\n }\n\n /**\n * Gets a specific resource by type and resourceKey (dedup key).\n *\n * @param type - Resource type\n * @param resourceKey - Stable machine key (not alias; alias is for display only)\n * @returns The resource entry if found, undefined otherwise\n */\n public get(type: string, resourceKey: string): ResourceEntry | undefined {\n return this.resources.get(getDedupKey(type, resourceKey));\n }\n\n /**\n * Clears all registered resources.\n * Useful for testing or when rebuilding the registry.\n */\n public clear(): void {\n this.resources.clear();\n }\n\n /**\n * Returns the number of registered resources.\n */\n public size(): number {\n return this.resources.size;\n }\n\n /**\n * Gets all resources required by a specific plugin.\n *\n * @param pluginName - Name of the plugin\n * @returns Array of resources where the plugin is listed as a requester\n */\n public getByPlugin(pluginName: string): ResourceEntry[] {\n return this.getAll().filter((entry) =>\n entry.plugin.split(\", \").includes(pluginName),\n );\n }\n\n /**\n * Gets all required resources (where required=true).\n *\n * @returns Array of required resource entries\n */\n public getRequired(): ResourceEntry[] {\n return this.getAll().filter((entry) => entry.required);\n }\n\n /**\n * Gets all optional resources (where required=false).\n *\n * @returns Array of optional resource entries\n */\n public getOptional(): ResourceEntry[] {\n return 
this.getAll().filter((entry) => !entry.required);\n }\n\n /**\n * Validates all registered resources against the environment.\n *\n * Checks each resource's field environment variables to determine if it's resolved.\n * Updates the `resolved` and `values` fields on each resource entry.\n *\n * Only required resources affect the `valid` status - optional resources\n * are checked but don't cause validation failure.\n *\n * @returns ValidationResult with validity status, missing resources, and all resources\n *\n * @example\n * ```typescript\n * const registry = ResourceRegistry.getInstance();\n * const result = registry.validate();\n *\n * if (!result.valid) {\n * console.error(\"Missing resources:\", result.missing.map(r => Object.values(r.fields).map(f => f.env)));\n * }\n * ```\n */\n public validate(): ValidationResult {\n const missing: ResourceEntry[] = [];\n\n for (const entry of this.resources.values()) {\n const values: Record<string, string> = {};\n let allSet = true;\n for (const [fieldName, fieldDef] of Object.entries(entry.fields)) {\n const val = process.env[fieldDef.env];\n if (val !== undefined && val !== \"\") {\n values[fieldName] = val;\n } else {\n allSet = false;\n }\n }\n if (allSet) {\n entry.resolved = true;\n entry.values = values;\n logger.debug(\n \"Resource %s:%s resolved from fields\",\n entry.type,\n entry.alias,\n );\n } else {\n entry.resolved = false;\n entry.values = Object.keys(values).length > 0 ? values : undefined;\n if (entry.required) {\n missing.push(entry);\n logger.debug(\n \"Required resource %s:%s missing (fields: %s)\",\n entry.type,\n entry.alias,\n Object.keys(entry.fields).join(\", \"),\n );\n } else {\n logger.debug(\n \"Optional resource %s:%s not configured (fields: %s)\",\n entry.type,\n entry.alias,\n Object.keys(entry.fields).join(\", \"),\n );\n }\n }\n }\n\n return {\n valid: missing.length === 0,\n missing,\n all: this.getAll(),\n };\n }\n\n /**\n * Validates all registered resources and enforces the result.\n *\n * - In production: throws a {@link ConfigurationError} if any required resources are missing.\n * - In development (`NODE_ENV=development`): logs a warning but continues, unless\n * `APPKIT_STRICT_VALIDATION=true` is set, in which case throws like production.\n * - When all resources are valid: logs a debug message with the count.\n *\n * @returns ValidationResult with validity status, missing resources, and all resources\n * @throws {ConfigurationError} In production when required resources are missing, or in dev when APPKIT_STRICT_VALIDATION=true\n */\n public enforceValidation(): ValidationResult {\n const validation = this.validate();\n const isDevelopment = process.env.NODE_ENV === \"development\";\n const strictValidation =\n process.env.APPKIT_STRICT_VALIDATION === \"true\" ||\n process.env.APPKIT_STRICT_VALIDATION === \"1\";\n\n if (!validation.valid) {\n const errorMessage = ResourceRegistry.formatMissingResources(\n validation.missing,\n );\n\n const shouldThrow = !isDevelopment || strictValidation;\n\n if (shouldThrow) {\n throw new ConfigurationError(errorMessage, {\n context: {\n missingResources: validation.missing.map((r) => ({\n type: r.type,\n alias: r.alias,\n plugin: r.plugin,\n envVars: Object.values(r.fields).map((f) => f.env),\n })),\n },\n });\n }\n\n // Dev mode without strict: use a visually prominent box so the warning can't be missed\n const banner = ResourceRegistry.formatDevWarningBanner(\n validation.missing,\n );\n logger.warn(\"\\n%s\", banner);\n } else if (this.size() > 0) {\n 
logger.debug(\"All %d resources validated successfully\", this.size());\n }\n\n return validation;\n }\n\n /**\n * Formats missing resources into a human-readable error message.\n *\n * @param missing - Array of missing resource entries\n * @returns Formatted error message string\n */\n public static formatMissingResources(missing: ResourceEntry[]): string {\n if (missing.length === 0) {\n return \"No missing resources\";\n }\n\n const lines = missing.map((entry) => {\n const envVars = Object.values(entry.fields).map((f) => f.env);\n const envHint = ` (set ${envVars.join(\", \")})`;\n return ` - ${entry.type}:${entry.alias} [${entry.plugin}]${envHint}`;\n });\n\n return `Missing required resources:\\n${lines.join(\"\\n\")}`;\n }\n\n /**\n * Formats a highly visible warning banner for dev-mode missing resources.\n * Uses box drawing to ensure the message is impossible to miss in scrolling logs.\n *\n * @param missing - Array of missing resource entries\n * @returns Formatted banner string\n */\n public static formatDevWarningBanner(missing: ResourceEntry[]): string {\n const contentLines: string[] = [\n \"MISSING REQUIRED RESOURCES (dev mode — would fail in production)\",\n \"\",\n ];\n\n for (const entry of missing) {\n const envVars = Object.values(entry.fields).map((f) => f.env);\n contentLines.push(\n ` ${entry.type}:${entry.alias} (plugin: ${entry.plugin})`,\n );\n contentLines.push(` Set: ${envVars.join(\", \")}`);\n }\n\n contentLines.push(\"\");\n contentLines.push(\n \"Add these to your .env file or environment to suppress this warning.\",\n );\n\n const maxLen = Math.max(...contentLines.map((l) => l.length));\n const border = \"=\".repeat(maxLen + 4);\n\n const boxed = contentLines.map((line) => `| ${line.padEnd(maxLen)} |`);\n\n return [border, ...boxed, border].join(\"\\n\");\n 
}\n}\n"],"mappings":";;;;;;;;aAa+C;AAW/C,MAAM,SAAS,aAAa,oBAAoB;;;;;AAMhD,SAAS,YAAY,MAAc,aAA6B;AAC9D,QAAO,GAAG,KAAK,GAAG;;;;;;AAOpB,SAAS,4BACP,cACA,IACA,IACoB;CACpB,MAAM,YAAY,6BAA6B;AAG/C,SAFe,WAAW,QAAQ,GAAG,IAAI,OAC1B,WAAW,QAAQ,GAAG,IAAI,MAChB,KAAK;;;;;;AAOhC,IAAa,mBAAb,MAAa,iBAAiB;CAC5B,AAAQ,4BAAwC,IAAI,KAAK;;;;;;;;;;;;;CAczD,AAAO,SAAS,QAAgB,UAAqC;EACnE,MAAM,MAAM,YAAY,SAAS,MAAM,SAAS,YAAY;EAC5D,MAAM,WAAW,KAAK,UAAU,IAAI,IAAI;AAExC,MAAI,UAAU;GAEZ,MAAM,SAAS,KAAK,eAAe,UAAU,QAAQ,SAAS;AAC9D,QAAK,UAAU,IAAI,KAAK,OAAO;SAC1B;GAEL,MAAM,QAAuB;IAC3B,GAAG;IACH;IACA,UAAU;IACV,mBAAmB,GAAG,SAAS,SAAS,YAAY;IACrD;AACD,QAAK,UAAU,IAAI,KAAK,MAAM;;;;;;;;;;CAWlC,AAAO,iBACL,YACM;AACN,OAAK,MAAM,cAAc,YAAY;AACnC,OAAI,CAAC,YAAY,OAAQ;GAEzB,MAAM,aAAa,WAAW;GAC9B,MAAM,WAAW,kBAAkB,WAAW,OAAO;AAGrD,QAAK,MAAM,YAAY,SAAS,UAAU,SACxC,MAAK,SAAS,YAAY;IAAE,GAAG;IAAU,UAAU;IAAM,CAAC;AAI5D,QAAK,MAAM,YAAY,SAAS,UAAU,YAAY,EAAE,CACtD,MAAK,SAAS,YAAY;IAAE,GAAG;IAAU,UAAU;IAAO,CAAC;AAI7D,OAAI,OAAO,WAAW,OAAO,4BAA4B,YAAY;IACnE,MAAM,mBAAmB,WAAW,OAAO,wBACzC,WAAW,OACZ;AACD,SAAK,MAAM,YAAY,iBACrB,MAAK,SAAS,YAAY,SAAgC;;AAI9D,UAAO,MACL,gDACA,YACA,KAAK,YAAY,WAAW,CAAC,OAC9B;;;;;;;CAQL,AAAQ,eACN,UACA,WACA,aACe;EAEf,MAAM,UAAU,SAAS,OAAO,MAAM,KAAK;AAC3C,MAAI,CAAC,QAAQ,SAAS,UAAU,CAC9B,SAAQ,KAAK,UAAU;EAIzB,MAAM,oBAAwD;GAC5D,GAAI,SAAS,qBAAqB,EAAE;IACnC,YAAY,YAAY;GAC1B;EAGD,MAAM,aAAa,4BACjB,SAAS,MACT,SAAS,YACT,YAAY,WACb;AAED,MAAI,eAAe,SAAS,WAC1B,QAAO,KACL,sKAEA,SAAS,MACT,SAAS,aACT,SAAS,YACT,YACA,WACA,SAAS,OACV;EAIH,MAAM,WAAW,SAAS,YAAY,YAAY;EAGlD,IAAI,cAAc,SAAS;AAC3B,MACE,YAAY,eACZ,YAAY,gBAAgB,SAAS,aAErC;OAAI,CAAC,SAAS,YAAY,SAAS,YAAY,YAAY,CACzD,eAAc,GAAG,SAAS,YAAY,IAAI,YAAY;;EAK1D,MAAM,SAAS,EAAE,GAAI,SAAS,UAAU,EAAE,EAAG;AAC7C,OAAK,MAAM,CAAC,WAAW,aAAa,OAAO,QACzC,YAAY,UAAU,EAAE,CACzB,EAAE;GACD,MAAM,gBAAgB,OAAO;AAC7B,OAAI,eACF;QAAI,cAAc,QAAQ,SAAS,IACjC,QAAO,KACL,wGACA,SAAS,MACT,SAAS,aACT,WACA,cAAc,KACd,SAAS,QACT,SAAS,KACT,UACD;SAIH,QAAO,aAAa;;AAIxB,SAAO;GACL,GAAG;GACH,QAAQ,QAAQ,KAAK,KAAK;GAC1B;GACA;GACA;GACA;GACA;GACD;;;;;;;;CASH,AAAO,SAA0B;AAC/B,SAAO,MAAM,KAAK,KAAK,UAAU,QAAQ,CAAC;;;;;;;;;CAU5C,AAAO,IAAI,MAAc,aAAgD;AACvE,SAAO,KAAK,UAAU,IAAI,YAAY,MAAM,YAAY,CAAC;;;;;;CAO3D,AAAO,QAAc;AACnB,OAAK,UAAU,OAAO;;;;;CAMxB,AAAO,OAAe;AACpB,SAAO,KAAK,UAAU;;;;;;;;CASxB,AAAO,YAAY,YAAqC;AACtD,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAC3B,MAAM,OAAO,MAAM,KAAK,CAAC,SAAS,WAAW,CAC9C;;;;;;;CAQH,AAAO,cAA+B;AACpC,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAAU,MAAM,SAAS;;;;;;;CAQxD,AAAO,cAA+B;AACpC,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAAU,CAAC,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;CAwBzD,AAAO,WAA6B;EAClC,MAAM,UAA2B,EAAE;AAEnC,OAAK,MAAM,SAAS,KAAK,UAAU,QAAQ,EAAE;GAC3C,MAAM,SAAiC,EAAE;GACzC,IAAI,SAAS;AACb,QAAK,MAAM,CAAC,WAAW,aAAa,OAAO,QAAQ,MAAM,OAAO,EAAE;IAChE,MAAM,MAAM,QAAQ,IAAI,SAAS;AACjC,QAAI,QAAQ,UAAa,QAAQ,GAC/B,QAAO,aAAa;QAEpB,UAAS;;AAGb,OAAI,QAAQ;AACV,UAAM,WAAW;AACjB,UAAM,SAAS;AACf,WAAO,MACL,uCACA,MAAM,MACN,MAAM,MACP;UACI;AACL,UAAM,WAAW;AACjB,UAAM,SAAS,OAAO,KAAK,OAAO,CAAC,SAAS,IAAI,SAAS;AACzD,QAAI,MAAM,UAAU;AAClB,aAAQ,KAAK,MAAM;AACnB,YAAO,MACL,gDACA,MAAM,MACN,MAAM,OACN,OAAO,KAAK,MAAM,OAAO,CAAC,KAAK,KAAK,CACrC;UAED,QAAO,MACL,uDACA,MAAM,MACN,MAAM,OACN,OAAO,KAAK,MAAM,OAAO,CAAC,KAAK,KAAK,CACrC;;;AAKP,SAAO;GACL,OAAO,QAAQ,WAAW;GAC1B;GACA,KAAK,KAAK,QAAQ;GACnB;;;;;;;;;;;;;CAcH,AAAO,oBAAsC;EAC3C,MAAM,aAAa,KAAK,UAAU;EAClC,MAAM,gBAAgB,QAAQ,IAAI,aAAa;EAC/C,MAAM,mBACJ,QAAQ,IAAI,6BAA6B,UACzC,QAAQ,IAAI,6BAA6B;AAE3C,MAAI,CAAC,WAAW,OAAO;GACrB,MAAM,eAAe,iBAAiB,uBACpC,WAAW,QACZ;AAID,OAFoB,CAAC,iBAAiB,iBAGpC,OAAM,IAAI,mBAAmB,cAAc,EACzC,SAAS,EACP,kBAAkB,WAAW,QAAQ,KAAK,OAAO;IAC/C,MAAM,EAAE;IACR,OAAO,EAAE;IACT,QAAQ,EAAE;IACV,SAAS,OAAO,OAAO,EAAE,OAAO,CAAC,KAAK,M
AAM,EAAE,IAAI;IACnD,EAAE,EACJ,EACF,CAAC;GAIJ,MAAM,SAAS,iBAAiB,uBAC9B,WAAW,QACZ;AACD,UAAO,KAAK,QAAQ,OAAO;aAClB,KAAK,MAAM,GAAG,EACvB,QAAO,MAAM,2CAA2C,KAAK,MAAM,CAAC;AAGtE,SAAO;;;;;;;;CAST,OAAc,uBAAuB,SAAkC;AACrE,MAAI,QAAQ,WAAW,EACrB,QAAO;AAST,SAAO,gCANO,QAAQ,KAAK,UAAU;GAEnC,MAAM,UAAU,SADA,OAAO,OAAO,MAAM,OAAO,CAAC,KAAK,MAAM,EAAE,IAAI,CAC5B,KAAK,KAAK,CAAC;AAC5C,UAAO,OAAO,MAAM,KAAK,GAAG,MAAM,MAAM,IAAI,MAAM,OAAO,GAAG;IAC5D,CAE2C,KAAK,KAAK;;;;;;;;;CAUzD,OAAc,uBAAuB,SAAkC;EACrE,MAAM,eAAyB,CAC7B,oEACA,GACD;AAED,OAAK,MAAM,SAAS,SAAS;GAC3B,MAAM,UAAU,OAAO,OAAO,MAAM,OAAO,CAAC,KAAK,MAAM,EAAE,IAAI;AAC7D,gBAAa,KACX,KAAK,MAAM,KAAK,GAAG,MAAM,MAAM,aAAa,MAAM,OAAO,GAC1D;AACD,gBAAa,KAAK,YAAY,QAAQ,KAAK,KAAK,GAAG;;AAGrD,eAAa,KAAK,GAAG;AACrB,eAAa,KACX,uEACD;EAED,MAAM,SAAS,KAAK,IAAI,GAAG,aAAa,KAAK,MAAM,EAAE,OAAO,CAAC;EAC7D,MAAM,SAAS,IAAI,OAAO,SAAS,EAAE;AAIrC,SAAO;GAAC;GAAQ,GAFF,aAAa,KAAK,SAAS,KAAK,KAAK,OAAO,OAAO,CAAC,IAAI;GAE5C;GAAO,CAAC,KAAK,KAAK"}
|
|
1
|
+
{"version":3,"file":"resource-registry.js","names":[],"sources":["../../src/registry/resource-registry.ts"],"sourcesContent":["/**\n * Resource Registry\n *\n * Central registry that tracks all resource requirements across all plugins.\n * Provides visibility into Databricks resources needed by the application\n * and handles deduplication when multiple plugins require the same resource\n * (dedup key: type + resourceKey).\n *\n * Use `new ResourceRegistry()` for instance-scoped usage (e.g. createApp).\n * getInstance() / resetInstance() remain for backward compatibility in tests.\n */\n\nimport type { BasePluginConfig, PluginConstructor, PluginData } from \"shared\";\nimport { ConfigurationError } from \"../errors\";\nimport { createLogger } from \"../logging/logger\";\nimport { getPluginManifest } from \"./manifest-loader\";\nimport type {\n ResourceEntry,\n ResourcePermission,\n ResourceRequirement,\n ValidationResult,\n} from \"./types\";\nimport { PERMISSION_HIERARCHY_BY_TYPE, type ResourceType } from \"./types\";\n\nconst logger = createLogger(\"resource-registry\");\n\n/**\n * Dedup key for registry: type + resourceKey (machine-stable).\n * alias is for UI/display only.\n */\nfunction getDedupKey(type: string, resourceKey: string): string {\n return `${type}:${resourceKey}`;\n}\n\n/**\n * Returns the most permissive permission for a given resource type.\n * Uses per-type hierarchy; unknown permissions are treated as least permissive.\n */\nfunction getMostPermissivePermission(\n resourceType: ResourceType,\n p1: ResourcePermission,\n p2: ResourcePermission,\n): ResourcePermission {\n const hierarchy = PERMISSION_HIERARCHY_BY_TYPE[resourceType as ResourceType];\n const index1 = hierarchy?.indexOf(p1) ?? -1;\n const index2 = hierarchy?.indexOf(p2) ?? -1;\n return index1 > index2 ? 
p1 : p2;\n}\n\n/**\n * Central registry for tracking plugin resource requirements.\n * Deduplication uses type + resourceKey (machine-stable); alias is for display only.\n */\nexport class ResourceRegistry {\n private resources: Map<string, ResourceEntry> = new Map();\n\n /**\n * Registers a resource requirement for a plugin.\n * If a resource with the same type+resourceKey already exists, merges them:\n * - Combines plugin names (comma-separated)\n * - Uses the most permissive permission (per-type hierarchy)\n * - Marks as required if any plugin requires it\n * - Combines descriptions if they differ\n * - Merges fields; warns when same field name uses different env vars\n *\n * @param plugin - Name of the plugin registering the resource\n * @param resource - Resource requirement specification\n */\n public register(plugin: string, resource: ResourceRequirement): void {\n const key = getDedupKey(resource.type, resource.resourceKey);\n const existing = this.resources.get(key);\n\n if (existing) {\n // Merge with existing resource\n const merged = this.mergeResources(existing, plugin, resource);\n this.resources.set(key, merged);\n } else {\n // Create new resource entry with permission source tracking\n const entry: ResourceEntry = {\n ...resource,\n plugin,\n resolved: false,\n permissionSources: { [plugin]: resource.permission },\n };\n this.resources.set(key, entry);\n }\n }\n\n /**\n * Collects and registers resource requirements from an array of plugins.\n * For each plugin, loads its manifest (required) and runtime resource requirements.\n *\n * @param rawPlugins - Array of plugin data entries from createApp configuration\n * @throws {ConfigurationError} If any plugin is missing a manifest or manifest is invalid\n */\n public collectResources(\n rawPlugins: PluginData<PluginConstructor, unknown, string>[],\n ): void {\n for (const pluginData of rawPlugins) {\n if (!pluginData?.plugin) continue;\n\n const pluginName = pluginData.name;\n const manifest = getPluginManifest(pluginData.plugin);\n\n // Register required resources\n for (const resource of manifest.resources.required) {\n this.register(pluginName, { ...resource, required: true });\n }\n\n // Register optional resources\n for (const resource of manifest.resources.optional || []) {\n this.register(pluginName, { ...resource, required: false });\n }\n\n // Check for runtime resource requirements\n if (typeof pluginData.plugin.getResourceRequirements === \"function\") {\n const runtimeResources = pluginData.plugin.getResourceRequirements(\n pluginData.config as BasePluginConfig,\n );\n for (const resource of runtimeResources) {\n this.register(pluginName, resource as ResourceRequirement);\n }\n }\n\n logger.debug(\n \"Collected resources from plugin %s: %d total\",\n pluginName,\n this.getByPlugin(pluginName).length,\n );\n }\n }\n\n /**\n * Merges a new resource requirement with an existing entry.\n * Applies intelligent merging logic for conflicting properties.\n */\n private mergeResources(\n existing: ResourceEntry,\n newPlugin: string,\n newResource: ResourceRequirement,\n ): ResourceEntry {\n // Combine plugin names if not already included\n const plugins = existing.plugin.split(\", \");\n if (!plugins.includes(newPlugin)) {\n plugins.push(newPlugin);\n }\n\n // Track per-plugin permission sources\n const permissionSources: Record<string, ResourcePermission> = {\n ...(existing.permissionSources ?? 
{}),\n [newPlugin]: newResource.permission,\n };\n\n // Use the most permissive permission for this resource type; warn when escalating\n const permission = getMostPermissivePermission(\n existing.type as ResourceType,\n existing.permission,\n newResource.permission,\n );\n\n if (permission !== existing.permission) {\n logger.warn(\n 'Resource %s:%s permission escalated from \"%s\" to \"%s\" due to plugin \"%s\" ' +\n \"(previously requested by: %s). Review plugin permissions to ensure least-privilege.\",\n existing.type,\n existing.resourceKey,\n existing.permission,\n permission,\n newPlugin,\n existing.plugin,\n );\n }\n\n // Mark as required if any plugin requires it\n const required = existing.required || newResource.required;\n\n // Combine descriptions if they differ\n let description = existing.description;\n if (\n newResource.description &&\n newResource.description !== existing.description\n ) {\n if (!existing.description.includes(newResource.description)) {\n description = `${existing.description}; ${newResource.description}`;\n }\n }\n\n // Merge fields: union of field names; warn when same field name uses different env\n const fields = { ...(existing.fields ?? {}) };\n for (const [fieldName, newField] of Object.entries(\n newResource.fields ?? {},\n )) {\n const existingField = fields[fieldName];\n if (existingField) {\n if (existingField.env !== newField.env) {\n logger.warn(\n 'Resource %s:%s field \"%s\": conflicting env vars \"%s\" (from %s) vs \"%s\" (from %s). Using first.',\n existing.type,\n existing.resourceKey,\n fieldName,\n existingField.env,\n existing.plugin,\n newField.env,\n newPlugin,\n );\n }\n // keep existing\n } else {\n fields[fieldName] = newField;\n }\n }\n\n return {\n ...existing,\n plugin: plugins.join(\", \"),\n permission,\n permissionSources,\n required,\n description,\n fields,\n };\n }\n\n /**\n * Retrieves all registered resources.\n * Returns a copy of the array to prevent external mutations.\n *\n * @returns Array of all registered resource entries\n */\n public getAll(): ResourceEntry[] {\n return Array.from(this.resources.values());\n }\n\n /**\n * Gets a specific resource by type and resourceKey (dedup key).\n *\n * @param type - Resource type\n * @param resourceKey - Stable machine key (not alias; alias is for display only)\n * @returns The resource entry if found, undefined otherwise\n */\n public get(type: string, resourceKey: string): ResourceEntry | undefined {\n return this.resources.get(getDedupKey(type, resourceKey));\n }\n\n /**\n * Clears all registered resources.\n * Useful for testing or when rebuilding the registry.\n */\n public clear(): void {\n this.resources.clear();\n }\n\n /**\n * Returns the number of registered resources.\n */\n public size(): number {\n return this.resources.size;\n }\n\n /**\n * Gets all resources required by a specific plugin.\n *\n * @param pluginName - Name of the plugin\n * @returns Array of resources where the plugin is listed as a requester\n */\n public getByPlugin(pluginName: string): ResourceEntry[] {\n return this.getAll().filter((entry) =>\n entry.plugin.split(\", \").includes(pluginName),\n );\n }\n\n /**\n * Gets all required resources (where required=true).\n *\n * @returns Array of required resource entries\n */\n public getRequired(): ResourceEntry[] {\n return this.getAll().filter((entry) => entry.required);\n }\n\n /**\n * Gets all optional resources (where required=false).\n *\n * @returns Array of optional resource entries\n */\n public getOptional(): ResourceEntry[] {\n return 
this.getAll().filter((entry) => !entry.required);\n }\n\n /**\n * Validates all registered resources against the environment.\n *\n * Checks each resource's field environment variables to determine if it's resolved.\n * Updates the `resolved` and `values` fields on each resource entry.\n *\n * Only required resources affect the `valid` status - optional resources\n * are checked but don't cause validation failure.\n *\n * @returns ValidationResult with validity status, missing resources, and all resources\n *\n * @example\n * ```typescript\n * const registry = ResourceRegistry.getInstance();\n * const result = registry.validate();\n *\n * if (!result.valid) {\n * console.error(\"Missing resources:\", result.missing.map(r => Object.values(r.fields).map(f => f.env)));\n * }\n * ```\n */\n public validate(): ValidationResult {\n const missing: ResourceEntry[] = [];\n\n for (const entry of this.resources.values()) {\n const values: Record<string, string> = {};\n let allSet = true;\n for (const [fieldName, fieldDef] of Object.entries(entry.fields)) {\n if (!fieldDef.env) continue;\n const val = process.env[fieldDef.env];\n if (val !== undefined && val !== \"\") {\n values[fieldName] = val;\n } else {\n allSet = false;\n }\n }\n if (allSet) {\n entry.resolved = true;\n entry.values = values;\n logger.debug(\n \"Resource %s:%s resolved from fields\",\n entry.type,\n entry.alias,\n );\n } else {\n entry.resolved = false;\n entry.values = Object.keys(values).length > 0 ? values : undefined;\n if (entry.required) {\n missing.push(entry);\n logger.debug(\n \"Required resource %s:%s missing (fields: %s)\",\n entry.type,\n entry.alias,\n Object.keys(entry.fields).join(\", \"),\n );\n } else {\n logger.debug(\n \"Optional resource %s:%s not configured (fields: %s)\",\n entry.type,\n entry.alias,\n Object.keys(entry.fields).join(\", \"),\n );\n }\n }\n }\n\n return {\n valid: missing.length === 0,\n missing,\n all: this.getAll(),\n };\n }\n\n /**\n * Validates all registered resources and enforces the result.\n *\n * - In production: throws a {@link ConfigurationError} if any required resources are missing.\n * - In development (`NODE_ENV=development`): logs a warning but continues, unless\n * `APPKIT_STRICT_VALIDATION=true` is set, in which case throws like production.\n * - When all resources are valid: logs a debug message with the count.\n *\n * @returns ValidationResult with validity status, missing resources, and all resources\n * @throws {ConfigurationError} In production when required resources are missing, or in dev when APPKIT_STRICT_VALIDATION=true\n */\n public enforceValidation(): ValidationResult {\n const validation = this.validate();\n const isDevelopment = process.env.NODE_ENV === \"development\";\n const strictValidation =\n process.env.APPKIT_STRICT_VALIDATION === \"true\" ||\n process.env.APPKIT_STRICT_VALIDATION === \"1\";\n\n if (!validation.valid) {\n const errorMessage = ResourceRegistry.formatMissingResources(\n validation.missing,\n );\n\n const shouldThrow = !isDevelopment || strictValidation;\n\n if (shouldThrow) {\n throw new ConfigurationError(errorMessage, {\n context: {\n missingResources: validation.missing.map((r) => ({\n type: r.type,\n alias: r.alias,\n plugin: r.plugin,\n envVars: Object.values(r.fields).map((f) => f.env),\n })),\n },\n });\n }\n\n // Dev mode without strict: use a visually prominent box so the warning can't be missed\n const banner = ResourceRegistry.formatDevWarningBanner(\n validation.missing,\n );\n logger.warn(\"\\n%s\", banner);\n } else if 
(this.size() > 0) {\n logger.debug(\"All %d resources validated successfully\", this.size());\n }\n\n return validation;\n }\n\n /**\n * Formats missing resources into a human-readable error message.\n *\n * @param missing - Array of missing resource entries\n * @returns Formatted error message string\n */\n public static formatMissingResources(missing: ResourceEntry[]): string {\n if (missing.length === 0) {\n return \"No missing resources\";\n }\n\n const lines = missing.map((entry) => {\n const envVars = Object.values(entry.fields).map((f) => f.env);\n const envHint = ` (set ${envVars.join(\", \")})`;\n return ` - ${entry.type}:${entry.alias} [${entry.plugin}]${envHint}`;\n });\n\n return `Missing required resources:\\n${lines.join(\"\\n\")}`;\n }\n\n /**\n * Formats a highly visible warning banner for dev-mode missing resources.\n * Uses box drawing to ensure the message is impossible to miss in scrolling logs.\n *\n * @param missing - Array of missing resource entries\n * @returns Formatted banner string\n */\n public static formatDevWarningBanner(missing: ResourceEntry[]): string {\n const contentLines: string[] = [\n \"MISSING REQUIRED RESOURCES (dev mode — would fail in production)\",\n \"\",\n ];\n\n for (const entry of missing) {\n const envVars = Object.values(entry.fields).map((f) => f.env);\n contentLines.push(\n ` ${entry.type}:${entry.alias} (plugin: ${entry.plugin})`,\n );\n contentLines.push(` Set: ${envVars.join(\", \")}`);\n }\n\n contentLines.push(\"\");\n contentLines.push(\n \"Add these to your .env file or environment to suppress this warning.\",\n );\n\n const maxLen = Math.max(...contentLines.map((l) => l.length));\n const border = \"=\".repeat(maxLen + 4);\n\n const boxed = contentLines.map((line) => `| ${line.padEnd(maxLen)} |`);\n\n return [border, ...boxed, border].join(\"\\n\");\n 
}\n}\n"],"mappings":";;;;;;;;aAa+C;AAW/C,MAAM,SAAS,aAAa,oBAAoB;;;;;AAMhD,SAAS,YAAY,MAAc,aAA6B;AAC9D,QAAO,GAAG,KAAK,GAAG;;;;;;AAOpB,SAAS,4BACP,cACA,IACA,IACoB;CACpB,MAAM,YAAY,6BAA6B;AAG/C,SAFe,WAAW,QAAQ,GAAG,IAAI,OAC1B,WAAW,QAAQ,GAAG,IAAI,MAChB,KAAK;;;;;;AAOhC,IAAa,mBAAb,MAAa,iBAAiB;CAC5B,AAAQ,4BAAwC,IAAI,KAAK;;;;;;;;;;;;;CAczD,AAAO,SAAS,QAAgB,UAAqC;EACnE,MAAM,MAAM,YAAY,SAAS,MAAM,SAAS,YAAY;EAC5D,MAAM,WAAW,KAAK,UAAU,IAAI,IAAI;AAExC,MAAI,UAAU;GAEZ,MAAM,SAAS,KAAK,eAAe,UAAU,QAAQ,SAAS;AAC9D,QAAK,UAAU,IAAI,KAAK,OAAO;SAC1B;GAEL,MAAM,QAAuB;IAC3B,GAAG;IACH;IACA,UAAU;IACV,mBAAmB,GAAG,SAAS,SAAS,YAAY;IACrD;AACD,QAAK,UAAU,IAAI,KAAK,MAAM;;;;;;;;;;CAWlC,AAAO,iBACL,YACM;AACN,OAAK,MAAM,cAAc,YAAY;AACnC,OAAI,CAAC,YAAY,OAAQ;GAEzB,MAAM,aAAa,WAAW;GAC9B,MAAM,WAAW,kBAAkB,WAAW,OAAO;AAGrD,QAAK,MAAM,YAAY,SAAS,UAAU,SACxC,MAAK,SAAS,YAAY;IAAE,GAAG;IAAU,UAAU;IAAM,CAAC;AAI5D,QAAK,MAAM,YAAY,SAAS,UAAU,YAAY,EAAE,CACtD,MAAK,SAAS,YAAY;IAAE,GAAG;IAAU,UAAU;IAAO,CAAC;AAI7D,OAAI,OAAO,WAAW,OAAO,4BAA4B,YAAY;IACnE,MAAM,mBAAmB,WAAW,OAAO,wBACzC,WAAW,OACZ;AACD,SAAK,MAAM,YAAY,iBACrB,MAAK,SAAS,YAAY,SAAgC;;AAI9D,UAAO,MACL,gDACA,YACA,KAAK,YAAY,WAAW,CAAC,OAC9B;;;;;;;CAQL,AAAQ,eACN,UACA,WACA,aACe;EAEf,MAAM,UAAU,SAAS,OAAO,MAAM,KAAK;AAC3C,MAAI,CAAC,QAAQ,SAAS,UAAU,CAC9B,SAAQ,KAAK,UAAU;EAIzB,MAAM,oBAAwD;GAC5D,GAAI,SAAS,qBAAqB,EAAE;IACnC,YAAY,YAAY;GAC1B;EAGD,MAAM,aAAa,4BACjB,SAAS,MACT,SAAS,YACT,YAAY,WACb;AAED,MAAI,eAAe,SAAS,WAC1B,QAAO,KACL,sKAEA,SAAS,MACT,SAAS,aACT,SAAS,YACT,YACA,WACA,SAAS,OACV;EAIH,MAAM,WAAW,SAAS,YAAY,YAAY;EAGlD,IAAI,cAAc,SAAS;AAC3B,MACE,YAAY,eACZ,YAAY,gBAAgB,SAAS,aAErC;OAAI,CAAC,SAAS,YAAY,SAAS,YAAY,YAAY,CACzD,eAAc,GAAG,SAAS,YAAY,IAAI,YAAY;;EAK1D,MAAM,SAAS,EAAE,GAAI,SAAS,UAAU,EAAE,EAAG;AAC7C,OAAK,MAAM,CAAC,WAAW,aAAa,OAAO,QACzC,YAAY,UAAU,EAAE,CACzB,EAAE;GACD,MAAM,gBAAgB,OAAO;AAC7B,OAAI,eACF;QAAI,cAAc,QAAQ,SAAS,IACjC,QAAO,KACL,wGACA,SAAS,MACT,SAAS,aACT,WACA,cAAc,KACd,SAAS,QACT,SAAS,KACT,UACD;SAIH,QAAO,aAAa;;AAIxB,SAAO;GACL,GAAG;GACH,QAAQ,QAAQ,KAAK,KAAK;GAC1B;GACA;GACA;GACA;GACA;GACD;;;;;;;;CASH,AAAO,SAA0B;AAC/B,SAAO,MAAM,KAAK,KAAK,UAAU,QAAQ,CAAC;;;;;;;;;CAU5C,AAAO,IAAI,MAAc,aAAgD;AACvE,SAAO,KAAK,UAAU,IAAI,YAAY,MAAM,YAAY,CAAC;;;;;;CAO3D,AAAO,QAAc;AACnB,OAAK,UAAU,OAAO;;;;;CAMxB,AAAO,OAAe;AACpB,SAAO,KAAK,UAAU;;;;;;;;CASxB,AAAO,YAAY,YAAqC;AACtD,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAC3B,MAAM,OAAO,MAAM,KAAK,CAAC,SAAS,WAAW,CAC9C;;;;;;;CAQH,AAAO,cAA+B;AACpC,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAAU,MAAM,SAAS;;;;;;;CAQxD,AAAO,cAA+B;AACpC,SAAO,KAAK,QAAQ,CAAC,QAAQ,UAAU,CAAC,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;CAwBzD,AAAO,WAA6B;EAClC,MAAM,UAA2B,EAAE;AAEnC,OAAK,MAAM,SAAS,KAAK,UAAU,QAAQ,EAAE;GAC3C,MAAM,SAAiC,EAAE;GACzC,IAAI,SAAS;AACb,QAAK,MAAM,CAAC,WAAW,aAAa,OAAO,QAAQ,MAAM,OAAO,EAAE;AAChE,QAAI,CAAC,SAAS,IAAK;IACnB,MAAM,MAAM,QAAQ,IAAI,SAAS;AACjC,QAAI,QAAQ,UAAa,QAAQ,GAC/B,QAAO,aAAa;QAEpB,UAAS;;AAGb,OAAI,QAAQ;AACV,UAAM,WAAW;AACjB,UAAM,SAAS;AACf,WAAO,MACL,uCACA,MAAM,MACN,MAAM,MACP;UACI;AACL,UAAM,WAAW;AACjB,UAAM,SAAS,OAAO,KAAK,OAAO,CAAC,SAAS,IAAI,SAAS;AACzD,QAAI,MAAM,UAAU;AAClB,aAAQ,KAAK,MAAM;AACnB,YAAO,MACL,gDACA,MAAM,MACN,MAAM,OACN,OAAO,KAAK,MAAM,OAAO,CAAC,KAAK,KAAK,CACrC;UAED,QAAO,MACL,uDACA,MAAM,MACN,MAAM,OACN,OAAO,KAAK,MAAM,OAAO,CAAC,KAAK,KAAK,CACrC;;;AAKP,SAAO;GACL,OAAO,QAAQ,WAAW;GAC1B;GACA,KAAK,KAAK,QAAQ;GACnB;;;;;;;;;;;;;CAcH,AAAO,oBAAsC;EAC3C,MAAM,aAAa,KAAK,UAAU;EAClC,MAAM,gBAAgB,QAAQ,IAAI,aAAa;EAC/C,MAAM,mBACJ,QAAQ,IAAI,6BAA6B,UACzC,QAAQ,IAAI,6BAA6B;AAE3C,MAAI,CAAC,WAAW,OAAO;GACrB,MAAM,eAAe,iBAAiB,uBACpC,WAAW,QACZ;AAID,OAFoB,CAAC,iBAAiB,iBAGpC,OAAM,IAAI,mBAAmB,cAAc,EACzC,SAAS,EACP,kBAAkB,WAAW,QAAQ,KAAK,OAAO;IAC/C,MAAM,EAAE;IACR,OAAO,EAAE;IACT,QAAQ,EAAE;IACV,SAAS,OAAO,
OAAO,EAAE,OAAO,CAAC,KAAK,MAAM,EAAE,IAAI;IACnD,EAAE,EACJ,EACF,CAAC;GAIJ,MAAM,SAAS,iBAAiB,uBAC9B,WAAW,QACZ;AACD,UAAO,KAAK,QAAQ,OAAO;aAClB,KAAK,MAAM,GAAG,EACvB,QAAO,MAAM,2CAA2C,KAAK,MAAM,CAAC;AAGtE,SAAO;;;;;;;;CAST,OAAc,uBAAuB,SAAkC;AACrE,MAAI,QAAQ,WAAW,EACrB,QAAO;AAST,SAAO,gCANO,QAAQ,KAAK,UAAU;GAEnC,MAAM,UAAU,SADA,OAAO,OAAO,MAAM,OAAO,CAAC,KAAK,MAAM,EAAE,IAAI,CAC5B,KAAK,KAAK,CAAC;AAC5C,UAAO,OAAO,MAAM,KAAK,GAAG,MAAM,MAAM,IAAI,MAAM,OAAO,GAAG;IAC5D,CAE2C,KAAK,KAAK;;;;;;;;;CAUzD,OAAc,uBAAuB,SAAkC;EACrE,MAAM,eAAyB,CAC7B,oEACA,GACD;AAED,OAAK,MAAM,SAAS,SAAS;GAC3B,MAAM,UAAU,OAAO,OAAO,MAAM,OAAO,CAAC,KAAK,MAAM,EAAE,IAAI;AAC7D,gBAAa,KACX,KAAK,MAAM,KAAK,GAAG,MAAM,MAAM,aAAa,MAAM,OAAO,GAC1D;AACD,gBAAa,KAAK,YAAY,QAAQ,KAAK,KAAK,GAAG;;AAGrD,eAAa,KAAK,GAAG;AACrB,eAAa,KACX,uEACD;EAED,MAAM,SAAS,KAAK,IAAI,GAAG,aAAa,KAAK,MAAM,EAAE,OAAO,CAAC;EAC7D,MAAM,SAAS,IAAI,OAAO,SAAS,EAAE;AAIrC,SAAO;GAAC;GAAQ,GAFF,aAAa,KAAK,SAAS,KAAK,KAAK,OAAO,OAAO,CAAC,IAAI;GAE5C;GAAO,CAAC,KAAK,KAAK"}
|
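The registry source embedded in the source map above adds the `postgres` resource type and the field-level `value`/`resolve` options. As a rough, hypothetical sketch of how that API fits together (the import specifiers below are assumptions; the class lives at `src/registry/resource-registry.ts` and may not be exported from the package root), a registration plus validation pass could look like this:

```ts
// Hypothetical usage sketch of the ResourceRegistry API shown in the diff above.
// Import paths are assumptions; adjust them to however your build exposes the registry.
import { ResourceRegistry } from "@databricks/appkit/registry/resource-registry";
import { ResourceType } from "@databricks/appkit/registry/types";

const registry = new ResourceRegistry();

// Declare a Lakebase Postgres requirement using the new "postgres" resource type.
registry.register("lakebase", {
  type: ResourceType.POSTGRES,
  alias: "postgres",
  resourceKey: "postgres",
  description: "Lakebase Postgres instance backing the app",
  permission: "CAN_CONNECT_AND_CREATE",
  required: true,
  fields: {
    host: { env: "PGHOST", resolve: "postgres:host" },
    database: { env: "PGDATABASE", value: "databricks_postgres" },
  },
});

// In production, or in development with APPKIT_STRICT_VALIDATION=true (or "1"),
// enforceValidation() throws a ConfigurationError when required field env vars are
// missing; in plain development it logs the boxed warning banner instead.
const result = registry.enforceValidation();
console.log(result.valid, result.missing.length);
```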
package/dist/registry/types.d.ts
CHANGED
|
@@ -8,9 +8,19 @@ import { JSONSchema7 } from "json-schema";
|
|
|
8
8
|
*/
|
|
9
9
|
interface ResourceFieldEntry {
|
|
10
10
|
/** Environment variable name for this field */
|
|
11
|
-
env
|
|
11
|
+
env?: string;
|
|
12
12
|
/** Human-readable description for this field */
|
|
13
13
|
description?: string;
|
|
14
|
+
/** When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation. */
|
|
15
|
+
bundleIgnore?: boolean;
|
|
16
|
+
/** Example values showing the expected format for this field */
|
|
17
|
+
examples?: string[];
|
|
18
|
+
/** When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time. */
|
|
19
|
+
localOnly?: boolean;
|
|
20
|
+
/** Static value for this field. Used when no prompted or resolved value exists. */
|
|
21
|
+
value?: string;
|
|
22
|
+
/** Named resolver prefixed by resource type (e.g., 'postgres:host'). The CLI resolves this value during the init prompt flow. */
|
|
23
|
+
resolve?: string;
|
|
14
24
|
}
|
|
15
25
|
/**
|
|
16
26
|
* Declares a resource requirement for a plugin.
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"types.d.ts","names":[],"sources":["../../src/registry/types.ts"],"mappings":";;;;;;;;UA0DiB,kBAAA;EA+
|
|
1
|
+
{"version":3,"file":"types.d.ts","names":[],"sources":["../../src/registry/types.ts"],"mappings":";;;;;;;;UA0DiB,kBAAA;EAqBA;EAnBf,GAAA;;EAEA,WAAA;EA+BY;EA7BZ,YAAA;EAmCQ;EAjCR,QAAA;EAiCc;EA/Bd,SAAA;EAaM;EAXN,KAAA;EAiBA;EAfA,OAAA;AAAA;;;;;UAOe,mBAAA;EAuBP;EArBR,IAAA,EAAM,YAAA;EA4BS;EAzBf,KAAA;;EAGA,WAAA;EAqCmC;EAlCnC,WAAA;EAmBqC;EAhBrC,UAAA,EAAY,kBAAA;EAgB4C;;;;EAVxD,MAAA,EAAQ,MAAA,SAAe,kBAAA;EAkBd;EAfT,QAAA;AAAA;;;;AA4BF;UArBiB,aAAA,SAAsB,mBAAA;;EAErC,MAAA;EAqBA;EAlBA,QAAA;EAqBS;EAlBT,MAAA,GAAS,MAAA;EAqBJ;;;AAWP;;EAzBE,iBAAA,GAAoB,MAAA,SAAe,kBAAA;AAAA;;AA+BrC;;UAzBiB,gBAAA;EA2BT;EAzBN,KAAA;EAsCY;EAnCZ,OAAA,EAAS,aAAA;EAsCG;EAnCZ,GAAA,EAAK,aAAA;AAAA;;;;;;;KAWK,YAAA,GAAe,WAAA;;;;;UAMV,cAAA;EAyBf;EAvBA,IAAA,EAAM,KAAA;EAwBI;EArBV,WAAA;EAgCA;EA7BA,WAAA;EA+BA;;;EA1BA,SAAA;IA4BO,kEA1BL,QAAA,EAAU,IAAA,CAAK,mBAAA;IAGf,QAAA,EAAU,IAAA,CAAK,mBAAA;EAAA;;;;;EAOjB,MAAA;IACE,MAAA,EAAQ,YAAA;EAAA;;;;EAMV,MAAA;;;;EAKA,MAAA;EACA,OAAA;EACA,UAAA;EACA,QAAA;EACA,OAAA;AAAA"}
|
|
@@ -10,6 +10,7 @@ declare enum ResourceType {
|
|
|
10
10
|
UC_FUNCTION = "uc_function",
|
|
11
11
|
UC_CONNECTION = "uc_connection",
|
|
12
12
|
DATABASE = "database",
|
|
13
|
+
POSTGRES = "postgres",
|
|
13
14
|
GENIE_SPACE = "genie_space",
|
|
14
15
|
EXPERIMENT = "experiment",
|
|
15
16
|
APP = "app"
|
|
@@ -32,6 +33,8 @@ type UcFunctionPermission = "EXECUTE";
|
|
|
32
33
|
type UcConnectionPermission = "USE_CONNECTION";
|
|
33
34
|
/** Permissions for DATABASE resources */
|
|
34
35
|
type DatabasePermission = "CAN_CONNECT_AND_CREATE";
|
|
36
|
+
/** Permissions for POSTGRES resources */
|
|
37
|
+
type PostgresPermission = "CAN_CONNECT_AND_CREATE";
|
|
35
38
|
/** Permissions for GENIE_SPACE resources */
|
|
36
39
|
type GenieSpacePermission = "CAN_VIEW" | "CAN_RUN" | "CAN_EDIT" | "CAN_MANAGE";
|
|
37
40
|
/** Permissions for EXPERIMENT resources */
|
|
@@ -39,7 +42,7 @@ type ExperimentPermission = "CAN_READ" | "CAN_EDIT" | "CAN_MANAGE";
|
|
|
39
42
|
/** Permissions for APP resources */
|
|
40
43
|
type AppPermission = "CAN_USE";
|
|
41
44
|
/** Union of all possible permission levels across all resource types. */
|
|
42
|
-
type ResourcePermission = SecretPermission | JobPermission | SqlWarehousePermission | ServingEndpointPermission | VolumePermission | VectorSearchIndexPermission | UcFunctionPermission | UcConnectionPermission | DatabasePermission | GenieSpacePermission | ExperimentPermission | AppPermission;
|
|
45
|
+
type ResourcePermission = SecretPermission | JobPermission | SqlWarehousePermission | ServingEndpointPermission | VolumePermission | VectorSearchIndexPermission | UcFunctionPermission | UcConnectionPermission | DatabasePermission | PostgresPermission | GenieSpacePermission | ExperimentPermission | AppPermission;
|
|
43
46
|
//#endregion
|
|
44
47
|
export { AppPermission, DatabasePermission, ExperimentPermission, GenieSpacePermission, JobPermission, ResourcePermission, ResourceType, SecretPermission, ServingEndpointPermission, SqlWarehousePermission, UcConnectionPermission, UcFunctionPermission, VectorSearchIndexPermission, VolumePermission };
|
|
45
48
|
//# sourceMappingURL=types.generated.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"types.generated.d.ts","names":[],"sources":["../../src/registry/types.generated.ts"],"mappings":";;aAIY,YAAA;EACV,MAAA;EACA,GAAA;EACA,aAAA;EACA,gBAAA;EACA,MAAA;EACA,mBAAA;EACA,WAAA;EACA,aAAA;EACA,QAAA;EACA,WAAA;EACA,UAAA;EACA,GAAA;AAAA;;KAOU,gBAAA;;KAGA,aAAA;AAHZ;AAAA,KAMY,sBAAA;;KAGA,yBAAA;;KAGA,gBAAA;;KAGA,2BAAA;;KAGA,oBAAA;AAZZ;AAAA,KAeY,sBAAA;;KAGA,kBAAA;;KAGA,
|
|
1
|
+
{"version":3,"file":"types.generated.d.ts","names":[],"sources":["../../src/registry/types.generated.ts"],"mappings":";;aAIY,YAAA;EACV,MAAA;EACA,GAAA;EACA,aAAA;EACA,gBAAA;EACA,MAAA;EACA,mBAAA;EACA,WAAA;EACA,aAAA;EACA,QAAA;EACA,QAAA;EACA,WAAA;EACA,UAAA;EACA,GAAA;AAAA;;KAOU,gBAAA;;KAGA,aAAA;AAHZ;AAAA,KAMY,sBAAA;;KAGA,yBAAA;;KAGA,gBAAA;;KAGA,2BAAA;;KAGA,oBAAA;AAZZ;AAAA,KAeY,sBAAA;;KAGA,kBAAA;;KAGA,kBAAA;;KAGA,oBAAA;;KAGA,oBAAA;AArBZ;AAAA,KAwBY,aAAA;;KAGA,kBAAA,GACR,gBAAA,GACA,aAAA,GACA,sBAAA,GACA,yBAAA,GACA,gBAAA,GACA,2BAAA,GACA,oBAAA,GACA,sBAAA,GACA,kBAAA,GACA,kBAAA,GACA,oBAAA,GACA,oBAAA,GACA,aAAA"}
|
|
@@ -10,6 +10,7 @@ let ResourceType = /* @__PURE__ */ function(ResourceType) {
|
|
|
10
10
|
ResourceType["UC_FUNCTION"] = "uc_function";
|
|
11
11
|
ResourceType["UC_CONNECTION"] = "uc_connection";
|
|
12
12
|
ResourceType["DATABASE"] = "database";
|
|
13
|
+
ResourceType["POSTGRES"] = "postgres";
|
|
13
14
|
ResourceType["GENIE_SPACE"] = "genie_space";
|
|
14
15
|
ResourceType["EXPERIMENT"] = "experiment";
|
|
15
16
|
ResourceType["APP"] = "app";
|
|
@@ -38,6 +39,7 @@ const PERMISSION_HIERARCHY_BY_TYPE = {
|
|
|
38
39
|
[ResourceType.UC_FUNCTION]: ["EXECUTE"],
|
|
39
40
|
[ResourceType.UC_CONNECTION]: ["USE_CONNECTION"],
|
|
40
41
|
[ResourceType.DATABASE]: ["CAN_CONNECT_AND_CREATE"],
|
|
42
|
+
[ResourceType.POSTGRES]: ["CAN_CONNECT_AND_CREATE"],
|
|
41
43
|
[ResourceType.GENIE_SPACE]: [
|
|
42
44
|
"CAN_VIEW",
|
|
43
45
|
"CAN_RUN",
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"types.generated.js","names":[],"sources":["../../src/registry/types.generated.ts"],"sourcesContent":["// AUTO-GENERATED from packages/shared/src/schemas/plugin-manifest.schema.json\n// Do not edit. Run: pnpm exec tsx tools/generate-registry-types.ts\n\n/** Resource types from schema $defs.resourceType.enum */\nexport enum ResourceType {\n SECRET = \"secret\",\n JOB = \"job\",\n SQL_WAREHOUSE = \"sql_warehouse\",\n SERVING_ENDPOINT = \"serving_endpoint\",\n VOLUME = \"volume\",\n VECTOR_SEARCH_INDEX = \"vector_search_index\",\n UC_FUNCTION = \"uc_function\",\n UC_CONNECTION = \"uc_connection\",\n DATABASE = \"database\",\n GENIE_SPACE = \"genie_space\",\n EXPERIMENT = \"experiment\",\n APP = \"app\",\n}\n\n// ============================================================================\n// Permissions per resource type (from schema permission $defs)\n// ============================================================================\n/** Permissions for SECRET resources */\nexport type SecretPermission = \"READ\" | \"WRITE\" | \"MANAGE\";\n\n/** Permissions for JOB resources */\nexport type JobPermission = \"CAN_VIEW\" | \"CAN_MANAGE_RUN\" | \"CAN_MANAGE\";\n\n/** Permissions for SQL_WAREHOUSE resources */\nexport type SqlWarehousePermission = \"CAN_USE\" | \"CAN_MANAGE\";\n\n/** Permissions for SERVING_ENDPOINT resources */\nexport type ServingEndpointPermission = \"CAN_VIEW\" | \"CAN_QUERY\" | \"CAN_MANAGE\";\n\n/** Permissions for VOLUME resources */\nexport type VolumePermission = \"READ_VOLUME\" | \"WRITE_VOLUME\";\n\n/** Permissions for VECTOR_SEARCH_INDEX resources */\nexport type VectorSearchIndexPermission = \"SELECT\";\n\n/** Permissions for UC_FUNCTION resources */\nexport type UcFunctionPermission = \"EXECUTE\";\n\n/** Permissions for UC_CONNECTION resources */\nexport type UcConnectionPermission = \"USE_CONNECTION\";\n\n/** Permissions for DATABASE resources */\nexport type DatabasePermission = \"CAN_CONNECT_AND_CREATE\";\n\n/** Permissions for GENIE_SPACE resources */\nexport type GenieSpacePermission = \"CAN_VIEW\" | \"CAN_RUN\" | \"CAN_EDIT\" | \"CAN_MANAGE\";\n\n/** Permissions for EXPERIMENT resources */\nexport type ExperimentPermission = \"CAN_READ\" | \"CAN_EDIT\" | \"CAN_MANAGE\";\n\n/** Permissions for APP resources */\nexport type AppPermission = \"CAN_USE\";\n\n/** Union of all possible permission levels across all resource types. */\nexport type ResourcePermission =\n | SecretPermission\n | JobPermission\n | SqlWarehousePermission\n | ServingEndpointPermission\n | VolumePermission\n | VectorSearchIndexPermission\n | UcFunctionPermission\n | UcConnectionPermission\n | DatabasePermission\n | GenieSpacePermission\n | ExperimentPermission\n | AppPermission;\n\n/** Permission hierarchy per resource type (weakest to strongest). Schema enum order. 
*/\nexport const PERMISSION_HIERARCHY_BY_TYPE: Record<ResourceType, readonly ResourcePermission[]> = {\n [ResourceType.SECRET]: [\"READ\", \"WRITE\", \"MANAGE\"],\n [ResourceType.JOB]: [\"CAN_VIEW\", \"CAN_MANAGE_RUN\", \"CAN_MANAGE\"],\n [ResourceType.SQL_WAREHOUSE]: [\"CAN_USE\", \"CAN_MANAGE\"],\n [ResourceType.SERVING_ENDPOINT]: [\"CAN_VIEW\", \"CAN_QUERY\", \"CAN_MANAGE\"],\n [ResourceType.VOLUME]: [\"READ_VOLUME\", \"WRITE_VOLUME\"],\n [ResourceType.VECTOR_SEARCH_INDEX]: [\"SELECT\"],\n [ResourceType.UC_FUNCTION]: [\"EXECUTE\"],\n [ResourceType.UC_CONNECTION]: [\"USE_CONNECTION\"],\n [ResourceType.DATABASE]: [\"CAN_CONNECT_AND_CREATE\"],\n [ResourceType.GENIE_SPACE]: [\"CAN_VIEW\", \"CAN_RUN\", \"CAN_EDIT\", \"CAN_MANAGE\"],\n [ResourceType.EXPERIMENT]: [\"CAN_READ\", \"CAN_EDIT\", \"CAN_MANAGE\"],\n [ResourceType.APP]: [\"CAN_USE\"],\n} as const;\n\n/** Set of valid permissions per type (for validation). */\nexport const PERMISSIONS_BY_TYPE: Record<ResourceType, readonly ResourcePermission[]> = PERMISSION_HIERARCHY_BY_TYPE;\n"],"mappings":";;AAIA,IAAY,sDAAL;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;
|
|
1
|
+
{"version":3,"file":"types.generated.js","names":[],"sources":["../../src/registry/types.generated.ts"],"sourcesContent":["// AUTO-GENERATED from packages/shared/src/schemas/plugin-manifest.schema.json\n// Do not edit. Run: pnpm exec tsx tools/generate-registry-types.ts\n\n/** Resource types from schema $defs.resourceType.enum */\nexport enum ResourceType {\n SECRET = \"secret\",\n JOB = \"job\",\n SQL_WAREHOUSE = \"sql_warehouse\",\n SERVING_ENDPOINT = \"serving_endpoint\",\n VOLUME = \"volume\",\n VECTOR_SEARCH_INDEX = \"vector_search_index\",\n UC_FUNCTION = \"uc_function\",\n UC_CONNECTION = \"uc_connection\",\n DATABASE = \"database\",\n POSTGRES = \"postgres\",\n GENIE_SPACE = \"genie_space\",\n EXPERIMENT = \"experiment\",\n APP = \"app\",\n}\n\n// ============================================================================\n// Permissions per resource type (from schema permission $defs)\n// ============================================================================\n/** Permissions for SECRET resources */\nexport type SecretPermission = \"READ\" | \"WRITE\" | \"MANAGE\";\n\n/** Permissions for JOB resources */\nexport type JobPermission = \"CAN_VIEW\" | \"CAN_MANAGE_RUN\" | \"CAN_MANAGE\";\n\n/** Permissions for SQL_WAREHOUSE resources */\nexport type SqlWarehousePermission = \"CAN_USE\" | \"CAN_MANAGE\";\n\n/** Permissions for SERVING_ENDPOINT resources */\nexport type ServingEndpointPermission = \"CAN_VIEW\" | \"CAN_QUERY\" | \"CAN_MANAGE\";\n\n/** Permissions for VOLUME resources */\nexport type VolumePermission = \"READ_VOLUME\" | \"WRITE_VOLUME\";\n\n/** Permissions for VECTOR_SEARCH_INDEX resources */\nexport type VectorSearchIndexPermission = \"SELECT\";\n\n/** Permissions for UC_FUNCTION resources */\nexport type UcFunctionPermission = \"EXECUTE\";\n\n/** Permissions for UC_CONNECTION resources */\nexport type UcConnectionPermission = \"USE_CONNECTION\";\n\n/** Permissions for DATABASE resources */\nexport type DatabasePermission = \"CAN_CONNECT_AND_CREATE\";\n\n/** Permissions for POSTGRES resources */\nexport type PostgresPermission = \"CAN_CONNECT_AND_CREATE\";\n\n/** Permissions for GENIE_SPACE resources */\nexport type GenieSpacePermission = \"CAN_VIEW\" | \"CAN_RUN\" | \"CAN_EDIT\" | \"CAN_MANAGE\";\n\n/** Permissions for EXPERIMENT resources */\nexport type ExperimentPermission = \"CAN_READ\" | \"CAN_EDIT\" | \"CAN_MANAGE\";\n\n/** Permissions for APP resources */\nexport type AppPermission = \"CAN_USE\";\n\n/** Union of all possible permission levels across all resource types. */\nexport type ResourcePermission =\n | SecretPermission\n | JobPermission\n | SqlWarehousePermission\n | ServingEndpointPermission\n | VolumePermission\n | VectorSearchIndexPermission\n | UcFunctionPermission\n | UcConnectionPermission\n | DatabasePermission\n | PostgresPermission\n | GenieSpacePermission\n | ExperimentPermission\n | AppPermission;\n\n/** Permission hierarchy per resource type (weakest to strongest). Schema enum order. 
*/\nexport const PERMISSION_HIERARCHY_BY_TYPE: Record<ResourceType, readonly ResourcePermission[]> = {\n [ResourceType.SECRET]: [\"READ\", \"WRITE\", \"MANAGE\"],\n [ResourceType.JOB]: [\"CAN_VIEW\", \"CAN_MANAGE_RUN\", \"CAN_MANAGE\"],\n [ResourceType.SQL_WAREHOUSE]: [\"CAN_USE\", \"CAN_MANAGE\"],\n [ResourceType.SERVING_ENDPOINT]: [\"CAN_VIEW\", \"CAN_QUERY\", \"CAN_MANAGE\"],\n [ResourceType.VOLUME]: [\"READ_VOLUME\", \"WRITE_VOLUME\"],\n [ResourceType.VECTOR_SEARCH_INDEX]: [\"SELECT\"],\n [ResourceType.UC_FUNCTION]: [\"EXECUTE\"],\n [ResourceType.UC_CONNECTION]: [\"USE_CONNECTION\"],\n [ResourceType.DATABASE]: [\"CAN_CONNECT_AND_CREATE\"],\n [ResourceType.POSTGRES]: [\"CAN_CONNECT_AND_CREATE\"],\n [ResourceType.GENIE_SPACE]: [\"CAN_VIEW\", \"CAN_RUN\", \"CAN_EDIT\", \"CAN_MANAGE\"],\n [ResourceType.EXPERIMENT]: [\"CAN_READ\", \"CAN_EDIT\", \"CAN_MANAGE\"],\n [ResourceType.APP]: [\"CAN_USE\"],\n} as const;\n\n/** Set of valid permissions per type (for validation). */\nexport const PERMISSIONS_BY_TYPE: Record<ResourceType, readonly ResourcePermission[]> = PERMISSION_HIERARCHY_BY_TYPE;\n"],"mappings":";;AAIA,IAAY,sDAAL;AACL;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;AA8DF,MAAa,+BAAoF;EAC9F,aAAa,SAAS;EAAC;EAAQ;EAAS;EAAS;EACjD,aAAa,MAAM;EAAC;EAAY;EAAkB;EAAa;EAC/D,aAAa,gBAAgB,CAAC,WAAW,aAAa;EACtD,aAAa,mBAAmB;EAAC;EAAY;EAAa;EAAa;EACvE,aAAa,SAAS,CAAC,eAAe,eAAe;EACrD,aAAa,sBAAsB,CAAC,SAAS;EAC7C,aAAa,cAAc,CAAC,UAAU;EACtC,aAAa,gBAAgB,CAAC,iBAAiB;EAC/C,aAAa,WAAW,CAAC,yBAAyB;EAClD,aAAa,WAAW,CAAC,yBAAyB;EAClD,aAAa,cAAc;EAAC;EAAY;EAAW;EAAY;EAAa;EAC5E,aAAa,aAAa;EAAC;EAAY;EAAY;EAAa;EAChE,aAAa,MAAM,CAAC,UAAU;CAChC;;AAGD,MAAa,sBAA2E"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"types.js","names":[],"sources":["../../src/registry/types.ts"],"sourcesContent":["/**\n * Resource Registry Type System\n *\n * This module defines the type system for the AppKit Resource Registry,\n * which enables plugins to declare their Databricks resource requirements\n * in a machine-readable format.\n *\n * Resource types and permissions are generated from plugin-manifest.schema.json\n * (see types.generated.ts). Hand-written interfaces below define the registry API.\n */\n\n// Re-export generated registry types (enum + const must be value exports for runtime)\nimport {\n type AppPermission,\n type DatabasePermission,\n type ExperimentPermission,\n type GenieSpacePermission,\n type JobPermission,\n PERMISSION_HIERARCHY_BY_TYPE,\n PERMISSIONS_BY_TYPE,\n type ResourcePermission,\n ResourceType,\n type SecretPermission,\n type ServingEndpointPermission,\n type SqlWarehousePermission,\n type UcConnectionPermission,\n type UcFunctionPermission,\n type VectorSearchIndexPermission,\n type VolumePermission,\n} from \"./types.generated\";\n\nexport {\n PERMISSION_HIERARCHY_BY_TYPE,\n PERMISSIONS_BY_TYPE,\n ResourceType,\n type AppPermission,\n type DatabasePermission,\n type ExperimentPermission,\n type GenieSpacePermission,\n type JobPermission,\n type ResourcePermission,\n type SecretPermission,\n type ServingEndpointPermission,\n type SqlWarehousePermission,\n type UcConnectionPermission,\n type UcFunctionPermission,\n type VectorSearchIndexPermission,\n type VolumePermission,\n};\n\n// ============================================================================\n// Hand-written interfaces (not in JSON schema)\n// ============================================================================\n\n/**\n * Defines a single field for a resource. Each field has its own environment variable and optional description.\n * Single-value types use one key (e.g. id); multi-value types (database, secret) use multiple (e.g. instance_name, database_name or scope, key).\n */\nexport interface ResourceFieldEntry {\n /** Environment variable name for this field */\n env
|
|
1
|
+
{"version":3,"file":"types.js","names":[],"sources":["../../src/registry/types.ts"],"sourcesContent":["/**\n * Resource Registry Type System\n *\n * This module defines the type system for the AppKit Resource Registry,\n * which enables plugins to declare their Databricks resource requirements\n * in a machine-readable format.\n *\n * Resource types and permissions are generated from plugin-manifest.schema.json\n * (see types.generated.ts). Hand-written interfaces below define the registry API.\n */\n\n// Re-export generated registry types (enum + const must be value exports for runtime)\nimport {\n type AppPermission,\n type DatabasePermission,\n type ExperimentPermission,\n type GenieSpacePermission,\n type JobPermission,\n PERMISSION_HIERARCHY_BY_TYPE,\n PERMISSIONS_BY_TYPE,\n type ResourcePermission,\n ResourceType,\n type SecretPermission,\n type ServingEndpointPermission,\n type SqlWarehousePermission,\n type UcConnectionPermission,\n type UcFunctionPermission,\n type VectorSearchIndexPermission,\n type VolumePermission,\n} from \"./types.generated\";\n\nexport {\n PERMISSION_HIERARCHY_BY_TYPE,\n PERMISSIONS_BY_TYPE,\n ResourceType,\n type AppPermission,\n type DatabasePermission,\n type ExperimentPermission,\n type GenieSpacePermission,\n type JobPermission,\n type ResourcePermission,\n type SecretPermission,\n type ServingEndpointPermission,\n type SqlWarehousePermission,\n type UcConnectionPermission,\n type UcFunctionPermission,\n type VectorSearchIndexPermission,\n type VolumePermission,\n};\n\n// ============================================================================\n// Hand-written interfaces (not in JSON schema)\n// ============================================================================\n\n/**\n * Defines a single field for a resource. Each field has its own environment variable and optional description.\n * Single-value types use one key (e.g. id); multi-value types (database, secret) use multiple (e.g. instance_name, database_name or scope, key).\n */\nexport interface ResourceFieldEntry {\n /** Environment variable name for this field */\n env?: string;\n /** Human-readable description for this field */\n description?: string;\n /** When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation. */\n bundleIgnore?: boolean;\n /** Example values showing the expected format for this field */\n examples?: string[];\n /** When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time. */\n localOnly?: boolean;\n /** Static value for this field. Used when no prompted or resolved value exists. */\n value?: string;\n /** Named resolver prefixed by resource type (e.g., 'postgres:host'). The CLI resolves this value during the init prompt flow. */\n resolve?: string;\n}\n\n/**\n * Declares a resource requirement for a plugin.\n * Can be defined statically in a manifest or dynamically via getResourceRequirements().\n */\nexport interface ResourceRequirement {\n /** Type of Databricks resource required */\n type: ResourceType;\n\n /** Unique alias for this resource within the plugin (e.g., 'warehouse', 'secrets'). Used for UI/display. */\n alias: string;\n\n /** Stable key for machine use (env naming, composite keys, app.yaml). Required. 
*/\n resourceKey: string;\n\n /** Human-readable description of why this resource is needed */\n description: string;\n\n /** Required permission level for the resource */\n permission: ResourcePermission;\n\n /**\n * Map of field name to env and optional description.\n * Single-value types use one key (e.g. id); multi-value (database, secret) use multiple keys.\n */\n fields: Record<string, ResourceFieldEntry>;\n\n /** Whether this resource is required (true) or optional (false) */\n required: boolean;\n}\n\n/**\n * Internal representation of a resource in the registry.\n * Extends ResourceRequirement with resolution state and plugin ownership.\n */\nexport interface ResourceEntry extends ResourceRequirement {\n /** Plugin(s) that require this resource (comma-separated if multiple) */\n plugin: string;\n\n /** Whether the resource has been resolved (all field env vars set) */\n resolved: boolean;\n\n /** Resolved value per field name. Populated by validate() when all field env vars are set. */\n values?: Record<string, string>;\n\n /**\n * Per-plugin permission tracking.\n * Maps plugin name to the permission it originally requested.\n * Populated when multiple plugins share the same resource.\n */\n permissionSources?: Record<string, ResourcePermission>;\n}\n\n/**\n * Result of validating all registered resources against the environment.\n */\nexport interface ValidationResult {\n /** Whether all required resources are available */\n valid: boolean;\n\n /** List of missing required resources */\n missing: ResourceEntry[];\n\n /** Complete list of all registered resources (required and optional) */\n all: ResourceEntry[];\n}\n\nimport type { JSONSchema7 } from \"json-schema\";\n\n/**\n * Configuration schema definition for plugin config.\n * Re-exported from the standard JSON Schema Draft 7 types.\n *\n * @see {@link https://json-schema.org/draft-07/json-schema-release-notes | JSON Schema Draft 7}\n */\nexport type ConfigSchema = JSONSchema7;\n\n/**\n * Plugin manifest that declares metadata and resource requirements.\n * Attached to plugin classes as a static property.\n */\nexport interface PluginManifest<TName extends string = string> {\n /** Plugin identifier — the single source of truth for the plugin's name */\n name: TName;\n\n /** Human-readable display name for UI/CLI */\n displayName: string;\n\n /** Brief description of what the plugin does */\n description: string;\n\n /**\n * Resource requirements declaration\n */\n resources: {\n /** Resources that must be available for the plugin to function */\n required: Omit<ResourceRequirement, \"required\">[];\n\n /** Resources that enhance functionality but are not mandatory */\n optional: Omit<ResourceRequirement, \"required\">[];\n };\n\n /**\n * Configuration schema for the plugin.\n * Defines the shape and validation rules for plugin config.\n */\n config?: {\n schema: ConfigSchema;\n };\n\n /**\n * When true, excluded from the template plugins manifest during sync.\n */\n hidden?: boolean;\n\n /**\n * Optional metadata for community plugins\n */\n author?: string;\n version?: string;\n repository?: string;\n keywords?: string[];\n license?: string;\n}\n"],"mappings":""}
|
|
@@ -111,6 +111,7 @@
|
|
|
111
111
|
"uc_function",
|
|
112
112
|
"uc_connection",
|
|
113
113
|
"database",
|
|
114
|
+
"postgres",
|
|
114
115
|
"genie_space",
|
|
115
116
|
"experiment",
|
|
116
117
|
"app"
|
|
@@ -162,6 +163,11 @@
|
|
|
162
163
|
"enum": ["CAN_CONNECT_AND_CREATE"],
|
|
163
164
|
"description": "Permission for database resources"
|
|
164
165
|
},
|
|
166
|
+
"postgresPermission": {
|
|
167
|
+
"type": "string",
|
|
168
|
+
"enum": ["CAN_CONNECT_AND_CREATE"],
|
|
169
|
+
"description": "Permission for Postgres resources"
|
|
170
|
+
},
|
|
165
171
|
"genieSpacePermission": {
|
|
166
172
|
"type": "string",
|
|
167
173
|
"enum": ["CAN_VIEW", "CAN_RUN", "CAN_EDIT", "CAN_MANAGE"],
|
|
@@ -179,7 +185,6 @@
|
|
|
179
185
|
},
|
|
180
186
|
"resourceFieldEntry": {
|
|
181
187
|
"type": "object",
|
|
182
|
-
"required": ["env"],
|
|
183
188
|
"properties": {
|
|
184
189
|
"env": {
|
|
185
190
|
"type": "string",
|
|
@@ -190,20 +195,37 @@
|
|
|
190
195
|
"description": {
|
|
191
196
|
"type": "string",
|
|
192
197
|
"description": "Human-readable description for this field"
|
|
198
|
+
},
|
|
199
|
+
"bundleIgnore": {
|
|
200
|
+
"type": "boolean",
|
|
201
|
+
"default": false,
|
|
202
|
+
"description": "When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation."
|
|
203
|
+
},
|
|
204
|
+
"examples": {
|
|
205
|
+
"type": "array",
|
|
206
|
+
"items": { "type": "string" },
|
|
207
|
+
"description": "Example values showing the expected format for this field"
|
|
208
|
+
},
|
|
209
|
+
"localOnly": {
|
|
210
|
+
"type": "boolean",
|
|
211
|
+
"default": false,
|
|
212
|
+
"description": "When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time."
|
|
213
|
+
},
|
|
214
|
+
"value": {
|
|
215
|
+
"type": "string",
|
|
216
|
+
"description": "Static value for this field. Used when no prompted or resolved value exists."
|
|
217
|
+
},
|
|
218
|
+
"resolve": {
|
|
219
|
+
"type": "string",
|
|
220
|
+
"pattern": "^[a-z_]+:[a-zA-Z]+$",
|
|
221
|
+
"description": "Named resolver prefixed by resource type (e.g., 'postgres:host'). The CLI resolves this value during the init prompt flow."
|
|
193
222
|
}
|
|
194
223
|
},
|
|
195
224
|
"additionalProperties": false
|
|
196
225
|
},
|
|
197
226
|
"resourceRequirement": {
|
|
198
227
|
"type": "object",
|
|
199
|
-
"required": [
|
|
200
|
-
"type",
|
|
201
|
-
"alias",
|
|
202
|
-
"resourceKey",
|
|
203
|
-
"description",
|
|
204
|
-
"permission",
|
|
205
|
-
"fields"
|
|
206
|
-
],
|
|
228
|
+
"required": ["type", "alias", "resourceKey", "description", "permission"],
|
|
207
229
|
"properties": {
|
|
208
230
|
"type": {
|
|
209
231
|
"$ref": "#/$defs/resourceType"
|
|
@@ -337,6 +359,17 @@
|
|
|
337
359
|
}
|
|
338
360
|
}
|
|
339
361
|
},
|
|
362
|
+
{
|
|
363
|
+
"if": {
|
|
364
|
+
"properties": { "type": { "const": "postgres" } },
|
|
365
|
+
"required": ["type"]
|
|
366
|
+
},
|
|
367
|
+
"then": {
|
|
368
|
+
"properties": {
|
|
369
|
+
"permission": { "$ref": "#/$defs/postgresPermission" }
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
},
|
|
340
373
|
{
|
|
341
374
|
"if": {
|
|
342
375
|
"properties": { "type": { "const": "genie_space" } },
|
|
@@ -74,9 +74,19 @@ interface PluginManifest<TName extends string = string> {
|
|
|
74
74
|
*/
|
|
75
75
|
interface ResourceFieldEntry {
|
|
76
76
|
/** Environment variable name for this field */
|
|
77
|
-
env
|
|
77
|
+
env?: string;
|
|
78
78
|
/** Human-readable description for this field */
|
|
79
79
|
description?: string;
|
|
80
|
+
/** When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation. */
|
|
81
|
+
bundleIgnore?: boolean;
|
|
82
|
+
/** Example values showing the expected format for this field */
|
|
83
|
+
examples?: string[];
|
|
84
|
+
/** When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time. */
|
|
85
|
+
localOnly?: boolean;
|
|
86
|
+
/** Static value for this field. Used when no prompted or resolved value exists. */
|
|
87
|
+
value?: string;
|
|
88
|
+
/** Named resolver prefixed by resource type (e.g., 'postgres:host'). The CLI resolves this value during the init prompt flow. */
|
|
89
|
+
resolve?: string;
|
|
80
90
|
}
|
|
81
91
|
/**
|
|
82
92
|
* Resource requirement declaration (imported from registry types).
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"plugin.d.ts","names":[],"sources":["../../../../shared/src/plugin.ts"],"mappings":";;;;;UAIiB,UAAA;EACf,IAAA;EAEA,qBAAA;EAEA,KAAA,IAAS,OAAA;EAET,YAAA,CAAa,MAAA,EAAQ,OAAA,CAAQ,MAAA;EAE7B,YAAA,IAAgB,iBAAA;EAEhB,uBAAA,KAA4B,WAAA;EAE5B,OAAA;AAAA;;UAIe,gBAAA;EACf,IAAA;EACA,IAAA;EAAA,CAEC,GAAA;EAMD,SAAA,GAAY,gBAAA;AAAA;AAAA,KAGF,gBAAA;EAGN,MAAA;EACA,OAAA;EACA,IAAA;AAAA;AAAA,KAQM,WAAA;;;;;KAMA,iBAAA,KACN,gBAAA,YACM,UAAA,GAAa,UAAA,UAEvB,MAAA,EAAQ,CAAA,KACL,CAAA;EACH,cAAA,GAAiB,MAAA;EACjB,KAAA,GAAQ,WAAA;EA7BI;;;AAGd;EA+BE,QAAA,EAAU,cAAA;;;;;EAKV,uBAAA,EAAyB,MAAA,EAAQ,CAAA,GAAI,mBAAA;AAAA;;AAvBvC;;;UA8BiB,cAAA;EACf,IAAA,EAAM,KAAA;EACN,WAAA;EACA,WAAA;EACA,SAAA;IACE,QAAA,EAAU,IAAA,CAAK,mBAAA;IACf,QAAA,EAAU,IAAA,CAAK,mBAAA;EAAA;EAEjB,MAAA;IACE,MAAA,EAAQ,WAAA;EAAA;EAEV,cAAA;EACA,MAAA;EACA,MAAA;EACA,OAAA;EACA,UAAA;EACA,QAAA;EACA,OAAA;AAAA;;;;;;;UASe,kBAAA;EA5CE;EA8CjB,GAAA;EA7CQ;EA+CR,WAAA;AAAA;;;;;UAOe,mBAAA;EACf,IAAA;EACA,KAAA;
|
|
1
|
+
{"version":3,"file":"plugin.d.ts","names":[],"sources":["../../../../shared/src/plugin.ts"],"mappings":";;;;;UAIiB,UAAA;EACf,IAAA;EAEA,qBAAA;EAEA,KAAA,IAAS,OAAA;EAET,YAAA,CAAa,MAAA,EAAQ,OAAA,CAAQ,MAAA;EAE7B,YAAA,IAAgB,iBAAA;EAEhB,uBAAA,KAA4B,WAAA;EAE5B,OAAA;AAAA;;UAIe,gBAAA;EACf,IAAA;EACA,IAAA;EAAA,CAEC,GAAA;EAMD,SAAA,GAAY,gBAAA;AAAA;AAAA,KAGF,gBAAA;EAGN,MAAA;EACA,OAAA;EACA,IAAA;AAAA;AAAA,KAQM,WAAA;;;;;KAMA,iBAAA,KACN,gBAAA,YACM,UAAA,GAAa,UAAA,UAEvB,MAAA,EAAQ,CAAA,KACL,CAAA;EACH,cAAA,GAAiB,MAAA;EACjB,KAAA,GAAQ,WAAA;EA7BI;;;AAGd;EA+BE,QAAA,EAAU,cAAA;;;;;EAKV,uBAAA,EAAyB,MAAA,EAAQ,CAAA,GAAI,mBAAA;AAAA;;AAvBvC;;;UA8BiB,cAAA;EACf,IAAA,EAAM,KAAA;EACN,WAAA;EACA,WAAA;EACA,SAAA;IACE,QAAA,EAAU,IAAA,CAAK,mBAAA;IACf,QAAA,EAAU,IAAA,CAAK,mBAAA;EAAA;EAEjB,MAAA;IACE,MAAA,EAAQ,WAAA;EAAA;EAEV,cAAA;EACA,MAAA;EACA,MAAA;EACA,OAAA;EACA,UAAA;EACA,QAAA;EACA,OAAA;AAAA;;;;;;;UASe,kBAAA;EA5CE;EA8CjB,GAAA;EA7CQ;EA+CR,WAAA;EA1CU;EA4CV,YAAA;EAvCiC;EAyCjC,QAAA;EAzCqC;EA2CrC,SAAA;EA3CwD;EA6CxD,KAAA;EAtC6B;EAwC7B,OAAA;AAAA;;;;;UAOe,mBAAA;EACf,IAAA;EACA,KAAA;EAjD8B;EAmD9B,WAAA;EACA,WAAA;EACA,UAAA;EAlDA;;;;EAuDA,MAAA,EAAQ,MAAA,SAAe,kBAAA;EACvB,QAAA;AAAA;;;;;;KAoCU,aAAA,WAAwB,UAAA,IAClC,CAAA,sCAAqC,CAAA,GAAI,MAAA;;;;AAjD3C;;;;KA0DY,UAAA,QAAkB,GAAA,cAAgB,IAAA,mBAC1C,GAAA,GACA,GAAA;EA1DF;;;;;EAgEI,MAAA,GAAS,GAAA,EAAK,WAAA,KAAgB,GAAA;AAAA;;;;AAlBpC;;;;;KA6BY,SAAA,oBACS,UAAA,CAAW,iBAAA,gCAExB,CAAA,YAAa,CAAA,WAAY,UAAA,CAC7B,aAAA,CAAc,YAAA,CAAa,CAAA;AAAA,KAInB,UAAA;EAAwB,MAAA,EAAQ,CAAA;EAAG,MAAA,EAAQ,CAAA;EAAG,IAAA,EAAM,CAAA;AAAA;AAAA,KACpD,QAAA,4BACV,MAAA,GAAS,CAAA,KACN,UAAA,CAAW,CAAA,EAAG,CAAA,EAAG,CAAA;;KAGV,UAAA,GAAa,OAAA,CAAQ,MAAA;AAAA,KACrB,YAAA,GAAe,OAAA,CAAQ,QAAA;AAAA,KACvB,WAAA,GAAc,OAAA,CAAQ,OAAA;AAAA,KAEtB,UAAA;AAAA,KAEA,WAAA;EAtCR,+DAwCF,IAAA;EACA,MAAA,EAAQ,UAAA;EACR,IAAA;EACA,OAAA,GAAU,GAAA,EAAK,WAAA,EAAa,GAAA,EAAK,YAAA,KAAiB,OAAA,QApCb;EAsCrC,eAAA;AAAA;;KAIU,iBAAA,GAAoB,MAAA"}
|
|
@@ -4,6 +4,17 @@ Defines a single field for a resource. Each field has its own environment variab
|
|
|
4
4
|
|
|
5
5
|
## Properties[](#properties "Direct link to Properties")
|
|
6
6
|
|
|
7
|
+
### bundleIgnore?[](#bundleignore "Direct link to bundleIgnore?")
|
|
8
|
+
|
|
9
|
+
```ts
|
|
10
|
+
optional bundleIgnore: boolean;
|
|
11
|
+
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
When true, this field is excluded from Databricks bundle configuration (databricks.yml) generation.
|
|
15
|
+
|
|
16
|
+
***
|
|
17
|
+
|
|
7
18
|
### description?[](#description "Direct link to description?")
|
|
8
19
|
|
|
9
20
|
```ts
|
|
@@ -15,11 +26,55 @@ Human-readable description for this field
|
|
|
15
26
|
|
|
16
27
|
***
|
|
17
28
|
|
|
18
|
-
### env[](#env "Direct link to env")
|
|
29
|
+
### env?[](#env "Direct link to env?")
|
|
19
30
|
|
|
20
31
|
```ts
|
|
21
|
-
env: string;
|
|
32
|
+
optional env: string;
|
|
22
33
|
|
|
23
34
|
```
|
|
24
35
|
|
|
25
36
|
Environment variable name for this field
|
|
37
|
+
|
|
38
|
+
***
|
|
39
|
+
|
|
40
|
+
### examples?[](#examples "Direct link to examples?")
|
|
41
|
+
|
|
42
|
+
```ts
|
|
43
|
+
optional examples: string[];
|
|
44
|
+
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
Example values showing the expected format for this field
|
|
48
|
+
|
|
49
|
+
***
|
|
50
|
+
|
|
51
|
+
### localOnly?[](#localonly "Direct link to localOnly?")
|
|
52
|
+
|
|
53
|
+
```ts
|
|
54
|
+
optional localOnly: boolean;
|
|
55
|
+
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
When true, this field is only generated for local .env files. The Databricks Apps platform auto-injects it at deploy time.
|
|
59
|
+
|
|
60
|
+
***
|
|
61
|
+
|
|
62
|
+
### resolve?[](#resolve "Direct link to resolve?")
|
|
63
|
+
|
|
64
|
+
```ts
|
|
65
|
+
optional resolve: string;
|
|
66
|
+
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
Named resolver prefixed by resource type (e.g., 'postgres<!-- -->:host<!-- -->'). The CLI resolves this value during the init prompt flow.
|
|
70
|
+
|
|
71
|
+
***
|
|
72
|
+
|
|
73
|
+
### value?[](#value "Direct link to value?")
|
|
74
|
+
|
|
75
|
+
```ts
|
|
76
|
+
optional value: string;
|
|
77
|
+
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
Static value for this field. Used when no prompted or resolved value exists.
|
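The optional `ResourceFieldEntry` properties documented above combine per field. As an illustrative, non-normative example (the import path and all values are hypothetical; only the shape follows the interface in this release), a field map might look like:

```ts
// Illustrative field map using the new optional ResourceFieldEntry properties.
// The import path and every value here are hypothetical placeholders.
import type { ResourceFieldEntry } from "@databricks/appkit";

const fields: Record<string, ResourceFieldEntry> = {
  host: {
    env: "PGHOST",
    description: "Lakebase Postgres host",
    examples: ["my-instance.example.databricks.com"],
    resolve: "postgres:host", // named resolver; the CLI fills this in during the init prompt flow
  },
  database: {
    env: "PGDATABASE",
    value: "databricks_postgres", // static value used when nothing is prompted or resolved
  },
  client_id: {
    env: "DATABRICKS_CLIENT_ID",
    localOnly: true, // only written to local .env; Databricks Apps injects it at deploy time
    bundleIgnore: true, // excluded from generated databricks.yml
  },
};
```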
package/docs/plugins/lakebase.md
CHANGED
|
@@ -1,9 +1,5 @@
|
|
|
1
1
|
# Lakebase plugin
|
|
2
2
|
|
|
3
|
-
info
|
|
4
|
-
|
|
5
|
-
The Lakebase plugin currently requires a one-time manual setup to connect your Databricks App with your Lakebase database. An automated setup process is planned for an upcoming release.
|
|
6
|
-
|
|
7
3
|
Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
|
|
8
4
|
|
|
9
5
|
**Key features:**
|
|
@@ -12,90 +8,24 @@ Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with a
|
|
|
12
8
|
* Automatic OAuth token refresh (1-hour tokens, 2-minute refresh buffer)
|
|
13
9
|
* Token caching to minimize API calls
|
|
14
10
|
* Built-in OpenTelemetry instrumentation (query duration, pool connections, token refresh)
|
|
11
|
+
* AppKit logger configured by default for query and connection events
|
|
15
12
|
|
|
16
|
-
##
|
|
17
|
-
|
|
18
|
-
Before using the plugin, you need to connect your Databricks App's service principal to your Lakebase database.
|
|
19
|
-
|
|
20
|
-
### 1. Find your app's service principal[](#1-find-your-apps-service-principal "Direct link to 1. Find your app's service principal")
|
|
21
|
-
|
|
22
|
-
Create a Databricks App from the UI (`Compute > Apps > Create App > Create a custom app`). Navigate to the **Environment** tab and note the `DATABRICKS_CLIENT_ID` value — this is the service principal that will connect to your Lakebase database.
|
|
23
|
-
|
|
24
|
-

|
|
25
|
-
|
|
26
|
-
### 2. Find your Project ID and Branch ID[](#2-find-your-project-id-and-branch-id "Direct link to 2. Find your Project ID and Branch ID")
|
|
13
|
+
## Getting started with the Lakebase[](#getting-started-with-the-lakebase "Direct link to Getting started with the Lakebase")
|
|
27
14
|
|
|
28
|
-
|
|
15
|
+
The easiest way to get started with the Lakebase plugin is to use the Databricks CLI to create a new Databricks app with AppKit installed and the Lakebase plugin.
|
|
29
16
|
|
|
30
|
-
|
|
17
|
+
### Prerequisites[](#prerequisites "Direct link to Prerequisites")
|
|
31
18
|
|
|
32
|
-
|
|
19
|
+
* [Node.js](https://nodejs.org) v22+ environment with `npm`
|
|
20
|
+
* Databricks CLI (v0.287.0 or higher): install and configure it according to the [official tutorial](https://docs.databricks.com/aws/en/dev-tools/cli/tutorial).
|
|
21
|
+
* A new Databricks app with AppKit installed. See [Bootstrap a new Databricks app](./docs.md#quick-start-options) for more details.
|
|
33
22
|
|
|
34
|
-
|
|
23
|
+
### Steps[](#steps "Direct link to Steps")
|
|
35
24
|
|
|
36
|
-
|
|
37
|
-
databricks
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
Example output:
|
|
42
|
-
|
|
43
|
-
```json
|
|
44
|
-
[
|
|
45
|
-
{
|
|
46
|
-
"create_time": "2026-02-19T12:13:02Z",
|
|
47
|
-
"name": "projects/{project-id}/branches/{branch-id}/endpoints/primary"
|
|
48
|
-
}
|
|
49
|
-
]
|
|
50
|
-
|
|
51
|
-
```
|
|
52
|
-
|
|
53
|
-
### 4. Get connection parameters[](#4-get-connection-parameters "Direct link to 4. Get connection parameters")
|
|
54
|
-
|
|
55
|
-
Click the **Connect** button on your Lakebase branch and copy the `PGHOST` and `PGDATABASE` values for later.
|
|
56
|
-
|
|
57
|
-

|
|
58
|
-
|
|
59
|
-
### 5. Grant access to the service principal[](#5-grant-access-to-the-service-principal "Direct link to 5. Grant access to the service principal")
|
|
60
|
-
|
|
61
|
-
Navigate to the **SQL Editor** tab on your Lakebase branch. Run the following SQL against the `databricks_postgres` database, replacing the service principal ID in the `DECLARE` block with the `DATABRICKS_CLIENT_ID` value from step 1:
|
|
62
|
-
|
|
63
|
-
```sql
|
|
64
|
-
CREATE EXTENSION IF NOT EXISTS databricks_auth;
|
|
65
|
-
|
|
66
|
-
DO $$
|
|
67
|
-
DECLARE
|
|
68
|
-
sp TEXT := 'your-service-principal-id'; -- Replace with DATABRICKS_CLIENT_ID from Step 1
|
|
69
|
-
BEGIN
|
|
70
|
-
-- Create service principal role
|
|
71
|
-
PERFORM databricks_create_role(sp, 'SERVICE_PRINCIPAL');
|
|
72
|
-
|
|
73
|
-
-- Connection and schema access
|
|
74
|
-
EXECUTE format('GRANT CONNECT ON DATABASE "databricks_postgres" TO %I', sp);
|
|
75
|
-
EXECUTE format('GRANT ALL ON SCHEMA public TO %I', sp);
|
|
76
|
-
|
|
77
|
-
-- Privileges on existing objects
|
|
78
|
-
EXECUTE format('GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO %I', sp);
|
|
79
|
-
EXECUTE format('GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO %I', sp);
|
|
80
|
-
EXECUTE format('GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public TO %I', sp);
|
|
81
|
-
EXECUTE format('GRANT ALL PRIVILEGES ON ALL PROCEDURES IN SCHEMA public TO %I', sp);
|
|
82
|
-
|
|
83
|
-
-- Default privileges on future objects you create
|
|
84
|
-
EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO %I', sp);
|
|
85
|
-
EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO %I', sp);
|
|
86
|
-
EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON FUNCTIONS TO %I', sp);
|
|
87
|
-
EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON ROUTINES TO %I', sp);
|
|
88
|
-
END $$;
|
|
89
|
-
|
|
90
|
-
```
|
|
91
|
-
|
|
92
|
-

|
|
93
|
-
|
|
94
|
-
### 6. Verify the role[](#6-verify-the-role "Direct link to 6. Verify the role")
|
|
95
|
-
|
|
96
|
-
Navigate to the **Roles & Databases** tab and confirm the role is visible. You may need to fully refresh the page.
|
|
97
|
-
|
|
98
|
-

|
|
25
|
+
1. Firstly, create a new Lakebase Postgres Autoscaling project according to the [Get started documentation](https://docs.databricks.com/aws/en/oltp/projects/get-started).
|
|
26
|
+
2. To add the Lakebase plugin to your project, run the `databricks apps init` command and interactively select the **Lakebase** plugin. The CLI will guide you through picking a Lakebase project, branch, and database.
|
|
27
|
+
<!-- -->
|
|
28
|
+
* When asked, select **Yes** to deploy the app to Databricks Apps right after its creation.
|
|
99
29
|
|
|
100
30
|
## Basic usage[](#basic-usage "Direct link to Basic usage")
|
|
101
31
|
|
|
@@ -108,34 +38,6 @@ await createApp({
|
|
|
108
38
|
|
|
109
39
|
```
|
|
110
40
|
|
|
111
|
-
## Environment variables[](#environment-variables "Direct link to Environment variables")
|
|
112
|
-
|
|
113
|
-
The required environment variables:
|
|
114
|
-
|
|
115
|
-
| Variable | Description |
|
|
116
|
-
| ------------------- | ----------------------------------------------------------------------- |
|
|
117
|
-
| `PGHOST` | Lakebase host |
|
|
118
|
-
| `PGDATABASE` | Database name |
|
|
119
|
-
| `LAKEBASE_ENDPOINT` | Endpoint resource path (e.g. `projects/.../branches/.../endpoints/...`) |
|
|
120
|
-
| `PGSSLMODE` | TLS mode — set to `require` |
|
|
121
|
-
|
|
122
|
-
Ensure that those environment variables are set both for local development (`.env` file) and for deployment (`app.yaml` file):
|
|
123
|
-
|
|
124
|
-
```yaml
|
|
125
|
-
env:
|
|
126
|
-
- name: LAKEBASE_ENDPOINT
|
|
127
|
-
value: projects/{project-id}/branches/{branch-id}/endpoints/primary
|
|
128
|
-
- name: PGHOST
|
|
129
|
-
value: {your-lakebase-host}
|
|
130
|
-
- name: PGDATABASE
|
|
131
|
-
value: databricks_postgres
|
|
132
|
-
- name: PGSSLMODE
|
|
133
|
-
value: require
|
|
134
|
-
|
|
135
|
-
```
|
|
136
|
-
|
|
137
|
-
For the full configuration reference (SSL, pool size, timeouts, logging, ORM examples), see the [`@databricks/lakebase` README](https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md).
|
|
138
|
-
|
|
139
41
|
## Accessing the pool[](#accessing-the-pool "Direct link to Accessing the pool")
|
|
140
42
|
|
|
141
43
|
After initialization, access Lakebase through the `AppKit.lakebase` object:
|
|
@@ -145,9 +47,17 @@ const AppKit = await createApp({
|
|
|
145
47
|
plugins: [server(), lakebase()],
|
|
146
48
|
});
|
|
147
49
|
|
|
148
|
-
|
|
50
|
+
await AppKit.lakebase.query(`CREATE SCHEMA IF NOT EXISTS app`);
|
|
51
|
+
|
|
52
|
+
await AppKit.lakebase.query(`CREATE TABLE IF NOT EXISTS app.orders (
|
|
53
|
+
id SERIAL PRIMARY KEY,
|
|
54
|
+
user_id VARCHAR(255) NOT NULL,
|
|
55
|
+
amount DECIMAL(10, 2) NOT NULL,
|
|
56
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
57
|
+
)`);
|
|
58
|
+
|
|
149
59
|
const result = await AppKit.lakebase.query(
|
|
150
|
-
"SELECT * FROM orders WHERE user_id = $1",
|
|
60
|
+
"SELECT * FROM app.orders WHERE user_id = $1",
|
|
151
61
|
[userId],
|
|
152
62
|
);
|
|
153
63
|
|
|
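The hunk above moves the sample queries to the new `app` schema. For readers following along, here is a hedged companion sketch that writes a row using the same parameterized `AppKit.lakebase.query(text, params)` call shown in the added lines; the column values are illustrative.

```ts
// Hedged sketch: a parameterized INSERT against the app.orders table created in the
// added example above. Uses only the query(text, params) API already shown; the
// amount value is illustrative.
await AppKit.lakebase.query(
  "INSERT INTO app.orders (user_id, amount) VALUES ($1, $2)",
  [userId, 19.99],
);
```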
@@ -160,7 +70,33 @@ const pgConfig = AppKit.lakebase.getPgConfig(); // pg.PoolConfig

 ```

-## Configuration
+## Configuration[](#configuration "Direct link to Configuration")
+
+### Environment variables[](#environment-variables "Direct link to Environment variables")
+
+The required environment variables are:
+
+| Variable | Description |
+| ------------------- | ---------------------------------------------------------------------------------------------------- |
+| `LAKEBASE_ENDPOINT` | Endpoint resource path (e.g. `projects/.../branches/.../endpoints/...`) |
+| `PGHOST` | Lakebase host (auto-injected in production by the `postgres` Databricks Apps resource) |
+| `PGDATABASE` | Database name (auto-injected in production by the `postgres` Databricks Apps resource) |
+| `PGSSLMODE` | TLS mode - set to `require` (auto-injected in production by the `postgres` Databricks Apps resource) |
+
+When deployed to Databricks Apps with a `postgres` database resource configured, `PGHOST`, `PGDATABASE`, `PGSSLMODE`, `PGUSER`, `PGPORT`, and `PGAPPNAME` are automatically injected by the platform. Only `LAKEBASE_ENDPOINT` must be set explicitly:
+
+```yaml
+env:
+  - name: LAKEBASE_ENDPOINT
+    valueFrom: postgres
+
+```
+
+For local development, the `.env` file is automatically generated by `databricks apps init` with the correct values for your Lakebase project.
+
+For the full configuration reference (SSL, pool size, timeouts, logging, ORM examples), see the [`@databricks/lakebase` README](https://github.com/databricks/appkit/blob/main/packages/lakebase/README.md).
+
+### Pool configuration[](#pool-configuration "Direct link to Pool configuration")

 Pass a `pool` object to override any defaults:

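The pool override example itself (old lines 167-177 of the doc) is unchanged and therefore not shown between these hunks. As a rough sketch of what "pass a `pool` object" can look like, assuming the `pool` options flow through to `pg.PoolConfig` (which the `getPgConfig(): pg.PoolConfig` accessor above suggests), consider:

```ts
// Rough sketch only. Assumes lakebase({ pool }) accepts pg.PoolConfig-style options,
// consistent with getPgConfig() returning pg.PoolConfig. Option names and values are
// illustrative, and the import specifier is an assumption as well.
import { createApp, lakebase, server } from "@databricks/appkit";

await createApp({
  plugins: [
    server(),
    lakebase({
      pool: {
        max: 10,                        // cap concurrent connections
        idleTimeoutMillis: 30_000,      // recycle idle clients after 30s
        connectionTimeoutMillis: 5_000, // fail fast when the pool is exhausted
      },
    }),
  ],
});
```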
@@ -178,3 +114,74 @@ await createApp({
 });

 ```
+
+## Database Permissions[](#database-permissions "Direct link to Database Permissions")
+
+When you create the app with the Lakebase resource using the [Getting started](#getting-started-with-the-lakebase) guide, the Service Principal is automatically granted `CONNECT_AND_CREATE` permission on the `postgres` resource. This lets the Service Principal connect to the database and create new objects, but **not access any existing schemas or tables.**
+
+### Local development[](#local-development "Direct link to Local development")
+
+To develop locally against a deployed Lakebase database:
+
+1. **Deploy the app first.** The Service Principal creates the database schema and tables on first deploy. Apps generated from `databricks apps init` handle this automatically - they check if tables exist on startup and skip creation if they do.
+
+2. **Grant `databricks_superuser` via the Lakebase UI:**
+
+   1. Open the Lakebase Autoscaling UI and navigate to your project's **Branch Overview** page.
+   2. Click **Add role** (or **Edit role** if your OAuth role already exists).
+   3. Select your Databricks identity as the principal and check the **`databricks_superuser`** system role.
+
+3. **Run locally** - your Databricks user identity (email) is used for OAuth authentication. The `databricks_superuser` role gives full **DML access** (read/write data) but **not DDL** (creating schemas or tables) - that's why deploying first matters (see note below).
+
+For other users, use the same **Add role** flow in the Lakebase UI to create an OAuth role with `databricks_superuser` for each user.
+
+tip
+
+[Postgres password authentication](https://docs.databricks.com/aws/en/oltp/projects/authentication#overview) is a simpler alternative that avoids OAuth role permission complexity. However, it requires you to set up a password for the user in the **Branch Overview** page in the Lakebase Autoscaling UI.
+
+Why deploy first?
+
+When the app is deployed, the Service Principal creates schemas and tables and becomes their owner. A `databricks_superuser` has full **DML access** (SELECT, INSERT, UPDATE, DELETE) to these objects, but **cannot run DDL** (CREATE SCHEMA, CREATE TABLE) on schemas owned by the Service Principal. Deploying first ensures all objects exist before local development begins.
+
+### Fine-grained permissions[](#fine-grained-permissions "Direct link to Fine-grained permissions")
+
+For most use cases, `databricks_superuser` is sufficient. If you need schema-level grants instead, refer to the official documentation:
+
+* [Manage database permissions](https://docs.databricks.com/aws/en/oltp/projects/manage-roles-permissions)
+* [Postgres roles](https://docs.databricks.com/aws/en/oltp/projects/postgres-roles)
+
+SQL script for fine-grained grants
+
+Deploy and run the app at least once before executing these grants so the Service Principal initializes the database schema first.
+
+Replace `subject` with the user email and `schema` with your schema name:
+
+```sql
+CREATE EXTENSION IF NOT EXISTS databricks_auth;
+
+DO $$
+DECLARE
+  subject TEXT := 'your-subject'; -- User email like name@databricks.com
+  schema TEXT := 'your_schema'; -- Replace 'your_schema' with your schema name
+BEGIN
+  -- Create OAuth role for the Databricks identity
+  PERFORM databricks_create_role(subject, 'USER');
+
+  -- Connection and schema access
+  EXECUTE format('GRANT CONNECT ON DATABASE "databricks_postgres" TO %I', subject);
+  EXECUTE format('GRANT ALL ON SCHEMA %s TO %I', schema, subject);
+
+  -- Privileges on existing objects
+  EXECUTE format('GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA %s TO %I', schema, subject);
+  EXECUTE format('GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA %s TO %I', schema, subject);
+  EXECUTE format('GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA %s TO %I', schema, subject);
+  EXECUTE format('GRANT ALL PRIVILEGES ON ALL PROCEDURES IN SCHEMA %s TO %I', schema, subject);
+
+  -- Default privileges on future objects
+  EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %s GRANT ALL ON TABLES TO %I', schema, subject);
+  EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %s GRANT ALL ON SEQUENCES TO %I', schema, subject);
+  EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %s GRANT ALL ON FUNCTIONS TO %I', schema, subject);
+  EXECUTE format('ALTER DEFAULT PRIVILEGES IN SCHEMA %s GRANT ALL ON ROUTINES TO %I', schema, subject);
+END $$;
+
+```
package/llms.txt
CHANGED
@@ -46,7 +46,7 @@ npx @databricks/appkit docs <query>
 - [Execution context](./docs/plugins/execution-context.md): AppKit manages Databricks authentication via two contexts:
 - [Files plugin](./docs/plugins/files.md): File operations against Databricks Unity Catalog Volumes. Supports listing, reading, downloading, uploading, deleting, and previewing files with built-in caching, retry, and timeout handling via the execution interceptor pipeline.
 - [Genie plugin](./docs/plugins/genie.md): Integrates Databricks AI/BI Genie spaces into your AppKit application, enabling natural language data queries via a conversational interface.
-- [Lakebase plugin](./docs/plugins/lakebase.md):
+- [Lakebase plugin](./docs/plugins/lakebase.md): Provides a PostgreSQL connection pool for Databricks Lakebase Autoscaling with automatic OAuth token refresh.
 - [Plugin management](./docs/plugins/plugin-management.md): AppKit includes a CLI for managing plugins. All commands are available under npx @databricks/appkit plugin.
 - [Server plugin](./docs/plugins/server.md): Provides HTTP server capabilities with development and production modes.
