databricks-tellr 0.1.15 (tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks_tellr-0.1.15/PKG-INFO +30 -0
- databricks_tellr-0.1.15/README.md +20 -0
- databricks_tellr-0.1.15/databricks_tellr/__init__.py +5 -0
- databricks_tellr-0.1.15/databricks_tellr/_templates/app.yaml.template +23 -0
- databricks_tellr-0.1.15/databricks_tellr/_templates/requirements.txt.template +2 -0
- databricks_tellr-0.1.15/databricks_tellr/deploy.py +868 -0
- databricks_tellr-0.1.15/databricks_tellr.egg-info/PKG-INFO +30 -0
- databricks_tellr-0.1.15/databricks_tellr.egg-info/SOURCES.txt +11 -0
- databricks_tellr-0.1.15/databricks_tellr.egg-info/dependency_links.txt +1 -0
- databricks_tellr-0.1.15/databricks_tellr.egg-info/requires.txt +3 -0
- databricks_tellr-0.1.15/databricks_tellr.egg-info/top_level.txt +1 -0
- databricks_tellr-0.1.15/pyproject.toml +22 -0
- databricks_tellr-0.1.15/setup.cfg +4 -0

databricks_tellr-0.1.15/PKG-INFO
@@ -0,0 +1,30 @@
Metadata-Version: 2.4
Name: databricks-tellr
Version: 0.1.15
Summary: Tellr deployment tooling for Databricks Apps
Requires-Python: >=3.10
Description-Content-Type: text/markdown
Requires-Dist: databricks-sdk>=0.20.0
Requires-Dist: psycopg2-binary>=2.9.0
Requires-Dist: pyyaml>=6.0.0

# databricks-tellr

Python deployment tooling for Tellr on Databricks Apps.

## Usage

```python
!pip install --upgrade databricks-sdk==0.73.0

import databricks_tellr as tellr

result = tellr.setup(
    lakebase_name="ai-slide-generator-db-dev",
    schema_name="app_data_dev",
    app_name="ai-slide-generator-dev",
    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
)

print(result["url"])
```

databricks_tellr-0.1.15/README.md
@@ -0,0 +1,20 @@
# databricks-tellr

Python deployment tooling for Tellr on Databricks Apps.

## Usage

```python
!pip install --upgrade databricks-sdk==0.73.0

import databricks_tellr as tellr

result = tellr.setup(
    lakebase_name="ai-slide-generator-db-dev",
    schema_name="app_data_dev",
    app_name="ai-slide-generator-dev",
    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
)

print(result["url"])
```

databricks_tellr-0.1.15/databricks_tellr/_templates/app.yaml.template
@@ -0,0 +1,23 @@
# Databricks App Configuration
name: "tellr"
description: "Tellr - AI Slide Generator"

command:
  - "sh"
  - "-c"
  - |
    pip install --upgrade --no-cache-dir -r requirements.txt && \
    python -c "from databricks_tellr_app.run import init_database; init_database()" && \
    python -m databricks_tellr_app.run

env:
  - name: ENVIRONMENT
    value: "production"
  - name: LAKEBASE_INSTANCE
    value: "${LAKEBASE_INSTANCE}"
  - name: LAKEBASE_SCHEMA
    value: "${LAKEBASE_SCHEMA}"
  - name: DATABRICKS_HOST
    valueFrom: "system.databricks_host"
  - name: DATABRICKS_TOKEN
    valueFrom: "system.databricks_token"
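
The `${LAKEBASE_INSTANCE}` and `${LAKEBASE_SCHEMA}` placeholders above are filled in at deploy time by the `_write_app_yaml` helper in `deploy.py` (next hunk), which renders this template with `string.Template` before uploading it to the workspace. A minimal sketch of that substitution step, using hypothetical values for the instance and schema names:

```python
from string import Template

# Abridged template text, as packaged in _templates/app.yaml.template.
template_text = 'value: "${LAKEBASE_INSTANCE}"\nvalue: "${LAKEBASE_SCHEMA}"\n'

# _write_app_yaml substitutes the deploy-time arguments into the template;
# the names below are illustrative only.
rendered = Template(template_text).substitute(
    LAKEBASE_INSTANCE="ai-slide-generator-db-dev",
    LAKEBASE_SCHEMA="app_data_dev",
)
print(rendered)
```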

databricks_tellr-0.1.15/databricks_tellr/deploy.py
@@ -0,0 +1,868 @@
"""Deployment orchestration for Tellr on Databricks Apps.

This module provides the main create/update/delete functions for deploying
the Tellr AI slide generator to Databricks Apps from a notebook.
"""

from __future__ import annotations

import logging
import os
import shutil
import uuid
from contextlib import contextmanager
from importlib import metadata, resources
from pathlib import Path
from string import Template
from typing import Any, Iterator, Optional

import yaml
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import (
    App,
    AppDeployment,
    AppResource,
    AppResourceDatabase,
    AppResourceDatabaseDatabasePermission,
    ComputeSize,
)
from databricks.sdk.service.database import DatabaseInstance
from databricks.sdk.service.workspace import ImportFormat

logger = logging.getLogger(__name__)


class DeploymentError(Exception):
    """Raised when deployment fails."""

    pass


# -----------------------------------------------------------------------------
# WorkspaceClient Factory
# -----------------------------------------------------------------------------


def _get_workspace_client(
    client: WorkspaceClient | None = None,
    profile: str | None = None,
) -> WorkspaceClient:
    """Get WorkspaceClient with priority: external > profile > env vars.

    Args:
        client: Externally created WorkspaceClient (highest priority)
        profile: Profile name from .databrickscfg file

    Returns:
        WorkspaceClient configured with the appropriate authentication
    """
    if client is not None:
        return client
    if profile:
        return WorkspaceClient(profile=profile)
    return WorkspaceClient()


# -----------------------------------------------------------------------------
# Public API
# -----------------------------------------------------------------------------


def create(
    lakebase_name: str | None = None,
    schema_name: str | None = None,
    app_name: str | None = None,
    app_file_workspace_path: str | None = None,
    lakebase_compute: str = "CU_1",
    app_compute: str = "MEDIUM",
    app_version: Optional[str] = None,
    description: str = "Tellr AI Slide Generator",
    client: WorkspaceClient | None = None,
    profile: str | None = None,
    config_yaml_path: str | None = None,
) -> dict[str, Any]:
    """Deploy Tellr to Databricks Apps.

    This function creates all necessary infrastructure and deploys the app:
    1. Creates/gets Lakebase database instance
    2. Generates requirements.txt with pinned app version
    3. Generates app.yaml with environment variables
    4. Uploads files to workspace
    5. Creates Databricks App with database resource
    6. Sets up database schema with permissions

    Authentication priority:
    1. External WorkspaceClient passed via `client` parameter
    2. Profile name from .databrickscfg via `profile` parameter
    3. Environment variables (DATABRICKS_HOST, DATABRICKS_TOKEN, etc.)

    Args:
        lakebase_name: Name for the Lakebase database instance
        schema_name: PostgreSQL schema name for app tables
        app_name: Name for the Databricks App
        app_file_workspace_path: Workspace path to upload app files
        lakebase_compute: Lakebase capacity (CU_1, CU_2, CU_4, CU_8)
        app_compute: App compute size (MEDIUM, LARGE, LIQUID)
        app_version: Specific databricks-tellr-app version (default: latest)
        description: App description
        client: External WorkspaceClient (optional)
        profile: Databricks CLI profile name (optional)
        config_yaml_path: Path to deployment config YAML (mutually exclusive with other args)

    Returns:
        Dictionary with deployment info:
        - url: App URL
        - app_name: Created app name
        - lakebase_name: Database instance name
        - schema_name: Schema name
        - status: "created"

    Raises:
        DeploymentError: If deployment fails
        ValueError: If required arguments are missing or config_yaml_path used with other args
    """
    ws = _get_workspace_client(client, profile)

    # Handle YAML config loading
    if config_yaml_path:
        if any([lakebase_name, schema_name, app_name, app_file_workspace_path]):
            raise ValueError("config_yaml_path cannot be used with other arguments")
        config = _load_deployment_config(config_yaml_path)
        lakebase_name = config.get("lakebase_name")
        schema_name = config.get("schema_name")
        app_name = config.get("app_name")
        app_file_workspace_path = config.get("app_file_workspace_path")
        lakebase_compute = config.get("lakebase_compute", lakebase_compute)
        app_compute = config.get("app_compute", app_compute)

    # Validate required arguments
    if not all([lakebase_name, schema_name, app_name, app_file_workspace_path]):
        raise ValueError(
            "lakebase_name, schema_name, app_name, and app_file_workspace_path are required"
        )

    print("Deploying Tellr to Databricks Apps...")
    print(f" App name: {app_name}")
    print(f" Workspace path: {app_file_workspace_path}")
    print(f" Lakebase: {lakebase_name} (capacity: {lakebase_compute})")
    print(f" Schema: {schema_name}")
    print()

    try:
        # Step 1: Create/get Lakebase instance
        print("Setting up Lakebase database...")
        lakebase_result = _get_or_create_lakebase(ws, lakebase_name, lakebase_compute)
        print(f" Lakebase: {lakebase_result['name']} ({lakebase_result['status']})")
        print()

        # Step 2: Generate and upload files
        print("Preparing deployment files...")
        with _staging_dir() as staging:
            _write_requirements(staging, app_version)
            print(" Generated requirements.txt")

            _write_app_yaml(staging, lakebase_name, schema_name)
            print(" Generated app.yaml")

            print(f"Uploading to: {app_file_workspace_path}")
            _upload_files(ws, staging, app_file_workspace_path)
            print(" Files uploaded")
            print()

        # Step 3: Create app
        print(f"Creating Databricks App: {app_name}")
        app = _create_app(
            ws,
            app_name=app_name,
            description=description,
            workspace_path=app_file_workspace_path,
            compute_size=app_compute,
            lakebase_name=lakebase_name,
        )
        print(" App created")
        if app.url:
            print(f" URL: {app.url}")
        print()

        # Step 4: Set up database schema
        print("Setting up database schema...")
        _setup_database_schema(ws, app, lakebase_name, schema_name)
        print(f" Schema '{schema_name}' configured")
        print()

        print("Deployment complete!")
        return {
            "url": app.url,
            "app_name": app_name,
            "lakebase_name": lakebase_name,
            "schema_name": schema_name,
            "status": "created",
        }

    except Exception as e:
        raise DeploymentError(f"Deployment failed: {e}") from e


def update(
    app_name: str,
    app_file_workspace_path: str,
    lakebase_name: str,
    schema_name: str,
    app_version: Optional[str] = None,
    reset_database: bool = False,
    client: WorkspaceClient | None = None,
    profile: str | None = None,
) -> dict[str, Any]:
    """Deploy a new version of an existing Tellr app.

    Updates the app files and triggers a new deployment.

    Args:
        app_name: Name of the existing Databricks App
        app_file_workspace_path: Workspace path with app files
        lakebase_name: Lakebase instance name
        schema_name: Schema name
        app_version: Specific databricks-tellr-app version (default: latest)
        reset_database: If True, drop and recreate the schema (tables recreated on app startup)
        client: External WorkspaceClient (optional)
        profile: Databricks CLI profile name (optional)

    Returns:
        Dictionary with deployment info

    Raises:
        DeploymentError: If update fails
    """
    print(f"Updating Tellr app: {app_name}")

    ws = _get_workspace_client(client, profile)

    try:
        # Reset database if requested
        if reset_database:
            print("Resetting database schema...")
            app = ws.apps.get(name=app_name)
            _reset_schema(ws, app, lakebase_name, schema_name)
            print(f" Schema '{schema_name}' reset (tables will be recreated on app startup)")
            print()

        # Generate and upload updated files
        with _staging_dir() as staging:
            _write_requirements(staging, app_version)
            _write_app_yaml(staging, lakebase_name, schema_name)
            _upload_files(ws, staging, app_file_workspace_path)
            print(" Files updated")

        # Trigger new deployment
        print(" Deploying...")
        deployment = AppDeployment(source_code_path=app_file_workspace_path)
        result = ws.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)
        print(f" Deployment completed: {result.deployment_id}")

        app = ws.apps.get(name=app_name)
        if app.url:
            print(f" URL: {app.url}")

        return {
            "url": app.url,
            "app_name": app_name,
            "deployment_id": result.deployment_id,
            "status": "updated",
            "database_reset": reset_database,
        }

    except Exception as e:
        raise DeploymentError(f"Update failed: {e}") from e


def delete(
    app_name: str,
    lakebase_name: str | None = None,
    schema_name: str | None = None,
    reset_database: bool = False,
    client: WorkspaceClient | None = None,
    profile: str | None = None,
) -> dict[str, Any]:
    """Delete a Tellr app.

    Note: This does not delete the Lakebase instance by default.

    Args:
        app_name: Name of the app to delete
        lakebase_name: Lakebase instance name (required if reset_database=True)
        schema_name: Schema name (required if reset_database=True)
        reset_database: If True, drop the schema before deleting the app
        client: External WorkspaceClient (optional)
        profile: Databricks CLI profile name (optional)

    Returns:
        Dictionary with deletion status

    Raises:
        DeploymentError: If deletion fails
        ValueError: If reset_database=True but lakebase_name or schema_name not provided
    """
    print(f"Deleting app: {app_name}")

    ws = _get_workspace_client(client, profile)

    try:
        # Reset database if requested
        if reset_database:
            if not lakebase_name or not schema_name:
                raise ValueError(
                    "lakebase_name and schema_name are required when reset_database=True"
                )
            print("Dropping database schema...")
            app = ws.apps.get(name=app_name)
            _reset_schema(ws, app, lakebase_name, schema_name, drop_only=True)
            print(f" Schema '{schema_name}' dropped")

        ws.apps.delete(name=app_name)
        print(" App deleted")

        return {
            "app_name": app_name,
            "status": "deleted",
            "database_reset": reset_database,
        }

    except Exception as e:
        raise DeploymentError(f"Deletion failed: {e}") from e


# -----------------------------------------------------------------------------
# Internal functions
# -----------------------------------------------------------------------------


def _load_deployment_config(config_yaml_path: str) -> dict[str, str]:
    """Load deployment settings from config/deployment.yaml-style files.

    Expected structure (see config/deployment.yaml):
        environments:
          development:
            app_name: ...
            workspace_path: ...
            compute_size: ...
            lakebase:
              database_name: ...
              schema: ...
              capacity: ...
    """
    with open(config_yaml_path, "r", encoding="utf-8") as handle:
        config = yaml.safe_load(handle) or {}

    environments = config.get("environments", {})
    if not environments:
        raise ValueError("No environments found in deployment config")

    env_name = os.getenv("ENVIRONMENT", "development")
    if env_name not in environments:
        raise ValueError(f"Environment '{env_name}' not found in deployment config")

    env_config = environments[env_name]
    lakebase_config = env_config.get("lakebase", {})

    return {
        "app_name": env_config.get("app_name"),
        "app_file_workspace_path": env_config.get("workspace_path"),
        "app_compute": env_config.get("compute_size"),
        "lakebase_name": lakebase_config.get("database_name"),
        "schema_name": lakebase_config.get("schema"),
        "lakebase_compute": lakebase_config.get("capacity"),
    }


def _get_or_create_lakebase(
    ws: WorkspaceClient, database_name: str, capacity: str
) -> dict[str, Any]:
    """Get or create a Lakebase database instance."""
    try:
        existing = ws.database.get_database_instance(name=database_name)
        return {
            "name": existing.name,
            "status": "exists",
            "state": existing.state.value if existing.state else "UNKNOWN",
        }
    except Exception as e:
        error_str = str(e).lower()
        if "not found" not in error_str and "does not exist" not in error_str:
            raise

    instance = ws.database.create_database_instance_and_wait(
        DatabaseInstance(name=database_name, capacity=capacity)
    )
    return {
        "name": instance.name,
        "status": "created",
        "state": instance.state.value if instance.state else "RUNNING",
    }


def _write_requirements(staging_dir: Path, app_version: Optional[str]) -> None:
    """Generate requirements.txt with app package."""
    resolved_version = app_version or _resolve_installed_app_version()

    if resolved_version and not _is_valid_version(resolved_version):
        raise DeploymentError(
            f"Invalid app version '{resolved_version}'. Expected a PEP 440 version."
        )

    if resolved_version:
        package_line = f"databricks-tellr-app=={resolved_version}"
    else:
        package_line = "databricks-tellr-app"

    content = "\n".join(
        [
            "# Generated by databricks-tellr create",
            package_line,
        ]
    )
    (staging_dir / "requirements.txt").write_text(content)


def _resolve_installed_app_version() -> str | None:
    """Try to resolve the installed version of databricks-tellr-app."""
    try:
        return metadata.version("databricks-tellr-app")
    except metadata.PackageNotFoundError:
        return None


def _write_app_yaml(staging_dir: Path, lakebase_name: str, schema_name: str) -> None:
    """Generate app.yaml with environment variables."""
    template_content = _load_template("app.yaml.template")
    content = Template(template_content).substitute(
        LAKEBASE_INSTANCE=lakebase_name,
        LAKEBASE_SCHEMA=schema_name,
    )
    (staging_dir / "app.yaml").write_text(content)


def _load_template(template_name: str) -> str:
    """Load a template file from package resources."""
    try:
        files = resources.files("databricks_tellr") / "_templates" / template_name
        return files.read_text()
    except (TypeError, AttributeError):
        with resources.open_text(
            "databricks_tellr._templates", template_name
        ) as f:
            return f.read()


def _is_valid_version(version: str) -> bool:
    """Check if a version string is valid PEP 440."""
    try:
        from packaging.version import Version
    except ImportError:
        return True
    try:
        Version(version)
        return True
    except Exception:
        return False


@contextmanager
def _staging_dir() -> Iterator[Path]:
    """Create a temporary staging directory for deployment files."""
    import tempfile

    staging_dir = Path(tempfile.mkdtemp(prefix="tellr_staging_"))
    try:
        yield staging_dir
    finally:
        shutil.rmtree(staging_dir, ignore_errors=True)


def _upload_files(
    ws: WorkspaceClient, staging_dir: Path, workspace_path: str
) -> None:
    """Upload files from staging directory to workspace."""
    try:
        ws.workspace.mkdirs(workspace_path)
    except Exception:
        pass  # May already exist

    for file_path in staging_dir.iterdir():
        if file_path.is_file():
            workspace_file_path = f"{workspace_path}/{file_path.name}"
            with open(file_path, "rb") as f:
                ws.workspace.upload(
                    workspace_file_path,
                    f,
                    format=ImportFormat.AUTO,
                    overwrite=True,
                )


def _create_app(
    ws: WorkspaceClient,
    app_name: str,
    description: str,
    workspace_path: str,
    compute_size: str,
    lakebase_name: str,
) -> App:
    """Create Databricks App with database resource."""
    compute_size_enum = ComputeSize(compute_size)

    app_resources = [
        AppResource(
            name="app_database",
            database=AppResourceDatabase(
                instance_name=lakebase_name,
                database_name="databricks_postgres",
                permission=AppResourceDatabaseDatabasePermission.CAN_CONNECT_AND_CREATE,
            ),
        )
    ]

    app = App(
        name=app_name,
        description=description,
        compute_size=compute_size_enum,
        default_source_code_path=workspace_path,
        resources=app_resources,
        user_api_scopes=[
            "sql",
            "dashboards.genie",
            "catalog.tables:read",
            "catalog.schemas:read",
            "catalog.catalogs:read",
            "serving.serving-endpoints",
        ],
    )

    ws.apps.create_and_wait(app)

    deployment = AppDeployment(source_code_path=workspace_path)
    ws.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)

    return ws.apps.get(name=app_name)


def _get_app_client_id(app: App) -> str | None:
    """Extract the service principal client ID from an app."""
    if hasattr(app, "service_principal_client_id") and app.service_principal_client_id:
        return app.service_principal_client_id
    if hasattr(app, "service_principal_id") and app.service_principal_id:
        return str(app.service_principal_id)
    return None


def _get_lakebase_connection(
    ws: WorkspaceClient, lakebase_name: str
) -> tuple[Any, str]:
    """Get a psycopg2 connection to Lakebase and the current username.

    Returns:
        Tuple of (connection, username)
    """
    try:
        import psycopg2
    except ImportError as exc:
        raise DeploymentError(
            "psycopg2-binary is required for database operations"
        ) from exc

    instance = ws.database.get_database_instance(name=lakebase_name)
    user = ws.current_user.me().user_name

    cred = ws.database.generate_database_credential(
        request_id=str(uuid.uuid4()),
        instance_names=[lakebase_name],
    )

    conn = psycopg2.connect(
        host=instance.read_write_dns,
        port=5432,
        user=user,
        password=cred.token,
        dbname="databricks_postgres",
        sslmode="require",
    )
    conn.autocommit = True

    return conn, user


def _setup_database_schema(
    ws: WorkspaceClient, app: App, lakebase_name: str, schema_name: str
) -> None:
    """Set up database schema and grant permissions to app."""
    client_id = _get_app_client_id(app)

    if not client_id:
        print(" Warning: Could not get app client ID - schema setup skipped")
        return

    conn, _ = _get_lakebase_connection(ws, lakebase_name)

    try:
        with conn.cursor() as cur:
            cur.execute(f'CREATE SCHEMA IF NOT EXISTS "{schema_name}"')
            _grant_schema_permissions(cur, schema_name, client_id)
    finally:
        conn.close()


def _reset_schema(
    ws: WorkspaceClient,
    app: App,
    lakebase_name: str,
    schema_name: str,
    drop_only: bool = False,
) -> None:
    """Drop and recreate schema (tables will be recreated by app on startup).

    Args:
        ws: WorkspaceClient
        app: The Databricks App (needed for service principal ID)
        lakebase_name: Lakebase instance name
        schema_name: Schema to reset
        drop_only: If True, only drop the schema without recreating
    """
    client_id = _get_app_client_id(app)

    conn, _ = _get_lakebase_connection(ws, lakebase_name)

    try:
        with conn.cursor() as cur:
            # Drop schema with CASCADE to remove all objects
            cur.execute(f'DROP SCHEMA IF EXISTS "{schema_name}" CASCADE')

            if not drop_only:
                # Recreate schema
                cur.execute(f'CREATE SCHEMA "{schema_name}"')

                # Re-grant permissions if we have the client ID
                if client_id:
                    _grant_schema_permissions(cur, schema_name, client_id)
    finally:
        conn.close()


def _grant_schema_permissions(cur: Any, schema_name: str, client_id: str) -> None:
    """Grant schema permissions to an app's service principal."""
    cur.execute(f'GRANT USAGE ON SCHEMA "{schema_name}" TO "{client_id}"')
    cur.execute(f'GRANT CREATE ON SCHEMA "{schema_name}" TO "{client_id}"')
    cur.execute(
        f'GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA "{schema_name}" TO "{client_id}"'
    )
    cur.execute(
        f'GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA "{schema_name}" TO "{client_id}"'
    )
    cur.execute(
        f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema_name}" '
        f'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO "{client_id}"'
    )
    cur.execute(
        f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema_name}" '
        f'GRANT USAGE, SELECT ON SEQUENCES TO "{client_id}"'
    )


# -----------------------------------------------------------------------------
# Internal Functions (not exported, for Databricks internal use only)
# -----------------------------------------------------------------------------


def _seed_internal_content(
    lakebase_name: str,
    schema_name: str,
    client: WorkspaceClient | None = None,
    profile: str | None = None,
) -> dict[str, Any]:
    """Seed Databricks-internal content (prompts and styles).

    This is an internal function for Databricks use only. It seeds:
    - Databricks Brand slide style
    - Consumption Review deck prompt
    - Use Case Analysis deck prompt

    This function should be called AFTER the app has been deployed and
    has already created the base tables and generic content.

    Args:
        lakebase_name: Lakebase instance name
        schema_name: Schema where tables exist
        client: External WorkspaceClient (optional)
        profile: Databricks CLI profile name (optional)

    Returns:
        Dictionary with seeding results
    """
    print("Seeding internal Databricks content...")

    ws = _get_workspace_client(client, profile)
    conn, _ = _get_lakebase_connection(ws, lakebase_name)

    # Internal content definitions
    internal_styles = [
        {
            "name": "Databricks Brand",
            "description": "Official Databricks brand colors and typography. Navy headers, Lava red accents, clean modern layout.",
            "category": "Brand",
            "is_system": False,
        },
    ]

    internal_prompts = [
        {
            "name": "Consumption Review",
            "description": "Template for consumption review meetings. Analyzes usage trends, identifies key drivers, and highlights areas for optimization.",
            "category": "Review",
            "prompt_content": """PRESENTATION TYPE: Consumption Review

When creating a consumption review presentation, focus on:

1. EXECUTIVE SUMMARY
- Overall consumption trend (increasing/decreasing/stable)
- Key highlight metrics (total spend, month-over-month change)
- Top 3 insights that require attention

2. USAGE ANALYSIS
- Query for consumption data over the past 6-12 months
- Break down by major categories (compute, storage, etc.)
- Identify the top consumers and their growth patterns

3. TREND IDENTIFICATION
- Look for seasonal patterns or anomalies
- Compare current period to previous periods
- Highlight significant changes (>10% movement)

4. OPTIMIZATION OPPORTUNITIES
- Identify underutilized resources
- Highlight cost-saving opportunities
- Recommend actions based on data

5. FORWARD OUTLOOK
- Project future consumption based on trends
- Flag any concerns or risks
- Provide actionable recommendations

Structure the deck with clear data visualizations showing trends over time.""",
        },
        {
            "name": "Use Case Analysis",
            "description": "Template for analyzing use case progression and identifying blockers or accelerators.",
            "category": "Analysis",
            "prompt_content": """PRESENTATION TYPE: Use Case Analysis

When analyzing use cases, focus on:

1. PORTFOLIO OVERVIEW
- Total number of use cases in scope
- Distribution by stage/status
- Overall health metrics

2. PROGRESSION ANALYSIS
- Query for use case movement between stages
- Identify velocity patterns
- Calculate average time in each stage

3. BLOCKER IDENTIFICATION
- Find use cases that are stuck or slowed
- Categorize blockers (technical, resource, dependency)
- Quantify impact of each blocker type

4. SUCCESS PATTERNS
- Identify fast-moving use cases
- Find common characteristics of successful progression
- Extract best practices

5. RECOMMENDATIONS
- Specific actions to unblock stuck use cases
- Resource allocation suggestions
- Process improvements

Use funnel charts for progression and bar charts for blocker analysis.""",
        },
    ]

    # Default slide style content (same as System Default)
    default_style_content = """SLIDE VISUAL STYLE:

Typography & Colors:
- Modern sans-serif font (Inter/SF Pro/Helvetica)
- H1: 40-52px bold, Navy #102025 | H2: 28-36px, Navy #2B3940 | Body: 16-18px, #5D6D71
- Primary accent: Lava #EB4A34 | Success: Green #4BA676 | Warning: Yellow #F2AE3D | Info: Blue #3C71AF
- Background: Oat Light #F9FAFB

Layout & Structure:
- Fixed slide size: 1280x720px per slide, white background
- Body: width:1280px; height:720px; margin:0; padding:0; overflow:hidden
- Use flexbox for layout with appropriate gaps (≥12px)
- Cards/boxes: padding ≥16px, border-radius 8-12px, shadow: 0 4px 6px rgba(0,0,0,0.1)

Content Per Slide:
- ONE clear title (≤55 chars) that states the key insight
- Subtitle for context
- Body text ≤40 words
- Maximum 2 data visualizations per slide

Chart Brand Colors:
['#EB4A34','#4BA676','#3C71AF','#F2AE3D']"""

    styles_seeded = 0
    prompts_seeded = 0

    try:
        with conn.cursor() as cur:
            # Set search path
            cur.execute(f'SET search_path TO "{schema_name}"')

            # Seed styles
            for style in internal_styles:
                cur.execute(
                    "SELECT id FROM slide_style_library WHERE name = %s",
                    (style["name"],)
                )
                if cur.fetchone():
                    print(f" Style '{style['name']}' already exists, skipping")
                    continue

                cur.execute(
                    """INSERT INTO slide_style_library
                    (name, description, category, style_content, is_active, is_system, created_by, updated_by, created_at, updated_at)
                    VALUES (%s, %s, %s, %s, true, %s, 'system', 'system', NOW(), NOW())""",
                    (style["name"], style["description"], style["category"],
                     default_style_content, style["is_system"])
                )
                print(f" Seeded style: {style['name']}")
                styles_seeded += 1

            # Seed prompts
            for prompt in internal_prompts:
                cur.execute(
                    "SELECT id FROM slide_deck_prompt_library WHERE name = %s",
                    (prompt["name"],)
                )
                if cur.fetchone():
                    print(f" Prompt '{prompt['name']}' already exists, skipping")
                    continue

                cur.execute(
                    """INSERT INTO slide_deck_prompt_library
                    (name, description, category, prompt_content, is_active, created_by, updated_by, created_at, updated_at)
                    VALUES (%s, %s, %s, %s, true, 'system', 'system', NOW(), NOW())""",
                    (prompt["name"], prompt["description"], prompt["category"], prompt["prompt_content"])
                )
                print(f" Seeded prompt: {prompt['name']}")
                prompts_seeded += 1

        print(f"Internal content seeding complete: {styles_seeded} styles, {prompts_seeded} prompts")
        return {
            "status": "success",
            "styles_seeded": styles_seeded,
            "prompts_seeded": prompts_seeded,
        }

    except Exception as e:
        raise DeploymentError(f"Failed to seed internal content: {e}") from e
    finally:
        conn.close()
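
Taken together, `create`, `update`, and `delete` are the public API of `deploy.py`. The sketch below drives them from a notebook by importing the module directly (the `__init__.py` exports are not shown in this diff, so the README's top-level `tellr.setup` alias is not assumed); the resource names reuse the README's example values and are illustrative only:

```python
from databricks_tellr.deploy import create, update, delete

# Initial deployment: provisions the Lakebase instance, uploads app.yaml and
# requirements.txt, creates the app, and configures the schema.
result = create(
    lakebase_name="ai-slide-generator-db-dev",
    schema_name="app_data_dev",
    app_name="ai-slide-generator-dev",
    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
)
print(result["url"])

# Roll out new app files later; reset_database=True would drop and recreate
# the schema, and the app recreates its tables on startup.
update(
    app_name="ai-slide-generator-dev",
    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
    lakebase_name="ai-slide-generator-db-dev",
    schema_name="app_data_dev",
    reset_database=False,
)

# Tear down the app; the Lakebase instance is left in place by default.
delete(app_name="ai-slide-generator-dev")
```

Authentication follows the priority documented in `create`: an explicit `client=WorkspaceClient(...)`, then `profile="..."` from `.databrickscfg`, then the `DATABRICKS_HOST`/`DATABRICKS_TOKEN` environment variables.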

databricks_tellr-0.1.15/databricks_tellr.egg-info/PKG-INFO
@@ -0,0 +1,30 @@
Metadata-Version: 2.4
Name: databricks-tellr
Version: 0.1.15
Summary: Tellr deployment tooling for Databricks Apps
Requires-Python: >=3.10
Description-Content-Type: text/markdown
Requires-Dist: databricks-sdk>=0.20.0
Requires-Dist: psycopg2-binary>=2.9.0
Requires-Dist: pyyaml>=6.0.0

# databricks-tellr

Python deployment tooling for Tellr on Databricks Apps.

## Usage

```python
!pip install --upgrade databricks-sdk==0.73.0

import databricks_tellr as tellr

result = tellr.setup(
    lakebase_name="ai-slide-generator-db-dev",
    schema_name="app_data_dev",
    app_name="ai-slide-generator-dev",
    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
)

print(result["url"])
```

databricks_tellr-0.1.15/databricks_tellr.egg-info/SOURCES.txt
@@ -0,0 +1,11 @@
README.md
pyproject.toml
databricks_tellr/__init__.py
databricks_tellr/deploy.py
databricks_tellr.egg-info/PKG-INFO
databricks_tellr.egg-info/SOURCES.txt
databricks_tellr.egg-info/dependency_links.txt
databricks_tellr.egg-info/requires.txt
databricks_tellr.egg-info/top_level.txt
databricks_tellr/_templates/app.yaml.template
databricks_tellr/_templates/requirements.txt.template

databricks_tellr-0.1.15/databricks_tellr.egg-info/dependency_links.txt
@@ -0,0 +1 @@


databricks_tellr-0.1.15/databricks_tellr.egg-info/top_level.txt
@@ -0,0 +1 @@
databricks_tellr

databricks_tellr-0.1.15/pyproject.toml
@@ -0,0 +1,22 @@
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "databricks-tellr"
version = "0.1.15"
description = "Tellr deployment tooling for Databricks Apps"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "databricks-sdk>=0.20.0",
    "psycopg2-binary>=2.9.0",
    "pyyaml>=6.0.0",
]

[tool.setuptools.packages.find]
where = ["."]
include = ["databricks_tellr*"]

[tool.setuptools.package-data]
databricks_tellr = ["_templates/*"]