databricks-tellr 0.1.2__py3-none-any.whl

This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.

Potentially problematic release.


--- /dev/null
+++ databricks_tellr/__init__.py
@@ -0,0 +1,5 @@
+"""Tellr deployment package for Databricks Apps."""
+
+from databricks_tellr.deploy import delete, setup, update
+
+__all__ = ["setup", "update", "delete"]
--- /dev/null
+++ databricks_tellr/_templates/app.yaml.template
@@ -0,0 +1,22 @@
+# Databricks App Configuration
+name: "tellr"
+description: "Tellr - AI Slide Generator"
+
+command:
+  - "sh"
+  - "-c"
+  - |
+    pip install -r requirements.txt && \
+    python -m databricks_tellr_app.run
+
+env:
+  - name: ENVIRONMENT
+    value: "production"
+  - name: LAKEBASE_INSTANCE
+    value: "${LAKEBASE_INSTANCE}"
+  - name: LAKEBASE_SCHEMA
+    value: "${LAKEBASE_SCHEMA}"
+  - name: DATABRICKS_HOST
+    valueFrom: "system.databricks_host"
+  - name: DATABRICKS_TOKEN
+    valueFrom: "system.databricks_token"
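
The `${LAKEBASE_INSTANCE}` and `${LAKEBASE_SCHEMA}` placeholders in this template are filled in by `_render_app_yaml` in `deploy.py` using `string.Template`. A minimal sketch of that substitution (the instance and schema names are illustrative, not part of the package):

```python
from pathlib import Path
from string import Template

# Render the shipped template the same way _render_app_yaml does
template = Template(Path("app.yaml.template").read_text())
rendered = template.substitute(
    LAKEBASE_INSTANCE="ai-slide-generator-db-dev",  # illustrative instance name
    LAKEBASE_SCHEMA="app_data_dev",                 # illustrative schema name
)
print(rendered)  # ready-to-upload app.yaml
```
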
--- /dev/null
+++ databricks_tellr/_templates/requirements.txt.template
@@ -0,0 +1,2 @@
+# Generated by databricks-tellr setup
+databricks-tellr-app${APP_VERSION_SUFFIX}
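
Here `${APP_VERSION_SUFFIX}` expands to `==<version>` when a version is resolved, or to the empty string (an unpinned install) when it is not; see `_render_requirements` in `deploy.py` below. A quick illustration:

```python
from string import Template

# Same rendering _render_requirements performs on the template above
template = Template("databricks-tellr-app${APP_VERSION_SUFFIX}")
print(template.safe_substitute(APP_VERSION_SUFFIX="==0.1.0"))  # databricks-tellr-app==0.1.0
print(template.safe_substitute(APP_VERSION_SUFFIX=""))         # databricks-tellr-app (unpinned)
```
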
--- /dev/null
+++ databricks_tellr/deploy.py
@@ -0,0 +1,770 @@
+"""Databricks Apps deployment utilities for Tellr."""
+
+from __future__ import annotations
+
+import logging
+import os
+import uuid
+from importlib import metadata
+from pathlib import Path
+from string import Template
+from typing import Any
+
+import yaml
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.service.apps import (
+    App,
+    AppDeployment,
+    AppResource,
+    AppResourceDatabase,
+    AppResourceDatabaseDatabasePermission,
+    ComputeSize,
+)
+from databricks.sdk.service.database import DatabaseInstance
+from databricks.sdk.service.workspace import ImportFormat
+
+logger = logging.getLogger(__name__)
+
+
+class DeploymentError(Exception):
+    """Raised when deployment fails."""
+
+
+def setup(
+    lakebase_name: str | None = None,
+    schema_name: str | None = None,
+    app_name: str | None = None,
+    app_file_workspace_path: str | None = None,
+    lakebase_compute: str | None = "CU_1",
+    app_compute: str | None = "MEDIUM",
+    app_version: str | None = None,
+    client: WorkspaceClient | None = None,
+    databricks_cfg_profile_name: str | None = None,
+    config_yaml_path: str | None = None,
+) -> dict[str, Any]:
+    """
+    Deploy Tellr to Databricks Apps using PyPI-only installs.
+
+    Steps:
+    1. Create/get Lakebase instance
+    2. Generate requirements.txt with pinned app version
+    3. Generate app.yaml with env vars
+    4. Upload files to workspace
+    5. Create Databricks App with database resource
+    6. Return app URL
+
+    Authentication flow:
+    - If a WorkspaceClient is provided, use it
+    - Else if databricks_cfg_profile_name is provided, use it to create a WorkspaceClient
+    - Else, use the default Databricks CLI profile to create a WorkspaceClient
+    """
+
+    # create workspace client if necessary
+    if client:
+        pass
+    elif databricks_cfg_profile_name:
+        client = WorkspaceClient(profile=databricks_cfg_profile_name)
+    else:
+        client = WorkspaceClient()
+
+    # check if config_yaml_path is provided and error if yaml and other args are set
+    if config_yaml_path and (lakebase_name or schema_name or app_name or app_file_workspace_path):
+        raise ValueError("config_yaml_path cannot be used with other arguments")
+
+    # load config from yaml if provided
+    config = None
+    if config_yaml_path:
+        config = _load_deployment_config(config_yaml_path)
+
+    # set args from config if provided
+    if config:
+        lakebase_name = config.get("lakebase_name", lakebase_name)
+        schema_name = config.get("schema_name", schema_name)
+        app_name = config.get("app_name", app_name)
+        app_file_workspace_path = config.get("app_file_workspace_path", app_file_workspace_path)
+        lakebase_compute = config.get("lakebase_compute", lakebase_compute)
+        app_compute = config.get("app_compute", app_compute)
+
+    if not all([lakebase_name, schema_name, app_name, app_file_workspace_path]):
+        raise ValueError("lakebase_name, schema_name, app_name, and app_file_workspace_path are required")
+
+    _ensure_lakebase_instance(client, lakebase_name, lakebase_compute or "CU_1")
+    requirements_content = _render_requirements(app_version)
+    app_yaml_content = _render_app_yaml(lakebase_name, schema_name)
+
+    _upload_artifacts(
+        client,
+        app_file_workspace_path,
+        {
+            "requirements.txt": requirements_content,
+            "app.yaml": app_yaml_content,
+        },
+    )
+
+    app = _create_app(
+        client,
+        app_name,
+        app_file_workspace_path,
+        app_compute,
+        lakebase_name,
+    )
+    _setup_schema(client, lakebase_name, schema_name, _get_app_client_id(app))
+
+    return {"app_name": app.name, "url": getattr(app, "url", None)}
+
+
+def _load_deployment_config(config_yaml_path: str) -> dict[str, str]:
+    """
+    Load deployment settings from config/deployment.yaml-style files.
+
+    Expected structure (see config/deployment.yaml):
+    environments:
+      development:
+        app_name: ...
+        workspace_path: ...
+        compute_size: ...
+        lakebase:
+          database_name: ...
+          schema: ...
+          capacity: ...
+    """
+    with open(config_yaml_path, "r", encoding="utf-8") as handle:
+        config = yaml.safe_load(handle) or {}
+
+    environments = config.get("environments", {})
+    if not environments:
+        raise ValueError("No environments found in deployment config")
+
+    env_name = os.getenv("ENVIRONMENT", "development")
+    if env_name not in environments:
+        raise ValueError(f"Environment '{env_name}' not found in deployment config")
+
+    env_config = environments[env_name]
+    lakebase_config = env_config.get("lakebase", {})
+
+    return {
+        "app_name": env_config.get("app_name"),
+        "app_file_workspace_path": env_config.get("workspace_path"),
+        "app_compute": env_config.get("compute_size"),
+        "lakebase_name": lakebase_config.get("database_name"),
+        "schema_name": lakebase_config.get("schema"),
+        "lakebase_compute": lakebase_config.get("capacity"),
+    }
+
+
+def update(app_name: str, app_file_workspace_path: str, app_version: str | None = None) -> None:
+    """Update the app deployment with a new app version."""
+    client = WorkspaceClient()
+    requirements_content = _render_requirements(app_version)
+
+    _upload_artifacts(
+        client,
+        app_file_workspace_path,
+        {
+            "requirements.txt": requirements_content,
+        },
+    )
+
+    deployment = AppDeployment(source_code_path=app_file_workspace_path)
+    client.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)
+
+
+def delete(app_name: str) -> None:
+    """Delete the Databricks App."""
+    client = WorkspaceClient()
+    client.apps.delete(name=app_name)
+
+
+def _render_app_yaml(lakebase_name: str, schema_name: str) -> str:
+    template_path = Path(__file__).parent / "_templates" / "app.yaml.template"
+    template = Template(template_path.read_text())
+    return template.substitute(
+        LAKEBASE_INSTANCE=lakebase_name,
+        LAKEBASE_SCHEMA=schema_name,
+    )
+
+
+def _render_requirements(app_version: str | None) -> str:
+    resolved_version = app_version or _resolve_installed_app_version()
+    version_suffix = f"=={resolved_version}" if resolved_version else ""
+    version_value = resolved_version or ""
+    template_path = Path(__file__).parent / "_templates" / "requirements.txt.template"
+    template = Template(template_path.read_text())
+    return template.safe_substitute(
+        APP_VERSION_SUFFIX=version_suffix,
+        APP_VERSION=version_value,
+    )
+
+
+def _resolve_installed_app_version() -> str | None:
+    try:
+        return metadata.version("databricks-tellr-app")
+    except metadata.PackageNotFoundError:
+        return None
+
+
+def _upload_artifacts(
+    client: WorkspaceClient,
+    workspace_path: str,
+    files: dict[str, str],
+) -> None:
+    client.workspace.mkdirs(workspace_path)
+    for name, content in files.items():
+        upload_path = f"{workspace_path}/{name}"
+        client.workspace.upload(
+            upload_path,
+            content.encode("utf-8"),
+            format=ImportFormat.AUTO,
+            overwrite=True,
+        )
+        logger.info("Uploaded %s to %s", name, upload_path)
+
+
+def _ensure_lakebase_instance(
+    client: WorkspaceClient, instance_name: str, capacity: str
+) -> None:
+    try:
+        client.database.get_database_instance(name=instance_name)
+        return
+    except Exception:
+        pass
+
+    client.database.create_database_instance_and_wait(
+        DatabaseInstance(name=instance_name, capacity=capacity)
+    )
+
+
+def _create_app(
+    client: WorkspaceClient,
+    app_name: str,
+    workspace_path: str,
+    compute_size: str,
+    lakebase_instance: str,
+) -> App:
+    resources = [
+        AppResource(
+            name="app_database",
+            database=AppResourceDatabase(
+                instance_name=lakebase_instance,
+                database_name="databricks_postgres",
+                permission=AppResourceDatabaseDatabasePermission.CAN_CONNECT_AND_CREATE,
+            ),
+        )
+    ]
+
+    app = App(
+        name=app_name,
+        compute_size=ComputeSize(compute_size),
+        default_source_code_path=workspace_path,
+        resources=resources,
+    )
+    client.apps.create_and_wait(app)
+
+    deployment = AppDeployment(source_code_path=workspace_path)
+    client.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)
+    return client.apps.get(name=app_name)
+
+
+def _get_app_client_id(app: App) -> str:
+    if hasattr(app, "service_principal_client_id") and app.service_principal_client_id:
+        return app.service_principal_client_id
+    if hasattr(app, "service_principal_id") and app.service_principal_id:
+        return str(app.service_principal_id)
+    raise DeploymentError("Could not determine app service principal client ID")
+
+
+def _setup_schema(
+    client: WorkspaceClient, instance_name: str, schema: str, client_id: str
+) -> None:
+    try:
+        import psycopg2
+    except ImportError as exc:
+        raise DeploymentError("psycopg2-binary is required for schema setup") from exc
+
+    instance = client.database.get_database_instance(name=instance_name)
+    user = client.current_user.me().user_name
+    credential = client.database.generate_database_credential(
+        request_id=str(uuid.uuid4()),
+        instance_names=[instance_name],
+    )
+
+    conn = psycopg2.connect(
+        host=instance.read_write_dns,
+        port=5432,
+        user=user,
+        password=credential.token,
+        dbname="databricks_postgres",
+        sslmode="require",
+    )
+    conn.autocommit = True
+
+    with conn.cursor() as cur:
+        cur.execute(f'CREATE SCHEMA IF NOT EXISTS "{schema}"')
+        cur.execute(f'GRANT USAGE ON SCHEMA "{schema}" TO "{client_id}"')
+        cur.execute(f'GRANT CREATE ON SCHEMA "{schema}" TO "{client_id}"')
+        cur.execute(
+            f'GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA "{schema}" TO "{client_id}"'
+        )
+        cur.execute(
+            f'GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA "{schema}" TO "{client_id}"'
+        )
+        cur.execute(
+            f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema}" '
+            f'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO "{client_id}"'
+        )
+        cur.execute(
+            f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema}" '
+            f'GRANT USAGE, SELECT ON SEQUENCES TO "{client_id}"'
+        )
+
+    conn.close()
+"""Deployment orchestration for Tellr on Databricks Apps.
+
+This module provides the main setup/update/delete functions for deploying
+the Tellr AI slide generator to Databricks Apps from a notebook.
+"""
+
+import logging
+import os
+import tempfile
+import uuid
+from importlib import resources
+from pathlib import Path
+from string import Template
+from typing import Optional
+
+import psycopg2
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.service.apps import (
+    App,
+    AppDeployment,
+    AppResource,
+    AppResourceDatabase,
+    AppResourceDatabaseDatabasePermission,
+    ComputeSize,
+)
+from databricks.sdk.service.database import DatabaseInstance
+from databricks.sdk.service.workspace import ImportFormat
+
+logger = logging.getLogger(__name__)
+
+
+class DeploymentError(Exception):
+    """Raised when deployment fails."""
+
+    pass
+
+
+def setup(
+    lakebase_name: str,
+    schema_name: str,
+    app_name: str,
+    app_file_workspace_path: str,
+    lakebase_compute: str = "CU_1",
+    app_compute: str = "MEDIUM",
+    app_version: Optional[str] = None,
+    description: str = "Tellr AI Slide Generator",
+) -> dict:
+    """Deploy Tellr to Databricks Apps.
+
+    This function creates all necessary infrastructure and deploys the app:
+    1. Creates/gets Lakebase database instance
+    2. Generates requirements.txt with pinned app version
+    3. Generates app.yaml with environment variables
+    4. Uploads files to workspace
+    5. Creates Databricks App with database resource
+    6. Sets up database schema and grants the app permissions
+
+    Args:
+        lakebase_name: Name for the Lakebase database instance
+        schema_name: PostgreSQL schema name for app tables
+        app_name: Name for the Databricks App
+        app_file_workspace_path: Workspace path to upload app files
+        lakebase_compute: Lakebase capacity (CU_1, CU_2, CU_4, CU_8)
+        app_compute: App compute size (MEDIUM, LARGE, LIQUID)
+        app_version: Specific databricks-tellr-app version (default: latest)
+        description: App description
+
+    Returns:
+        Dictionary with deployment info:
+        - url: App URL
+        - app_name: Created app name
+        - lakebase_name: Database instance name
+        - schema_name: Schema name
+        - status: "created"
+
+    Raises:
+        DeploymentError: If deployment fails
+    """
+    print("🚀 Deploying Tellr to Databricks Apps...")
+    print(f" App name: {app_name}")
+    print(f" Workspace path: {app_file_workspace_path}")
+    print(f" Lakebase: {lakebase_name} (capacity: {lakebase_compute})")
+    print(f" Schema: {schema_name}")
+    print()
+
+    # Get workspace client (uses notebook auth)
+    ws = WorkspaceClient()
+
+    try:
+        # Step 1: Create/get Lakebase instance
+        print("📊 Setting up Lakebase database...")
+        lakebase_result = _get_or_create_lakebase(ws, lakebase_name, lakebase_compute)
+        print(f" ✅ Lakebase: {lakebase_result['name']} ({lakebase_result['status']})")
+        print()
+
+        # Step 2: Generate and upload files
+        print("📁 Preparing deployment files...")
+        with tempfile.TemporaryDirectory() as staging_dir:
+            staging = Path(staging_dir)
+
+            # Generate requirements.txt
+            _write_requirements(staging, app_version)
+            print(" ✓ Generated requirements.txt")
+
+            # Generate app.yaml
+            _write_app_yaml(staging, lakebase_name, schema_name)
+            print(" ✓ Generated app.yaml")
+
+            # Upload to workspace
+            print(f"☁️ Uploading to: {app_file_workspace_path}")
+            _upload_files(ws, staging, app_file_workspace_path)
+            print(" ✅ Files uploaded")
+            print()
+
+        # Step 3: Create app
+        print(f"🔧 Creating Databricks App: {app_name}")
+        app = _create_app(
+            ws,
+            app_name=app_name,
+            description=description,
+            workspace_path=app_file_workspace_path,
+            compute_size=app_compute,
+            lakebase_name=lakebase_name,
+        )
+        print(" ✅ App created")
+        if app.url:
+            print(f" 🌐 URL: {app.url}")
+        print()
+
+        # Step 4: Set up database schema
+        print("📊 Setting up database schema...")
+        _setup_database_schema(ws, app, lakebase_name, schema_name)
+        print(f" ✅ Schema '{schema_name}' configured")
+        print()
+
+        print("✅ Deployment complete!")
+        return {
+            "url": app.url,
+            "app_name": app_name,
+            "lakebase_name": lakebase_name,
+            "schema_name": schema_name,
+            "status": "created",
+        }
+
+    except Exception as e:
+        raise DeploymentError(f"Deployment failed: {e}") from e
+
+
+def update(
+    app_name: str,
+    app_file_workspace_path: str,
+    lakebase_name: str,
+    schema_name: str,
+    app_version: Optional[str] = None,
+) -> dict:
+    """Deploy a new version of an existing Tellr app.
+
+    Updates the app files and triggers a new deployment.
+
+    Args:
+        app_name: Name of the existing Databricks App
+        app_file_workspace_path: Workspace path with app files
+        lakebase_name: Lakebase instance name
+        schema_name: Schema name
+        app_version: Specific databricks-tellr-app version (default: latest)
+
+    Returns:
+        Dictionary with deployment info
+
+    Raises:
+        DeploymentError: If update fails
+    """
+    print(f"🔄 Updating Tellr app: {app_name}")
+
+    ws = WorkspaceClient()
+
+    try:
+        # Generate and upload updated files
+        with tempfile.TemporaryDirectory() as staging_dir:
+            staging = Path(staging_dir)
+
+            _write_requirements(staging, app_version)
+            _write_app_yaml(staging, lakebase_name, schema_name)
+            _upload_files(ws, staging, app_file_workspace_path)
+            print(" ✅ Files updated")
+
+        # Trigger new deployment
+        print(" ⏳ Deploying...")
+        deployment = AppDeployment(source_code_path=app_file_workspace_path)
+        result = ws.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)
+        print(f" ✅ Deployment completed: {result.deployment_id}")
+
+        app = ws.apps.get(name=app_name)
+        if app.url:
+            print(f" 🌐 URL: {app.url}")
+
+        return {
+            "url": app.url,
+            "app_name": app_name,
+            "deployment_id": result.deployment_id,
+            "status": "updated",
+        }
+
+    except Exception as e:
+        raise DeploymentError(f"Update failed: {e}") from e
+
+
+def delete(app_name: str) -> dict:
+    """Delete a Tellr app.
+
+    Note: This does not delete the Lakebase instance or data.
+
+    Args:
+        app_name: Name of the app to delete
+
+    Returns:
+        Dictionary with deletion status
+
+    Raises:
+        DeploymentError: If deletion fails
+    """
+    print(f"🗑️ Deleting app: {app_name}")
+
+    ws = WorkspaceClient()
+
+    try:
+        ws.apps.delete(name=app_name)
+        print(" ✅ App deleted")
+        return {"app_name": app_name, "status": "deleted"}
+    except Exception as e:
+        raise DeploymentError(f"Deletion failed: {e}") from e
+
+
+# -----------------------------------------------------------------------------
+# Internal functions
+# -----------------------------------------------------------------------------
+
+
+def _get_or_create_lakebase(
+    ws: WorkspaceClient, database_name: str, capacity: str
+) -> dict:
+    """Get or create a Lakebase database instance."""
+    try:
+        # Check if exists
+        existing = ws.database.get_database_instance(name=database_name)
+        return {
+            "name": existing.name,
+            "status": "exists",
+            "state": existing.state.value if existing.state else "UNKNOWN",
+        }
+    except Exception as e:
+        error_str = str(e).lower()
+        if "not found" not in error_str and "does not exist" not in error_str:
+            raise
+
+    # Create new instance
+    instance = ws.database.create_database_instance_and_wait(
+        DatabaseInstance(name=database_name, capacity=capacity)
+    )
+    return {
+        "name": instance.name,
+        "status": "created",
+        "state": instance.state.value if instance.state else "RUNNING",
+    }
+
+
+def _write_requirements(staging_dir: Path, app_version: Optional[str]) -> None:
+    """Generate requirements.txt with the app package."""
+    # Get version
+    if not app_version:
+        app_version = _get_latest_app_version()
+
+    # The template's placeholder is ${APP_VERSION_SUFFIX} (e.g. "==0.1.0"),
+    # so render that rather than a bare version string.
+    version_suffix = f"=={app_version}" if app_version else ""
+    template_content = _load_template("requirements.txt.template")
+    content = Template(template_content).safe_substitute(
+        APP_VERSION_SUFFIX=version_suffix,
+        APP_VERSION=app_version or "",
+    )
+
+    (staging_dir / "requirements.txt").write_text(content)
+
+
+def _write_app_yaml(staging_dir: Path, lakebase_name: str, schema_name: str) -> None:
+    """Generate app.yaml with environment variables."""
+    template_content = _load_template("app.yaml.template")
+    content = Template(template_content).substitute(
+        LAKEBASE_INSTANCE=lakebase_name,
+        LAKEBASE_SCHEMA=schema_name,
+    )
+    (staging_dir / "app.yaml").write_text(content)
+
+
+def _load_template(template_name: str) -> str:
+    """Load a template file from package resources."""
+    try:
+        # Python 3.9+ style
+        template_file = resources.files("databricks_tellr") / "_templates" / template_name
+        return template_file.read_text()
+    except (TypeError, AttributeError):
+        # Fallback for older Python
+        with resources.open_text(
+            "databricks_tellr._templates", template_name
+        ) as f:
+            return f.read()
+
+
+def _get_latest_app_version() -> str:
+    """Get the latest version of databricks-tellr-app from PyPI.
+
+    Falls back to a default version if PyPI is unreachable.
+    """
+    try:
+        import urllib.request
+        import json
+
+        url = "https://pypi.org/pypi/databricks-tellr-app/json"
+        with urllib.request.urlopen(url, timeout=5) as response:
+            data = json.loads(response.read())
+            return data["info"]["version"]
+    except Exception:
+        # Fallback to a reasonable default
+        return "0.1.0"
+
+
+def _upload_files(
+    ws: WorkspaceClient, staging_dir: Path, workspace_path: str
+) -> None:
+    """Upload files from staging directory to workspace."""
+    # Ensure directory exists
+    try:
+        ws.workspace.mkdirs(workspace_path)
+    except Exception:
+        pass  # May already exist
+
+    # Upload each file
+    for file_path in staging_dir.iterdir():
+        if file_path.is_file():
+            workspace_file_path = f"{workspace_path}/{file_path.name}"
+            with open(file_path, "rb") as f:
+                ws.workspace.upload(
+                    workspace_file_path,
+                    f,
+                    format=ImportFormat.AUTO,
+                    overwrite=True,
+                )
+
+
+def _create_app(
+    ws: WorkspaceClient,
+    app_name: str,
+    description: str,
+    workspace_path: str,
+    compute_size: str,
+    lakebase_name: str,
+) -> App:
+    """Create Databricks App with database resource."""
+    compute_size_enum = ComputeSize(compute_size)
+
+    # Database resource (local name avoids shadowing importlib.resources)
+    resources_list = [
+        AppResource(
+            name="app_database",
+            database=AppResourceDatabase(
+                instance_name=lakebase_name,
+                database_name="databricks_postgres",
+                permission=AppResourceDatabaseDatabasePermission.CAN_CONNECT_AND_CREATE,
+            ),
+        )
+    ]
+
+    # Create app
+    app = App(
+        name=app_name,
+        description=description,
+        compute_size=compute_size_enum,
+        default_source_code_path=workspace_path,
+        resources=resources_list,
+        user_api_scopes=[
+            "sql",
+            "dashboards.genie",
+            "catalog.tables:read",
+            "catalog.schemas:read",
+            "catalog.catalogs:read",
+            "serving.serving-endpoints",
+        ],
+    )
+
+    ws.apps.create_and_wait(app)
+
+    # Trigger initial deployment
+    deployment = AppDeployment(source_code_path=workspace_path)
+    ws.apps.deploy_and_wait(app_name=app_name, app_deployment=deployment)
+
+    # Refresh to get URL
+    return ws.apps.get(name=app_name)
+
+
+def _setup_database_schema(
+    ws: WorkspaceClient, app: App, lakebase_name: str, schema_name: str
+) -> None:
+    """Set up database schema and grant permissions to app."""
+    # Get app's service principal client ID
+    client_id = None
+    if hasattr(app, "service_principal_client_id") and app.service_principal_client_id:
+        client_id = app.service_principal_client_id
+    elif hasattr(app, "service_principal_id") and app.service_principal_id:
+        client_id = str(app.service_principal_id)
+
+    if not client_id:
+        print(" ⚠️ Could not get app client ID - schema setup skipped")
+        return
+
+    # Get connection info
+    instance = ws.database.get_database_instance(name=lakebase_name)
+    user = ws.current_user.me().user_name
+
+    # Generate credential
+    cred = ws.database.generate_database_credential(
+        request_id=str(uuid.uuid4()),
+        instance_names=[lakebase_name],
+    )
+
+    # Connect and create schema
+    conn = psycopg2.connect(
+        host=instance.read_write_dns,
+        port=5432,
+        user=user,
+        password=cred.token,
+        dbname="databricks_postgres",
+        sslmode="require",
+    )
+    conn.autocommit = True
+
+    with conn.cursor() as cur:
+        cur.execute(f'CREATE SCHEMA IF NOT EXISTS "{schema_name}"')
+        cur.execute(f'GRANT USAGE ON SCHEMA "{schema_name}" TO "{client_id}"')
+        cur.execute(f'GRANT CREATE ON SCHEMA "{schema_name}" TO "{client_id}"')
+        cur.execute(
+            f'GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA "{schema_name}" TO "{client_id}"'
+        )
+        cur.execute(
+            f'GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA "{schema_name}" TO "{client_id}"'
+        )
+        cur.execute(
+            f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema_name}" '
+            f'GRANT SELECT, INSERT, UPDATE, DELETE ON TABLES TO "{client_id}"'
+        )
+        cur.execute(
+            f'ALTER DEFAULT PRIVILEGES IN SCHEMA "{schema_name}" '
+            f'GRANT USAGE, SELECT ON SEQUENCES TO "{client_id}"'
+        )
+
+    conn.close()
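
`_load_deployment_config` (defined in the first half of `deploy.py`) selects an environment block from a YAML file via the `ENVIRONMENT` variable, defaulting to `development`. A config file matching the structure described in its docstring might look like this (all values illustrative):

```yaml
environments:
  development:
    app_name: ai-slide-generator-dev
    workspace_path: /Workspace/Users/you@example.com/.apps/dev/ai-slide-generator
    compute_size: MEDIUM
    lakebase:
      database_name: ai-slide-generator-db-dev
      schema: app_data_dev
      capacity: CU_1
```

Note that `deploy.py` defines `setup`, `update`, and `delete` twice; the later definitions take precedence at import time, and those do not accept `config_yaml_path`, so only the keyword-argument form shown in the README below is effective in this release.
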
--- /dev/null
+++ databricks_tellr-0.1.2.dist-info/METADATA
@@ -0,0 +1,30 @@
+Metadata-Version: 2.4
+Name: databricks-tellr
+Version: 0.1.2
+Summary: Tellr deployment tooling for Databricks Apps
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+Requires-Dist: databricks-sdk>=0.20.0
+Requires-Dist: psycopg2-binary>=2.9.0
+Requires-Dist: pyyaml>=6.0.0
+
+# databricks-tellr
+
+Python deployment tooling for Tellr on Databricks Apps.
+
+## Usage
+
+```python
+!pip install --upgrade databricks-sdk==0.73.0
+
+import databricks_tellr as tellr
+
+result = tellr.setup(
+    lakebase_name="ai-slide-generator-db-dev",
+    schema_name="app_data_dev",
+    app_name="ai-slide-generator-dev",
+    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
+)
+
+print(result["url"])
+```
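+
+To roll out a new version or remove the app later (a sketch; the names mirror
+the setup call above and the arguments match the `update`/`delete` signatures
+in `deploy.py`):
+
+```python
+tellr.update(
+    app_name="ai-slide-generator-dev",
+    app_file_workspace_path="/Workspace/Users/you@example.com/.apps/dev/ai-slide-generator",
+    lakebase_name="ai-slide-generator-db-dev",
+    schema_name="app_data_dev",
+)
+
+tellr.delete(app_name="ai-slide-generator-dev")
+```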
--- /dev/null
+++ databricks_tellr-0.1.2.dist-info/RECORD
@@ -0,0 +1,8 @@
+databricks_tellr/__init__.py,sha256=3IRv7qd7esvmuFjgBW-GEiQe1c3kjXTLdVjHG_UYPmw,152
+databricks_tellr/deploy.py,sha256=foSbQ-AC8Z1v1Nbqt8-u94RAwyCfQZHoa_PzdGFi3ME,24715
+databricks_tellr/_templates/app.yaml.template,sha256=tw0QR4Qcz3V0CJf0aZaMZU1rUejzsPsM8TKaJTxgSaU,507
+databricks_tellr/_templates/requirements.txt.template,sha256=9VSvhCHvzX27A77-F62-g4nGoA-UOf0j76liUm35Ldg,80
+databricks_tellr-0.1.2.dist-info/METADATA,sha256=FnY8cuOJ5K5CmcZKPEu3SOALAIFbbd63aSB4VOCykF4,723
+databricks_tellr-0.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+databricks_tellr-0.1.2.dist-info/top_level.txt,sha256=2tGhbgzwvfXM9UOsTqiVZkFqR9-pz2o3XdqiIOOkuC0,17
+databricks_tellr-0.1.2.dist-info/RECORD,,
--- /dev/null
+++ databricks_tellr-0.1.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (80.9.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
--- /dev/null
+++ databricks_tellr-0.1.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+databricks_tellr