fh-saas 0.9.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fh_saas/__init__.py +1 -0
- fh_saas/_modidx.py +201 -0
- fh_saas/core.py +9 -0
- fh_saas/db_host.py +153 -0
- fh_saas/db_tenant.py +142 -0
- fh_saas/utils_api.py +109 -0
- fh_saas/utils_auth.py +647 -0
- fh_saas/utils_bgtsk.py +112 -0
- fh_saas/utils_blog.py +147 -0
- fh_saas/utils_db.py +151 -0
- fh_saas/utils_email.py +327 -0
- fh_saas/utils_graphql.py +257 -0
- fh_saas/utils_log.py +56 -0
- fh_saas/utils_polars_mapper.py +134 -0
- fh_saas/utils_seo.py +230 -0
- fh_saas/utils_sql.py +320 -0
- fh_saas/utils_sync.py +115 -0
- fh_saas/utils_webhook.py +216 -0
- fh_saas/utils_workflow.py +23 -0
- fh_saas-0.9.5.dist-info/METADATA +274 -0
- fh_saas-0.9.5.dist-info/RECORD +25 -0
- fh_saas-0.9.5.dist-info/WHEEL +5 -0
- fh_saas-0.9.5.dist-info/entry_points.txt +2 -0
- fh_saas-0.9.5.dist-info/licenses/LICENSE +201 -0
- fh_saas-0.9.5.dist-info/top_level.txt +1 -0
fh_saas/utils_bgtsk.py
ADDED
@@ -0,0 +1,112 @@

"""Lightweight background task execution for tenant-level operations with retry logic and status tracking."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/03_utils_bgtsk.ipynb.

# %% auto 0
__all__ = ['logger', 'TenantJob', 'BackgroundTaskManager']

# %% ../nbs/03_utils_bgtsk.ipynb 2
from datetime import datetime
from typing import Optional, Callable, Any
import json
import traceback
import asyncio
import logging
from starlette.background import BackgroundTask
from fastcore.utils import *
from fastsql.core import *
from nbdev.showdoc import show_doc
from .db_host import timestamp, gen_id

# Module-level logger - configured by app via configure_logging()
logger = logging.getLogger(__name__)


# %% ../nbs/03_utils_bgtsk.ipynb 5
class TenantJob:
    """Tenant-level background job with retry support."""
    id: str
    job_type: str
    status: str  # 'pending', 'running', 'completed', 'failed'
    payload: str  # JSON string
    result: str = None  # JSON string
    error_log: str = None
    retry_count: int = 0
    max_retries: int = 3
    created_at: str = None
    started_at: str = None
    completed_at: str = None

# %% ../nbs/03_utils_bgtsk.ipynb 7
class BackgroundTaskManager:
    """Lightweight background task manager for tenant-level operations."""

    def __init__(self, db: Database):
        """Initialize with tenant database."""
        self.db = db
        self.tenant_jobs = db.create(TenantJob, name="tenant_jobs", pk='id')

    def submit(self, job_type: str, task_func: Callable, max_retries: int = 3, **task_kwargs) -> tuple[str, BackgroundTask]:
        """Submit a new background task for execution."""
        job_id = gen_id()
        job = TenantJob(
            id=job_id,
            job_type=job_type,
            status='pending',
            payload=json.dumps(task_kwargs),
            max_retries=max_retries,
            created_at=timestamp()
        )
        self.tenant_jobs.insert(job)

        bg_task = BackgroundTask(
            self._execute_with_retry,
            job_id=job_id,
            task_func=task_func,
            **task_kwargs
        )

        return job_id, bg_task

    def _execute_with_retry(self, job_id: str, task_func: Callable, **task_kwargs):
        """Execute task with automatic retry logic and status tracking."""
        job = self.tenant_jobs[job_id]

        try:
            self.tenant_jobs.update(id=job_id, status='running', started_at=timestamp())
            result = task_func(**task_kwargs)
            self.tenant_jobs.update(
                id=job_id,
                status='completed',
                result=json.dumps(result) if result else None,
                completed_at=timestamp()
            )
        except Exception as e:
            self._handle_failure(job_id, job, e)

    def _handle_failure(self, job_id: str, job: TenantJob, error: Exception):
        """Handle task failure with retry logic."""
        error_msg = f"{type(error).__name__}: {str(error)}\n{traceback.format_exc()}"
        retry_count = job.retry_count + 1

        logger.error(f"Job {job_id} ({job.job_type}) failed (attempt {retry_count}/{job.max_retries}): {error}", exc_info=True)

        if retry_count < job.max_retries:
            self.tenant_jobs.update(id=job_id, status='pending', retry_count=retry_count, error_log=error_msg)
        else:
            self.tenant_jobs.update(id=job_id, status='failed', retry_count=retry_count, error_log=error_msg, completed_at=timestamp())

    def get_job(self, job_id: str) -> TenantJob:
        """Get job status and details."""
        return self.tenant_jobs[job_id]

    def list_jobs(self, job_type: Optional[str] = None, status: Optional[str] = None, limit: int = 100) -> list[TenantJob]:
        """List jobs with optional filtering."""
        where_clauses = []
        if job_type:
            where_clauses.append(f"job_type = '{job_type}'")
        if status:
            where_clauses.append(f"status = '{status}'")

        where = " AND ".join(where_clauses) if where_clauses else None
        return self.tenant_jobs(where=where, order_by="created_at DESC", limit=limit)
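The following is a minimal usage sketch, not part of the packaged code: it shows how `BackgroundTaskManager.submit` pairs with a Starlette endpoint. The connection URL, the `send_report` function, and the route itself are assumptions made for the example; only `BackgroundTaskManager` comes from fh_saas.

```python
from fastsql import Database
from starlette.responses import JSONResponse
from fh_saas.utils_bgtsk import BackgroundTaskManager

db = Database("sqlite:///tenant.db")   # tenant database (assumed URL)
manager = BackgroundTaskManager(db)    # creates/opens the tenant_jobs table

def send_report(email: str):
    # app-defined work; the return value is JSON-serialized into job.result
    return {"sent_to": email}

async def start_report(request):
    job_id, bg = manager.submit("send_report", send_report, email="user@example.com")
    # Attaching the BackgroundTask lets Starlette run it after the response is sent;
    # clients can poll manager.get_job(job_id) for pending/running/completed/failed.
    return JSONResponse({"job_id": job_id}, background=bg)
```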
fh_saas/utils_blog.py
ADDED
@@ -0,0 +1,147 @@

"""Load markdown posts with frontmatter and render to SEO-friendly HTML."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/11_utils_blog.ipynb.

# %% ../nbs/11_utils_blog.ipynb 2
from __future__ import annotations
import frontmatter
import markdown
from pathlib import Path
from typing import List, Dict, Optional
from datetime import datetime
from nbdev.showdoc import show_doc

# %% auto 0
__all__ = ['PostLoader', 'MarkdownEngine']

# %% ../nbs/11_utils_blog.ipynb 5
def _generate_slug(filename: str) -> str:
    """Generate URL-safe slug from filename"""
    return filename.replace('.md', '').replace(' ', '-').lower()

def _parse_date(date_val: any) -> Optional[datetime]:
    """Parse date from frontmatter (handles datetime, str, None)"""
    if isinstance(date_val, datetime):
        return date_val
    if isinstance(date_val, str):
        try:
            return datetime.fromisoformat(date_val.replace('Z', '+00:00'))
        except:
            return None
    return None

# %% ../nbs/11_utils_blog.ipynb 6
class PostLoader:
    """Load and parse markdown blog posts from filesystem"""

    def __init__(self, posts_dir: str):  # Directory containing .md files
        """Initialize PostLoader with posts directory"""
        self.posts_dir = Path(posts_dir)

    def load_posts(self) -> List[Dict]:
        """
        Load all markdown posts from directory.

        Returns list of post dicts sorted by date (newest first).
        Each post contains: title, date, slug, body, categories, author, series.

        Example:
        ```python
        loader = PostLoader('blog/posts')
        posts = loader.load_posts()

        for post in posts:
            print(f"{post['title']} - {post['slug']}")
        ```
        """
        posts = []

        if not self.posts_dir.exists():
            return posts

        for md_file in self.posts_dir.glob('*.md'):
            post = frontmatter.load(md_file)

            posts.append({
                'title': post.get('title', md_file.stem),
                'date': _parse_date(post.get('date')),
                'slug': _generate_slug(md_file.name),
                'body': post.content,
                'categories': post.get('categories', []),
                'author': post.get('author'),
                'series': post.get('series'),
                'description': post.get('description', ''),
                'image': post.get('image')
            })

        # Sort by date (newest first)
        posts.sort(key=lambda p: p['date'] or datetime.min, reverse=True)
        return posts

    def get_post(self, slug: str) -> Optional[Dict]:  # URL slug (e.g., 'my-post')
        """
        Get single post by slug.

        Example:
        ```python
        post = loader.get_post('bg0010')
        if post:
            print(post['title'])
        ```
        """
        posts = self.load_posts()
        return next((p for p in posts if p['slug'] == slug), None)

# %% ../nbs/11_utils_blog.ipynb 10
class MarkdownEngine:
    """Render markdown to HTML with SEO extensions"""

    def __init__(self):
        """Initialize markdown renderer with standard extensions"""
        self.md = markdown.Markdown(
            extensions=[
                'toc',          # Table of contents
                'fenced_code',  # ```code blocks```
                'tables',       # Markdown tables
                'codehilite',   # Syntax highlighting
                'extra'         # Abbreviations, definitions, etc.
            ],
            extension_configs={
                'codehilite': {
                    'css_class': 'highlight',
                    'linenums': False
                }
            }
        )

    def render(self, content: str) -> str:  # Markdown content
        """
        Convert markdown to HTML.

        Returns HTML string with proper semantic tags for SEO.

        Example:
        ```python
        engine = MarkdownEngine()
        html = engine.render('# Hello\n\nThis is **bold**.')
        print(html)  # <h1>Hello</h1><p>This is <strong>bold</strong>.</p>
        ```
        """
        self.md.reset()  # Reset parser state
        return self.md.convert(content)

    def get_toc(self) -> str:
        """
        Get table of contents HTML from last render.

        Must call render() first. Returns empty string if no headings.

        Example:
        ```python
        engine = MarkdownEngine()
        html = engine.render('# Title\n## Section 1\n## Section 2')
        toc = engine.get_toc()
        print(toc)  # <ul><li><a href="#section-1">Section 1</a>...</li></ul>
        ```
        """
        return self.md.toc if hasattr(self.md, 'toc') else ''
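A short sketch, not part of the packaged code, showing how the two classes above are typically combined to render one post. The `blog/posts` directory and the `my-post` slug are assumptions for illustration.

```python
from fh_saas.utils_blog import PostLoader, MarkdownEngine

loader = PostLoader('blog/posts')       # directory of .md files with YAML frontmatter
engine = MarkdownEngine()

post = loader.get_post('my-post')       # slug is derived from the filename
if post:
    html = engine.render(post['body'])  # markdown body (frontmatter already stripped)
    toc = engine.get_toc()              # table of contents built during that render()
```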
fh_saas/utils_db.py
ADDED
@@ -0,0 +1,151 @@

"""Atomic table and index management utilities for tenant databases."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/15_utils_db.ipynb.

# %% auto 0
__all__ = ['logger', 'register_table', 'register_tables', 'drop_table', 'create_index', 'create_indexes', 'drop_index',
           'table_exists']

# %% ../nbs/15_utils_db.ipynb 2
from fastsql import *
from sqlalchemy import text
import logging
from typing import List, Dict, Any, Optional, Tuple, Type
from .utils_sql import get_db_type
from nbdev.showdoc import show_doc

# Module-level logger - configured by app via configure_logging()
logger = logging.getLogger(__name__)

# %% ../nbs/15_utils_db.ipynb 5
def register_table(tenant_db: Database, model_class: Type, table_name: str, pk: str = 'id'):
    """Create a table from a dataclass model if it doesn't exist (atomic)."""
    try:
        table = tenant_db.create(model_class, name=table_name, pk=pk)
        tenant_db.conn.commit()
        return table
    except Exception as e:
        logger.error(f"Failed to create table '{table_name}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to create table '{table_name}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 7
def register_tables(tenant_db: Database, models: List[Tuple[Type, str, str]]) -> Dict[str, Any]:
    """Create multiple tables atomically (all succeed or all rollback)."""
    current_table = None
    try:
        tables = {}
        for model_class, table_name, pk in models:
            current_table = table_name
            tables[table_name] = tenant_db.create(model_class, name=table_name, pk=pk)
        tenant_db.conn.commit()
        return tables
    except Exception as e:
        logger.error(f"Failed to create table '{current_table}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to create table '{current_table}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 10
def drop_table(tenant_db: Database, table_name: str) -> None:
    """Drop a table if it exists (atomic operation)."""
    sql = f"DROP TABLE IF EXISTS {table_name}"
    try:
        tenant_db.conn.execute(text(sql))
        tenant_db.conn.commit()
    except Exception as e:
        logger.error(f"Failed to drop table '{table_name}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to drop table '{table_name}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 13
def create_index(tenant_db: Database, table_name: str, columns: List[str],
                 unique: bool = False, index_name: str = None) -> None:
    """Create an index on a table if it doesn't exist (atomic operation)."""
    # Auto-generate index name if not provided
    if index_name is None:
        index_name = f"idx_{table_name}_{'_'.join(columns)}"

    unique_clause = "UNIQUE " if unique else ""
    columns_clause = ", ".join(columns)

    # CREATE INDEX IF NOT EXISTS works for both PostgreSQL and SQLite
    sql = f"CREATE {unique_clause}INDEX IF NOT EXISTS {index_name} ON {table_name} ({columns_clause})"

    try:
        tenant_db.conn.execute(text(sql))
        tenant_db.conn.commit()
    except Exception as e:
        logger.error(f"Failed to create index '{index_name}' on '{table_name}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to create index '{index_name}' on '{table_name}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 15
def create_indexes(tenant_db: Database, indexes: List[Tuple[str, List[str], bool, Optional[str]]]) -> None:
    """Create multiple indexes atomically (all succeed or all rollback)."""
    current_index = None
    current_table = None
    try:
        for table_name, columns, unique, index_name in indexes:
            current_table = table_name
            # Auto-generate index name if not provided
            if index_name is None:
                index_name = f"idx_{table_name}_{'_'.join(columns)}"
            current_index = index_name

            unique_clause = "UNIQUE " if unique else ""
            columns_clause = ", ".join(columns)
            sql = f"CREATE {unique_clause}INDEX IF NOT EXISTS {index_name} ON {table_name} ({columns_clause})"
            tenant_db.conn.execute(text(sql))

        tenant_db.conn.commit()
    except Exception as e:
        logger.error(f"Failed to create index '{current_index}' on '{current_table}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to create index '{current_index}' on '{current_table}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 18
def drop_index(tenant_db: Database, index_name: str, table_name: str = None) -> None:
    """Drop an index if it exists (atomic operation)."""
    db_type = get_db_type()

    if db_type == "POSTGRESQL":
        # PostgreSQL: DROP INDEX IF EXISTS index_name
        sql = f"DROP INDEX IF EXISTS {index_name}"
    else:
        # SQLite: DROP INDEX IF EXISTS index_name
        sql = f"DROP INDEX IF EXISTS {index_name}"

    try:
        tenant_db.conn.execute(text(sql))
        tenant_db.conn.commit()
    except Exception as e:
        logger.error(f"Failed to drop index '{index_name}': {e}", exc_info=True)
        tenant_db.conn.rollback()
        raise Exception(f"Failed to drop index '{index_name}': {str(e)}") from e

# %% ../nbs/15_utils_db.ipynb 21
def table_exists(tenant_db: Database, table_name: str) -> bool:
    """Check if a table exists in the database."""
    db_type = get_db_type()

    if db_type == "POSTGRESQL":
        sql = """
            SELECT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_name = :table_name
            )
        """
    else:  # SQLite
        sql = """
            SELECT EXISTS (
                SELECT 1 FROM sqlite_master
                WHERE type = 'table' AND name = :table_name
            )
        """

    try:
        result = tenant_db.conn.execute(text(sql), {"table_name": table_name})
        return result.scalar()
    except Exception as e:
        logger.error(f"Failed to check if table '{table_name}' exists: {e}", exc_info=True)
        raise Exception(f"Failed to check if table '{table_name}' exists: {str(e)}") from e
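A minimal provisioning sketch, not from the package itself: it exercises `register_tables`, `create_indexes`, and `table_exists` on a single tenant database. The `Note` model and the connection URL are assumptions made for the example.

```python
from dataclasses import dataclass
from fastsql import Database
from fh_saas.utils_db import register_tables, create_indexes, table_exists

@dataclass
class Note:
    id: str = ''
    title: str = ''
    created_at: str = ''

tenant_db = Database("sqlite:///tenant.db")

# (model_class, table_name, pk) triples; all created in one commit or rolled back together
tables = register_tables(tenant_db, [(Note, "notes", "id")])

# (table_name, columns, unique, index_name-or-None) tuples
create_indexes(tenant_db, [("notes", ["created_at"], False, None)])

assert table_exists(tenant_db, "notes")
```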