fh_saas-0.9.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fh_saas/__init__.py +1 -0
- fh_saas/_modidx.py +201 -0
- fh_saas/core.py +9 -0
- fh_saas/db_host.py +153 -0
- fh_saas/db_tenant.py +142 -0
- fh_saas/utils_api.py +109 -0
- fh_saas/utils_auth.py +647 -0
- fh_saas/utils_bgtsk.py +112 -0
- fh_saas/utils_blog.py +147 -0
- fh_saas/utils_db.py +151 -0
- fh_saas/utils_email.py +327 -0
- fh_saas/utils_graphql.py +257 -0
- fh_saas/utils_log.py +56 -0
- fh_saas/utils_polars_mapper.py +134 -0
- fh_saas/utils_seo.py +230 -0
- fh_saas/utils_sql.py +320 -0
- fh_saas/utils_sync.py +115 -0
- fh_saas/utils_webhook.py +216 -0
- fh_saas/utils_workflow.py +23 -0
- fh_saas-0.9.5.dist-info/METADATA +274 -0
- fh_saas-0.9.5.dist-info/RECORD +25 -0
- fh_saas-0.9.5.dist-info/WHEEL +5 -0
- fh_saas-0.9.5.dist-info/entry_points.txt +2 -0
- fh_saas-0.9.5.dist-info/licenses/LICENSE +201 -0
- fh_saas-0.9.5.dist-info/top_level.txt +1 -0
fh_saas/utils_sync.py
ADDED
@@ -0,0 +1,115 @@
"""End-to-end data sync pipeline: GraphQL API → Polars → Database."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/10_utils_sync.ipynb.

# %% ../nbs/10_utils_sync.ipynb 2
from __future__ import annotations
import polars as pl
from .utils_graphql import GraphQLClient
from .utils_polars_mapper import map_and_upsert, apply_schema
from typing import Dict, Optional, List
import logging
from nbdev.showdoc import show_doc

logger = logging.getLogger(__name__)

# %% auto 0
__all__ = ['logger', 'sync_external_data', 'sync_incremental']

# %% ../nbs/10_utils_sync.ipynb 5
async def sync_external_data(
    client: GraphQLClient,  # Initialized GraphQL client
    query_template: str,  # GraphQL query with $cursor variable
    variables: dict,  # Initial variables (e.g., {'cursor': None})
    items_path: list[str],  # Path to data list in response
    cursor_path: list[str],  # Path to next cursor
    table_name: str,  # Target database table
    key_col: str,  # Primary key for upsert
    db_uri: str,  # Database connection string
    column_map: dict = None,  # Optional column renaming
    type_map: dict = None,  # Optional type conversions
    has_next_path: list[str] = None,  # Optional hasNextPage path
    batch_size: int = 5000  # Max rows per batch (pagination page size)
) -> Dict[str, int]:
    """Sync external data from GraphQL API to database (streaming, memory-efficient)."""
    total_records = 0
    batch_count = 0

    # Stream paginated data
    async for batch in client.fetch_pages_generator(
        query_template=query_template,
        variables=variables,
        items_path=items_path,
        cursor_path=cursor_path,
        has_next_path=has_next_path
    ):
        batch_count += 1

        # Convert to Polars DataFrame
        df = pl.DataFrame(batch)
        logger.info(f"Batch {batch_count}: Converted {len(df)} records to DataFrame")

        # Apply type conversions if specified
        if type_map:
            df = apply_schema(df, type_map)
            logger.info(f"Batch {batch_count}: Applied type conversions")

        # Upsert to database
        map_and_upsert(
            df=df,
            table_name=table_name,
            key_col=key_col,
            db_uri=db_uri,
            column_map=column_map
        )

        total_records += len(df)
        logger.info(f"Batch {batch_count}: Upserted {len(df)} records (total: {total_records})")

    return {
        'total_records': total_records,
        'batches': batch_count
    }

# %% ../nbs/10_utils_sync.ipynb 8
async def sync_incremental(
    client: GraphQLClient,  # Initialized GraphQL client
    query_template: str,  # GraphQL query with $cursor and $last_sync variables
    last_sync_time: str,  # ISO timestamp (e.g., '2024-01-15T10:00:00Z')
    items_path: list[str],  # Path to data list
    cursor_path: list[str],  # Path to cursor
    table_name: str,  # Target table
    key_col: str,  # Primary key
    db_uri: str,  # Database connection
    column_map: dict = None,  # Optional column map
    type_map: dict = None,  # Optional type map
    has_next_path: list[str] = None  # Optional hasNextPage path
) -> Dict[str, any]:
    """Incremental sync: fetch only records updated after last sync timestamp."""
    from datetime import datetime

    # Add last_sync to variables
    variables = {
        'cursor': None,
        'last_sync': last_sync_time
    }

    # Execute sync
    stats = await sync_external_data(
        client=client,
        query_template=query_template,
        variables=variables,
        items_path=items_path,
        cursor_path=cursor_path,
        table_name=table_name,
        key_col=key_col,
        db_uri=db_uri,
        column_map=column_map,
        type_map=type_map,
        has_next_path=has_next_path
    )

    # Add current timestamp
    stats['last_sync_time'] = datetime.utcnow().isoformat() + 'Z'

    return stats
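As a usage sketch only: the wrapper below shows how `sync_incremental` might be wired up for a paginated `items` query. The query text, response paths, table name, and connection string are illustrative assumptions, and constructing the `GraphQLClient` itself is left to `utils_graphql`.

``` python
# Illustrative only: wiring sync_incremental for a hypothetical paginated `items` query.
from fh_saas.utils_graphql import GraphQLClient
from fh_saas.utils_sync import sync_incremental

ITEMS_QUERY = """
query Items($cursor: String, $last_sync: String) {
  items(after: $cursor, updatedAfter: $last_sync) {
    nodes { id name updatedAt }
    pageInfo { endCursor hasNextPage }
  }
}
"""

async def nightly_item_sync(client: GraphQLClient, db_uri: str) -> dict:
    """Pull rows changed since the last watermark and upsert them into the `items` table."""
    stats = await sync_incremental(
        client=client,  # an already-constructed client; construction is defined in utils_graphql
        query_template=ITEMS_QUERY,
        last_sync_time='2024-01-15T10:00:00Z',
        items_path=['items', 'nodes'],
        cursor_path=['items', 'pageInfo', 'endCursor'],
        has_next_path=['items', 'pageInfo', 'hasNextPage'],
        table_name='items',
        key_col='id',
        db_uri=db_uri,
    )
    # stats carries 'total_records', 'batches', and the new 'last_sync_time' watermark.
    return stats
```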
fh_saas/utils_webhook.py
ADDED
@@ -0,0 +1,216 @@
"""Process webhooks with signature verification, idempotency, and custom handlers."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/14_utils_webhook.ipynb.

# %% auto 0
__all__ = ['logger', 'verify_webhook_signature', 'check_idempotency', 'log_webhook_event', 'update_webhook_status',
           'process_webhook', 'handle_webhook_request']

# %% ../nbs/14_utils_webhook.ipynb 2
from fastcore.utils import *
from fastcore.all import *
from fastsql import *
import hmac
import hashlib
import json
import os
import logging
from datetime import datetime
from typing import Callable, Dict, Any, Optional

# Module-level logger - configured by app via configure_logging()
logger = logging.getLogger(__name__)

# %% ../nbs/14_utils_webhook.ipynb 5
def verify_webhook_signature(
    payload: str,  # Raw request body as string
    signature: str,  # Signature from header (format: "sha256=<hex>")
    secret: Optional[str] = None  # Secret key, defaults to WEBHOOK_SECRET env var
) -> bool:
    """Verify HMAC-SHA256 signature for webhook payload. Returns True if valid."""
    secret = secret or os.getenv('WEBHOOK_SECRET')
    if not secret:
        raise ValueError("WEBHOOK_SECRET not configured")

    # Compute expected signature
    expected = hmac.new(
        secret.encode('utf-8'),
        payload.encode('utf-8'),
        hashlib.sha256
    ).hexdigest()

    # Extract hex from signature (handle "sha256=<hex>" format)
    if '=' in signature:
        signature = signature.split('=', 1)[1]

    # Constant-time comparison to prevent timing attacks
    return hmac.compare_digest(expected, signature)

# %% ../nbs/14_utils_webhook.ipynb 9
def check_idempotency(
    db: Database,  # Tenant database connection
    idempotency_key: str  # Unique key for this webhook event
) -> bool:
    """Check if webhook event already processed. Returns True if duplicate."""
    result = db.q(
        "SELECT webhook_id FROM webhook_events WHERE idempotency_key = ?",
        [idempotency_key]
    )
    return len(result) > 0

# %% ../nbs/14_utils_webhook.ipynb 12
def log_webhook_event(
    db: Database,  # Tenant database connection
    webhook_id: str,  # Unique webhook ID
    source: str,  # Source system (e.g., "stripe", "github")
    event_type: str,  # Event type (e.g., "payment.success")
    payload: Dict[str, Any],  # Full webhook payload
    signature: str,  # Request signature
    idempotency_key: str,  # Idempotency key
    status: str = 'pending'  # Status: pending, processing, completed, failed
):
    """Log webhook event to database"""
    db.insert(dict(
        webhook_id=webhook_id,
        source=source,
        event_type=event_type,
        payload_json=json.dumps(payload),
        signature=signature,
        idempotency_key=idempotency_key,
        status=status,
        created_at=datetime.utcnow().isoformat()
    ), 'webhook_events')

# %% ../nbs/14_utils_webhook.ipynb 15
def update_webhook_status(
    db: Database,  # Tenant database connection
    webhook_id: str,  # Webhook ID to update
    status: str,  # New status
    error_message: Optional[str] = None  # Optional error message
):
    """Update webhook event status and processed timestamp"""
    update_data = {
        'status': status,
        'processed_at': datetime.utcnow().isoformat()
    }
    if error_message:
        update_data['error_message'] = error_message

    db.update(update_data, 'webhook_events', 'webhook_id', webhook_id)

# %% ../nbs/14_utils_webhook.ipynb 18
async def process_webhook(
    db: Database,  # Tenant database connection
    webhook_id: str,  # Unique webhook ID
    source: str,  # Source system
    event_type: str,  # Event type
    payload: Dict[str, Any],  # Webhook payload
    signature: str,  # Request signature
    idempotency_key: str,  # Idempotency key
    raw_body: str,  # Raw request body for signature verification
    handler: Callable,  # App-specific webhook handler function
    secret: Optional[str] = None  # Optional webhook secret
) -> Dict[str, Any]:
    """Process webhook with verification, idempotency, and custom handler execution"""

    # Verify signature
    if not verify_webhook_signature(raw_body, signature, secret):
        return {'status': 'error', 'message': 'Invalid signature'}

    # Check idempotency
    if check_idempotency(db, idempotency_key):
        return {'status': 'duplicate', 'message': 'Event already processed'}

    # Log event
    log_webhook_event(db, webhook_id, source, event_type, payload, signature, idempotency_key, 'processing')

    try:
        # Execute custom handler
        result = await handler(payload, db)

        # Update status
        update_webhook_status(db, webhook_id, 'completed')

        return {'status': 'success', 'result': result}

    except Exception as e:
        # Log failure
        logger.error(f"Webhook {webhook_id} ({source}/{event_type}) failed: {e}", exc_info=True)
        update_webhook_status(db, webhook_id, 'failed', str(e))
        raise

# %% ../nbs/14_utils_webhook.ipynb 20
@delegates(process_webhook)
async def handle_webhook_request(
    request,  # FastHTML request object
    db: Database,  # Tenant database instance
    source: str,  # Webhook source identifier
    handler: Callable,  # App-specific handler function
    signature_header: str = 'X-Webhook-Signature',  # Header containing signature
    idempotency_header: str = 'X-Idempotency-Key',  # Header containing idempotency key
    event_type_field: str = 'type',  # Field in payload containing event type
    **kwargs  # Additional args passed to process_webhook
) -> tuple:  # Returns (response_dict, status_code)
    """FastHTML route handler for webhook requests.

    Example:
        @app.post('/webhooks/stripe')
        async def stripe_webhook(request):
            return await handle_webhook_request(
                request=request,
                db=get_tenant_db(request),
                source='stripe',
                handler=handle_stripe_event,
                signature_header='X-Stripe-Signature',
                run_in_background=True
            )
    """

    try:
        # Parse request
        payload = await request.json()
        signature = request.headers.get(signature_header)
        idempotency_key = request.headers.get(idempotency_header) or payload.get('id')
        event_type = payload.get(event_type_field, 'unknown')

        # Validate required fields
        if not signature:
            return {'error': f'Missing {signature_header} header'}, 401
        if not idempotency_key:
            return {'error': f'Missing {idempotency_header} or id field'}, 400

        # Process webhook
        result = process_webhook(
            db=db,
            source=source,
            event_type=event_type,
            payload=payload,
            signature=signature,
            idempotency_key=idempotency_key,
            handler=handler,
            **kwargs
        )

        # Return appropriate response
        if result['status'] == 'duplicate':
            return {'status': 'ok', 'message': 'Event already processed'}, 200
        elif result['status'] == 'accepted':
            return {
                'status': 'accepted',
                'webhook_id': result['webhook_id'],
                'job_id': result['job_id']
            }, 202
        else:
            return {
                'status': 'ok',
                'webhook_id': result['webhook_id']
            }, 200

    except ValueError as e:
        # Signature verification failed
        logger.warning(f"Webhook signature verification failed from {source}: {e}")
        return {'error': str(e)}, 401
    except Exception as e:
        # Other errors
        logger.error(f"Webhook processing failed: {e}", exc_info=True)
        return {'error': f'Processing failed: {str(e)}'}, 500
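For illustration, a handler compatible with the `await handler(payload, db)` call in `process_webhook` could look like the sketch below. The `payments` table and payload fields are hypothetical, and `sign_payload` simply mirrors the HMAC-SHA256 scheme that `verify_webhook_signature` expects, which is handy when testing an endpoint locally.

``` python
# Illustrative sketch; not part of fh-saas itself.
import hashlib
import hmac
import json

async def handle_payment_event(payload: dict, db) -> dict:
    """Hypothetical app-specific handler: persist a summary row and report what was handled."""
    # 'payments' and the payload fields below are made up for this example.
    db.insert(dict(
        event_id=payload.get('id'),
        kind=payload.get('type'),
        amount=payload.get('data', {}).get('amount'),
    ), 'payments')
    return {'handled': payload.get('type')}

def sign_payload(raw_body: str, secret: str) -> str:
    """Produce a 'sha256=<hex>' signature in the format verify_webhook_signature checks."""
    digest = hmac.new(secret.encode('utf-8'), raw_body.encode('utf-8'), hashlib.sha256).hexdigest()
    return f"sha256={digest}"

# Signing a test body the way a sender would:
body = json.dumps({'id': 'evt_123', 'type': 'payment.success'})
print(sign_payload(body, 'test-secret'))
```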
fh_saas/utils_workflow.py
ADDED
@@ -0,0 +1,23 @@
"""Minimal wrapper for executing callable sequences in order."""

# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/13_utils_workflow.ipynb.

# %% auto 0
__all__ = ['Workflow']

# %% ../nbs/13_utils_workflow.ipynb 2
from fastcore.utils import *
from typing import Callable, List

# %% ../nbs/13_utils_workflow.ipynb 5
class Workflow:
    """Execute a list of callables sequentially. Minimal wrapper for code readability."""

    def __init__(self, steps: List[Callable]):
        """Initialize workflow with list of callable steps"""
        self.steps = steps

    def execute(self):
        """Execute all steps in order"""
        for step in self.steps:
            step()
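A quick usage sketch; the three step functions are made up for illustration:

``` python
from fh_saas.utils_workflow import Workflow

def pull_data(): print("pulling data")
def transform_data(): print("transforming data")
def publish_report(): print("publishing report")

# Steps run sequentially, in the order given.
Workflow([pull_data, transform_data, publish_report]).execute()
```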
fh_saas-0.9.5.dist-info/METADATA
ADDED
@@ -0,0 +1,274 @@
Metadata-Version: 2.4
Name: fh-saas
Version: 0.9.5
Summary: Production-ready multi-tenant SaaS toolkit for FastHTML applications with authentication, billing, and integrations
Home-page: https://github.com/abhisheksreesaila/fh-saas
Author: abhishek sreesaila
Author-email: abhishek.sreesaila@gmail.com
License: Apache Software License 2.0
Keywords: nbdev jupyter notebook python fasthtml saas multi-tenant oauth authentication
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: License :: OSI Approved :: Apache Software License
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: httpx>=0.25.0
Requires-Dist: tenacity>=8.2.0
Requires-Dist: polars>=0.20.0
Requires-Dist: Markdown>=3.4.0
Requires-Dist: python-frontmatter>=1.1.0
Requires-Dist: pygments>=2.17.0
Requires-Dist: fastsql
Requires-Dist: python-fasthtml
Requires-Dist: starlette
Requires-Dist: python-dotenv
Requires-Dist: sqlalchemy
Requires-Dist: fastcore
Provides-Extra: dev
Dynamic: author
Dynamic: author-email
Dynamic: classifier
Dynamic: description
Dynamic: description-content-type
Dynamic: home-page
Dynamic: keywords
Dynamic: license
Dynamic: license-file
Dynamic: provides-extra
Dynamic: requires-dist
Dynamic: requires-python
Dynamic: summary

# 🚀 fh-saas

<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->

## ⚡ Quick Start

### 1. Install

``` bash
pip install fh-saas
```

### 2. Configure Environment

``` bash
# .env file
DB_TYPE=POSTGRESQL
DB_USER=postgres
DB_PASS=your_password
DB_HOST=localhost
DB_NAME=app_host

# Optional integrations
STRIPE_SECRET_KEY=sk_...
RESEND_API_KEY=re_...
GOOGLE_CLIENT_ID=...
GOOGLE_CLIENT_SECRET=...
```
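Since `python-dotenv` is a declared dependency, one way to load this `.env` file during local development (an assumption about your app layout, not something fh-saas requires) is:

``` python
# Minimal sketch: load .env into the process environment before fh-saas reads it.
import os
from dotenv import load_dotenv  # python-dotenv, already a declared dependency

load_dotenv()  # reads .env from the current working directory by default
print(os.getenv("DB_TYPE"), os.getenv("DB_HOST"))
```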
### 3. Initialize Your App

``` python
from fh_saas.db_host import HostDatabase, GlobalUser, TenantCatalog, Membership, gen_id, timestamp
from fh_saas.db_tenant import get_or_create_tenant_db
from fh_saas.utils_db import register_tables, create_indexes
from fh_saas.utils_log import configure_logging

# Configure logging (once at startup)
configure_logging()

# Connect to host database
host_db = HostDatabase.from_env()

# Create a new user
user = GlobalUser(
    id=gen_id(),
    email="founder@startup.com",
    oauth_id="google_abc123",
    created_at=timestamp()
)
host_db.global_users.insert(user)

# Create a tenant for their organization
tenant = TenantCatalog(
    id=gen_id(),
    name="Acme Corp",
    db_url="postgresql://...",
    created_at=timestamp()
)
host_db.tenant_catalogs.insert(tenant)

# Link user to tenant as owner
membership = Membership(
    id=gen_id(),
    user_id=user.id,
    tenant_id=tenant.id,
    profile_id=gen_id(),
    role="owner",
    created_at=timestamp()
)
host_db.memberships.insert(membership)
host_db.commit()
```

### 4. Work with Tenant Data

``` python
# Get tenant's isolated database
tenant_db = get_or_create_tenant_db(tenant.id, tenant.name)

# Define your app's data models
class Project:
    id: str
    name: str
    status: str = "active"
    created_at: str

class Task:
    id: str
    project_id: str
    title: str
    completed: bool = False

# Register tables (creates if not exist)
tables = register_tables(tenant_db, [
    (Project, "projects", "id"),
    (Task, "tasks", "id"),
])

# Add indexes for performance
create_indexes(tenant_db, [
    ("tasks", ["project_id"], False, None),
    ("tasks", ["completed"], False, None),
])

# Use the tables
projects = tables["projects"]
projects.insert(Project(id=gen_id(), name="Launch MVP", created_at=timestamp()))
tenant_db.conn.commit()
```
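As a hypothetical read-back, assuming the tenant handle exposes the same `.q(sql, params)` helper that `fh_saas.utils_webhook` uses for parameterized queries:

``` python
# Hypothetical: list active projects from the tenant database created above.
rows = tenant_db.q("SELECT id, name, status FROM projects WHERE status = ?", ["active"])
for row in rows:
    print(row)
```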
------------------------------------------------------------------------

## 📚 Documentation Guide

| Section | What You'll Learn |
|----|----|
| **📦 Core** | |
| [Multi-Tenant Setup](https://abhisheksreesaila.github.io/fh-saas/db_host.html) | Host database, user management, tenant registry |
| [Tenant Databases](https://abhisheksreesaila.github.io/fh-saas/db_tenant.html) | Isolated databases per tenant, connection pooling |
| [Table Management](https://abhisheksreesaila.github.io/fh-saas/utils_db.html) | Create tables & indexes from dataclasses |
| **🔌 Integrations** | |
| [HTTP Client](https://abhisheksreesaila.github.io/fh-saas/utils_api.html) | REST APIs with retries, rate limiting, auth |
| [GraphQL Client](https://abhisheksreesaila.github.io/fh-saas/utils_graphql.html) | Streaming pagination for large datasets |
| [Webhooks](https://abhisheksreesaila.github.io/fh-saas/utils_webhook.html) | Receive & verify external webhooks |
| **⚙️ Data Pipeline** | |
| [Background Tasks](https://abhisheksreesaila.github.io/fh-saas/utils_bgtsk.html) | Async job execution |
| [Data Transforms](https://abhisheksreesaila.github.io/fh-saas/utils_polars_mapper.html) | JSON → Polars → Database pipeline |
| [API → DB Sync](https://abhisheksreesaila.github.io/fh-saas/utils_sync.html) | End-to-end external data sync |
| **🛠️ Utilities** | |
| [SQL Helpers](https://abhisheksreesaila.github.io/fh-saas/utils_sql.html) | Database type detection, query builders |
| [Logging](https://abhisheksreesaila.github.io/fh-saas/utils_log.html) | Configurable logging for all modules |
| [Authentication](https://abhisheksreesaila.github.io/fh-saas/utils_oauth.html) | OAuth flows (Google, GitHub) |
| [Email Sending](https://abhisheksreesaila.github.io/fh-saas/utils_email.html) | Transactional emails via Resend |
| **📣 Content** | |
| [Blog Publishing](https://abhisheksreesaila.github.io/fh-saas/utils_blog.html) | Markdown blog with frontmatter |
| [SEO & Sitemaps](https://abhisheksreesaila.github.io/fh-saas/utils_seo.html) | Sitemap generation, meta tags |
| [Workflow Engine](https://abhisheksreesaila.github.io/fh-saas/utils_workflow.html) | Multi-step automation |

------------------------------------------------------------------------

## 🛠️ Developer Guide

This project uses [nbdev](https://nbdev.fast.ai/) for literate
programming. The source of truth is in the `nbs/` notebooks.

### Development Setup

``` bash
# Clone and install in dev mode
git clone https://github.com/abhisheksreesaila/fh-saas.git
cd fh-saas
pip install -e .

# Make changes in nbs/ directory, then compile
nbdev_prepare
```

------------------------------------------------------------------------

## 📦 Installation

``` bash
# From PyPI (recommended)
pip install fh-saas

# From GitHub (latest)
pip install git+https://github.com/abhisheksreesaila/fh-saas.git
```

[GitHub](https://github.com/abhisheksreesaila/fh-saas) ·
[PyPI](https://pypi.org/project/fh-saas/) ·
[Documentation](https://abhisheksreesaila.github.io/fh-saas/)

**🤖 For AI Assistants:** [Download llms-ctx.txt](https://raw.githubusercontent.com/abhisheksreesaila/fh-saas/main/llms-ctx.txt) — Complete API documentation for LLMs

------------------------------------------------------------------------

## 🤖 AI Assistant Context

**For AI coding assistants** (GitHub Copilot, Claude, ChatGPT, Cursor, etc.), download the complete API documentation:

**[📥 Download llms-ctx.txt](https://raw.githubusercontent.com/abhisheksreesaila/fh-saas/main/llms-ctx.txt)** — Complete fh-saas documentation in LLM-friendly format

### How to use:

1. Download the context file
2. Add to your AI assistant's instructions/knowledge base
3. Get accurate code suggestions for fh-saas APIs

This file contains all module documentation, examples, and API signatures formatted for optimal LLM understanding.

------------------------------------------------------------------------

## 🤝 Contributing

Contributions are welcome! Please check the [GitHub repository](https://github.com/abhisheksreesaila/fh-saas) for issues and discussions.

## 🤖 AI Assistant Context

**For AI coding assistants** (GitHub Copilot, Claude, ChatGPT, Cursor, etc.), download the complete API documentation:

### [📥 Download llms-ctx.txt](https://raw.githubusercontent.com/abhisheksreesaila/fh-saas/main/llms-ctx.txt)

**What's included:**
- Complete module documentation and API signatures
- Code examples and usage patterns
- Multi-tenant architecture patterns
- Authentication and security flows
- Data pipeline implementations

**How to use:**
1. Download the `llms-ctx.txt` file
2. Add it to your AI assistant's project context or instructions
3. Get accurate, context-aware code suggestions for fh-saas

This consolidated context file (~500KB) contains all documentation formatted for optimal LLM understanding, enabling your AI assistant to provide precise help with fh-saas APIs.
fh_saas-0.9.5.dist-info/RECORD
ADDED
@@ -0,0 +1,25 @@
fh_saas/__init__.py,sha256=iPcoATf7BiWjSu-KocRdM5zFTR4wx4ktCHlGGpvdc1M,23
fh_saas/_modidx.py,sha256=7u_UOf6EvBsEOf4efU8qx3R0nAfRLMY-i3CuNfLojdw,24414
fh_saas/core.py,sha256=lrCYVHRzmOEwTBsC1FIU0uVnfPLiy_36-YXuNsOgiK4,192
fh_saas/db_host.py,sha256=2fQ03uwHIDReAsPUNe1bP5Nygj8-l7Vmgd8xrElwalQ,5387
fh_saas/db_tenant.py,sha256=KNlJoPJFwjuL2xA3qG0sVExWgUp0LhCDwBxai78IRUk,5624
fh_saas/utils_api.py,sha256=HG79jD4aQ_J1gXwACgGvlHkNnx6p1poyIlXswpdq4jg,3781
fh_saas/utils_auth.py,sha256=pQIWiAjpATnHAK8vjO0IPuEkjs5JHwwsX7WwkOqDRbM,24209
fh_saas/utils_bgtsk.py,sha256=-1pIa362btOWFSmwDFfNyVpgR4LlLn7TR2tWjw6EYr0,4319
fh_saas/utils_blog.py,sha256=70H0Wx1nHF5a_iUhN4hCHDSDoU5p56VgoHvE6b-WOgo,5052
fh_saas/utils_db.py,sha256=xZpMBW2XqLskhybxhKEe9Dcnxwif4m-AYsQcHGkyOMo,6448
fh_saas/utils_email.py,sha256=pZa069FsF9fNfSOx2xhLhJcJL06erYb8ASZxrdpJxxk,12080
fh_saas/utils_graphql.py,sha256=ytqycpxxxDCWjtf_yMSesZDL6HeoGpv05g_sMK-C_n8,9798
fh_saas/utils_log.py,sha256=b9IHI8bPTvnT8id1wbqj30YfCUipITRGO0NaaJtXzVY,1770
fh_saas/utils_polars_mapper.py,sha256=QVC2CmTFyiceoCxUV54pCPF59CYX6GigNizy4nqAhnU,5445
fh_saas/utils_seo.py,sha256=zW1qPkcMU0kt-FwJLR-3H-4fe5EmsixuqCu8rWF7B2A,7905
fh_saas/utils_sql.py,sha256=eZXQttvMd82VVcYR0OIO9Cl1-ia4IKMeKRM-9CEYWDc,12494
fh_saas/utils_sync.py,sha256=P6qjbrwkKp-kUoKuZ-Yar9LHF4P2z3nfmZCosZxq4u8,4054
fh_saas/utils_webhook.py,sha256=z8ZrQtR4PGUP9ANq8AFVXRv7g6RArBt-dNjr6NeJKHM,8132
fh_saas/utils_workflow.py,sha256=w856z8a_gFvUXBtb0ca5jr15U3u7yE4EveNGSJzyDEs,709
fh_saas-0.9.5.dist-info/licenses/LICENSE,sha256=xV8xoN4VOL0uw9X8RSs2IMuD_Ss_a9yAbtGNeBWZwnw,11337
fh_saas-0.9.5.dist-info/METADATA,sha256=E8MdKe_rMSm6V8bK2B1HI1R6XlPPEYGBkXOno56sbQA,9080
fh_saas-0.9.5.dist-info/WHEEL,sha256=qELbo2s1Yzl39ZmrAibXA2jjPLUYfnVhUNTlyF1rq0Y,92
fh_saas-0.9.5.dist-info/entry_points.txt,sha256=-FoknhHi3NcTaFYCMeABodgfmEa4y0pxjHrL1LsoWdU,36
fh_saas-0.9.5.dist-info/top_level.txt,sha256=nD7PCoBHeexfugt5JzccwP6te564l7yb9j_RM0ERBlI,8
fh_saas-0.9.5.dist-info/RECORD,,
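Each RECORD row follows the standard wheel convention `path,sha256=<urlsafe-base64 digest without padding>,<size in bytes>`. A small sketch for recomputing one entry from a file on disk (the path is just an example):

``` python
# Recompute a RECORD-style entry for a file on disk (path is illustrative).
import base64
import hashlib
import pathlib

def record_entry(path: str) -> str:
    data = pathlib.Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"

print(record_entry("fh_saas/utils_workflow.py"))
```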