vibetuner 2.26.9__py3-none-any.whl → 2.44.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of vibetuner might be problematic.
- vibetuner/cli/__init__.py +2 -17
- vibetuner/cli/db.py +40 -0
- vibetuner/cli/run.py +126 -110
- vibetuner/cli/scaffold.py +7 -0
- vibetuner/config.py +55 -3
- vibetuner/frontend/__init__.py +1 -0
- vibetuner/frontend/lifespan.py +7 -2
- vibetuner/frontend/middleware.py +0 -25
- vibetuner/frontend/proxy.py +14 -0
- vibetuner/frontend/routes/debug.py +41 -31
- vibetuner/frontend/templates.py +81 -0
- vibetuner/mongo.py +55 -15
- vibetuner/paths.py +16 -13
- vibetuner/services/blob.py +31 -45
- vibetuner/services/email.py +14 -15
- vibetuner/services/s3_storage.py +454 -0
- vibetuner/sqlmodel.py +109 -0
- vibetuner/tasks/lifespan.py +7 -2
- vibetuner/tasks/worker.py +9 -8
- vibetuner/templates/frontend/email_sent.html.jinja +1 -1
- vibetuner/templates/frontend/index.html.jinja +2 -2
- vibetuner/templates/frontend/login.html.jinja +1 -1
- vibetuner/templates/frontend/user/edit.html.jinja +1 -1
- vibetuner/templates/frontend/user/profile.html.jinja +1 -1
- {vibetuner-2.26.9.dist-info → vibetuner-2.44.1.dist-info}/METADATA +39 -29
- {vibetuner-2.26.9.dist-info → vibetuner-2.44.1.dist-info}/RECORD +28 -24
- {vibetuner-2.26.9.dist-info → vibetuner-2.44.1.dist-info}/WHEEL +1 -1
- {vibetuner-2.26.9.dist-info → vibetuner-2.44.1.dist-info}/entry_points.txt +0 -0
vibetuner/services/s3_storage.py
ADDED

@@ -0,0 +1,454 @@
"""ABOUTME: S3-compatible storage service for managing buckets and objects.
ABOUTME: Provides async operations for R2, MinIO, and other S3-compatible storage providers.
"""

from typing import Any, Literal

import aioboto3
from aiobotocore.config import AioConfig
from botocore.exceptions import ClientError


S3_SERVICE_NAME: Literal["s3"] = "s3"
DEFAULT_CONTENT_TYPE: str = "application/octet-stream"


class S3StorageService:
    """Async S3-compatible storage service for bucket and object operations.

    This service provides a clean interface to S3-compatible storage providers
    (AWS S3, Cloudflare R2, MinIO, etc.) without any database dependencies.

    All operations are async and use aioboto3 for efficient I/O.
    """

    def __init__(
        self,
        endpoint_url: str,
        access_key: str,
        secret_key: str,
        region: str = "auto",
        default_bucket: str | None = None,
        session: aioboto3.Session | None = None,
    ) -> None:
        """Initialize S3 storage service with explicit configuration.

        Args:
            endpoint_url: S3-compatible endpoint URL (e.g., "https://xxx.r2.cloudflarestorage.com")
            access_key: Access key ID for authentication
            secret_key: Secret access key for authentication
            region: AWS region (default "auto" for R2/MinIO)
            default_bucket: Optional default bucket for operations
            session: Optional custom aioboto3 session
        """
        self.endpoint_url = endpoint_url
        self.default_bucket = default_bucket
        self.session = session or aioboto3.Session(
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            region_name=region,
        )
        self.config = AioConfig(
            request_checksum_calculation="when_required",
            response_checksum_validation="when_required",
        )

    def _get_bucket(self, bucket: str | None) -> str:
        """Get bucket name, using default if not specified.

        Args:
            bucket: Optional bucket name

        Returns:
            Bucket name to use

        Raises:
            ValueError: If no bucket specified and no default bucket set
        """
        if bucket is None:
            if self.default_bucket is None:
                raise ValueError(
                    "No bucket specified and no default bucket configured. "
                    "Provide bucket parameter or set default_bucket during initialization."
                )
            return self.default_bucket
        return bucket

    # =========================================================================
    # Object Operations
    # =========================================================================

    async def put_object(
        self,
        key: str,
        body: bytes,
        content_type: str = DEFAULT_CONTENT_TYPE,
        bucket: str | None = None,
        metadata: dict[str, str] | None = None,
    ) -> None:
        """Upload an object to S3-compatible storage.

        Args:
            key: Object key (path) in the bucket
            body: Raw bytes to upload
            content_type: MIME type of the object
            bucket: Bucket name (uses default_bucket if None)
            metadata: Optional custom metadata dict
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            put_params: dict[str, Any] = {
                "Bucket": bucket_name,
                "Key": key,
                "Body": body,
                "ContentType": content_type,
            }
            if metadata:
                put_params["Metadata"] = metadata

            await s3_client.put_object(**put_params)

    async def get_object(self, key: str, bucket: str | None = None) -> bytes:
        """Retrieve an object from S3-compatible storage.

        Args:
            key: Object key (path) in the bucket
            bucket: Bucket name (uses default_bucket if None)

        Returns:
            Raw bytes of the object

        Raises:
            ClientError: If object doesn't exist or other S3 error
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            response = await s3_client.get_object(
                Bucket=bucket_name,
                Key=key,
            )
            return await response["Body"].read()

    async def delete_object(self, key: str, bucket: str | None = None) -> None:
        """Delete an object from S3-compatible storage.

        Args:
            key: Object key (path) in the bucket
            bucket: Bucket name (uses default_bucket if None)
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            await s3_client.delete_object(
                Bucket=bucket_name,
                Key=key,
            )

    async def object_exists(self, key: str, bucket: str | None = None) -> bool:
        """Check if an object exists in S3-compatible storage.

        Args:
            key: Object key (path) in the bucket
            bucket: Bucket name (uses default_bucket if None)

        Returns:
            True if object exists, False otherwise
        """
        bucket_name = self._get_bucket(bucket)

        try:
            async with self.session.client(
                service_name=S3_SERVICE_NAME,
                endpoint_url=self.endpoint_url,
                config=self.config,
            ) as s3_client:
                await s3_client.head_object(
                    Bucket=bucket_name,
                    Key=key,
                )
                return True
        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "")
            if error_code == "404":
                return False
            raise

    async def list_objects(
        self,
        prefix: str | None = None,
        bucket: str | None = None,
        max_keys: int = 1000,
    ) -> list[dict[str, Any]]:
        """List objects in a bucket with optional prefix filter.

        Args:
            prefix: Optional prefix to filter objects
            bucket: Bucket name (uses default_bucket if None)
            max_keys: Maximum number of keys to return (default 1000)

        Returns:
            List of object metadata dicts with keys: key, size, last_modified, etag
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            list_params: dict[str, Any] = {
                "Bucket": bucket_name,
                "MaxKeys": max_keys,
            }
            if prefix:
                list_params["Prefix"] = prefix

            response = await s3_client.list_objects_v2(**list_params)

            if "Contents" not in response:
                return []

            return [
                {
                    "key": obj.get("Key", ""),
                    "size": obj.get("Size", 0),
                    "last_modified": obj.get("LastModified"),
                    "etag": obj.get("ETag", "").strip('"'),
                }
                for obj in response["Contents"]
            ]

    async def get_object_metadata(
        self, key: str, bucket: str | None = None
    ) -> dict[str, Any]:
        """Get metadata for an object without downloading it.

        Args:
            key: Object key (path) in the bucket
            bucket: Bucket name (uses default_bucket if None)

        Returns:
            Metadata dict with keys: content_type, size, last_modified, etag, metadata
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            response = await s3_client.head_object(
                Bucket=bucket_name,
                Key=key,
            )

            return {
                "content_type": response.get("ContentType"),
                "size": response.get("ContentLength"),
                "last_modified": response.get("LastModified"),
                "etag": response.get("ETag", "").strip('"'),
                "metadata": response.get("Metadata", {}),
            }

    # =========================================================================
    # Bucket Operations
    # =========================================================================

    async def list_buckets(self) -> list[dict[str, Any]]:
        """List all buckets accessible with current credentials.

        Returns:
            List of bucket metadata dicts with keys: name, creation_date
        """
        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            response = await s3_client.list_buckets()

            return [
                {
                    "name": bucket.get("Name", ""),
                    "creation_date": bucket.get("CreationDate"),
                }
                for bucket in response.get("Buckets", [])
            ]

    async def create_bucket(self, bucket: str, region: str | None = None) -> None:
        """Create a new bucket.

        Args:
            bucket: Name of the bucket to create
            region: Optional region (uses session default if None)
        """
        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            create_params: dict[str, Any] = {"Bucket": bucket}

            # Only set CreateBucketConfiguration for non-us-east-1 regions
            if region and region not in ("us-east-1", "auto"):
                create_params["CreateBucketConfiguration"] = {
                    "LocationConstraint": region
                }

            await s3_client.create_bucket(**create_params)

    async def delete_bucket(self, bucket: str, force: bool = False) -> None:
        """Delete a bucket.

        Args:
            bucket: Name of the bucket to delete
            force: If True, delete all objects in bucket first

        Note:
            S3 buckets must be empty before deletion unless force=True
        """
        if force:
            # Delete all objects in the bucket first
            objects = await self.list_objects(bucket=bucket)
            async with self.session.client(
                service_name=S3_SERVICE_NAME,
                endpoint_url=self.endpoint_url,
                config=self.config,
            ) as s3_client:
                for obj in objects:
                    await s3_client.delete_object(
                        Bucket=bucket,
                        Key=obj["key"],
                    )

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            await s3_client.delete_bucket(Bucket=bucket)

    async def bucket_exists(self, bucket: str) -> bool:
        """Check if a bucket exists and is accessible.

        Args:
            bucket: Name of the bucket to check

        Returns:
            True if bucket exists and is accessible, False otherwise
        """
        try:
            async with self.session.client(
                service_name=S3_SERVICE_NAME,
                endpoint_url=self.endpoint_url,
                config=self.config,
            ) as s3_client:
                await s3_client.head_bucket(Bucket=bucket)
                return True
        except ClientError as e:
            error_code = e.response.get("Error", {}).get("Code", "")
            if error_code in ("404", "NoSuchBucket"):
                return False
            raise

    async def get_bucket_location(self, bucket: str) -> str:
        """Get the region/location of a bucket.

        Args:
            bucket: Name of the bucket

        Returns:
            Region string (e.g., "us-east-1", "auto")
        """
        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            response = await s3_client.get_bucket_location(Bucket=bucket)
            location = response.get("LocationConstraint")
            # S3 returns None for us-east-1
            return location if location else "us-east-1"

    # =========================================================================
    # Advanced Operations
    # =========================================================================

    async def copy_object(
        self,
        src_key: str,
        dest_key: str,
        src_bucket: str | None = None,
        dest_bucket: str | None = None,
    ) -> None:
        """Copy an object from one location to another.

        Args:
            src_key: Source object key
            dest_key: Destination object key
            src_bucket: Source bucket (uses default_bucket if None)
            dest_bucket: Destination bucket (uses default_bucket if None)
        """
        src_bucket_name = self._get_bucket(src_bucket)
        dest_bucket_name = self._get_bucket(dest_bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            copy_source = f"{src_bucket_name}/{src_key}"
            await s3_client.copy_object(
                CopySource=copy_source,
                Bucket=dest_bucket_name,
                Key=dest_key,
            )

    async def generate_presigned_url(
        self,
        key: str,
        bucket: str | None = None,
        expiration: int = 3600,
        method: str = "get_object",
    ) -> str:
        """Generate a presigned URL for temporary access to an object.

        Args:
            key: Object key
            bucket: Bucket name (uses default_bucket if None)
            expiration: URL expiration time in seconds (default 3600 = 1 hour)
            method: S3 method name ("get_object" or "put_object")

        Returns:
            Presigned URL string
        """
        bucket_name = self._get_bucket(bucket)

        async with self.session.client(
            service_name=S3_SERVICE_NAME,
            endpoint_url=self.endpoint_url,
            config=self.config,
        ) as s3_client:
            url = await s3_client.generate_presigned_url(
                ClientMethod=method,
                Params={
                    "Bucket": bucket_name,
                    "Key": key,
                },
                ExpiresIn=expiration,
            )
            return url

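The service above has no framework or database coupling, so exercising it only needs an endpoint and credentials. A minimal usage sketch, assuming placeholder R2-style credentials and a hypothetical "uploads" bucket; none of these values ship with the release:

import asyncio

from vibetuner.services.s3_storage import S3StorageService


async def main() -> None:
    storage = S3StorageService(
        endpoint_url="https://<account-id>.r2.cloudflarestorage.com",  # placeholder endpoint
        access_key="<access-key>",  # placeholder credential
        secret_key="<secret-key>",  # placeholder credential
        default_bucket="uploads",  # hypothetical default bucket
    )

    # Upload raw bytes, read them back, then mint a short-lived download link.
    await storage.put_object("reports/2024.txt", b"hello", content_type="text/plain")
    data = await storage.get_object("reports/2024.txt")
    url = await storage.generate_presigned_url("reports/2024.txt", expiration=600)
    print(len(data), url)


asyncio.run(main())

Since every method opens its own client via "async with self.session.client(...)", calls are independent and safe to run concurrently, at the cost of a fresh client setup per operation.
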
vibetuner/sqlmodel.py
ADDED

@@ -0,0 +1,109 @@
# ABOUTME: SQLModel/SQLAlchemy async engine setup and session management.
# ABOUTME: Provides database initialization, teardown, and FastAPI dependency injection.

from typing import AsyncGenerator, Optional

from sqlalchemy.ext.asyncio import (
    AsyncEngine,
    AsyncSession,
    async_sessionmaker,
    create_async_engine,
)
from sqlmodel import SQLModel

from vibetuner.config import settings
from vibetuner.logging import logger


# These will be filled lazily if/when database_url is set
engine: Optional[AsyncEngine] = None
async_session: Optional[async_sessionmaker[AsyncSession]] = None


def _ensure_engine() -> None:
    """
    Lazily configure the engine + sessionmaker if database_url is set.

    Safe to call multiple times.
    """
    global engine, async_session

    if settings.database_url is None:
        logger.warning("database_url is not configured. SQLModel engine is disabled.")
        return

    if engine is None:
        engine = create_async_engine(
            str(settings.database_url),
            echo=settings.debug,
        )
        async_session = async_sessionmaker(
            engine,
            class_=AsyncSession,
            expire_on_commit=False,
        )


async def init_sqlmodel() -> None:
    """
    Called from lifespan/startup.
    Initializes the database engine if DB is configured.

    Note: This does NOT create tables. Use `vibetuner db create-schema` CLI command
    for schema creation, or call `create_schema()` directly.
    """
    _ensure_engine()

    if engine is None:
        # Nothing to do, DB not configured
        return

    logger.info("SQLModel engine initialized successfully.")


async def create_schema() -> None:
    """
    Create all tables defined in SQLModel metadata.

    Call this from the CLI command `vibetuner db create-schema` or manually
    during initial setup. This is idempotent - existing tables are not modified.
    """
    _ensure_engine()

    if engine is None:
        raise RuntimeError("database_url is not configured. Cannot create schema.")

    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
    logger.info("SQLModel schema created successfully.")


async def teardown_sqlmodel() -> None:
    """
    Called from lifespan/shutdown.
    """
    global engine

    if engine is None:
        return

    await engine.dispose()
    engine = None
    logger.info("SQLModel engine disposed.")


async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """
    FastAPI dependency.

    If the DB is not configured, you can:
    - raise a RuntimeError (fail fast), OR
    - raise HTTPException(500), OR
    - return a dummy object in tests.
    """
    if async_session is None:
        # Fail fast – you can customize this to HTTPException if used only in web context
        raise RuntimeError("database_url is not configured. No DB session available.")

    async with async_session() as session:
        yield session

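get_session is written as a FastAPI dependency and fails fast when database_url is not configured. A minimal wiring sketch, assuming a hypothetical Hero table; only get_session is imported from the module above, everything else is illustrative:

from fastapi import Depends, FastAPI
from sqlalchemy.ext.asyncio import AsyncSession
from sqlmodel import Field, SQLModel, select

from vibetuner.sqlmodel import get_session


class Hero(SQLModel, table=True):  # hypothetical table, not part of the release
    id: int | None = Field(default=None, primary_key=True)
    name: str


app = FastAPI()


@app.get("/heroes")
async def list_heroes(session: AsyncSession = Depends(get_session)) -> list[Hero]:
    # get_session raises RuntimeError if database_url is not configured.
    result = await session.execute(select(Hero))
    return list(result.scalars().all())

Schema creation stays explicit: run "vibetuner db create-schema" (or call create_schema()) once before serving requests, since init_sqlmodel() deliberately does not create tables.
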
vibetuner/tasks/lifespan.py
CHANGED

@@ -3,17 +3,22 @@ from typing import AsyncGenerator
 
 from vibetuner.context import Context, ctx
 from vibetuner.logging import logger
-from vibetuner.mongo import
+from vibetuner.mongo import init_mongodb, teardown_mongodb
+from vibetuner.sqlmodel import init_sqlmodel, teardown_sqlmodel
 
 
 @asynccontextmanager
 async def base_lifespan() -> AsyncGenerator[Context, None]:
     logger.info("Vibetuner task worker starting")
 
-    await
+    await init_mongodb()
+    await init_sqlmodel()
 
     yield ctx
 
+    await teardown_sqlmodel()
+    await teardown_mongodb()
+
     logger.info("Vibetuner task worker stopping")
 
 
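The lifespan now brings up MongoDB and the SQLModel engine before yielding the context and tears them down in reverse order on shutdown. A small sketch of entering it directly, assuming standalone use; in the released package the worker in vibetuner/tasks/worker.py passes this module's lifespan to its Worker instead:

import asyncio

from vibetuner.tasks.lifespan import base_lifespan


async def main() -> None:
    # Databases are initialized before the yield and disposed when the block exits.
    async with base_lifespan() as ctx:
        print("worker context ready:", ctx)


asyncio.run(main())
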
vibetuner/tasks/worker.py
CHANGED

@@ -4,12 +4,13 @@ from vibetuner.config import settings
 from vibetuner.tasks.lifespan import lifespan
 
 
-worker =
-
-
-settings.
-
-
-)
-
+worker: Worker | None = (
+    Worker(
+        redis_url=str(settings.redis_url),
+        queue_name=settings.redis_key_prefix.rstrip(":"),
+        lifespan=lifespan,
+        concurrency=settings.worker_concurrency,
+    )
+    if settings.workers_available
+    else None
 )

vibetuner/templates/frontend/email_sent.html.jinja
CHANGED

@@ -4,7 +4,7 @@
     {{ _("Check Your Email") }}
 {% endblock title %}
 {% block body %}
-    <div class="min-h-screen bg-
+    <div class="min-h-screen bg-linear-to-br from-primary to-secondary flex items-center justify-center p-4">
         <div class="card w-full max-w-md bg-base-100 shadow-2xl backdrop-blur-sm bg-opacity-95">
             <div class="card-body text-center">
                 <!-- Success Icon -->

vibetuner/templates/frontend/index.html.jinja
CHANGED

@@ -1,4 +1,4 @@
-{% set BODY_CLASS = "min-h-screen bg-
+{% set BODY_CLASS = "min-h-screen bg-linear-to-br from-slate-50 via-blue-50 to-indigo-100 flex flex-col justify-between" %}
 {% extends "base/skeleton.html.jinja" %}
 
 {% block body %}
@@ -13,7 +13,7 @@
         </p>
         <!-- Decorative element -->
         <div class="mt-12 flex justify-center">
-            <div class="w-24 h-1 bg-
+            <div class="w-24 h-1 bg-linear-to-r from-blue-400 to-indigo-500 rounded-full"></div>
         </div>
     </div>
 </div>

vibetuner/templates/frontend/login.html.jinja
CHANGED

@@ -4,7 +4,7 @@
     {{ _("Sign In") }}
 {% endblock title %}
 {% block body %}
-    <div class="min-h-screen bg-
+    <div class="min-h-screen bg-linear-to-br from-primary to-secondary flex items-center justify-center p-4">
         <div class="card w-full max-w-md bg-base-100 shadow-2xl backdrop-blur-sm bg-opacity-95">
             <div class="card-body">
                 <!-- Header -->

vibetuner/templates/frontend/user/edit.html.jinja
CHANGED

@@ -64,7 +64,7 @@
         <!-- Additional Info Card -->
         <div class="mt-6 bg-blue-50 border border-blue-200 rounded-lg p-4">
             <div class="flex items-start">
-                <div class="
+                <div class="shrink-0">
                     <svg class="h-5 w-5 text-blue-600"
                          xmlns="http://www.w3.org/2000/svg"
                          viewBox="0 0 20 20"

vibetuner/templates/frontend/user/profile.html.jinja
CHANGED

@@ -15,7 +15,7 @@
     <!-- Profile Card -->
     <div class="bg-white shadow-lg rounded-lg overflow-hidden">
         <!-- Profile Header -->
-        <div class="bg-
+        <div class="bg-linear-to-r from-blue-500 to-indigo-600 px-6 py-8">
             <div class="flex items-center space-x-4">
                 <!-- Avatar -->
                 {% if user.picture or (user.oauth_accounts and user.oauth_accounts[0].picture) %}
|