@weirdfingers/baseboards 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +191 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +887 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
- package/templates/README.md +120 -0
- package/templates/api/.env.example +62 -0
- package/templates/api/Dockerfile +32 -0
- package/templates/api/README.md +132 -0
- package/templates/api/alembic/env.py +106 -0
- package/templates/api/alembic/script.py.mako +28 -0
- package/templates/api/alembic/versions/20250101_000000_initial_schema.py +448 -0
- package/templates/api/alembic/versions/20251022_174729_remove_provider_name_from_generations.py +71 -0
- package/templates/api/alembic/versions/20251023_165852_switch_to_declarative_base_and_mapping.py +411 -0
- package/templates/api/alembic/versions/2025925_62735_add_seed_data_for_default_tenant.py +85 -0
- package/templates/api/alembic.ini +36 -0
- package/templates/api/config/generators.yaml +25 -0
- package/templates/api/config/storage_config.yaml +26 -0
- package/templates/api/docs/ADDING_GENERATORS.md +409 -0
- package/templates/api/docs/GENERATORS_API.md +502 -0
- package/templates/api/docs/MIGRATIONS.md +472 -0
- package/templates/api/docs/storage_providers.md +337 -0
- package/templates/api/pyproject.toml +165 -0
- package/templates/api/src/boards/__init__.py +10 -0
- package/templates/api/src/boards/api/app.py +171 -0
- package/templates/api/src/boards/api/auth.py +75 -0
- package/templates/api/src/boards/api/endpoints/__init__.py +3 -0
- package/templates/api/src/boards/api/endpoints/jobs.py +76 -0
- package/templates/api/src/boards/api/endpoints/setup.py +505 -0
- package/templates/api/src/boards/api/endpoints/sse.py +129 -0
- package/templates/api/src/boards/api/endpoints/storage.py +74 -0
- package/templates/api/src/boards/api/endpoints/tenant_registration.py +296 -0
- package/templates/api/src/boards/api/endpoints/webhooks.py +13 -0
- package/templates/api/src/boards/auth/__init__.py +15 -0
- package/templates/api/src/boards/auth/adapters/__init__.py +20 -0
- package/templates/api/src/boards/auth/adapters/auth0.py +220 -0
- package/templates/api/src/boards/auth/adapters/base.py +73 -0
- package/templates/api/src/boards/auth/adapters/clerk.py +172 -0
- package/templates/api/src/boards/auth/adapters/jwt.py +122 -0
- package/templates/api/src/boards/auth/adapters/none.py +102 -0
- package/templates/api/src/boards/auth/adapters/oidc.py +284 -0
- package/templates/api/src/boards/auth/adapters/supabase.py +110 -0
- package/templates/api/src/boards/auth/context.py +35 -0
- package/templates/api/src/boards/auth/factory.py +115 -0
- package/templates/api/src/boards/auth/middleware.py +221 -0
- package/templates/api/src/boards/auth/provisioning.py +129 -0
- package/templates/api/src/boards/auth/tenant_extraction.py +278 -0
- package/templates/api/src/boards/cli.py +354 -0
- package/templates/api/src/boards/config.py +116 -0
- package/templates/api/src/boards/database/__init__.py +7 -0
- package/templates/api/src/boards/database/cli.py +110 -0
- package/templates/api/src/boards/database/connection.py +252 -0
- package/templates/api/src/boards/database/models.py +19 -0
- package/templates/api/src/boards/database/seed_data.py +182 -0
- package/templates/api/src/boards/dbmodels/__init__.py +455 -0
- package/templates/api/src/boards/generators/__init__.py +57 -0
- package/templates/api/src/boards/generators/artifacts.py +53 -0
- package/templates/api/src/boards/generators/base.py +140 -0
- package/templates/api/src/boards/generators/implementations/__init__.py +12 -0
- package/templates/api/src/boards/generators/implementations/audio/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/audio/whisper.py +66 -0
- package/templates/api/src/boards/generators/implementations/image/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/image/dalle3.py +93 -0
- package/templates/api/src/boards/generators/implementations/image/flux_pro.py +85 -0
- package/templates/api/src/boards/generators/implementations/video/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/video/lipsync.py +70 -0
- package/templates/api/src/boards/generators/loader.py +253 -0
- package/templates/api/src/boards/generators/registry.py +114 -0
- package/templates/api/src/boards/generators/resolution.py +515 -0
- package/templates/api/src/boards/generators/testmods/class_gen.py +34 -0
- package/templates/api/src/boards/generators/testmods/import_side_effect.py +35 -0
- package/templates/api/src/boards/graphql/__init__.py +7 -0
- package/templates/api/src/boards/graphql/access_control.py +136 -0
- package/templates/api/src/boards/graphql/mutations/root.py +136 -0
- package/templates/api/src/boards/graphql/queries/root.py +116 -0
- package/templates/api/src/boards/graphql/resolvers/__init__.py +8 -0
- package/templates/api/src/boards/graphql/resolvers/auth.py +12 -0
- package/templates/api/src/boards/graphql/resolvers/board.py +1055 -0
- package/templates/api/src/boards/graphql/resolvers/generation.py +889 -0
- package/templates/api/src/boards/graphql/resolvers/generator.py +50 -0
- package/templates/api/src/boards/graphql/resolvers/user.py +25 -0
- package/templates/api/src/boards/graphql/schema.py +81 -0
- package/templates/api/src/boards/graphql/types/board.py +102 -0
- package/templates/api/src/boards/graphql/types/generation.py +130 -0
- package/templates/api/src/boards/graphql/types/generator.py +17 -0
- package/templates/api/src/boards/graphql/types/user.py +47 -0
- package/templates/api/src/boards/jobs/repository.py +104 -0
- package/templates/api/src/boards/logging.py +195 -0
- package/templates/api/src/boards/middleware.py +339 -0
- package/templates/api/src/boards/progress/__init__.py +4 -0
- package/templates/api/src/boards/progress/models.py +25 -0
- package/templates/api/src/boards/progress/publisher.py +64 -0
- package/templates/api/src/boards/py.typed +0 -0
- package/templates/api/src/boards/redis_pool.py +118 -0
- package/templates/api/src/boards/storage/__init__.py +52 -0
- package/templates/api/src/boards/storage/base.py +363 -0
- package/templates/api/src/boards/storage/config.py +187 -0
- package/templates/api/src/boards/storage/factory.py +278 -0
- package/templates/api/src/boards/storage/implementations/__init__.py +27 -0
- package/templates/api/src/boards/storage/implementations/gcs.py +340 -0
- package/templates/api/src/boards/storage/implementations/local.py +201 -0
- package/templates/api/src/boards/storage/implementations/s3.py +294 -0
- package/templates/api/src/boards/storage/implementations/supabase.py +218 -0
- package/templates/api/src/boards/tenant_isolation.py +446 -0
- package/templates/api/src/boards/validation.py +262 -0
- package/templates/api/src/boards/workers/__init__.py +1 -0
- package/templates/api/src/boards/workers/actors.py +201 -0
- package/templates/api/src/boards/workers/cli.py +125 -0
- package/templates/api/src/boards/workers/context.py +188 -0
- package/templates/api/src/boards/workers/middleware.py +58 -0
- package/templates/api/src/py.typed +0 -0
- package/templates/compose.dev.yaml +39 -0
- package/templates/compose.yaml +109 -0
- package/templates/docker/env.example +23 -0
- package/templates/web/.env.example +28 -0
- package/templates/web/Dockerfile +51 -0
- package/templates/web/components.json +22 -0
- package/templates/web/imageLoader.js +18 -0
- package/templates/web/next-env.d.ts +5 -0
- package/templates/web/next.config.js +36 -0
- package/templates/web/package.json +37 -0
- package/templates/web/postcss.config.mjs +7 -0
- package/templates/web/public/favicon.ico +0 -0
- package/templates/web/src/app/boards/[boardId]/page.tsx +232 -0
- package/templates/web/src/app/globals.css +120 -0
- package/templates/web/src/app/layout.tsx +21 -0
- package/templates/web/src/app/page.tsx +35 -0
- package/templates/web/src/app/providers.tsx +18 -0
- package/templates/web/src/components/boards/ArtifactInputSlots.tsx +142 -0
- package/templates/web/src/components/boards/ArtifactPreview.tsx +125 -0
- package/templates/web/src/components/boards/GenerationGrid.tsx +45 -0
- package/templates/web/src/components/boards/GenerationInput.tsx +251 -0
- package/templates/web/src/components/boards/GeneratorSelector.tsx +89 -0
- package/templates/web/src/components/header.tsx +30 -0
- package/templates/web/src/components/ui/button.tsx +58 -0
- package/templates/web/src/components/ui/card.tsx +92 -0
- package/templates/web/src/components/ui/navigation-menu.tsx +168 -0
- package/templates/web/src/lib/utils.ts +6 -0
- package/templates/web/tsconfig.json +47 -0
package/templates/api/src/boards/storage/implementations/s3.py
@@ -0,0 +1,294 @@
+"""AWS S3 storage provider with IAM auth and CloudFront CDN support."""
+
+from collections.abc import AsyncIterator
+from datetime import UTC, datetime, timedelta
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    import boto3
+
+try:
+    import aioboto3
+    import boto3
+    from botocore.config import Config
+    from botocore.exceptions import ClientError, NoCredentialsError
+
+    _s3_available = True
+except ImportError:
+    boto3 = None
+    ClientError = None
+    NoCredentialsError = None
+    Config = None
+    aioboto3 = None
+    _s3_available = False
+
+from ...logging import get_logger
+from ..base import StorageException, StorageProvider
+
+logger = get_logger(__name__)
+
+
+class S3StorageProvider(StorageProvider):
+    """AWS S3 storage with IAM auth, CloudFront CDN, and proper async patterns."""
+
+    def __init__(
+        self,
+        bucket: str,
+        region: str = "us-east-1",
+        aws_access_key_id: str | None = None,
+        aws_secret_access_key: str | None = None,
+        aws_session_token: str | None = None,
+        endpoint_url: str | None = None,
+        cloudfront_domain: str | None = None,
+        upload_config: dict[str, Any] | None = None,
+    ):
+        if not _s3_available:
+            raise ImportError("boto3 and aioboto3 are required for S3StorageProvider")
+
+        self.bucket = bucket
+        self.region = region
+        self.aws_access_key_id = aws_access_key_id
+        self.aws_secret_access_key = aws_secret_access_key
+        self.aws_session_token = aws_session_token
+        self.endpoint_url = endpoint_url
+        self.cloudfront_domain = cloudfront_domain
+
+        # Default upload configuration
+        self.upload_config = {
+            "ServerSideEncryption": "AES256",
+            "StorageClass": "STANDARD",
+            **(upload_config or {}),
+        }
+
+        # Configure boto3 with optimized settings
+        self.config = Config(  # type: ignore[reportUnknownMemberType]
+            region_name=self.region,
+            retries={"max_attempts": 3, "mode": "adaptive"},
+            max_pool_connections=50,
+        )
+
+        self._session: Any | None = None
+
+    def _get_session(self) -> Any:
+        """Get or create the aioboto3 session."""
+        if self._session is None:
+            self._session = aioboto3.Session(  # type: ignore[reportUnknownMemberType]
+                aws_access_key_id=self.aws_access_key_id,
+                aws_secret_access_key=self.aws_secret_access_key,
+                aws_session_token=self.aws_session_token,
+                region_name=self.region,
+            )
+        return self._session
+
+    async def upload(
+        self,
+        key: str,
+        content: bytes | AsyncIterator[bytes],
+        content_type: str,
+        metadata: dict[str, Any] | None = None,
+    ) -> str:
+        """Upload content to S3."""
+        try:
+            session = self._get_session()
+
+            # Prepare upload parameters
+            upload_params = {
+                "Bucket": self.bucket,
+                "Key": key,
+                "ContentType": content_type,
+                **self.upload_config,
+            }
+
+            # Add custom metadata (S3 requires x-amz-meta- prefix)
+            if metadata:
+                s3_metadata = {}
+                for k, v in metadata.items():
+                    # Convert values to strings and sanitize keys
+                    clean_key = k.replace("-", "_").replace(" ", "_")
+                    s3_metadata[clean_key] = str(v)
+                upload_params["Metadata"] = s3_metadata
+
+            # Handle streaming content for large files
+            if isinstance(content, bytes):
+                upload_params["Body"] = content
+            else:
+                # Collect streaming content into memory for upload
+                # For very large files, consider using S3 multipart upload
+                chunks = []
+                total_size = 0
+                async for chunk in content:
+                    chunks.append(chunk)
+                    total_size += len(chunk)
+                    # For files larger than 100MB, we could implement multipart upload
+                    if total_size > 100 * 1024 * 1024:
+                        logger.warning(
+                            f"Large file upload ({total_size} bytes) - "
+                            f"consider implementing multipart upload for key: {key}"
+                        )
+
+                upload_params["Body"] = b"".join(chunks)
+
+            # Upload using aioboto3
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                await s3.put_object(**upload_params)
+
+            # Return the CloudFront URL if configured, otherwise S3 URL
+            if self.cloudfront_domain:
+                return f"https://{self.cloudfront_domain}/{key}"
+            else:
+                return f"https://{self.bucket}.s3.{self.region}.amazonaws.com/{key}"
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Unexpected error uploading {key} to S3: {e}")
+            raise StorageException(f"S3 upload failed: {e}") from e
+
+    async def download(self, key: str) -> bytes:
+        """Download file content from S3."""
+        try:
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                response = await s3.get_object(Bucket=self.bucket, Key=key)
+
+                # Read the streaming body
+                content = await response["Body"].read()
+                return content
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to download {key} from S3: {e}")
+            raise StorageException(f"S3 download failed: {e}") from e
+
+    async def get_presigned_upload_url(
+        self,
+        key: str,
+        content_type: str,
+        expires_in: timedelta | None = None,
+    ) -> dict[str, Any]:
+        """Generate presigned URL for direct client uploads."""
+        if expires_in is None:
+            expires_in = timedelta(hours=1)
+
+        try:
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                # Generate presigned POST for direct uploads with form fields
+                response = await s3.generate_presigned_post(
+                    Bucket=self.bucket,
+                    Key=key,
+                    Fields={"Content-Type": content_type, **self.upload_config},
+                    Conditions=[
+                        {"Content-Type": content_type},
+                        [
+                            "content-length-range",
+                            1,
+                            self.upload_config.get("max_file_size", 100 * 1024 * 1024),
+                        ],
+                    ],
+                    ExpiresIn=int(expires_in.total_seconds()),
+                )
+
+                return {
+                    "url": response["url"],
+                    "fields": response["fields"],
+                    "expires_at": (datetime.now(UTC) + expires_in).isoformat(),
+                }
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to create presigned upload URL for {key}: {e}")
+            raise StorageException(f"S3 presigned URL creation failed: {e}") from e
+
+    async def get_presigned_download_url(
+        self, key: str, expires_in: timedelta | None = None
+    ) -> str:
+        """Generate presigned URL for secure downloads."""
+        if expires_in is None:
+            expires_in = timedelta(hours=1)
+
+        try:
+            # Always use S3 native presigned URLs for security
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                url = await s3.generate_presigned_url(
+                    "get_object",
+                    Params={"Bucket": self.bucket, "Key": key},
+                    ExpiresIn=int(expires_in.total_seconds()),
+                )
+                return url
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to create presigned download URL for {key}: {e}")
+            raise StorageException(f"S3 presigned download URL creation failed: {e}") from e
+
+    async def delete(self, key: str) -> bool:
+        """Delete file by storage key."""
+        try:
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                await s3.delete_object(Bucket=self.bucket, Key=key)
+                return True
+
+        except Exception as e:
+            logger.error(f"Unexpected error deleting {key} from S3: {e}")
+            raise StorageException(f"S3 delete failed: {e}") from e
+
+    async def exists(self, key: str) -> bool:
+        """Check if file exists."""
+        try:
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                await s3.head_object(Bucket=self.bucket, Key=key)
+                return True
+        except Exception:
+            return False
+
+    async def get_metadata(self, key: str) -> dict[str, Any]:
+        """Get file metadata (size, modified date, etc.)."""
+        try:
+            session = self._get_session()
+            async with session.client(
+                "s3", config=self.config, endpoint_url=self.endpoint_url
+            ) as s3:
+                response = await s3.head_object(Bucket=self.bucket, Key=key)
+
+                # Extract metadata
+                result = {
+                    "size": response.get("ContentLength", 0),
+                    "last_modified": response.get("LastModified"),
+                    "content_type": response.get("ContentType"),
+                    "etag": response.get("ETag", "").strip('"'),
+                    "version_id": response.get("VersionId"),
+                    "storage_class": response.get("StorageClass", "STANDARD"),
+                    "server_side_encryption": response.get("ServerSideEncryption"),
+                }
+
+                # Add custom metadata (remove x-amz-meta- prefix)
+                custom_metadata = response.get("Metadata", {})
+                if custom_metadata:
+                    result["custom_metadata"] = custom_metadata
+
+                return result
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to get metadata for {key} from S3: {e}")
+            raise StorageException(f"S3 get metadata failed: {e}") from e
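
A minimal usage sketch for the S3 provider above (not part of the package diff). It assumes the API template is installed as the boards package so the module path matches the file list, that boto3 and aioboto3 are installed, and that the bucket name and object key shown are placeholders; when no explicit keys are passed, credentials resolve through the standard AWS chain (environment variables, shared config, or an attached IAM role).

import asyncio

from boards.storage.implementations.s3 import S3StorageProvider


async def main() -> None:
    # Hypothetical bucket and region; endpoint_url/cloudfront_domain stay at defaults.
    storage = S3StorageProvider(bucket="my-app-artifacts", region="us-east-1")

    # Upload raw bytes; the provider returns an S3 (or CloudFront) URL for the object.
    url = await storage.upload(
        key="boards/demo/hello.txt",
        content=b"hello world",
        content_type="text/plain",
        metadata={"source": "usage-sketch"},
    )
    print("stored at:", url)

    # Presigned GET URL for time-limited, private downloads (defaults to one hour).
    signed = await storage.get_presigned_download_url(key="boards/demo/hello.txt")
    print("download via:", signed)


asyncio.run(main())
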
package/templates/api/src/boards/storage/implementations/supabase.py
@@ -0,0 +1,218 @@
+"""Supabase storage provider with integrated auth and CDN support."""
+
+import os
+import tempfile
+from collections.abc import AsyncIterator
+from datetime import UTC, datetime, timedelta
+from typing import TYPE_CHECKING, Any
+
+import aiofiles
+
+if TYPE_CHECKING:
+    from supabase import AsyncClient, create_async_client
+
+try:
+    from supabase import AsyncClient, create_async_client
+
+    _supabase_available = True
+except ImportError:
+    # Handle case where supabase is not installed
+    create_async_client = None
+    # AsyncClient = None
+    _supabase_available = False
+
+from ...logging import get_logger
+from ..base import StorageException, StorageProvider
+
+logger = get_logger(__name__)
+
+
+class SupabaseStorageProvider(StorageProvider):
+    """Supabase storage with integrated auth, CDN, and proper async patterns."""
+
+    def __init__(self, url: str, key: str, bucket: str):
+        if not _supabase_available:
+            raise ImportError("supabase-py is required for SupabaseStorageProvider")
+
+        self.url = url
+        self.key = key
+        self.bucket = bucket
+        self._client: AsyncClient | None = None
+
+    async def _get_client(self) -> "AsyncClient":
+        """Get or create the async Supabase client."""
+        if self._client is None:
+            if create_async_client is None:
+                raise ImportError("Async Supabase client not available")
+            self._client = await create_async_client(self.url, self.key)
+        return self._client
+
+    async def upload(
+        self,
+        key: str,
+        content: bytes | AsyncIterator[bytes],
+        content_type: str,
+        metadata: dict[str, Any] | None = None,
+    ) -> str:
+        try:
+            client = await self._get_client()
+
+            # Handle streaming content for large files
+            if isinstance(content, bytes):
+                file_content = content
+            else:
+                # Stream to temp file to avoid memory issues
+                with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
+                    tmp_file_path = tmp_file.name
+
+                # Use async file operations for streaming content
+                async with aiofiles.open(tmp_file_path, "wb") as f:
+                    async for chunk in content:
+                        await f.write(chunk)
+
+                # Read the temp file asynchronously and upload
+                async with aiofiles.open(tmp_file_path, "rb") as f:
+                    file_content = await f.read()
+
+                # Clean up temp file
+                os.unlink(tmp_file_path)
+
+            # Use async Supabase client methods
+            response = await client.storage.from_(self.bucket).upload(
+                path=key,
+                file=file_content,
+                file_options={
+                    "content-type": content_type,
+                    "upsert": "false",  # Prevent accidental overwrites
+                },
+            )
+
+            return response.path
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Unexpected error uploading {key} to Supabase: {e}")
+            raise StorageException(f"Supabase upload failed: {e}") from e
+
+    async def download(self, key: str) -> bytes:
+        """Download file content from Supabase storage."""
+        try:
+            client = await self._get_client()
+            response = await client.storage.from_(self.bucket).download(key)
+
+            return response
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to download {key} from Supabase: {e}")
+            raise StorageException(f"Download failed: {e}") from e
+
+    async def get_presigned_upload_url(
+        self,
+        key: str,
+        content_type: str,
+        expires_in: timedelta | None = None,
+    ) -> dict[str, Any]:
+        """Generate presigned URL for direct client uploads."""
+        if expires_in is None:
+            expires_in = timedelta(hours=1)
+
+        try:
+            client = await self._get_client()
+            response = await client.storage.from_(self.bucket).create_signed_upload_url(path=key)
+
+            return {
+                "url": response["signed_url"],
+                "fields": {},  # Supabase doesn't use form fields like S3
+                "expires_at": (datetime.now(UTC) + expires_in).isoformat(),
+            }
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to create presigned upload URL for {key}: {e}")
+            raise StorageException(f"Presigned URL creation failed: {e}") from e
+
+    async def get_presigned_download_url(
+        self, key: str, expires_in: timedelta | None = None
+    ) -> str:
+        """Generate presigned URL for secure downloads."""
+        if expires_in is None:
+            expires_in = timedelta(hours=1)
+
+        try:
+            client = await self._get_client()
+            response = await client.storage.from_(self.bucket).create_signed_url(
+                path=key, expires_in=int(expires_in.total_seconds())
+            )
+
+            return response["signedURL"]
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to create presigned download URL for {key}: {e}")
+            raise StorageException(f"Presigned download URL creation failed: {e}") from e
+
+    async def delete(self, key: str) -> bool:
+        """Delete file by storage key."""
+        try:
+            client = await self._get_client()
+            await client.storage.from_(self.bucket).remove([key])  # type: ignore[reportUnknownMemberType]
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Unexpected error deleting {key} from Supabase: {e}")
+            raise StorageException(f"Delete failed: {e}") from e
+
+    async def exists(self, key: str) -> bool:
+        """Check if file exists."""
+        try:
+            client = await self._get_client()
+            # Try to get file info - if it doesn't exist, this will error
+            await client.storage.from_(self.bucket).get_public_url(key)
+            # If we get here without error, the file exists
+            return True
+        except Exception:
+            # Any error means the file doesn't exist or we can't access it
+            return False
+
+    async def get_metadata(self, key: str) -> dict[str, Any]:
+        """Get file metadata (size, modified date, etc.)."""
+        try:
+            client = await self._get_client()
+            # Supabase doesn't have a direct metadata endpoint
+            # We'll need to use the list method with a prefix
+            response = await client.storage.from_(self.bucket).list(
+                path="/".join(key.split("/")[:-1]) or "/"
+            )
+
+            # Find our file in the results
+            file_info = None
+            filename = key.split("/")[-1]
+            for item in response:
+                if item.get("name") == filename:
+                    file_info = item
+                    break
+
+            if not file_info:
+                raise StorageException(f"File not found: {key}")
+
+            metadata = file_info.get("metadata", {})
+            result = {
+                "size": file_info.get("size", 0),
+                "last_modified": file_info.get("updated_at"),
+                "content_type": file_info.get("mimetype"),
+                "etag": file_info.get("id"),
+            }
+            if isinstance(metadata, dict):
+                result.update(metadata)
+            return result
+
+        except Exception as e:
+            if isinstance(e, StorageException):
+                raise
+            logger.error(f"Failed to get metadata for {key} from Supabase: {e}")
+            raise StorageException(f"Get metadata failed: {e}") from e
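
A similar minimal sketch for the Supabase provider above (again, not part of the package diff). The environment variable names and bucket are hypothetical placeholders; it assumes the template is installed as the boards package and that the supabase and aiofiles dependencies are available.

import asyncio
import os

from boards.storage.implementations.supabase import SupabaseStorageProvider


async def main() -> None:
    # Hypothetical project URL, service key, and bucket; in the template these would
    # normally come from its storage configuration rather than raw environment reads.
    storage = SupabaseStorageProvider(
        url=os.environ["SUPABASE_URL"],
        key=os.environ["SUPABASE_SERVICE_KEY"],
        bucket="generations",
    )

    # upload() returns the storage path reported by Supabase, not a full URL.
    path = await storage.upload(
        key="boards/demo/hello.txt",
        content=b"hello world",
        content_type="text/plain",
    )
    print("uploaded path:", path)

    # Signed URL for a time-limited download (defaults to one hour).
    signed = await storage.get_presigned_download_url(key="boards/demo/hello.txt")
    print("download via:", signed)


asyncio.run(main())
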