@weirdfingers/baseboards 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +191 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +887 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
- package/templates/README.md +120 -0
- package/templates/api/.env.example +62 -0
- package/templates/api/Dockerfile +32 -0
- package/templates/api/README.md +132 -0
- package/templates/api/alembic/env.py +106 -0
- package/templates/api/alembic/script.py.mako +28 -0
- package/templates/api/alembic/versions/20250101_000000_initial_schema.py +448 -0
- package/templates/api/alembic/versions/20251022_174729_remove_provider_name_from_generations.py +71 -0
- package/templates/api/alembic/versions/20251023_165852_switch_to_declarative_base_and_mapping.py +411 -0
- package/templates/api/alembic/versions/2025925_62735_add_seed_data_for_default_tenant.py +85 -0
- package/templates/api/alembic.ini +36 -0
- package/templates/api/config/generators.yaml +25 -0
- package/templates/api/config/storage_config.yaml +26 -0
- package/templates/api/docs/ADDING_GENERATORS.md +409 -0
- package/templates/api/docs/GENERATORS_API.md +502 -0
- package/templates/api/docs/MIGRATIONS.md +472 -0
- package/templates/api/docs/storage_providers.md +337 -0
- package/templates/api/pyproject.toml +165 -0
- package/templates/api/src/boards/__init__.py +10 -0
- package/templates/api/src/boards/api/app.py +171 -0
- package/templates/api/src/boards/api/auth.py +75 -0
- package/templates/api/src/boards/api/endpoints/__init__.py +3 -0
- package/templates/api/src/boards/api/endpoints/jobs.py +76 -0
- package/templates/api/src/boards/api/endpoints/setup.py +505 -0
- package/templates/api/src/boards/api/endpoints/sse.py +129 -0
- package/templates/api/src/boards/api/endpoints/storage.py +74 -0
- package/templates/api/src/boards/api/endpoints/tenant_registration.py +296 -0
- package/templates/api/src/boards/api/endpoints/webhooks.py +13 -0
- package/templates/api/src/boards/auth/__init__.py +15 -0
- package/templates/api/src/boards/auth/adapters/__init__.py +20 -0
- package/templates/api/src/boards/auth/adapters/auth0.py +220 -0
- package/templates/api/src/boards/auth/adapters/base.py +73 -0
- package/templates/api/src/boards/auth/adapters/clerk.py +172 -0
- package/templates/api/src/boards/auth/adapters/jwt.py +122 -0
- package/templates/api/src/boards/auth/adapters/none.py +102 -0
- package/templates/api/src/boards/auth/adapters/oidc.py +284 -0
- package/templates/api/src/boards/auth/adapters/supabase.py +110 -0
- package/templates/api/src/boards/auth/context.py +35 -0
- package/templates/api/src/boards/auth/factory.py +115 -0
- package/templates/api/src/boards/auth/middleware.py +221 -0
- package/templates/api/src/boards/auth/provisioning.py +129 -0
- package/templates/api/src/boards/auth/tenant_extraction.py +278 -0
- package/templates/api/src/boards/cli.py +354 -0
- package/templates/api/src/boards/config.py +116 -0
- package/templates/api/src/boards/database/__init__.py +7 -0
- package/templates/api/src/boards/database/cli.py +110 -0
- package/templates/api/src/boards/database/connection.py +252 -0
- package/templates/api/src/boards/database/models.py +19 -0
- package/templates/api/src/boards/database/seed_data.py +182 -0
- package/templates/api/src/boards/dbmodels/__init__.py +455 -0
- package/templates/api/src/boards/generators/__init__.py +57 -0
- package/templates/api/src/boards/generators/artifacts.py +53 -0
- package/templates/api/src/boards/generators/base.py +140 -0
- package/templates/api/src/boards/generators/implementations/__init__.py +12 -0
- package/templates/api/src/boards/generators/implementations/audio/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/audio/whisper.py +66 -0
- package/templates/api/src/boards/generators/implementations/image/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/image/dalle3.py +93 -0
- package/templates/api/src/boards/generators/implementations/image/flux_pro.py +85 -0
- package/templates/api/src/boards/generators/implementations/video/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/video/lipsync.py +70 -0
- package/templates/api/src/boards/generators/loader.py +253 -0
- package/templates/api/src/boards/generators/registry.py +114 -0
- package/templates/api/src/boards/generators/resolution.py +515 -0
- package/templates/api/src/boards/generators/testmods/class_gen.py +34 -0
- package/templates/api/src/boards/generators/testmods/import_side_effect.py +35 -0
- package/templates/api/src/boards/graphql/__init__.py +7 -0
- package/templates/api/src/boards/graphql/access_control.py +136 -0
- package/templates/api/src/boards/graphql/mutations/root.py +136 -0
- package/templates/api/src/boards/graphql/queries/root.py +116 -0
- package/templates/api/src/boards/graphql/resolvers/__init__.py +8 -0
- package/templates/api/src/boards/graphql/resolvers/auth.py +12 -0
- package/templates/api/src/boards/graphql/resolvers/board.py +1055 -0
- package/templates/api/src/boards/graphql/resolvers/generation.py +889 -0
- package/templates/api/src/boards/graphql/resolvers/generator.py +50 -0
- package/templates/api/src/boards/graphql/resolvers/user.py +25 -0
- package/templates/api/src/boards/graphql/schema.py +81 -0
- package/templates/api/src/boards/graphql/types/board.py +102 -0
- package/templates/api/src/boards/graphql/types/generation.py +130 -0
- package/templates/api/src/boards/graphql/types/generator.py +17 -0
- package/templates/api/src/boards/graphql/types/user.py +47 -0
- package/templates/api/src/boards/jobs/repository.py +104 -0
- package/templates/api/src/boards/logging.py +195 -0
- package/templates/api/src/boards/middleware.py +339 -0
- package/templates/api/src/boards/progress/__init__.py +4 -0
- package/templates/api/src/boards/progress/models.py +25 -0
- package/templates/api/src/boards/progress/publisher.py +64 -0
- package/templates/api/src/boards/py.typed +0 -0
- package/templates/api/src/boards/redis_pool.py +118 -0
- package/templates/api/src/boards/storage/__init__.py +52 -0
- package/templates/api/src/boards/storage/base.py +363 -0
- package/templates/api/src/boards/storage/config.py +187 -0
- package/templates/api/src/boards/storage/factory.py +278 -0
- package/templates/api/src/boards/storage/implementations/__init__.py +27 -0
- package/templates/api/src/boards/storage/implementations/gcs.py +340 -0
- package/templates/api/src/boards/storage/implementations/local.py +201 -0
- package/templates/api/src/boards/storage/implementations/s3.py +294 -0
- package/templates/api/src/boards/storage/implementations/supabase.py +218 -0
- package/templates/api/src/boards/tenant_isolation.py +446 -0
- package/templates/api/src/boards/validation.py +262 -0
- package/templates/api/src/boards/workers/__init__.py +1 -0
- package/templates/api/src/boards/workers/actors.py +201 -0
- package/templates/api/src/boards/workers/cli.py +125 -0
- package/templates/api/src/boards/workers/context.py +188 -0
- package/templates/api/src/boards/workers/middleware.py +58 -0
- package/templates/api/src/py.typed +0 -0
- package/templates/compose.dev.yaml +39 -0
- package/templates/compose.yaml +109 -0
- package/templates/docker/env.example +23 -0
- package/templates/web/.env.example +28 -0
- package/templates/web/Dockerfile +51 -0
- package/templates/web/components.json +22 -0
- package/templates/web/imageLoader.js +18 -0
- package/templates/web/next-env.d.ts +5 -0
- package/templates/web/next.config.js +36 -0
- package/templates/web/package.json +37 -0
- package/templates/web/postcss.config.mjs +7 -0
- package/templates/web/public/favicon.ico +0 -0
- package/templates/web/src/app/boards/[boardId]/page.tsx +232 -0
- package/templates/web/src/app/globals.css +120 -0
- package/templates/web/src/app/layout.tsx +21 -0
- package/templates/web/src/app/page.tsx +35 -0
- package/templates/web/src/app/providers.tsx +18 -0
- package/templates/web/src/components/boards/ArtifactInputSlots.tsx +142 -0
- package/templates/web/src/components/boards/ArtifactPreview.tsx +125 -0
- package/templates/web/src/components/boards/GenerationGrid.tsx +45 -0
- package/templates/web/src/components/boards/GenerationInput.tsx +251 -0
- package/templates/web/src/components/boards/GeneratorSelector.tsx +89 -0
- package/templates/web/src/components/header.tsx +30 -0
- package/templates/web/src/components/ui/button.tsx +58 -0
- package/templates/web/src/components/ui/card.tsx +92 -0
- package/templates/web/src/components/ui/navigation-menu.tsx +168 -0
- package/templates/web/src/lib/utils.ts +6 -0
- package/templates/web/tsconfig.json +47 -0
package/templates/api/src/boards/storage/implementations/gcs.py
@@ -0,0 +1,340 @@
"""Google Cloud Storage provider with IAM auth and CDN support."""

import functools
import json
import os
from collections.abc import AsyncIterator
from datetime import UTC, datetime, timedelta
from pathlib import Path
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    from google.cloud import storage

try:
    import asyncio

    from google.auth import default
    from google.auth.exceptions import DefaultCredentialsError
    from google.cloud import storage
    from google.cloud.exceptions import GoogleCloudError, NotFound

    _gcs_available = True
except ImportError:
    storage = None
    NotFound = None
    GoogleCloudError = None
    default = None
    DefaultCredentialsError = None
    _gcs_available = False

from ...logging import get_logger
from ..base import StorageException, StorageProvider

logger = get_logger(__name__)


class GCSStorageProvider(StorageProvider):
    """Google Cloud Storage with IAM auth, Cloud CDN, and proper async patterns."""

    def __init__(
        self,
        bucket: str,
        project_id: str | None = None,
        credentials_path: str | None = None,
        credentials_json: str | None = None,
        cdn_domain: str | None = None,
        upload_config: dict[str, Any] | None = None,
    ):
        if not _gcs_available:
            raise ImportError(
                "google-cloud-storage is required for GCSStorageProvider. "
                "Install with: pip install google-cloud-storage"
            )

        self.bucket_name = bucket
        self.project_id = project_id
        self.credentials_path = credentials_path
        self.credentials_json = credentials_json
        self.cdn_domain = cdn_domain

        # Default upload configuration
        self.upload_config = {
            "cache_control": "public, max-age=3600",
            "predefined_acl": None,  # Use bucket's default ACL
            **(upload_config or {}),
        }

        self._client: Any | None = None
        self._bucket: Any | None = None

        # Client will be initialized lazily on first use

    def _get_client(self) -> Any:
        """Get or create the GCS client with proper authentication."""
        if self._client is None:
            if storage is None:
                raise ImportError("google-cloud-storage is required for GCSStorageProvider")

            try:
                if self.credentials_json:
                    # Use JSON credentials string
                    credentials_info = json.loads(self.credentials_json)
                    from google.oauth2 import service_account

                    credentials = service_account.Credentials.from_service_account_info(
                        credentials_info,
                        scopes=["https://www.googleapis.com/auth/cloud-platform"],
                    )
                    self._client = storage.Client(credentials=credentials, project=self.project_id)
                elif self.credentials_path:
                    # Use service account file
                    credentials_path = Path(self.credentials_path)
                    if not credentials_path.exists():
                        raise FileNotFoundError(
                            f"Credentials file not found: {self.credentials_path}"
                        )

                    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = str(credentials_path)
                    self._client = storage.Client(project=self.project_id)
                else:
                    # Use default credentials (environment variables, gcloud, etc.)
                    self._client = storage.Client(project=self.project_id)

                # Get bucket reference
                self._bucket = self._client.bucket(self.bucket_name)

            except Exception as e:
                logger.error(f"Failed to initialize GCS client: {e}")
                raise StorageException(f"GCS client initialization failed: {e}") from e

        return self._client

    async def _run_sync(self, func, *args, **kwargs) -> Any:
        """Run synchronous GCS operations in a thread pool so they don't block the event loop."""
        # run_in_executor does not forward keyword arguments, so bind them with partial
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, functools.partial(func, *args, **kwargs))

    async def upload(
        self,
        key: str,
        content: bytes | AsyncIterator[bytes],
        content_type: str,
        metadata: dict[str, Any] | None = None,
    ) -> str:
        """Upload content to GCS."""
        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            # Create blob object
            blob = bucket.blob(key)

            # Set content type
            blob.content_type = content_type

            # Set cache control and other configuration
            if self.upload_config.get("cache_control"):
                blob.cache_control = self.upload_config["cache_control"]

            # Add custom metadata
            if metadata:
                # GCS metadata keys must be lowercase and can contain only letters,
                # numbers, and underscores
                gcs_metadata = {}
                for k, v in metadata.items():
                    # Convert key to lowercase and replace invalid characters
                    clean_key = k.lower().replace("-", "_").replace(" ", "_")
                    gcs_metadata[clean_key] = str(v)
                blob.metadata = gcs_metadata

            # Handle streaming content for large files
            if isinstance(content, bytes):
                file_content = content
            else:
                # Collect streaming content into memory for upload
                # For very large files, consider using resumable uploads
                chunks = []
                total_size = 0
                async for chunk in content:
                    chunks.append(chunk)
                    total_size += len(chunk)
                    # For files larger than 100MB, we could implement resumable upload
                    if total_size > 100 * 1024 * 1024:
                        logger.warning(
                            f"Large file upload ({total_size} bytes) - "
                            f"consider implementing resumable upload for key: {key}"
                        )

                file_content = b"".join(chunks)

            # Upload using thread pool to avoid blocking
            await self._run_sync(blob.upload_from_string, file_content, content_type=content_type)

            # Return the CDN URL if configured, otherwise the public GCS URL
            if self.cdn_domain:
                return f"https://{self.cdn_domain}/{key}"
            else:
                return f"https://storage.googleapis.com/{self.bucket_name}/{key}"

        except Exception as e:
            if isinstance(e, StorageException):
                raise
            logger.error(f"Unexpected error uploading {key} to GCS: {e}")
            raise StorageException(f"GCS upload failed: {e}") from e

    async def download(self, key: str) -> bytes:
        """Download file content from GCS."""
        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)

            # Download using thread pool to avoid blocking
            content = await self._run_sync(blob.download_as_bytes)
            return content

        except Exception as e:
            if isinstance(e, StorageException):
                raise
            logger.error(f"Failed to download {key} from GCS: {e}")
            raise StorageException(f"GCS download failed: {e}") from e

    async def get_presigned_upload_url(
        self,
        key: str,
        content_type: str,
        expires_in: timedelta | None = None,
    ) -> dict[str, Any]:
        """Generate a presigned URL for direct client uploads."""
        if expires_in is None:
            expires_in = timedelta(hours=1)

        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)

            # Generate signed URL for PUT operations
            url = await self._run_sync(
                blob.generate_signed_url,
                version="v4",
                expiration=expires_in,
                method="PUT",
                content_type=content_type,
                headers={"Content-Type": content_type},
            )

            return {
                "url": url,
                "method": "PUT",
                "headers": {"Content-Type": content_type},
                "expires_at": (datetime.now(UTC) + expires_in).isoformat(),
            }

        except Exception as e:
            if isinstance(e, StorageException):
                raise
            logger.error(f"Failed to create presigned upload URL for {key}: {e}")
            raise StorageException(f"GCS presigned URL creation failed: {e}") from e

    async def get_presigned_download_url(
        self, key: str, expires_in: timedelta | None = None
    ) -> str:
        """Generate a presigned URL for secure downloads."""
        if expires_in is None:
            expires_in = timedelta(hours=1)

        try:
            # Always use GCS native signed URLs for security
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)

            # Generate signed URL for GET operations
            url = await self._run_sync(
                blob.generate_signed_url,
                version="v4",
                expiration=expires_in,
                method="GET",
            )

            return url

        except Exception as e:
            if isinstance(e, StorageException):
                raise
            logger.error(f"Failed to create presigned download URL for {key}: {e}")
            raise StorageException(f"GCS presigned download URL creation failed: {e}") from e

    async def delete(self, key: str) -> bool:
        """Delete file by storage key."""
        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)
            await self._run_sync(blob.delete)
            return True

        except Exception as e:
            logger.error(f"Unexpected error deleting {key} from GCS: {e}")
            raise StorageException(f"GCS delete failed: {e}") from e

    async def exists(self, key: str) -> bool:
        """Check if file exists."""
        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)
            exists = await self._run_sync(blob.exists)
            return exists

        except Exception:
            return False

    async def get_metadata(self, key: str) -> dict[str, Any]:
        """Get file metadata (size, modified date, etc.)."""
        try:
            # Get client (initializes on first use)
            client = self._get_client()
            bucket = client.bucket(self.bucket_name)

            blob = bucket.blob(key)

            # Reload blob to get latest metadata
            await self._run_sync(blob.reload)

            result = {
                "size": blob.size or 0,
                "last_modified": blob.updated,
                "content_type": blob.content_type,
                "etag": blob.etag,
                "generation": blob.generation,
                "storage_class": blob.storage_class,
                "cache_control": blob.cache_control,
                "content_encoding": blob.content_encoding,
                "content_disposition": blob.content_disposition,
                "content_language": blob.content_language,
            }

            # Add custom metadata
            if blob.metadata:
                result["custom_metadata"] = blob.metadata

            return result

        except Exception as e:
            if isinstance(e, StorageException):
                raise
            logger.error(f"Failed to get metadata for {key} from GCS: {e}")
            raise StorageException(f"GCS get metadata failed: {e}") from e
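
A minimal usage sketch for the provider above (not part of the package): it exercises the GCSStorageProvider constructor and the upload / signed-URL methods shown in the diff. The bucket, project, and key names are placeholders, and the import path is assumed from the template's src/boards/... layout; real use requires GCS credentials.

# Hypothetical usage sketch; names marked as placeholders are not from the package.
import asyncio

from boards.storage.implementations.gcs import GCSStorageProvider  # assumed import path


async def main() -> None:
    provider = GCSStorageProvider(
        bucket="my-example-bucket",        # placeholder bucket
        project_id="my-example-project",   # placeholder project
        cdn_domain=None,                   # e.g. "cdn.example.com" to get CDN URLs back
        upload_config={"cache_control": "public, max-age=86400"},
    )

    # Upload a small payload; metadata keys are normalized to lowercase_with_underscores
    url = await provider.upload(
        key="boards/demo/hello.txt",
        content=b"hello from baseboards",
        content_type="text/plain",
        metadata={"Uploaded-By": "example"},
    )
    print("public URL:", url)

    # Ask for a time-limited signed download URL (defaults to 1 hour)
    signed = await provider.get_presigned_download_url("boards/demo/hello.txt")
    print("signed URL:", signed)


asyncio.run(main())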
package/templates/api/src/boards/storage/implementations/local.py
@@ -0,0 +1,201 @@
"""Local filesystem storage provider for development and self-hosted deployments."""

import json
from collections.abc import AsyncIterable
from datetime import timedelta
from pathlib import Path
from typing import Any
from urllib.parse import quote

import aiofiles

from ...logging import get_logger
from ..base import SecurityException, StorageException, StorageProvider

logger = get_logger(__name__)


class LocalStorageProvider(StorageProvider):
    """Local filesystem storage for development and self-hosted deployments, with path-safety checks."""

    def __init__(self, base_path: Path, public_url_base: str | None = None):
        self.base_path = Path(base_path).resolve()  # Resolve to absolute path
        self.public_url_base = public_url_base
        self.base_path.mkdir(parents=True, exist_ok=True)

    def _get_safe_file_path(self, key: str) -> Path:
        """Get file path with security validation."""
        # Ensure the resolved path is within base_path
        file_path = (self.base_path / key).resolve()

        # Check that resolved path is within base directory
        try:
            file_path.relative_to(self.base_path)
        except ValueError as e:
            raise SecurityException(f"Path traversal detected: {key}") from e

        return file_path

    async def upload(
        self,
        key: str,
        content: bytes | bytearray | memoryview | AsyncIterable[bytes],
        content_type: str,
        metadata: dict[str, Any] | None = None,
    ) -> str:
        logger.info("Uploading file", key=key, content_type=content_type, metadata=metadata)
        try:
            file_path = self._get_safe_file_path(key)
            file_path.parent.mkdir(parents=True, exist_ok=True)

            # Handle both bytes-like and async iterable content
            if isinstance(content, bytes | bytearray | memoryview):
                # aiofiles accepts bytes-like objects directly
                async with aiofiles.open(file_path, "wb") as f:
                    await f.write(content)
            else:  # AsyncIterable[bytes]
                async with aiofiles.open(file_path, "wb") as f:
                    async for chunk in content:
                        # Write each chunk directly; aiofiles raises if a chunk is not bytes-like
                        await f.write(chunk)

            # Store metadata in a sidecar .meta file
            if metadata:
                try:
                    metadata_path = file_path.with_suffix(file_path.suffix + ".meta")
                    metadata_json = json.dumps(metadata, indent=2)

                    async with aiofiles.open(metadata_path, "w") as f:
                        await f.write(metadata_json)
                except Exception as e:
                    logger.warning(f"Failed to write metadata for {key}: {e}")
                    # Continue - metadata failure shouldn't fail the upload

            logger.debug(f"Successfully uploaded {key} to local storage")
            return self._get_public_url(key)

        except OSError as e:
            logger.error(f"File system error uploading {key}: {e}")
            raise StorageException(f"Failed to write file: {e}") from e
        except Exception as e:
            logger.error(f"Unexpected error uploading {key}: {e}")
            raise StorageException(f"Upload failed: {e}") from e

    def _get_public_url(self, key: str) -> str:
        """Generate public URL for the stored file."""
        if self.public_url_base:
            # URL-encode the key for safety
            encoded_key = quote(key, safe="/")
            return f"{self.public_url_base.rstrip('/')}/{encoded_key}"
        else:
            return f"file://{self.base_path / key}"

    async def download(self, key: str) -> bytes:
        """Download file content from local storage."""
        try:
            file_path = self._get_safe_file_path(key)

            if not file_path.exists():
                raise StorageException(f"File not found: {key}")

            async with aiofiles.open(file_path, "rb") as f:
                return await f.read()

        except OSError as e:
            logger.error(f"File system error downloading {key}: {e}")
            raise StorageException(f"Failed to read file: {e}") from e
        except Exception as e:
            logger.error(f"Unexpected error downloading {key}: {e}")
            raise StorageException(f"Download failed: {e}") from e

    async def get_presigned_upload_url(
        self, key: str, content_type: str, expires_in: timedelta | None = None
    ) -> dict[str, Any]:
        """Local storage doesn't support presigned URLs - return direct upload info."""
        # Local storage cannot issue true presigned URLs; uploads are handled
        # by the web server itself (e.g., a FastAPI endpoint).
        return {
            "url": f"/api/storage/upload/{quote(key, safe='/')}",
            "fields": {"content-type": content_type},
            "method": "PUT",
            "expires_at": None,  # Handled by server session
        }

    async def get_presigned_download_url(
        self, key: str, expires_in: timedelta | None = None
    ) -> str:
        """Return the public URL for local storage."""
        return self._get_public_url(key)

    async def delete(self, key: str) -> bool:
        """Delete file by storage key."""
        try:
            file_path = self._get_safe_file_path(key)

            if not file_path.exists():
                return False

            # Delete the main file
            file_path.unlink()

            # Delete metadata file if it exists
            metadata_path = file_path.with_suffix(file_path.suffix + ".meta")
            if metadata_path.exists():
                metadata_path.unlink()

            logger.debug(f"Successfully deleted {key} from local storage")
            return True

        except OSError as e:
            logger.error(f"File system error deleting {key}: {e}")
            raise StorageException(f"Failed to delete file: {e}") from e
        except Exception as e:
            logger.error(f"Unexpected error deleting {key}: {e}")
            raise StorageException(f"Delete failed: {e}") from e

    async def exists(self, key: str) -> bool:
        """Check if file exists."""
        try:
            file_path = self._get_safe_file_path(key)
            return file_path.exists()
        except SecurityException:
            return False
        except Exception as e:
            logger.warning(f"Error checking existence of {key}: {e}")
            return False

    async def get_metadata(self, key: str) -> dict[str, Any]:
        """Get file metadata (size, modified date, etc.)."""
        try:
            file_path = self._get_safe_file_path(key)

            if not file_path.exists():
                raise StorageException(f"File not found: {key}")

            stat = file_path.stat()

            # Try to load stored metadata from the sidecar file
            stored_metadata = {}
            metadata_path = file_path.with_suffix(file_path.suffix + ".meta")
            if metadata_path.exists():
                try:
                    async with aiofiles.open(metadata_path) as f:
                        metadata_content = await f.read()
                        stored_metadata = json.loads(metadata_content)
                except Exception as e:
                    logger.warning(f"Failed to load metadata for {key}: {e}")

            return {
                "size": stat.st_size,
                "modified_time": stat.st_mtime,
                "created_time": stat.st_ctime,
                **stored_metadata,
            }

        except OSError as e:
            logger.error(f"File system error getting metadata for {key}: {e}")
            raise StorageException(f"Failed to get metadata: {e}") from e
        except Exception as e:
            logger.error(f"Unexpected error getting metadata for {key}: {e}")
            raise StorageException(f"Get metadata failed: {e}") from e
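
A minimal usage sketch for the local provider (not part of the package): it exercises the LocalStorageProvider API shown above, including the sidecar metadata file and the path-traversal guard. The storage directory, public URL base, and import path are assumptions, not values from the package.

# Hypothetical usage sketch; directory and URL values are placeholders.
import asyncio
from pathlib import Path

from boards.storage.implementations.local import LocalStorageProvider  # assumed import path


async def main() -> None:
    provider = LocalStorageProvider(
        base_path=Path("/tmp/baseboards-storage"),          # placeholder directory
        public_url_base="http://localhost:8000/storage",     # placeholder URL base
    )

    # Upload bytes; metadata is written to a sidecar .meta file next to the object
    url = await provider.upload(
        key="boards/demo/hello.txt",
        content=b"hello from local storage",
        content_type="text/plain",
        metadata={"uploaded_by": "example"},
    )
    print("URL:", url)

    # Filesystem stats plus the stored metadata are merged into one dict
    info = await provider.get_metadata("boards/demo/hello.txt")
    print("size:", info["size"], "uploaded_by:", info.get("uploaded_by"))

    # Keys that escape base_path raise SecurityException in _get_safe_file_path;
    # exists() swallows that and reports False instead of leaking paths.
    print(await provider.exists("../etc/passwd"))  # False

    await provider.delete("boards/demo/hello.txt")


asyncio.run(main())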