@weirdfingers/baseboards 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +191 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +887 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
- package/templates/README.md +120 -0
- package/templates/api/.env.example +62 -0
- package/templates/api/Dockerfile +32 -0
- package/templates/api/README.md +132 -0
- package/templates/api/alembic/env.py +106 -0
- package/templates/api/alembic/script.py.mako +28 -0
- package/templates/api/alembic/versions/20250101_000000_initial_schema.py +448 -0
- package/templates/api/alembic/versions/20251022_174729_remove_provider_name_from_generations.py +71 -0
- package/templates/api/alembic/versions/20251023_165852_switch_to_declarative_base_and_mapping.py +411 -0
- package/templates/api/alembic/versions/2025925_62735_add_seed_data_for_default_tenant.py +85 -0
- package/templates/api/alembic.ini +36 -0
- package/templates/api/config/generators.yaml +25 -0
- package/templates/api/config/storage_config.yaml +26 -0
- package/templates/api/docs/ADDING_GENERATORS.md +409 -0
- package/templates/api/docs/GENERATORS_API.md +502 -0
- package/templates/api/docs/MIGRATIONS.md +472 -0
- package/templates/api/docs/storage_providers.md +337 -0
- package/templates/api/pyproject.toml +165 -0
- package/templates/api/src/boards/__init__.py +10 -0
- package/templates/api/src/boards/api/app.py +171 -0
- package/templates/api/src/boards/api/auth.py +75 -0
- package/templates/api/src/boards/api/endpoints/__init__.py +3 -0
- package/templates/api/src/boards/api/endpoints/jobs.py +76 -0
- package/templates/api/src/boards/api/endpoints/setup.py +505 -0
- package/templates/api/src/boards/api/endpoints/sse.py +129 -0
- package/templates/api/src/boards/api/endpoints/storage.py +74 -0
- package/templates/api/src/boards/api/endpoints/tenant_registration.py +296 -0
- package/templates/api/src/boards/api/endpoints/webhooks.py +13 -0
- package/templates/api/src/boards/auth/__init__.py +15 -0
- package/templates/api/src/boards/auth/adapters/__init__.py +20 -0
- package/templates/api/src/boards/auth/adapters/auth0.py +220 -0
- package/templates/api/src/boards/auth/adapters/base.py +73 -0
- package/templates/api/src/boards/auth/adapters/clerk.py +172 -0
- package/templates/api/src/boards/auth/adapters/jwt.py +122 -0
- package/templates/api/src/boards/auth/adapters/none.py +102 -0
- package/templates/api/src/boards/auth/adapters/oidc.py +284 -0
- package/templates/api/src/boards/auth/adapters/supabase.py +110 -0
- package/templates/api/src/boards/auth/context.py +35 -0
- package/templates/api/src/boards/auth/factory.py +115 -0
- package/templates/api/src/boards/auth/middleware.py +221 -0
- package/templates/api/src/boards/auth/provisioning.py +129 -0
- package/templates/api/src/boards/auth/tenant_extraction.py +278 -0
- package/templates/api/src/boards/cli.py +354 -0
- package/templates/api/src/boards/config.py +116 -0
- package/templates/api/src/boards/database/__init__.py +7 -0
- package/templates/api/src/boards/database/cli.py +110 -0
- package/templates/api/src/boards/database/connection.py +252 -0
- package/templates/api/src/boards/database/models.py +19 -0
- package/templates/api/src/boards/database/seed_data.py +182 -0
- package/templates/api/src/boards/dbmodels/__init__.py +455 -0
- package/templates/api/src/boards/generators/__init__.py +57 -0
- package/templates/api/src/boards/generators/artifacts.py +53 -0
- package/templates/api/src/boards/generators/base.py +140 -0
- package/templates/api/src/boards/generators/implementations/__init__.py +12 -0
- package/templates/api/src/boards/generators/implementations/audio/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/audio/whisper.py +66 -0
- package/templates/api/src/boards/generators/implementations/image/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/image/dalle3.py +93 -0
- package/templates/api/src/boards/generators/implementations/image/flux_pro.py +85 -0
- package/templates/api/src/boards/generators/implementations/video/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/video/lipsync.py +70 -0
- package/templates/api/src/boards/generators/loader.py +253 -0
- package/templates/api/src/boards/generators/registry.py +114 -0
- package/templates/api/src/boards/generators/resolution.py +515 -0
- package/templates/api/src/boards/generators/testmods/class_gen.py +34 -0
- package/templates/api/src/boards/generators/testmods/import_side_effect.py +35 -0
- package/templates/api/src/boards/graphql/__init__.py +7 -0
- package/templates/api/src/boards/graphql/access_control.py +136 -0
- package/templates/api/src/boards/graphql/mutations/root.py +136 -0
- package/templates/api/src/boards/graphql/queries/root.py +116 -0
- package/templates/api/src/boards/graphql/resolvers/__init__.py +8 -0
- package/templates/api/src/boards/graphql/resolvers/auth.py +12 -0
- package/templates/api/src/boards/graphql/resolvers/board.py +1055 -0
- package/templates/api/src/boards/graphql/resolvers/generation.py +889 -0
- package/templates/api/src/boards/graphql/resolvers/generator.py +50 -0
- package/templates/api/src/boards/graphql/resolvers/user.py +25 -0
- package/templates/api/src/boards/graphql/schema.py +81 -0
- package/templates/api/src/boards/graphql/types/board.py +102 -0
- package/templates/api/src/boards/graphql/types/generation.py +130 -0
- package/templates/api/src/boards/graphql/types/generator.py +17 -0
- package/templates/api/src/boards/graphql/types/user.py +47 -0
- package/templates/api/src/boards/jobs/repository.py +104 -0
- package/templates/api/src/boards/logging.py +195 -0
- package/templates/api/src/boards/middleware.py +339 -0
- package/templates/api/src/boards/progress/__init__.py +4 -0
- package/templates/api/src/boards/progress/models.py +25 -0
- package/templates/api/src/boards/progress/publisher.py +64 -0
- package/templates/api/src/boards/py.typed +0 -0
- package/templates/api/src/boards/redis_pool.py +118 -0
- package/templates/api/src/boards/storage/__init__.py +52 -0
- package/templates/api/src/boards/storage/base.py +363 -0
- package/templates/api/src/boards/storage/config.py +187 -0
- package/templates/api/src/boards/storage/factory.py +278 -0
- package/templates/api/src/boards/storage/implementations/__init__.py +27 -0
- package/templates/api/src/boards/storage/implementations/gcs.py +340 -0
- package/templates/api/src/boards/storage/implementations/local.py +201 -0
- package/templates/api/src/boards/storage/implementations/s3.py +294 -0
- package/templates/api/src/boards/storage/implementations/supabase.py +218 -0
- package/templates/api/src/boards/tenant_isolation.py +446 -0
- package/templates/api/src/boards/validation.py +262 -0
- package/templates/api/src/boards/workers/__init__.py +1 -0
- package/templates/api/src/boards/workers/actors.py +201 -0
- package/templates/api/src/boards/workers/cli.py +125 -0
- package/templates/api/src/boards/workers/context.py +188 -0
- package/templates/api/src/boards/workers/middleware.py +58 -0
- package/templates/api/src/py.typed +0 -0
- package/templates/compose.dev.yaml +39 -0
- package/templates/compose.yaml +109 -0
- package/templates/docker/env.example +23 -0
- package/templates/web/.env.example +28 -0
- package/templates/web/Dockerfile +51 -0
- package/templates/web/components.json +22 -0
- package/templates/web/imageLoader.js +18 -0
- package/templates/web/next-env.d.ts +5 -0
- package/templates/web/next.config.js +36 -0
- package/templates/web/package.json +37 -0
- package/templates/web/postcss.config.mjs +7 -0
- package/templates/web/public/favicon.ico +0 -0
- package/templates/web/src/app/boards/[boardId]/page.tsx +232 -0
- package/templates/web/src/app/globals.css +120 -0
- package/templates/web/src/app/layout.tsx +21 -0
- package/templates/web/src/app/page.tsx +35 -0
- package/templates/web/src/app/providers.tsx +18 -0
- package/templates/web/src/components/boards/ArtifactInputSlots.tsx +142 -0
- package/templates/web/src/components/boards/ArtifactPreview.tsx +125 -0
- package/templates/web/src/components/boards/GenerationGrid.tsx +45 -0
- package/templates/web/src/components/boards/GenerationInput.tsx +251 -0
- package/templates/web/src/components/boards/GeneratorSelector.tsx +89 -0
- package/templates/web/src/components/header.tsx +30 -0
- package/templates/web/src/components/ui/button.tsx +58 -0
- package/templates/web/src/components/ui/card.tsx +92 -0
- package/templates/web/src/components/ui/navigation-menu.tsx +168 -0
- package/templates/web/src/lib/utils.ts +6 -0
- package/templates/web/tsconfig.json +47 -0
package/templates/api/src/boards/redis_pool.py

@@ -0,0 +1,118 @@
"""Centralized Redis connection pool management.

This module provides a singleton Redis connection pool that can be shared
across the application to reduce connection overhead and improve performance.
"""

from __future__ import annotations

import redis.asyncio as redis
from redis.asyncio.connection import ConnectionPool

from .config import Settings
from .logging import get_logger

logger = get_logger(__name__)


class RedisPoolManager:
    """Singleton manager for Redis connection pool."""

    _instance: RedisPoolManager | None = None
    _pool: ConnectionPool | None = None
    _client: redis.Redis | None = None

    def __new__(cls) -> RedisPoolManager:
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        """Initialize the Redis pool manager."""
        if self._pool is None:
            settings = Settings()

            # Create connection pool with sensible defaults
            # These can be tuned based on your application's needs
            self._pool = redis.ConnectionPool.from_url(
                settings.redis_url,
                decode_responses=True,
                max_connections=50,  # Maximum number of connections
                socket_connect_timeout=5,  # Connection timeout in seconds
                socket_timeout=5,  # Socket timeout in seconds
                retry_on_timeout=True,  # Retry on timeout
                health_check_interval=30,  # Health check every 30 seconds
            )

            # Create Redis client using the pool
            self._client = redis.Redis(connection_pool=self._pool)

            logger.info(
                "Redis connection pool initialized with max_connections=50, "
                "health_check_interval=30s"
            )

    @property
    def client(self) -> redis.Redis:
        """Get the Redis client with connection pooling."""
        if self._client is None:
            raise RuntimeError("Redis pool not initialized")
        return self._client

    @property
    def pool(self) -> ConnectionPool:
        """Get the underlying connection pool."""
        if self._pool is None:
            raise RuntimeError("Redis pool not initialized")
        return self._pool

    async def close(self):
        """Close the Redis connection pool."""
        if self._client:
            await self._client.close()
            logger.info("Redis client closed")
        if self._pool:
            await self._pool.disconnect()
            logger.info("Redis connection pool disconnected")

    async def health_check(self) -> bool:
        """Check if Redis connection is healthy."""
        try:
            if self._client is None:
                logger.error("Redis client not initialized")
                return False
            await self._client.ping()
            return True
        except Exception as e:
            logger.error(f"Redis health check failed: {e}")
            return False


# Global instance
_redis_pool_manager = RedisPoolManager()


def get_redis_client() -> redis.Redis:
    """Get a Redis client with connection pooling.

    Returns:
        Redis client instance with connection pooling enabled.
    """
    return _redis_pool_manager.client


async def close_redis_pool():
    """Close the Redis connection pool.

    Call this during application shutdown to cleanly close connections.
    """
    await _redis_pool_manager.close()


async def check_redis_health() -> bool:
    """Check if Redis is healthy and accessible.

    Returns:
        True if Redis is healthy, False otherwise.
    """
    return await _redis_pool_manager.health_check()
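A usage sketch, not part of the published diff: the module's helpers are meant to be wired into application startup and shutdown. The example below assumes `Settings` can resolve `redis_url` from the environment and that `boards` is importable as the template's package root.

# Hedged example: exercising the redis_pool helpers from a standalone script.
import asyncio

from boards.redis_pool import check_redis_health, close_redis_pool, get_redis_client


async def main() -> None:
    client = get_redis_client()  # shared client backed by the singleton pool
    await client.set("boards:example", "1", ex=60)
    print(await client.get("boards:example"))
    print("healthy:", await check_redis_health())
    await close_redis_pool()  # call once at application shutdown


asyncio.run(main())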
package/templates/api/src/boards/storage/__init__.py

@@ -0,0 +1,52 @@
"""Storage system for Boards artifacts.

This module provides a pluggable storage architecture that supports:
- Local filesystem storage for development
- Supabase storage with auth integration
- S3 storage for enterprise deployments
- Custom storage providers via plugin system

Main components:
- StorageProvider: Abstract base class for storage implementations
- StorageManager: Central coordinator for routing and operations
- ArtifactReference: Metadata about stored artifacts
"""

from .base import (
    ArtifactReference,
    SecurityException,
    StorageConfig,
    StorageException,
    StorageManager,
    StorageProvider,
    ValidationException,
)
from .config import (
    create_example_config,
    load_storage_config,
)
from .factory import (
    create_development_storage,
    create_storage_manager,
    create_storage_provider,
    get_storage_config,
)

__all__ = [
    # Base classes and exceptions
    "StorageProvider",
    "StorageManager",
    "StorageConfig",
    "ArtifactReference",
    "StorageException",
    "SecurityException",
    "ValidationException",
    # Factory functions
    "create_storage_provider",
    "create_storage_manager",
    "create_development_storage",
    "get_storage_config",
    # Configuration
    "load_storage_config",
    "create_example_config",
]
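Illustrative only, not from the diff: the re-exported names can be combined by hand as below. In practice the factory helpers (`create_storage_manager`, `load_storage_config`) would build this from config/storage_config.yaml, but their signatures are not shown in this hunk. The provider settings dicts here are placeholders; the routing-rule shape follows `StorageManager._select_provider` in base.py below.

# Hand-built configuration sketch using the package's exported dataclasses.
from boards.storage import StorageConfig, StorageManager

config = StorageConfig(
    default_provider="local",
    providers={"local": {}, "s3": {}},  # provider-specific settings omitted (placeholders)
    routing_rules=[
        # Route large videos to "s3"; everything else falls through to the default provider.
        {"condition": {"artifact_type": "video", "size_gt": "100MB"}, "provider": "s3"},
    ],
)
manager = StorageManager(config)  # providers still need to be registered before use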
package/templates/api/src/boards/storage/base.py

@@ -0,0 +1,363 @@
"""Core storage interfaces and manager implementation."""

import asyncio
import re
import uuid
from abc import ABC, abstractmethod
from collections.abc import AsyncIterator
from dataclasses import dataclass, field
from datetime import UTC, datetime, timedelta
from typing import Any

from ..logging import get_logger

logger = get_logger(__name__)


@dataclass
class StorageConfig:
    """Configuration for storage system."""

    default_provider: str
    providers: dict[str, dict[str, Any]]
    routing_rules: list[dict[str, Any]]
    max_file_size: int = 100 * 1024 * 1024  # 100MB default
    allowed_content_types: set[str] = field(default_factory=set)

    def __post_init__(self):
        if not self.allowed_content_types:
            self.allowed_content_types = {
                "image/jpeg",
                "image/png",
                "image/webp",
                "image/gif",
                "video/mp4",
                "video/webm",
                "video/quicktime",
                "audio/mpeg",
                "audio/wav",
                "audio/ogg",
                "text/plain",
                "application/json",
                "text/markdown",
                "application/octet-stream",  # For model files
            }


@dataclass
class ArtifactReference:
    """Reference to a stored artifact."""

    artifact_id: str
    storage_key: str
    storage_provider: str
    storage_url: str
    content_type: str
    size: int = 0
    created_at: datetime | None = None

    def __post_init__(self):
        if self.created_at is None:
            self.created_at = datetime.now(UTC)


class StorageException(Exception):
    """Base exception for storage operations."""

    pass


class SecurityException(StorageException):
    """Security-related storage exception."""

    pass


class ValidationException(StorageException):
    """Content validation exception."""

    pass


class StorageProvider(ABC):
    """Abstract base class for all storage providers."""

    @abstractmethod
    async def upload(
        self,
        key: str,
        content: bytes | AsyncIterator[bytes],
        content_type: str,
        metadata: dict[str, Any] | None = None,
    ) -> str:
        """Upload content and return storage reference.

        Args:
            key: Storage key (must be validated before calling)
            content: File content as bytes or async iterator
            content_type: MIME type (must be validated)
            metadata: Optional metadata dictionary

        Returns:
            storage reference

        Raises:
            StorageException: On upload failure
            SecurityException: On security validation failure
        """
        pass

    @abstractmethod
    async def download(self, key: str) -> bytes:
        """Download content by storage key."""
        pass

    @abstractmethod
    async def get_presigned_upload_url(
        self, key: str, content_type: str, expires_in: timedelta | None = None
    ) -> dict[str, Any]:
        """Generate presigned URL for direct client uploads."""
        pass

    @abstractmethod
    async def get_presigned_download_url(
        self, key: str, expires_in: timedelta | None = None
    ) -> str:
        """Generate presigned URL for secure downloads."""
        pass

    @abstractmethod
    async def delete(self, key: str) -> bool:
        """Delete file by storage key."""
        pass

    @abstractmethod
    async def exists(self, key: str) -> bool:
        """Check if file exists."""
        pass

    @abstractmethod
    async def get_metadata(self, key: str) -> dict[str, Any]:
        """Get file metadata (size, modified date, etc.)."""
        pass


class StorageManager:
    """Central storage coordinator handling provider selection and routing."""

    def __init__(self, config: StorageConfig):
        self.providers: dict[str, StorageProvider] = {}
        self.default_provider = config.default_provider
        self.routing_rules = config.routing_rules
        self.config = config

    def _validate_storage_key(self, key: str) -> str:
        """Validate and sanitize storage key to prevent path traversal."""
        # Remove any path traversal attempts
        if ".." in key or key.startswith("/") or "\\" in key:
            raise SecurityException(f"Invalid storage key: {key}")

        # Sanitize key components
        key_parts = key.split("/")
        sanitized_parts: list[str] = []

        for part in key_parts:
            # Remove dangerous characters, keep alphanumeric, hyphens, underscores, dots
            sanitized = re.sub(r"[^a-zA-Z0-9._-]", "", part)
            if not sanitized:
                raise SecurityException(f"Invalid key component: {part}")
            sanitized_parts.append(sanitized)

        return "/".join(sanitized_parts)

    def _validate_content_type(self, content_type: str) -> None:
        """Validate content type against allowed types."""
        if content_type not in self.config.allowed_content_types:
            raise ValidationException(f"Content type not allowed: {content_type}")

    def _validate_file_size(self, content_size: int) -> None:
        """Validate file size against limits."""
        if content_size > self.config.max_file_size:
            raise ValidationException(
                f"File size {content_size} exceeds limit {self.config.max_file_size}"
            )

    def register_provider(self, name: str, provider: StorageProvider):
        """Register a storage provider."""
        self.providers[name] = provider

    async def store_artifact(
        self,
        artifact_id: str,
        content: bytes | AsyncIterator[bytes],
        artifact_type: str,
        content_type: str,
        tenant_id: str | None = None,
        board_id: str | None = None,
    ) -> ArtifactReference:
        """Store artifact with comprehensive validation and error handling."""

        try:
            # Validate content type
            self._validate_content_type(content_type)

            # Validate content size if it's bytes
            if isinstance(content, bytes):
                self._validate_file_size(len(content))

            # Generate and validate storage key
            key = self._generate_storage_key(artifact_id, artifact_type, tenant_id, board_id)
            validated_key = self._validate_storage_key(key)

            # Select provider based on routing rules
            provider_name = self._select_provider(artifact_type, content)
            if provider_name not in self.providers:
                raise StorageException(f"Provider not found: {provider_name}")

            provider = self.providers[provider_name]

            # Prepare metadata
            metadata = {
                "artifact_id": artifact_id,
                "artifact_type": artifact_type,
                "tenant_id": tenant_id,
                "board_id": board_id,
                "uploaded_at": datetime.now(UTC).isoformat(),
                "content_type": content_type,
            }

            # Store the content with retry logic
            storage_url = await self._upload_with_retry(
                provider, validated_key, content, content_type, metadata
            )

            logger.info(f"Successfully stored artifact {artifact_id} at {validated_key}")

            return ArtifactReference(
                artifact_id=artifact_id,
                storage_key=validated_key,
                storage_provider=provider_name,
                storage_url=storage_url,
                content_type=content_type,
                size=len(content) if isinstance(content, bytes) else 0,
                created_at=datetime.now(UTC),
            )

        except (SecurityException, ValidationException) as e:
            logger.error(f"Validation failed for artifact {artifact_id}: {e}")
            raise
        except Exception as e:
            logger.error(f"Failed to store artifact {artifact_id}: {e}")
            raise StorageException(f"Storage operation failed: {e}") from e

    async def _upload_with_retry(
        self,
        provider: StorageProvider,
        key: str,
        content: bytes | AsyncIterator[bytes],
        content_type: str,
        metadata: dict[str, Any],
        max_retries: int = 3,
    ) -> str:
        """Upload with exponential backoff retry logic."""

        if max_retries <= 0:
            max_retries = 1

        for attempt in range(max_retries):
            try:
                return await provider.upload(key, content, content_type, metadata)
            except Exception as e:
                if attempt == max_retries - 1:
                    raise

                wait_time = 2**attempt  # Exponential backoff
                logger.warning(
                    f"Upload attempt {attempt + 1} failed: {e}. Retrying in {wait_time}s"
                )
                await asyncio.sleep(wait_time)

        # This should never be reached due to the exception handling above
        raise StorageException("Upload failed after all retries")

    def _generate_storage_key(
        self,
        artifact_id: str,
        artifact_type: str,
        tenant_id: str | None = None,
        board_id: str | None = None,
        variant: str = "original",
    ) -> str:
        """Generate hierarchical storage key with collision prevention."""

        # Use tenant_id or default
        tenant = tenant_id or "default"

        # Add timestamp and UUID for uniqueness
        timestamp = datetime.now(UTC).strftime("%Y%m%d%H%M%S")
        unique_suffix = str(uuid.uuid4())[:8]

        if board_id:
            # Board-scoped artifact
            return f"{tenant}/{artifact_type}/{board_id}/{artifact_id}_{timestamp}_{unique_suffix}/{variant}"  # noqa: E501
        else:
            # Global artifact (like LoRA models)
            return f"{tenant}/{artifact_type}/{artifact_id}_{timestamp}_{unique_suffix}/{variant}"

    def _select_provider(self, artifact_type: str, content: bytes | AsyncIterator[bytes]) -> str:
        """Select storage provider based on routing rules."""
        content_size = len(content) if isinstance(content, bytes) else 0

        for rule in self.routing_rules:
            condition = rule.get("condition", {})

            # Check artifact type condition
            if "artifact_type" in condition:
                if condition["artifact_type"] != artifact_type:
                    continue

            # Check size condition
            if "size_gt" in condition:
                size_limit = self._parse_size(condition["size_gt"])
                if content_size <= size_limit:
                    continue
                elif not isinstance(content, bytes):
                    logger.warning(
                        f"Size-based routing rule ignored for {artifact_type} - "
                        f"content size unknown for async iterator"
                    )
                    continue

            # If all conditions match, return this provider
            return rule["provider"]

        # Return default if no rules match
        return self.default_provider

    def _parse_size(self, size_str: str) -> int:
        """Parse size string like '100MB' to bytes."""
        size_str = size_str.upper()
        if size_str.endswith("KB"):
            return int(size_str[:-2]) * 1024
        elif size_str.endswith("MB"):
            return int(size_str[:-2]) * 1024 * 1024
        elif size_str.endswith("GB"):
            return int(size_str[:-2]) * 1024 * 1024 * 1024
        else:
            return int(size_str)

    async def get_download_url(self, storage_key: str, provider_name: str) -> str:
        """Get download URL for a stored artifact."""
        if provider_name not in self.providers:
            raise StorageException(f"Provider not found: {provider_name}")

        provider = self.providers[provider_name]
        return await provider.get_presigned_download_url(storage_key)

    async def delete_artifact(self, storage_key: str, provider_name: str) -> bool:
        """Delete a stored artifact."""
        if provider_name not in self.providers:
            raise StorageException(f"Provider not found: {provider_name}")

        provider = self.providers[provider_name]
        return await provider.delete(storage_key)
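A minimal sketch, not one of the shipped providers (those live under storage/implementations/ in this template): a hypothetical in-memory StorageProvider that satisfies the ABC above, showing how register_provider and store_artifact fit together.

# Sketch only: a hypothetical in-memory provider used to exercise StorageManager.
import asyncio
from typing import Any

from boards.storage import StorageConfig, StorageManager, StorageProvider


class MemoryProvider(StorageProvider):
    def __init__(self) -> None:
        self._blobs: dict[str, bytes] = {}

    async def upload(self, key, content, content_type, metadata=None) -> str:
        if not isinstance(content, bytes):
            # Collapse async iterators for simplicity; real providers stream instead
            content = b"".join([chunk async for chunk in content])
        self._blobs[key] = content
        return f"memory://{key}"

    async def download(self, key: str) -> bytes:
        return self._blobs[key]

    async def get_presigned_upload_url(self, key, content_type, expires_in=None) -> dict[str, Any]:
        return {"url": f"memory://{key}", "method": "PUT"}

    async def get_presigned_download_url(self, key, expires_in=None) -> str:
        return f"memory://{key}"

    async def delete(self, key: str) -> bool:
        return self._blobs.pop(key, None) is not None

    async def exists(self, key: str) -> bool:
        return key in self._blobs

    async def get_metadata(self, key: str) -> dict[str, Any]:
        return {"size": len(self._blobs[key])}


async def demo() -> None:
    config = StorageConfig(default_provider="memory", providers={"memory": {}}, routing_rules=[])
    manager = StorageManager(config)
    manager.register_provider("memory", MemoryProvider())
    ref = await manager.store_artifact(
        artifact_id="img-123",
        content=b"fake-png-bytes",
        artifact_type="image",
        content_type="image/png",
        tenant_id="default",
        board_id="board-1",
    )
    print(ref.storage_key, ref.storage_url)  # memory://default/image/board-1/...


asyncio.run(demo())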