@weirdfingers/baseboards 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +191 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +887 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
- package/templates/README.md +120 -0
- package/templates/api/.env.example +62 -0
- package/templates/api/Dockerfile +32 -0
- package/templates/api/README.md +132 -0
- package/templates/api/alembic/env.py +106 -0
- package/templates/api/alembic/script.py.mako +28 -0
- package/templates/api/alembic/versions/20250101_000000_initial_schema.py +448 -0
- package/templates/api/alembic/versions/20251022_174729_remove_provider_name_from_generations.py +71 -0
- package/templates/api/alembic/versions/20251023_165852_switch_to_declarative_base_and_mapping.py +411 -0
- package/templates/api/alembic/versions/2025925_62735_add_seed_data_for_default_tenant.py +85 -0
- package/templates/api/alembic.ini +36 -0
- package/templates/api/config/generators.yaml +25 -0
- package/templates/api/config/storage_config.yaml +26 -0
- package/templates/api/docs/ADDING_GENERATORS.md +409 -0
- package/templates/api/docs/GENERATORS_API.md +502 -0
- package/templates/api/docs/MIGRATIONS.md +472 -0
- package/templates/api/docs/storage_providers.md +337 -0
- package/templates/api/pyproject.toml +165 -0
- package/templates/api/src/boards/__init__.py +10 -0
- package/templates/api/src/boards/api/app.py +171 -0
- package/templates/api/src/boards/api/auth.py +75 -0
- package/templates/api/src/boards/api/endpoints/__init__.py +3 -0
- package/templates/api/src/boards/api/endpoints/jobs.py +76 -0
- package/templates/api/src/boards/api/endpoints/setup.py +505 -0
- package/templates/api/src/boards/api/endpoints/sse.py +129 -0
- package/templates/api/src/boards/api/endpoints/storage.py +74 -0
- package/templates/api/src/boards/api/endpoints/tenant_registration.py +296 -0
- package/templates/api/src/boards/api/endpoints/webhooks.py +13 -0
- package/templates/api/src/boards/auth/__init__.py +15 -0
- package/templates/api/src/boards/auth/adapters/__init__.py +20 -0
- package/templates/api/src/boards/auth/adapters/auth0.py +220 -0
- package/templates/api/src/boards/auth/adapters/base.py +73 -0
- package/templates/api/src/boards/auth/adapters/clerk.py +172 -0
- package/templates/api/src/boards/auth/adapters/jwt.py +122 -0
- package/templates/api/src/boards/auth/adapters/none.py +102 -0
- package/templates/api/src/boards/auth/adapters/oidc.py +284 -0
- package/templates/api/src/boards/auth/adapters/supabase.py +110 -0
- package/templates/api/src/boards/auth/context.py +35 -0
- package/templates/api/src/boards/auth/factory.py +115 -0
- package/templates/api/src/boards/auth/middleware.py +221 -0
- package/templates/api/src/boards/auth/provisioning.py +129 -0
- package/templates/api/src/boards/auth/tenant_extraction.py +278 -0
- package/templates/api/src/boards/cli.py +354 -0
- package/templates/api/src/boards/config.py +116 -0
- package/templates/api/src/boards/database/__init__.py +7 -0
- package/templates/api/src/boards/database/cli.py +110 -0
- package/templates/api/src/boards/database/connection.py +252 -0
- package/templates/api/src/boards/database/models.py +19 -0
- package/templates/api/src/boards/database/seed_data.py +182 -0
- package/templates/api/src/boards/dbmodels/__init__.py +455 -0
- package/templates/api/src/boards/generators/__init__.py +57 -0
- package/templates/api/src/boards/generators/artifacts.py +53 -0
- package/templates/api/src/boards/generators/base.py +140 -0
- package/templates/api/src/boards/generators/implementations/__init__.py +12 -0
- package/templates/api/src/boards/generators/implementations/audio/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/audio/whisper.py +66 -0
- package/templates/api/src/boards/generators/implementations/image/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/image/dalle3.py +93 -0
- package/templates/api/src/boards/generators/implementations/image/flux_pro.py +85 -0
- package/templates/api/src/boards/generators/implementations/video/__init__.py +3 -0
- package/templates/api/src/boards/generators/implementations/video/lipsync.py +70 -0
- package/templates/api/src/boards/generators/loader.py +253 -0
- package/templates/api/src/boards/generators/registry.py +114 -0
- package/templates/api/src/boards/generators/resolution.py +515 -0
- package/templates/api/src/boards/generators/testmods/class_gen.py +34 -0
- package/templates/api/src/boards/generators/testmods/import_side_effect.py +35 -0
- package/templates/api/src/boards/graphql/__init__.py +7 -0
- package/templates/api/src/boards/graphql/access_control.py +136 -0
- package/templates/api/src/boards/graphql/mutations/root.py +136 -0
- package/templates/api/src/boards/graphql/queries/root.py +116 -0
- package/templates/api/src/boards/graphql/resolvers/__init__.py +8 -0
- package/templates/api/src/boards/graphql/resolvers/auth.py +12 -0
- package/templates/api/src/boards/graphql/resolvers/board.py +1055 -0
- package/templates/api/src/boards/graphql/resolvers/generation.py +889 -0
- package/templates/api/src/boards/graphql/resolvers/generator.py +50 -0
- package/templates/api/src/boards/graphql/resolvers/user.py +25 -0
- package/templates/api/src/boards/graphql/schema.py +81 -0
- package/templates/api/src/boards/graphql/types/board.py +102 -0
- package/templates/api/src/boards/graphql/types/generation.py +130 -0
- package/templates/api/src/boards/graphql/types/generator.py +17 -0
- package/templates/api/src/boards/graphql/types/user.py +47 -0
- package/templates/api/src/boards/jobs/repository.py +104 -0
- package/templates/api/src/boards/logging.py +195 -0
- package/templates/api/src/boards/middleware.py +339 -0
- package/templates/api/src/boards/progress/__init__.py +4 -0
- package/templates/api/src/boards/progress/models.py +25 -0
- package/templates/api/src/boards/progress/publisher.py +64 -0
- package/templates/api/src/boards/py.typed +0 -0
- package/templates/api/src/boards/redis_pool.py +118 -0
- package/templates/api/src/boards/storage/__init__.py +52 -0
- package/templates/api/src/boards/storage/base.py +363 -0
- package/templates/api/src/boards/storage/config.py +187 -0
- package/templates/api/src/boards/storage/factory.py +278 -0
- package/templates/api/src/boards/storage/implementations/__init__.py +27 -0
- package/templates/api/src/boards/storage/implementations/gcs.py +340 -0
- package/templates/api/src/boards/storage/implementations/local.py +201 -0
- package/templates/api/src/boards/storage/implementations/s3.py +294 -0
- package/templates/api/src/boards/storage/implementations/supabase.py +218 -0
- package/templates/api/src/boards/tenant_isolation.py +446 -0
- package/templates/api/src/boards/validation.py +262 -0
- package/templates/api/src/boards/workers/__init__.py +1 -0
- package/templates/api/src/boards/workers/actors.py +201 -0
- package/templates/api/src/boards/workers/cli.py +125 -0
- package/templates/api/src/boards/workers/context.py +188 -0
- package/templates/api/src/boards/workers/middleware.py +58 -0
- package/templates/api/src/py.typed +0 -0
- package/templates/compose.dev.yaml +39 -0
- package/templates/compose.yaml +109 -0
- package/templates/docker/env.example +23 -0
- package/templates/web/.env.example +28 -0
- package/templates/web/Dockerfile +51 -0
- package/templates/web/components.json +22 -0
- package/templates/web/imageLoader.js +18 -0
- package/templates/web/next-env.d.ts +5 -0
- package/templates/web/next.config.js +36 -0
- package/templates/web/package.json +37 -0
- package/templates/web/postcss.config.mjs +7 -0
- package/templates/web/public/favicon.ico +0 -0
- package/templates/web/src/app/boards/[boardId]/page.tsx +232 -0
- package/templates/web/src/app/globals.css +120 -0
- package/templates/web/src/app/layout.tsx +21 -0
- package/templates/web/src/app/page.tsx +35 -0
- package/templates/web/src/app/providers.tsx +18 -0
- package/templates/web/src/components/boards/ArtifactInputSlots.tsx +142 -0
- package/templates/web/src/components/boards/ArtifactPreview.tsx +125 -0
- package/templates/web/src/components/boards/GenerationGrid.tsx +45 -0
- package/templates/web/src/components/boards/GenerationInput.tsx +251 -0
- package/templates/web/src/components/boards/GeneratorSelector.tsx +89 -0
- package/templates/web/src/components/header.tsx +30 -0
- package/templates/web/src/components/ui/button.tsx +58 -0
- package/templates/web/src/components/ui/card.tsx +92 -0
- package/templates/web/src/components/ui/navigation-menu.tsx +168 -0
- package/templates/web/src/lib/utils.ts +6 -0
- package/templates/web/tsconfig.json +47 -0

package/templates/api/src/boards/workers/__init__.py
@@ -0,0 +1 @@
"""Workers package: Dramatiq actors and execution utilities."""

package/templates/api/src/boards/workers/actors.py
@@ -0,0 +1,201 @@
"""Dramatiq actors for generation processing."""

from __future__ import annotations

import traceback

import dramatiq
from dramatiq import actor
from dramatiq.brokers.redis import RedisBroker
from dramatiq.middleware import AsyncIO

from ..config import Settings
from ..database.connection import get_async_session
from ..generators.registry import registry as generator_registry
from ..jobs import repository as jobs_repo
from ..logging import get_logger
from ..progress.models import ProgressUpdate
from ..progress.publisher import ProgressPublisher
from ..storage.factory import create_storage_manager
from .context import GeneratorExecutionContext
from .middleware import GeneratorLoaderMiddleware

logger = get_logger(__name__)


settings = Settings()
broker = RedisBroker(url=settings.redis_url)
dramatiq.set_broker(broker)

# Enable async actor support - Dramatiq will manage event loops per thread
# This avoids the event loop conflicts from using asyncio.run()
broker.add_middleware(AsyncIO())

# Load generators when worker process starts via middleware
# Middleware runs before_worker_boot hook once per worker process at startup
broker.add_middleware(GeneratorLoaderMiddleware())


@actor(queue_name="boards-jobs", max_retries=3, min_backoff=5000, max_backoff=30000)
async def process_generation(generation_id: str) -> None:
    """Entry actor: load job context and dispatch to the generator.

    Retry policy:
    - max_retries: 3 attempts
    - min_backoff: 5 seconds
    - max_backoff: 30 seconds

    Note: This is an async actor. Dramatiq manages the event loop lifecycle properly,
    avoiding the event loop conflicts that would occur with asyncio.run().

    Process a generation job with comprehensive error handling.
    """
    logger.info("Starting generation processing", generation_id=generation_id)

    publisher = ProgressPublisher(settings)

    try:
        # Initialize processing
        await publisher.publish_progress(
            generation_id,
            ProgressUpdate(
                job_id=generation_id,
                status="processing",
                progress=0.0,
                phase="initializing",
            ),
        )

        # Load generation from DB
        async with get_async_session() as session:
            gen = await jobs_repo.get_generation(session, generation_id)
            # Access all attributes while session is active to avoid DetachedInstanceError
            generator_name = gen.generator_name
            input_params = gen.input_params
            gen_id = gen.id
            tenant_id = gen.tenant_id
            board_id = gen.board_id

        # Initialize storage manager
        # This will use the default storage configuration from environment/config
        storage_manager = create_storage_manager()

        # Validate generator exists
        generator = generator_registry.get(generator_name)
        if generator is None:
            error_msg = "Unknown generator"
            logger.error(error_msg, generator_name=generator_name)
            raise RuntimeError(f"Unknown generator: {generator_name}")

        # Build and validate typed inputs
        try:
            input_schema = generator.get_input_schema()
            typed_inputs = input_schema.model_validate(input_params)
        except Exception as e:
            error_msg = "Invalid input parameters"
            logger.error(error_msg, generation_id=generation_id, error=str(e))
            raise ValueError(f"Invalid input parameters: {e}") from e

        # Build context and run generator
        # TODO(generators): make a way for a generator to add additional generations
        # based on eg outputs=4, or similar.
        context = GeneratorExecutionContext(gen_id, publisher, storage_manager, tenant_id, board_id)

        await publisher.publish_progress(
            generation_id,
            ProgressUpdate(
                job_id=generation_id,
                status="processing",
                progress=0.05,
                phase="processing",
                message="Starting generation",
            ),
        )

        # Execute generator
        logger.info(
            "Executing generator",
            generator_name=generator_name,
            generation_id=generation_id,
        )
        # TODO: Consider implementing credit refund logic on failure
        # await refund_credits(gen.user_id, gen.estimated_cost)
        output = await generator.generate(typed_inputs, context)
        logger.info(
            "Generator completed successfully",
            generator_name=generator_name,
            generation_id=generation_id,
        )

        # Find the artifact with matching generation_id
        # Generators should return exactly one artifact with the matching generation_id
        matching_artifacts = [art for art in output.outputs if art.generation_id == generation_id]

        if len(matching_artifacts) == 0:
            raise RuntimeError(
                f"No artifact found with generation_id {generation_id} in generator output. "
                f"Generator returned {len(output.outputs)} artifact(s) but none matched."
            )

        if len(matching_artifacts) > 1:
            logger.warning(
                "Generator returned multiple artifacts with same generation_id, using first one",
                generation_id=generation_id,
                artifact_count=len(matching_artifacts),
            )

        artifact = matching_artifacts[0]

        # Extract storage URL and convert artifact to dict
        storage_url = artifact.storage_url
        output_metadata = artifact.model_dump()

        # Finalize DB with storage URL and output metadata
        async with get_async_session() as session:
            await jobs_repo.finalize_success(
                session,
                generation_id,
                storage_url=storage_url,
                output_metadata=output_metadata,
            )

        logger.info("Job finalized successfully", generation_id=generation_id)

        # Publish completion (DB already updated by finalize_success)
        await publisher.publish_only(
            generation_id,
            ProgressUpdate(
                job_id=generation_id,
                status="completed",
                progress=1.0,
                phase="finalizing",
                message="Completed",
            ),
        )

    except Exception as e:
        # Log the full traceback for debugging
        logger.error(
            "Job failed with error",
            generation_id=generation_id,
            error=str(e),
            traceback=traceback.format_exc(),
        )

        # Publish failure status (this also persists to DB via ProgressPublisher)
        try:
            await publisher.publish_progress(
                generation_id,
                ProgressUpdate(
                    job_id=generation_id,
                    status="failed",
                    progress=0.0,
                    phase="finalizing",
                    message=str(e),
                ),
            )
        except Exception as pub_error:
            logger.error("Failed to publish error status", error=str(pub_error))

        # Re-raise for Dramatiq retry mechanism
        raise
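
Note: producers enqueue this actor through Dramatiq's standard send API. The actual call site (for example, the GraphQL mutation that creates a generation) is not part of this hunk, so the following is only an illustrative sketch:

    from boards.workers.actors import process_generation

    # Enqueue a message on the "boards-jobs" queue; a worker process picks it up
    # and runs the async actor under the AsyncIO middleware configured above.
    process_generation.send(str(generation_id))  # generation_id: id of an existing generation row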

package/templates/api/src/boards/workers/cli.py
@@ -0,0 +1,125 @@
#!/usr/bin/env python3
"""
CLI entry point for Boards background workers.

For auto-reload during development, use a file watcher like entr or nodemon:

    # Using entr (recommended):
    make dev-worker-watch

    # Or manually with entr:
    find packages/backend/src -name '*.py' | entr -r uv run boards-worker

    # Using nodemon:
    nodemon --watch packages/backend/src --exec "uv run boards-worker"
"""

import sys

import click

from boards import __version__
from boards.logging import configure_logging, get_logger

logger = get_logger(__name__)


def start_worker(
    processes: int,
    threads: int,
    queue_list: list[str],
    log_level: str,
) -> None:
    """Start the Dramatiq worker process."""
    # Configure logging
    configure_logging(debug=(log_level == "debug"))

    try:
        # Import workers to register them (if they exist)
        try:
            from boards.workers import actors  # noqa: F401
        except ImportError:
            logger.warning("No worker actors found - continuing with empty worker")

        # Start the worker
        from dramatiq.cli import main as dramatiq_main

        # Build dramatiq CLI args
        args = [
            "dramatiq",
            "boards.workers.actors",
            f"--processes={processes}",
            f"--threads={threads}",
        ]

        for queue in queue_list:
            args.extend(["--queues", queue])

        # Override sys.argv for dramatiq CLI
        original_argv = sys.argv
        sys.argv = args

        dramatiq_main()

    except KeyboardInterrupt:
        logger.info("Worker shutdown requested by user")
    except Exception as e:
        logger.error("Worker startup failed", error=str(e))
        sys.exit(1)
    finally:
        # Restore original argv
        sys.argv = original_argv


@click.command()
@click.option(
    "--processes",
    default=1,
    type=int,
    help="Number of worker processes (default: 1)",
)
@click.option(
    "--threads",
    default=1,
    type=int,
    help="Number of worker threads per process (default: 1)",
)
@click.option(
    "--queues",
    default="boards-jobs",
    help="Comma-separated list of queues to process (default: boards-jobs)",
)
@click.option(
    "--log-level",
    default="info",
    type=click.Choice(["debug", "info", "warning", "error"]),
    help="Log level (default: info)",
)
@click.version_option(version=__version__, prog_name="boards-worker")
def main(
    processes: int,
    threads: int,
    queues: str,
    log_level: str,
) -> None:
    """Start Boards background workers."""

    # Configure logging
    configure_logging(debug=(log_level == "debug"))

    queue_list = [q.strip() for q in queues.split(",")]

    logger.info(
        "Starting Boards workers",
        processes=processes,
        threads=threads,
        queues=queue_list,
        log_level=log_level,
    )

    start_worker(processes, threads, queue_list, log_level)


if __name__ == "__main__":
    main()
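
Given the options defined above, a typical development invocation would be something like "uv run boards-worker --processes 1 --threads 4 --queues boards-jobs --log-level debug"; the flag values here are illustrative, and the defaults are a single process, a single thread, and the boards-jobs queue.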

package/templates/api/src/boards/workers/context.py
@@ -0,0 +1,188 @@
"""Execution context passed to generators for storage/DB/progress access."""

from __future__ import annotations

from uuid import UUID

from ..database.connection import get_async_session
from ..generators import resolution
from ..generators.artifacts import AudioArtifact, ImageArtifact, TextArtifact, VideoArtifact
from ..jobs import repository as jobs_repo
from ..logging import get_logger
from ..progress.models import ProgressUpdate
from ..progress.publisher import ProgressPublisher
from ..storage.base import StorageManager

logger = get_logger(__name__)


class GeneratorExecutionContext:
    def __init__(
        self,
        generation_id: UUID,
        publisher: ProgressPublisher,
        storage_manager: StorageManager,
        tenant_id: UUID,
        board_id: UUID,
    ) -> None:
        self.generation_id = str(generation_id)
        self.publisher = publisher
        self.storage_manager = storage_manager
        self.tenant_id = str(tenant_id)
        self.board_id = str(board_id)
        logger.info(
            "Created execution context",
            generation_id=str(generation_id),
            tenant_id=str(tenant_id),
            board_id=str(board_id),
        )

    async def resolve_artifact(self, artifact) -> str:
        """Resolve an artifact to a file path."""
        logger.debug("Resolving artifact", generation_id=self.generation_id)
        try:
            result = await resolution.resolve_artifact(artifact)
            logger.debug("Artifact resolved successfully", result=result)
            return result
        except Exception as e:
            logger.error("Failed to resolve artifact", error=str(e))
            raise

    async def store_image_result(
        self,
        storage_url: str,
        format: str,
        width: int,
        height: int,
    ) -> ImageArtifact:
        """Store image generation result."""
        logger.debug("Storing image result", generation_id=self.generation_id)
        try:
            result = await resolution.store_image_result(
                storage_manager=self.storage_manager,
                generation_id=self.generation_id,
                tenant_id=self.tenant_id,
                board_id=self.board_id,
                storage_url=storage_url,
                format=format,
                width=width,
                height=height,
            )
            logger.info("Image result stored", generation_id=self.generation_id)
            return result
        except Exception as e:
            logger.error("Failed to store image result", error=str(e))
            raise

    async def store_video_result(
        self,
        storage_url: str,
        format: str,
        width: int,
        height: int,
        duration: float | None = None,
        fps: float | None = None,
    ) -> VideoArtifact:
        """Store video generation result."""
        logger.debug("Storing video result", generation_id=self.generation_id)
        try:
            result = await resolution.store_video_result(
                storage_manager=self.storage_manager,
                generation_id=self.generation_id,
                tenant_id=self.tenant_id,
                board_id=self.board_id,
                storage_url=storage_url,
                format=format,
                width=width,
                height=height,
                duration=duration,
                fps=fps,
            )
            logger.info("Video result stored", generation_id=self.generation_id)
            return result
        except Exception as e:
            logger.error("Failed to store video result", error=str(e))
            raise

    async def store_audio_result(
        self,
        storage_url: str,
        format: str,
        duration: float | None = None,
        sample_rate: int | None = None,
        channels: int | None = None,
    ) -> AudioArtifact:
        """Store audio generation result."""
        logger.debug("Storing audio result", generation_id=self.generation_id)
        try:
            result = await resolution.store_audio_result(
                storage_manager=self.storage_manager,
                generation_id=self.generation_id,
                tenant_id=self.tenant_id,
                board_id=self.board_id,
                storage_url=storage_url,
                format=format,
                duration=duration,
                sample_rate=sample_rate,
                channels=channels,
            )
            logger.info("Audio result stored", generation_id=self.generation_id)
            return result
        except Exception as e:
            logger.error("Failed to store audio result", error=str(e))
            raise

    async def store_text_result(
        self,
        content: str,
        format: str,
    ) -> TextArtifact:
        """Store text generation result."""
        logger.debug("Storing text result", generation_id=self.generation_id)
        try:
            result = await resolution.store_text_result(
                storage_manager=self.storage_manager,
                generation_id=self.generation_id,
                tenant_id=self.tenant_id,
                board_id=self.board_id,
                content=content,
                format=format,
            )
            logger.info("Text result stored", generation_id=self.generation_id)
            return result
        except Exception as e:
            logger.error("Failed to store text result", error=str(e))
            raise

    async def publish_progress(self, update: ProgressUpdate) -> None:
        """Publish progress update for the generation."""
        logger.debug(
            "Publishing progress",
            generation_id=self.generation_id,
            status=update.status,
            progress=update.progress,
        )
        try:
            await self.publisher.publish_progress(self.generation_id, update)
        except Exception as e:
            logger.error(
                "Failed to publish progress update - "
                "generation will continue but progress may not be visible",
                generation_id=self.generation_id,
                status=update.status,
                progress=update.progress,
                error=str(e),
                error_type=type(e).__name__,
            )
            # Don't raise here - progress updates are non-critical
            # The generation should continue even if progress updates fail

    async def set_external_job_id(self, external_id: str) -> None:
        """Set the external job ID from the provider."""
        logger.info(
            "Setting external job ID",
            external_job_id=external_id,
            generation_id=self.generation_id,
        )
        async with get_async_session() as session:
            await jobs_repo.set_external_job_id(session, self.generation_id, external_id)
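
A generator receives this context as the second argument to generate() (see actors.py above). The sketch below shows the intended usage pattern; the surrounding generator class and the provider-specific variables are assumptions, not taken from this diff:

    # Hypothetical excerpt from a generator's generate() method.
    async def generate(self, inputs, context):
        # Optionally record the provider's job id for later correlation.
        await context.set_external_job_id(provider_job_id)  # provider_job_id is hypothetical

        # Progress updates are best-effort; context.publish_progress logs and
        # swallows publish failures instead of raising.
        await context.publish_progress(
            ProgressUpdate(
                job_id=context.generation_id,
                status="processing",
                progress=0.5,
                phase="processing",
                message="Waiting on provider",
            )
        )

        # Persist the provider output; store_image_result is called with this
        # context's generation_id, which the actor later uses to match artifacts
        # in the generator's output.outputs list.
        artifact = await context.store_image_result(
            storage_url=provider_result_url,  # hypothetical URL returned by the provider
            format="png",
            width=1024,
            height=1024,
        )
        ...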

package/templates/api/src/boards/workers/middleware.py
@@ -0,0 +1,58 @@
"""Dramatiq middleware for worker process initialization.

This module provides custom middleware for managing worker lifecycle,
particularly for loading generators during worker startup.
"""

from __future__ import annotations

from typing import TYPE_CHECKING

from dramatiq.middleware import Middleware

from ..config import initialize_generator_api_keys
from ..generators.loader import load_generators_from_config
from ..generators.registry import registry as generator_registry
from ..logging import get_logger

if TYPE_CHECKING:
    from dramatiq import Broker, Worker

logger = get_logger(__name__)


class GeneratorLoaderMiddleware(Middleware):
    """Middleware to load generators when worker process starts.

    This ensures that generators are registered in each worker process's
    registry before any jobs are processed. Since Dramatiq uses multiprocessing,
    each worker process gets its own copy of the registry, so initialization
    must happen in each process.

    The before_worker_boot hook runs once per worker process at startup,
    before any actors are executed. Worker processes are long-running and
    reused across many jobs, so this initialization overhead happens only
    once per worker lifetime.
    """

    def before_worker_boot(self, broker: Broker, worker: Worker) -> None:
        """Load generators when worker process starts.

        Args:
            broker: The Dramatiq broker instance
            worker: The worker process instance
        """
        logger.info("Loading generators in worker process", worker_id=id(worker))

        # Initialize generator API keys before loading generators
        initialize_generator_api_keys()
        logger.info("Generator API keys initialized in worker process")

        load_generators_from_config()

        logger.info(
            "Generators loaded in worker process",
            worker_id=id(worker),
            generator_count=len(generator_registry.list_names()),
            generators=generator_registry.list_names(),
        )
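
As shown in workers/actors.py above, this middleware is attached with broker.add_middleware(GeneratorLoaderMiddleware()) right after the AsyncIO middleware, so no per-actor wiring is needed.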

package/templates/api/src/py.typed
File without changes

package/templates/compose.dev.yaml
@@ -0,0 +1,39 @@
# Development overrides for Docker Compose
# Enables hot reload by mounting source code

services:
  api:
    volumes:
      - ./api:/app
      - ./data/storage:/app/data/storage
    environment:
      - PYTHONUNBUFFERED=1
    command:
      [
        "uvicorn",
        "boards.api.app:app",
        "--host",
        "0.0.0.0",
        "--port",
        "8800",
        "--reload",
        "--log-level",
        "debug",
      ]

  worker:
    volumes:
      - ./api:/app
      - ./data/storage:/app/data/storage
    environment:
      - PYTHONUNBUFFERED=1

  web:
    command: sh -c "pnpm install && pnpm dev"
    volumes:
      - ./web:/app
      - /app/node_modules
      - /app/.next
    environment:
      - NODE_ENV=development
      - INTERNAL_API_URL=http://api:8800