karaoke-gen 0.101.0__py3-none-any.whl → 0.105.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. backend/Dockerfile.base +1 -0
  2. backend/api/routes/admin.py +226 -3
  3. backend/api/routes/audio_search.py +4 -32
  4. backend/api/routes/file_upload.py +18 -83
  5. backend/api/routes/jobs.py +2 -2
  6. backend/api/routes/push.py +238 -0
  7. backend/api/routes/rate_limits.py +428 -0
  8. backend/api/routes/users.py +79 -19
  9. backend/config.py +25 -1
  10. backend/exceptions.py +66 -0
  11. backend/main.py +26 -1
  12. backend/models/job.py +4 -0
  13. backend/models/user.py +20 -2
  14. backend/services/email_validation_service.py +646 -0
  15. backend/services/firestore_service.py +21 -0
  16. backend/services/gce_encoding/main.py +22 -8
  17. backend/services/job_defaults_service.py +113 -0
  18. backend/services/job_manager.py +109 -13
  19. backend/services/push_notification_service.py +409 -0
  20. backend/services/rate_limit_service.py +641 -0
  21. backend/services/stripe_service.py +2 -2
  22. backend/tests/conftest.py +8 -1
  23. backend/tests/test_admin_delete_outputs.py +352 -0
  24. backend/tests/test_audio_search.py +12 -8
  25. backend/tests/test_email_validation_service.py +298 -0
  26. backend/tests/test_file_upload.py +8 -6
  27. backend/tests/test_gce_encoding_worker.py +229 -0
  28. backend/tests/test_impersonation.py +18 -3
  29. backend/tests/test_made_for_you.py +6 -4
  30. backend/tests/test_push_notification_service.py +460 -0
  31. backend/tests/test_push_routes.py +357 -0
  32. backend/tests/test_rate_limit_service.py +396 -0
  33. backend/tests/test_rate_limits_api.py +392 -0
  34. backend/tests/test_stripe_service.py +205 -0
  35. backend/workers/video_worker_orchestrator.py +42 -0
  36. karaoke_gen/instrumental_review/static/index.html +35 -9
  37. {karaoke_gen-0.101.0.dist-info → karaoke_gen-0.105.4.dist-info}/METADATA +2 -1
  38. {karaoke_gen-0.101.0.dist-info → karaoke_gen-0.105.4.dist-info}/RECORD +41 -26
  39. {karaoke_gen-0.101.0.dist-info → karaoke_gen-0.105.4.dist-info}/WHEEL +0 -0
  40. {karaoke_gen-0.101.0.dist-info → karaoke_gen-0.105.4.dist-info}/entry_points.txt +0 -0
  41. {karaoke_gen-0.101.0.dist-info → karaoke_gen-0.105.4.dist-info}/licenses/LICENSE +0 -0
backend/services/gce_encoding/main.py
@@ -338,19 +338,33 @@ def run_encoding(job_id: str, work_dir: Path, config: dict):
     # Search more specifically for karaoke video
     karaoke_video = find_file(work_dir, "*Karaoke*.mkv", "*Karaoke*.mov", "*vocals*.mkv")
 
-    # Instrumental audio
-    instrumental = find_file(
-        work_dir,
-        "*instrumental_clean*.flac", "*Instrumental Clean*.flac",
-        "*instrumental*.flac", "*Instrumental*.flac",
-        "*instrumental*.wav"
-    )
+    # Instrumental audio - respect user's selection from encoding config
+    instrumental_selection = config.get("instrumental_selection", "clean")
+    logger.info(f"Instrumental selection from config: {instrumental_selection}")
+
+    if instrumental_selection == "with_backing":
+        # User selected instrumental with backing vocals
+        instrumental = find_file(
+            work_dir,
+            "*instrumental_with_backing*.flac", "*Instrumental Backing*.flac",
+            "*with_backing*.flac", "*Backing*.flac",
+            "*instrumental*.flac", "*Instrumental*.flac",
+            "*instrumental*.wav"
+        )
+    else:
+        # Default to clean instrumental
+        instrumental = find_file(
+            work_dir,
+            "*instrumental_clean*.flac", "*Instrumental Clean*.flac",
+            "*instrumental*.flac", "*Instrumental*.flac",
+            "*instrumental*.wav"
+        )
 
     logger.info(f"Found files:")
     logger.info(f" Title video: {title_video}")
     logger.info(f" Karaoke video: {karaoke_video}")
     logger.info(f" End video: {end_video}")
-    logger.info(f" Instrumental: {instrumental}")
+    logger.info(f" Instrumental ({instrumental_selection}): {instrumental}")
 
     # Validate required files
     if not title_video:
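
The find_file helper itself is outside this hunk, so the exact matching rules are not visible here; the pattern ordering above only expresses a preference if the helper returns the first glob that matches. A minimal sketch of that assumed behaviour (illustrative only, not the wheel's actual implementation):

    from pathlib import Path
    from typing import Optional

    def find_file(work_dir: Path, *patterns: str) -> Optional[Path]:
        # Assumed contract: try each glob in order and return the first hit, so a
        # specific pattern like "*instrumental_with_backing*.flac" wins over the
        # broad "*instrumental*.wav" fallback at the end of the list.
        for pattern in patterns:
            matches = sorted(work_dir.rglob(pattern))
            if matches:
                return matches[0]
        return None

With the default in config.get("instrumental_selection", "clean"), an encoding config that omits the key still resolves to the clean instrumental.
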
backend/services/job_defaults_service.py (new file)
@@ -0,0 +1,113 @@
+"""
+Centralized job defaults service.
+
+This module provides consistent handling of job creation defaults across all
+endpoints (file_upload, audio_search, made-for-you webhook, etc.).
+
+Centralizing these defaults prevents divergence between code paths and ensures
+all jobs receive the same default configuration.
+"""
+from dataclasses import dataclass
+from typing import Optional, Tuple
+
+from backend.config import get_settings
+
+
+@dataclass
+class EffectiveDistributionSettings:
+    """Distribution settings with defaults applied from environment variables."""
+    dropbox_path: Optional[str]
+    gdrive_folder_id: Optional[str]
+    discord_webhook_url: Optional[str]
+    brand_prefix: Optional[str]
+    enable_youtube_upload: bool
+    youtube_description: Optional[str]
+
+
+def get_effective_distribution_settings(
+    dropbox_path: Optional[str] = None,
+    gdrive_folder_id: Optional[str] = None,
+    discord_webhook_url: Optional[str] = None,
+    brand_prefix: Optional[str] = None,
+    enable_youtube_upload: Optional[bool] = None,
+    youtube_description: Optional[str] = None,
+) -> EffectiveDistributionSettings:
+    """
+    Get distribution settings with defaults applied from environment variables.
+
+    This ensures consistent handling of defaults across all job creation endpoints.
+    Each parameter, if not provided (None), falls back to the corresponding
+    environment variable configured in settings.
+
+    Args:
+        dropbox_path: Explicit Dropbox path or None for default
+        gdrive_folder_id: Explicit Google Drive folder ID or None for default
+        discord_webhook_url: Explicit Discord webhook URL or None for default
+        brand_prefix: Explicit brand prefix or None for default
+        enable_youtube_upload: Explicit flag or None for default
+        youtube_description: Explicit description or None for default
+
+    Returns:
+        EffectiveDistributionSettings with defaults applied
+    """
+    settings = get_settings()
+    return EffectiveDistributionSettings(
+        dropbox_path=dropbox_path or settings.default_dropbox_path,
+        gdrive_folder_id=gdrive_folder_id or settings.default_gdrive_folder_id,
+        discord_webhook_url=discord_webhook_url or settings.default_discord_webhook_url,
+        brand_prefix=brand_prefix or settings.default_brand_prefix,
+        enable_youtube_upload=enable_youtube_upload if enable_youtube_upload is not None else settings.default_enable_youtube_upload,
+        youtube_description=youtube_description or settings.default_youtube_description,
+    )
+
+
+def resolve_cdg_txt_defaults(
+    theme_id: Optional[str],
+    enable_cdg: Optional[bool] = None,
+    enable_txt: Optional[bool] = None,
+) -> Tuple[bool, bool]:
+    """
+    Resolve CDG/TXT settings based on theme and explicit settings.
+
+    The resolution logic is:
+    1. If explicit True/False is provided, use that value
+    2. Otherwise, if a theme is set, use the server defaults (settings.default_enable_cdg/txt)
+    3. If no theme is set, default to False (CDG/TXT require style configuration)
+
+    This ensures CDG/TXT are only enabled when:
+    - A theme is configured (provides necessary style params), AND
+    - The server defaults allow it (DEFAULT_ENABLE_CDG=true by default)
+
+    Args:
+        theme_id: Theme identifier (if any)
+        enable_cdg: Explicit CDG setting (None means use default)
+        enable_txt: Explicit TXT setting (None means use default)
+
+    Returns:
+        Tuple of (resolved_enable_cdg, resolved_enable_txt)
+    """
+    settings = get_settings()
+
+    # Default based on whether theme is set AND server defaults
+    # Theme is required because CDG/TXT need style configuration
+    theme_is_set = theme_id is not None
+    default_cdg = theme_is_set and settings.default_enable_cdg
+    default_txt = theme_is_set and settings.default_enable_txt
+
+    # Explicit values override defaults, None uses computed default
+    resolved_cdg = enable_cdg if enable_cdg is not None else default_cdg
+    resolved_txt = enable_txt if enable_txt is not None else default_txt
+
+    return resolved_cdg, resolved_txt
+
+
+# Singleton instance (optional, for convenience)
+_service_instance = None
+
+
+def get_job_defaults_service():
+    """Get the job defaults service (module-level functions work fine, this is for consistency)."""
+    return {
+        'get_effective_distribution_settings': get_effective_distribution_settings,
+        'resolve_cdg_txt_defaults': resolve_cdg_txt_defaults,
+    }
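
The module is meant to be called from the job-creation endpoints named in its docstring (file_upload, audio_search, the made-for-you webhook). A hedged sketch of how an endpoint might consume it; the wrapper function and its parameters below are illustrative, not the wheel's actual request models:

    from backend.services.job_defaults_service import (
        get_effective_distribution_settings,
        resolve_cdg_txt_defaults,
    )

    def build_job_defaults(theme_id, dropbox_path=None, brand_prefix=None,
                           enable_cdg=None, enable_txt=None):
        # Anything the client omitted (None) falls back to the env-var defaults.
        dist = get_effective_distribution_settings(
            dropbox_path=dropbox_path,
            brand_prefix=brand_prefix,
        )
        # CDG/TXT only switch on when a theme is present and server defaults allow it.
        enable_cdg, enable_txt = resolve_cdg_txt_defaults(theme_id, enable_cdg, enable_txt)
        return {
            "dropbox_path": dist.dropbox_path,
            "brand_prefix": dist.brand_prefix,
            "enable_youtube_upload": dist.enable_youtube_upload,
            "enable_cdg": enable_cdg,
            "enable_txt": enable_txt,
        }
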
backend/services/job_manager.py
@@ -14,6 +14,7 @@ from datetime import datetime
 from typing import Optional, Dict, Any, List
 
 from backend.config import settings
+from backend.exceptions import RateLimitExceededError
 from backend.models.job import Job, JobStatus, JobCreate, STATE_TRANSITIONS
 from backend.models.worker_log import WorkerLogEntry
 from backend.services.firestore_service import FirestoreService
@@ -41,16 +42,44 @@ class JobManager:
         self.firestore = FirestoreService()
         self.storage = StorageService()
 
-    def create_job(self, job_create: JobCreate) -> Job:
+    def create_job(self, job_create: JobCreate, is_admin: bool = False) -> Job:
         """
         Create a new job with initial state PENDING.
 
         Jobs start in PENDING state and transition to DOWNLOADING
         when a worker picks them up.
 
+        Args:
+            job_create: Job creation parameters
+            is_admin: Whether the requesting user is an admin (bypasses rate limits)
+
         Raises:
             ValueError: If theme_id is not provided (all jobs require a theme)
+            RateLimitExceededError: If user has exceeded their daily job limit
         """
+        # Check rate limit FIRST (before any other validation)
+        # This prevents wasted work if user is rate limited
+        if job_create.user_email:
+            from backend.services.rate_limit_service import get_rate_limit_service
+
+            rate_limit_service = get_rate_limit_service()
+            allowed, remaining, message = rate_limit_service.check_user_job_limit(
+                user_email=job_create.user_email,
+                is_admin=is_admin
+            )
+            if not allowed:
+                from backend.services.rate_limit_service import _seconds_until_midnight_utc
+
+                # Get actual current count - remaining is clamped to 0 which loses info
+                current_count = rate_limit_service.get_user_job_count_today(job_create.user_email)
+                raise RateLimitExceededError(
+                    message=message,
+                    limit_type="jobs_per_day",
+                    remaining_seconds=_seconds_until_midnight_utc(),
+                    current_count=current_count,
+                    limit_value=settings.rate_limit_jobs_per_day
+                )
+
         # Enforce theme requirement - all jobs must have a theme
         # This prevents unstyled videos from ever being generated
        if not job_create.theme_id:
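
RateLimitExceededError is defined in the new backend/exceptions.py, and the backend/main.py changes suggest it is translated to an HTTP error at the API boundary; that handler is not part of this excerpt. One way a FastAPI app could map it to a 429 with a Retry-After header, shown purely as a sketch:

    from fastapi import FastAPI, Request
    from fastapi.responses import JSONResponse

    from backend.exceptions import RateLimitExceededError

    app = FastAPI()

    @app.exception_handler(RateLimitExceededError)
    async def rate_limit_exceeded_handler(request: Request, exc: RateLimitExceededError):
        # Retry-After tells clients how long until the UTC-midnight reset.
        return JSONResponse(
            status_code=429,
            content={
                "detail": getattr(exc, "message", str(exc)),
                "limit_type": getattr(exc, "limit_type", None),
                "current_count": getattr(exc, "current_count", None),
                "limit_value": getattr(exc, "limit_value", None),
            },
            headers={"Retry-After": str(getattr(exc, "remaining_seconds", 0))},
        )
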
@@ -105,7 +134,17 @@ class JobManager:
 
         self.firestore.create_job(job)
         logger.info(f"Created new job {job_id} with status PENDING")
-
+
+        # Record job creation for rate limiting (after successful persistence)
+        if job_create.user_email:
+            try:
+                from backend.services.rate_limit_service import get_rate_limit_service
+                rate_limit_service = get_rate_limit_service()
+                rate_limit_service.record_job_creation(job_create.user_email, job_id)
+            except Exception as e:
+                # Don't fail job creation if rate limit recording fails
+                logger.warning(f"Failed to record job creation for rate limiting: {e}")
+
         return job
 
     def get_job(self, job_id: str) -> Optional[Job]:
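
backend/services/rate_limit_service.py (641 new lines) is not included in this excerpt. From the call sites above, check_user_job_limit returns an (allowed, remaining, message) tuple and record_job_creation increments a per-user daily count once the job has been persisted. A toy in-memory sketch of that contract, assuming the real service keeps its counters in Firestore and resets them at midnight UTC:

    from collections import defaultdict
    from datetime import datetime, timezone
    from typing import Tuple

    class DailyJobCounter:
        """Illustrative stand-in for the real rate limit service."""

        def __init__(self, jobs_per_day: int):
            self.jobs_per_day = jobs_per_day
            self._counts = defaultdict(int)  # (email, "YYYY-MM-DD") -> jobs created

        def _today(self) -> str:
            return datetime.now(timezone.utc).strftime("%Y-%m-%d")

        def check_user_job_limit(self, user_email: str, is_admin: bool = False) -> Tuple[bool, int, str]:
            if is_admin:
                return True, self.jobs_per_day, "admin bypass"
            used = self._counts[(user_email, self._today())]
            remaining = max(self.jobs_per_day - used, 0)
            if remaining == 0:
                return False, 0, f"Daily limit of {self.jobs_per_day} jobs reached"
            return True, remaining, "ok"

        def record_job_creation(self, user_email: str, job_id: str) -> None:
            self._counts[(user_email, self._today())] += 1

Because the limit is checked before persistence and the count is recorded after, two near-simultaneous requests can both pass the check; for a per-day quota that is usually an acceptable trade-off.
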
@@ -335,22 +374,22 @@ class JobManager:
             updates['progress'] = progress
 
         # Generate review token when entering AWAITING_REVIEW state
+        # Tokens don't expire - they're job-scoped so low risk, and natural expiry happens when job completes
         if new_status == JobStatus.AWAITING_REVIEW:
-            from backend.api.dependencies import generate_review_token, get_review_token_expiry
+            from backend.api.dependencies import generate_review_token
             review_token = generate_review_token()
-            review_token_expires = get_review_token_expiry(hours=48)  # 48 hour expiry
             updates['review_token'] = review_token
-            updates['review_token_expires_at'] = review_token_expires
-            logger.info(f"Generated review token for job {job_id}, expires in 48 hours")
-
+            updates['review_token_expires_at'] = None  # No expiry - token is job-scoped
+            logger.info(f"Generated review token for job {job_id} (no expiry)")
+
         # Generate instrumental token when entering AWAITING_INSTRUMENTAL_SELECTION state
+        # Tokens don't expire - they're job-scoped so low risk, and natural expiry happens when job completes
         if new_status == JobStatus.AWAITING_INSTRUMENTAL_SELECTION:
-            from backend.api.dependencies import generate_review_token, get_review_token_expiry
+            from backend.api.dependencies import generate_review_token
            instrumental_token = generate_review_token()  # Reuse same token generator
-            instrumental_token_expires = get_review_token_expiry(hours=48)  # 48 hour expiry
             updates['instrumental_token'] = instrumental_token
-            updates['instrumental_token_expires_at'] = instrumental_token_expires
-            logger.info(f"Generated instrumental token for job {job_id}, expires in 48 hours")
+            updates['instrumental_token_expires_at'] = None  # No expiry - token is job-scoped
+            logger.info(f"Generated instrumental token for job {job_id} (no expiry)")
 
         # If we have state_data_updates, merge them with existing state_data
         merged_state_data = None
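
With the expiry fields now stored as None, whatever validates these tokens (backend/api/dependencies.py, not included here) has to treat a missing timestamp as never-expiring. A minimal sketch of that assumed check:

    from datetime import datetime, timezone
    from typing import Optional

    def review_token_is_valid(stored: Optional[str], presented: str,
                              expires_at: Optional[datetime]) -> bool:
        if not stored or stored != presented:
            return False
        # None now means the token never expires; it effectively dies with the job.
        if expires_at is not None and expires_at < datetime.now(timezone.utc):
            return False
        return True
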
@@ -399,7 +438,7 @@ class JobManager:
 
     def _trigger_state_notifications(self, job_id: str, new_status: JobStatus) -> None:
         """
-        Trigger email notifications based on state transitions.
+        Trigger email and push notifications based on state transitions.
 
         This is fire-and-forget - notification failures don't affect job processing.
 
@@ -419,10 +458,14 @@ class JobManager:
             # Job completion notification
             if new_status == JobStatus.COMPLETE:
                 self._schedule_completion_email(job)
+                self._send_push_notification(job, "complete")
 
             # Idle reminder scheduling for blocking states
             elif new_status in [JobStatus.AWAITING_REVIEW, JobStatus.AWAITING_INSTRUMENTAL_SELECTION]:
                 self._schedule_idle_reminder(job, new_status)
+                # Send push notification for blocking states
+                action_type = "lyrics" if new_status == JobStatus.AWAITING_REVIEW else "instrumental"
+                self._send_push_notification(job, action_type)
 
         except Exception as e:
             # Never let notification failures affect job processing
@@ -542,7 +585,60 @@ class JobManager:
 
         except Exception as e:
             logger.error(f"Failed to schedule idle reminder for job {job.job_id}: {e}")
-
+
+    def _send_push_notification(self, job: Job, action_type: str) -> None:
+        """
+        Send a push notification for job state changes.
+
+        Fire-and-forget - failures don't affect job processing.
+
+        Args:
+            job: Job object
+            action_type: Type of notification ("lyrics", "instrumental", or "complete")
+        """
+        import asyncio
+        import threading
+
+        try:
+            from backend.services.push_notification_service import get_push_notification_service
+
+            push_service = get_push_notification_service()
+
+            # Skip if push notifications not enabled
+            if not push_service.is_enabled():
+                logger.debug("Push notifications not enabled, skipping")
+                return
+
+            # Build job dict for notification service
+            job_dict = {
+                "job_id": job.job_id,
+                "user_email": job.user_email,
+                "artist": job.artist,
+                "title": job.title,
+            }
+
+            async def send_notification():
+                if action_type == "complete":
+                    await push_service.send_completion_notification(job_dict)
+                else:
+                    await push_service.send_blocking_notification(job_dict, action_type)
+
+            # Try to get existing event loop, create new one if none exists
+            try:
+                loop = asyncio.get_running_loop()
+                loop.create_task(send_notification())
+            except RuntimeError:
+                # No event loop - we're in a sync context
+                def run_in_thread():
+                    asyncio.run(send_notification())
+                thread = threading.Thread(target=run_in_thread, daemon=True)
+                thread.start()
+
+            logger.debug(f"Scheduled push notification for job {job.job_id} ({action_type})")
+
+        except Exception as e:
+            logger.error(f"Failed to send push notification for job {job.job_id}: {e}")
+
     def update_state_data(self, job_id: str, key: str, value: Any) -> None:
         """
         Update a specific key in the job's state_data field.
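
The loop-or-thread dispatch at the end of _send_push_notification is what lets the same synchronous method be called both from request handlers that already run inside an event loop and from background workers that do not. The pattern in isolation, as a sketch:

    import asyncio
    import threading

    def fire_and_forget(coro_factory) -> None:
        # coro_factory is a zero-argument callable that returns a fresh coroutine.
        try:
            # Async context: schedule on the already-running loop and return.
            asyncio.get_running_loop().create_task(coro_factory())
        except RuntimeError:
            # Sync context: run the coroutine to completion on a daemon thread.
            threading.Thread(target=lambda: asyncio.run(coro_factory()), daemon=True).start()

Neither branch retains a reference to the scheduled task, which matches the fire-and-forget intent but means failures surface only through the service's own logging.
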