karaoke-gen 0.101.0__py3-none-any.whl → 0.103.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,641 @@
1
+ """
2
+ Rate limiting service for job creation, YouTube uploads, and beta enrollment.
3
+
4
+ Uses Firestore for distributed rate limit tracking with date-based document IDs
5
+ that automatically reset at UTC midnight.
6
+ """
7
+ import hashlib
8
+ import logging
9
+ from datetime import datetime, timezone
10
+ from typing import Tuple, Optional, List, Dict, Any
11
+
12
+ from google.cloud import firestore
13
+
14
+ from backend.config import settings
15
+
16
+
17
# Module-level logger for this service.
logger = logging.getLogger(__name__)

# Firestore collection names
RATE_LIMITS_COLLECTION = "rate_limits"  # per-day counter docs (user jobs, YouTube uploads, beta IPs)
BLOCKLISTS_COLLECTION = "blocklists"    # single "config" document holding all blocklists
OVERRIDES_COLLECTION = "overrides"      # per-user rate-limit overrides, keyed by lowercased email
23
+
24
+
25
+ def _get_today_date_str() -> str:
26
+ """Get today's date in UTC as YYYY-MM-DD string."""
27
+ return datetime.now(timezone.utc).strftime("%Y-%m-%d")
28
+
29
+
30
+ def _seconds_until_midnight_utc() -> int:
31
+ """Calculate seconds until UTC midnight (when rate limits reset)."""
32
+ now = datetime.now(timezone.utc)
33
+ midnight = now.replace(hour=0, minute=0, second=0, microsecond=0)
34
+ # Add 1 day to get next midnight
35
+ from datetime import timedelta
36
+ next_midnight = midnight + timedelta(days=1)
37
+ return int((next_midnight - now).total_seconds())
38
+
39
+
40
+ def _hash_ip(ip_address: str) -> str:
41
+ """Hash an IP address for storage (privacy-preserving)."""
42
+ return hashlib.sha256(ip_address.encode()).hexdigest()[:16]
43
+
44
+
45
+ class RateLimitService:
46
+ """
47
+ Service for managing rate limits across the platform.
48
+
49
+ Rate limits use Firestore for distributed tracking with automatic
50
+ reset at UTC midnight via date-based document IDs.
51
+ """
52
+
53
+ def __init__(self, db: Optional[firestore.Client] = None):
54
+ """Initialize rate limit service with Firestore client."""
55
+ if db is None:
56
+ self.db = firestore.Client(project=settings.google_cloud_project)
57
+ else:
58
+ self.db = db
59
+ self._blocklist_cache: Optional[Dict[str, Any]] = None
60
+ self._blocklist_cache_time: Optional[datetime] = None
61
+ self._cache_ttl_seconds = 300 # 5 minute cache
62
+
63
+ # -------------------------------------------------------------------------
64
+ # Job Rate Limiting
65
+ # -------------------------------------------------------------------------
66
+
67
    def check_user_job_limit(self, user_email: str, is_admin: bool = False) -> Tuple[bool, int, str]:
        """
        Check if a user can create a new job.

        Args:
            user_email: User's email address
            is_admin: Whether the user is an admin (admins bypass limits)

        Returns:
            Tuple of (allowed, remaining, message)
            - allowed: True if user can create job
            - remaining: Number of jobs remaining today (-1 means unlimited)
            - message: Human-readable status message
        """
        # Global kill-switch: when disabled, everything is allowed.
        if not settings.enable_rate_limiting:
            return True, -1, "Rate limiting disabled"

        if is_admin:
            return True, -1, "Admin users bypass rate limits"

        # A configured limit of 0 is treated as "no limit".
        limit = settings.rate_limit_jobs_per_day
        if limit == 0:
            return True, -1, "No job limit configured"

        # Check for user override
        override = self.get_user_override(user_email)
        if override and override.get("bypass_job_limit"):
            return True, -1, "User has rate limit bypass"
        # NOTE(review): truthiness check means a custom limit of 0 is
        # silently ignored here (falls back to the default limit) — confirm
        # that is the intended meaning of 0.
        if override and override.get("custom_daily_job_limit"):
            limit = override["custom_daily_job_limit"]

        # Get current count
        # NOTE(review): the per-day counter document embeds user_email as-is
        # (case-sensitive), while overrides are keyed by lowercased email —
        # verify upstream normalizes email case before it reaches here.
        current_count = self.get_user_job_count_today(user_email)
        remaining = max(0, limit - current_count)

        if current_count >= limit:
            seconds = _seconds_until_midnight_utc()
            logger.warning(
                f"Rate limit exceeded for {user_email}: {current_count}/{limit} jobs today"
            )
            return (
                False,
                0,
                f"Daily job limit reached ({limit} jobs per day). Resets in {seconds // 3600}h {(seconds % 3600) // 60}m."
            )

        return True, remaining, f"{remaining} jobs remaining today"
114
+
115
+ def get_user_job_count_today(self, user_email: str) -> int:
116
+ """Get the number of jobs created by a user today."""
117
+ date_str = _get_today_date_str()
118
+ doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
119
+ f"user_jobs_{user_email}_{date_str}"
120
+ )
121
+ doc = doc_ref.get()
122
+
123
+ if not doc.exists:
124
+ return 0
125
+
126
+ return doc.to_dict().get("count", 0)
127
+
128
    def record_job_creation(self, user_email: str, job_id: str) -> None:
        """
        Record a job creation for rate limiting.

        Uses Firestore transactions for atomic increment.

        Args:
            user_email: Owner of the new job (embedded in the document ID).
            job_id: ID of the created job, appended to the day's audit list.
        """
        # No-op when rate limiting is globally disabled.
        if not settings.enable_rate_limiting:
            return

        # Per-user, per-day document: counters reset implicitly at UTC midnight.
        date_str = _get_today_date_str()
        doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
            f"user_jobs_{user_email}_{date_str}"
        )

        @firestore.transactional
        def update_in_transaction(transaction, doc_ref):
            # Firestore requires all reads to happen before any writes
            # within a transaction; the read here is retried by the client
            # library on contention.
            doc = doc_ref.get(transaction=transaction)
            if doc.exists:
                data = doc.to_dict()
                count = data.get("count", 0) + 1
                job_ids = data.get("job_ids", [])
                job_ids.append(job_id)
            else:
                count = 1
                job_ids = [job_id]

            transaction.set(doc_ref, {
                "user_email": user_email,
                "date": date_str,
                "count": count,
                "job_ids": job_ids,
                "updated_at": datetime.now(timezone.utc),
            })

        transaction = self.db.transaction()
        update_in_transaction(transaction, doc_ref)
        logger.info(f"Recorded job {job_id} for user {user_email} rate limiting")
165
+
166
+ # -------------------------------------------------------------------------
167
+ # YouTube Upload Rate Limiting
168
+ # -------------------------------------------------------------------------
169
+
170
+ def check_youtube_upload_limit(self) -> Tuple[bool, int, str]:
171
+ """
172
+ Check if the system can perform a YouTube upload.
173
+
174
+ YouTube uploads are limited system-wide due to API quota constraints.
175
+
176
+ Returns:
177
+ Tuple of (allowed, remaining, message)
178
+ """
179
+ if not settings.enable_rate_limiting:
180
+ return True, -1, "Rate limiting disabled"
181
+
182
+ limit = settings.rate_limit_youtube_uploads_per_day
183
+ if limit == 0:
184
+ return True, -1, "No YouTube upload limit configured"
185
+
186
+ current_count = self.get_youtube_uploads_today()
187
+ remaining = max(0, limit - current_count)
188
+
189
+ if current_count >= limit:
190
+ seconds = _seconds_until_midnight_utc()
191
+ logger.warning(
192
+ f"YouTube upload limit exceeded: {current_count}/{limit} uploads today"
193
+ )
194
+ return (
195
+ False,
196
+ 0,
197
+ f"Daily YouTube upload limit reached ({limit} uploads per day). Resets in {seconds // 3600}h {(seconds % 3600) // 60}m."
198
+ )
199
+
200
+ return True, remaining, f"{remaining} YouTube uploads remaining today"
201
+
202
+ def get_youtube_uploads_today(self) -> int:
203
+ """Get the number of YouTube uploads performed today (system-wide)."""
204
+ date_str = _get_today_date_str()
205
+ doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
206
+ f"youtube_uploads_{date_str}"
207
+ )
208
+ doc = doc_ref.get()
209
+
210
+ if not doc.exists:
211
+ return 0
212
+
213
+ return doc.to_dict().get("count", 0)
214
+
215
    def record_youtube_upload(self, job_id: str, user_email: str) -> None:
        """
        Record a YouTube upload for rate limiting.

        Uses Firestore transactions for atomic increment.

        Args:
            job_id: Job whose video was uploaded.
            user_email: User who triggered the upload (kept in the audit list).
        """
        # No-op when rate limiting is globally disabled.
        if not settings.enable_rate_limiting:
            return

        # Single system-wide document per UTC day (uploads share a quota).
        date_str = _get_today_date_str()
        doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
            f"youtube_uploads_{date_str}"
        )

        @firestore.transactional
        def update_in_transaction(transaction, doc_ref):
            # Read before write, as Firestore transactions require.
            doc = doc_ref.get(transaction=transaction)
            if doc.exists:
                data = doc.to_dict()
                count = data.get("count", 0) + 1
                uploads = data.get("uploads", [])
            else:
                count = 1
                uploads = []

            uploads.append({
                "job_id": job_id,
                "user_email": user_email,
                "timestamp": datetime.now(timezone.utc),
            })

            transaction.set(doc_ref, {
                "date": date_str,
                "count": count,
                "uploads": uploads,
                "updated_at": datetime.now(timezone.utc),
            })

        transaction = self.db.transaction()
        update_in_transaction(transaction, doc_ref)
        logger.info(f"Recorded YouTube upload for job {job_id}")
256
+
257
+ # -------------------------------------------------------------------------
258
+ # Beta Enrollment IP Rate Limiting
259
+ # -------------------------------------------------------------------------
260
+
261
+ def check_beta_ip_limit(self, ip_address: str) -> Tuple[bool, int, str]:
262
+ """
263
+ Check if an IP address can enroll in the beta program.
264
+
265
+ Args:
266
+ ip_address: Client IP address
267
+
268
+ Returns:
269
+ Tuple of (allowed, remaining, message)
270
+ """
271
+ if not settings.enable_rate_limiting:
272
+ return True, -1, "Rate limiting disabled"
273
+
274
+ limit = settings.rate_limit_beta_ip_per_day
275
+ if limit == 0:
276
+ return True, -1, "No beta IP limit configured"
277
+
278
+ # Check today's enrollment count for this IP
279
+ ip_hash = _hash_ip(ip_address)
280
+ date_str = _get_today_date_str()
281
+
282
+ doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
283
+ f"beta_ip_{ip_hash}_{date_str}"
284
+ )
285
+ doc = doc_ref.get()
286
+
287
+ current_count = 0
288
+ if doc.exists:
289
+ current_count = doc.to_dict().get("count", 0)
290
+
291
+ remaining = max(0, limit - current_count)
292
+
293
+ if current_count >= limit:
294
+ seconds = _seconds_until_midnight_utc()
295
+ logger.warning(
296
+ f"Beta enrollment IP limit exceeded for {ip_hash}: {current_count}/{limit} enrollments today"
297
+ )
298
+ return (
299
+ False,
300
+ 0,
301
+ f"Too many beta enrollments from this network today. Please try again tomorrow."
302
+ )
303
+
304
+ return True, remaining, f"{remaining} beta enrollments remaining from this IP today"
305
+
306
    def record_beta_enrollment(self, ip_address: str, email: str) -> None:
        """
        Record a beta enrollment for IP rate limiting.

        Uses Firestore transactions for atomic increment.

        Args:
            ip_address: Client IP address (stored only as a truncated hash).
            email: Enrolling user's email, kept in the per-day audit list.
        """
        # No-op when rate limiting is globally disabled.
        if not settings.enable_rate_limiting:
            return

        # Per-IP-hash, per-day document: resets implicitly at UTC midnight.
        ip_hash = _hash_ip(ip_address)
        date_str = _get_today_date_str()
        doc_ref = self.db.collection(RATE_LIMITS_COLLECTION).document(
            f"beta_ip_{ip_hash}_{date_str}"
        )

        @firestore.transactional
        def update_in_transaction(transaction, doc_ref):
            # Read before write, as Firestore transactions require.
            doc = doc_ref.get(transaction=transaction)
            if doc.exists:
                data = doc.to_dict()
                count = data.get("count", 0) + 1
                enrollments = data.get("enrollments", [])
            else:
                count = 1
                enrollments = []

            enrollments.append({
                "email": email,
                "timestamp": datetime.now(timezone.utc),
            })

            transaction.set(doc_ref, {
                "ip_hash": ip_hash,
                "date": date_str,
                "count": count,
                "enrollments": enrollments,
                "updated_at": datetime.now(timezone.utc),
            })

        transaction = self.db.transaction()
        update_in_transaction(transaction, doc_ref)
        logger.info(f"Recorded beta enrollment from IP {ip_hash} for {email}")
348
+
349
+ # -------------------------------------------------------------------------
350
+ # User Overrides (Whitelist)
351
+ # -------------------------------------------------------------------------
352
+
353
+ def get_user_override(self, user_email: str) -> Optional[Dict[str, Any]]:
354
+ """
355
+ Get rate limit override settings for a user.
356
+
357
+ Returns:
358
+ Override settings dict or None if no override exists
359
+ """
360
+ doc_ref = self.db.collection(OVERRIDES_COLLECTION).document(user_email.lower())
361
+ doc = doc_ref.get()
362
+
363
+ if not doc.exists:
364
+ return None
365
+
366
+ return doc.to_dict()
367
+
368
+ def set_user_override(
369
+ self,
370
+ user_email: str,
371
+ bypass_job_limit: bool = False,
372
+ custom_daily_job_limit: Optional[int] = None,
373
+ reason: str = "",
374
+ admin_email: str = ""
375
+ ) -> None:
376
+ """
377
+ Set rate limit override for a user.
378
+
379
+ Args:
380
+ user_email: User to override
381
+ bypass_job_limit: If True, user bypasses all job limits
382
+ custom_daily_job_limit: Custom limit (None = use default)
383
+ reason: Reason for override
384
+ admin_email: Admin who set the override
385
+ """
386
+ doc_ref = self.db.collection(OVERRIDES_COLLECTION).document(user_email.lower())
387
+ doc_ref.set({
388
+ "email": user_email.lower(),
389
+ "bypass_job_limit": bypass_job_limit,
390
+ "custom_daily_job_limit": custom_daily_job_limit,
391
+ "reason": reason,
392
+ "created_by": admin_email,
393
+ "created_at": datetime.now(timezone.utc),
394
+ })
395
+ logger.info(f"Set rate limit override for {user_email} by {admin_email}")
396
+
397
+ def remove_user_override(self, user_email: str, admin_email: str = "") -> bool:
398
+ """Remove rate limit override for a user."""
399
+ doc_ref = self.db.collection(OVERRIDES_COLLECTION).document(user_email.lower())
400
+ doc = doc_ref.get()
401
+
402
+ if not doc.exists:
403
+ return False
404
+
405
+ doc_ref.delete()
406
+ logger.info(f"Removed rate limit override for {user_email} by {admin_email}")
407
+ return True
408
+
409
+ def list_user_overrides(self) -> List[Dict[str, Any]]:
410
+ """List all user rate limit overrides."""
411
+ docs = self.db.collection(OVERRIDES_COLLECTION).stream()
412
+ return [doc.to_dict() for doc in docs]
413
+
414
+ def get_all_overrides(self) -> Dict[str, Dict[str, Any]]:
415
+ """Get all user rate limit overrides as a dict keyed by email."""
416
+ docs = self.db.collection(OVERRIDES_COLLECTION).stream()
417
+ return {doc.id: doc.to_dict() for doc in docs}
418
+
419
+ # -------------------------------------------------------------------------
420
+ # Blocklist Management
421
+ # -------------------------------------------------------------------------
422
+
423
    def _load_blocklist(self, force_refresh: bool = False) -> Dict[str, Any]:
        """
        Load blocklist from Firestore with caching.

        The document is cached in-process for ``self._cache_ttl_seconds``
        seconds to avoid a Firestore read on every membership check.

        Args:
            force_refresh: When True, bypass the cache and re-read Firestore.

        Returns:
            Dict with keys: disposable_domains, blocked_emails, blocked_ips
            NOTE(review): the returned dict IS the cache object — callers
            must treat it as read-only.
        """
        now = datetime.now(timezone.utc)

        # Check cache
        if not force_refresh and self._blocklist_cache and self._blocklist_cache_time:
            cache_age = (now - self._blocklist_cache_time).total_seconds()
            if cache_age < self._cache_ttl_seconds:
                return self._blocklist_cache

        # Load from Firestore
        doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
        doc = doc_ref.get()

        if not doc.exists:
            # Initialize with empty blocklist
            self._blocklist_cache = {
                "disposable_domains": [],
                "blocked_emails": [],
                "blocked_ips": [],
            }
        else:
            # An existing doc is used as-is; missing keys are tolerated by
            # the callers via .get(key, []) defaults.
            self._blocklist_cache = doc.to_dict()

        self._blocklist_cache_time = now
        return self._blocklist_cache
454
+
455
+ def is_disposable_domain(self, domain: str) -> bool:
456
+ """Check if a domain is in the disposable domains blocklist."""
457
+ blocklist = self._load_blocklist()
458
+ return domain.lower() in [d.lower() for d in blocklist.get("disposable_domains", [])]
459
+
460
+ def is_blocked_email(self, email: str) -> bool:
461
+ """Check if an email is explicitly blocked."""
462
+ blocklist = self._load_blocklist()
463
+ return email.lower() in [e.lower() for e in blocklist.get("blocked_emails", [])]
464
+
465
+ def is_blocked_ip(self, ip_address: str) -> bool:
466
+ """Check if an IP address is blocked."""
467
+ blocklist = self._load_blocklist()
468
+ return ip_address in blocklist.get("blocked_ips", [])
469
+
470
+ def get_blocklist(self) -> Dict[str, Any]:
471
+ """Get all blocklist data."""
472
+ return self._load_blocklist(force_refresh=True)
473
+
474
+ def add_disposable_domain(self, domain: str, admin_email: str) -> None:
475
+ """Add a domain to the disposable domains blocklist."""
476
+ blocklist = self._load_blocklist(force_refresh=True)
477
+ domains = set(blocklist.get("disposable_domains", []))
478
+ domains.add(domain.lower())
479
+
480
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
481
+ doc_ref.set({
482
+ **blocklist,
483
+ "disposable_domains": list(domains),
484
+ "updated_at": datetime.now(timezone.utc),
485
+ "updated_by": admin_email,
486
+ }, merge=True)
487
+
488
+ self._blocklist_cache = None # Invalidate cache
489
+ logger.info(f"Added disposable domain {domain} by {admin_email}")
490
+
491
+ def remove_disposable_domain(self, domain: str) -> bool:
492
+ """Remove a domain from the disposable domains blocklist."""
493
+ blocklist = self._load_blocklist(force_refresh=True)
494
+ domains = set(blocklist.get("disposable_domains", []))
495
+
496
+ if domain.lower() not in [d.lower() for d in domains]:
497
+ return False
498
+
499
+ domains = {d for d in domains if d.lower() != domain.lower()}
500
+
501
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
502
+ doc_ref.set({
503
+ **blocklist,
504
+ "disposable_domains": list(domains),
505
+ "updated_at": datetime.now(timezone.utc),
506
+ }, merge=True)
507
+
508
+ self._blocklist_cache = None
509
+ logger.info(f"Removed disposable domain {domain}")
510
+ return True
511
+
512
+ def add_blocked_email(self, email: str, admin_email: str) -> None:
513
+ """Add an email to the blocked emails list."""
514
+ blocklist = self._load_blocklist(force_refresh=True)
515
+ emails = set(blocklist.get("blocked_emails", []))
516
+ emails.add(email.lower())
517
+
518
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
519
+ doc_ref.set({
520
+ **blocklist,
521
+ "blocked_emails": list(emails),
522
+ "updated_at": datetime.now(timezone.utc),
523
+ "updated_by": admin_email,
524
+ }, merge=True)
525
+
526
+ self._blocklist_cache = None
527
+ logger.info(f"Added blocked email {email} by {admin_email}")
528
+
529
+ def remove_blocked_email(self, email: str) -> bool:
530
+ """Remove an email from the blocked emails list."""
531
+ blocklist = self._load_blocklist(force_refresh=True)
532
+ emails = set(blocklist.get("blocked_emails", []))
533
+
534
+ if email.lower() not in [e.lower() for e in emails]:
535
+ return False
536
+
537
+ emails = {e for e in emails if e.lower() != email.lower()}
538
+
539
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
540
+ doc_ref.set({
541
+ **blocklist,
542
+ "blocked_emails": list(emails),
543
+ "updated_at": datetime.now(timezone.utc),
544
+ }, merge=True)
545
+
546
+ self._blocklist_cache = None
547
+ logger.info(f"Removed blocked email {email}")
548
+ return True
549
+
550
+ def add_blocked_ip(self, ip_address: str, admin_email: str) -> None:
551
+ """Add an IP address to the blocked IPs list."""
552
+ blocklist = self._load_blocklist(force_refresh=True)
553
+ ips = set(blocklist.get("blocked_ips", []))
554
+ ips.add(ip_address)
555
+
556
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
557
+ doc_ref.set({
558
+ **blocklist,
559
+ "blocked_ips": list(ips),
560
+ "updated_at": datetime.now(timezone.utc),
561
+ "updated_by": admin_email,
562
+ }, merge=True)
563
+
564
+ self._blocklist_cache = None
565
+ logger.info(f"Added blocked IP {ip_address} by {admin_email}")
566
+
567
+ def remove_blocked_ip(self, ip_address: str) -> bool:
568
+ """Remove an IP address from the blocked IPs list."""
569
+ blocklist = self._load_blocklist(force_refresh=True)
570
+ ips = set(blocklist.get("blocked_ips", []))
571
+
572
+ if ip_address not in ips:
573
+ return False
574
+
575
+ ips.discard(ip_address)
576
+
577
+ doc_ref = self.db.collection(BLOCKLISTS_COLLECTION).document("config")
578
+ doc_ref.set({
579
+ **blocklist,
580
+ "blocked_ips": list(ips),
581
+ "updated_at": datetime.now(timezone.utc),
582
+ }, merge=True)
583
+
584
+ self._blocklist_cache = None
585
+ logger.info(f"Removed blocked IP {ip_address}")
586
+ return True
587
+
588
+ # -------------------------------------------------------------------------
589
+ # Stats
590
+ # -------------------------------------------------------------------------
591
+
592
    def get_rate_limit_stats(self) -> Dict[str, Any]:
        """
        Get current rate limit statistics.

        Returns:
            Dict with current usage stats (today's counts, configured limits,
            and seconds until the UTC-midnight reset).
        """
        date_str = _get_today_date_str()

        # Get YouTube upload count
        youtube_count = self.get_youtube_uploads_today()

        # Count unique users with jobs today
        users_with_jobs = set()
        total_jobs_today = 0

        # Query all user job documents for today.
        # NOTE(review): positional where() arguments are deprecated in newer
        # google-cloud-firestore releases in favour of FieldFilter — confirm
        # the pinned client version before changing.
        docs = self.db.collection(RATE_LIMITS_COLLECTION).where(
            "date", "==", date_str
        ).stream()

        for doc in docs:
            data = doc.to_dict()
            # Only user-job documents carry a top-level "user_email" field;
            # youtube_uploads_* and beta_ip_* documents are skipped here.
            if data.get("user_email"):
                users_with_jobs.add(data["user_email"])
                total_jobs_today += data.get("count", 0)

        return {
            "date": date_str,
            "youtube_uploads_today": youtube_count,
            "youtube_uploads_limit": settings.rate_limit_youtube_uploads_per_day,
            "total_jobs_today": total_jobs_today,
            "users_with_jobs_today": len(users_with_jobs),
            "job_limit_per_user": settings.rate_limit_jobs_per_day,
            "beta_ip_limit_per_day": settings.rate_limit_beta_ip_per_day,
            "rate_limiting_enabled": settings.enable_rate_limiting,
            "seconds_until_reset": _seconds_until_midnight_utc(),
        }
630
+
631
+
632
# Singleton instance
_rate_limit_service: Optional[RateLimitService] = None


def get_rate_limit_service() -> RateLimitService:
    """Get the singleton RateLimitService instance.

    Lazily constructs the service (and its Firestore client) on first call.
    NOTE(review): initialization is not lock-guarded — concurrent first
    calls from multiple threads could each build a client; confirm
    single-threaded startup if that matters.
    """
    global _rate_limit_service
    if _rate_limit_service is None:
        _rate_limit_service = RateLimitService()
    return _rate_limit_service
backend/tests/conftest.py CHANGED
@@ -91,7 +91,13 @@ def mock_auth_dependency(request):
91
91
  if 'emulator' in test_path or 'integration' in test_path:
92
92
  yield
93
93
  return
94
-
94
+
95
+ # Skip for service-only unit tests that don't need the FastAPI app
96
+ service_only_tests = ['test_rate_limit_service', 'test_email_validation_service', 'test_rate_limits_api']
97
+ if any(test_name in test_path for test_name in service_only_tests):
98
+ yield
99
+ return
100
+
95
101
  # Skip if FIRESTORE_EMULATOR_HOST is set (running in emulator environment)
96
102
  import os
97
103
  if os.environ.get('FIRESTORE_EMULATOR_HOST'):