ipulse-shared-core-ftredge 20.0.1__py3-none-any.whl → 23.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- ipulse_shared_core_ftredge/cache/shared_cache.py +1 -2
- ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +60 -23
- ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +128 -157
- ipulse_shared_core_ftredge/exceptions/base_exceptions.py +35 -4
- ipulse_shared_core_ftredge/models/__init__.py +3 -7
- ipulse_shared_core_ftredge/models/base_data_model.py +17 -19
- ipulse_shared_core_ftredge/models/catalog/__init__.py +10 -0
- ipulse_shared_core_ftredge/models/catalog/subscriptionplan.py +274 -0
- ipulse_shared_core_ftredge/models/catalog/usertype.py +177 -0
- ipulse_shared_core_ftredge/models/user/__init__.py +5 -0
- ipulse_shared_core_ftredge/models/user/user_permissions.py +66 -0
- ipulse_shared_core_ftredge/models/user/user_subscription.py +348 -0
- ipulse_shared_core_ftredge/models/{user_auth.py → user/userauth.py} +19 -10
- ipulse_shared_core_ftredge/models/{user_profile.py → user/userprofile.py} +53 -21
- ipulse_shared_core_ftredge/models/user/userstatus.py +479 -0
- ipulse_shared_core_ftredge/monitoring/__init__.py +0 -2
- ipulse_shared_core_ftredge/monitoring/tracemon.py +6 -6
- ipulse_shared_core_ftredge/services/__init__.py +11 -13
- ipulse_shared_core_ftredge/services/base/__init__.py +3 -1
- ipulse_shared_core_ftredge/services/base/base_firestore_service.py +77 -16
- ipulse_shared_core_ftredge/services/{cache_aware_firestore_service.py → base/cache_aware_firestore_service.py} +46 -32
- ipulse_shared_core_ftredge/services/catalog/__init__.py +14 -0
- ipulse_shared_core_ftredge/services/catalog/catalog_subscriptionplan_service.py +277 -0
- ipulse_shared_core_ftredge/services/catalog/catalog_usertype_service.py +376 -0
- ipulse_shared_core_ftredge/services/charging_processors.py +25 -25
- ipulse_shared_core_ftredge/services/user/__init__.py +5 -25
- ipulse_shared_core_ftredge/services/user/user_core_service.py +536 -510
- ipulse_shared_core_ftredge/services/user/user_multistep_operations.py +796 -0
- ipulse_shared_core_ftredge/services/user/user_permissions_operations.py +392 -0
- ipulse_shared_core_ftredge/services/user/user_subscription_operations.py +488 -0
- ipulse_shared_core_ftredge/services/user/userauth_operations.py +928 -0
- ipulse_shared_core_ftredge/services/user/userprofile_operations.py +166 -0
- ipulse_shared_core_ftredge/services/user/userstatus_operations.py +476 -0
- ipulse_shared_core_ftredge/services/{charging_service.py → user_charging_service.py} +9 -9
- {ipulse_shared_core_ftredge-20.0.1.dist-info → ipulse_shared_core_ftredge-23.1.1.dist-info}/METADATA +3 -4
- ipulse_shared_core_ftredge-23.1.1.dist-info/RECORD +50 -0
- ipulse_shared_core_ftredge/models/subscription.py +0 -190
- ipulse_shared_core_ftredge/models/user_status.py +0 -495
- ipulse_shared_core_ftredge/monitoring/microservmon.py +0 -526
- ipulse_shared_core_ftredge/services/user/iam_management_operations.py +0 -326
- ipulse_shared_core_ftredge/services/user/subscription_management_operations.py +0 -384
- ipulse_shared_core_ftredge/services/user/user_account_operations.py +0 -479
- ipulse_shared_core_ftredge/services/user/user_auth_operations.py +0 -305
- ipulse_shared_core_ftredge/services/user/user_holistic_operations.py +0 -436
- ipulse_shared_core_ftredge-20.0.1.dist-info/RECORD +0 -42
- {ipulse_shared_core_ftredge-20.0.1.dist-info → ipulse_shared_core_ftredge-23.1.1.dist-info}/WHEEL +0 -0
- {ipulse_shared_core_ftredge-20.0.1.dist-info → ipulse_shared_core_ftredge-23.1.1.dist-info}/licenses/LICENCE +0 -0
- {ipulse_shared_core_ftredge-20.0.1.dist-info → ipulse_shared_core_ftredge-23.1.1.dist-info}/top_level.txt +0 -0

ipulse_shared_core_ftredge/services/base/base_firestore_service.py

@@ -7,7 +7,7 @@ This provides the foundation for all Firestore-based services.
 
 import json
 import logging
-from datetime import datetime, timezone
+from datetime import datetime, timezone, date
 from typing import Any, AsyncGenerator, Dict, Generic, List, Optional, TypeVar, Type, Union
 
 from google.cloud import firestore

@@ -20,6 +20,32 @@ from ...exceptions import ResourceNotFoundError, ServiceError, ValidationError a
 T = TypeVar('T', bound=BaseModel)
 
 
+def _sanitize_firestore_data(data: Any) -> Any:
+    """
+    Recursively sanitize data before sending to Firestore.
+    Converts datetime.date objects to datetime.datetime objects since Firestore
+    only supports datetime.datetime, not datetime.date.
+    """
+    if isinstance(data, date) and not isinstance(data, datetime):
+        # Convert date to datetime (start of day in UTC)
+        return datetime.combine(data, datetime.min.time()).replace(tzinfo=timezone.utc)
+
+    if isinstance(data, BaseModel):
+        # Convert Pydantic model to dict and sanitize recursively
+        return _sanitize_firestore_data(data.model_dump())
+
+    if isinstance(data, dict):
+        # Recurse into dictionaries
+        return {k: _sanitize_firestore_data(v) for k, v in data.items()}
+
+    if isinstance(data, list):
+        # Recurse into lists
+        return [_sanitize_firestore_data(item) for item in data]
+
+    # Return everything else as-is (str, int, float, bool, datetime, etc.)
+    return data
+
+
 class BaseFirestoreService(Generic[T]):
     """
     Base service class for Firestore operations using Pydantic models
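
The added _sanitize_firestore_data helper is what lets callers pass datetime.date values (or Pydantic models containing them) straight to the service. A minimal sketch of its effect, assuming the helper above; the payload fields are invented for illustration:

from datetime import date, datetime, timezone

payload = {"plan_name": "FREE", "valid_from": date(2025, 1, 1), "tags": ["a", "b"]}
sanitized = _sanitize_firestore_data(payload)
# sanitized["valid_from"] == datetime(2025, 1, 1, tzinfo=timezone.utc); Firestore accepts
# datetime but not date, so the write no longer fails. Other values pass through unchanged.
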

@@ -95,7 +121,7 @@ class BaseFirestoreService(Generic[T]):
                 additional_info={"validation_errors": e.errors()}
             )
 
-    async def get_document(self, doc_id: str, convert_to_model: bool = True) -> Union[Dict[str, Any]
+    async def get_document(self, doc_id: str, convert_to_model: bool = True) -> Union[T, Dict[str, Any]]:
         """
         Get a document by ID
 

@@ -104,7 +130,7 @@ class BaseFirestoreService(Generic[T]):
             convert_to_model: Whether to convert to Pydantic model
 
         Returns:
-            Document as
+            Document as a model instance or dict.
 
         Raises:
             ResourceNotFoundError: If document doesn't exist

@@ -121,7 +147,21 @@ class BaseFirestoreService(Generic[T]):
                 )
 
             doc_dict = doc.to_dict()
-
+            if not doc_dict:
+                # This case should ideally not be reached if doc.exists is true,
+                # but as a safeguard:
+                raise ServiceError(
+                    operation="get_document",
+                    error=ValueError("Document exists but data is empty."),
+                    resource_type=self.resource_type,
+                    resource_id=doc_id
+                )
+
+            if convert_to_model and self.model_class:
+                return self._convert_to_model(doc_dict, doc_id)
+            else:
+                doc_dict['id'] = doc_id
+                return doc_dict
 
         except ResourceNotFoundError:
             raise
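
With the reworked return type, get_document can hand back either the typed model or the raw dict. An illustrative call pattern (the service instance and document ID below are hypothetical):

async def show_get_document(service: BaseFirestoreService) -> None:
    # Default: converts to the service's model_class via _convert_to_model
    model = await service.get_document("doc_123")

    # Raw access: returns the Firestore dict with the document ID injected under 'id'
    raw = await service.get_document("doc_123", convert_to_model=False)
    assert raw["id"] == "doc_123"
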

@@ -162,6 +202,9 @@ class BaseFirestoreService(Generic[T]):
         else:
             doc_dict = data.copy()
 
+        # Sanitize data for Firestore (convert date objects to datetime)
+        doc_dict = _sanitize_firestore_data(doc_dict)
+
         # Ensure ID is set correctly
         doc_dict['id'] = doc_id
 

@@ -234,6 +277,10 @@ class BaseFirestoreService(Generic[T]):
 
         # Add update timestamp and user
         updates = update_data.copy()
+
+        # Sanitize data for Firestore (convert date objects to datetime)
+        updates = _sanitize_firestore_data(updates)
+
         updates['updated_at'] = datetime.now(timezone.utc)
         updates['updated_by'] = updater_uid
 
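
Because both create_document and update_document now run their payloads through the sanitizer, callers no longer need to pre-convert date fields themselves. A hedged example; the field name and UID are invented:

from datetime import date

async def bump_renewal(service: BaseFirestoreService, doc_id: str) -> None:
    # 'renewal_date' is a hypothetical date field; the service converts it to a
    # UTC datetime before writing, so Firestore accepts the update.
    await service.update_document(
        doc_id=doc_id,
        update_data={"renewal_date": date(2026, 1, 1)},
        updater_uid="system",
    )
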

@@ -310,7 +357,7 @@ class BaseFirestoreService(Generic[T]):
         """
         try:
             doc_ref = self._get_collection().document(doc_id)
-            doc =
+            doc = doc_ref.get()  # Remove await - this is synchronous
             return doc.exists
         except Exception as e:
             raise ServiceError(

@@ -325,8 +372,10 @@ class BaseFirestoreService(Generic[T]):
         limit: Optional[int] = None,
         start_after: Optional[str] = None,
         order_by: Optional[str] = None,
-
-
+        order_direction: str = firestore.Query.ASCENDING,
+        filters: Optional[List[tuple]] = None,
+        as_models: bool = True
+    ) -> Union[List[T], List[Dict[str, Any]]]:
         """
         List documents with optional filtering and pagination
 
|
@@ -334,10 +383,12 @@ class BaseFirestoreService(Generic[T]):
|
|
|
334
383
|
limit: Maximum number of documents to return
|
|
335
384
|
start_after: Document ID to start after for pagination
|
|
336
385
|
order_by: Field to order by
|
|
337
|
-
|
|
386
|
+
order_direction: Direction to order by (e.g., "ASCENDING", "DESCENDING")
|
|
387
|
+
filters: List of field filters as tuples (field, operator, value)
|
|
388
|
+
as_models: Whether to convert documents to Pydantic models
|
|
338
389
|
|
|
339
390
|
Returns:
|
|
340
|
-
List of documents as model instances
|
|
391
|
+
List of documents as model instances or dicts
|
|
341
392
|
|
|
342
393
|
Raises:
|
|
343
394
|
ServiceError: If an error occurs during listing
|
|

@@ -348,11 +399,12 @@ class BaseFirestoreService(Generic[T]):
             # Apply filters
             if filters:
                 for filter_condition in filters:
-
+                    field, operator, value = filter_condition
+                    query = query.where(field, operator, value)
 
             # Apply ordering
             if order_by:
-                query = query.order_by(order_by)
+                query = query.order_by(order_by, direction=order_direction)
 
             # Apply pagination
             if start_after:
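
The new filters and order_direction parameters make list_documents usable for simple queries without subclass-specific code. A sketch of a call, assuming the collection has the fields used below (they are examples, not taken from the diff):

async def list_recent_active(service: BaseFirestoreService):
    # Each filter is a (field, operator, value) tuple applied via query.where()
    return await service.list_documents(
        filters=[("pulse_status", "==", "ACTIVE")],
        order_by="updated_at",
        order_direction=firestore.Query.DESCENDING,
        limit=20,
        as_models=False,  # return plain dicts instead of Pydantic models
    )
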

@@ -373,7 +425,7 @@ class BaseFirestoreService(Generic[T]):
                 if doc_dict is None:
                     continue  # Skip documents that don't exist
 
-                if self.model_class:
+                if as_models and self.model_class:
                     model_instance = self._convert_to_model(doc_dict, doc.id)
                     results.append(model_instance)
                 else:

@@ -412,20 +464,27 @@ class BaseFirestoreService(Generic[T]):
             ServiceError: If an error occurs during archival
         """
         try:
+            # Generate unique archive document ID to handle duplicates
+            archive_timestamp = datetime.now(timezone.utc)
+            timestamp_str = archive_timestamp.strftime("%Y%m%d_%H%M%S_%f")[:-3]  # microseconds to milliseconds
+            unique_archive_doc_id = f"{doc_id}_{timestamp_str}"
+
             # Add archival metadata
             archive_data = document_data.copy()
             archive_data.update({
-                "archived_at":
+                "archived_at": archive_timestamp,
                 "archived_by": archived_by,
+                "updated_at": archive_timestamp,
+                "updated_by": archived_by,
                 "original_collection": self.collection_name,
                 "original_doc_id": doc_id
             })
 
-            # Store in archive collection
-            archive_ref = self.db.collection(archive_collection).document(
+            # Store in archive collection with unique ID
+            archive_ref = self.db.collection(archive_collection).document(unique_archive_doc_id)
             archive_ref.set(archive_data, timeout=self.timeout)
 
-            self.logger.info(f"Successfully archived {self.resource_type} {doc_id} to {archive_collection}")
+            self.logger.info(f"Successfully archived {self.resource_type} {doc_id} to {archive_collection} as {unique_archive_doc_id}")
             return True
 
         except Exception as e:
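
The archive ID suffix is simply the UTC archival timestamp with microseconds truncated to milliseconds, so re-archiving the same document produces distinct IDs. A quick standalone check of the format (the "user_123" prefix is a made-up document ID):

from datetime import datetime, timezone

ts = datetime(2025, 3, 1, 12, 30, 45, 123456, tzinfo=timezone.utc)
suffix = ts.strftime("%Y%m%d_%H%M%S_%f")[:-3]
print(f"user_123_{suffix}")  # user_123_20250301_123045_123
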

@@ -518,3 +577,5 @@ class BaseFirestoreService(Generic[T]):
                 resource_type=self.resource_type,
                 resource_id=doc_id
             )
+
+

ipulse_shared_core_ftredge/services/base/cache_aware_firestore_service.py

@@ -1,11 +1,11 @@
 """Cache-aware Firestore service base class."""
 import time
-from typing import TypeVar, Generic, Dict, Any, List, Optional
+from typing import TypeVar, Generic, Dict, Any, List, Optional, Union, Type
 from google.cloud import firestore
-from .
-from
-from
-from
+from . import BaseFirestoreService
+from ...exceptions import ResourceNotFoundError, ServiceError
+from ...cache.shared_cache import SharedCache
+from ...models import BaseDataModel
 
 T = TypeVar('T', bound=BaseDataModel)
 

@@ -20,12 +20,13 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
         db: firestore.Client,
         collection_name: str,
         resource_type: str,
-
+        model_class: Optional[Type[T]] = None,
+        logger=None,
         document_cache: Optional[SharedCache] = None,
         collection_cache: Optional[SharedCache] = None,
         timeout: float = 30.0
     ):
-        super().__init__(db, collection_name, resource_type, logger)
+        super().__init__(db, collection_name, resource_type, model_class, logger, timeout)
         self.document_cache = document_cache
         self.collection_cache = collection_cache
         self.timeout = timeout
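
With model_class, logger, and timeout now forwarded to BaseFirestoreService, constructing a cache-aware service looks roughly like the sketch below. The import paths follow the renamed module layout listed above and the caches are left as None (their defaults); treat this as an assumption rather than documented usage:

import logging
from google.cloud import firestore
from ipulse_shared_core_ftredge.models.catalog.subscriptionplan import SubscriptionPlan
from ipulse_shared_core_ftredge.services.base.cache_aware_firestore_service import CacheAwareFirestoreService

service = CacheAwareFirestoreService(
    db=firestore.Client(),
    collection_name="papp_core_catalog_subscriptionplans",
    resource_type="subscriptionplan",
    model_class=SubscriptionPlan,   # now passed through to BaseFirestoreService
    logger=logging.getLogger(__name__),
    document_cache=None,            # optional SharedCache instances
    collection_cache=None,
    timeout=30.0,
)
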

@@ -36,15 +37,16 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
         if self.collection_cache:
             self.logger.info(f"Collection cache enabled for {resource_type}: {self.collection_cache.name}")
 
-    async def get_document(self, doc_id: str) -> Dict[str, Any]:
+    async def get_document(self, doc_id: str, convert_to_model: bool = True) -> Union[T, Dict[str, Any]]:
         """
         Get a document with caching support.
 
         Args:
             doc_id: Document ID to fetch
+            convert_to_model: Whether to convert to Pydantic model
 
         Returns:
-            Document
+            Document as model instance or dictionary
 
         Raises:
             ResourceNotFoundError: If document doesn't exist

@@ -57,35 +59,29 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
 
             if cached_doc is not None:
                 self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
-
+                if convert_to_model and self.model_class:
+                    return self._convert_to_model(cached_doc, doc_id)
+                else:
+                    cached_doc['id'] = doc_id
+                    return cached_doc
             else:
                 self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
 
-        # Fetch from Firestore
-
-        doc_ref = self.db.collection(self.collection_name).document(doc_id)
-        doc = doc_ref.get(timeout=self.timeout)
-        firestore_time = (time.time() - start_time) * 1000
-
-        if not doc.exists:
-            self.logger.info(f"Document {doc_id} not found in Firestore after {firestore_time:.2f}ms")
-            raise ResourceNotFoundError(self.resource_type, doc_id)
-
-        doc_data = doc.to_dict()
-        self.logger.debug(f"Fetched document {doc_id} from Firestore in {firestore_time:.2f}ms")
-
-        # Cache the result
-        if self.document_cache and doc_data:
-            self.document_cache.set(doc_id, doc_data)
-            self.logger.debug(f"Cached document {doc_id}")
+        # Fetch from Firestore using parent method
+        return await super().get_document(doc_id, convert_to_model)
 
-
-
-    async def get_all_documents(self, cache_key: Optional[str] = None) -> List[Dict[str, Any]]:
+    async def get_all_documents(self, cache_key: Optional[str] = None, as_models: bool = True) -> Union[List[T], List[Dict[str, Any]]]:
         """
         Retrieves all documents from the collection.
         Uses collection_cache if cache_key is provided and cache is available.
         Also populates document_cache for each retrieved document.
+
+        Args:
+            cache_key: Optional cache key for collection-level caching
+            as_models: Whether to convert documents to Pydantic models
+
+        Returns:
+            List of documents as model instances or dicts
         """
         if cache_key and self.collection_cache:
             cached_collection_data = self.collection_cache.get(cache_key)

@@ -96,6 +92,15 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
                 for doc_data in cached_collection_data:
                     if "id" in doc_data and not self.document_cache.get(doc_data["id"]):
                         self._cache_document_data(doc_data["id"], doc_data)
+
+                # Convert to models if requested
+                if as_models and self.model_class:
+                    results = []
+                    for doc_data in cached_collection_data:
+                        if "id" in doc_data:
+                            model_instance = self._convert_to_model(doc_data, doc_data["id"])
+                            results.append(model_instance)
+                    return results
                 return cached_collection_data
             else:
                 self.logger.debug(f"Cache MISS for collection key '{cache_key}' in {self.collection_cache.name} - checking Firestore")

@@ -127,6 +132,15 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
                     # _cache_document_data expects 'id' to be in doc_data for keying
                     self._cache_document_data(doc_data["id"], doc_data)
 
+            # Convert to models if requested
+            if as_models and self.model_class:
+                results = []
+                for doc_data in docs_data_list:
+                    if "id" in doc_data:
+                        model_instance = self._convert_to_model(doc_data, doc_data["id"])
+                        results.append(model_instance)
+                return results
+
             return docs_data_list
 
         except Exception as e:

@@ -139,9 +153,9 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
             self.document_cache.set(doc_id, data)
             self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
 
-    async def create_document(self, doc_id: str, data: T, creator_uid: str) -> Dict[str, Any]:
+    async def create_document(self, doc_id: str, data: Union[T, Dict[str, Any]], creator_uid: str, merge: bool = False) -> Dict[str, Any]:
         """Create document and invalidate cache."""
-        result = await super().create_document(doc_id, data, creator_uid)
+        result = await super().create_document(doc_id, data, creator_uid, merge)
         self._invalidate_document_cache(doc_id)
         self._invalidate_all_collection_caches()
         return result

ipulse_shared_core_ftredge/services/catalog/__init__.py (new file)

@@ -0,0 +1,14 @@
+"""
+Catalog Services Module
+
+This module provides services for managing catalog data including subscription plans
+and user type templates stored in Firestore.
+"""
+
+from .catalog_subscriptionplan_service import CatalogSubscriptionPlanService
+from .catalog_usertype_service import CatalogUserTypeService
+
+__all__ = [
+    "CatalogSubscriptionPlanService",
+    "CatalogUserTypeService",
+]

ipulse_shared_core_ftredge/services/catalog/catalog_subscriptionplan_service.py (new file)

@@ -0,0 +1,277 @@
+"""
+Subscription Plan Catalog Service
+
+This service manages subscription plan templates stored in Firestore.
+These templates are used to configure and create user subscriptions consistently.
+"""
+
+import logging
+from typing import Dict, List, Optional, Any
+from google.cloud import firestore
+from google.cloud.firestore import Client
+from ipulse_shared_base_ftredge import SubscriptionPlanName
+from ipulse_shared_base_ftredge.enums.enums_status import ObjectOverallStatus
+from ipulse_shared_core_ftredge.models.catalog.subscriptionplan import SubscriptionPlan
+from ipulse_shared_core_ftredge.services.base.base_firestore_service import BaseFirestoreService
+from ipulse_shared_core_ftredge.exceptions import ServiceError
+
+
+class CatalogSubscriptionPlanService(BaseFirestoreService[SubscriptionPlan]):
+    """
+    Service for managing subscription plan catalog configurations.
+
+    This service provides CRUD operations for subscription plan templates that define
+    the structure and defaults for user subscriptions.
+    """
+
+    def __init__(
+        self,
+        firestore_client: Client,
+        logger: Optional[logging.Logger] = None
+    ):
+        """
+        Initialize the Subscription Plan Catalog Service.
+
+        Args:
+            firestore_client: Firestore client instance
+            logger: Logger instance (optional)
+        """
+        super().__init__(
+            db=firestore_client,
+            collection_name="papp_core_catalog_subscriptionplans",
+            resource_type="subscriptionplan",
+            model_class=SubscriptionPlan,
+            logger=logger or logging.getLogger(__name__)
+        )
+        self.archive_collection_name = "~archive_papp_core_catalog_subscriptionplans"
+
+    async def create_subscriptionplan(
+        self,
+        subscriptionplan_id: str,
+        subscription_plan: SubscriptionPlan,
+        creator_uid: str
+    ) -> SubscriptionPlan:
+        """
+        Create a new subscription plan.
+
+        Args:
+            subscriptionplan_id: Unique identifier for the plan
+            subscription_plan: Subscription plan data
+            creator_uid: UID of the user creating the plan
+
+        Returns:
+            Created subscription plan
+
+        Raises:
+            ServiceError: If creation fails
+            ValidationError: If plan data is invalid
+        """
+        self.logger.info(f"Creating subscription plan: {subscriptionplan_id}")
+
+        # Create the document
+        created_doc = await self.create_document(
+            doc_id=subscriptionplan_id,
+            data=subscription_plan,
+            creator_uid=creator_uid
+        )
+
+        # Convert back to model
+        result = SubscriptionPlan.model_validate(created_doc)
+        self.logger.info(f"Successfully created subscription plan: {subscriptionplan_id}")
+        return result
+
+    async def get_subscriptionplan(self, subscriptionplan_id: str) -> Optional[SubscriptionPlan]:
+        """
+        Retrieve a subscription plan by ID.
+
+        Args:
+            subscriptionplan_id: Unique identifier for the plan
+
+        Returns:
+            Subscription plan if found, None otherwise
+
+        Raises:
+            ServiceError: If retrieval fails
+        """
+        self.logger.debug(f"Retrieving subscription plan: {subscriptionplan_id}")
+        doc_data = await self.get_document(subscriptionplan_id)
+        if doc_data is None:
+            return None
+        return SubscriptionPlan.model_validate(doc_data) if isinstance(doc_data, dict) else doc_data
+
+    async def update_subscriptionplan(
+        self,
+        subscriptionplan_id: str,
+        updates: Dict[str, Any],
+        updater_uid: str
+    ) -> SubscriptionPlan:
+        """
+        Update a subscription plan.
+
+        Args:
+            subscriptionplan_id: Unique identifier for the plan
+            updates: Fields to update
+            updater_uid: UID of the user updating the plan
+
+        Returns:
+            Updated subscription plan
+
+        Raises:
+            ServiceError: If update fails
+            ResourceNotFoundError: If plan not found
+            ValidationError: If update data is invalid
+        """
+        self.logger.info(f"Updating subscription plan: {subscriptionplan_id}")
+
+        updated_doc = await self.update_document(
+            doc_id=subscriptionplan_id,
+            update_data=updates,
+            updater_uid=updater_uid
+        )
+
+        result = SubscriptionPlan.model_validate(updated_doc)
+        self.logger.info(f"Successfully updated subscription plan: {subscriptionplan_id}")
+        return result
+
+    async def delete_subscriptionplan(
+        self,
+        subscriptionplan_id: str,
+        archive: bool = True
+    ) -> bool:
+        """
+        Delete a subscription plan.
+
+        Args:
+            subscriptionplan_id: Unique identifier for the plan
+            archive: Whether to archive the plan before deletion
+
+        Returns:
+            True if deletion was successful
+
+        Raises:
+            ServiceError: If deletion fails
+            ResourceNotFoundError: If plan not found
+        """
+        self.logger.info(f"Deleting subscription plan: {subscriptionplan_id}")
+
+        if archive:
+            # Get the plan data before deletion for archiving
+            template = await self.get_subscriptionplan(subscriptionplan_id)
+            if template:
+                await self.archive_document(
+                    document_data=template.model_dump(),
+                    doc_id=subscriptionplan_id,
+                    archive_collection=self.archive_collection_name,
+                    archived_by="system"
+                )
+
+        result = await self.delete_document(subscriptionplan_id)
+        self.logger.info(f"Successfully deleted subscription plan: {subscriptionplan_id}")
+        return result
+
+    async def list_subscriptionplans(
+        self,
+        plan_name: Optional[SubscriptionPlanName] = None,
+        pulse_status: Optional[ObjectOverallStatus] = None,
+        latest_version_only: bool = False,
+        limit: Optional[int] = None,
+        version_ordering: str = "DESCENDING"
+    ) -> List[SubscriptionPlan]:
+        """
+        List subscription plans with optional filtering.
+
+        Args:
+            plan_name: Filter by specific plan name (FREE, BASE, PREMIUM)
+            pulse_status: Filter by specific pulse status
+            latest_version_only: Only return the latest version per plan
+            limit: Maximum number of plans to return
+            version_ordering: Order direction for version ('ASCENDING' or 'DESCENDING')
+
+        Returns:
+            List of subscription plans
+
+        Raises:
+            ServiceError: If listing fails
+        """
+        self.logger.debug(f"Listing subscription plans - plan_name: {plan_name}, pulse_status: {pulse_status}, latest_version_only: {latest_version_only}, version_ordering: {version_ordering}")
+
+        # Build query filters
+        filters = []
+        if plan_name:
+            filters.append(("plan_name", "==", plan_name.value))
+        if pulse_status:
+            filters.append(("pulse_status", "==", pulse_status.value))
+
+        # Set ordering
+        order_by = "plan_version"
+        order_direction = firestore.Query.DESCENDING if version_ordering == "DESCENDING" else firestore.Query.ASCENDING
+
+        # Optimize query if only the latest version of a specific plan is needed
+        query_limit = limit
+        if latest_version_only and plan_name:
+            query_limit = 1
+            # Ensure descending order to get the latest
+            order_direction = firestore.Query.DESCENDING
+
+        docs = await self.list_documents(
+            filters=filters,
+            order_by=order_by,
+            order_direction=order_direction,
+            limit=query_limit
+        )
+
+        # Convert to SubscriptionPlan models
+        plans = [SubscriptionPlan.model_validate(doc) for doc in docs]
+
+        # If we need the latest of all plans, we fetch all sorted by version
+        # and then pick the first one for each plan_name in Python.
+        if latest_version_only and not plan_name:
+            # This assumes the list is sorted by version descending.
+            if order_direction != firestore.Query.DESCENDING:
+                self.logger.warning("latest_version_only is True but version_ordering is not DESCENDING. Results may not be the latest.")
+
+            plan_groups = {}
+            for plan in plans:
+                key = plan.plan_name.value
+                if key not in plan_groups:
+                    plan_groups[key] = plan  # First one is the latest due to sorting
+
+            return list(plan_groups.values())
+
+        return plans
+
+
+    def _get_collection(self):
+        """Get the Firestore collection reference."""
+        return self.db.collection(self.collection_name)
+
+    async def subscriptionplan_exists(self, subscriptionplan_id: str) -> bool:
+        """
+        Check if a subscription plan exists.
+
+        Args:
+            subscriptionplan_id: Unique identifier for the plan
+
+        Returns:
+            True if plan exists, False otherwise
+
+        Raises:
+            ServiceError: If check fails
+        """
+        return await self.document_exists(subscriptionplan_id)
+
+    async def validate_subscriptionplan_data(self, subscriptionplan_data: Dict[str, Any]) -> tuple[bool, List[str]]:
+        """
+        Validate subscription plan data.
+
+        Args:
+            subscriptionplan_data: Plan data to validate
+
+        Returns:
+            Tuple of (is_valid, list_of_errors)
+        """
+        try:
+            SubscriptionPlan.model_validate(subscriptionplan_data)
+            return True, []
+        except Exception as e:
+            return False, [str(e)]