ipulse-shared-core-ftredge 16.0.1__py3-none-any.whl → 19.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.
- ipulse_shared_core_ftredge/__init__.py +1 -12
- ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +8 -5
- ipulse_shared_core_ftredge/exceptions/__init__.py +47 -0
- ipulse_shared_core_ftredge/exceptions/user_exceptions.py +219 -0
- ipulse_shared_core_ftredge/models/__init__.py +1 -3
- ipulse_shared_core_ftredge/models/base_api_response.py +15 -0
- ipulse_shared_core_ftredge/models/base_data_model.py +7 -6
- ipulse_shared_core_ftredge/models/user_auth.py +59 -4
- ipulse_shared_core_ftredge/models/user_profile.py +41 -7
- ipulse_shared_core_ftredge/models/user_status.py +44 -138
- ipulse_shared_core_ftredge/monitoring/__init__.py +5 -0
- ipulse_shared_core_ftredge/monitoring/microservmon.py +483 -0
- ipulse_shared_core_ftredge/services/__init__.py +21 -14
- ipulse_shared_core_ftredge/services/base/__init__.py +12 -0
- ipulse_shared_core_ftredge/services/base/base_firestore_service.py +520 -0
- ipulse_shared_core_ftredge/services/cache_aware_firestore_service.py +44 -8
- ipulse_shared_core_ftredge/services/charging_service.py +1 -1
- ipulse_shared_core_ftredge/services/user/__init__.py +37 -0
- ipulse_shared_core_ftredge/services/user/iam_management_operations.py +326 -0
- ipulse_shared_core_ftredge/services/user/subscription_management_operations.py +384 -0
- ipulse_shared_core_ftredge/services/user/user_account_operations.py +479 -0
- ipulse_shared_core_ftredge/services/user/user_auth_operations.py +305 -0
- ipulse_shared_core_ftredge/services/user/user_core_service.py +651 -0
- ipulse_shared_core_ftredge/services/user/user_holistic_operations.py +436 -0
- {ipulse_shared_core_ftredge-16.0.1.dist-info → ipulse_shared_core_ftredge-19.0.1.dist-info}/METADATA +2 -2
- ipulse_shared_core_ftredge-19.0.1.dist-info/RECORD +41 -0
- {ipulse_shared_core_ftredge-16.0.1.dist-info → ipulse_shared_core_ftredge-19.0.1.dist-info}/WHEEL +1 -1
- ipulse_shared_core_ftredge/models/organization_profile.py +0 -96
- ipulse_shared_core_ftredge/models/user_profile_update.py +0 -39
- ipulse_shared_core_ftredge/services/base_firestore_service.py +0 -249
- ipulse_shared_core_ftredge/services/fastapiservicemon.py +0 -140
- ipulse_shared_core_ftredge/services/servicemon.py +0 -240
- ipulse_shared_core_ftredge-16.0.1.dist-info/RECORD +0 -33
- ipulse_shared_core_ftredge/{services/base_service_exceptions.py → exceptions/base_exceptions.py} +1 -1
- {ipulse_shared_core_ftredge-16.0.1.dist-info → ipulse_shared_core_ftredge-19.0.1.dist-info}/licenses/LICENCE +0 -0
- {ipulse_shared_core_ftredge-16.0.1.dist-info → ipulse_shared_core_ftredge-19.0.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,520 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Base Firestore service class
|
|
3
|
+
|
|
4
|
+
Moved from services/base_firestore_service.py to prevent circular imports.
|
|
5
|
+
This provides the foundation for all Firestore-based services.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
from datetime import datetime, timezone
|
|
11
|
+
from typing import Any, AsyncGenerator, Dict, Generic, List, Optional, TypeVar, Type, Union
|
|
12
|
+
|
|
13
|
+
from google.cloud import firestore
|
|
14
|
+
from google.cloud.firestore_v1.base_query import FieldFilter
|
|
15
|
+
from pydantic import BaseModel, ValidationError
|
|
16
|
+
|
|
17
|
+
from ...exceptions import ResourceNotFoundError, ServiceError, ValidationError as ServiceValidationError
|
|
18
|
+
|
|
19
|
+
# Type variable for the model type
|
|
20
|
+
T = TypeVar('T', bound=BaseModel)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class BaseFirestoreService(Generic[T]):
|
|
24
|
+
"""
|
|
25
|
+
Base service class for Firestore operations using Pydantic models
|
|
26
|
+
|
|
27
|
+
This class provides common CRUD operations for Firestore collections
|
|
28
|
+
with type safety through Pydantic models.
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
def __init__(
    self,
    db: firestore.Client,
    collection_name: str,
    resource_type: str,
    model_class: Optional[Type[T]] = None,
    logger: Optional[logging.Logger] = None,
    timeout: float = 10.0
):
    """
    Set up a Firestore-backed service for a single collection.

    Args:
        db: Firestore client used for all operations
        collection_name: Firestore collection this service manages
        resource_type: Human-readable resource name used in error reporting
        model_class: Optional Pydantic model used to validate documents
        logger: Logger to use; a module-level logger is created when omitted
        timeout: Default timeout (seconds) for Firestore operations
    """
    self.db = db
    self.collection_name = collection_name
    self.resource_type = resource_type
    self.model_class = model_class
    # Fall back to a module-level logger when the caller does not supply one.
    self.logger = logger if logger else logging.getLogger(__name__)
    self.timeout = timeout
+
def _get_collection(self) -> firestore.CollectionReference:
    """Return a reference to the Firestore collection this service manages."""
    collection_ref = self.db.collection(self.collection_name)
    return collection_ref
+
def _convert_to_model(self, doc_dict: Dict[str, Any], doc_id: str) -> T:
    """
    Build a validated Pydantic model instance from raw Firestore data.

    Args:
        doc_dict: Raw document payload from Firestore
        doc_id: Firestore document ID, injected as ``id`` when absent

    Returns:
        An instance of ``self.model_class``

    Raises:
        ServiceError: If no model class was configured for this service
        ServiceValidationError: If the payload fails model validation
    """
    # A class object is always truthy, so this is exactly a None check.
    if self.model_class is None:
        raise ServiceError(
            operation="convert_to_model",
            error=ValueError("No model class specified"),
            resource_type=self.resource_type,
            resource_id=doc_id
        )

    # Make sure the model sees the document ID even when Firestore
    # did not store it as a field.
    doc_dict.setdefault('id', doc_id)

    try:
        return self.model_class(**doc_dict)
    except ValidationError as e:
        raise ServiceValidationError(
            resource_type=self.resource_type,
            detail=f"Validation failed: {str(e)}",
            resource_id=doc_id,
            additional_info={"validation_errors": e.errors()}
        )
+
async def get_document(self, doc_id: str, convert_to_model: bool = True) -> Union[T, Dict[str, Any], None]:
    """
    Get a document by ID.

    Args:
        doc_id: Document ID
        convert_to_model: Whether to convert to a Pydantic model; only takes
            effect when ``model_class`` is configured, otherwise the raw dict
            is returned

    Returns:
        The document as a model instance (when ``convert_to_model`` is True
        and a model class is configured) or as a dict

    Raises:
        ResourceNotFoundError: If the document doesn't exist
        ServiceValidationError: If model conversion fails
        ServiceError: If an error occurs during retrieval
    """
    try:
        doc_ref = self._get_collection().document(doc_id)
        doc = doc_ref.get()

        if not doc.exists:
            raise ResourceNotFoundError(
                resource_type=self.resource_type,
                resource_id=doc_id
            )

        doc_dict = doc.to_dict()

        # Fix: the convert_to_model flag was previously accepted but never
        # acted on. Honor it when a model class is available.
        if convert_to_model and self.model_class and doc_dict is not None:
            return self._convert_to_model(doc_dict, doc_id)

        return doc_dict

    except (ResourceNotFoundError, ServiceValidationError):
        # Let domain exceptions propagate unwrapped.
        raise
    except Exception as e:
        raise ServiceError(
            operation="get_document",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def create_document(
    self,
    doc_id: str,
    data: Union[T, Dict[str, Any]],
    creator_uid: str,
    merge: bool = False
) -> Dict[str, Any]:
    """
    Create a new document in the collection.

    Args:
        doc_id: Document ID to write to
        data: Payload, either a Pydantic model instance or a plain dict
        creator_uid: UID recorded as creator and updater of the document
        merge: When True, merge into any existing document instead of
            overwriting it

    Returns:
        The document payload that was written, as a dict

    Raises:
        ServiceError: If an error occurs during creation
    """
    try:
        # Normalise the payload to a plain dict we can safely mutate.
        doc_dict = data.model_dump() if isinstance(data, BaseModel) else data.copy()

        # The stored document always carries its own ID.
        doc_dict['id'] = doc_id

        # Stamp audit metadata; pre-supplied creation fields are preserved.
        now = datetime.now(timezone.utc)
        doc_dict.setdefault('created_at', now)
        doc_dict.setdefault('created_by', creator_uid)
        doc_dict['updated_at'] = now
        doc_dict['updated_by'] = creator_uid

        # Write the document.
        self._get_collection().document(doc_id).set(doc_dict, merge=merge)
        return doc_dict

    except ServiceValidationError:
        # Validation errors propagate unwrapped.
        raise
    except Exception as e:
        raise ServiceError(
            operation=f"creating {self.resource_type}",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def update_document(
    self,
    doc_id: str,
    update_data: Dict[str, Any],
    updater_uid: str,
    require_exists: bool = True
) -> Dict[str, Any]:
    """
    Update an existing document.

    Args:
        doc_id: Document ID
        update_data: Mapping of fields to change; must not be empty
        updater_uid: UID recorded as the updater
        require_exists: When True, fail if the document is missing

    Returns:
        The full document after the update, as a dict

    Raises:
        ValidationError: If update_data is empty
        ResourceNotFoundError: If the document is missing and
            ``require_exists`` is True
        ServiceError: If an error occurs during the update
    """
    # Reject empty updates up front rather than issuing a no-op write.
    if not update_data:
        raise ServiceValidationError(
            resource_type=self.resource_type,
            detail="Update data cannot be empty"
        )

    try:
        doc_ref = self._get_collection().document(doc_id)

        if require_exists and not doc_ref.get().exists:
            raise ResourceNotFoundError(
                resource_type=self.resource_type,
                resource_id=doc_id
            )

        # Never mutate the caller's dict; stamp audit metadata on a copy.
        payload = dict(update_data)
        payload['updated_at'] = datetime.now(timezone.utc)
        payload['updated_by'] = updater_uid

        doc_ref.update(payload)

        # Read the document back so callers see the final stored state.
        snapshot = doc_ref.get()
        return snapshot.to_dict() if snapshot.exists else {}

    except ResourceNotFoundError:
        raise
    except Exception as e:
        raise ServiceError(
            operation="update_document",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def delete_document(self, doc_id: str, require_exists: bool = True) -> bool:
    """
    Delete a document.

    Args:
        doc_id: Document ID
        require_exists: Whether to require the document to exist

    Returns:
        True if deleted, False if not found and require_exists is False

    Raises:
        ResourceNotFoundError: If document doesn't exist and require_exists is True
        ServiceError: If an error occurs during deletion
    """
    try:
        doc_ref = self._get_collection().document(doc_id)

        # Fix: the existence check previously ran only when require_exists
        # was True, so a missing document with require_exists=False fell
        # through to delete() and returned True (Firestore delete does not
        # fail on missing docs), contradicting the documented contract.
        # Check existence unconditionally instead.
        doc = doc_ref.get()
        if not doc.exists:
            if require_exists:
                raise ResourceNotFoundError(
                    resource_type=self.resource_type,
                    resource_id=doc_id
                )
            return False

        doc_ref.delete()
        return True

    except ResourceNotFoundError:
        raise
    except Exception as e:
        raise ServiceError(
            operation="delete_document",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def document_exists(self, doc_id: str) -> bool:
    """
    Check if a document exists.

    Args:
        doc_id: Document ID

    Returns:
        True if document exists, False otherwise

    Raises:
        ServiceError: If an error occurs during check
    """
    try:
        doc_ref = self._get_collection().document(doc_id)
        # Fix: the sync Firestore client's get() returns a DocumentSnapshot,
        # not an awaitable — the previous `await doc_ref.get()` raised
        # TypeError. Every sibling method calls get() without await.
        doc = doc_ref.get()
        return doc.exists
    except Exception as e:
        raise ServiceError(
            operation="document_exists",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def list_documents(
    self,
    limit: Optional[int] = None,
    start_after: Optional[str] = None,
    order_by: Optional[str] = None,
    filters: Optional[List[FieldFilter]] = None
) -> List[Union[T, Dict[str, Any]]]:
    """
    List documents with optional filtering and pagination.

    Args:
        limit: Maximum number of documents to return
        start_after: Document ID to start after for pagination
        order_by: Field to order by
        filters: List of field filters

    Returns:
        List of model instances when ``model_class`` is configured,
        otherwise list of dicts (each including its ``id``)

    Raises:
        ServiceError: If an error occurs during listing
    """
    try:
        query = self._get_collection()

        # Apply filters
        if filters:
            for filter_condition in filters:
                query = query.where(filter=filter_condition)

        # Apply ordering
        if order_by:
            query = query.order_by(order_by)

        # Apply pagination
        if start_after:
            # Fix: the sync Firestore client's get() returns a snapshot, not
            # an awaitable — the previous `await ...get()` raised TypeError.
            start_doc = self._get_collection().document(start_after).get()
            if start_doc.exists:
                query = query.start_after(start_doc)

        if limit:
            query = query.limit(limit)

        # Execute query
        docs = query.get()

        # Convert to models (or plain dicts when no model class is set)
        results = []
        for doc in docs:
            doc_dict = doc.to_dict()
            if doc_dict is None:
                continue  # Skip documents that don't exist

            if self.model_class:
                model_instance = self._convert_to_model(doc_dict, doc.id)
                results.append(model_instance)
            else:
                doc_dict['id'] = doc.id
                results.append(doc_dict)

        return results

    except Exception as e:
        raise ServiceError(
            operation="list_documents",
            error=e,
            resource_type=self.resource_type
        )
+
async def archive_document(
    self,
    document_data: Dict[str, Any],
    doc_id: str,
    archive_collection: str,
    archived_by: str
) -> bool:
    """
    Copy a document into an archive collection, stamped with archival metadata.

    Args:
        document_data: The document data to archive
        doc_id: Document ID
        archive_collection: Name of the archive collection
        archived_by: UID of the user performing the archive

    Returns:
        True if archival was successful

    Raises:
        ServiceError: If an error occurs during archival
    """
    try:
        # Build the archived payload without mutating the caller's dict.
        archive_data = {
            **document_data,
            "archived_at": datetime.now(timezone.utc),
            "archived_by": archived_by,
            "original_collection": self.collection_name,
            "original_doc_id": doc_id,
        }

        # Write the archived copy under the same document ID.
        self.db.collection(archive_collection).document(doc_id).set(
            archive_data, timeout=self.timeout
        )

        self.logger.info(f"Successfully archived {self.resource_type} {doc_id} to {archive_collection}")
        return True

    except Exception as e:
        raise ServiceError(
            operation="archive_document",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
+
async def restore_document(
    self,
    doc_id: str,
    source_collection: str,
    target_collection: str,
    restored_by: str
) -> bool:
    """
    Restore a document from an archive collection to the target collection

    Args:
        doc_id: Document ID to restore
        source_collection: Archive collection name to restore from
        target_collection: Target collection name to restore to
        restored_by: UID of the user performing the restore

    Returns:
        True if restoration was successful

    Raises:
        ServiceError: If an error occurs during restoration
        ResourceNotFoundError: If document not found in archive
    """
    try:
        # Get document from archive collection
        archive_ref = self.db.collection(source_collection).document(doc_id)
        archive_doc = archive_ref.get(timeout=self.timeout)

        if not archive_doc.exists:
            raise ResourceNotFoundError(
                resource_type=self.resource_type,
                resource_id=doc_id,
                additional_info={"message": f"Document not found in archive collection {source_collection}"}
            )

        archive_data = archive_doc.to_dict()
        if not archive_data:
            raise ServiceError(
                operation="restore_document",
                error=ValueError("Archive document data is empty"),
                resource_type=self.resource_type,
                resource_id=doc_id
            )

        # Prepare restored data (remove archive metadata)
        restored_data = archive_data.copy()

        # Remove archive-specific fields so the restored document looks like
        # a normal live document again
        archive_fields_to_remove = [
            "archived_at", "archived_by", "original_collection",
            "original_doc_id", "restored_at", "restored_by"
        ]
        for field in archive_fields_to_remove:
            restored_data.pop(field, None)

        # Add restoration metadata (also refreshes the audit fields)
        restored_data.update({
            "restored_at": datetime.now(timezone.utc),
            "restored_by": restored_by,
            "updated_at": datetime.now(timezone.utc),
            "updated_by": restored_by
        })

        # Restore to target collection
        target_ref = self.db.collection(target_collection).document(doc_id)
        target_ref.set(restored_data, timeout=self.timeout)

        # Remove from archive collection
        # NOTE(review): the set above and this delete are two separate calls,
        # not a transaction — a failure between them leaves the document in
        # both collections; consider a Firestore transaction if atomicity
        # matters here.
        archive_ref.delete()

        self.logger.info(f"Successfully restored {self.resource_type} {doc_id} from {source_collection} to {target_collection}")
        return True

    except ResourceNotFoundError:
        raise
    except Exception as e:
        raise ServiceError(
            operation="restore_document",
            error=e,
            resource_type=self.resource_type,
            resource_id=doc_id
        )
|
@@ -2,10 +2,10 @@
|
|
|
2
2
|
import time
|
|
3
3
|
from typing import TypeVar, Generic, Dict, Any, List, Optional
|
|
4
4
|
from google.cloud import firestore
|
|
5
|
-
from
|
|
6
|
-
from
|
|
7
|
-
from
|
|
8
|
-
from
|
|
5
|
+
from .base import BaseFirestoreService
|
|
6
|
+
from ..exceptions import ResourceNotFoundError, ServiceError
|
|
7
|
+
from ..cache.shared_cache import SharedCache
|
|
8
|
+
from ..models import BaseDataModel
|
|
9
9
|
|
|
10
10
|
T = TypeVar('T', bound=BaseDataModel)
|
|
11
11
|
|
|
@@ -143,18 +143,22 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
|
|
|
143
143
|
"""Create document and invalidate cache."""
|
|
144
144
|
result = await super().create_document(doc_id, data, creator_uid)
|
|
145
145
|
self._invalidate_document_cache(doc_id)
|
|
146
|
+
self._invalidate_all_collection_caches()
|
|
146
147
|
return result
|
|
147
148
|
|
|
148
|
-
async def update_document(self, doc_id: str, update_data: Dict[str, Any], updater_uid: str, require_exists: bool = True) -> Dict[str, Any]:
    """Update the document, then drop every cache entry it may affect."""
    outcome = await super().update_document(doc_id, update_data, updater_uid, require_exists)
    self._invalidate_document_cache(doc_id)
    self._invalidate_all_collection_caches()
    return outcome
|
|
153
155
|
|
|
154
|
-
async def delete_document(self, doc_id: str, require_exists: bool = True) -> bool:
    """Delete the document, then drop every cache entry it may affect."""
    deleted = await super().delete_document(doc_id, require_exists)
    self._invalidate_document_cache(doc_id)
    self._invalidate_all_collection_caches()
    return deleted
|
|
158
162
|
|
|
159
163
|
def _invalidate_document_cache(self, doc_id: str) -> None:
|
|
160
164
|
"""Invalidate document cache for a specific document."""
|
|
@@ -167,3 +171,35 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
|
|
|
167
171
|
if self.collection_cache:
|
|
168
172
|
self.collection_cache.invalidate(cache_key)
|
|
169
173
|
self.logger.debug(f"Invalidated collection cache for key {cache_key}")
|
|
174
|
+
|
|
175
|
+
def _invalidate_all_collection_caches(self) -> None:
    """Invalidate all collection cache entries."""
    if self.collection_cache:
        self.collection_cache.invalidate_all()
        # Fix: the message has no placeholders, so the f-string prefix was
        # redundant (ruff F541); the logged text is unchanged.
        self.logger.debug("Invalidated all collection cache entries")
|
|
180
|
+
|
|
181
|
+
async def archive_document(
    self,
    document_data: Dict[str, Any],
    doc_id: str,
    archive_collection: str,
    archived_by: str
) -> bool:
    """Archive the document, then drop every cache entry it may affect."""
    archived = await super().archive_document(document_data, doc_id, archive_collection, archived_by)
    self._invalidate_document_cache(doc_id)
    self._invalidate_all_collection_caches()
    return archived
|
|
193
|
+
|
|
194
|
+
async def restore_document(
    self,
    doc_id: str,
    source_collection: str,
    target_collection: str,
    restored_by: str
) -> bool:
    """Restore the document, then drop every cache entry it may affect."""
    restored = await super().restore_document(doc_id, source_collection, target_collection, restored_by)
    self._invalidate_document_cache(doc_id)
    self._invalidate_all_collection_caches()
    return restored
|
|
@@ -3,7 +3,7 @@ import logging
|
|
|
3
3
|
from typing import Dict, Any, Optional, Tuple
|
|
4
4
|
from datetime import datetime, timezone
|
|
5
5
|
from google.cloud import firestore
|
|
6
|
-
from ipulse_shared_core_ftredge.
|
|
6
|
+
from ipulse_shared_core_ftredge.exceptions import ServiceError, ResourceNotFoundError, ValidationError
|
|
7
7
|
from ipulse_shared_core_ftredge.models.user_status import UserStatus
|
|
8
8
|
|
|
9
9
|
# Default Firestore timeout if not provided by the consuming application
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"""
|
|
2
|
+
User Management Services Module
|
|
3
|
+
|
|
4
|
+
This module contains all user-related services organized into specialized operation classes:
|
|
5
|
+
- UserManagementOperations: Core CRUD operations for user profiles and status
|
|
6
|
+
- SubscriptionManagementOperations: Subscription plan management and operations
|
|
7
|
+
- IAMManagementOperations: Firebase Auth claims and permissions management
|
|
8
|
+
- UserDeletionOperations: User deletion and cleanup operations
|
|
9
|
+
- UserCoreService: Orchestrating service that composes all operation classes
|
|
10
|
+
- User-specific exceptions: Specialized exception classes for user operations
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
from .user_account_operations import UserAccountOperations
|
|
14
|
+
from .subscription_management_operations import (
|
|
15
|
+
SubscriptionManagementOperations,
|
|
16
|
+
SubscriptionPlanDocument
|
|
17
|
+
)
|
|
18
|
+
from .iam_management_operations import IAMManagementOperations
|
|
19
|
+
from .user_auth_operations import UserAuthOperations
|
|
20
|
+
from .user_holistic_operations import UserHolisticOperations
|
|
21
|
+
from .user_core_service import UserCoreService, UserTypeDefaultsDocument
|
|
22
|
+
|
|
23
|
+
__all__ = [
|
|
24
|
+
# Operation classes
|
|
25
|
+
'UserAccountOperations',
|
|
26
|
+
'SubscriptionManagementOperations',
|
|
27
|
+
'IAMManagementOperations',
|
|
28
|
+
'UserAuthOperations',
|
|
29
|
+
'UserHolisticOperations',
|
|
30
|
+
|
|
31
|
+
# Main orchestrating service
|
|
32
|
+
'UserCoreService',
|
|
33
|
+
|
|
34
|
+
# Supporting models
|
|
35
|
+
'SubscriptionPlanDocument',
|
|
36
|
+
'UserTypeDefaultsDocument'
|
|
37
|
+
]
|