ipulse-shared-core-ftredge 27.2.0__tar.gz → 27.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.

Files changed (64) hide show
  1. {ipulse_shared_core_ftredge-27.2.0/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-27.4.0}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/setup.py +1 -1
  3. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/base_api_response.py +4 -0
  4. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/time_series_packaged_dataset_model.py +9 -7
  5. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/__init__.py +1 -1
  6. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/base/__init__.py +1 -0
  7. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/base/cache_aware_firestore_service.py +67 -5
  8. ipulse_shared_core_ftredge-27.4.0/src/ipulse_shared_core_ftredge/services/base/multi_collection_cache_aware_firestore_service.py +244 -0
  9. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  10. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +2 -3
  11. ipulse_shared_core_ftredge-27.2.0/src/ipulse_shared_core_ftredge/utils/authz_credit_extraction.py +0 -0
  12. ipulse_shared_core_ftredge-27.2.0/tests/test_shared_cache.py +0 -147
  13. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/LICENCE +0 -0
  14. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/README.md +0 -0
  15. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/pyproject.toml +0 -0
  16. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/setup.cfg +0 -0
  17. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/__init__.py +0 -0
  18. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/cache/__init__.py +0 -0
  19. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/cache/shared_cache.py +0 -0
  20. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/__init__.py +0 -0
  21. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/auth_firebase_token_validation.py +0 -0
  22. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/auth_protected_router.py +0 -0
  23. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/authz_credit_extraction.py +0 -0
  24. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/authz_for_apis.py +0 -0
  25. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/dependencies/firestore_client.py +0 -0
  26. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/exceptions/__init__.py +0 -0
  27. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/exceptions/base_exceptions.py +0 -0
  28. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/exceptions/user_exceptions.py +0 -0
  29. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  30. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/base_data_model.py +0 -0
  31. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/catalog/__init__.py +0 -0
  32. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/catalog/subscriptionplan.py +0 -0
  33. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/catalog/usertype.py +0 -0
  34. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/credit_api_response.py +0 -0
  35. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/custom_json_response.py +0 -0
  36. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/__init__.py +0 -0
  37. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/user_permissions.py +0 -0
  38. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/user_subscription.py +0 -0
  39. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/userauth.py +0 -0
  40. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/userprofile.py +0 -0
  41. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/models/user/userstatus.py +0 -0
  42. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/monitoring/__init__.py +0 -0
  43. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/monitoring/tracemon.py +0 -0
  44. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/base/base_firestore_service.py +0 -0
  45. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/catalog/__init__.py +0 -0
  46. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/catalog/catalog_subscriptionplan_service.py +0 -0
  47. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/catalog/catalog_usertype_service.py +0 -0
  48. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/charging_processors.py +0 -0
  49. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/__init__.py +0 -0
  50. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/user_charging_operations.py +0 -0
  51. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/user_core_service.py +0 -0
  52. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/user_multistep_operations.py +0 -0
  53. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/user_permissions_operations.py +0 -0
  54. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/user_subscription_operations.py +0 -0
  55. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/userauth_operations.py +0 -0
  56. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/userprofile_operations.py +0 -0
  57. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user/userstatus_operations.py +0 -0
  58. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/services/user_charging_service.py +0 -0
  59. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -0
  60. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py +0 -0
  61. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge/utils/json_encoder.py +0 -0
  62. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  63. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  64. {ipulse_shared_core_ftredge-27.2.0 → ipulse_shared_core_ftredge-27.4.0}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 27.2.0
3
+ Version: 27.4.0
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
 
4
4
  setup(
5
5
  name='ipulse_shared_core_ftredge',
6
- version='27.2.0',
6
+ version='27.4.0',
7
7
  package_dir={'': 'src'}, # Specify the source directory
8
8
  packages=find_packages(where='src'), # Look for packages in 'src'
9
9
  install_requires=[
@@ -16,6 +16,10 @@ class BaseAPIResponse(BaseModel, Generic[T]):
16
16
  message: Optional[str] = None
17
17
  error: Optional[str] = None
18
18
 
19
+ # Optional fields for specific use cases
20
+ cache_hit: Optional[bool] = None # Whether data came from cache
21
+ charged: Optional[bool] = None # Whether credits were charged for this request
22
+
19
23
  metadata: Dict[str, Any] = {
20
24
  "timestamp": dt.datetime.now(dt.timezone.utc).isoformat()
21
25
  }
@@ -3,7 +3,6 @@ from typing import List, Optional, TypeVar, Generic
3
3
  from datetime import datetime
4
4
  from pydantic import Field, BaseModel
5
5
  from ipulse_shared_core_ftredge.models.base_data_model import BaseDataModel
6
- from ipulse_shared_base_ftredge.enums import DatasetLineage, DatasetScope
7
6
 
8
7
  # Generic type for the records within the dataset
9
8
  RecordsSamplingType = TypeVar('RecordsSamplingType', bound=BaseModel)
@@ -13,11 +12,14 @@ class TimeSeriesPackagedDatasetModel(BaseDataModel, Generic[RecordsSamplingType]
13
12
  An intermediary model for time series datasets that holds aggregated records.
14
13
  It provides a generic way to handle different types of time series records.
15
14
  """
16
- dataset_id: str = Field(..., description="The unique identifier for this dataset, often matching the asset ID.")
15
+ # Subject identification fields
16
+ subject_id: str = Field(default="", description="The unique identifier for the subject.")
17
+ pulse_sector_category: str = Field(default="", description="The sector category for the subject.")
17
18
 
18
- dataset_modality: str = Field(..., description="The modality of the dataset, e.g., 'time_series', 'cross_sectional'.")
19
- dataset_lineage: DatasetLineage = Field(..., description="The lineage of the data, indicating its origin and transformations.")
20
- dataset_partition: DatasetScope = Field(..., description="The partition type of the dataset, e.g., full, subsampled.")
19
+ # Schema identification fields
20
+ schema_id: str = Field(default="", description="The unique identifier for the schema.")
21
+ schema_name: str = Field(default="", description="The name of the schema.")
22
+ schema_version: int = Field(default=1, description="The version of the schema.")
21
23
 
22
24
  # Generic lists for different temporal buckets of records
23
25
  max_bulk_records: List[RecordsSamplingType] = Field(default_factory=list)
@@ -39,5 +41,5 @@ class TimeSeriesPackagedDatasetModel(BaseDataModel, Generic[RecordsSamplingType]
39
41
 
40
42
  @property
41
43
  def id(self) -> str:
42
- """Return dataset_id for backward compatibility and consistency."""
43
- return self.dataset_id
44
+ """Return subject_id for backward compatibility and consistency."""
45
+ return self.subject_id
@@ -2,7 +2,7 @@
2
2
 
3
3
 
4
4
  # Import from base services
5
- from .base import BaseFirestoreService, CacheAwareFirestoreService
5
+ from .base import BaseFirestoreService, CacheAwareFirestoreService, MultiCollectionCacheAwareFirestoreService
6
6
 
7
7
  from .charging_processors import ChargingProcessor
8
8
  from .user_charging_service import UserChargingService
@@ -7,6 +7,7 @@ preventing circular import dependencies.
7
7
 
8
8
  from .base_firestore_service import BaseFirestoreService
9
9
  from .cache_aware_firestore_service import CacheAwareFirestoreService
10
+ from .multi_collection_cache_aware_firestore_service import MultiCollectionCacheAwareFirestoreService
10
11
 
11
12
  __all__ = [
12
13
  'BaseFirestoreService',
@@ -1,13 +1,14 @@
1
1
  """Cache-aware Firestore service base class."""
2
2
  import time
3
- from typing import TypeVar, Generic, Dict, Any, List, Optional, Union, Type
3
+ from typing import TypeVar, Generic, Dict, Any, List, Optional, Union, Type, Tuple
4
4
  from google.cloud import firestore
5
5
  from . import BaseFirestoreService
6
6
  from ...exceptions import ResourceNotFoundError, ServiceError
7
7
  from ...cache.shared_cache import SharedCache
8
8
  from ...models import BaseDataModel
9
9
 
10
- T = TypeVar('T', bound=BaseDataModel)
10
+ T = TypeVar('T', bound=BaseDataModel)
11
+
11
12
 
12
13
  class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
13
14
  """
@@ -58,7 +59,9 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
58
59
  cache_check_time = (time.time() - start_time) * 1000
59
60
 
60
61
  if cached_doc is not None:
61
- self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
62
+ # SharedCache.get() already logs cache hit, only log timing if significant
63
+ if cache_check_time > 5.0: # Only log if cache check took >5ms
64
+ self.logger.debug(f"Cache HIT for document {doc_id} in {cache_check_time:.2f}ms")
62
65
  if convert_to_model and self.model_class:
63
66
  return self._convert_to_model(cached_doc, doc_id)
64
67
  else:
@@ -68,7 +71,66 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
68
71
  self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
69
72
 
70
73
  # Fetch from Firestore using parent method
71
- return await super().get_document(doc_id, convert_to_model)
74
+ result = await super().get_document(doc_id, convert_to_model)
75
+
76
+ # Cache the result if we have a cache and got valid data
77
+ if self.document_cache and result is not None:
78
+ if convert_to_model and isinstance(result, BaseDataModel):
79
+ # Cache the model's dict representation
80
+ self._cache_document_data(doc_id, result.model_dump())
81
+ elif isinstance(result, dict):
82
+ # Cache the dict directly
83
+ self._cache_document_data(doc_id, result)
84
+
85
+ return result
86
+
87
+ async def get_document_with_cache_info(self, doc_id: str, convert_to_model: bool = True) -> Tuple[Union[T, Dict[str, Any], None], bool]:
88
+ """
89
+ Get a document with cache hit information.
90
+
91
+ Args:
92
+ doc_id: Document ID to fetch
93
+ convert_to_model: Whether to convert to Pydantic model
94
+
95
+ Returns:
96
+ Tuple of (document, cache_hit) where cache_hit indicates if from cache
97
+
98
+ Raises:
99
+ ResourceNotFoundError: If document doesn't exist
100
+ """
101
+ cache_hit = False
102
+
103
+ # Check cache first
104
+ if self.document_cache:
105
+ cached_doc = self.document_cache.get(doc_id)
106
+ if cached_doc is not None:
107
+ cache_hit = True
108
+ # Note: SharedCache.get() already logs cache hit at DEBUG level
109
+ if convert_to_model and self.model_class:
110
+ return self._convert_to_model(cached_doc, doc_id), cache_hit
111
+ else:
112
+ cached_doc['id'] = doc_id
113
+ return cached_doc, cache_hit
114
+
115
+ # Cache miss - fetch from Firestore
116
+ self.logger.debug(f"Cache MISS for document {doc_id} - checking Firestore")
117
+
118
+ try:
119
+ result = await super().get_document(doc_id, convert_to_model)
120
+
121
+ # Cache the result if we have a cache and got valid data
122
+ if self.document_cache and result is not None:
123
+ if convert_to_model and isinstance(result, BaseDataModel):
124
+ # Cache the model's dict representation
125
+ self._cache_document_data(doc_id, result.model_dump())
126
+ elif isinstance(result, dict):
127
+ # Cache the dict directly
128
+ self._cache_document_data(doc_id, result)
129
+
130
+ return result, cache_hit
131
+
132
+ except ResourceNotFoundError:
133
+ return None, cache_hit
72
134
 
73
135
  async def get_all_documents(self, cache_key: Optional[str] = None, as_models: bool = True) -> Union[List[T], List[Dict[str, Any]]]:
74
136
  """
@@ -151,7 +213,7 @@ class CacheAwareFirestoreService(BaseFirestoreService[T], Generic[T]):
151
213
  """Helper to cache document data if document_cache is available."""
152
214
  if self.document_cache:
153
215
  self.document_cache.set(doc_id, data)
154
- self.logger.debug(f"Cached item {doc_id} in {self.document_cache.name}")
216
+ # Note: SharedCache.set() already logs at DEBUG level
155
217
 
156
218
  async def create_document(self, doc_id: str, data: Union[T, Dict[str, Any]], creator_uid: str, merge: bool = False) -> Dict[str, Any]:
157
219
  """Create document and invalidate cache."""
@@ -0,0 +1,244 @@
1
+ """
2
+ Generic multi-collection cache-aware Firestore service.
3
+
4
+ This service extends CacheAwareFirestoreService to support dynamic collection operations
5
+ while maintaining all proven infrastructure patterns. It's designed to be generic and
6
+ reusable across different model types.
7
+ """
8
+ from typing import Dict, Any, List, Optional, Union, Type, TypeVar, Generic
9
+ from google.cloud import firestore
10
+ from .cache_aware_firestore_service import CacheAwareFirestoreService
11
+ from ...exceptions import ServiceError, ValidationError, ResourceNotFoundError
12
+ from ...cache.shared_cache import SharedCache
13
+ from ...models import BaseDataModel
14
+ import logging
15
+
16
+ # Generic type for BaseDataModel subclasses
17
+ T = TypeVar('T', bound=BaseDataModel)
18
+
19
+
20
+ class MultiCollectionCacheAwareFirestoreService(CacheAwareFirestoreService[T], Generic[T]):
21
+ """
22
+ Generic multi-collection extension of CacheAwareFirestoreService.
23
+
24
+ This service extends the proven CacheAwareFirestoreService infrastructure to support
25
+ dynamic collection operations based on storage_location_path while maintaining
26
+ all caching, error handling, and CRUD capabilities.
27
+
28
+ This is a generic base class that can be extended for specific model types.
29
+ """
30
+
31
+ def __init__(self,
32
+ db: firestore.Client,
33
+ logger: logging.Logger,
34
+ model_class: Type[T],
35
+ resource_type: str,
36
+ base_collection_name: str,
37
+ timeout: float = 30.0):
38
+
39
+ # Initialize the parent CacheAwareFirestoreService with a base collection
40
+ # We'll override the collection_name dynamically per operation
41
+ super().__init__(
42
+ db=db,
43
+ collection_name=base_collection_name, # Base collection name
44
+ resource_type=resource_type,
45
+ model_class=model_class,
46
+ logger=logger,
47
+ document_cache=None, # We'll manage caches per collection
48
+ collection_cache=None, # We'll manage caches per collection
49
+ timeout=timeout
50
+ )
51
+
52
+ # Cache for per-collection cache instances
53
+ self._collection_caches: Dict[str, Dict[str, SharedCache]] = {}
54
+
55
+ self.logger.info(f"MultiCollectionCacheAwareFirestoreService initialized for {resource_type}")
56
+
57
+ def _get_collection_caches(self, storage_location_path: str) -> Dict[str, SharedCache]:
58
+ """Get or create cache instances for a specific storage location."""
59
+ if storage_location_path not in self._collection_caches:
60
+ # Create collection-specific cache instances
61
+ # No need for safe_name transformation - dots are fine in strings
62
+
63
+ document_cache = SharedCache(
64
+ name=f"MultiColDoc_{storage_location_path}",
65
+ ttl=600.0, # 10 minutes
66
+ enabled=True,
67
+ logger=self.logger
68
+ )
69
+
70
+ collection_cache = SharedCache(
71
+ name=f"MultiColCollection_{storage_location_path}",
72
+ ttl=600.0, # 10 minutes
73
+ enabled=True,
74
+ logger=self.logger
75
+ )
76
+
77
+ self._collection_caches[storage_location_path] = {
78
+ 'document': document_cache,
79
+ 'collection': collection_cache
80
+ }
81
+
82
+ self.logger.info(f"Created cache instances for collection: {storage_location_path}")
83
+
84
+ return self._collection_caches[storage_location_path]
85
+
86
+ def _set_collection_context(self, storage_location_path: str):
87
+ """Set the collection context for the current operation."""
88
+ # Update the collection name for this operation
89
+ self.collection_name = storage_location_path
90
+
91
+ # Update the cache references for this collection
92
+ caches = self._get_collection_caches(storage_location_path)
93
+ self.document_cache = caches['document']
94
+ self.collection_cache = caches['collection']
95
+
96
+ async def get_document_from_collection(self,
97
+ storage_location_path: str,
98
+ doc_id: str,
99
+ convert_to_model: bool = True) -> Union[T, Dict[str, Any], None]:
100
+ """
101
+ Get a document from a specific collection using the cache-aware infrastructure.
102
+ """
103
+ try:
104
+ # Set collection context
105
+ self._set_collection_context(storage_location_path)
106
+
107
+ # Use the parent's cache-aware get_document method
108
+ return await super().get_document(doc_id, convert_to_model)
109
+
110
+ except ResourceNotFoundError:
111
+ self.logger.info(f"Document {doc_id} not found in {storage_location_path}")
112
+ return None
113
+ except Exception as e:
114
+ self.logger.error(f"Error getting document {doc_id} from {storage_location_path}: {str(e)}", exc_info=True)
115
+ raise ServiceError(
116
+ operation=f"getting document from {storage_location_path}",
117
+ error=e,
118
+ resource_type=self.resource_type,
119
+ resource_id=doc_id
120
+ ) from e
121
+
122
+ async def get_all_documents_from_collection(self,
123
+ storage_location_path: str,
124
+ cache_key: Optional[str] = None) -> List[T]:
125
+ """
126
+ Get all documents from a specific collection using cache-aware infrastructure.
127
+ """
128
+ try:
129
+ # Set collection context
130
+ self._set_collection_context(storage_location_path)
131
+
132
+ # Use cache key if not provided
133
+ if not cache_key:
134
+ cache_key = f"all_documents_{storage_location_path}"
135
+
136
+ # Use the parent's cache-aware get_all_documents method
137
+ results = await super().get_all_documents(cache_key=cache_key, as_models=True)
138
+
139
+ # Ensure we return model instances
140
+ model_results: List[T] = []
141
+ for item in results:
142
+ if isinstance(item, BaseDataModel) and self.model_class and isinstance(item, self.model_class):
143
+ model_results.append(item) # type: ignore
144
+ elif isinstance(item, dict) and self.model_class:
145
+ try:
146
+ model_results.append(self.model_class.model_validate(item))
147
+ except Exception as e:
148
+ self.logger.warning(f"Failed to convert dict to model: {e}")
149
+
150
+ return model_results
151
+
152
+ except Exception as e:
153
+ self.logger.error(f"Error getting all documents from {storage_location_path}: {str(e)}", exc_info=True)
154
+ raise ServiceError(
155
+ operation=f"getting all documents from {storage_location_path}",
156
+ error=e,
157
+ resource_type=self.resource_type
158
+ ) from e
159
+
160
+ async def create_document_in_collection(self,
161
+ storage_location_path: str,
162
+ doc_id: str,
163
+ data: Union[T, Dict[str, Any]],
164
+ creator_uid: str,
165
+ merge: bool = False) -> Dict[str, Any]:
166
+ """
167
+ Create a document in a specific collection using cache-aware infrastructure.
168
+ Automatically handles cache invalidation.
169
+ """
170
+ try:
171
+ # Set collection context
172
+ self._set_collection_context(storage_location_path)
173
+
174
+ # Use the parent's cache-aware create_document method
175
+ return await super().create_document(doc_id, data, creator_uid, merge)
176
+
177
+ except Exception as e:
178
+ self.logger.error(f"Error creating document {doc_id} in {storage_location_path}: {str(e)}", exc_info=True)
179
+ raise ServiceError(
180
+ operation=f"creating document in {storage_location_path}",
181
+ error=e,
182
+ resource_type=self.resource_type,
183
+ resource_id=doc_id
184
+ ) from e
185
+
186
+ async def update_document_in_collection(self,
187
+ storage_location_path: str,
188
+ doc_id: str,
189
+ update_data: Dict[str, Any],
190
+ updater_uid: str,
191
+ require_exists: bool = True) -> Dict[str, Any]:
192
+ """
193
+ Update a document in a specific collection using cache-aware infrastructure.
194
+ Automatically handles cache invalidation.
195
+ """
196
+ try:
197
+ # Set collection context
198
+ self._set_collection_context(storage_location_path)
199
+
200
+ # Use the parent's cache-aware update_document method
201
+ return await super().update_document(doc_id, update_data, updater_uid, require_exists)
202
+
203
+ except Exception as e:
204
+ self.logger.error(f"Error updating document {doc_id} in {storage_location_path}: {str(e)}", exc_info=True)
205
+ raise ServiceError(
206
+ operation=f"updating document in {storage_location_path}",
207
+ error=e,
208
+ resource_type=self.resource_type,
209
+ resource_id=doc_id
210
+ ) from e
211
+
212
+ async def delete_document_from_collection(self,
213
+ storage_location_path: str,
214
+ doc_id: str,
215
+ require_exists: bool = True) -> bool:
216
+ """
217
+ Delete a document from a specific collection using cache-aware infrastructure.
218
+ Automatically handles cache invalidation.
219
+ """
220
+ try:
221
+ # Set collection context
222
+ self._set_collection_context(storage_location_path)
223
+
224
+ # Use the parent's cache-aware delete_document method
225
+ return await super().delete_document(doc_id, require_exists)
226
+
227
+ except Exception as e:
228
+ self.logger.error(f"Error deleting document {doc_id} from {storage_location_path}: {str(e)}", exc_info=True)
229
+ raise ServiceError(
230
+ operation=f"deleting document from {storage_location_path}",
231
+ error=e,
232
+ resource_type=self.resource_type,
233
+ resource_id=doc_id
234
+ ) from e
235
+
236
+ def get_cache_stats(self) -> Dict[str, Any]:
237
+ """Get cache statistics for all collections managed by this service."""
238
+ stats = {}
239
+ for storage_path, caches in self._collection_caches.items():
240
+ stats[storage_path] = {
241
+ 'document_cache': caches['document'].get_stats(),
242
+ 'collection_cache': caches['collection'].get_stats()
243
+ }
244
+ return stats
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 27.2.0
3
+ Version: 27.4.0
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -42,6 +42,7 @@ src/ipulse_shared_core_ftredge/services/user_charging_service.py
42
42
  src/ipulse_shared_core_ftredge/services/base/__init__.py
43
43
  src/ipulse_shared_core_ftredge/services/base/base_firestore_service.py
44
44
  src/ipulse_shared_core_ftredge/services/base/cache_aware_firestore_service.py
45
+ src/ipulse_shared_core_ftredge/services/base/multi_collection_cache_aware_firestore_service.py
45
46
  src/ipulse_shared_core_ftredge/services/catalog/__init__.py
46
47
  src/ipulse_shared_core_ftredge/services/catalog/catalog_subscriptionplan_service.py
47
48
  src/ipulse_shared_core_ftredge/services/catalog/catalog_usertype_service.py
@@ -55,7 +56,5 @@ src/ipulse_shared_core_ftredge/services/user/userauth_operations.py
55
56
  src/ipulse_shared_core_ftredge/services/user/userprofile_operations.py
56
57
  src/ipulse_shared_core_ftredge/services/user/userstatus_operations.py
57
58
  src/ipulse_shared_core_ftredge/utils/__init__.py
58
- src/ipulse_shared_core_ftredge/utils/authz_credit_extraction.py
59
59
  src/ipulse_shared_core_ftredge/utils/custom_json_encoder.py
60
- src/ipulse_shared_core_ftredge/utils/json_encoder.py
61
- tests/test_shared_cache.py
60
+ src/ipulse_shared_core_ftredge/utils/json_encoder.py
@@ -1,147 +0,0 @@
1
- """Tests for the SharedCache implementation."""
2
-
3
- import time
4
- import unittest
5
- import logging
6
- from ipulse_shared_core_ftredge.cache.shared_cache import SharedCache
7
-
8
- # Configure logging for tests
9
- logging.basicConfig(level=logging.INFO)
10
- logger = logging.getLogger(__name__)
11
-
12
-
13
- class TestSharedCache(unittest.TestCase):
14
- """Test cases for SharedCache."""
15
-
16
- def setUp(self):
17
- """Set up test fixtures."""
18
- self.cache = SharedCache[str](
19
- name="TestCache",
20
- ttl=0.5, # Short TTL for faster testing
21
- enabled=True,
22
- logger=logger
23
- )
24
-
25
- def test_cache_set_get(self):
26
- """Test basic cache set and get operations."""
27
- # Set a value
28
- self.cache.set("test_key", "test_value")
29
-
30
- # Get the value
31
- cached_value = self.cache.get("test_key")
32
-
33
- # Verify value was cached
34
- self.assertEqual(cached_value, "test_value")
35
-
36
- def test_cache_ttl_expiration(self):
37
- """Test cache TTL expiration."""
38
- # Set a value
39
- self.cache.set("expiring_key", "expiring_value")
40
-
41
- # Verify it's initially cached
42
- self.assertEqual(self.cache.get("expiring_key"), "expiring_value")
43
-
44
- # Wait for TTL to expire
45
- time.sleep(0.6) # Slightly longer than TTL (0.5s)
46
-
47
- # Verify value is no longer cached
48
- self.assertIsNone(self.cache.get("expiring_key"))
49
-
50
- def test_cache_invalidate(self):
51
- """Test cache invalidation."""
52
- # Set multiple values
53
- self.cache.set("key1", "value1")
54
- self.cache.set("key2", "value2")
55
-
56
- # Invalidate specific key
57
- self.cache.invalidate("key1")
58
-
59
- # Verify key1 is gone but key2 remains
60
- self.assertIsNone(self.cache.get("key1"))
61
- self.assertEqual(self.cache.get("key2"), "value2")
62
-
63
- def test_cache_invalidate_all(self):
64
- """Test invalidating all cache entries."""
65
- # Set multiple values
66
- self.cache.set("key1", "value1")
67
- self.cache.set("key2", "value2")
68
-
69
- # Invalidate all
70
- self.cache.invalidate_all()
71
-
72
- # Verify both keys are gone
73
- self.assertIsNone(self.cache.get("key1"))
74
- self.assertIsNone(self.cache.get("key2"))
75
-
76
- def test_cache_get_or_set(self):
77
- """Test get_or_set functionality."""
78
- # Define a counter to verify how many times the loader is called
79
- counter = [0]
80
-
81
- def data_loader():
82
- counter[0] += 1
83
- return f"loaded_value_{counter[0]}"
84
-
85
- # First call should use data_loader
86
- value1, was_cached1 = self.cache.get_or_set("loader_key", data_loader)
87
-
88
- # Second call should use cached value
89
- value2, was_cached2 = self.cache.get_or_set("loader_key", data_loader)
90
-
91
- # Verify results
92
- self.assertEqual(value1, "loaded_value_1")
93
- self.assertEqual(value2, "loaded_value_1") # Same value from cache
94
- self.assertFalse(was_cached1) # First call was not cached
95
- self.assertTrue(was_cached2) # Second call was cached
96
- self.assertEqual(counter[0], 1) # Loader called exactly once
97
-
98
- def test_cache_disabled(self):
99
- """Test cache behavior when disabled."""
100
- # Create disabled cache
101
- disabled_cache = SharedCache[str](
102
- name="DisabledCache",
103
- ttl=1.0,
104
- enabled=False,
105
- logger=logger
106
- )
107
-
108
- # Set a value
109
- disabled_cache.set("disabled_key", "disabled_value")
110
-
111
- # Attempt to get - should return None since cache is disabled
112
- cached_value = disabled_cache.get("disabled_key")
113
- self.assertIsNone(cached_value)
114
-
115
- def test_cache_generic_typing(self):
116
- """Test cache with different data types."""
117
- # Integer cache
118
- int_cache = SharedCache[int](name="IntCache", ttl=1.0, enabled=True)
119
- int_cache.set("int_key", 123)
120
- self.assertEqual(int_cache.get("int_key"), 123)
121
-
122
- # Dictionary cache
123
- dict_cache = SharedCache[dict](name="DictCache", ttl=1.0, enabled=True)
124
- dict_cache.set("dict_key", {"a": 1, "b": 2})
125
- self.assertEqual(dict_cache.get("dict_key"), {"a": 1, "b": 2})
126
-
127
- def test_cache_stats(self):
128
- """Test cache statistics."""
129
- # Add some data
130
- self.cache.set("stats_key1", "stats_value1")
131
- self.cache.set("stats_key2", "stats_value2")
132
-
133
- # Get stats
134
- stats = self.cache.get_stats()
135
-
136
- # Verify stats
137
- self.assertEqual(stats["name"], "TestCache")
138
- self.assertEqual(stats["enabled"], True)
139
- self.assertEqual(stats["ttl_seconds"], 0.5)
140
- self.assertEqual(stats["item_count"], 2)
141
- self.assertIn("stats_key1", stats["first_20_keys"])
142
- self.assertIn("stats_key2", stats["first_20_keys"])
143
- self.assertEqual(stats["total_keys"], 2)
144
-
145
-
146
- if __name__ == "__main__":
147
- unittest.main()