ipulse-shared-core-ftredge 2.8__tar.gz → 2.8.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.
Files changed (34)
  1. {ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.8.1}/PKG-INFO +4 -2
  2. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/README.md +5 -0
  3. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/setup.py +5 -2
  4. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/__init__.py +18 -0
  5. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/__init__.py +1 -0
  6. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py +144 -0
  7. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/utils_logging.py +72 -0
  8. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -1
  9. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/models/organisation.py +71 -0
  10. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +115 -0
  11. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +5 -5
  12. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/models/user_profile_update.py +36 -0
  13. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_status.py +12 -9
  14. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/__init__.py +3 -0
  15. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +10 -0
  16. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +4 -2
  17. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +7 -4
  18. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info/requires.txt +5 -0
  19. ipulse_shared_core_ftredge-2.8.1/tests/test_utils_gcp.py +189 -0
  20. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/__init__.py +0 -5
  21. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/models/organisation.py +0 -65
  22. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/models/pulse_enums.py +0 -222
  23. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -189
  24. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -18
  25. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  26. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/tests/test.py +0 -17
  27. ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -3
  28. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/LICENCE +0 -0
  29. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/pyproject.toml +0 -0
  30. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/setup.cfg +0 -0
  31. {ipulse_shared_core_ftredge-2.8/src/ipulse_shared_core_ftredge/models → ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging}/audit_log_firestore.py +0 -0
  32. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  33. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  34. {ipulse_shared_core_ftredge-2.8 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
@@ -1,10 +1,12 @@
  Metadata-Version: 2.1
  Name: ipulse_shared_core_ftredge
- Version: 2.8
- Summary: Shared models for the Pulse platform project. Using AI for financial advisory and investment management.
+ Version: 2.8.1
+ Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
  Author: Russlan Ramdowar
  License-File: LICENCE
  Requires-Dist: pydantic[email]~=2.5
  Requires-Dist: python-dateutil~=2.8
  Requires-Dist: pytest~=7.1
+ Requires-Dist: google-cloud-logging~=3.10.0
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
@@ -1,2 +1,7 @@
  # ipulse_shared_core
  Shared Models like User, Organisation etc. Also includes shared enum_sets
+
+
+ ### Enums
+
+ Contains majority of all Enums used in Pulse
@@ -1,8 +1,9 @@
+ # pylint: disable=import-error
  from setuptools import setup, find_packages

  setup(
  name='ipulse_shared_core_ftredge',
- version='2.8',
+ version='2.8.1',
  package_dir={'': 'src'}, # Specify the source directory
  packages=find_packages(where='src'), # Look for packages in 'src'
  install_requires=[
@@ -10,8 +11,10 @@ setup(
  'pydantic[email]~=2.5',
  'python-dateutil~=2.8',
  'pytest~=7.1',
+ 'google-cloud-logging~=3.10.0',
+ 'google-cloud-error-reporting~=1.11.0'
  ],
  author='Russlan Ramdowar',
- description='Shared models for the Pulse platform project. Using AI for financial advisory and investment management.',
+ description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',
  url='https://github.com/TheFutureEdge/ipulse_shared_core',
  )
@@ -0,0 +1,18 @@
+ # pylint: disable=missing-module-docstring
+ from .models import ( UserAuth, UserProfile,
+                       UserStatus, UserProfileUpdate)
+
+ from .enums import (TargetLogs,LogLevel, Status, Unit, Frequency,
+                     Module, Domain, FinCoreCategory, FincCoreSubCategory,
+                     FinCoreRecordsCategory, FinancialExchangeOrPublisher,
+                     DataPrimaryCategory, DataState, DatasetScope,
+                     DataSourceType,PipelineTriggerType,DataOperationType,
+                     MatchConditionType, DuplicationHandling, DuplicationHandlingStatus,
+                     CodingLanguage, ExecutionLocation, ExecutionComputeType,
+                     CloudProvider,LoggingHandler)
+ from .utils import (list_as_strings)
+
+ from .logging import (get_logger,
+                       log_error,
+                       log_warning,
+                       log_info)
@@ -0,0 +1 @@
+ from .utils_logging import get_logger, log_error, log_warning, log_info
@@ -0,0 +1,144 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+
+ import logging
+ import traceback
+ import json
+ import os
+ from google.cloud import error_reporting
+ from google.cloud import logging as cloud_logging
+
+ ##########################################################################################################################
+ #################################### Custom logging FORMATTERS #####################################################
+ ##########################################################################################################################
+
+
+ class CloudLogFormatter(logging.Formatter):
+     """Formats log records as structured JSON."""
+
+     def format(self, record):
+         log_entry = {
+             'message': record.msg,
+             'timestamp': self.formatTime(record, self.datefmt),
+             'name': record.name,
+             'severity': record.levelname,
+             'pathname': record.pathname,
+             'lineno': record.lineno,
+         }
+         if record.exc_info:
+             log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
+         if isinstance(record.msg, dict):
+             log_entry.update(record.msg)
+         return json.dumps(log_entry)
+
+
+ class LocalLogFormatter(logging.Formatter):
+     """Formats log records for local output to the console."""
+
+     def format(self, record): # Make sure you have the 'record' argument here!
+         path_parts = record.pathname.split(os.sep)
+
+         # Get the last two parts of the path if they exist
+         if len(path_parts) >= 2:
+             short_path = os.path.join(path_parts[-2], path_parts[-1])
+         else:
+             short_path = record.pathname
+
+         # Format log messages differently based on the log level
+         if record.levelno == logging.INFO:
+             log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
+         elif record.levelno == logging.DEBUG:
+             log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+         elif record.levelno == logging.ERROR:
+             log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+             if record.exc_info:
+                 log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+         else:
+             log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+
+
+         return log_message
+
+ #############################################################################################################################################
+ ######################################## Logging handlers for Google Cloud ########################################
+ #############################################################################################################################################
+
+ class CustomGCPLoggingHandler(cloud_logging.handlers.CloudLoggingHandler):
+     """Custom handler for Google Cloud Logging with a dynamic logName."""
+     def __init__(self, client, name, resource=None, labels=None):
+         super().__init__(client=client, name=name, resource=resource, labels=labels)
+         self.client = client  # Ensure client is consistently used
+
+     def emit(self, record):
+         try:
+             # 1. Create the basic log entry dictionary
+             log_entry = {
+                 'message': record.msg,
+                 'severity': record.levelname,
+                 'name': record.name,
+                 'pathname': record.filename,
+                 'lineno': record.lineno,
+             }
+             if record.exc_info:
+                 log_entry['exception_traceback'] = ''.join(
+                     traceback.format_exception(*record.exc_info)
+                 )
+
+             # 2. Apply the formatter to the 'message' field if it's a dictionary
+             if isinstance(record.msg, dict):
+                 formatted_message = self.formatter.format(record)
+                 try:
+                     log_entry['message'] = json.loads(formatted_message)
+                 except json.JSONDecodeError:
+                     log_entry['message'] = formatted_message
+             else:
+                 log_entry['message'] = record.msg
+
+             # 3. Set the custom logName
+             log_entry['logName'] = f"projects/{self.client.project}/logs/{record.name}"
+
+             # 4. Send to Google Cloud Logging
+             super().emit(record)
+         except Exception as e:
+             self.handleError(record)
+
+ class CustomGCPErrorReportingHandler(logging.Handler):
+     def __init__(self, client=None, level=logging.ERROR):
+         super().__init__(level)
+         self.error_client = error_reporting.Client() if client is None else client
+         self.propagate = True
+
+     def emit(self, record):
+         try:
+             if record.levelno >= logging.ERROR:
+                 log_struct = {
+                     'message': self.format(record),
+                     'severity': record.levelname,
+                     'pathname': getattr(record, 'pathname', None),
+                     'lineno': getattr(record, 'lineno', None)
+                 }
+                 if record.exc_info:
+                     log_struct['exception'] = ''.join(
+                         traceback.format_exception(*record.exc_info)
+                     )
+                 self.error_client.report(str(log_struct))
+         except Exception as e:
+             self.handleError(record)
+
+
+ def add_gcp_cloud_logging(logger, formatter, client=None):
+     """Sets up Google Cloud Logging for the logger."""
+     client = client or cloud_logging.Client()
+     handler = CustomGCPLoggingHandler(client, logger.name)
+     handler.setFormatter(formatter)
+     logger.addHandler(handler)
+
+ def add_gcp_error_reporting(logger, client=None):
+     """Sets up Google Cloud Error Reporting for the logger."""
+     client = client or error_reporting.Client()
+     handler = CustomGCPErrorReportingHandler(client=client)
+     logger.addHandler(handler)
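Editor's note: a minimal usage sketch (not part of the release) showing how the LocalLogFormatter added above could be attached to a standard-library logger for console output. It only uses the stdlib plus this module, and assumes the import path listed in SOURCES.txt; the GCP handlers would additionally require Google Cloud credentials.

```python
# Editor's usage sketch, not part of the package: wiring LocalLogFormatter
# (defined in the file above) onto a plain StreamHandler.
import logging

from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import LocalLogFormatter

logger = logging.getLogger("pulse.example")
logger.setLevel(logging.DEBUG)

console = logging.StreamHandler()
console.setFormatter(LocalLogFormatter())
logger.addHandler(console)

logger.info("service started")      # [INFO] <timestamp> :: service started
logger.debug("cache warmed")        # [DEBUG] line also carries short path, lineno and logger name
try:
    1 / 0
except ZeroDivisionError:
    logger.error("division failed", exc_info=True)  # [ERROR] line followed by the traceback
```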
@@ -0,0 +1,72 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+ import logging
+ from typing import List, Union
+ from ipulse_shared_core_ftredge.enums import LoggingHandler
+ from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (CloudLogFormatter,
+                                                                                  LocalLogFormatter,
+                                                                                  add_gcp_cloud_logging,
+                                                                                  add_gcp_error_reporting)
+
+
+ def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandler, List[LoggingHandler]] = LoggingHandler.NONE):
+
+     """Creates and configures a logger with the specified handlers."""
+
+     logger = logging.getLogger(logger_name)
+     logger.setLevel(level)
+     cloud_formatter = CloudLogFormatter()
+
+     # Ensure logging_handler_providers is a list for consistent processing
+     if not isinstance(logging_handler_providers, list):
+         logging_handler_providers = [logging_handler_providers]
+
+     supported_remote_handlers = [
+         LoggingHandler.GCP_CLOUD_LOGGING,
+         LoggingHandler.GCP_ERROR_REPORTING,
+         LoggingHandler.LOCAL_STREAM,
+         LoggingHandler.NONE,  # If NONE is considered a remote handler
+     ]
+
+     # Remote handlers
+
+     for handler_provider in logging_handler_providers:
+         if handler_provider in supported_remote_handlers:
+             if handler_provider == LoggingHandler.GCP_CLOUD_LOGGING:
+                 add_gcp_cloud_logging(logger, cloud_formatter)
+             elif handler_provider == LoggingHandler.GCP_ERROR_REPORTING:
+                 add_gcp_error_reporting(logger)
+             elif handler_provider == LoggingHandler.LOCAL_STREAM:  # Handle local stream
+                 local_handler = logging.StreamHandler()
+                 local_handler.setFormatter(LocalLogFormatter())
+                 logger.addHandler(local_handler)
+         else:
+             raise ValueError(
+                 f"Unsupported logging provider: {handler_provider}. "
+                 f"Supported providers: {[h.value for h in supported_remote_handlers]}"
+             )
+     return logger
+
+
+ def log_error(msg,logger=None , print_out=False, exc_info=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.error(msg, exc_info=exc_info)
+
+ def log_warning(msg, logger=None, print_out=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.warning(msg)
+
+
+ def log_info(msg, logger=None, print_out=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.info(msg)
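Editor's note: a hedged usage sketch (not part of the release) for the get_logger helper above. It mirrors the imports the code itself references; note that SOURCES.txt does not list an enums module, so the `ipulse_shared_core_ftredge.enums` import path is taken from the code above rather than guaranteed to resolve in this release. The GCP handler options would also need Google Cloud credentials and the google-cloud-* dependencies added in 2.8.1.

```python
# Editor's usage sketch, not part of the package. Assumes the top-level
# exports shown in __init__.py (get_logger, log_info, log_error) and the
# LoggingHandler members referenced in get_logger above.
import logging

from ipulse_shared_core_ftredge import get_logger, log_info, log_error
from ipulse_shared_core_ftredge.enums import LoggingHandler

# Console-only logger via the LOCAL_STREAM handler provider.
logger = get_logger("pulse.jobs.ingest",
                    level=logging.DEBUG,
                    logging_handler_providers=LoggingHandler.LOCAL_STREAM)

log_info("ingest started", logger=logger, print_out=True)
try:
    raise RuntimeError("source unavailable")
except RuntimeError:
    log_error("ingest failed", logger=logger, exc_info=True)
```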
@@ -1,7 +1,6 @@
  from .user_profile import UserProfile
  from .user_status import UserStatus
  from .user_profile_update import UserProfileUpdate
- from .organisation import Organisation
  from .user_auth import UserAuth


@@ -0,0 +1,71 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+ # pylint: disable=line-too-long
+ # pylint: disable=unused-variable
+ # pylint: disable=broad-exception-caught
+ # from pydantic import BaseModel, validator, ValidationError, Field
+ # from typing import Set, Optional
+ # import uuid
+ # from datetime import datetime
+ # import dateutil.parser
+
+ # CLASS_VERSION= 1.0
+ # MODULE= "core"
+ # CLASS_REF="orgn"
+
+
+ # class Organisation(BaseModel):
+ # puid: str = Field(default_factory=f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower())
+ # name: str
+ # creat_date: datetime = Field(default_factory=datetime.utcnow())
+ # updt_date: datetime = Field(default_factory=datetime.utcnow())
+ # creat_by_user: Optional[str] = None
+ # updt_by_user: Optional[str] = None
+ # relations: Optional[Set[str]]=None
+ # description: Optional[str] = None # Updated to use Optional
+ # industries: Optional[Set[str]] = None # Updated to use Optional
+ # website: Optional[str] = None # Updated to use Optional
+ # org_admin_user_uids: Optional[Set[str]] = None # Updated to use Optional
+ # class Config:
+ # extra = "forbid"
+
+
+ # @validator('relations', pre=True, always=True)
+ # def validate_relations(cls, relations):
+ # if not set(relations).issubset(enums.organisation_relations):
+ # raise ValueError("Invalid relation values provided.")
+ # return relations
+
+
+ # @validator('industries', pre=True, always=True)
+ # def validate_industries(cls, industries):
+ # if industries is not None and not set(industries).issubset(enums.organisation_industries):
+ # raise ValueError("Invalid industry values provided.")
+ # return industries
+
+ # @validator('creat_date', 'updt_date', pre=True)
+ # def parse_date(cls, value):
+ # if value is None:
+ # return value
+ # if isinstance(value, datetime):
+ # return value
+ # try:
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
+ # print("Putting Updt or Creat date in a valid format in a Validator when creating Organisation object")
+ # return dateutil.parser.isoparse(value)
+ # except (TypeError, ValueError):
+ # raise ValidationError(f"Invalid datetime format inside Organisation: {value}")
+
+
+ # ### Description, Industries, and Website are optional for Retail Customer and mandatory for Non Retail Customer
+ # @validator('description', 'industries', 'website', pre=True, always=True)
+ # def validate_optional_fields(cls, value, values):
+ # if values.get('name') == 'Retail Customer' and values.get('relations') == {"retail_customer"} or values.get('relations') == ["retail_customer"]:
+ # if value is not None:
+ # raise ValueError("For 'Retail Customer' with only 'retail_customer' relations, description, industries, and website should not be provided.")
+ # else:
+ # if value is None:
+ # raise ValueError("For Non Retail Customer, description, industries, and website are mandatory.")
+ # return value
@@ -0,0 +1,115 @@
+ # import uuid
+ # from datetime import datetime
+ # from pydantic import BaseModel, validator, ValidationError
+ # from typing import Dict, Any, Set, Optional
+
+ # import dateutil.parser
+
+ # CLASS_VERSION = 1.0
+ # CLASS_REF = "resdes"
+ # MODULE = "core"
+
+ # class ResourceCatalogItem(BaseModel):
+
+ # resr_puid_or_name: str #Ex: username
+ # resr_path: str #Ex: ipulse-401013/cloud/firesotre/Users/{user_uid}/username
+ # resr_name: str #Ex: username
+ # resr_pulse_module: str #Ex: core
+ # resr_type: str
+ # resr_classifications: Set[str]
+ # resr_contents:Set[str]
+ # resr_original_or_processed: str
+ # resr_origin: str
+ # resr_origin_organisations_uids: Set[str]
+ # resr_origin_description: str
+ # resr_licences_types: Set[str]
+ # resr_description_details: str
+ # resr_updtbl_by_non_staff: bool
+ # resr_creat_by_user_uid: str
+ # resr_creat_date: datetime
+ # class_version:float = CLASS_VERSION
+ # resr_columns_count: int
+ # resr_columns: Optional[Dict[Any, Any]] = None #OPTIONAL
+ # resr_structure_version: Optional[str]=None # OPTIONAL
+ # resr_structure_updt_date: Optional[str]=None #OPTIONAL
+ # resr_structure_updt_by_user_uid: Optional[str]=None # OPTIONAL
+ # resr_tags: Optional[Dict[Any, Any]] = None #OPTIONAL
+ # resr_content_updt_date: Optional[str]=None #OPTIONAL
+ # resr_content_updt_by_user_uid: Optional[str]=None # OPTIONAL
+ # puid: Optional[str] = None #TO BE SETUP BY Validator
+ # metadata_version: Optional[float] = None #TO BE SETUP BY Validator
+ # metadata_creat_date: Optional[datetime] = None #TO BE SETUP BY Management Service
+ # metadata_creat_by: Optional[str] = None #TO BE SETUP BY Management Service
+ # metadata_updt_date: Optional[datetime] = None #TO BE SETUP BY Management Service
+ # metadata_updt_by: Optional[str] = None #TO BE SETUP BY Management Service
+
+ # @validator('puid', pre=True, always=True)
+ # def set_puid(cls, puid, values):
+ # if puid is None:
+ # return f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower()
+ # return puid
+
+ # @validator('metadata_version', pre=True, always=True)
+ # def set_metadata_version(cls, metadata_version, values):
+ # if metadata_version is None:
+ # return 1.0
+ # else:
+ # return metadata_version + 0.1
+
+
+ # @validator('resr_pulse_module', pre=True, always=True)
+ # def validate_resr_pulse_module(cls, resr_pulse_modules):
+ # if resr_pulse_modules not in enums.pulse_modules:
+ # raise ValueError("Invalid pulse_modules values provided.")
+ # return resr_pulse_modules
+
+ # @validator('resr_type', pre=True, always=True)
+ # def validate_resr_type(cls, resr_type):
+ # if resr_type not in enums.resource_types:
+ # raise ValueError("Invalid resource_types value provided.")
+ # return resr_type
+
+ # @validator('resr_classifications', pre=True, always=True)
+ # def validate_resr_classifications(cls, resr_classifications):
+ # if not resr_classifications.issubset(enums.resource_classifications):
+ # raise ValueError("Invalid resr_classifications values provided.")
+ # return resr_classifications
+
+ # @validator('resr_contents', pre=True, always=True)
+ # def validate_resr_contents(cls, resr_contents):
+ # if not resr_contents.issubset(enums.resource_contents):
+ # raise ValueError("Invalid resr_contents values provided.")
+ # return resr_contents
+
+ # @validator('resr_original_or_processed', pre=True, always=True)
+ # def validate_resr_original_or_processed(cls, resr_original_or_processed):
+ # if resr_original_or_processed not in enums.resource_original_or_processed:
+ # raise ValueError("Invalid resr_original_or_processed value provided.")
+ # return resr_original_or_processed
+
+ # @validator('resr_origin', pre=True, always=True)
+ # def validate_resr_origin(cls, resr_origin):
+ # if resr_origin not in enums.resource_origins:
+ # raise ValueError("Invalid resource_origins value provided.")
+ # return resr_origin
+
+
+ # @validator('metadata_creat_date', 'metadata_updt_date', pre=True)
+ # def parse_date(cls, value):
+ # if value is None:
+ # return value
+ # if isinstance(value, datetime):
+ # return value
+ # try:
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
+ # return dateutil.parser.isoparse(value)
+ # except (TypeError, ValueError):
+ # raise ValidationError(f"Invalid datetime format inside Resource Description: {value}")
+
+
+
+ # @validator('metadata_updt_date', 'metadata_updt_date', pre=True, always=True)
+ # def set_default_updt_date(cls, date, values):
+ # if date is None:
+ # return datetime.utcnow().isoformat()
+ # return date
@@ -11,12 +11,12 @@ CLASS_ORGIN_DATE=datetime(2024, 1, 16, 20, 5)
  CLASS_VERSION = 3.01
  CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
+ LAST_MODIFICATION="Fixed typo"
+

  DOMAIN="user"
  OBJ_REF = "usprfl"

-
-
  class UserProfile(BaseModel):
  schema_version: float = Field(default=CLASS_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
  # uid: str = Field(frozen=True, description="Combination of user_usrprof_<Firebase Auth UID>") #User can Read only ---> STORED as Firestore Doc ID
@@ -33,9 +33,9 @@ class UserProfile(BaseModel):
  provider_id: str #User can Read only

  username: Optional[str] = None #User can Read and Edit
- dob: Optional[date] = None #User can Read and Edit
- first_name: Optional[str] = None #User can Read and Edit
- last_name: Optional[str] = None #User can Read and Edit
+ dob: Optional[date] = None #User can Read and Edit
+ first_name: Optional[str] = None #User can Read and Edit
+ last_name: Optional[str] = None #User can Read and Edit
  mobile: Optional[str] = None #User can Read and Edit
  class Config:
  extra = "forbid"
@@ -0,0 +1,36 @@
+ from typing import Optional, Set
+ from pydantic import BaseModel, Field, EmailStr
+ from datetime import date, datetime
+
+
+ CLASS_ORIGIN_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
+ CLASS_ORGIN_DATE=datetime(2024, 3, 15, 20, 15)
+
+ CLASS_VERSION = 2.01
+ CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
+ CLASS_REVISION_DATE=datetime(2024, 3, 15, 20, 15)
+ LAST_MODIFICATION="Created , with all fields Optional"
+
+ class UserProfileUpdate(BaseModel):
+     schema_version: Optional[float] = Field(None, description="Version of this Class == version of DB Schema")
+     email: Optional[EmailStr] = Field(None, description="Propagated from Firebase Auth")
+     organizations_uids: Optional[Set[str]] = Field(None, description="Depends on Subscription Plan, Regularly Updated")
+     creat_date: Optional[datetime] = Field(None, description="Creation date")
+     creat_by_user: Optional[str] = Field(None, description="Created by user")
+     updt_date: Optional[datetime] = Field(None, description="Update date")
+     updt_by_user: Optional[str] = Field(None, description="Updated by user")
+     aliases: Optional[Set[str]] = Field(None, description="User aliases")
+     provider_id: Optional[str] = Field(None, description="Provider ID")
+
+     username: Optional[str] = Field(None, description="Username")
+     dob: Optional[date] = Field(None, description="Date of Birth")
+     first_name: Optional[str] = Field(None, description="First Name")
+     last_name: Optional[str] = Field(None, description="Last Name")
+     mobile: Optional[str] = Field(None, description="Mobile Number")
+
+     # def model_dump(self, **kwargs):
+     #     return super().model_dump(exclude_none=True, **kwargs)
+
+
+
+
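Editor's note: because every field on UserProfileUpdate is optional, a caller can build a sparse patch and serialize only the provided fields. A small sketch (editor's illustration, not from the release) using Pydantic v2's model_dump(exclude_none=True), which is what the commented-out override above would have made the default behaviour.

```python
# Editor's illustration, not part of the package: a partial profile update
# where only the supplied fields survive serialization.
from ipulse_shared_core_ftredge.models import UserProfileUpdate

patch = UserProfileUpdate(first_name="Russlan", mobile="+230 5555 5555")

# exclude_none=True drops the untouched Optional fields, mirroring the
# commented-out model_dump override in the class above.
print(patch.model_dump(exclude_none=True))
# {'first_name': 'Russlan', 'mobile': '+230 5555 5555'}
```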
@@ -1,42 +1,45 @@
  from pydantic import BaseModel, Field
+
  from datetime import datetime
  from dateutil.relativedelta import relativedelta
- from typing import Set, Optional
- # import uuid
- from . import pulse_enums as enums
+ from typing import Set, Optional, Dict, List
+
+


  CLASS_ORIGIN_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)

- CLASS_VERSION = 2.1
+ SCHEMA_VERSION = 2.3
  CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
+ LAST_MODIFICATION="Changed default IAM_GROUPS"

  DOMAIN="user"
  OBJ_REF = "usrsttus"

- DEFAULT_USER_GROUPS={"pulseroot__authuser_open"}
+ DEFAULT_IAM_GROUPS={"pulseroot":["full_open_read"]}
  DEFAULT_SUBSCRIPTION_PLAN="subscription_free"
  DEFAULT_SUBSCRIPTION_STATUS="active"
- DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=7
+ DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=10
  DEFAULT_EXTRA_INSIGHT_CREDITS=0

+ ############################################ !!!!! ALWAYS UPDATE SCHEMA VERSION , IF SCHEMA IS BEING MODIFIED !!! ############################################
  class UserStatus(BaseModel):
- schema_version: float = Field(default=CLASS_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
+ schema_version: float = Field(default=SCHEMA_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
  # uid: str = Field(frozen=True, description="Generated by Firebase Auth") #User can Read only
  # puid:str = Field(default_factory=lambda: f"{DOMAIN}{OBJ_REF}{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}".lower(),
  # frozen=True,
  # description="Generated Automatically by default_factory") #User can Read only

- iam_groups: Set[str] = Field(default_factory=lambda:DEFAULT_USER_GROUPS, description="User's Groups, with a default one for all authenticated Pulse users") #User can Read only
+ iam_groups: Dict[str, List[str]] = Field(default_factory=lambda:DEFAULT_IAM_GROUPS, description="User's Groups, with a default one for all authenticated Pulse users") #User can Read only
  sbscrptn_plan: str=Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_PLAN, description="Subscription Plan ") #User can Read only
  sbscrptn_status: str=Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_STATUS, description="Subscription Status") #User can Read only
  sbscrptn_start_date: datetime=Field(default_factory=lambda:datetime.utcnow(), description="Subscription Start Date") #User can Read only
  sbscrptn_end_date: datetime=Field(default_factory=lambda:datetime.utcnow()+relativedelta(years=1) , description="Subscription End Date") #User can Read only
  sbscrptn_insight_credits: int= Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS, description="Depends on Subscription Plan, Set Amount udated at Regular Intervals or at Regular Time") #User can Read only
  sbscrptn_ins_crdts_updtd_since_datetime: datetime=Field(default_factory=lambda:datetime.utcnow(), description="Subscription Start Date") #User can Read only #User can Read only #User can Read only
- extra_insigth_credits: int= Field(default_factory=lambda:DEFAULT_EXTRA_INSIGHT_CREDITS, description="If user purchased extra Insigth Credits they shouldn't Expire") #User can Read only
+ extra_insight_credits: int= Field(default_factory=lambda:DEFAULT_EXTRA_INSIGHT_CREDITS, description="If user purchased extra Insigth Credits they shouldn't Expire") #User can Read only
  payment_refs_uids: Optional[Set[str]] = None #User can Read only

  creat_date: datetime #User can Read only
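Editor's note: the iam_groups field changes shape in this release, from a flat set of group names to a mapping of realm to permission list. A brief sketch (editor's illustration with the default values from the diff, not code from the release) of what that means for consuming code:

```python
# Editor's illustration (hypothetical consumer code, not from the release)
# of the iam_groups shape change in 2.8.1.
old_iam_groups = {"pulseroot__authuser_open"}        # 2.8:   Set[str]
new_iam_groups = {"pulseroot": ["full_open_read"]}   # 2.8.1: Dict[str, List[str]]

# Membership checks move from a set lookup to a key + list lookup:
was_member = "pulseroot__authuser_open" in old_iam_groups
has_open_read = "full_open_read" in new_iam_groups.get("pulseroot", [])
print(was_member, has_open_read)  # True True
```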
@@ -0,0 +1,3 @@
+ # pylint: disable=missing-module-docstring
+
+ from .utils_common import (list_as_strings)
@@ -0,0 +1,10 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+
+ def list_as_strings(*enums):
+     """Converts a list of Enum members to their string values."""
+     return [str(enum) for enum in enums]
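Editor's note: a short usage sketch (not part of the release) for list_as_strings above. It accepts enum members as positional arguments and returns str(member) for each, which for a plain Enum is "ClassName.MEMBER" unless the enum overrides __str__; the Colour enum below is hypothetical.

```python
# Editor's usage sketch, not part of the package.
from enum import Enum

from ipulse_shared_core_ftredge.utils import list_as_strings

class Colour(Enum):  # hypothetical enum for illustration
    RED = "red"
    GREEN = "green"

print(list_as_strings(Colour.RED, Colour.GREEN))
# ['Colour.RED', 'Colour.GREEN']
```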
@@ -1,10 +1,12 @@
  Metadata-Version: 2.1
  Name: ipulse_shared_core_ftredge
- Version: 2.8
- Summary: Shared models for the Pulse platform project. Using AI for financial advisory and investment management.
+ Version: 2.8.1
+ Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
  Author: Russlan Ramdowar
  License-File: LICENCE
  Requires-Dist: pydantic[email]~=2.5
  Requires-Dist: python-dateutil~=2.8
  Requires-Dist: pytest~=7.1
+ Requires-Dist: google-cloud-logging~=3.10.0
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
@@ -8,14 +8,17 @@ src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
  src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
  src/ipulse_shared_core_ftredge.egg-info/requires.txt
  src/ipulse_shared_core_ftredge.egg-info/top_level.txt
+ src/ipulse_shared_core_ftredge/logging/__init__.py
+ src/ipulse_shared_core_ftredge/logging/audit_log_firestore.py
+ src/ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py
+ src/ipulse_shared_core_ftredge/logging/utils_logging.py
  src/ipulse_shared_core_ftredge/models/__init__.py
- src/ipulse_shared_core_ftredge/models/audit_log_firestore.py
  src/ipulse_shared_core_ftredge/models/organisation.py
- src/ipulse_shared_core_ftredge/models/pulse_enums.py
  src/ipulse_shared_core_ftredge/models/resource_catalog_item.py
  src/ipulse_shared_core_ftredge/models/user_auth.py
  src/ipulse_shared_core_ftredge/models/user_profile.py
  src/ipulse_shared_core_ftredge/models/user_profile_update.py
  src/ipulse_shared_core_ftredge/models/user_status.py
- src/ipulse_shared_core_ftredge/tests/__init__.py
- src/ipulse_shared_core_ftredge/tests/test.py
+ src/ipulse_shared_core_ftredge/utils/__init__.py
+ src/ipulse_shared_core_ftredge/utils/utils_common.py
+ tests/test_utils_gcp.py