ipulse-shared-core-ftredge 2.8__py3-none-any.whl → 2.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (23)
  1. ipulse_shared_core_ftredge/__init__.py +16 -3
  2. ipulse_shared_core_ftredge/logging/__init__.py +1 -0
  3. ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py +144 -0
  4. ipulse_shared_core_ftredge/logging/utils_logging.py +72 -0
  5. ipulse_shared_core_ftredge/models/__init__.py +0 -1
  6. ipulse_shared_core_ftredge/models/organisation.py +61 -55
  7. ipulse_shared_core_ftredge/models/resource_catalog_item.py +97 -171
  8. ipulse_shared_core_ftredge/models/user_profile.py +5 -5
  9. ipulse_shared_core_ftredge/models/user_profile_update.py +32 -14
  10. ipulse_shared_core_ftredge/models/user_status.py +12 -9
  11. ipulse_shared_core_ftredge/utils/__init__.py +3 -0
  12. ipulse_shared_core_ftredge/utils/utils_common.py +10 -0
  13. ipulse_shared_core_ftredge-2.8.1.dist-info/METADATA +13 -0
  14. ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD +19 -0
  15. {ipulse_shared_core_ftredge-2.8.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/WHEEL +1 -1
  16. ipulse_shared_core_ftredge/models/pulse_enums.py +0 -222
  17. ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  18. ipulse_shared_core_ftredge/tests/test.py +0 -17
  19. ipulse_shared_core_ftredge-2.8.dist-info/METADATA +0 -11
  20. ipulse_shared_core_ftredge-2.8.dist-info/RECORD +0 -17
  21. /ipulse_shared_core_ftredge/{models → logging}/audit_log_firestore.py +0 -0
  22. {ipulse_shared_core_ftredge-2.8.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/LICENCE +0 -0
  23. {ipulse_shared_core_ftredge-2.8.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,18 @@
- from .models import Organisation, UserAuth, UserProfile, UserStatus, UserProfileUpdate, pulse_enums
+ # pylint: disable=missing-module-docstring
+ from .models import ( UserAuth, UserProfile,
+                       UserStatus, UserProfileUpdate)
 
+ from .enums import (TargetLogs,LogLevel, Status, Unit, Frequency,
+                     Module, Domain, FinCoreCategory, FincCoreSubCategory,
+                     FinCoreRecordsCategory, FinancialExchangeOrPublisher,
+                     DataPrimaryCategory, DataState, DatasetScope,
+                     DataSourceType,PipelineTriggerType,DataOperationType,
+                     MatchConditionType, DuplicationHandling, DuplicationHandlingStatus,
+                     CodingLanguage, ExecutionLocation, ExecutionComputeType,
+                     CloudProvider,LoggingHandler)
+ from .utils import (list_as_strings)
 
-
-
+ from .logging import (get_logger,
+                       log_error,
+                       log_warning,
+                       log_info)
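Reviewer note: per the new top-level exports above, the logging helpers, the enum classes and list_as_strings are now importable straight from the package root, while Organisation and pulse_enums are no longer re-exported. A minimal, hedged usage sketch (assumes 2.8.1 is installed; names are taken only from the import list shown above):

    from ipulse_shared_core_ftredge import get_logger, log_info, LoggingHandler, list_as_strings

    logger = get_logger("pulse.demo", logging_handler_providers=LoggingHandler.LOCAL_STREAM)
    log_info("package root exports sketch", logger=logger, print_out=True)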
@@ -0,0 +1 @@
+ from .utils_logging import get_logger, log_error, log_warning, log_info
@@ -0,0 +1,144 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+
+ import logging
+ import traceback
+ import json
+ import os
+ from google.cloud import error_reporting
+ from google.cloud import logging as cloud_logging
+
+ ##########################################################################################################################
+ #################################### Custom logging FORMATTERS #####################################################
+ ##########################################################################################################################
+
+
+ class CloudLogFormatter(logging.Formatter):
+     """Formats log records as structured JSON."""
+
+     def format(self, record):
+         log_entry = {
+             'message': record.msg,
+             'timestamp': self.formatTime(record, self.datefmt),
+             'name': record.name,
+             'severity': record.levelname,
+             'pathname': record.pathname,
+             'lineno': record.lineno,
+         }
+         if record.exc_info:
+             log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
+         if isinstance(record.msg, dict):
+             log_entry.update(record.msg)
+         return json.dumps(log_entry)
+
+
+ class LocalLogFormatter(logging.Formatter):
+     """Formats log records for local output to the console."""
+
+     def format(self, record): # Make sure you have the 'record' argument here!
+         path_parts = record.pathname.split(os.sep)
+
+         # Get the last two parts of the path if they exist
+         if len(path_parts) >= 2:
+             short_path = os.path.join(path_parts[-2], path_parts[-1])
+         else:
+             short_path = record.pathname
+
+         # Format log messages differently based on the log level
+         if record.levelno == logging.INFO:
+             log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
+         elif record.levelno == logging.DEBUG:
+             log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+         elif record.levelno == logging.ERROR:
+             log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+             if record.exc_info:
+                 log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+         else:
+             log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+
+
+         return log_message
+
+ #############################################################################################################################################
+ ######################################## Logging handlers for Google Cloud ########################################
+ #############################################################################################################################################
+
+ class CustomGCPLoggingHandler(cloud_logging.handlers.CloudLoggingHandler):
+     """Custom handler for Google Cloud Logging with a dynamic logName."""
+     def __init__(self, client, name, resource=None, labels=None):
+         super().__init__(client=client, name=name, resource=resource, labels=labels)
+         self.client = client # Ensure client is consistently used
+
+     def emit(self, record):
+         try:
+             # 1. Create the basic log entry dictionary
+             log_entry = {
+                 'message': record.msg,
+                 'severity': record.levelname,
+                 'name': record.name,
+                 'pathname': record.filename,
+                 'lineno': record.lineno,
+             }
+             if record.exc_info:
+                 log_entry['exception_traceback'] = ''.join(
+                     traceback.format_exception(*record.exc_info)
+                 )
+
+             # 2. Apply the formatter to the 'message' field if it's a dictionary
+             if isinstance(record.msg, dict):
+                 formatted_message = self.formatter.format(record)
+                 try:
+                     log_entry['message'] = json.loads(formatted_message)
+                 except json.JSONDecodeError:
+                     log_entry['message'] = formatted_message
+             else:
+                 log_entry['message'] = record.msg
+
+             # 3. Set the custom logName
+             log_entry['logName'] = f"projects/{self.client.project}/logs/{record.name}"
+
+             # 4. Send to Google Cloud Logging
+             super().emit(record)
+         except Exception as e:
+             self.handleError(record)
+
+ class CustomGCPErrorReportingHandler(logging.Handler):
+     def __init__(self, client=None, level=logging.ERROR):
+         super().__init__(level)
+         self.error_client = error_reporting.Client() if client is None else client
+         self.propagate = True
+
+     def emit(self, record):
+         try:
+             if record.levelno >= logging.ERROR:
+                 log_struct = {
+                     'message': self.format(record),
+                     'severity': record.levelname,
+                     'pathname': getattr(record, 'pathname', None),
+                     'lineno': getattr(record, 'lineno', None)
+                 }
+                 if record.exc_info:
+                     log_struct['exception'] = ''.join(
+                         traceback.format_exception(*record.exc_info)
+                     )
+                 self.error_client.report(str(log_struct))
+         except Exception as e:
+             self.handleError(record)
+
+
+ def add_gcp_cloud_logging(logger, formatter, client=None):
+     """Sets up Google Cloud Logging for the logger."""
+     client = client or cloud_logging.Client()
+     handler = CustomGCPLoggingHandler(client, logger.name)
+     handler.setFormatter(formatter)
+     logger.addHandler(handler)
+
+ def add_gcp_error_reporting(logger, client=None):
+     """Sets up Google Cloud Error Reporting for the logger."""
+     client = client or error_reporting.Client()
+     handler = CustomGCPErrorReportingHandler(client=client)
+     logger.addHandler(handler)
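Reviewer note: the two formatters above can be exercised locally without any Google Cloud client, which is a quick way to sanity-check this new module. A sketch under that assumption (module path as listed in the 2.8.1 RECORD below):

    import logging
    from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (
        CloudLogFormatter, LocalLogFormatter)

    handler = logging.StreamHandler()
    handler.setFormatter(LocalLogFormatter())    # human-readable console lines
    # handler.setFormatter(CloudLogFormatter())  # or structured JSON instead

    demo = logging.getLogger("formatter.demo")
    demo.setLevel(logging.DEBUG)
    demo.addHandler(handler)
    demo.debug("local formatter check")

The GCP handlers (CustomGCPLoggingHandler, CustomGCPErrorReportingHandler) additionally require google-cloud-logging / google-cloud-error-reporting credentials, matching the new Requires-Dist entries in the 2.8.1 METADATA.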
@@ -0,0 +1,72 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+ import logging
+ from typing import List, Union
+ from ipulse_shared_core_ftredge.enums import LoggingHandler
+ from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (CloudLogFormatter,
+                                                                                 LocalLogFormatter,
+                                                                                 add_gcp_cloud_logging,
+                                                                                 add_gcp_error_reporting)
+
+
+ def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandler, List[LoggingHandler]] = LoggingHandler.NONE):
+
+     """Creates and configures a logger with the specified handlers."""
+
+     logger = logging.getLogger(logger_name)
+     logger.setLevel(level)
+     cloud_formatter = CloudLogFormatter()
+
+     # Ensure logging_handler_providers is a list for consistent processing
+     if not isinstance(logging_handler_providers, list):
+         logging_handler_providers = [logging_handler_providers]
+
+     supported_remote_handlers = [
+         LoggingHandler.GCP_CLOUD_LOGGING,
+         LoggingHandler.GCP_ERROR_REPORTING,
+         LoggingHandler.LOCAL_STREAM,
+         LoggingHandler.NONE, # If NONE is considered a remote handler
+     ]
+
+     # Remote handlers
+
+     for handler_provider in logging_handler_providers:
+         if handler_provider in supported_remote_handlers:
+             if handler_provider == LoggingHandler.GCP_CLOUD_LOGGING:
+                 add_gcp_cloud_logging(logger, cloud_formatter)
+             elif handler_provider == LoggingHandler.GCP_ERROR_REPORTING:
+                 add_gcp_error_reporting(logger)
+             elif handler_provider == LoggingHandler.LOCAL_STREAM: # Handle local stream
+                 local_handler = logging.StreamHandler()
+                 local_handler.setFormatter(LocalLogFormatter())
+                 logger.addHandler(local_handler)
+         else:
+             raise ValueError(
+                 f"Unsupported logging provider: {handler_provider}. "
+                 f"Supported providers: {[h.value for h in supported_remote_handlers]}"
+             )
+     return logger
+
+
+ def log_error(msg,logger=None , print_out=False, exc_info=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.error(msg, exc_info=exc_info)
+
+ def log_warning(msg, logger=None, print_out=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.warning(msg)
+
+
+ def log_info(msg, logger=None, print_out=False):
+     if print_out:
+         print(msg)
+     if logger:
+         logger.info(msg)
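Reviewer note: a hedged usage sketch for get_logger and the log_* wrappers added above. LoggingHandler.LOCAL_STREAM is one of the enum members referenced in the function body; the GCP options would additionally need cloud credentials:

    import logging
    from ipulse_shared_core_ftredge.enums import LoggingHandler
    from ipulse_shared_core_ftredge.logging import get_logger, log_error, log_info

    logger = get_logger("pulse.jobs.demo",
                        level=logging.DEBUG,
                        logging_handler_providers=[LoggingHandler.LOCAL_STREAM])
    log_info("job started", logger=logger)
    try:
        raise RuntimeError("simulated failure")
    except RuntimeError:
        log_error("job failed", logger=logger, exc_info=True)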
@@ -1,7 +1,6 @@
  from .user_profile import UserProfile
  from .user_status import UserStatus
  from .user_profile_update import UserProfileUpdate
- from .organisation import Organisation
  from .user_auth import UserAuth
 
 
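Reviewer note: with the Organisation import dropped above (and organisation.py fully commented out below), code that did from ipulse_shared_core_ftredge.models import Organisation would now raise ImportError in 2.8.1; the remaining models import as before. Sketch:

    from ipulse_shared_core_ftredge.models import UserProfile, UserStatus, UserProfileUpdate, UserAuth  # still exported
    # from ipulse_shared_core_ftredge.models import Organisation  # removed in 2.8.1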
@@ -1,65 +1,71 @@
- from pydantic import BaseModel, validator, ValidationError, Field
- from typing import Set, Optional
- import uuid
- from datetime import datetime
- from . import pulse_enums as enums
- import dateutil.parser
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+ # pylint: disable=line-too-long
+ # pylint: disable=unused-variable
+ # pylint: disable=broad-exception-caught
+ # from pydantic import BaseModel, validator, ValidationError, Field
+ # from typing import Set, Optional
+ # import uuid
+ # from datetime import datetime
+ # import dateutil.parser
 
- CLASS_VERSION= 1.0
- MODULE= "core"
- CLASS_REF="orgn"
+ # CLASS_VERSION= 1.0
+ # MODULE= "core"
+ # CLASS_REF="orgn"
 
 
- class Organisation(BaseModel):
-     puid: str = Field(default_factory=lambda: f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower())
-     name: str
-     creat_date: datetime = Field(default_factory=lambda:datetime.utcnow())
-     updt_date: datetime = Field(default_factory=lambda:datetime.utcnow())
-     creat_by_user: Optional[str] = None
-     updt_by_user: Optional[str] = None
-     relations: Optional[Set[str]]=None
-     description: Optional[str] = None # Updated to use Optional
-     industries: Optional[Set[str]] = None # Updated to use Optional
-     website: Optional[str] = None # Updated to use Optional
-     org_admin_user_uids: Optional[Set[str]] = None # Updated to use Optional
-     class Config:
-         extra = "forbid"
+ # class Organisation(BaseModel):
+ # puid: str = Field(default_factory=f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower())
+ # name: str
+ # creat_date: datetime = Field(default_factory=datetime.utcnow())
+ # updt_date: datetime = Field(default_factory=datetime.utcnow())
+ # creat_by_user: Optional[str] = None
+ # updt_by_user: Optional[str] = None
+ # relations: Optional[Set[str]]=None
+ # description: Optional[str] = None # Updated to use Optional
+ # industries: Optional[Set[str]] = None # Updated to use Optional
+ # website: Optional[str] = None # Updated to use Optional
+ # org_admin_user_uids: Optional[Set[str]] = None # Updated to use Optional
+ # class Config:
+ # extra = "forbid"
 
 
-     @validator('relations', pre=True, always=True)
-     def validate_relations(cls, relations):
-         if not set(relations).issubset(enums.organisation_relations):
-             raise ValueError("Invalid relation values provided.")
-         return relations
+ # @validator('relations', pre=True, always=True)
+ # def validate_relations(cls, relations):
+ # if not set(relations).issubset(enums.organisation_relations):
+ # raise ValueError("Invalid relation values provided.")
+ # return relations
 
 
-     @validator('industries', pre=True, always=True)
-     def validate_industries(cls, industries):
-         if industries is not None and not set(industries).issubset(enums.organisation_industries):
-             raise ValueError("Invalid industry values provided.")
-         return industries
+ # @validator('industries', pre=True, always=True)
+ # def validate_industries(cls, industries):
+ # if industries is not None and not set(industries).issubset(enums.organisation_industries):
+ # raise ValueError("Invalid industry values provided.")
+ # return industries
 
-     @validator('creat_date', 'updt_date', pre=True)
-     def parse_date(cls, value):
-         if value is None:
-             return value
-         if isinstance(value, datetime):
-             return value
-         try:
-             # Assuming Firestore returns an ISO 8601 string, adjust if necessary
-             print("Putting Updt or Creat date in a valid format in a Validator when creating Organisation object")
-             return dateutil.parser.isoparse(value)
-         except (TypeError, ValueError):
-             raise ValidationError(f"Invalid datetime format inside Organisation: {value}")
+ # @validator('creat_date', 'updt_date', pre=True)
+ # def parse_date(cls, value):
+ # if value is None:
+ # return value
+ # if isinstance(value, datetime):
+ # return value
+ # try:
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
+ # print("Putting Updt or Creat date in a valid format in a Validator when creating Organisation object")
+ # return dateutil.parser.isoparse(value)
+ # except (TypeError, ValueError):
+ # raise ValidationError(f"Invalid datetime format inside Organisation: {value}")
 
 
-     ### Description, Industries, and Website are optional for Retail Customer and mandatory for Non Retail Customer
-     @validator('description', 'industries', 'website', pre=True, always=True)
-     def validate_optional_fields(cls, value, values):
-         if values.get('name') == 'Retail Customer' and values.get('relations') == {"retail_customer"} or values.get('relations') == ["retail_customer"]:
-             if value is not None:
-                 raise ValueError("For 'Retail Customer' with only 'retail_customer' relations, description, industries, and website should not be provided.")
-         else:
-             if value is None:
-                 raise ValueError("For Non Retail Customer, description, industries, and website are mandatory.")
-         return value
+ # ### Description, Industries, and Website are optional for Retail Customer and mandatory for Non Retail Customer
+ # @validator('description', 'industries', 'website', pre=True, always=True)
+ # def validate_optional_fields(cls, value, values):
+ # if values.get('name') == 'Retail Customer' and values.get('relations') == {"retail_customer"} or values.get('relations') == ["retail_customer"]:
+ # if value is not None:
+ # raise ValueError("For 'Retail Customer' with only 'retail_customer' relations, description, industries, and website should not be provided.")
+ # else:
+ # if value is None:
+ # raise ValueError("For Non Retail Customer, description, industries, and website are mandatory.")
+ # return value
@@ -1,189 +1,115 @@
- import uuid
- from datetime import datetime
- from pydantic import BaseModel, validator, ValidationError
- from typing import Dict, Any, Set, Optional
- from . import pulse_enums as enums
+ # import uuid
+ # from datetime import datetime
+ # from pydantic import BaseModel, validator, ValidationError
+ # from typing import Dict, Any, Set, Optional
 
- import dateutil.parser
+ # import dateutil.parser
 
- CLASS_VERSION = 1.0
- CLASS_REF = "resdes"
- MODULE = "core"
+ # CLASS_VERSION = 1.0
+ # CLASS_REF = "resdes"
+ # MODULE = "core"
 
- # metadata = {
- # "resr_puid_or_name":"Users",
- # "resr_path":"firestore/Users",
- # "resr_name":"Users",
- # "resr_pulse_module":MODULE,
- # "resr_type":"firestore_collection",
- # "resr_classifications":{ "auth_required_confidential"},
- # "resr_contents":{"user_core_profile"},
- # "resr_original_or_processed":"original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisations_uids":{"20231220futureedgegroup_coreorgn"},
- # }
+ # class ResourceCatalogItem(BaseModel):
 
- # Annotated[str, {"resr_classification":"auth_required_restricted",
- # "resr_readable_by": ["owner", "selected_by_admin"],
- # "resr_updatable_by": ["admin"],
- # "resr_original_or_processed" : "original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisation_uids":["20231220futureedgegroup_coreorgn"],
- # "resr_origin_description":"Original User field",
- # "resr_creat_date":datetime(2023, 12, 23),
- # "resr_creat_by_user":"Russlan Ramdowar;russlan@ftredge.com",
- # "metadata_updt_date":datetime(2023, 12, 23),
- # "metadata_updt_by_user":"Russlan Ramdowar;russlan@ftredge.com"}]
-
- # metadata={"resr_classification":"auth_required_restricted",
- # "resr_readable_by": ["owner", "selected_by_admin"],
- # "resr_updatable_by": [],
- # "resr_original_or_processed" : "original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisation_uids":["20231220futureedgegroup_coreorgn"],
- # "resr_origin_description":"Original User field",
- # "resr_creat_date":datetime(2023, 12, 23),
- # "resr_creat_by_user":"Russlan Ramdowar;russlan@ftredge.com",
- # "metadata_updt_date":datetime(2023, 12, 23),
- # "metadata_updt_by_user":"Russlan Ramdowar;russlan@ftredge.com"}
-
- # metadata={"resr_classification":"auth_required_restricted",
- # "resr_readable_by": ["owner", "selected_by_admin"],
- # "resr_updatable_by": ["owner","admin"],
- # "resr_original_or_processed" : "original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisation_uids":["20231220futureedgegroup_coreorgn"],
- # "resr_origin_description":"Original User field",
- # "resr_creat_date":datetime(2023, 12, 23),
- # "resr_creat_by_user":"Russlan Ramdowar;russlan@ftredge.com",
- # "metadata_updt_date":datetime(2023, 12, 23),
- # "metadata_updt_by_user":"Russlan Ramdowar;russlan@ftredge.com"}
-
- # metadata={"resr_classification":"auth_required_confidential",
- # "resr_readable_by": ["owner", "selected_by_admin"],
- # "resr_updatable_by": ["admin"],
- # "resr_original_or_processed" : "original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisation_uids":["20231220futureedgegroup_coreorgn"],
- # "resr_origin_description":"Original User field",
- # "resr_creat_date":datetime(2023, 12, 23),
- # "resr_creat_by_user":"Russlan Ramdowar",
- # "metadata_updt_date":datetime(2023, 12, 23),
- # "metadata_updt_by_user":"Russlan Ramdowar"}
-
- # metadata={"resr_classification":"auth_required_confidential",
- # "resr_readable_by": ["owner", "selected_by_admin"],
- # "resr_updatable_by": ["selected_by_admin"],
- # "resr_original_or_processed" : "original_source",
- # "resr_origin":"internal",
- # "resr_origin_organisation_uids":["20231220futureedgegroup_coreorgn"],
- # "resr_origin_description":"Original User field",
- # "resr_creat_date":datetime(2023, 12, 23),
- # "resr_creat_by_user":"Russlan Ramdowar",
- # "metadata_updt_date":datetime(2023, 12, 23),
- # "metadata_updt_by_user":"Russlan Ramdowar"}
-
- class ResourceCatalogItem(BaseModel):
-
-     resr_puid_or_name: str #Ex: username
-     resr_path: str #Ex: ipulse-401013/cloud/firesotre/Users/{user_uid}/username
-     resr_name: str #Ex: username
-     resr_pulse_module: str #Ex: core
-     resr_type: str
-     resr_classifications: Set[str]
-     resr_contents:Set[str]
-     resr_original_or_processed: str
-     resr_origin: str
-     resr_origin_organisations_uids: Set[str]
-     resr_origin_description: str
-     resr_licences_types: Set[str]
-     resr_description_details: str
-     resr_updtbl_by_non_staff: bool
-     resr_creat_by_user_uid: str
-     resr_creat_date: datetime
-     class_version:float = CLASS_VERSION
-     resr_columns_count: int
-     resr_columns: Optional[Dict[Any, Any]] = None #OPTIONAL
-     resr_structure_version: Optional[str]=None # OPTIONAL
-     resr_structure_updt_date: Optional[str]=None #OPTIONAL
-     resr_structure_updt_by_user_uid: Optional[str]=None # OPTIONAL
-     resr_tags: Optional[Dict[Any, Any]] = None #OPTIONAL
-     resr_content_updt_date: Optional[str]=None #OPTIONAL
-     resr_content_updt_by_user_uid: Optional[str]=None # OPTIONAL
-     puid: Optional[str] = None #TO BE SETUP BY Validator
-     metadata_version: Optional[float] = None #TO BE SETUP BY Validator
-     metadata_creat_date: Optional[datetime] = None #TO BE SETUP BY Management Service
-     metadata_creat_by: Optional[str] = None #TO BE SETUP BY Management Service
-     metadata_updt_date: Optional[datetime] = None #TO BE SETUP BY Management Service
-     metadata_updt_by: Optional[str] = None #TO BE SETUP BY Management Service
+ # resr_puid_or_name: str #Ex: username
+ # resr_path: str #Ex: ipulse-401013/cloud/firesotre/Users/{user_uid}/username
+ # resr_name: str #Ex: username
+ # resr_pulse_module: str #Ex: core
+ # resr_type: str
+ # resr_classifications: Set[str]
+ # resr_contents:Set[str]
+ # resr_original_or_processed: str
+ # resr_origin: str
+ # resr_origin_organisations_uids: Set[str]
+ # resr_origin_description: str
+ # resr_licences_types: Set[str]
+ # resr_description_details: str
+ # resr_updtbl_by_non_staff: bool
+ # resr_creat_by_user_uid: str
+ # resr_creat_date: datetime
+ # class_version:float = CLASS_VERSION
+ # resr_columns_count: int
+ # resr_columns: Optional[Dict[Any, Any]] = None #OPTIONAL
+ # resr_structure_version: Optional[str]=None # OPTIONAL
+ # resr_structure_updt_date: Optional[str]=None #OPTIONAL
+ # resr_structure_updt_by_user_uid: Optional[str]=None # OPTIONAL
+ # resr_tags: Optional[Dict[Any, Any]] = None #OPTIONAL
+ # resr_content_updt_date: Optional[str]=None #OPTIONAL
+ # resr_content_updt_by_user_uid: Optional[str]=None # OPTIONAL
+ # puid: Optional[str] = None #TO BE SETUP BY Validator
+ # metadata_version: Optional[float] = None #TO BE SETUP BY Validator
+ # metadata_creat_date: Optional[datetime] = None #TO BE SETUP BY Management Service
+ # metadata_creat_by: Optional[str] = None #TO BE SETUP BY Management Service
+ # metadata_updt_date: Optional[datetime] = None #TO BE SETUP BY Management Service
+ # metadata_updt_by: Optional[str] = None #TO BE SETUP BY Management Service
 
-     @validator('puid', pre=True, always=True)
-     def set_puid(cls, puid, values):
-         if puid is None:
-             return f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower()
-         return puid
+ # @validator('puid', pre=True, always=True)
+ # def set_puid(cls, puid, values):
+ # if puid is None:
+ # return f"{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}_{MODULE}{CLASS_REF}".lower()
+ # return puid
 
-     @validator('metadata_version', pre=True, always=True)
-     def set_metadata_version(cls, metadata_version, values):
-         if metadata_version is None:
-             return 1.0
-         else:
-             return metadata_version + 0.1
+ # @validator('metadata_version', pre=True, always=True)
+ # def set_metadata_version(cls, metadata_version, values):
+ # if metadata_version is None:
+ # return 1.0
+ # else:
+ # return metadata_version + 0.1
 
 
-     @validator('resr_pulse_module', pre=True, always=True)
-     def validate_resr_pulse_module(cls, resr_pulse_modules):
-         if resr_pulse_modules not in enums.pulse_modules:
-             raise ValueError("Invalid pulse_modules values provided.")
-         return resr_pulse_modules
+ # @validator('resr_pulse_module', pre=True, always=True)
+ # def validate_resr_pulse_module(cls, resr_pulse_modules):
+ # if resr_pulse_modules not in enums.pulse_modules:
+ # raise ValueError("Invalid pulse_modules values provided.")
+ # return resr_pulse_modules
 
-     @validator('resr_type', pre=True, always=True)
-     def validate_resr_type(cls, resr_type):
-         if resr_type not in enums.resource_types:
-             raise ValueError("Invalid resource_types value provided.")
-         return resr_type
+ # @validator('resr_type', pre=True, always=True)
+ # def validate_resr_type(cls, resr_type):
+ # if resr_type not in enums.resource_types:
+ # raise ValueError("Invalid resource_types value provided.")
+ # return resr_type
 
-     @validator('resr_classifications', pre=True, always=True)
-     def validate_resr_classifications(cls, resr_classifications):
-         if not resr_classifications.issubset(enums.resource_classifications):
-             raise ValueError("Invalid resr_classifications values provided.")
-         return resr_classifications
+ # @validator('resr_classifications', pre=True, always=True)
+ # def validate_resr_classifications(cls, resr_classifications):
+ # if not resr_classifications.issubset(enums.resource_classifications):
+ # raise ValueError("Invalid resr_classifications values provided.")
+ # return resr_classifications
 
-     @validator('resr_contents', pre=True, always=True)
-     def validate_resr_contents(cls, resr_contents):
-         if not resr_contents.issubset(enums.resource_contents):
-             raise ValueError("Invalid resr_contents values provided.")
-         return resr_contents
+ # @validator('resr_contents', pre=True, always=True)
+ # def validate_resr_contents(cls, resr_contents):
+ # if not resr_contents.issubset(enums.resource_contents):
+ # raise ValueError("Invalid resr_contents values provided.")
+ # return resr_contents
 
-     @validator('resr_original_or_processed', pre=True, always=True)
-     def validate_resr_original_or_processed(cls, resr_original_or_processed):
-         if resr_original_or_processed not in enums.resource_original_or_processed:
-             raise ValueError("Invalid resr_original_or_processed value provided.")
-         return resr_original_or_processed
+ # @validator('resr_original_or_processed', pre=True, always=True)
+ # def validate_resr_original_or_processed(cls, resr_original_or_processed):
+ # if resr_original_or_processed not in enums.resource_original_or_processed:
+ # raise ValueError("Invalid resr_original_or_processed value provided.")
+ # return resr_original_or_processed
 
-     @validator('resr_origin', pre=True, always=True)
-     def validate_resr_origin(cls, resr_origin):
-         if resr_origin not in enums.resource_origins:
-             raise ValueError("Invalid resource_origins value provided.")
-         return resr_origin
+ # @validator('resr_origin', pre=True, always=True)
+ # def validate_resr_origin(cls, resr_origin):
+ # if resr_origin not in enums.resource_origins:
+ # raise ValueError("Invalid resource_origins value provided.")
+ # return resr_origin
 
 
-     @validator('metadata_creat_date', 'metadata_updt_date', pre=True)
-     def parse_date(cls, value):
-         if value is None:
-             return value
-         if isinstance(value, datetime):
-             return value
-         try:
-             # Assuming Firestore returns an ISO 8601 string, adjust if necessary
-             return dateutil.parser.isoparse(value)
-         except (TypeError, ValueError):
-             raise ValidationError(f"Invalid datetime format inside Resource Description: {value}")
+ # @validator('metadata_creat_date', 'metadata_updt_date', pre=True)
+ # def parse_date(cls, value):
+ # if value is None:
+ # return value
+ # if isinstance(value, datetime):
+ # return value
+ # try:
+ # # Assuming Firestore returns an ISO 8601 string, adjust if necessary
+ # return dateutil.parser.isoparse(value)
+ # except (TypeError, ValueError):
+ # raise ValidationError(f"Invalid datetime format inside Resource Description: {value}")
 
 
 
-     @validator('metadata_updt_date', 'metadata_updt_date', pre=True, always=True)
-     def set_default_updt_date(cls, date, values):
-         if date is None:
-             return datetime.utcnow().isoformat()
-         return date
+ # @validator('metadata_updt_date', 'metadata_updt_date', pre=True, always=True)
+ # def set_default_updt_date(cls, date, values):
+ # if date is None:
+ # return datetime.utcnow().isoformat()
+ # return date
@@ -11,12 +11,12 @@ CLASS_ORGIN_DATE=datetime(2024, 1, 16, 20, 5)
  CLASS_VERSION = 3.01
  CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
+ LAST_MODIFICATION="Fixed typo"
+
 
  DOMAIN="user"
  OBJ_REF = "usprfl"
 
-
-
  class UserProfile(BaseModel):
      schema_version: float = Field(default=CLASS_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
      # uid: str = Field(frozen=True, description="Combination of user_usrprof_<Firebase Auth UID>") #User can Read only ---> STORED as Firestore Doc ID
@@ -33,9 +33,9 @@ class UserProfile(BaseModel):
      provider_id: str #User can Read only
 
      username: Optional[str] = None #User can Read and Edit
-     dob: Optional[date] = None #User can Read and Edit
-     first_name: Optional[str] = None #User can Read and Edit
-     last_name: Optional[str] = None #User can Read and Edit
+     dob: Optional[date] = None #User can Read and Edit
+     first_name: Optional[str] = None #User can Read and Edit
+     last_name: Optional[str] = None #User can Read and Edit
      mobile: Optional[str] = None #User can Read and Edit
      class Config:
          extra = "forbid"
@@ -1,18 +1,36 @@
-
  from typing import Optional, Set
- from pydantic import BaseModel
- from datetime import datetime , date
+ from pydantic import BaseModel, Field, EmailStr
+ from datetime import date, datetime
+
+
+ CLASS_ORIGIN_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
+ CLASS_ORGIN_DATE=datetime(2024, 3, 15, 20, 15)
+
+ CLASS_VERSION = 2.01
+ CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
+ CLASS_REVISION_DATE=datetime(2024, 3, 15, 20, 15)
+ LAST_MODIFICATION="Created , with all fields Optional"
 
  class UserProfileUpdate(BaseModel):
-     email: Optional[str] = None
-     username: Optional[str] = None
-     aliases: Optional[Set[str]] = None
-     first_name: Optional[str] = None
-     last_name: Optional[str] = None
-     mobile: Optional[str] = None
-     dob: Optional[date] = None
-     updt_date: Optional[datetime] = None
-     updt_by_user: Optional[str] = None
+     schema_version: Optional[float] = Field(None, description="Version of this Class == version of DB Schema")
+     email: Optional[EmailStr] = Field(None, description="Propagated from Firebase Auth")
+     organizations_uids: Optional[Set[str]] = Field(None, description="Depends on Subscription Plan, Regularly Updated")
+     creat_date: Optional[datetime] = Field(None, description="Creation date")
+     creat_by_user: Optional[str] = Field(None, description="Created by user")
+     updt_date: Optional[datetime] = Field(None, description="Update date")
+     updt_by_user: Optional[str] = Field(None, description="Updated by user")
+     aliases: Optional[Set[str]] = Field(None, description="User aliases")
+     provider_id: Optional[str] = Field(None, description="Provider ID")
+
+     username: Optional[str] = Field(None, description="Username")
+     dob: Optional[date] = Field(None, description="Date of Birth")
+     first_name: Optional[str] = Field(None, description="First Name")
+     last_name: Optional[str] = Field(None, description="Last Name")
+     mobile: Optional[str] = Field(None, description="Mobile Number")
 
-     def model_dump(self, **kwargs):
-         return super().model_dump(exclude_none=True, **kwargs)
+     # def model_dump(self, **kwargs):
+     #     return super().model_dump(exclude_none=True, **kwargs)
+
+
+
+
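Reviewer note: every field is now Optional and the model_dump override that silently dropped None values is commented out, so callers relying on that behaviour would need to pass exclude_none themselves (standard pydantic v2 API). Hedged sketch against the field set above, with illustrative values:

    from ipulse_shared_core_ftredge.models import UserProfileUpdate

    patch = UserProfileUpdate(username="new_name", mobile="+00 000 0000")  # example values only
    payload = patch.model_dump(exclude_none=True)  # previously applied implicitly by the override
    # payload == {'username': 'new_name', 'mobile': '+00 000 0000'}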
@@ -1,42 +1,45 @@
  from pydantic import BaseModel, Field
+
  from datetime import datetime
  from dateutil.relativedelta import relativedelta
- from typing import Set, Optional
- # import uuid
- from . import pulse_enums as enums
+ from typing import Set, Optional, Dict, List
+
+
 
 
  CLASS_ORIGIN_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_ORGIN_DATE=datetime(2024, 2, 12, 20, 5)
 
- CLASS_VERSION = 2.1
+ SCHEMA_VERSION = 2.3
  CLASS_REVISION_AUTHOR="Russlan Ramdowar;russlan@ftredge.com"
  CLASS_REVISION_DATE=datetime(2024, 2, 13, 20, 15)
+ LAST_MODIFICATION="Changed default IAM_GROUPS"
 
  DOMAIN="user"
  OBJ_REF = "usrsttus"
 
- DEFAULT_USER_GROUPS={"pulseroot__authuser_open"}
+ DEFAULT_IAM_GROUPS={"pulseroot":["full_open_read"]}
  DEFAULT_SUBSCRIPTION_PLAN="subscription_free"
  DEFAULT_SUBSCRIPTION_STATUS="active"
- DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=7
+ DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS=10
  DEFAULT_EXTRA_INSIGHT_CREDITS=0
 
+ ############################################ !!!!! ALWAYS UPDATE SCHEMA VERSION , IF SCHEMA IS BEING MODIFIED !!! ############################################
  class UserStatus(BaseModel):
-     schema_version: float = Field(default=CLASS_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
+     schema_version: float = Field(default=SCHEMA_VERSION, description="Version of this Class == version of DB Schema") #User can Read only
      # uid: str = Field(frozen=True, description="Generated by Firebase Auth") #User can Read only
      # puid:str = Field(default_factory=lambda: f"{DOMAIN}{OBJ_REF}{datetime.utcnow().strftime('%Y%m%d%H%M')}{uuid.uuid4().hex[:8]}".lower(),
      # frozen=True,
      # description="Generated Automatically by default_factory") #User can Read only
 
-     iam_groups: Set[str] = Field(default_factory=lambda:DEFAULT_USER_GROUPS, description="User's Groups, with a default one for all authenticated Pulse users") #User can Read only
+     iam_groups: Dict[str, List[str]] = Field(default_factory=lambda:DEFAULT_IAM_GROUPS, description="User's Groups, with a default one for all authenticated Pulse users") #User can Read only
      sbscrptn_plan: str=Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_PLAN, description="Subscription Plan ") #User can Read only
      sbscrptn_status: str=Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_STATUS, description="Subscription Status") #User can Read only
      sbscrptn_start_date: datetime=Field(default_factory=lambda:datetime.utcnow(), description="Subscription Start Date") #User can Read only
      sbscrptn_end_date: datetime=Field(default_factory=lambda:datetime.utcnow()+relativedelta(years=1) , description="Subscription End Date") #User can Read only
      sbscrptn_insight_credits: int= Field(default_factory=lambda:DEFAULT_SUBSCRIPTION_INSIGHT_CREDITS, description="Depends on Subscription Plan, Set Amount udated at Regular Intervals or at Regular Time") #User can Read only
      sbscrptn_ins_crdts_updtd_since_datetime: datetime=Field(default_factory=lambda:datetime.utcnow(), description="Subscription Start Date") #User can Read only #User can Read only #User can Read only
-     extra_insigth_credits: int= Field(default_factory=lambda:DEFAULT_EXTRA_INSIGHT_CREDITS, description="If user purchased extra Insigth Credits they shouldn't Expire") #User can Read only
+     extra_insight_credits: int= Field(default_factory=lambda:DEFAULT_EXTRA_INSIGHT_CREDITS, description="If user purchased extra Insigth Credits they shouldn't Expire") #User can Read only
      payment_refs_uids: Optional[Set[str]] = None #User can Read only
 
      creat_date: datetime #User can Read only
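Reviewer note: the default IAM grouping changes shape here, from a flat set of group names to a mapping of group name to permission list, so consumers that iterated iam_groups as strings need updating. Shape sketch, values copied from the defaults above:

    # 2.8 default (Set[str])
    iam_groups_old = {"pulseroot__authuser_open"}

    # 2.8.1 default (Dict[str, List[str]])
    iam_groups_new = {"pulseroot": ["full_open_read"]}

    # membership checks move from set elements to dict keys
    assert "pulseroot" in iam_groups_new

Note also the field rename from extra_insigth_credits to extra_insight_credits, a breaking change for stored documents that used the old spelling.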
@@ -0,0 +1,3 @@
+ # pylint: disable=missing-module-docstring
+
+ from .utils_common import (list_as_strings)
@@ -0,0 +1,10 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+
+ def list_as_strings(*enums):
+     """Converts a list of Enum members to their string values."""
+     return [str(enum) for enum in enums]
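Reviewer note: list_as_strings just maps str() over its arguments, so the output depends on each enum's __str__. A self-contained sketch with a stand-in enum (Colour is illustrative only; the package's real enums live in ipulse_shared_core_ftredge.enums and are not shown in this diff):

    from enum import Enum
    from ipulse_shared_core_ftredge.utils import list_as_strings

    class Colour(Enum):  # stand-in enum, not part of the package
        RED = "red"
        BLUE = "blue"

    print(list_as_strings(Colour.RED, Colour.BLUE))
    # a plain Enum prints as ['Colour.RED', 'Colour.BLUE']; enums that override __str__ would yield their values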
@@ -0,0 +1,13 @@
+ Metadata-Version: 2.1
+ Name: ipulse_shared_core_ftredge
+ Version: 2.8.1
+ Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
+ Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
+ Author: Russlan Ramdowar
+ License-File: LICENCE
+ Requires-Dist: pydantic[email]~=2.5
+ Requires-Dist: python-dateutil~=2.8
+ Requires-Dist: pytest~=7.1
+ Requires-Dist: google-cloud-logging~=3.10.0
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
+
@@ -0,0 +1,19 @@
+ ipulse_shared_core_ftredge/__init__.py,sha256=gVXmJATW1Yp-1XEZQT6KbkYoWNljmPblysPlhfSzdGk,882
+ ipulse_shared_core_ftredge/logging/__init__.py,sha256=hE42Z23NuJW-pn1Lm_1MHUnht0I3NnoBSMz1J5A9g5E,72
+ ipulse_shared_core_ftredge/logging/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
+ ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py,sha256=k5gQU6matAzviRd7X29AIAThaOeXYVFsFaxT08kpqj4,6287
+ ipulse_shared_core_ftredge/logging/utils_logging.py,sha256=q8VpDROulW0krG4O0kX0HRovmELBAvfCE8n1_MUn4Y4,2846
+ ipulse_shared_core_ftredge/models/__init__.py,sha256=MeGH2ZBxkrwldUiWyUaI_TMyfq78tuSwRkN_mEfKD8U,161
+ ipulse_shared_core_ftredge/models/organisation.py,sha256=22esRGYuJmKN3papkgozleEmDNJrVwUgIzKp7annvWs,3280
+ ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=mEGX8AftzrhEHqFVXjr62CuRnXC1vK4z3bHl_XBJodU,4964
+ ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
+ ipulse_shared_core_ftredge/models/user_profile.py,sha256=D3BB9D6XEv7IVZgsURgf0hWmUZW5rms3uiBXS0ZGLeE,1927
+ ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=oKK0XsQDKkgDvjFPhX2XlqEqlKLBQ4AkvPHXEuZbFMY,1712
+ ipulse_shared_core_ftredge/models/user_status.py,sha256=8TyRd8tBK9_xb0MPKbI5pn9-lX7ovKbeiuWYYPtIOiw,3202
+ ipulse_shared_core_ftredge/utils/__init__.py,sha256=Yziq9yAHlOeXTa4LuBWSxZqBMBMfq0XBc0XtcS7lFfw,88
+ ipulse_shared_core_ftredge/utils/utils_common.py,sha256=bxpRcd_uI2LyqbgyNquLarqOwXFnFxA-pDAn0JKRlqs,388
+ ipulse_shared_core_ftredge-2.8.1.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ ipulse_shared_core_ftredge-2.8.1.dist-info/METADATA,sha256=QupPp0QxKGLUDmwZXXIWjOwchya5NROy8Dr5mK-WAlY,511
+ ipulse_shared_core_ftredge-2.8.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ ipulse_shared_core_ftredge-2.8.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.42.0)
+ Generator: setuptools (72.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
@@ -1,222 +0,0 @@
- resource_classifications = {
- "*",
- "childs_based", # Meaning need to look into child fields to determine classifications
-
- "public", #Anyone Can Access ex: synthetic data
- "authuser_open", # Any Authenticated Can Access ex: prices of gold, bitcoin etc.
- # "authuser_subscription",
- "authuser_confidential", ## Only User Owner Can Access and Specific Admin
- "authuser_limitedacl" , ## Has to be in the ACL
- "authuser_owner"
- "internal_open", ## Any Internal employees only Can Access ex: public reports, emails etc.
- "internal_sensitive", ## Many Internal employees Can Access IF meet special condition ex: internal financials summary reports , web and app analytics, list of admin users etc.
- "internal_confidential", ## Few Internal employees Can Access. ex: internal user data, key financials, salaries and bonuses etc
- "internal_limitedacl", ## Has to employee usertype and in the ACL
- "internal_owner"
-
-
- }
-
- default_organisations_uids = {
-
- }
-
-
- resource_domain = {
- "*",
- ############### GYM #########
- "gym_domain",
- "gym_data_domain",
- "gym_ai_domain",
- ############## ORACLE #########
- "oracle_domain",
- "oracle_world_data_domain"
- "oracle_ai_domain",
- "oracle_assets_historic_prices_domain",
- "oracle_assests_historic_info_dmoain",
- "oracle_indicators_historic_domain",
- "oracle_news_historic_domain",
- "oracle_calendar_domain",
- "oracle_modelinfo_domain",
- "oracle_modelmetrics_domain",
- "oracle_modelpredictions_domain",
- ######### ORGANISATIONS #########
- "organisation_domain",
- ################### USER #########
- "user_domain",
- "user_management_domain",
- "user_portfolio_domain",
- "user_groups_and_roles_domain",
- ############### BUSINESS #########
- "business_domain",
- ############### ANALYTICS #########
- "analytics_domain",
-
- "system_domain"
- }
-
- resource_location = {
- "firestore_default_",
- "github_ipulse_ui_main", "github_authz_main","github_authz_staging",
- }
-
-
- resource_types = {
- "db", "sql_db", "nosql_db", "dynamodb",
- "big_query", "big_query_project", "big_query_table", "big_query_column",
- "big_query_row", "big_query_cell",
- "firestore", "firestore_project", "firestore_collection",
- "firestore_document","firestore_document_with_timeseries" "firestore_document_field",
- "pandas_dataframe", "spark_dataframe",
- "s3_bucket", "storage_bucket",
- "folder", "file", "json_file", "csv_file", "pdf_file",
- "unstructured_file", "image", "video", "audio", "text",
- "api", "report", "dashboard", "webpage", "website", "web"
- }
-
- resource_origins = {"*", "internal", "external", "mixed"}
-
- resource_original_or_processed = {"*",
- "original_source", # Example User Profiles
- "original_copy",
- "processed_source",
- "processed_copy",
- "mixed_source",
- "mixed_copy" }
-
- pulse_modules={
- "*",
- "core",
- "gym",
- "orcl",
- "scen",
- "invs",
- "prfl",
- "trde",
- "bet",
- "chat"
- }
-
- organisation_relations = {
- "*",
- "retail_customer",
- "corporate_customer",
- "parent",
- "sister",
- "self",
- "partner",
- "supplier",
- "sponsor",
- "investor",
- "regulator",
- "other"
- }
-
- organisation_industries = {
- "*",
- "data",
- "government",
- "media",
- "academic",
- "commercial",
- "fund",
- "finance",
- "advisory",
- "hedgefund",
- "bank",
- "vc",
- "pe",
- "construction",
- "healthcare",
- "technology",
- "consulting",
- "retail",
- "non_profit",
- "individual",
- "freelancer",
- "other"
- }
-
- licences_types={
- "*",
- ######################################### OPEN or FULL Rights
- "public",
- "open",
- "open_no_tandc",
- "full_rights",
- "full_rights_for_sale",
- "commercial_licence_perpetual",
- "customer_private_tac",
- ######################################### SPECIAL CONDITIONS
- "open_with_tandc",
- "on_special_request",
- "commercial_licence_limited_time",
- "customer_owned_for_sale",
- ######################################### Not for Commercial Use
- "full_rights_not_for_sale",
- "internal_only",
- "academic_licence",
- "not_for_commercial_use",
- "customer_private"
- ######################################### Unknown
- "commercial_licence_not_purchased",
- "web_scrapped",
- "unknown"
- }
-
-
-
- effects={"allow", "deny"}
-
- actions ={"GET",
- "POST",
- "DELETE",
- "PUT",
- "create",
- "batch_create",
- "read",
- "batch_read",
- "edit",
- "batch_edit",
- "add",
- "batch_add",
- "remove",
- "batch_remove",
- "delete",
- "batch_delete",
- "rename" ,
- "batch_rename",
- "move",
- "batch_move",
- "download",
- "upload",
- "share"
- }
-
- resource_readable_by={
- "*",
- "all",
- "authenticated",
- "restircted",
- "owner",
- "selected_by_owner",
- "admin",
- "selected_by_admin",
- "super_admin",
- "super_admin_selected",
- "system"
- }
-
- resource_updatable_by={
- "*",
- "all",
- "authenticated",
- "restircted",
- "owner",
- "selected_by_owner",
- "admin",
- "selected_by_admin",
- "super_admin",
- "super_admin_selected",
- "system"
- }
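Reviewer note: these module-level sets are gone in 2.8.1; per the __init__.py diff at the top, enum classes are now re-exported from a new ipulse_shared_core_ftredge.enums module instead (that module itself is not part of this diff, so member names are not visible here). Hedged migration sketch:

    # 2.8 style, now removed:
    # from ipulse_shared_core_ftredge.models import pulse_enums
    # "core" in pulse_enums.pulse_modules

    # 2.8.1 style (classes only; member names not shown in this diff):
    from ipulse_shared_core_ftredge import Module, LoggingHandler, CloudProvider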
File without changes
@@ -1,17 +0,0 @@
- from ipulse_shared_core_ftredge import UserProfile, UserAuth
- import datetime
- import logging
- logging.basicConfig(level=logging.INFO)
- ex=UserProfile(uid="uid",
-                organizations_uids={"20231220retailcustomer_coreorgn"},
-                email="email@gmail.com",
-                creat_date= datetime.datetime.now(datetime.UTC),
-                creat_by_user='creat_by_user',
-                updt_date=datetime.datetime.now(datetime.UTC),
-                updt_by_user="subscriber_cf_persistUserAuthToUserProfile",
-                approved=True,
-                provider_id='provider_id',
-                username='username')
-
-
- logging.info(ex.model_dump(exclude_unset=True))
@@ -1,11 +0,0 @@
- Metadata-Version: 2.1
- Name: ipulse_shared_core_ftredge
- Version: 2.8
- Summary: Shared models for the Pulse platform project. Using AI for financial advisory and investment management.
- Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
- Author: Russlan Ramdowar
- License-File: LICENCE
- Requires-Dist: pydantic[email] ~=2.5
- Requires-Dist: python-dateutil ~=2.8
- Requires-Dist: pytest ~=7.1
-
@@ -1,17 +0,0 @@
- ipulse_shared_core_ftredge/__init__.py,sha256=gZ2QIubXOdzhGe4Jd3Vwx8NrULkn4Is6uVZ2ThEvrpE,104
- ipulse_shared_core_ftredge/models/__init__.py,sha256=gE22Gzhil0RYQa7YLtdtT44_AsWqklcDfRtgLAQc1dI,200
- ipulse_shared_core_ftredge/models/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
- ipulse_shared_core_ftredge/models/organisation.py,sha256=4f1ATEWh5WT-CDJBLEZUhUwyl3V06ogRkteZAqW_nko,2953
- ipulse_shared_core_ftredge/models/pulse_enums.py,sha256=zBt03Ij1TBW3vIOof0SrH2cRNht_m0XzOq9D8u6twhY,5529
- ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=PxeRvI8fe8KOiHr6NW2Jz_yocyLId09PW8QyTZxjHAA,9809
- ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
- ipulse_shared_core_ftredge/models/user_profile.py,sha256=q1sGCsqV-W-qlkQRNTlODKx47C-UtC3m6dZny637e2o,1900
- ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=e2jO24JRcnUYJvQWOARbtCW-23WPcGbsWDYw0Iow6mQ,572
- ipulse_shared_core_ftredge/models/user_status.py,sha256=i2jd7NU2fFpauu9yOoNBdLfvFYvIo9CJsVFlG0I3smA,3013
- ipulse_shared_core_ftredge/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ipulse_shared_core_ftredge/tests/test.py,sha256=0lS8HP5Quo_BqNoscU40qOH9aJRaa1Pfam5VUBmdld8,682
- ipulse_shared_core_ftredge-2.8.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
- ipulse_shared_core_ftredge-2.8.dist-info/METADATA,sha256=yW2Wg5ZsbCk8zfFWWzpKLuz59cU4-xqsZDqchSNVISY,395
- ipulse_shared_core_ftredge-2.8.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- ipulse_shared_core_ftredge-2.8.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
- ipulse_shared_core_ftredge-2.8.dist-info/RECORD,,