ipulse-shared-core-ftredge 2.6-py3-none-any.whl → 2.6.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge has been flagged as possibly problematic.

Files changed (36)
  1. ipulse_shared_core_ftredge/__init__.py +21 -4
  2. ipulse_shared_core_ftredge/enums/__init__.py +32 -0
  3. ipulse_shared_core_ftredge/enums/enums_cloud.py +17 -0
  4. ipulse_shared_core_ftredge/enums/enums_common_utils.py +98 -0
  5. ipulse_shared_core_ftredge/enums/enums_data_eng.py +109 -0
  6. ipulse_shared_core_ftredge/enums/enums_logs.py +79 -0
  7. ipulse_shared_core_ftredge/enums/enums_module_fincore.py +58 -0
  8. ipulse_shared_core_ftredge/enums/enums_modules.py +25 -0
  9. ipulse_shared_core_ftredge/{models → enums}/pulse_enums.py +10 -46
  10. ipulse_shared_core_ftredge/models/__init__.py +0 -1
  11. ipulse_shared_core_ftredge/models/organisation.py +61 -55
  12. ipulse_shared_core_ftredge/models/resource_catalog_item.py +97 -171
  13. ipulse_shared_core_ftredge/models/user_profile.py +10 -9
  14. ipulse_shared_core_ftredge/models/user_profile_update.py +32 -14
  15. ipulse_shared_core_ftredge/models/user_status.py +21 -11
  16. ipulse_shared_core_ftredge/utils/__init__.py +19 -0
  17. ipulse_shared_core_ftredge/utils/logs/__init__.py +2 -0
  18. ipulse_shared_core_ftredge/{models → utils/logs}/audit_log_firestore.py +1 -1
  19. ipulse_shared_core_ftredge/utils/logs/context_log.py +211 -0
  20. ipulse_shared_core_ftredge/utils/logs/get_logger.py +76 -0
  21. ipulse_shared_core_ftredge/utils/utils_cloud.py +44 -0
  22. ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +311 -0
  23. ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +169 -0
  24. ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +26 -0
  25. ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +356 -0
  26. ipulse_shared_core_ftredge/utils/utils_common.py +145 -0
  27. ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +151 -0
  28. ipulse_shared_core_ftredge-2.6.1.dist-info/METADATA +14 -0
  29. ipulse_shared_core_ftredge-2.6.1.dist-info/RECORD +33 -0
  30. {ipulse_shared_core_ftredge-2.6.dist-info → ipulse_shared_core_ftredge-2.6.1.dist-info}/WHEEL +1 -1
  31. ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  32. ipulse_shared_core_ftredge/tests/test.py +0 -17
  33. ipulse_shared_core_ftredge-2.6.dist-info/METADATA +0 -11
  34. ipulse_shared_core_ftredge-2.6.dist-info/RECORD +0 -17
  35. {ipulse_shared_core_ftredge-2.6.dist-info → ipulse_shared_core_ftredge-2.6.1.dist-info}/LICENCE +0 -0
  36. {ipulse_shared_core_ftredge-2.6.dist-info → ipulse_shared_core_ftredge-2.6.1.dist-info}/top_level.txt +0 -0

ipulse_shared_core_ftredge/utils/utils_common.py
@@ -0,0 +1,145 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=missing-class-docstring
+ # pylint: disable=broad-exception-caught
+
+ import os
+ import json
+
+ def log_error(message, logger=None, print_out=False, exc_info=False):
+     if logger:
+         logger.error(message, exc_info=exc_info)
+     elif print_out:
+         print(message)
+
+ def log_warning(message, logger=None, print_out=False):
+     if logger:
+         logger.warning(message)
+     elif print_out:
+         print(message)
+
+ def log_info(message, logger=None, print_out=False):
+     if logger:
+         logger.info(message)
+     elif print_out:
+         print(message)
+
+
+ def save_json_locally_extended(data:dict | list | str, file_name:str, local_path:str, file_exists_if_starts_with_prefix:str=None, overwrite_if_exists:bool=False, increment_if_exists:bool=False,
+                                max_deletable_files:int=1, logger=None, print_out=False):
+
+     """Saves data to a local JSON file.
+
+     Args:
+         data (dict | list | str): The data to save.
+         file_name (str): The desired file name.
+         local_path (str): The directory where the file should be saved.
+         file_exists_if_starts_with_prefix (str, optional): If provided, used to check for
+             existing files with the given prefix. Defaults to None.
+         overwrite_if_exists (bool, optional): If True, overwrites existing files. Defaults to False.
+         increment_if_exists (bool, optional): If True, increments the file name if it exists.
+             Defaults to False.
+         max_deletable_files (int, optional): Maximum number of files to delete when overwriting.
+             Defaults to 1.
+
+     Returns:
+         dict: Metadata about the save operation, including:
+             - local_path (str): The full path to the saved file (or None if not saved).
+             - local_file_already_exists (bool): True if a file with the same name or prefix existed.
+             - local_file_overwritten (bool): True if an existing file was overwritten.
+             - local_deleted_file_names (str): A comma-separated string of deleted file names (or None).
+             - local_file_saved_with_increment (bool): True if the file was saved with an incremented name.
+             - local_save_error (str): The error message if saving failed (or None).
+     """
+
+     local_save_error=None
+     # Input validation
+     if overwrite_if_exists and increment_if_exists:
+         msg = "Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
+         log_error(msg, logger=logger, print_out=print_out)
+         return {"local_save_error" : msg}
+     if max_deletable_files > 10:
+         msg = "max_deletable_files should be less than 10 for safety. For more, use another method."
+         log_error(msg, logger=logger, print_out=print_out)
+         return {"local_save_error" : msg}
+
+
+     # Prepare data
+     if isinstance(data, (list, dict)):
+         data_str = json.dumps(data, indent=2)
+     else:
+         data_str = data
+
+
+     # Extract directory from file_name if present
+     directory = os.path.dirname(file_name)
+     full_local_path = os.path.join(local_path, directory)
+
+     # Create the full directory path if it doesn't exist
+     os.makedirs(full_local_path, exist_ok=True)
+
+     # Now use the full file path including the subdirectories
+     base_file_name, ext = os.path.splitext(os.path.basename(file_name))
+     increment = 0
+     full_file_path = os.path.join(full_local_path, f"{base_file_name}{ext}")
+
+     # Metadata
+     local_path = None
+     local_file_already_exists = False
+     local_file_overwritten = False
+     local_file_saved_with_increment = False
+     local_deleted_file_names = None
+     local_save_error = None
+
+     try:
+         # --- Overwrite Logic ---
+         if overwrite_if_exists:
+             if file_exists_if_starts_with_prefix:
+                 files_to_delete = [
+                     f for f in os.listdir(full_local_path)
+                     if f.startswith(file_exists_if_starts_with_prefix)
+                 ]
+                 if len(files_to_delete) > max_deletable_files:
+                     raise ValueError(
+                         f"Error: Attempt to delete {len(files_to_delete)} matched files, but limit is {max_deletable_files}."
+                     )
+                 if files_to_delete:
+                     local_file_already_exists = True
+                     for file in files_to_delete:
+                         os.remove(os.path.join(full_local_path, file))
+                     local_file_overwritten = True
+                     local_deleted_file_names = ",,,".join(files_to_delete)
+             elif os.path.exists(full_file_path):
+                 local_file_already_exists = True
+                 os.remove(full_file_path)
+                 local_file_overwritten = True
+
+         # --- Increment Logic ---
+         elif increment_if_exists:
+             while os.path.exists(full_file_path):
+                 local_file_already_exists = True
+                 increment += 1
+                 file_name = f"{base_file_name}_v{increment}{ext}"
+                 full_file_path = os.path.join(full_local_path, file_name)
+                 local_file_saved_with_increment = True
+
+         # --- Save the File ---
+         with open(full_file_path, "w", encoding="utf-8") as f:
+             f.write(data_str)
+         local_path = full_file_path
+
+     except Exception as e:
+         local_save_error = f"Error saving file to local path: {full_file_path} : {type(e).__name__}-{str(e)}"
+         log_error(local_save_error, logger=logger, print_out=print_out)
+         return {"local_save_error" : local_save_error}
+
+     return {
+         "local_path": local_path,
+         "local_file_already_exists": local_file_already_exists,
+         "local_file_overwritten": local_file_overwritten,
+         "local_deleted_file_names": local_deleted_file_names,
+         "local_file_saved_with_increment": local_file_saved_with_increment,
+         "local_save_error": local_save_error,
+     }
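
For orientation, a minimal usage sketch of the new save_json_locally_extended helper follows. It is not part of the published diff; the payload, output directory, and file name are hypothetical, and the 2.6.1 wheel is assumed to be installed.

# Hypothetical usage of save_json_locally_extended (sketch, not from the package)
import logging
from ipulse_shared_core_ftredge.utils.utils_common import save_json_locally_extended

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

result = save_json_locally_extended(
    data={"ticker": "ABC", "close": 101.5},    # hypothetical payload
    file_name="quotes/abc_snapshot.json",      # subdirectory "quotes" is created if missing
    local_path="/tmp/ipulse_demo",             # hypothetical output directory
    increment_if_exists=True,                  # collisions are saved as abc_snapshot_v1.json, _v2.json, ...
    logger=logger,
)
print(result["local_path"], result["local_file_saved_with_increment"])

Note that overwrite_if_exists and increment_if_exists are mutually exclusive, and prefix-based overwrites refuse to delete more than max_deletable_files matching files.
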

ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py
@@ -0,0 +1,151 @@
+ # pylint: disable=missing-module-docstring
+ # pylint: disable=missing-function-docstring
+ # pylint: disable=logging-fstring-interpolation
+ # pylint: disable=line-too-long
+ # pylint: disable=broad-exception-caught
+
+ import datetime
+ from ipulse_shared_core_ftredge.enums.enums_logs import LogLevel
+ from ipulse_shared_core_ftredge.utils.logs.context_log import ContextLog
+
+
+ def check_format_against_schema_template(data_to_check, schema, dt_ts_to_str=True, check_max_length=True):
+     """Ensure Update dict corresponds to the config schema, ensuring proper formats and lengths."""
+     checked_data = {}
+     warnings_or_error = []  # Group warnings and errors for a given run
+
+     try:
+         # Process updates to conform to the schema
+         for field in schema:
+             field_name = field["name"]
+             field_type = field["type"]
+             mode = field["mode"]
+
+             # Initialize notice to None at the start of each field processing
+             warning = None
+
+             if field_name in data_to_check:
+                 value = data_to_check[field_name]
+                 if value is None:
+                     if mode == "REQUIRED":
+                         warnings_or_error.append(ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                                             subject=field_name,
+                                                             description=f"Required field '{field_name}' is missing in the updates."))
+                     continue
+                 else:
+                     # Handle date and timestamp formatting
+                     if field_type == "DATE":
+                         value, warning = handle_date_fields(field_name, value, dt_ts_to_str)
+                     elif field_type == "TIMESTAMP":
+                         value, warning = handle_timestamp_fields(field_name, value, dt_ts_to_str)
+                     elif field_type in ["STRING", "INT64", "FLOAT64", "BOOL"]:
+                         value, warning = handle_type_conversion(field_type, field_name, value)
+
+                     if warning:
+                         warnings_or_error.append(warning)
+
+                     # Check and handle max length restriction
+                     if check_max_length and "max_length" in field:
+                         value, warning = check_and_truncate_length(field_name, value, field["max_length"])
+                         if warning:
+                             warnings_or_error.append(warning)
+
+                     # Only add to the dictionary if value is not None or the field is required
+                     checked_data[field_name] = value
+
+             elif mode == "REQUIRED":
+                 warnings_or_error.append(ContextLog(level=LogLevel.WARNING,
+                                                     subject=field_name,
+                                                     description=f"Required field '{field_name}' is missing in the updates."))
+
+     except Exception as e:
+         warnings_or_error.append(ContextLog(level=LogLevel.ERROR_EXCEPTION,
+                                             e=e,
+                                             subject=data_to_check,
+                                             description=f"An error occurred during update check: {str(e)}"))
+
+     return checked_data, warnings_or_error
+
+
+
+ def handle_date_fields(field_name, value, dt_ts_to_str):
+     """Handles date fields, ensuring they are in the correct format and optionally converts them to string."""
+     if isinstance(value, datetime.date):
+         if dt_ts_to_str:
+             return value.strftime("%Y-%m-%d"), None
+         return value, None
+     elif isinstance(value, str):
+         try:
+             parsed_date = datetime.datetime.strptime(value, "%Y-%m-%d").date()
+             if dt_ts_to_str:
+                 return value, None
+             return parsed_date, None
+         except ValueError:
+             return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                     subject=field_name,
+                                     description=f"Expected a DATE in YYYY-MM-DD format but got {value}.")
+     else:
+         return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                 subject=field_name,
+                                 description=f"Expected a DATE or YYYY-MM-DD str format but got {value} of type {type(value).__name__}.")
+
+
+ def handle_timestamp_fields(field_name, value, dt_ts_to_str):
+     """Handles timestamp fields, ensuring they are in the correct format and optionally converts them to ISO format string."""
+     if isinstance(value, datetime.datetime):
+         if dt_ts_to_str:
+             return value.isoformat(), None
+         return value, None
+     elif isinstance(value, str):
+         try:
+             parsed_datetime = datetime.datetime.fromisoformat(value)
+             if dt_ts_to_str:
+                 return value, None
+             return parsed_datetime, None
+         except ValueError:
+             return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                     subject=field_name,
+                                     description=f"Expected ISO format TIMESTAMP but got {value}.")
+     else:
+         return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                 subject=field_name,
+                                 description=f"Expected ISO format TIMESTAMP but got {value} of type {type(value).__name__}.")
+
+
+ def check_and_truncate_length(field_name, value, max_length):
+     """Checks and truncates the length of string fields if they exceed the max length."""
+     if isinstance(value, str) and len(value) > max_length:
+         return value[:max_length], ContextLog(level=LogLevel.WARNING_FIX_RECOMMENDED,
+                                               subject=field_name,
+                                               description=f"Field exceeds max length: {len(value)}/{max_length}. Truncating.")
+
+     return value, None
+
+
+
+ def handle_type_conversion(field_type, field_name, value):
+     if field_type == "STRING" and not isinstance(value, str):
+         return str(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
+                                       subject=field_name,
+                                       description=f"Expected STRING but got {value} of type {type(value).__name__}.")
+
+     if field_type == "INT64" and not isinstance(value, int):
+         try:
+             return int(value), None
+         except ValueError:
+             return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                     subject=field_name,
+                                     description=f"Expected INTEGER, but got {value} of type {type(value).__name__}.")
+     if field_type == "FLOAT64" and not isinstance(value, float):
+         try:
+             return float(value), None
+         except ValueError:
+             return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
+                                     subject=field_name,
+                                     description=f"Expected FLOAT, but got {value} of type {type(value).__name__}.")
+     if field_type == "BOOL" and not isinstance(value, bool):
+         return bool(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
+                                        subject=field_name,
+                                        description=f"Expected BOOL, but got {value}. Converting as {bool(value)}.")
+
+     return value, None
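
Likewise, a minimal sketch of how check_format_against_schema_template might be called. It is not part of the diff; the schema template and record below are hypothetical, using the BigQuery-style field types handled above.

# Hypothetical usage of check_format_against_schema_template (sketch, not from the package)
from ipulse_shared_core_ftredge.utils.utils_templates_and_schemas import check_format_against_schema_template

schema = [  # hypothetical schema template
    {"name": "ticker", "type": "STRING", "mode": "REQUIRED", "max_length": 8},
    {"name": "close_price", "type": "FLOAT64", "mode": "REQUIRED"},
    {"name": "trade_date", "type": "DATE", "mode": "REQUIRED"},
]
record = {"ticker": "VERYLONGTICKER", "close_price": "101.5", "trade_date": "2024-07-01"}

checked, issues = check_format_against_schema_template(record, schema)
# checked: ticker truncated to 8 chars, close_price coerced to float, trade_date kept as a string
print(checked)
print(f"{len(issues)} warning(s)/error(s) collected as ContextLog entries")

As the code above shows, bad values are collected as ContextLog entries rather than raised, so the caller receives the coerced record plus the issue list and can decide whether to persist or reject the update.
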

ipulse_shared_core_ftredge-2.6.1.dist-info/METADATA
@@ -0,0 +1,14 @@
+ Metadata-Version: 2.1
+ Name: ipulse_shared_core_ftredge
+ Version: 2.6.1
+ Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
+ Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
+ Author: Russlan Ramdowar
+ License-File: LICENCE
+ Requires-Dist: pydantic[email]~=2.5
+ Requires-Dist: python-dateutil~=2.8
+ Requires-Dist: pytest~=7.1
+ Requires-Dist: google-cloud-logging~=3.10.0
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
+ Requires-Dist: google-cloud-bigquery~=3.24.0
+

ipulse_shared_core_ftredge-2.6.1.dist-info/RECORD
@@ -0,0 +1,33 @@
+ ipulse_shared_core_ftredge/__init__.py,sha256=7yQzgsmIy7SlAwxTxUBIor_OuFfvoCMBBuonDXjMCC4,1188
+ ipulse_shared_core_ftredge/enums/__init__.py,sha256=KWDgmon4URp9PpRw4HCwiviggWXzTo-icTuaOWKjrcI,1034
+ ipulse_shared_core_ftredge/enums/enums_cloud.py,sha256=svlVyTwOr4C9XuMBSu3_B7nBEyJUT1rnEJaLvt53NjU,375
+ ipulse_shared_core_ftredge/enums/enums_common_utils.py,sha256=AtLcAq5_NmVzCmX4WEYw6T0-O3UQQjdyu7XZUizC8Y8,3476
+ ipulse_shared_core_ftredge/enums/enums_data_eng.py,sha256=U6SBw3CjoNt29bMOH-aw0yxmWldcwwouT1Hp9x1gSGI,4080
+ ipulse_shared_core_ftredge/enums/enums_logs.py,sha256=WR6i9T7EmBAEN_Q4nXe435IHPxoAoPnbVm4joqLTwBs,3881
+ ipulse_shared_core_ftredge/enums/enums_module_fincore.py,sha256=W1TkSLu3ryLf_aif2VcKsFznWz0igeMUR_buoGEG6w8,1406
+ ipulse_shared_core_ftredge/enums/enums_modules.py,sha256=I-EfnpB7WMPXEQMK18vCO711uq84pt8b3ycSVB7oLcU,548
+ ipulse_shared_core_ftredge/enums/pulse_enums.py,sha256=0RjIJbK0pt1Mzo4k_lhhxZL8myEUergQwOY9JOLZIJ4,4716
+ ipulse_shared_core_ftredge/models/__init__.py,sha256=MeGH2ZBxkrwldUiWyUaI_TMyfq78tuSwRkN_mEfKD8U,161
+ ipulse_shared_core_ftredge/models/organisation.py,sha256=22esRGYuJmKN3papkgozleEmDNJrVwUgIzKp7annvWs,3280
+ ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=mEGX8AftzrhEHqFVXjr62CuRnXC1vK4z3bHl_XBJodU,4964
+ ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
+ ipulse_shared_core_ftredge/models/user_profile.py,sha256=D3BB9D6XEv7IVZgsURgf0hWmUZW5rms3uiBXS0ZGLeE,1927
+ ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=oKK0XsQDKkgDvjFPhX2XlqEqlKLBQ4AkvPHXEuZbFMY,1712
+ ipulse_shared_core_ftredge/models/user_status.py,sha256=8TyRd8tBK9_xb0MPKbI5pn9-lX7ovKbeiuWYYPtIOiw,3202
+ ipulse_shared_core_ftredge/utils/__init__.py,sha256=hGi4WtdFSUxI_LQBsRUADS2-FmNifM9BSMTH9KhEmpg,851
+ ipulse_shared_core_ftredge/utils/utils_cloud.py,sha256=eWMwr1ZBbUy3TdUSnO2-MGiUeq5RIoG1p_jZJ3f6LOw,2386
+ ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py,sha256=8HvizgqXaRjLCvIfsxtfC6SnO7KY7ONScB0_1FqumbE,14162
+ ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py,sha256=hiesZhK-UMHMAy9QZZXAHKItLY30EqS-RDeUdEz-uaY,11251
+ ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py,sha256=5siDvpgHJxfdykS1bjPS5sqtn5SYYPLV4ZYSoniSd0E,1678
+ ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py,sha256=QTZA0UIXAbYEuwUuYrP4QfELAFEg36ef1iJbAad1nxM,15017
+ ipulse_shared_core_ftredge/utils/utils_common.py,sha256=n9TgX1RNVWlXA5AJjtkvVQqL_B2tr0NOZgPLKivyUZ4,6044
+ ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py,sha256=6mtuaGui5y8ECA87eCiaptUi6o-7m_vqaqni7mIxIQU,7512
+ ipulse_shared_core_ftredge/utils/logs/__init__.py,sha256=fY6UNr197HYwUDi7uj5fsXw1Ma5gyAFzALxnoDSHFG0,71
+ ipulse_shared_core_ftredge/utils/logs/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
+ ipulse_shared_core_ftredge/utils/logs/context_log.py,sha256=nsUfVmC_DP5QjQuSlBRalqpaaktxedwmx834h4_ZhYA,8673
+ ipulse_shared_core_ftredge/utils/logs/get_logger.py,sha256=RPSdqT1O8MbfwAqdIdH8P9fOn39AmDLW8N7sCU2EAbM,3255
+ ipulse_shared_core_ftredge-2.6.1.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ ipulse_shared_core_ftredge-2.6.1.dist-info/METADATA,sha256=C6xUGmUY19PDLy0o8nz8pfQ5DCGTQ-1fxrqjYvt3aNU,556
+ ipulse_shared_core_ftredge-2.6.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ ipulse_shared_core_ftredge-2.6.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ ipulse_shared_core_ftredge-2.6.1.dist-info/RECORD,,

{ipulse_shared_core_ftredge-2.6.dist-info → ipulse_shared_core_ftredge-2.6.1.dist-info}/WHEEL
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.42.0)
+ Generator: setuptools (72.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 

ipulse_shared_core_ftredge/tests/__init__.py (file without changes)

ipulse_shared_core_ftredge/tests/test.py
@@ -1,17 +0,0 @@
- from ipulse_shared_core_ftredge import UserProfile, UserAuth
- import datetime
- import logging
- logging.basicConfig(level=logging.INFO)
- ex=UserProfile(uid="uid",
-                organizations_uids={"20231220retailcustomer_coreorgn"},
-                email="email@gmail.com",
-                creat_date= datetime.datetime.now(datetime.UTC),
-                creat_by_user='creat_by_user',
-                updt_date=datetime.datetime.now(datetime.UTC),
-                updt_by_user="subscriber_cf_persistUserAuthToUserProfile",
-                approved=True,
-                provider_id='provider_id',
-                username='username')
-
-
- logging.info(ex.model_dump(exclude_unset=True))

ipulse_shared_core_ftredge-2.6.dist-info/METADATA
@@ -1,11 +0,0 @@
- Metadata-Version: 2.1
- Name: ipulse_shared_core_ftredge
- Version: 2.6
- Summary: Shared models for the Pulse platform project. Using AI for financial advisory and investment management.
- Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
- Author: Russlan Ramdowar
- License-File: LICENCE
- Requires-Dist: pydantic[email] ~=2.5
- Requires-Dist: python-dateutil ~=2.8
- Requires-Dist: pytest ~=7.1
-

ipulse_shared_core_ftredge-2.6.dist-info/RECORD
@@ -1,17 +0,0 @@
- ipulse_shared_core_ftredge/__init__.py,sha256=gZ2QIubXOdzhGe4Jd3Vwx8NrULkn4Is6uVZ2ThEvrpE,104
- ipulse_shared_core_ftredge/models/__init__.py,sha256=gE22Gzhil0RYQa7YLtdtT44_AsWqklcDfRtgLAQc1dI,200
- ipulse_shared_core_ftredge/models/audit_log_firestore.py,sha256=jjl9lq-3J0eKXZAdEco8vNVQBLLZq-EH6V7NSubsBWw,270
- ipulse_shared_core_ftredge/models/organisation.py,sha256=4f1ATEWh5WT-CDJBLEZUhUwyl3V06ogRkteZAqW_nko,2953
- ipulse_shared_core_ftredge/models/pulse_enums.py,sha256=PDt1Xhjr885AW_GRfg7Y-zkdVrulNS0O6gWK1xjYOMI,5490
- ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=PxeRvI8fe8KOiHr6NW2Jz_yocyLId09PW8QyTZxjHAA,9809
- ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
- ipulse_shared_core_ftredge/models/user_profile.py,sha256=zP4J-bE3oXzgXzxcHlK5Bz-RpgUppIYmP-U8e2lfHe4,1993
- ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=e2jO24JRcnUYJvQWOARbtCW-23WPcGbsWDYw0Iow6mQ,572
- ipulse_shared_core_ftredge/models/user_status.py,sha256=S4ZAilJQ4P931gf6u20UAKOxcGTkkMOTiSVnLzvNd8k,2728
- ipulse_shared_core_ftredge/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ipulse_shared_core_ftredge/tests/test.py,sha256=0lS8HP5Quo_BqNoscU40qOH9aJRaa1Pfam5VUBmdld8,682
- ipulse_shared_core_ftredge-2.6.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
- ipulse_shared_core_ftredge-2.6.dist-info/METADATA,sha256=hLQSigj32Ug-4LUyB2JtBxZHJf7DqgNWhZeU-Gv5ey0,395
- ipulse_shared_core_ftredge-2.6.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
- ipulse_shared_core_ftredge-2.6.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
- ipulse_shared_core_ftredge-2.6.dist-info/RECORD,,