ipulse-shared-core-ftredge 2.38.tar.gz → 2.50.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- {ipulse_shared_core_ftredge-2.38/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.50}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/setup.py +2 -1
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/__init__.py +4 -5
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py +20 -9
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +18 -19
- ipulse_shared_core_ftredge-2.50/src/ipulse_shared_core_ftredge/utils_common.py +415 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/utils_gcp.py +107 -30
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py +39 -43
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +1 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/README.md +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/pyproject.toml +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/setup.cfg +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/enums/__init__.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/enums/enums_modules.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/audit_log_firestore.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/pulse_enums.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/tests/__init__.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge/tests/test.py +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
- {ipulse_shared_core_ftredge-2.38 → ipulse_shared_core_ftredge-2.50}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.38
+Version: 2.50
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar

--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,9 @@
+# pylint: disable=import-error
 from setuptools import setup, find_packages
 
 setup(
     name='ipulse_shared_core_ftredge',
-    version='2.38',
+    version='2.50',
     package_dir={'': 'src'},  # Specify the source directory
     packages=find_packages(where='src'),  # Look for packages in 'src'
     install_requires=[

--- a/src/ipulse_shared_core_ftredge/__init__.py
+++ b/src/ipulse_shared_core_ftredge/__init__.py
@@ -2,15 +2,14 @@ from .models import (Organisation, UserAuth, UserProfile,
                      UserStatus, UserProfileUpdate, pulse_enums)
 from .utils_gcp import (setup_gcp_logger_and_error_report,
                         read_csv_from_gcs, read_json_from_gcs,
-                        write_csv_to_gcs,
+                        write_csv_to_gcs, write_data_to_gcs)
 from .utils_templates_and_schemas import (create_bigquery_schema_from_json,
-                                          update_check_with_schema_template
-
+                                          update_check_with_schema_template)
+from .utils_common import (Notice, NoticeSeverity, NoticesManager,SuccessLog, SuccessLogManager)
+
 from .enums import (NoticeSeverity, Unit, Frequency,
                     Module, SubModule, BaseDataCategory,
                     FinCoreCategory, FincCoreSubCategory,
                     FinCoreRecordsCategory, ExchangeOrPublisher,
                     SourcingPipelineType, SourcingTriggerType,
                     DWEvent, DWEventTriggerType)
-
-

--- a/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py
+++ b/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py
@@ -17,20 +17,31 @@ class NoticeSeverity(Enum):
 
     # Warnings indicate potential issues that might require attention:
     WARNING_NO_ACTION = 401 # Minor issue or Unexpected Behavior, no immediate action required (can be logged frequently)
-
-
+    WARNING_REVIEW_RECOMMENDED = 402 # Action recommended to prevent potential future issues
+    WARNING_FIX_RECOMMENDED = 403 # Action recommended to prevent potential future issues
+    WARNING_FIX_REQUIRED = 404 # Action required, pipeline can likely continue
 
     # Errors indicate a problem that disrupts normal pipeline execution:
-
-
-
-
-
+    ERROR_EXCEPTION_REDO = 502
+    ERROR_CUSTOM_REDO = 503 # Temporary error, automatic retry likely to succeed
+
+
+    ERROR_EXCEPTION_INVESTIGATE = 601 # Exception occured after some data was likely persisted (e.g., to GCS or BQ)
+    ERROR_CUSTOM_INVESTIGATE= 602
+    ERROR_EXCEPTION_PERSTISTANCE = 603 # Exception occured after data was persisted (e.g., to GCS or BQ)
+    ERROR_CUSTOM_PERSTISTANCE = 604
 
     # Critical errors indicate severe failures requiring immediate attention:
-    CRITICAL_SYSTEM_FAILURE =
-    CRITICAL_PIPELINE_FAILURE =
+    CRITICAL_SYSTEM_FAILURE = 701 # System-level failure (e.g., infrastructure), requires immediate action
+    CRITICAL_PIPELINE_FAILURE = 702 # Complete pipeline failure, requires investigation and potential rollback
+
+    UNKNOWN=1001 # Unknown error, should not be used in normal operation
 
+    ### Exception during full exection, partially saved
+    # Exception during ensemble pipeline; modifications collected in local object , nothing persisted
+    # Exception during ensemble pipeline; modifications persisted , metadata failed
+    # Exception during ensemble pipeline; modifications persisted , metadata persisted
+    # Exception during ensemble pipeline; modifications persisted , metadata persisted
 
 
 class Unit(Enum):

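The new severity codes are banded by required action: 401-404 warnings, 502-503 retryable errors, 601-604 errors needing investigation or persistence checks, 701-702 critical failures, and 1001 for unknown. A minimal sketch of branching on these bands, assuming the numeric banding shown above (the 500 threshold mirrors NoticesManager.ERROR_CODE_START_VALUE in the new utils_common.py below):

    from ipulse_shared_core_ftredge import NoticeSeverity

    def is_error(severity: NoticeSeverity) -> bool:
        # Warnings sit in the 4xx band; codes of 500 and above count as errors.
        return severity.value >= 500

    assert not is_error(NoticeSeverity.WARNING_FIX_REQUIRED)   # 404
    assert is_error(NoticeSeverity.ERROR_CUSTOM_REDO)          # 503
    assert is_error(NoticeSeverity.CRITICAL_PIPELINE_FAILURE)  # 702
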
--- a/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py
+++ b/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py
@@ -20,25 +20,24 @@ class SourcingPipelineType(Enum):
     CLOUD_GET_API_INMEMORY = "cloud_get_api_inmemory"
 
 class DWEventTriggerType(Enum):
-
+    GCS_UPLOAD_TRIGGER_CF = "gcs_upload_trigger_cf"
+    HTTP_TRIGGER_CF_FOR_GCS_FILE = "http_trigger_cf_for_gcs_file"
+    PUBSUB_TRIGGER_CF_FOR_GCS_FILE = "pubsub_trigger_cf_for_gcs_file"
+    LOCAL_SCRIPT_FOR_GCS_FILE = "local_script_for_gcs_file"
     INSIDE_SOURCING_FUNCTION = "inside_sourcing_function"
-    HTTP_FUNC_TO_GCS = "http_func_to_gcs"
-    LOCAL_FROM_GCS_FILE = "local_from_gcs_file"
-    MANUAL_FROM_LOCAL_FILE = "manual_from_local_file"
-    PUBSUBC_TOPIC = "pubsubc_topic"
 
 class DWEvent(Enum):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    INSERT_NOREPLACE_1O_NT = "insert_noreplace_1o_nt"
+    MERGE_NOREPLACE_NO_1T = "merge_noreplace_no_1t"
+    MERGE_NOREPLACE_NO_NT = "merge_noreplace_no_nt"
+    INSERT_NOREPLACE_1O_1T = "insert_noreplace_1o_1t"
+    MERGE_NOREPLACE_1O_NT = "merge_noreplace_1o_nt"
+    INSERT_REPLACE_1O_1T = "insert_replace_1o_1t"
+    INSERT_REPLACE_1O_NT = "insert_replace_1o_nt"
+    MERGE_REPLACE_NO_NT = "merge_replace_no_nt"
+    MERGE_REPLACE_1O_NT = "merge_replace_1o_nt"
+    MERGE_REPLACE_NO_1T = "merge_replace_no_1t"
+    DELETE_1O_1T = "delete_1o_1t"
+    DELETE_1O_NT = "delete_1o_nt"
+    DELETE_NO_1T = "delete_no_1t"
+    DELETE_NO_NT = "delete_no_nt"

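Since DWEvent and DWEventTriggerType are plain value-backed enums, their string values round-trip, which matters when events are read back from configs or file names. A small sketch, assuming the package exports shown in __init__.py above:

    from ipulse_shared_core_ftredge import DWEvent, DWEventTriggerType

    event = DWEvent("merge_replace_no_nt")   # lookup by stored string value
    assert event is DWEvent.MERGE_REPLACE_NO_NT

    trigger = DWEventTriggerType("gcs_upload_trigger_cf")
    print(trigger.name, "->", trigger.value)
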
--- /dev/null
+++ b/src/ipulse_shared_core_ftredge/utils_common.py
@@ -0,0 +1,415 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+import traceback
+import json
+import os
+import time
+from datetime import datetime, timezone
+from contextlib import contextmanager
+from typing import List
+from ipulse_shared_core_ftredge.enums.enums_common_utils import NoticeSeverity
+from ipulse_shared_core_ftredge.utils_gcp import write_data_to_gcs
+
+def create_notice(severity, e=None, e_type=None, e_message=None, e_traceback=None, subject=None, message=None,context=None):
+    # Validate input: ensure severity is provided, use a default if not
+    if severity is None:
+        severity = NoticeSeverity.UNKNOWN # Assume Severity.UNKNOWN is a default fallback
+
+    # If an exception object is provided, use it to extract details
+    if e is not None:
+        e_type = type(e).__name__ if e_type is None else e_type
+        e_message = str(e) if e_message is None else e_message
+        e_traceback = traceback.format_exc() if e_traceback is None else e_traceback
+    else:
+        # Calculate traceback if not provided and if exception details are partially present
+        if e_traceback is None and (e_type or e_message):
+            e_traceback = traceback.format_exc()
+
+    # Prepare the base notice dictionary with all fields
+    notice = {
+        "severity_code": severity.value,
+        "severity_name": severity.name,
+        "subject": subject,
+        "message": message,
+        "exception_code": e_type,
+        "exception_message": e_message,
+        "exception_traceback": e_traceback or None, # Ensure field is present even if traceback isn't calculated
+        "context": context or ""
+    }
+    return notice
+
+
+
+
+def merge_notices_dicts(dict1, dict2):
+    """
+    Merge two dictionaries of lists, combining lists for overlapping keys.
+
+    Parameters:
+    dict1 (dict): The first dictionary of lists.
+    dict2 (dict): The second dictionary of lists.
+
+    Returns:
+    dict: A new dictionary with combined lists for overlapping keys.
+    """
+    merged_dict = {}
+
+    # Get all unique keys from both dictionaries
+    all_keys = set(dict1) | set(dict2)
+
+    for key in all_keys:
+        # Combine lists from both dictionaries for each key
+        merged_dict[key] = dict1.get(key, []) + dict2.get(key, [])
+
+    return merged_dict
+
+
+# ["data_import","data_quality", "data_processing","data_general","data_persistance","metadata_quality", "metadata_processing", "metadata_persistance","metadata_general"]
+
+class Notice:
+    def __init__(self, severity: NoticeSeverity, e: Exception = None, e_type: str = None, e_message: str = None, e_traceback: str = None, subject: str = None, message: str = None, context: str = None):
+
+        # If an exception object is provided, use it to extract details
+        if e is not None:
+            e_type = type(e).__name__ if e_type is None else e_type
+            e_message = str(e) if e_message is None else e_message
+            e_traceback = traceback.format_exc() if e_traceback is None else e_traceback
+        # If exception details are provided but not from an exception object
+        elif e_traceback is None and (e_type or e_message):
+            e_traceback = traceback.format_exc()
+
+        self.timestamp = datetime.now(timezone.utc).isoformat()
+        self.severity = severity
+        self.subject = subject
+        self.message = message
+        self.context = context
+        self.exception_type = e_type
+        self.exception_message = e_message
+        self.exception_traceback = e_traceback
+
+    def to_dict(self):
+        return {
+            "context": self.context,
+            "severity_code": self.severity.value,
+            "severity_name": self.severity.name,
+            "subject": self.subject,
+            "message": self.message,
+            "exception_type": self.exception_type,
+            "exception_message": self.exception_message,
+            "exception_traceback": self.exception_traceback,
+        }
+
+class NoticesManager:
+    ERROR_CODE_START_VALUE = 500
+
+    def __init__(self):
+        self.notices = []
+        self.error_count = 0
+        self.severity_counts = {severity.name: 0 for severity in NoticeSeverity}
+        self.context_stack = []
+
+    @contextmanager
+    def notice_context(self, context):
+        self.push_context(context)
+        try:
+            yield
+        finally:
+            self.pop_context()
+
+    def push_context(self, context):
+        self.context_stack.append(context)
+
+    def pop_context(self):
+        if self.context_stack:
+            self.context_stack.pop()
+
+    def get_notices_by_context(self, context_substring: str):
+        return [
+            notice for notice in self.notices
+            if context_substring in notice["context"]
+        ]
+
+    def get_current_context(self):
+        return " >> ".join(self.context_stack)
+
+    def get_all_notices(self):
+        return self.notices
+    def add_notice(self, notice: Notice):
+        notice.context = self.get_current_context()
+        notice_dict = notice.to_dict()
+        self.notices.append(notice_dict)
+        self._update_counts(notice_dict)
+
+    def add_notices(self, notices: List[Notice]):
+        for notice in notices:
+            notice.context = self.get_current_context()
+            notice_dict = notice.to_dict()
+            self.notices.append(notice_dict)
+            self._update_counts(notice_dict)
+
+    def remove_notice(self, notice: Notice):
+        notice_dict = notice.to_dict()
+        if notice_dict in self.notices:
+            self.notices.remove(notice_dict)
+            self._update_counts(notice_dict, remove=True)
+
+    def clear_notices(self):
+        self.notices = []
+        self.error_count = 0
+        self.severity_counts = {severity.name: 0 for severity in NoticeSeverity}
+
+    def contains_errors(self):
+        return self.error_count > 0
+
+    def count_errors(self):
+        return self.error_count
+
+    def count_notices_by_severity(self, severity: NoticeSeverity):
+        return self.severity_counts.get(severity.name, 0)
+
+    def count_errors_for_current_context(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if notice["context"] == current_context and notice["severity_code"] >= self.ERROR_CODE_START_VALUE
+        )
+    def count_all_notices(self):
+        return len(self.notices)
+
+    def count_notices_for_current_context(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if notice["context"] == current_context
+        )
+
+    def count_notices_by_severity_for_current_context(self, severity: NoticeSeverity):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if notice["context"] == current_context and notice["severity_code"] == severity.value
+        )
+    def count_notices_for_current_and_nested_contexts(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if current_context in notice["context"]
+        )
+    def count_errors_for_current_and_nested_contexts(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if current_context in notice["context"] and notice["severity_code"] >= self.ERROR_CODE_START_VALUE
+        )
+    def count_notices_by_severity_for_current_and_nested_contexts(self, severity: NoticeSeverity):
+        current_context = self.get_current_context()
+        return sum(
+            1 for notice in self.notices
+            if current_context in notice["context"] and notice["severity_code"] == severity.value
+        )
+
+    def export_notices_to_gcs_file(self, bucket_name, storage_client, file_name=None, top_level_context=None, save_locally=False, local_path=None, logger=None, max_retries=2):
+        def log_message(message):
+            if logger:
+                logger.info(message)
+
+        def log_error(message, exc_info=False):
+            if logger:
+                logger.error(message, exc_info=exc_info)
+
+        if not file_name:
+            timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
+            if top_level_context:
+                file_name = f"notices_{timestamp}_{top_level_context}_len{len(self.notices)}.json"
+            else:
+                file_name = f"notices_{timestamp}_len{len(self.notices)}.json"
+
+        cloud_path = None # Initialize cloud_path here
+        local_path = None # Initialize local_path here
+        try:
+            cloud_path, local_path = write_data_to_gcs(
+                bucket_name=bucket_name,
+                storage_client=storage_client,
+                data=self.notices,
+                file_name=file_name,
+                save_locally=save_locally,
+                local_path=local_path,
+                logger=logger,
+                max_retries=max_retries
+            )
+            log_message(f"Notices successfully saved to GCS at {cloud_path} and locally at {local_path}.")
+        except Exception as e:
+            log_error(f"Failed to export notices: {type(e).__name__} - {str(e)}", exc_info=True)
+
+        return cloud_path , local_path
+
+    def import_notices_from_json(self, json_or_file, logger=None):
+        def log_message(message):
+            if logger:
+                logger.info(message)
+            else:
+                print(message)
+
+        def log_error(message, exc_info=False):
+            if logger:
+                logger.error(message, exc_info=exc_info)
+            else:
+                print(message)
+        try:
+            if isinstance(json_or_file, str): # Load from string
+                imported_notices = json.loads(json_or_file)
+            elif hasattr(json_or_file, 'read'): # Load from file-like object
+                imported_notices = json.load(json_or_file)
+            self.add_notice(imported_notices)
+            log_message("Successfully imported notices from json.")
+        except Exception as e:
+            log_error(f"Failed to import notices from json: {type(e).__name__} - {str(e)}", exc_info=True)
+
+    def _update_counts(self, notice, remove=False):
+        if remove:
+            if notice["severity_code"] >= self.ERROR_CODE_START_VALUE:
+                self.error_count -= 1
+            self.severity_counts[notice["severity_name"]] -= 1
+        else:
+            if notice["severity_code"] >= self.ERROR_CODE_START_VALUE:
+                self.error_count += 1
+            self.severity_counts[notice["severity_name"]] += 1
+
+
+class SuccessLog:
+    def __init__(self, subject:str, description:str=None, context:str=None):
+        self.context = context
+        self.subject = subject
+        self.timestamp = datetime.now(timezone.utc).isoformat()
+        self.description = description
+
+    def to_dict(self):
+        return {
+            "context": self.context or "",
+            "subject": self.subject,
+            "timestamp": self.timestamp,
+            "description": self.description or ""
+        }
+
+
+class SuccessLogManager:
+    def __init__(self):
+        self.successlogs = []
+        self.context_stack = []
+
+    @contextmanager
+    def successlog_context(self, context):
+        self.push_context(context)
+        try:
+            yield
+        finally:
+            self.pop_context()
+
+    def push_context(self, context):
+        self.context_stack.append(context)
+
+    def pop_context(self):
+        if self.context_stack:
+            self.context_stack.pop()
+
+    def get_current_context(self):
+        return " >> ".join(self.context_stack)
+
+    def get_all_successlogs(self):
+        return self.successlogs
+
+    def add_successlog(self, successlog: SuccessLog):
+        successlog.context = self.get_current_context()
+        successlog_dict = successlog.to_dict()
+        self.successlogs.append(successlog_dict)
+
+    def add_successlogs(self, successlogs: List[SuccessLog]):
+        for successlog in successlogs:
+            successlog.context = self.get_current_context()
+            successlog_dict = successlog.to_dict()
+            self.successlogs.append(successlog_dict)
+
+    def remove_successlog(self, successlog: SuccessLog):
+        successlog_dict = successlog.to_dict()
+        if successlog_dict in self.successlogs:
+            self.successlogs.remove(successlog_dict)
+
+    def clear_successlogs(self):
+        self.successlogs = []
+
+    def count_all_successlogs(self):
+        return len(self.successlogs)
+
+    def count_successlogs_for_current_context(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for successlog in self.successlogs
+            if successlog["context"] == current_context
+        )
+
+    def count_successlogs_for_current_and_nested_contexts(self):
+        current_context = self.get_current_context()
+        return sum(
+            1 for successlog in self.successlogs
+            if current_context in successlog["context"]
+        )
+
+
+    def export_successlogs_to_gcs_file(self, bucket_name, storage_client, file_name=None, top_level_context=None, save_locally=False, local_path=None, logger=None, max_retries=3):
+        def log_message(message):
+            if logger:
+                logger.info(message)
+
+        def log_error(message, exc_info=False):
+            if logger:
+                logger.error(message, exc_info=exc_info)
+
+        if not file_name:
+            timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
+            if top_level_context:
+                file_name = f"successlogs_{timestamp}_{top_level_context}_len{len(self.successlogs)}.json"
+            else:
+                file_name = f"successlogs_{timestamp}_len{len(self.successlogs)}.json"
+
+        cloud_path=None
+        local_path=None
+        try:
+            cloud_path, local_path = write_data_to_gcs(
+                bucket_name=bucket_name,
+                storage_client=storage_client,
+                data=self.successlogs,
+                file_name=file_name,
+                save_locally=save_locally,
+                local_path=local_path,
+                logger=logger,
+                max_retries=max_retries
+            )
+            log_message(f"Success logs successfully saved to GCS at {cloud_path} and locally at {local_path}.")
+        except Exception as e:
+            log_error(f"Failed to export success logs: {type(e).__name__} - {str(e)}", exc_info=True)
+
+        return cloud_path, local_path
+
+    def import_successlogs_from_json(self, json_or_file, logger=None):
+        def log_message(message):
+            if logger:
+                logger.info(message)
+            else:
+                print(message)
+
+        def log_error(message, exc_info=False):
+            if logger:
+                logger.error(message, exc_info=exc_info)
+            else:
+                print(message)
+        try:
+            if isinstance(json_or_file, str): # Load from string
+                imported_success_logs = json.loads(json_or_file)
+            elif hasattr(json_or_file, 'read'): # Load from file-like object
+                imported_success_logs = json.load(json_or_file)
+            self.add_successlog(imported_success_logs)
+            log_message("Successfully imported success logs from json.")
+        except Exception as e:
+            log_error(f"Failed to import success logs from json: {type(e).__name__} - {str(e)}", exc_info=True)

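A hedged usage sketch for the new NoticesManager, based only on the code above (the context names are invented for illustration): notice_context builds a " >> "-joined context path, and severity codes at or above ERROR_CODE_START_VALUE (500) increment the error count.

    from ipulse_shared_core_ftredge import Notice, NoticeSeverity, NoticesManager

    manager = NoticesManager()
    with manager.notice_context("sourcing_pipeline"):        # context: "sourcing_pipeline"
        with manager.notice_context("fetch_prices"):         # "sourcing_pipeline >> fetch_prices"
            try:
                raise ValueError("bad ticker row")
            except ValueError as e:
                manager.add_notice(Notice(severity=NoticeSeverity.ERROR_CUSTOM_REDO,
                                          e=e, subject="prices.csv"))

    assert manager.contains_errors()          # 503 >= ERROR_CODE_START_VALUE (500)
    assert manager.count_all_notices() == 1
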
--- a/src/ipulse_shared_core_ftredge/utils_gcp.py
+++ b/src/ipulse_shared_core_ftredge/utils_gcp.py
@@ -1,8 +1,13 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=missing-class-docstring
 import json
 import csv
 from io import StringIO
 import logging
 import os
+import time
+from datetime import datetime, timezone
 import traceback
 from google.cloud import error_reporting, logging as cloud_logging
 from google.api_core.exceptions import NotFound

@@ -22,7 +27,7 @@ from google.api_core.exceptions import NotFound
 ## TODO Fix the issue with POST 0B Nan.... printed in Cloud Logging , which is referring to posting to Cloud Logging probably.
 ENV = os.getenv('ENV', 'LOCAL').strip("'")
 
-def setup_gcp_logger_and_error_report(logger_name):
+def setup_gcp_logger_and_error_report(logger_name,level=logging.INFO, use_cloud_logging=True):
     """Sets up a logger with Error Reporting and Cloud Logging handlers.
 
     Args:

@@ -52,26 +57,26 @@ def setup_gcp_logger_and_error_report(logger_name):
                 self.handleError(record)
 
     logger = logging.getLogger(logger_name)
-    logger.setLevel(
-
-    # Create Error Reporting handler
-    error_reporting_handler = ErrorReportingHandler()
-
-    # Create Google Cloud Logging handler
-    cloud_logging_client = cloud_logging.Client()
-    cloud_logging_handler = cloud_logging_client.get_default_handler()
-
-    # Add handlers to the logger
-    logger.addHandler(error_reporting_handler)
-    logger.addHandler(cloud_logging_handler)
+    logger.setLevel(level)
 
     # Add a console handler for local development
-    if ENV == "LOCAL":
+    if ENV == "LOCAL" or not use_cloud_logging:
         formatter = logging.Formatter('%(levelname)s : %(name)s : %(asctime)s : %(message)s')
         console_handler = logging.StreamHandler()
         console_handler.setFormatter(formatter)
         logger.addHandler(console_handler)
 
+    if use_cloud_logging:
+        # Create Error Reporting handler
+        error_reporting_handler = ErrorReportingHandler()
+
+        # Create Google Cloud Logging handler
+        cloud_logging_client = cloud_logging.Client()
+        cloud_logging_handler = cloud_logging_client.get_default_handler()
+
+        # Add handlers to the logger
+        logger.addHandler(error_reporting_handler)
+        logger.addHandler(cloud_logging_handler)
     return logger
 ############################################################################
 

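With the new level and use_cloud_logging parameters, callers can get a console-only logger for local runs or tests without constructing Cloud Logging or Error Reporting clients. A minimal sketch of the new call shape:

    import logging
    from ipulse_shared_core_ftredge import setup_gcp_logger_and_error_report

    # Console handler only; no GCP clients are created.
    logger = setup_gcp_logger_and_error_report("my_pipeline",
                                               level=logging.DEBUG,
                                               use_cloud_logging=False)
    logger.debug("local debug run")
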
@@ -116,21 +121,93 @@ def read_csv_from_gcs(bucket_name, file_name, storage_client, logger):
         logger.error(f"An unexpected error occurred: {e}", exc_info=True)
         return None
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+def write_data_to_gcs(bucket_name, storage_client, data, file_name=None,
+                      save_locally=False, local_path=None, logger=None, max_retries=3):
+    """Saves data to Google Cloud Storage and optionally locally.
+
+    This function attempts to upload data to GCS. If the upload fails after
+    retries and `save_locally` is True or `local_path` is provided, it attempts
+    to save the data locally.
+
+    Args:
+        bucket_name (str): Name of the GCS bucket.
+        storage_client (google.cloud.storage.Client): GCS client object.
+        data (list, dict, or str): Data to be saved.
+        file_name (str, optional): File name for GCS and local. Defaults to None.
+        save_locally (bool, optional): Save locally if GCS fails. Defaults to False.
+        local_path (str, optional): Local directory to save. Defaults to None.
+        logger (logging.Logger, optional): Logger for messages. Defaults to None.
+        max_retries (int, optional): Number of GCS upload retries. Defaults to 3.
+
+    Returns:
+        tuple: A tuple containing the GCS path (or None if upload failed) and
+               the local path (or None if not saved locally).
+
+    Raises:
+        ValueError: If data is not a list, dict, or str.
+        Exception: If GCS upload fails after retries and local saving fails or
+                   is not requested. If GCS upload fails after retries and
+                   local saving is requested but unsuccessful.
+    """
+
+    def log_message(message):
+        if logger:
+            logger.info(message)
+
+    def log_error(message, exc_info=False):
+        if logger:
+            logger.error(message, exc_info=exc_info)
+
+    attempts = 0
+    success = False
+    cloud_path = None
+    local_path_final = None
+    gcs_upload_exception = None # Store potential GCS exception
+
+    if isinstance(data, (list, dict)):
+        data_str = json.dumps(data, indent=2)
+    elif isinstance(data, str):
+        data_str = data
+    else:
+        raise ValueError("Unsupported data type. It should be a list, dict, or str.")
+
+    while attempts < max_retries and not success:
+        try:
+            bucket = storage_client.bucket(bucket_name)
+            blob = bucket.blob(file_name)
+            blob.upload_from_string(data_str, content_type='application/json')
+            cloud_path = f"{bucket_name}/{file_name}"
+            log_message(f"Successfully saved file to GCS {cloud_path}.")
+            success = True
+        except Exception as e:
+            gcs_upload_exception = e
+            attempts += 1
+            log_error(f"Attempt {attempts} - Failed to write {file_name} "
+                      f"to GCS bucket '{bucket_name}': {e}") # Log with full traceback
+            if attempts < max_retries:
+                time.sleep(2 ** attempts)
+
+    if not success and (save_locally or local_path):
+        try:
+            if not local_path:
+                local_path_final = os.path.join("/tmp", file_name)
+            else:
+                local_path_final = os.path.join(local_path, file_name)
+            with open(local_path_final, 'w', encoding='utf-8') as f:
+                f.write(data_str)
+            log_message(f"Saved {file_name} locally at {local_path_final}.")
+        except Exception as local_e:
+            log_error(f"Failed to write {file_name} locally: {local_e}",exc_info=True)
+
+    # If GCS upload failed, raise a single exception here
+
+    if gcs_upload_exception:
+        raise gcs_upload_exception from None # Propagate without nesting
+
+    return cloud_path, local_path_final
+
 
 def write_csv_to_gcs(bucket_name, file_name, data, storage_client, logger,log_info_verbose=True):
     """ Helper function to write a CSV file to Google Cloud Storage """

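A hedged call sketch for the new write_data_to_gcs, assuming a standard google-cloud-storage client; the bucket and file names here are illustrative, not taken from the diff. Note the exponential backoff (2 ** attempts seconds) between retries and the optional local fallback:

    from google.cloud import storage
    from ipulse_shared_core_ftredge import write_data_to_gcs

    client = storage.Client()
    cloud_path, local_path = write_data_to_gcs(
        bucket_name="example-pipeline-logs",   # illustrative bucket name
        storage_client=client,
        data={"status": "ok"},                 # list/dict is JSON-encoded; str is written as-is
        file_name="run_status.json",
        save_locally=True,                     # fall back to /tmp/... if all retries fail
        max_retries=3,
    )
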
@@ -151,4 +228,4 @@ def write_csv_to_gcs(bucket_name, file_name, data, storage_client, logger,log_info_verbose=True):
     except ValueError as e:
         logger.error(f"ValueError: {e}")
     except Exception as e:
-
+        logger.error(f"An unexpected error occurred while writing CSV to GCS: {e}", exc_info=True)

--- a/src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py
+++ b/src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py
@@ -6,14 +6,8 @@
 import datetime
 from google.cloud import bigquery
 from ipulse_shared_core_ftredge.enums.enums_common_utils import NoticeSeverity
+from ipulse_shared_core_ftredge.utils_common import Notice
 
-def create_data_check_notice(severity, field_name, message):
-    return {
-        "severity_code": severity.value,
-        "severity_name": severity.name,
-        "subject": field_name,
-        "message": message
-    }
 
 def create_bigquery_schema_from_json(json_schema):
     schema = []

@@ -29,14 +23,14 @@ def update_check_with_schema_template(updates, schema, dt_ts_to_str=True, check_
 
     """Ensure Update dict corresponds to the config schema, ensuring proper formats and lengths."""
     valid_updates = {}
-    notices=[] ### THIS IS TO AVOID LOGGING A WARNING RANDOMLY
+    notices=[] ### THIS IS TO AVOID LOGGING A WARNING RANDOMLY, INSTEAD GROUPPING FOR A GIVEN RUN
 
     # Process updates to conform to the schema
     for field in schema:
         field_name = field["name"]
         field_type = field["type"]
         mode = field["mode"]
-
+
         # Initialize notice to None at the start of each field processing
         notice = None
 

@@ -44,7 +38,7 @@ def update_check_with_schema_template(updates, schema, dt_ts_to_str=True, check_
             value = updates[field_name]
 
             # Handle date and timestamp formatting
-
+
             # Validate and potentially convert date and timestamp fields
             if field_type == "DATE":
                 value, notice = handle_date_fields(field_name, value, dt_ts_to_str)

@@ -61,18 +55,18 @@ def update_check_with_schema_template(updates, schema, dt_ts_to_str=True, check_
             value,notice = check_and_truncate_length(field_name, value, field["max_length"])
             if notice:
                 notices.append(notice)
-
+
         # Only add to the dictionary if value is not None or the field is required
         if value is not None or mode == "REQUIRED":
             valid_updates[field_name] = value
 
         elif mode == "REQUIRED":
-            notice=
-                   field_name,
-                   f"Required field '{field_name}' is missing in the updates.")
+            notice=Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                          subject=field_name,
+                          message=f"Required field '{field_name}' is missing in the updates.")
 
             notices.append(notice)
-
+
     return valid_updates, notices
 
 def handle_date_fields(field_name, value, dt_ts_to_str):

@@ -88,13 +82,13 @@ def handle_date_fields(field_name, value, dt_ts_to_str):
                 return value, None
             return parsed_date, None
         except ValueError:
-            return None,
-                         field_name,
-
+            return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                                subject=field_name,
+                                message=f"Expected a DATE in YYYY-MM-DD format but got {value}.")
     else:
-        return None,
-                     field_name,
-                     f"Expected a DATE or YYYY-MM-DD str format but got {value} of type {type(value).__name__}.")
+        return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                            subject=field_name,
+                            message= f"Expected a DATE or YYYY-MM-DD str format but got {value} of type {type(value).__name__}.")
 
 
 def handle_timestamp_fields(field_name, value, dt_ts_to_str):

@@ -110,47 +104,49 @@ def handle_timestamp_fields(field_name, value, dt_ts_to_str):
                 return value, None
             return parsed_datetime, None
         except ValueError:
-            return None,
-                         field_name,
-                         f"Expected ISO format TIMESTAMP but got {value}.")
+            return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                                subject=field_name,
+                                message= f"Expected ISO format TIMESTAMP but got {value}.")
     else:
-        return None,
-                     field_name,
-                     f"Expected ISO format TIMESTAMP but got {value} of type {type(value).__name__}.")
+        return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                            subject=field_name,
+                            message= f"Expected ISO format TIMESTAMP but got {value} of type {type(value).__name__}.")
 
 
 def check_and_truncate_length(field_name, value, max_length):
     """Checks and truncates the length of string fields if they exceed the max length."""
     if isinstance(value, str) and len(value) > max_length:
-        return value[:max_length],
-
-                                   f"Field exceeds max length: {len(value)}/{max_length}. Truncating.")
-
+        return value[:max_length], Notice(severity=NoticeSeverity.WARNING_FIX_RECOMMENDED,
+                                          subject= field_name,
+                                          message= f"Field exceeds max length: {len(value)}/{max_length}. Truncating.")
+
     return value, None
 
 
 
 def handle_type_conversion(field_type, field_name, value):
     if field_type == "STRING" and not isinstance(value, str):
-        return str(value),
-                           field_name,
-                           f"Expected STRING but got {value} of type {type(value).__name__}.")
+        return str(value), Notice(severity=NoticeSeverity.WARNING_REVIEW_RECOMMENDED,
+                                  subject=field_name,
+                                  message= f"Expected STRING but got {value} of type {type(value).__name__}.")
 
     if field_type == "INT64" and not isinstance(value, int):
         try:
             return int(value), None
         except ValueError:
-            return None,
-
-
+            return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                                subject= field_name,
+                                message=f"Expected INTEGER, but got {value} of type {type(value).__name__}.")
     if field_type == "FLOAT64" and not isinstance(value, float):
         try:
             return float(value), None
         except ValueError:
-            return None,
-
-
+            return None, Notice(severity=NoticeSeverity.WARNING_FIX_REQUIRED,
+                                subject=field_name,
+                                message=f"Expected FLOAT, but got {value} of type {type(value).__name__}.")
     if field_type == "BOOL" and not isinstance(value, bool):
-        return bool(value),
-
-
+        return bool(value), Notice(severity=NoticeSeverity.WARNING_REVIEW_RECOMMENDED,
+                                   subject=field_name,
+                                   message=f"Expected BOOL, but got {value}. Converting as {bool(value)}.")
+
+    return value, None

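A hedged sketch of update_check_with_schema_template after this change, with a toy schema (field names are invented): out-of-schema or malformed values now come back as Notice objects rather than ad-hoc dicts.

    from ipulse_shared_core_ftredge import update_check_with_schema_template

    schema = [
        {"name": "ticker", "type": "STRING", "mode": "REQUIRED", "max_length": 8},
        {"name": "close_price", "type": "FLOAT64", "mode": "NULLABLE"},
    ]
    updates = {"ticker": "VERYLONGTICKER", "close_price": "101.5"}

    valid_updates, notices = update_check_with_schema_template(updates, schema)
    # "ticker" is truncated to 8 chars with a WARNING_FIX_RECOMMENDED Notice;
    # "close_price" should be converted to float via handle_type_conversion
    # (its call site is not shown in this diff).
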
--- a/src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
+++ b/src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.38
+Version: 2.50
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar

--- a/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
+++ b/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
@@ -3,6 +3,7 @@ README.md
 pyproject.toml
 setup.py
 src/ipulse_shared_core_ftredge/__init__.py
+src/ipulse_shared_core_ftredge/utils_common.py
 src/ipulse_shared_core_ftredge/utils_gcp.py
 src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py
 src/ipulse_shared_core_ftredge.egg-info/PKG-INFO

All remaining files listed above with +0 -0 are unchanged between 2.38 and 2.50.