ipulse-shared-core-ftredge 2.56-py3-none-any.whl → 3.1.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- ipulse_shared_core_ftredge/__init__.py +10 -14
- ipulse_shared_core_ftredge/models/__init__.py +0 -1
- ipulse_shared_core_ftredge/models/organisation.py +61 -55
- ipulse_shared_core_ftredge/models/resource_catalog_item.py +97 -171
- ipulse_shared_core_ftredge/models/user_profile.py +3 -3
- ipulse_shared_core_ftredge/utils/__init__.py +3 -0
- ipulse_shared_core_ftredge/utils/utils_common.py +10 -0
- {ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/METADATA +5 -7
- ipulse_shared_core_ftredge-3.1.1.dist-info/RECORD +15 -0
- {ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/WHEEL +1 -1
- ipulse_shared_core_ftredge/enums/__init__.py +0 -28
- ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -171
- ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -44
- ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -58
- ipulse_shared_core_ftredge/enums/enums_modules.py +0 -33
- ipulse_shared_core_ftredge/models/audit_log_firestore.py +0 -12
- ipulse_shared_core_ftredge/models/pulse_enums.py +0 -196
- ipulse_shared_core_ftredge/tests/__init__.py +0 -0
- ipulse_shared_core_ftredge/tests/test.py +0 -17
- ipulse_shared_core_ftredge/utils_common.py +0 -543
- ipulse_shared_core_ftredge/utils_gcp.py +0 -267
- ipulse_shared_core_ftredge/utils_templates_and_schemas.py +0 -155
- ipulse_shared_core_ftredge-2.56.dist-info/RECORD +0 -25
- {ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/top_level.txt +0 -0
ipulse_shared_core_ftredge/utils_gcp.py
DELETED
@@ -1,267 +0,0 @@
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=missing-class-docstring
-# pylint: disable=broad-exception-caught
-# pylint: disable=line-too-long
-# pylint: disable=unused-variable
-import json
-import csv
-from io import StringIO
-import logging
-import os
-import time
-import traceback
-from google.cloud import error_reporting, logging as cloud_logging
-from google.api_core.exceptions import NotFound
-
-
-############################################################################
-##################### SETTING UP LOGGER ##########################
-
-####DEPCREACATED: THIS APPROACH WAS GOOD, BUT ERRORS WERE NOT REPORTED TO ERROR REPORTING
-# logging.basicConfig(level=logging.INFO)
-# logging_client = google.cloud.logging.Client()
-# logging_client.setup_logging()
-###################################
-
-
-##### THIS APPROACH IS USED NOW ########
-ENV = os.getenv('ENV', 'LOCAL').strip("'")
-
-def setup_gcp_logger_and_error_report(logger_name,level=logging.INFO, use_cloud_logging=True):
-    """Sets up a logger with Error Reporting and Cloud Logging handlers.
-
-    Args:
-        logger_name: The name of the logger.
-
-    Returns:
-        logging.Logger: The configured logger instance.
-    """
-
-    class ErrorReportingHandler(logging.Handler):
-        def __init__(self, level=logging.ERROR):
-            super().__init__(level)
-            self.error_client = error_reporting.Client()
-            self.propagate = True
-
-        def emit(self, record):
-            try:
-                if record.levelno >= logging.ERROR:
-                    message = self.format(record)
-                    if record.exc_info:
-                        message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
-                    if hasattr(record, 'pathname') and hasattr(record, 'lineno'):
-                        message += f"\nFile: {record.pathname}, Line: {record.lineno}"
-                    self.error_client.report(message)
-            except Exception as e:
-                # Ensure no exceptions are raised during logging
-                self.handleError(record)
-
-    logger = logging.getLogger(logger_name)
-    logger.setLevel(level)
-
-    # Add a console handler for local development
-    if ENV == "LOCAL" or not use_cloud_logging:
-        formatter = logging.Formatter('%(levelname)s : %(name)s : %(asctime)s : %(message)s')
-        console_handler = logging.StreamHandler()
-        console_handler.setFormatter(formatter)
-        logger.addHandler(console_handler)
-
-    if use_cloud_logging:
-        # Create Error Reporting handler
-        error_reporting_handler = ErrorReportingHandler()
-
-        # Create Google Cloud Logging handler
-        cloud_logging_client = cloud_logging.Client()
-        cloud_logging_handler = cloud_logging_client.get_default_handler()
-
-        # Add handlers to the logger
-        logger.addHandler(error_reporting_handler)
-        logger.addHandler(cloud_logging_handler)
-    return logger
-############################################################################
-
-
-############################################################################
-##################### GOOGLE CLOUD STORAGE UTILS ##########################
-
-def read_json_from_gcs(bucket_name, file_name, stor_client, logger):
-    """ Helper function to read a JSON file from Google Cloud Storage """
-    try:
-        bucket = stor_client.bucket(bucket_name)
-        blob = bucket.blob(file_name)
-        data_string = blob.download_as_text()
-        data = json.loads(data_string)
-        return data
-    except NotFound:
-        logger.error(f"Error: The file {file_name} was not found in the bucket {bucket_name}.")
-        return None
-    except json.JSONDecodeError:
-        logger.error(f"Error: The file {file_name} could not be decoded as JSON.")
-        return None
-    except Exception as e:
-        logger.error(f"An unexpected error occurred: {e}", exc_info=True)
-        return None
-
-def read_csv_from_gcs(bucket_name, file_name, storage_client, logger):
-    """ Helper function to read a CSV file from Google Cloud Storage """
-    try:
-        bucket = storage_client.bucket(bucket_name)
-        blob = bucket.blob(file_name)
-        data_string = blob.download_as_text()
-        data_file = StringIO(data_string)
-        reader = csv.DictReader(data_file)
-        return list(reader)
-    except NotFound:
-        logger.error(f"Error: The file {file_name} was not found in the bucket {bucket_name}.")
-        return None
-    except csv.Error:
-        logger.error(f"Error: The file {file_name} could not be read as CSV.")
-        return None
-    except Exception as e:
-        logger.error(f"An unexpected error occurred: {e}", exc_info=True)
-        return None
-
-
-
-def write_json_to_gcs(bucket_name, storage_client, data, file_name,
-                      save_locally=False, local_path=None, logger=None, max_retries=2,
-                      overwrite_if_exists=False, increment_if_exists=False):
-    """Saves data to Google Cloud Storage and optionally locally.
-
-    This function attempts to upload data to GCS. If the upload fails after
-    retries and `save_locally` is True or `local_path` is provided, it attempts
-    to save the data locally.
-    It also tries to handle file name conflicts by overwriting or incrementing. If both are provided as Ture, an exception will be raised.
-    """
-
-    def log_message(message):
-        if logger:
-            logger.info(message)
-
-    def log_error(message, exc_info=False):
-        if logger:
-            logger.error(message, exc_info=exc_info)
-
-    def log_warning(message):
-        if logger:
-            logger.warning(message)
-
-    attempts = 0
-    success = False
-    gcs_path = None
-    local_path_final = None
-    gcs_file_overwritten = False
-    gcs_file_already_exists = False
-    gcs_file_saved_with_increment = False
-    gcs_upload_exception = None  # Store potential GCS exception
-
-    # Check for conflicting options
-    if overwrite_if_exists and increment_if_exists:
-        raise ValueError("When writing JSON to GCS, both overwrite and increment_if_exists cannot be True at the same time.")
-
-    if isinstance(data, (list, dict)):
-        data_str = json.dumps(data, indent=2)
-    elif isinstance(data, str):
-        data_str = data
-    else:
-        raise ValueError("Unsupported data type. It should be a list, dict, or str.")
-
-    bucket = storage_client.bucket(bucket_name)
-    base_file_name, ext = os.path.splitext(file_name)
-    increment = 0
-
-    while attempts < max_retries and not success:
-        try:
-            if increment_if_exists:
-                while bucket.blob(file_name).exists():
-                    gcs_file_already_exists = True
-                    increment += 1
-                    file_name = f"{base_file_name}_{increment}{ext}"
-                    gcs_file_saved_with_increment = True
-                log_warning(f"File {file_name} already exists in bucket {bucket_name}. Writing with increment: {increment_if_exists}")
-            else:
-                blob = bucket.blob(file_name)
-
-                # Check if the file exists
-                if blob.exists():
-                    gcs_file_already_exists = True
-                    gcs_path = f"gs://{bucket_name}/{file_name}"
-                    log_message(f"File {file_name} already exists in bucket {bucket_name}. Overwriting: {overwrite_if_exists}")
-                    if not overwrite_if_exists:
-                        log_warning(f"File {file_name} already exists and overwrite is set to False. Skipping save to GCS.")
-                        break
-                    else:
-                        gcs_file_overwritten = True
-
-            blob.upload_from_string(data_str, content_type='application/json')
-            gcs_path = f"gs://{bucket_name}/{file_name}"
-            log_message(f"Successfully saved file to GCS {gcs_path}.")
-            success = True
-        except Exception as e:
-            gcs_upload_exception = e
-            attempts += 1
-            if attempts < max_retries:
-                time.sleep(2 ** attempts)
-            else:
-                log_error(f"Failed to write {file_name} to GCS bucket {bucket_name} after {max_retries} attempts: {e}")
-
-    if not success or save_locally or local_path:
-        try:
-            if not local_path:
-                local_path_final = os.path.join("/tmp", file_name)
-            else:
-                local_path_final = os.path.join(local_path, file_name)
-
-            if os.path.exists(local_path_final):
-                if increment_if_exists:
-                    increment = 0
-                    while os.path.exists(local_path_final):
-                        increment += 1
-                        local_path_final = os.path.join(local_path, f"{base_file_name}_{increment}{ext}")
-                elif not overwrite_if_exists:
-                    log_message(f"File {file_name} already exists locally at {local_path_final} and overwrite is set to False. Skipping save.")
-                    success = True
-                else:
-                    log_message(f"File {file_name} already exists locally at {local_path_final}. Overwriting: {overwrite_if_exists}")
-
-            if not success:
-                with open(local_path_final, 'w', encoding='utf-8') as f:
-                    f.write(data_str)
-                log_message(f"Saved {file_name} locally at {local_path_final}. Overwritten: {overwrite_if_exists}")
-                success = True
-        except Exception as local_e:
-            log_error(f"Failed to write {file_name} locally: {local_e}", exc_info=True)

-    if gcs_upload_exception is not None:
-        raise gcs_upload_exception  # Propagate without nesting
-
-    return {
-        "gcs_path": gcs_path,
-        "local_path": local_path_final,
-        "gcs_file_already_exists": gcs_file_already_exists,
-        "gcs_file_overwritten": gcs_file_overwritten,
-        "gcs_file_saved_with_increment": gcs_file_saved_with_increment
-    }
-
-
-def write_csv_to_gcs(bucket_name, file_name, data, storage_client, logger,log_info_verbose=True):
-    """ Helper function to write a CSV file to Google Cloud Storage """
-    try:
-        bucket = storage_client.bucket(bucket_name)
-        blob = bucket.blob(file_name)
-        data_file = StringIO()
-        if data and isinstance(data, list) and isinstance(data[0], dict):
-            fieldnames = data[0].keys()
-            writer = csv.DictWriter(data_file, fieldnames=fieldnames)
-            writer.writeheader()
-            writer.writerows(data)
-        else:
-            raise ValueError("Data should be a list of dictionaries")
-        blob.upload_from_string(data_file.getvalue(), content_type='text/csv')
-        if log_info_verbose:
-            logger.info(f"Successfully wrote CSV to {file_name} in bucket {bucket_name}.")
-    except ValueError as e:
-        logger.error(f"ValueError: {e}")
-    except Exception as e:
-        logger.error(f"An unexpected error occurred while writing CSV to GCS: {e}", exc_info=True)
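For orientation, a minimal sketch of how a caller might have wired the removed utils_gcp.py helpers together, based only on the signatures shown above. The bucket and file names are invented for illustration, and google.cloud.storage is assumed to be installed separately, since the helpers expect the caller to pass in a storage client.

# Illustrative sketch only; not part of the package diff.
from google.cloud import storage

from ipulse_shared_core_ftredge.utils_gcp import (
    setup_gcp_logger_and_error_report,
    read_json_from_gcs,
    write_json_to_gcs,
)

# Console-only logging here; with use_cloud_logging=True the logger also gets
# Cloud Logging and Error Reporting handlers.
logger = setup_gcp_logger_and_error_report("my_service", use_cloud_logging=False)

storage_client = storage.Client()

# Returns the parsed JSON, or None (after logging) if the blob is missing or malformed.
config = read_json_from_gcs("my-bucket", "configs/app.json", storage_client, logger)

# Uploads the dict as JSON; with save_locally=True it also writes a copy
# (to /tmp by default) and falls back to the local copy if GCS keeps failing.
result = write_json_to_gcs(
    bucket_name="my-bucket",
    storage_client=storage_client,
    data={"status": "ok"},
    file_name="outputs/run_1.json",
    save_locally=True,
    logger=logger,
    overwrite_if_exists=True,
)
print(result["gcs_path"], result["local_path"])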
ipulse_shared_core_ftredge/utils_templates_and_schemas.py
DELETED
@@ -1,155 +0,0 @@
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=logging-fstring-interpolation
-# pylint: disable=line-too-long
-
-import datetime
-from google.cloud import bigquery
-from ipulse_shared_core_ftredge.enums.enums_common_utils import LogLevel
-from ipulse_shared_core_ftredge.utils_common import ContextLog
-
-
-def create_bigquery_schema_from_json(json_schema):
-    schema = []
-    for field in json_schema:
-        if "max_length" in field:
-            schema.append(bigquery.SchemaField(field["name"], field["type"], mode=field["mode"], max_length=field["max_length"]))
-        else:
-            schema.append(bigquery.SchemaField(field["name"], field["type"], mode=field["mode"]))
-    return schema
-
-
-def check_format_against_schema_template(data_to_check, schema, dt_ts_to_str=True, check_max_length=True):
-    """Ensure Update dict corresponds to the config schema, ensuring proper formats and lengths."""
-    checked_data = {}
-    warnings_or_error = []  # Group warnings and errors for a given run
-
-    try:
-        # Process updates to conform to the schema
-        for field in schema:
-            field_name = field["name"]
-            field_type = field["type"]
-            mode = field["mode"]
-
-            # Initialize notice to None at the start of each field processing
-            warning = None
-
-            if field_name in data_to_check:
-                value = data_to_check[field_name]
-
-                # Handle date and timestamp formatting
-                if field_type == "DATE":
-                    value, warning = handle_date_fields(field_name, value, dt_ts_to_str)
-                elif field_type == "TIMESTAMP":
-                    value, warning = handle_timestamp_fields(field_name, value, dt_ts_to_str)
-                elif field_type in ["STRING", "INT64", "FLOAT64", "BOOL"]:
-                    value, warning = handle_type_conversion(field_type, field_name, value)
-
-                if warning:
-                    warnings_or_error.append(warning)
-
-                # Check and handle max length restriction
-                if check_max_length and "max_length" in field:
-                    value, warning = check_and_truncate_length(field_name, value, field["max_length"])
-                    if warning:
-                        warnings_or_error.append(warning)
-
-                # Only add to the dictionary if value is not None or the field is required
-                if value is not None or mode == "REQUIRED":
-                    checked_data[field_name] = value
-
-            elif mode == "REQUIRED":
-                warning = ContextLog(level=LogLevel.WARNING,
-                                     subject=field_name,
-                                     description=f"Required field '{field_name}' is missing in the updates.")
-                warnings_or_error.append(warning)
-
-    except Exception as e:
-        error_log = ContextLog(level=LogLevel.ERROR_EXCEPTION_REDO,
-                               subject=data_to_check,
-                               description=f"An error occurred during update check: {str(e)}")
-        warnings_or_error.append(error_log)
-
-    return checked_data, warnings_or_error
-
-def handle_date_fields(field_name, value, dt_ts_to_str):
-    """Handles date fields, ensuring they are in the correct format and optionally converts them to string."""
-    if isinstance(value, datetime.date):
-        if dt_ts_to_str:
-            return value.strftime("%Y-%m-%d"), None
-        return value, None
-    elif isinstance(value, str):
-        try:
-            parsed_date = datetime.datetime.strptime(value, "%Y-%m-%d").date()
-            if dt_ts_to_str:
-                return value, None
-            return parsed_date, None
-        except ValueError:
-            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                    subject=field_name,
-                                    description=f"Expected a DATE in YYYY-MM-DD format but got {value}.")
-    else:
-        return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                subject=field_name,
-                                description= f"Expected a DATE or YYYY-MM-DD str format but got {value} of type {type(value).__name__}.")
-
-
-def handle_timestamp_fields(field_name, value, dt_ts_to_str):
-    """Handles timestamp fields, ensuring they are in the correct format and optionally converts them to ISO format string."""
-    if isinstance(value, datetime.datetime):
-        if dt_ts_to_str:
-            return value.isoformat(), None
-        return value, None
-    elif isinstance(value, str):
-        try:
-            parsed_datetime = datetime.datetime.fromisoformat(value)
-            if dt_ts_to_str:
-                return value, None
-            return parsed_datetime, None
-        except ValueError:
-            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                    subject=field_name,
-                                    description= f"Expected ISO format TIMESTAMP but got {value}.")
-    else:
-        return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                subject=field_name,
-                                description= f"Expected ISO format TIMESTAMP but got {value} of type {type(value).__name__}.")
-
-
-def check_and_truncate_length(field_name, value, max_length):
-    """Checks and truncates the length of string fields if they exceed the max length."""
-    if isinstance(value, str) and len(value) > max_length:
-        return value[:max_length], ContextLog(level=LogLevel.WARNING_FIX_RECOMMENDED,
-                                              subject= field_name,
-                                              description= f"Field exceeds max length: {len(value)}/{max_length}. Truncating.")
-
-    return value, None
-
-
-
-def handle_type_conversion(field_type, field_name, value):
-    if field_type == "STRING" and not isinstance(value, str):
-        return str(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
-                                      subject=field_name,
-                                      description= f"Expected STRING but got {value} of type {type(value).__name__}.")
-
-    if field_type == "INT64" and not isinstance(value, int):
-        try:
-            return int(value), None
-        except ValueError:
-            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                    subject= field_name,
-                                    description=f"Expected INTEGER, but got {value} of type {type(value).__name__}.")
-    if field_type == "FLOAT64" and not isinstance(value, float):
-        try:
-            return float(value), None
-        except ValueError:
-            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
-                                    subject=field_name,
-                                    description=f"Expected FLOAT, but got {value} of type {type(value).__name__}.")
-    if field_type == "BOOL" and not isinstance(value, bool):
-        return bool(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
-                                       subject=field_name,
-                                       description=f"Expected BOOL, but got {value}. Converting as {bool(value)}.")
-
-    return value, None
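Likewise, a hedged sketch of the flow the removed utils_templates_and_schemas.py supported: one JSON schema template drives both BigQuery table creation and pre-write validation. The field names and row values below are hypothetical, not taken from the package.

# Illustrative sketch only; not part of the package diff.
from ipulse_shared_core_ftredge.utils_templates_and_schemas import (
    create_bigquery_schema_from_json,
    check_format_against_schema_template,
)

schema_template = [
    {"name": "ticker", "type": "STRING", "mode": "REQUIRED", "max_length": 12},
    {"name": "as_of_date", "type": "DATE", "mode": "REQUIRED"},
    {"name": "close_price", "type": "FLOAT64", "mode": "NULLABLE"},
]

# Build google.cloud.bigquery SchemaField objects for table creation.
bq_schema = create_bigquery_schema_from_json(schema_template)

# Coerce and validate an update dict against the same template; problems come
# back as ContextLog entries in `issues` rather than raised exceptions.
row = {"ticker": "ACME", "as_of_date": "2024-01-31", "close_price": "101.5"}
checked_row, issues = check_format_against_schema_template(row, schema_template)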
ipulse_shared_core_ftredge-2.56.dist-info/RECORD
DELETED
@@ -1,25 +0,0 @@
-ipulse_shared_core_ftredge/__init__.py,sha256=gIwh_6xjoG7UIuTKNkq0iGvCuxsbyXNrTyV3vB8YdGc,869
-ipulse_shared_core_ftredge/utils_common.py,sha256=UOaEiTE9moJCoSGFnD6aPI_w1UujovWcYOH6ifJxPxg,22533
-ipulse_shared_core_ftredge/utils_gcp.py,sha256=z1Rdgi1wP0768a9vnp9qmj_UtoLdmFOqYMcnrL1v1pw,10999
-ipulse_shared_core_ftredge/utils_templates_and_schemas.py,sha256=AwGl9J-XQc_aO_VKWhR_TuA1ML8nWxHuzHtxBH8yfwE,7499
-ipulse_shared_core_ftredge/enums/__init__.py,sha256=pdi9OIDyb8pfTFbeBqnhQ_ZO-gnYfmLKwHZBYOMnkqE,888
-ipulse_shared_core_ftredge/enums/enums_common_utils.py,sha256=UQG-dd302r5VFhV2kwDMXCg20nsE2ht4XBEZx9rzX1o,6724
-ipulse_shared_core_ftredge/enums/enums_data_eng.py,sha256=7w3Jjmw84Wq22Bb5Qs09Z82Bdf-j8nhRiQJfw60_g80,1903
-ipulse_shared_core_ftredge/enums/enums_module_fincore.py,sha256=W1TkSLu3ryLf_aif2VcKsFznWz0igeMUR_buoGEG6w8,1406
-ipulse_shared_core_ftredge/enums/enums_modules.py,sha256=AyXUoNmR75DZLaEHi3snV6LngR25LeZRqzrLDaAupbY,1244
-ipulse_shared_core_ftredge/models/__init__.py,sha256=gE22Gzhil0RYQa7YLtdtT44_AsWqklcDfRtgLAQc1dI,200
-ipulse_shared_core_ftredge/models/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
-ipulse_shared_core_ftredge/models/organisation.py,sha256=4f1ATEWh5WT-CDJBLEZUhUwyl3V06ogRkteZAqW_nko,2953
-ipulse_shared_core_ftredge/models/pulse_enums.py,sha256=YJhtvoX6Dk3_SyJUD8vVDSRIzWy5n0I0AOwe19fmDT8,4851
-ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=PxeRvI8fe8KOiHr6NW2Jz_yocyLId09PW8QyTZxjHAA,9809
-ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
-ipulse_shared_core_ftredge/models/user_profile.py,sha256=3-HHB3wK2s4hsP9d5Xp2JYrsGe8iFzz4VaoygDIO_-s,1930
-ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=oKK0XsQDKkgDvjFPhX2XlqEqlKLBQ4AkvPHXEuZbFMY,1712
-ipulse_shared_core_ftredge/models/user_status.py,sha256=8TyRd8tBK9_xb0MPKbI5pn9-lX7ovKbeiuWYYPtIOiw,3202
-ipulse_shared_core_ftredge/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ipulse_shared_core_ftredge/tests/test.py,sha256=0lS8HP5Quo_BqNoscU40qOH9aJRaa1Pfam5VUBmdld8,682
-ipulse_shared_core_ftredge-2.56.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
-ipulse_shared_core_ftredge-2.56.dist-info/METADATA,sha256=WdSqfe6RTQDh4M9EBSA33UaYrmMud26ZYm-P-LRj5OQ,561
-ipulse_shared_core_ftredge-2.56.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
-ipulse_shared_core_ftredge-2.56.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
-ipulse_shared_core_ftredge-2.56.dist-info/RECORD,,
{ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/LICENCE
RENAMED
File without changes

{ipulse_shared_core_ftredge-2.56.dist-info → ipulse_shared_core_ftredge-3.1.1.dist-info}/top_level.txt
RENAMED
File without changes