ipulse-shared-core-ftredge 2.7.1__py3-none-any.whl → 2.8.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in its public registry, and is provided for informational purposes only.
Note: this release of ipulse-shared-core-ftredge has been flagged as potentially problematic.
- ipulse_shared_core_ftredge/__init__.py +7 -12
- ipulse_shared_core_ftredge/logging/__init__.py +1 -0
- ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py +144 -0
- ipulse_shared_core_ftredge/logging/utils_logging.py +72 -0
- ipulse_shared_core_ftredge/utils/__init__.py +1 -21
- ipulse_shared_core_ftredge/utils/utils_common.py +3 -173
- {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/METADATA +1 -2
- ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD +19 -0
- ipulse_shared_core_ftredge/enums/__init__.py +0 -37
- ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -107
- ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -313
- ipulse_shared_core_ftredge/enums/enums_logging.py +0 -108
- ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -72
- ipulse_shared_core_ftredge/enums/enums_modules.py +0 -31
- ipulse_shared_core_ftredge/enums/enums_solution_providers.py +0 -24
- ipulse_shared_core_ftredge/enums/pulse_enums.py +0 -182
- ipulse_shared_core_ftredge/utils/logs/__init__.py +0 -2
- ipulse_shared_core_ftredge/utils/logs/context_log.py +0 -210
- ipulse_shared_core_ftredge/utils/logs/get_logger.py +0 -103
- ipulse_shared_core_ftredge/utils/utils_cloud.py +0 -53
- ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +0 -442
- ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +0 -166
- ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +0 -27
- ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +0 -356
- ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +0 -151
- ipulse_shared_core_ftredge-2.7.1.dist-info/RECORD +0 -33
- /ipulse_shared_core_ftredge/{utils/logs → logging}/audit_log_firestore.py +0 -0
- {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/WHEEL +0 -0
- {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/top_level.txt +0 -0
ipulse_shared_core_ftredge/__init__.py
@@ -9,15 +9,10 @@ from .enums import (TargetLogs,LogLevel, Status, Unit, Frequency,
                     DataSourceType,PipelineTriggerType,DataOperationType,
                     MatchConditionType, DuplicationHandling, DuplicationHandlingStatus,
                     CodingLanguage, ExecutionLocation, ExecutionComputeType,
-                    CloudProvider,
-from .utils import (
-
-
-
-
-
-                    read_csv_from_gcs,
-                    read_json_from_gcs,
-                    check_format_against_schema_template,
-                    create_bigquery_schema_from_json,
-                    Pipelinemon, ContextLog)
+                    CloudProvider,LoggingHandler)
+from .utils import (list_as_strings)
+
+from .logging import (get_logger,
+                      log_error,
+                      log_warning,
+                      log_info)
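For reference, a minimal sketch (not part of the diff) of the 2.8.1 top-level import surface this hunk implies; it assumes the wheel is installed, and note that __init__.py still imports CloudProvider and LoggingHandler from .enums even though the 2.8.1 RECORD further down lists no enums files, so the package import itself may fail:

# Sketch of the 2.8.1 top-level API implied by the __init__.py hunk above.
# Caveat: the .enums import inside __init__.py may fail on a clean install,
# since no enums/ files ship in the 2.8.1 wheel per its RECORD.
from ipulse_shared_core_ftredge import (
    get_logger,
    log_error,
    log_warning,
    log_info,
    list_as_strings,
)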
ipulse_shared_core_ftredge/logging/__init__.py
@@ -0,0 +1 @@
+from .utils_logging import get_logger, log_error, log_warning, log_info
ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py
@@ -0,0 +1,144 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+# pylint: disable=missing-class-docstring
+# pylint: disable=broad-exception-caught
+
+import logging
+import traceback
+import json
+import os
+from google.cloud import error_reporting
+from google.cloud import logging as cloud_logging
+
+##########################################################################################################################
+#################################### Custom logging FORMATTERS #####################################################
+##########################################################################################################################
+
+
+class CloudLogFormatter(logging.Formatter):
+    """Formats log records as structured JSON."""
+
+    def format(self, record):
+        log_entry = {
+            'message': record.msg,
+            'timestamp': self.formatTime(record, self.datefmt),
+            'name': record.name,
+            'severity': record.levelname,
+            'pathname': record.pathname,
+            'lineno': record.lineno,
+        }
+        if record.exc_info:
+            log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
+        if isinstance(record.msg, dict):
+            log_entry.update(record.msg)
+        return json.dumps(log_entry)
+
+
+class LocalLogFormatter(logging.Formatter):
+    """Formats log records for local output to the console."""
+
+    def format(self, record):  # Make sure you have the 'record' argument here!
+        path_parts = record.pathname.split(os.sep)
+
+        # Get the last two parts of the path if they exist
+        if len(path_parts) >= 2:
+            short_path = os.path.join(path_parts[-2], path_parts[-1])
+        else:
+            short_path = record.pathname
+
+        # Format log messages differently based on the log level
+        if record.levelno == logging.INFO:
+            log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
+        elif record.levelno == logging.DEBUG:
+            log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+        elif record.levelno == logging.ERROR:
+            log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+            if record.exc_info:
+                log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+        else:
+            log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+
+
+        return log_message
+
+#############################################################################################################################################
+######################################## Logging handlers for Google Cloud ########################################
+#############################################################################################################################################
+
+class CustomGCPLoggingHandler(cloud_logging.handlers.CloudLoggingHandler):
+    """Custom handler for Google Cloud Logging with a dynamic logName."""
+    def __init__(self, client, name, resource=None, labels=None):
+        super().__init__(client=client, name=name, resource=resource, labels=labels)
+        self.client = client  # Ensure client is consistently used
+
+    def emit(self, record):
+        try:
+            # 1. Create the basic log entry dictionary
+            log_entry = {
+                'message': record.msg,
+                'severity': record.levelname,
+                'name': record.name,
+                'pathname': record.filename,
+                'lineno': record.lineno,
+            }
+            if record.exc_info:
+                log_entry['exception_traceback'] = ''.join(
+                    traceback.format_exception(*record.exc_info)
+                )
+
+            # 2. Apply the formatter to the 'message' field if it's a dictionary
+            if isinstance(record.msg, dict):
+                formatted_message = self.formatter.format(record)
+                try:
+                    log_entry['message'] = json.loads(formatted_message)
+                except json.JSONDecodeError:
+                    log_entry['message'] = formatted_message
+            else:
+                log_entry['message'] = record.msg
+
+            # 3. Set the custom logName
+            log_entry['logName'] = f"projects/{self.client.project}/logs/{record.name}"
+
+            # 4. Send to Google Cloud Logging
+            super().emit(record)
+        except Exception as e:
+            self.handleError(record)
+
+class CustomGCPErrorReportingHandler(logging.Handler):
+    def __init__(self, client=None, level=logging.ERROR):
+        super().__init__(level)
+        self.error_client = error_reporting.Client() if client is None else client
+        self.propagate = True
+
+    def emit(self, record):
+        try:
+            if record.levelno >= logging.ERROR:
+                log_struct = {
+                    'message': self.format(record),
+                    'severity': record.levelname,
+                    'pathname': getattr(record, 'pathname', None),
+                    'lineno': getattr(record, 'lineno', None)
+                }
+                if record.exc_info:
+                    log_struct['exception'] = ''.join(
+                        traceback.format_exception(*record.exc_info)
+                    )
+                self.error_client.report(str(log_struct))
+        except Exception as e:
+            self.handleError(record)
+
+
+def add_gcp_cloud_logging(logger, formatter, client=None):
+    """Sets up Google Cloud Logging for the logger."""
+    client = client or cloud_logging.Client()
+    handler = CustomGCPLoggingHandler(client, logger.name)
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+
+def add_gcp_error_reporting(logger, client=None):
+    """Sets up Google Cloud Error Reporting for the logger."""
+    client = client or error_reporting.Client()
+    handler = CustomGCPErrorReportingHandler(client=client)
+    logger.addHandler(handler)
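Since CloudLogFormatter is a plain logging.Formatter, its JSON output can be inspected locally by attaching it to a stdlib StreamHandler; a minimal sketch, assuming the 2.8.1 wheel and its google-cloud dependencies are installed (the module imports them at import time):

import logging
from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import CloudLogFormatter

handler = logging.StreamHandler()
handler.setFormatter(CloudLogFormatter())  # emits structured JSON per record
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Dict messages stay under 'message' and are also merged into the top level:
logger.info({"event": "ingest", "rows": 42})
# {"message": {"event": "ingest", "rows": 42}, "timestamp": "...",
#  "name": "demo", "severity": "INFO", ..., "event": "ingest", "rows": 42}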
ipulse_shared_core_ftredge/logging/utils_logging.py
@@ -0,0 +1,72 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+# pylint: disable=missing-class-docstring
+# pylint: disable=broad-exception-caught
+import logging
+from typing import List, Union
+from ipulse_shared_core_ftredge.enums import LoggingHandler
+from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (CloudLogFormatter,
+                                                                                LocalLogFormatter,
+                                                                                add_gcp_cloud_logging,
+                                                                                add_gcp_error_reporting)
+
+
+def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandler, List[LoggingHandler]] = LoggingHandler.NONE):
+
+    """Creates and configures a logger with the specified handlers."""
+
+    logger = logging.getLogger(logger_name)
+    logger.setLevel(level)
+    cloud_formatter = CloudLogFormatter()
+
+    # Ensure logging_handler_providers is a list for consistent processing
+    if not isinstance(logging_handler_providers, list):
+        logging_handler_providers = [logging_handler_providers]
+
+    supported_remote_handlers = [
+        LoggingHandler.GCP_CLOUD_LOGGING,
+        LoggingHandler.GCP_ERROR_REPORTING,
+        LoggingHandler.LOCAL_STREAM,
+        LoggingHandler.NONE,  # If NONE is considered a remote handler
+    ]
+
+    # Remote handlers
+
+    for handler_provider in logging_handler_providers:
+        if handler_provider in supported_remote_handlers:
+            if handler_provider == LoggingHandler.GCP_CLOUD_LOGGING:
+                add_gcp_cloud_logging(logger, cloud_formatter)
+            elif handler_provider == LoggingHandler.GCP_ERROR_REPORTING:
+                add_gcp_error_reporting(logger)
+            elif handler_provider == LoggingHandler.LOCAL_STREAM:  # Handle local stream
+                local_handler = logging.StreamHandler()
+                local_handler.setFormatter(LocalLogFormatter())
+                logger.addHandler(local_handler)
+        else:
+            raise ValueError(
+                f"Unsupported logging provider: {handler_provider}. "
+                f"Supported providers: {[h.value for h in supported_remote_handlers]}"
+            )
+    return logger
+
+
+def log_error(msg,logger=None , print_out=False, exc_info=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.error(msg, exc_info=exc_info)
+
+def log_warning(msg, logger=None, print_out=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.warning(msg)
+
+
+def log_info(msg, logger=None, print_out=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.info(msg)
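A local-only usage sketch of the new get_logger/log_* API. One hedge applies: utils_logging.py imports LoggingHandler from ipulse_shared_core_ftredge.enums, yet the 2.8.1 RECORD below lists no enums files, so on a clean 2.8.1 install this import may fail.

import logging
from ipulse_shared_core_ftredge.enums import LoggingHandler  # may be absent in 2.8.1
from ipulse_shared_core_ftredge.logging import get_logger, log_error, log_info

# LOCAL_STREAM attaches a StreamHandler with LocalLogFormatter; no GCP calls.
logger = get_logger("my_pipeline", level=logging.DEBUG,
                    logging_handler_providers=LoggingHandler.LOCAL_STREAM)
log_info("pipeline started", logger=logger, print_out=True)
try:
    1 / 0
except ZeroDivisionError:
    log_error("step failed", logger=logger, exc_info=True)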
ipulse_shared_core_ftredge/utils/__init__.py
@@ -1,23 +1,3 @@
 # pylint: disable=missing-module-docstring
 
-from .
-from .utils_common import (save_json_locally_extended,
-                           log_error,
-                           log_warning,
-                           log_info,
-                           prepare_full_file_path)
-
-from .utils_collector_pipelinemon import ( Pipelinemon)
-
-from .utils_cloud_gcp import (add_gcp_cloud_logging,
-                              add_gcp_error_reporting,
-                              create_bigquery_schema_from_json,
-                              read_csv_from_gcs, read_json_from_gcs,
-                              write_csv_to_gcs,write_json_to_gcs_extended)
-
-
-from .utils_cloud import (write_json_to_cloud_storage_extended,
-                          read_json_from_cloud_storage)
-
-
-from .utils_templates_and_schemas import (check_format_against_schema_template)
+from .utils_common import (list_as_strings)
ipulse_shared_core_ftredge/utils/utils_common.py
@@ -5,176 +5,6 @@
 # pylint: disable=missing-class-docstring
 # pylint: disable=broad-exception-caught
 
-
-
-
-from ipulse_shared_core_ftredge.enums import DuplicationHandling, DuplicationHandlingStatus, MatchConditionType
-# from ipulse_shared_core_ftredge.utils import Pipelinemon, ContextLog
-
-
-def log_error(msg,logger=None , print_out=False, exc_info=False):
-    if print_out:
-        print(msg)
-    if logger:
-        logger.error(msg, exc_info=exc_info)
-
-def log_warning(msg, logger=None, print_out=False):
-    if print_out:
-        print(msg)
-    if logger:
-        logger.warning(msg)
-
-
-def log_info(msg, logger=None, print_out=False):
-    if print_out:
-        print(msg)
-    if logger:
-        logger.info(msg)
-
-
-def prepare_full_file_path(file_name: str, output_directory: str = None) -> str:
-    """
-    Prepares the full file path, ensuring the output directory and subdirectories exist.
-
-    Args:
-        file_name (str): The name of the file, which may include subdirectories or a full path.
-        output_directory (str, optional): The directory where the file should be saved. Defaults to the current working directory.
-
-    Returns:
-        str: The full path to the file.
-    """
-    if os.path.isabs(file_name):
-        # If file_name is an absolute path, use it directly
-        full_file_path = file_name
-    else:
-        # Prepare the output directory
-        output_directory = output_directory or os.getcwd()
-        full_file_path = os.path.join(output_directory, file_name)
-
-    # Create the directory if it doesn't exist
-    os.makedirs(os.path.dirname(full_file_path), exist_ok=True)
-
-    return full_file_path
-
-
-def save_json_locally_extended(data:dict | list | str, file_name:str,
-                               duplication_handling:DuplicationHandling ,
-                               duplication_match_condition_type:MatchConditionType,
-                               output_directory:str=None,
-                               duplication_match_condition:str | List[str] = "",
-                               max_matched_deletable_files:int=1, logger=None, print_out=False, raise_e=False):
-
-    """Saves data to a local JSON file.
-    """
-
-    max_deletable_files_allowed = 3
-
-    saved_to_file_path = None
-    matched_duplicates_count = 0  # Default to 0
-    duplication_handling_status = None
-    matched_duplicates_deleted = None
-    error_during_operation = None
-
-    response={
-        "saved_to_file_path": saved_to_file_path,
-        "matched_duplicates_count": matched_duplicates_count,
-        "matched_duplicates_deleted": matched_duplicates_deleted,
-        "duplication_handling_status": duplication_handling_status,
-        "duplication_match_condition_type": duplication_match_condition_type,
-        "duplication_match_condition": duplication_match_condition,
-        "error_during_operation": error_during_operation
-    }
-
-    supported_match_condition_types = [MatchConditionType.EXACT, MatchConditionType.PREFIX]
-    supported_duplication_handling = [DuplicationHandling.RAISE_ERROR, DuplicationHandling.OVERWRITE, DuplicationHandling.INCREMENT, DuplicationHandling.SKIP]
-
-    try:
-
-        # Use the helper function to get the full file path
-        full_file_path = prepare_full_file_path(file_name=file_name, output_directory=output_directory)
-        # Extract the directory path, base file name, and extension
-        directory_path = os.path.dirname(full_file_path)
-
-        if max_matched_deletable_files > max_deletable_files_allowed:
-            msg = f"Error: max_deletable_files should be less than or equal to {max_deletable_files_allowed} for safety. For more, use specific Delete method."
-            raise ValueError(msg)
-
-        if duplication_handling not in supported_duplication_handling:
-            msg = f"Error: Duplication handling not supported. Supported types: {supported_duplication_handling}"
-            raise ValueError(msg)
-
-        if duplication_match_condition_type not in supported_match_condition_types:
-            msg = f"Error: Match condition type not supported. Supported types: {supported_match_condition_types}"
-            raise ValueError(msg)
-
-        elif duplication_match_condition_type!=MatchConditionType.EXACT and not duplication_match_condition:
-            msg = f"Error: Match condition is required for match condition type: {duplication_match_condition_type}"
-            raise ValueError(msg)
-
-        # Prepare data
-        if isinstance(data, (list, dict)):
-            data_str = json.dumps(data, indent=2)
-        else:
-            data_str = data
-
-
-        # --- Check if File Exists ---
-        if duplication_match_condition_type==MatchConditionType.PREFIX:
-            files_matched_on_condition = [
-                os.path.join(directory_path, f) for f in os.listdir(directory_path)
-                if f.startswith(duplication_match_condition)
-            ]
-            if files_matched_on_condition:
-                matched_duplicates_count = len(files_matched_on_condition)
-        elif duplication_match_condition_type==MatchConditionType.EXACT:
-            if os.path.exists(full_file_path):
-                files_matched_on_condition = [full_file_path]  # Always assign a list
-                matched_duplicates_count = 1
-
-
-        if matched_duplicates_count:
-            if duplication_handling==DuplicationHandling.RAISE_ERROR:
-                msg = f"Error: File already exists at file path: {full_file_path}"
-                raise FileExistsError(msg)
-
-            if duplication_handling == DuplicationHandling.SKIP:
-                log_warning(f"Skipping saving to file path: {full_file_path} - file already exists.", logger=logger, print_out=print_out)
-                response["duplication_handling_status"] = DuplicationHandlingStatus.SKIPPED.value
-                return response  # Return here
-            # --- Overwrite Logic --> Delete ---
-            if duplication_handling==DuplicationHandling.OVERWRITE:
-                if matched_duplicates_count > max_matched_deletable_files:
-                    msg = f"Error: Attempt to delete {len(files_matched_on_condition)} matched files, but limit is {max_matched_deletable_files}. Operation Cancelled."
-                    raise ValueError(msg)
-
-                for path in files_matched_on_condition:
-                    os.remove(path)
-
-                deleted_files=",,,".join( files_matched_on_condition)
-                log_info(f"Deleted {len(files_matched_on_condition)} files that matched condition: {deleted_files}", logger=logger, print_out=print_out)
-                response["matched_duplicates_deleted"] = deleted_files
-                response["duplication_handling_status"] = DuplicationHandlingStatus.OVERWRITTEN.value
-            # --- Increment Logic ---
-            elif duplication_handling==DuplicationHandling.INCREMENT:
-                increment = 0
-                base_file_name, ext = os.path.splitext(os.path.basename(full_file_path))
-                while os.path.exists(full_file_path):
-                    increment += 1
-                    file_name = f"{base_file_name}_v{increment}{ext}"
-                    full_file_path = os.path.join(directory_path, file_name)
-                response["duplication_handling_status"] = DuplicationHandlingStatus.INCREMENTED.value
-
-        # --- Save the File ---
-        with open(full_file_path, "w", encoding="utf-8") as f:
-            f.write(data_str)
-        response["saved_to_file_path"] = full_file_path
-
-
-    except Exception as e:
-        error_during_operation=f"Error occurred while writing JSON to file path: {full_file_path} : {type(e).__name__}-{str(e)}"
-        log_error(error_during_operation, logger=logger, print_out=print_out)
-        response["error_during_operation"] = error_during_operation
-        if raise_e:
-            raise e
-
-    return response  # Return response once at the end
+def list_as_strings(*enums):
+    """Converts a list of Enum members to their string values."""
+    return [str(enum) for enum in enums]
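The new list_as_strings takes enum members as varargs (despite the docstring's "list") and relies on each member's __str__; a minimal sketch, using a hypothetical stand-in enum since the package's own enums no longer ship in 2.8.1:

from enum import Enum
from ipulse_shared_core_ftredge.utils import list_as_strings  # assumes the package imports cleanly

class StageColor(Enum):  # hypothetical stand-in for illustration
    RED = "red"
    GREEN = "green"
    def __str__(self):
        return self.value

print(list_as_strings(StageColor.RED, StageColor.GREEN))  # ['red', 'green']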
{ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.7.1
+Version: 2.8.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -10,5 +10,4 @@ Requires-Dist: python-dateutil~=2.8
 Requires-Dist: pytest~=7.1
 Requires-Dist: google-cloud-logging~=3.10.0
 Requires-Dist: google-cloud-error-reporting~=1.11.0
-Requires-Dist: google-cloud-bigquery~=3.24.0
 
ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD
@@ -0,0 +1,19 @@
+ipulse_shared_core_ftredge/__init__.py,sha256=gVXmJATW1Yp-1XEZQT6KbkYoWNljmPblysPlhfSzdGk,882
+ipulse_shared_core_ftredge/logging/__init__.py,sha256=hE42Z23NuJW-pn1Lm_1MHUnht0I3NnoBSMz1J5A9g5E,72
+ipulse_shared_core_ftredge/logging/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
+ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py,sha256=k5gQU6matAzviRd7X29AIAThaOeXYVFsFaxT08kpqj4,6287
+ipulse_shared_core_ftredge/logging/utils_logging.py,sha256=q8VpDROulW0krG4O0kX0HRovmELBAvfCE8n1_MUn4Y4,2846
+ipulse_shared_core_ftredge/models/__init__.py,sha256=MeGH2ZBxkrwldUiWyUaI_TMyfq78tuSwRkN_mEfKD8U,161
+ipulse_shared_core_ftredge/models/organisation.py,sha256=22esRGYuJmKN3papkgozleEmDNJrVwUgIzKp7annvWs,3280
+ipulse_shared_core_ftredge/models/resource_catalog_item.py,sha256=mEGX8AftzrhEHqFVXjr62CuRnXC1vK4z3bHl_XBJodU,4964
+ipulse_shared_core_ftredge/models/user_auth.py,sha256=35HNN7ZW4ZELCqaJrAtoSsVLFAZ1KL2S_VmuzbcEMm4,119
+ipulse_shared_core_ftredge/models/user_profile.py,sha256=D3BB9D6XEv7IVZgsURgf0hWmUZW5rms3uiBXS0ZGLeE,1927
+ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=oKK0XsQDKkgDvjFPhX2XlqEqlKLBQ4AkvPHXEuZbFMY,1712
+ipulse_shared_core_ftredge/models/user_status.py,sha256=8TyRd8tBK9_xb0MPKbI5pn9-lX7ovKbeiuWYYPtIOiw,3202
+ipulse_shared_core_ftredge/utils/__init__.py,sha256=Yziq9yAHlOeXTa4LuBWSxZqBMBMfq0XBc0XtcS7lFfw,88
+ipulse_shared_core_ftredge/utils/utils_common.py,sha256=bxpRcd_uI2LyqbgyNquLarqOwXFnFxA-pDAn0JKRlqs,388
+ipulse_shared_core_ftredge-2.8.1.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ipulse_shared_core_ftredge-2.8.1.dist-info/METADATA,sha256=QupPp0QxKGLUDmwZXXIWjOwchya5NROy8Dr5mK-WAlY,511
+ipulse_shared_core_ftredge-2.8.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+ipulse_shared_core_ftredge-2.8.1.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD,,
ipulse_shared_core_ftredge/enums/__init__.py
@@ -1,37 +0,0 @@
-
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=missing-class-docstring
-
-from .enums_common_utils import (Status,
-                                 Unit,
-                                 Frequency)
-
-
-from .enums_modules import(Module,
-                           Domain)
-
-
-from .enums_module_fincore import (FinCoreCategory,
-                                   FincCoreSubCategory,
-                                   FinCoreRecordsCategory,
-                                   FinancialExchangeOrPublisher)
-
-from .enums_logging import (TargetLogs,
-                            LogLevel,
-                            LoggingHandlers)
-
-from .enums_data_eng import (DataPrimaryCategory,
-                             DataState,
-                             DatasetScope,
-                             DataSourceType,
-                             PipelineTriggerType,
-                             DataOperationType,
-                             MatchConditionType,
-                             DuplicationHandling,
-                             DuplicationHandlingStatus,
-                             CodingLanguage,
-                             ExecutionLocation,
-                             ExecutionComputeType)
-
-from .enums_solution_providers import (CloudProvider)
ipulse_shared_core_ftredge/enums/enums_common_utils.py
@@ -1,107 +0,0 @@
-
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=missing-class-docstring
-# pylint: disable=line-too-long
-
-from enum import Enum
-
-
-class Status(Enum):
-    OPEN = "open"
-    ACKNOWLEDGED = "acknowledged"
-    ESCALATED = "escalated"
-    IN_PROGRESS = "in_progress"
-    IN_REVIEW = "in_review"
-    RESOLVED = "resolved"
-    IGNORED = "ignored"
-    CANCELLED = "cancelled"
-    CLOSED = "closed"
-
-    def __str__(self):
-        return self.value
-
-### Exception during full exection, partially saved
-# Exception during ensemble pipeline; modifications collected in local object , nothing persisted
-# Exception during ensemble pipeline; modifications persisted , metadata failed
-# Exception during ensemble pipeline; modifications persisted , metadata persisted
-# Exception during ensemble pipeline; modifications persisted , metadata persisted
-
-
-class Unit(Enum):
-    MIX="MIX"
-    # Currency and Financial Values
-    USD = "USD"  # United States Dollar
-    EUR = "EUR"  # Euro
-    JPY = "JPY"  # Japanese Yen
-    GBP = "GBP"  # British Pound Sterling
-    AUD = "AUD"  # Australian Dollar
-    CAD = "CAD"  # Canadian Dollar
-    CHF = "CHF"  # Swiss Franc
-    CNY = "CNY"  # Chinese Yuan Renminbi
-    SEK = "SEK"  # Swedish Krona
-    NZD = "NZD"  # New Zealand Dollar
-    MXN = "MXN"  # Mexican Peso
-    SGD = "SGD"  # Singapore Dollar
-    HKD = "HKD"  # Hong Kong Dollar
-    NOK = "NOK"  # Norwegian Krone
-    KRW = "KRW"  # South Korean Won
-    RUB = "RUB"  # Russian Ruble
-    INR = "INR"  # Indian Rupee
-    BRL = "BRL"  # Brazilian Real
-    ZAR = "ZAR"  # South African Rand
-    CURRENCY = "currency"  # General currency, when specific currency is not needed
-
-    # Stock Market and Investments
-    SHARES = "shares"  # Number of shares
-    PERCENT = "prcnt"  # Percentage, used for rates and ratios
-    BPS = "bps"  # Basis points, often used for interest rates and financial ratios
-
-    # Volume and Quantitative Measurements
-    VOLUME = "volume"  # Trading volume in units
-    MILLIONS = "mills"  # Millions, used for large quantities or sums
-    BILLIONS = "bills"  # Billions, used for very large quantities or sums
-
-    # Commodity Specific Units
-    BARRELS = "barrels"  # Barrels, specifically for oil and similar liquids
-    TONNES = "tonnes"  # Tonnes, for bulk materials like metals or grains
-    TROY_OUNCES = "troy_oz"  # Troy ounces, specifically for precious metals
-
-    # Real Estate and Physical Properties
-    SQUARE_FEET = "sq_ft"  # Square feet, for area measurement in real estate
-    METER_SQUARE = "m2"  # Square meters, for area measurement in real estate
-    ACRES = "acres"  # Acres, used for measuring large plots of land
-
-    # Miscellaneous and Other Measures
-    UNITS = "units"  # Generic units, applicable when other specific units are not suitable
-    COUNT = "count"  # Count, used for tallying items or events
-    INDEX_POINTS = "index_pnts"  # Index points, used in measuring indices like stock market indices
-    RATIO = "ratio"  # Ratio, for various financial ratios
-
-    def __str__(self):
-        return self.value
-
-class Frequency(Enum):
-    ONE_MIN = "1min"
-    FIVE_MIN="5min"
-    FIFTEEN_MIN="15min"
-    THIRTY_MIN = "30min"
-    ONE_H = "1h"
-    TWO_H = "2h"
-    SIX_H = "6h"
-    TWELVE_H = "12h"
-    FOUR_H = "4h"
-    EOD="eod"
-    ONE_D = "1d"
-    TWO_D = "2d"
-    THREE_D = "3d"
-    ONE_W = "1w"
-    ONE_M = "1m"
-    TWO_M="2m"
-    THREE_M="3m"
-    SIX_M="6m"
-    ONE_Y="1y"
-    THREE_Y="3y"
-
-    def __str__(self):
-        return self.value
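Each deleted enum stringifies to its raw value via __str__; a short sketch of the behavior 2.7.1 callers lose, runnable only against ipulse-shared-core-ftredge==2.7.1:

from ipulse_shared_core_ftredge.enums import Status, Unit, Frequency  # 2.7.1 only

assert str(Status.RESOLVED) == "resolved"
assert str(Unit.USD) == "USD"
assert str(Frequency.ONE_D) == "1d"
# In 2.8.1 these imports fail: no enums files ship in the wheel per its RECORD.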