ipulse-shared-core-ftredge 2.6.1__py3-none-any.whl → 2.7.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ipulse-shared-core-ftredge might be problematic.
- ipulse_shared_core_ftredge/__init__.py +10 -9
- ipulse_shared_core_ftredge/enums/__init__.py +12 -7
- ipulse_shared_core_ftredge/enums/enums_common_utils.py +9 -0
- ipulse_shared_core_ftredge/enums/enums_data_eng.py +280 -76
- ipulse_shared_core_ftredge/enums/{enums_logs.py → enums_logging.py} +30 -1
- ipulse_shared_core_ftredge/enums/enums_module_fincore.py +16 -2
- ipulse_shared_core_ftredge/enums/enums_modules.py +6 -0
- ipulse_shared_core_ftredge/enums/{enums_cloud.py → enums_solution_providers.py} +11 -4
- ipulse_shared_core_ftredge/utils/__init__.py +11 -7
- ipulse_shared_core_ftredge/utils/logs/context_log.py +2 -3
- ipulse_shared_core_ftredge/utils/logs/get_logger.py +47 -20
- ipulse_shared_core_ftredge/utils/utils_cloud.py +26 -17
- ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +311 -180
- ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +150 -153
- ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +16 -15
- ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +2 -2
- ipulse_shared_core_ftredge/utils/utils_common.py +145 -110
- ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +2 -2
- {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/METADATA +1 -1
- ipulse_shared_core_ftredge-2.7.1.dist-info/RECORD +33 -0
- ipulse_shared_core_ftredge-2.6.1.dist-info/RECORD +0 -33
- {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/LICENCE +0 -0
- {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/WHEEL +0 -0
- {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/top_level.txt +0 -0
ipulse_shared_core_ftredge/utils/logs/get_logger.py

@@ -9,8 +9,9 @@ import logging
 import os
 import json
 import traceback
-from
-from ipulse_shared_core_ftredge.
+from typing import List, Union
+from ipulse_shared_core_ftredge.enums import LoggingHandlers
+from ipulse_shared_core_ftredge.utils.utils_cloud_gcp import add_gcp_cloud_logging, add_gcp_error_reporting
 
 ###################################################################################################
 ##################################################################################################
@@ -21,10 +22,10 @@ class CloudLogFormatter(logging.Formatter):
 
     def format(self, record):
         log_entry = {
+            'message': record.msg,
             'timestamp': self.formatTime(record, self.datefmt),
             'name': record.name,
             'severity': record.levelname,
-            'message': record.msg,
             'pathname': record.pathname,
             'lineno': record.lineno,
         }
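The only change to CloudLogFormatter is that 'message' now leads the structured entry, which changes how the payload reads in log viewers. A minimal stand-in sketch of an equivalent formatter (not the package's class; the json.dumps serialization step is an assumption, since the hunk does not show what format() returns):

import json
import logging

class StructuredFormatter(logging.Formatter):
    """Illustrative stand-in for CloudLogFormatter: 'message' first, then metadata."""
    def format(self, record):
        log_entry = {
            'message': record.msg,  # now the leading key
            'timestamp': self.formatTime(record, self.datefmt),
            'name': record.name,
            'severity': record.levelname,
            'pathname': record.pathname,
            'lineno': record.lineno,
        }
        return json.dumps(log_entry)  # serialization assumed for this demo only

handler = logging.StreamHandler()
handler.setFormatter(StructuredFormatter())
demo = logging.getLogger("demo")
demo.addHandler(handler)
demo.warning("hello")  # emits {"message": "hello", "timestamp": ..., ...}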
@@ -40,37 +41,63 @@ class LocalLogFormatter(logging.Formatter):
 
     def format(self, record): # Make sure you have the 'record' argument here!
         path_parts = record.pathname.split(os.sep)
-
+
         # Get the last two parts of the path if they exist
         if len(path_parts) >= 2:
             short_path = os.path.join(path_parts[-2], path_parts[-1])
         else:
             short_path = record.pathname
+
+        # Format log messages differently based on the log level
+        if record.levelno == logging.INFO:
+            log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
+        elif record.levelno == logging.DEBUG:
+            log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+        elif record.levelno == logging.ERROR:
+            log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+            if record.exc_info:
+                log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+        else:
+            log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+
 
-        log_message = f"{record.levelname} ::: {record.name} ::: {short_path} ::: lineno: {record.lineno} ::: {self.formatTime(record, self.datefmt)} ::: message: {record.msg}"
-        if record.exc_info:
-            log_message += "\n" + ''.join(
-                traceback.format_exception(*record.exc_info)
-            )
         return log_message
 
 
-def get_logger( logger_name:str ,level=logging.INFO,
+def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandlers, List[LoggingHandlers]] = LoggingHandlers.NONE):
+
+    """Creates and configures a logger with the specified handlers."""
 
     logger = logging.getLogger(logger_name)
     logger.setLevel(level)
     cloud_formatter = CloudLogFormatter()
 
-
+    # Ensure logging_handler_providers is a list for consistent processing
+    if not isinstance(logging_handler_providers, list):
+        logging_handler_providers = [logging_handler_providers]
 
-
-
-
-
+    supported_remote_handlers = [
+        LoggingHandlers.GCP_CLOUD_LOGGING,
+        LoggingHandlers.GCP_ERROR_REPORTING,
+        LoggingHandlers.LOCAL_STREAM,
+        LoggingHandlers.NONE,  # If NONE is considered a remote handler
+    ]
 
-
-        setup_gcp_logging(logger=logger, formatter=cloud_formatter, enable_error_reporting=enable_error_reporting)
-    elif cloud_provider not in without_cloud_logging_handler:
-        raise ValueError(f"Unsupported cloud provider: {cloud_provider}. Supported cloud providers: {CloudProvider.GCP.value}")
+    # Remote handlers
 
-
+    for handler_provider in logging_handler_providers:
+        if handler_provider in supported_remote_handlers:
+            if handler_provider == LoggingHandlers.GCP_CLOUD_LOGGING:
+                add_gcp_cloud_logging(logger, cloud_formatter)
+            elif handler_provider == LoggingHandlers.GCP_ERROR_REPORTING:
+                add_gcp_error_reporting(logger)
+            elif handler_provider == LoggingHandlers.LOCAL_STREAM: # Handle local stream
+                local_handler = logging.StreamHandler()
+                local_handler.setFormatter(LocalLogFormatter())
+                logger.addHandler(local_handler)
+        else:
+            raise ValueError(
+                f"Unsupported logging provider: {handler_provider}. "
+                f"Supported providers: {[h.value for h in supported_remote_handlers]}"
+            )
+    return logger
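With the new signature, callers pass a single LoggingHandlers member or a list of them instead of a cloud-provider flag. A hedged usage sketch based on the signature shown above; the import paths are inferred from the file list at the top (the package __init__ may re-export these), and the GCP handlers need credentials available in the environment:

import logging

from ipulse_shared_core_ftredge.enums import LoggingHandlers
from ipulse_shared_core_ftredge.utils.logs.get_logger import get_logger  # path assumed from the file list

# Local-only logger: LOCAL_STREAM attaches a StreamHandler with LocalLogFormatter.
local_logger = get_logger("etl-local", level=logging.DEBUG,
                          logging_handler_providers=LoggingHandlers.LOCAL_STREAM)
local_logger.debug("reading config")  # [DEBUG] <timestamp> :: reading config :: <dir/file> :: lineno ...

# Production logger: ship to GCP Cloud Logging and Error Reporting, keep a local stream too.
prod_logger = get_logger(
    "etl-prod",
    level=logging.INFO,
    logging_handler_providers=[
        LoggingHandlers.GCP_CLOUD_LOGGING,
        LoggingHandlers.GCP_ERROR_REPORTING,
        LoggingHandlers.LOCAL_STREAM,
    ],
)
prod_logger.info("pipeline started")

Note that LoggingHandlers.NONE (the default) is accepted by the validation list but matches no branch in the loop, so the default call returns a logger with no handlers attached.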
ipulse_shared_core_ftredge/utils/utils_cloud.py

@@ -5,40 +5,49 @@
 # pylint: disable=line-too-long
 # pylint: disable=unused-variable
 # pylint: disable=broad-exception-caught
-from
-from
-from .utils_cloud_gcp import (write_json_to_gcs_extended,
+from ipulse_shared_core_ftredge.enums import CloudProvider, DataSourceType, DuplicationHandling, MatchConditionType
+from .utils_collector_pipelinemon import Pipelinemon
+from .utils_cloud_gcp import (write_json_to_gcs_extended,
+                              read_json_from_gcs)
 
 #######################################################################################################################
 #######################################################################################################################
 ################################################# cloud IO functions ########################################
 
 # Define the central function that routes to the relevant cloud-specific function
-def write_json_to_cloud_storage_extended(
-
-
+def write_json_to_cloud_storage_extended(cloud_storage:CloudProvider | DataSourceType, storage_client, data:dict | list | str, bucket_name: str, file_name: str,
+                                         duplication_handling:DuplicationHandling, duplication_match_condition_type: MatchConditionType, duplication_match_condition: str = "",
+                                         max_retries:int=2, max_matched_deletable_files:int=1,
+                                         pipelinemon: Pipelinemon = None, logger=None, print_out=False, raise_e=False):
 
 
-
-
+    supported_cloud_storage_values = [CloudProvider.GCP, DataSourceType.GCS]
+
+    if cloud_storage in [CloudProvider.GCP, DataSourceType.GCS]:
+        return write_json_to_gcs_extended(
+            pipelinemon=pipelinemon,
             storage_client=storage_client,
             data=data,
             bucket_name=bucket_name,
             file_name=file_name,
-
-
-
+            duplication_handling_enum=duplication_handling,
+            duplication_match_condition_type_enum=duplication_match_condition_type,
+            duplication_match_condition=duplication_match_condition,
             max_retries=max_retries,
-            max_deletable_files=
+            max_deletable_files=max_matched_deletable_files,
             logger=logger,
-            print_out=print_out
+            print_out=print_out,
+            raise_e=raise_e
         )
 
-    raise ValueError(f"Unsupported cloud
+    raise ValueError(f"Unsupported cloud storage : {cloud_storage}. Supported cloud storage values: {supported_cloud_storage_values}")
+
+
+def read_json_from_cloud_storage(cloud_storage:CloudProvider | DataSourceType , storage_client, bucket_name:str, file_name:str, logger=None, print_out:bool=False):
 
+    supported_cloud_storage_values = [CloudProvider.GCP, DataSourceType.GCS]
 
-
-    if cloud_provider == CloudProvider.GCP:
+    if cloud_storage in [CloudProvider.GCP, DataSourceType.GCS]:
         return read_json_from_gcs(storage_client=storage_client, bucket_name=bucket_name, file_name=file_name, logger=logger, print_out=print_out)
 
-    raise ValueError(f"Unsupported cloud
+    raise ValueError(f"Unsupported cloud storage: {cloud_storage}. Supported cloud storage values: {supported_cloud_storage_values}")
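The write and read routers now take a CloudProvider or DataSourceType value plus the duplication-handling enums. A hedged usage sketch; the member names OVERWRITE and EXACT_MATCH are placeholders (the real members live in enums_data_eng.py and are not shown in this diff), and the import path of the routers is assumed from the file list:

from google.cloud import storage

from ipulse_shared_core_ftredge.enums import (CloudProvider, DataSourceType,
                                              DuplicationHandling, MatchConditionType)
from ipulse_shared_core_ftredge.utils.utils_cloud import (write_json_to_cloud_storage_extended,
                                                          read_json_from_cloud_storage)

client = storage.Client()  # requires GCP credentials in the environment

# Route a JSON write to GCS; CloudProvider.GCP and DataSourceType.GCS are both accepted.
write_json_to_cloud_storage_extended(
    cloud_storage=CloudProvider.GCP,
    storage_client=client,
    data={"status": "ok"},
    bucket_name="my-bucket",
    file_name="reports/status.json",
    duplication_handling=DuplicationHandling.OVERWRITE,                # placeholder member name
    duplication_match_condition_type=MatchConditionType.EXACT_MATCH,  # placeholder member name
    duplication_match_condition="reports/status.json",
    max_retries=2,
    max_matched_deletable_files=1,
    raise_e=True,
)

payload = read_json_from_cloud_storage(
    cloud_storage=DataSourceType.GCS,
    storage_client=client,
    bucket_name="my-bucket",
    file_name="reports/status.json",
    print_out=True,
)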