ipulse-shared-core-ftredge 2.6.1-py3-none-any.whl → 2.7.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic.

Files changed (24)
  1. ipulse_shared_core_ftredge/__init__.py +10 -9
  2. ipulse_shared_core_ftredge/enums/__init__.py +12 -7
  3. ipulse_shared_core_ftredge/enums/enums_common_utils.py +9 -0
  4. ipulse_shared_core_ftredge/enums/enums_data_eng.py +280 -76
  5. ipulse_shared_core_ftredge/enums/{enums_logs.py → enums_logging.py} +30 -1
  6. ipulse_shared_core_ftredge/enums/enums_module_fincore.py +16 -2
  7. ipulse_shared_core_ftredge/enums/enums_modules.py +6 -0
  8. ipulse_shared_core_ftredge/enums/{enums_cloud.py → enums_solution_providers.py} +11 -4
  9. ipulse_shared_core_ftredge/utils/__init__.py +11 -7
  10. ipulse_shared_core_ftredge/utils/logs/context_log.py +2 -3
  11. ipulse_shared_core_ftredge/utils/logs/get_logger.py +47 -20
  12. ipulse_shared_core_ftredge/utils/utils_cloud.py +26 -17
  13. ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +311 -180
  14. ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +150 -153
  15. ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +16 -15
  16. ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +2 -2
  17. ipulse_shared_core_ftredge/utils/utils_common.py +145 -110
  18. ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +2 -2
  19. {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/METADATA +1 -1
  20. ipulse_shared_core_ftredge-2.7.1.dist-info/RECORD +33 -0
  21. ipulse_shared_core_ftredge-2.6.1.dist-info/RECORD +0 -33
  22. {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/LICENCE +0 -0
  23. {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/WHEEL +0 -0
  24. {ipulse_shared_core_ftredge-2.6.1.dist-info → ipulse_shared_core_ftredge-2.7.1.dist-info}/top_level.txt +0 -0
@@ -6,164 +6,161 @@
  # pylint: disable=unused-variable
  # pylint: disable=broad-exception-raised

- import json
- import os
- import time
- from typing import Optional
- from google.cloud.storage import Client as GCSClient
- from ipulse_shared_core_ftredge.enums.enums_logs import LogLevel
- from .utils_collector_pipelinemon import Pipelinemon
- from .logs.context_log import ContextLog
+ # import json
+ # import os
+ # import time
+ # from google.cloud.storage import Client as GCSClient
+ # from ipulse_shared_core_ftredge.enums import LogLevel, DuplicationHandling, DuplicationHandlingStatus, MatchConditionType, DataSourceType
+ # from ipulse_shared_core_ftredge.utils import log_error, log_info
+ # from .utils_collector_pipelinemon import Pipelinemon
+ # from .logs.context_log import ContextLog


- ############################################################################
- ##################### SETTING UP LOGGER ##########################



-
- def write_json_to_gcs_with_pipelinemon_extended( pipelinemon:Pipelinemon, storage_client:GCSClient, data:dict | list | str, bucket_name: str, file_name: str,
- file_exists_if_starts_with_prefix:Optional[str] =None, overwrite_if_exists:bool=False, increment_if_exists:bool=False,
- max_retries:int=2, max_deletable_files:int=1):
- """Saves data to Google Cloud Storage and optionally locally.
+ # def write_json_to_gcs_with_pipelinemon_extended( pipelinemon:Pipelinemon, storage_client:GCSClient, data:dict | list | str, bucket_name: str, file_name: str,
+ # file_exists_if_starts_with_prefix:Optional[str] =None, overwrite_if_exists:bool=False, increment_if_exists:bool=False,
+ # max_retries:int=2, max_deletable_files:int=1):
+ # """Saves data to Google Cloud Storage and optionally locally.

- This function attempts to upload data to GCS.
- - If the upload fails after retries and `save_locally` is True or `local_path` is provided, it attempts to save the data locally.
- - It handles file name conflicts based on these rules:
- - If `overwrite_if_exists` is True:
- - If `file_exists_if_contains_substr` is provided, ANY existing file containing the substring is deleted, and the new file is saved with the provided `file_name`.
- - If `file_exists_if_contains_substr` is None, and a file with the exact `file_name` exists, it's overwritten.
- - If `increment_if_exists` is True:
- - If `file_exists_if_contains_substr` is provided, a new file with an incremented version is created ONLY if a file with the EXACT `file_name` exists.
- - If `file_exists_if_contains_substr` is None, a new file with an incremented version is created if a file with the exact `file_name` exists.
+ # This function attempts to upload data to GCS.
+ # - If the upload fails after retries and `save_locally` is True or `local_path` is provided, it attempts to save the data locally.
+ # - It handles file name conflicts based on these rules:
+ # - If `overwrite_if_exists` is True:
+ # - If `file_exists_if_contains_substr` is provided, ANY existing file containing the substring is deleted, and the new file is saved with the provided `file_name`.
+ # - If `file_exists_if_contains_substr` is None, and a file with the exact `file_name` exists, it's overwritten.
+ # - If `increment_if_exists` is True:
+ # - If `file_exists_if_contains_substr` is provided, a new file with an incremented version is created ONLY if a file with the EXACT `file_name` exists.
+ # - If `file_exists_if_contains_substr` is None, a new file with an incremented version is created if a file with the exact `file_name` exists.

- -If both overwrite_if_exists and increment_if_exists are provided as Ture, an exception will be raised.
- """
-
- cloud_storage_ref="GCP_GCS"
-
- with pipelinemon.context(f"write_json_to_{cloud_storage_ref}_with_pipelinemon"):
- cloud_storage_upload_error = None
- # Input validation
- if overwrite_if_exists and increment_if_exists:
- err_msg="Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
- pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM, subject="Param validation", description=err_msg))
- return {"cloud_storage_upload_error": err_msg}
- if max_deletable_files > 10:
- err_msg="max_deletable_files should be less than 10 for safety. For more use another method."
- pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM,subject="max_deletable_files", description=err_msg))
- return {"cloud_storage_upload_error": err_msg}
-
- # Prepare data
- if isinstance(data, (list, dict)):
- data_str = json.dumps(data, indent=2)
- else:
- data_str = data
-
- bucket = storage_client.bucket(bucket_name)
- base_file_name, ext = os.path.splitext(file_name)
- increment = 0
- attempts = 0
- success = False
-
- # GCS-related metadata
- cloud_storage_path = None
- cloud_storage_file_overwritten = False
- cloud_storage_file_already_exists = False
- cloud_storage_file_saved_with_increment = False
- cloud_storage_file_exists_checked_on_name = file_name
- cloud_storage_deleted_files=[]
-
- try:
- upload_allowed = True
- # --- Overwrite Logic ---
- if overwrite_if_exists:
- with pipelinemon.context("overwriting"):
- if file_exists_if_starts_with_prefix:
- cloud_storage_file_exists_checked_on_name = file_exists_if_starts_with_prefix
- blobs_to_delete = list(bucket.list_blobs(prefix=file_exists_if_starts_with_prefix))
- if len(blobs_to_delete) > max_deletable_files:
- err_msg=f"Error: Attempt to delete {len(blobs_to_delete)} matched files, but limit is {max_deletable_files}."
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_to_delete)} files in bucket {bucket_name}"))
- pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM, subject="Too many files", description=err_msg))
- #### Ensuring to quit the operation if too many files are found, it will be catched below
- return {"cloud_storage_upload_error": err_msg}
- if blobs_to_delete:
- cloud_storage_file_already_exists = True
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_to_delete)} files in bucket {bucket_name}"))
- for blob in blobs_to_delete:
- cloud_storage_path_del = f"gs://{bucket_name}/{blob.name}"
- pipelinemon.add_system_impacted(f"delete: {cloud_storage_ref}_bucket_file: {cloud_storage_path_del}")
- blob.delete()
- pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_DELETE_COMPLETE, subject= cloud_storage_path_del, description=f"file deleted from {cloud_storage_ref} as part of overwrite, matched with prefix"))
- cloud_storage_deleted_files.append(cloud_storage_path_del)
- cloud_storage_file_overwritten = True
- elif bucket.blob(file_name).exists():
- cloud_storage_file_already_exists = True
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Exact name matched with existing file in bucket {bucket_name}"))
- cloud_storage_path_del = f"gs://{bucket_name}/{file_name}"
- pipelinemon.add_system_impacted(f"delete: {cloud_storage_ref}_bucket_file: {cloud_storage_path_del}")
- blob.delete() # Delete the existing blob
- pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_DELETE_COMPLETE, subject= cloud_storage_path_del, description=f"file deleted from {cloud_storage_ref} as part of overwrite, matched with exact name"))
- cloud_storage_deleted_files.append(cloud_storage_path_del)
- cloud_storage_file_overwritten = True
- # --- Increment Logic ---
- elif increment_if_exists:
- with pipelinemon.context("incrementing"):
- cloud_storage_file_exists_checked_on_name = file_name # We only increment if the exact name exists
- while bucket.blob(file_name).exists():
- cloud_storage_file_already_exists = True
- increment += 1
- file_name = f"{base_file_name}_v{increment}{ext}"
- cloud_storage_file_saved_with_increment = True
- if increment>0:
- cloud_storage_path = f"gs://{bucket_name}/{file_name}"
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Attempting to save file with incremented version in {bucket_name}"))
- # --- Check for Conflicts (Including Prefix) ---
- else:
- if file_exists_if_starts_with_prefix:
- blobs_matched = list(bucket.list_blobs(prefix=file_exists_if_starts_with_prefix))
- cloud_storage_file_exists_checked_on_name = file_exists_if_starts_with_prefix
- if blobs_matched:
- upload_allowed = False
- cloud_storage_file_already_exists = True
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_matched)} existing files in bucket {bucket_name}."))
- elif bucket.blob(file_name).exists():
- pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Exact name matched with existing file in bucket {bucket_name}."))
- upload_allowed = False
- cloud_storage_file_already_exists = True
-
- # --- GCS Upload ---
- cloud_storage_path = f"gs://{bucket_name}/{file_name}"
- if overwrite_if_exists or increment_if_exists or upload_allowed:
- with pipelinemon.context("uploading"):
- while attempts < max_retries and not success:
- try:
- blob = bucket.blob(file_name) # Use the potentially updated file_name
- pipelinemon.add_system_impacted(f"upload: {cloud_storage_ref}_bucket_file: {cloud_storage_path}")
- blob.upload_from_string(data_str, content_type='application/json')
- pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_PERSISTNACE_COMPLETE, subject= cloud_storage_path, description=f"file uploaded to {cloud_storage_ref}"))
- success = True
- except Exception as e:
- attempts += 1
- if attempts < max_retries:
- time.sleep(2 ** attempts)
- else:
- err_msg=f"Error uploading file to {cloud_storage_ref} bucket {bucket_name} with name {file_name} : {type(e).__name__}-{str(e)}"
- pipelinemon.add_log(ContextLog(LogLevel.ERROR_EXCEPTION, e=e, description=err_msg))
- return {"cloud_storage_upload_error": err_msg}
-
- except Exception as e:
- pipelinemon.add_log(ContextLog(LogLevel.ERROR_EXCEPTION, e=e))
- return {"cloud_storage_upload_error": f"Exception in GCS upload {type(e).__name__}-{str(e)}"}
- # --- Return Metadata ---
- return {
- "cloud_storage_path": cloud_storage_path if ((success or not upload_allowed) and not cloud_storage_upload_error ) else None,
- "cloud_storage_file_already_exists": cloud_storage_file_already_exists,
- "cloud_storage_file_exists_checked_on_name":cloud_storage_file_exists_checked_on_name ,
- "cloud_storage_file_overwritten": cloud_storage_file_overwritten,
- "cloud_storage_deleted_file_names": ",,,".join(cloud_storage_deleted_files) if cloud_storage_deleted_files else None,
- "cloud_storage_file_saved_with_increment": cloud_storage_file_saved_with_increment,
- "cloud_storage_upload_error": cloud_storage_upload_error
- }
+ # -If both overwrite_if_exists and increment_if_exists are provided as Ture, an exception will be raised.
+ # """
+
+ # cloud_storage_ref="GCP_GCS"
+
+ # with pipelinemon.context(f"write_json_to_{cloud_storage_ref}_with_pipelinemon"):
+ # cloud_storage_upload_error = None
+ # # Input validation
+ # if overwrite_if_exists and increment_if_exists:
+ # err_msg="Both 'overwrite_if_exists' and 'increment_if_exists' cannot be True simultaneously."
+ # pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM, subject="Param validation", description=err_msg))
+ # return {"cloud_storage_upload_error": err_msg}
+ # if max_deletable_files > 10:
+ # err_msg="max_deletable_files should be less than 10 for safety. For more use another method."
+ # pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM,subject="max_deletable_files", description=err_msg))
+ # return {"cloud_storage_upload_error": err_msg}
+
+ # # Prepare data
+ # if isinstance(data, (list, dict)):
+ # data_str = json.dumps(data, indent=2)
+ # else:
+ # data_str = data
+
+ # bucket = storage_client.bucket(bucket_name)
+ # base_file_name, ext = os.path.splitext(file_name)
+ # increment = 0
+ # attempts = 0
+ # success = False
+
+ # # GCS-related metadata
+ # cloud_storage_path = None
+ # cloud_storage_file_overwritten = False
+ # cloud_storage_file_already_exists = False
+ # cloud_storage_file_saved_with_increment = False
+ # cloud_storage_file_exists_checked_on_name = file_name
+ # cloud_storage_deleted_files=[]
+
+ # try:
+ # upload_allowed = True
+ # # --- Overwrite Logic ---
+ # if overwrite_if_exists:
+ # with pipelinemon.context("overwriting"):
+ # if file_exists_if_starts_with_prefix:
+ # cloud_storage_file_exists_checked_on_name = file_exists_if_starts_with_prefix
+ # blobs_to_delete = list(bucket.list_blobs(prefix=file_exists_if_starts_with_prefix))
+ # if len(blobs_to_delete) > max_deletable_files:
+ # err_msg=f"Error: Attempt to delete {len(blobs_to_delete)} matched files, but limit is {max_deletable_files}."
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_to_delete)} files in bucket {bucket_name}"))
+ # pipelinemon.add_log(ContextLog(LogLevel.ERROR_CUSTOM, subject="Too many files", description=err_msg))
+ # #### Ensuring to quit the operation if too many files are found, it will be catched below
+ # return {"cloud_storage_upload_error": err_msg}
+ # if blobs_to_delete:
+ # cloud_storage_file_already_exists = True
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_to_delete)} files in bucket {bucket_name}"))
+ # for blob in blobs_to_delete:
+ # cloud_storage_path_del = f"gs://{bucket_name}/{blob.name}"
+ # pipelinemon.add_system_impacted(f"delete: {cloud_storage_ref}_bucket_file: {cloud_storage_path_del}")
+ # blob.delete()
+ # pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_DELETE_COMPLETE, subject= cloud_storage_path_del, description=f"file deleted from {cloud_storage_ref} as part of overwrite, matched with prefix"))
+ # cloud_storage_deleted_files.append(cloud_storage_path_del)
+ # cloud_storage_file_overwritten = True
+ # elif bucket.blob(file_name).exists():
+ # cloud_storage_file_already_exists = True
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Exact name matched with existing file in bucket {bucket_name}"))
+ # cloud_storage_path_del = f"gs://{bucket_name}/{file_name}"
+ # pipelinemon.add_system_impacted(f"delete: {cloud_storage_ref}_bucket_file: {cloud_storage_path_del}")
+ # blob.delete() # Delete the existing blob
+ # pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_DELETE_COMPLETE, subject= cloud_storage_path_del, description=f"file deleted from {cloud_storage_ref} as part of overwrite, matched with exact name"))
+ # cloud_storage_deleted_files.append(cloud_storage_path_del)
+ # cloud_storage_file_overwritten = True
+ # # --- Increment Logic ---
+ # elif increment_if_exists:
+ # with pipelinemon.context("incrementing"):
+ # cloud_storage_file_exists_checked_on_name = file_name # We only increment if the exact name exists
+ # while bucket.blob(file_name).exists():
+ # cloud_storage_file_already_exists = True
+ # increment += 1
+ # file_name = f"{base_file_name}_v{increment}{ext}"
+ # cloud_storage_file_saved_with_increment = True
+ # if increment>0:
+ # cloud_storage_path = f"gs://{bucket_name}/{file_name}"
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Attempting to save file with incremented version in {bucket_name}"))
+ # # --- Check for Conflicts (Including Prefix) ---
+ # else:
+ # if file_exists_if_starts_with_prefix:
+ # blobs_matched = list(bucket.list_blobs(prefix=file_exists_if_starts_with_prefix))
+ # cloud_storage_file_exists_checked_on_name = file_exists_if_starts_with_prefix
+ # if blobs_matched:
+ # upload_allowed = False
+ # cloud_storage_file_already_exists = True
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_exists_if_starts_with_prefix, description=f"Prefix matched with {len(blobs_matched)} existing files in bucket {bucket_name}."))
+ # elif bucket.blob(file_name).exists():
+ # pipelinemon.add_log(ContextLog(LogLevel.NOTICE_ALREADY_EXISTS, subject=file_name, description=f"Exact name matched with existing file in bucket {bucket_name}."))
+ # upload_allowed = False
+ # cloud_storage_file_already_exists = True
+
+ # # --- GCS Upload ---
+ # cloud_storage_path = f"gs://{bucket_name}/{file_name}"
+ # if overwrite_if_exists or increment_if_exists or upload_allowed:
+ # with pipelinemon.context("uploading"):
+ # while attempts < max_retries and not success:
+ # try:
+ # blob = bucket.blob(file_name) # Use the potentially updated file_name
+ # pipelinemon.add_system_impacted(f"upload: {cloud_storage_ref}_bucket_file: {cloud_storage_path}")
+ # blob.upload_from_string(data_str, content_type='application/json')
+ # pipelinemon.add_log(ContextLog(LogLevel.INFO_REMOTE_PERSISTNACE_COMPLETE, subject= cloud_storage_path, description=f"file uploaded to {cloud_storage_ref}"))
+ # success = True
+ # except Exception as e:
+ # attempts += 1
+ # if attempts < max_retries:
+ # time.sleep(2 ** attempts)
+ # else:
+ # err_msg=f"Error uploading file to {cloud_storage_ref} bucket {bucket_name} with name {file_name} : {type(e).__name__}-{str(e)}"
+ # pipelinemon.add_log(ContextLog(LogLevel.ERROR_EXCEPTION, e=e, description=err_msg))
+ # return {"cloud_storage_upload_error": err_msg}
+
+ # except Exception as e:
+ # pipelinemon.add_log(ContextLog(LogLevel.ERROR_EXCEPTION, e=e))
+ # return {"cloud_storage_upload_error": f"Exception in GCS upload {type(e).__name__}-{str(e)}"}
+ # # --- Return Metadata ---
+ # return {
+ # "cloud_storage_path": cloud_storage_path if ((success or not upload_allowed) and not cloud_storage_upload_error ) else None,
+ # "cloud_storage_file_already_exists": cloud_storage_file_already_exists,
+ # "cloud_storage_file_exists_checked_on_name":cloud_storage_file_exists_checked_on_name ,
+ # "cloud_storage_file_overwritten": cloud_storage_file_overwritten,
+ # "cloud_storage_deleted_file_names": ",,,".join(cloud_storage_deleted_files) if cloud_storage_deleted_files else None,
+ # "cloud_storage_file_saved_with_increment": cloud_storage_file_saved_with_increment,
+ # "cloud_storage_upload_error": cloud_storage_upload_error
+ # }
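Note: judging by the files-changed stats (+150 −153), the hunk above is most likely ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py; 2.7.1 removes the live write_json_to_gcs_with_pipelinemon_extended implementation and keeps only a commented-out copy. For orientation, here is a minimal, hypothetical sketch of how the 2.6.1 signature shown above was invoked; the bucket name, file name, data payload and Pipelinemon setup are illustrative assumptions, not values taken from the package.

# Hypothetical usage of the 2.6.1 helper shown above (commented out in 2.7.1).
# Bucket/file names, payload and Pipelinemon construction are assumptions for illustration only.
from google.cloud.storage import Client as GCSClient
from ipulse_shared_core_ftredge.utils.utils_collector_pipelinemon import Pipelinemon
from ipulse_shared_core_ftredge.utils.utils_cloud_gcp_with_collectors import (
    write_json_to_gcs_with_pipelinemon_extended,
)

pipelinemon = Pipelinemon()  # constructor arguments depend on the 2.6.1 API; placeholder only
result = write_json_to_gcs_with_pipelinemon_extended(
    pipelinemon=pipelinemon,
    storage_client=GCSClient(),
    data={"symbol": "EXAMPLE", "rows": []},
    bucket_name="example-bucket",
    file_name="example.json",
    file_exists_if_starts_with_prefix=None,
    overwrite_if_exists=True,   # must not be combined with increment_if_exists
    increment_if_exists=False,
    max_retries=2,
    max_deletable_files=1,
)
if result.get("cloud_storage_upload_error"):
    raise RuntimeError(result["cloud_storage_upload_error"])

The function returns metadata rather than raising on most failures, so callers were expected to inspect keys such as cloud_storage_upload_error and cloud_storage_file_already_exists, as shown in the return block above.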
 
@@ -5,22 +5,23 @@
  # pylint: disable=line-too-long
  # pylint: disable=unused-variable
  # pylint: disable=broad-exception-raised
- from typing import Optional
- from ipulse_shared_core_ftredge.enums.enums_cloud import CloudProvider
- from .utils_collector_pipelinemon import Pipelinemon
- from .utils_cloud_gcp_with_collectors import write_json_to_gcs_with_pipelinemon_extended
+ # from typing import Optional
+ # from ipulse_shared_core_ftredge.enums import DataSourceType
+ # from .utils_collector_pipelinemon import Pipelinemon
+ # from .utils_cloud_gcp import write_json_to_gcs_extended


- def write_json_to_cloud_storage_with_pipelinemon_extended(cloud_provider:CloudProvider, pipelinemon:Pipelinemon, storage_client, data:dict | list | str, bucket_name: str, file_name: str,
- file_exists_if_starts_with_prefix:Optional[str] =None, overwrite_if_exists:bool=False, increment_if_exists:bool=False,
- max_retries:int=2, max_deletable_files:int=1):
+ # def write_json_to_cloud_storage_with_pipelinemon_extended(cloud_storage_type:DataSourceType, cloud_storage_client, pipelinemon:Pipelinemon,
+ # data:dict | list | str, bucket_name: str, file_name: str,
+
+
+ # max_retries:int=2, max_deletable_files:int=1):

+ # supported_cloud_storage_types = [DataSourceType.GCS]
+ # if cloud_storage_type == DataSourceType.GCS:
+ # return write_json_to_gcs_extended(pipelinemon=pipelinemon, storage_client=cloud_storage_client, data=data, bucket_name=bucket_name, file_name=file_name,
+ # ,
+ # max_retries=max_retries,
+ # max_deletable_files=max_deletable_files)

- if cloud_provider == CloudProvider.GCP:
- return write_json_to_gcs_with_pipelinemon_extended(pipelinemon=pipelinemon, storage_client=storage_client, data=data, bucket_name=bucket_name, file_name=file_name,
- file_exists_if_starts_with_prefix=file_exists_if_starts_with_prefix,
- overwrite_if_exists=overwrite_if_exists, increment_if_exists=increment_if_exists,
- max_retries=max_retries,
- max_deletable_files=max_deletable_files)
-
- raise ValueError(f"Unsupported cloud provider: {cloud_provider}. Supported cloud providers: {CloudProvider.GCP.value}")
+ # raise ValueError(f"Unsupported cloud provider: {cloud_storage_type}. Supported cloud providers: {supported_cloud_storage_types}")
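Note: this hunk (apparently utils_cloud_with_collectors.py, per the +16 −15 stats) comments out the provider dispatcher as well; in 2.6.1 it routed on a CloudProvider enum and raised ValueError for anything other than GCP. A hedged sketch of that 2.6.1 call, under the same illustrative assumptions as the sketch above:

# Hypothetical call against the 2.6.1 dispatcher shown above; any provider other
# than CloudProvider.GCP raised ValueError. Module path, names and client setup are assumptions.
from google.cloud.storage import Client as GCSClient
from ipulse_shared_core_ftredge.enums.enums_cloud import CloudProvider
from ipulse_shared_core_ftredge.utils.utils_cloud_with_collectors import (
    write_json_to_cloud_storage_with_pipelinemon_extended,
)

result = write_json_to_cloud_storage_with_pipelinemon_extended(
    cloud_provider=CloudProvider.GCP,
    pipelinemon=pipelinemon,          # a configured Pipelinemon instance, as in the earlier sketch
    storage_client=GCSClient(),
    data=[{"id": 1}],
    bucket_name="example-bucket",
    file_name="records.json",
    increment_if_exists=True,         # would save records_v1.json, records_v2.json, ... on conflict
)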
@@ -9,8 +9,8 @@ import uuid
  from datetime import datetime, timezone
  from contextlib import contextmanager
  from typing import List
- from ipulse_shared_core_ftredge.enums.enums_logs import TargetLogs, LogLevel
- from ipulse_shared_core_ftredge.utils.logs.context_log import ContextLog
+ from ipulse_shared_core_ftredge.enums import LogLevel, TargetLogs
+ from ipulse_shared_core_ftredge.utils.logs import ContextLog
  ############################################################################
  ##### PIPINEMON Collector for Logs and Statuses of running pipelines #######
  class Pipelinemon:
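Note: the final hunk (the Pipelinemon collector module) only switches to package-level imports, presumably re-exported through the updated enums/__init__.py and utils/__init__.py shown in the file list. Side by side, assuming each path resolves in its respective version:

# 2.6.1: names imported from their concrete modules
from ipulse_shared_core_ftredge.enums.enums_logs import TargetLogs, LogLevel
from ipulse_shared_core_ftredge.utils.logs.context_log import ContextLog

# 2.7.1: the same names imported from the package level, as in the diff above
from ipulse_shared_core_ftredge.enums import LogLevel, TargetLogs
from ipulse_shared_core_ftredge.utils.logs import ContextLog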