ipulse-shared-core-ftredge 2.7.1-py3-none-any.whl → 2.8.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (30)
  1. ipulse_shared_core_ftredge/__init__.py +7 -12
  2. ipulse_shared_core_ftredge/logging/__init__.py +1 -0
  3. ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py +144 -0
  4. ipulse_shared_core_ftredge/logging/utils_logging.py +72 -0
  5. ipulse_shared_core_ftredge/utils/__init__.py +1 -21
  6. ipulse_shared_core_ftredge/utils/utils_common.py +3 -173
  7. {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/METADATA +1 -2
  8. ipulse_shared_core_ftredge-2.8.1.dist-info/RECORD +19 -0
  9. ipulse_shared_core_ftredge/enums/__init__.py +0 -37
  10. ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -107
  11. ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -313
  12. ipulse_shared_core_ftredge/enums/enums_logging.py +0 -108
  13. ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -72
  14. ipulse_shared_core_ftredge/enums/enums_modules.py +0 -31
  15. ipulse_shared_core_ftredge/enums/enums_solution_providers.py +0 -24
  16. ipulse_shared_core_ftredge/enums/pulse_enums.py +0 -182
  17. ipulse_shared_core_ftredge/utils/logs/__init__.py +0 -2
  18. ipulse_shared_core_ftredge/utils/logs/context_log.py +0 -210
  19. ipulse_shared_core_ftredge/utils/logs/get_logger.py +0 -103
  20. ipulse_shared_core_ftredge/utils/utils_cloud.py +0 -53
  21. ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +0 -442
  22. ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +0 -166
  23. ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +0 -27
  24. ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +0 -356
  25. ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +0 -151
  26. ipulse_shared_core_ftredge-2.7.1.dist-info/RECORD +0 -33
  27. /ipulse_shared_core_ftredge/{utils/logs → logging}/audit_log_firestore.py +0 -0
  28. {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/LICENCE +0 -0
  29. {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/WHEEL +0 -0
  30. {ipulse_shared_core_ftredge-2.7.1.dist-info → ipulse_shared_core_ftredge-2.8.1.dist-info}/top_level.txt +0 -0
--- a/ipulse_shared_core_ftredge/enums/pulse_enums.py
+++ /dev/null
@@ -1,182 +0,0 @@
- resource_classifications = {
-     "*",
-     "childs_based",  # Meaning need to look into child fields to determine classifications
-
-     "public",  # Anyone Can Access, ex: synthetic data
-     "authuser_open",  # Any Authenticated Can Access, ex: prices of gold, bitcoin etc.
-     # "authuser_subscription",
-     "authuser_confidential",  ## Only User Owner Can Access and Specific Admin
-     "authuser_limitedacl",  ## Has to be in the ACL
-     "authuser_owner",
-     "internal_open",  ## Any Internal employees only Can Access, ex: public reports, emails etc.
-     "internal_sensitive",  ## Many Internal employees Can Access IF they meet a special condition, ex: internal financials summary reports, web and app analytics, list of admin users etc.
-     "internal_confidential",  ## Few Internal employees Can Access, ex: internal user data, key financials, salaries and bonuses etc.
-     "internal_limitedacl",  ## Has to be employee usertype and in the ACL
-     "internal_owner"
- }
-
-
- resource_domain = {
-     "*",
-     ############### GYM #########
-     "gym_domain",
-     "gym_data_domain",
-     "gym_ai_domain",
-     ############## ORACLE #########
-     "oracle_domain",
-     "oracle_historic_prices_domain",
-     "oracle_ai_domain",
-     "oracle_assests_historic_info_domain",
-     "oracle_historic_econometrics_domain",
-     "oracle_news_historic_domain",
-     "oracle_calendar_domain",
-     "oracle_modelinfo_domain",
-     "oracle_modelmetrics_domain",
-     "oracle_modelpredictions_domain",
-     ######### ORGANISATIONS #########
-     "organisation_domain",
-     ################### USER #########
-     "user_domain",
-     "user_management_domain",
-     "user_portfolio_domain",
-     "user_groups_and_roles_domain",
-     ############### BUSINESS #########
-     "business_domain",
-     ############### ANALYTICS #########
-     "analytics_domain",
-     "system_domain"
- }
-
- resource_types = {
-     "db", "sql_db", "nosql_db", "dynamodb",
-     "big_query", "big_query_project", "big_query_table", "big_query_column",
-     "big_query_row", "big_query_cell",
-     "firestore", "firestore_project", "firestore_collection",
-     "firestore_document", "firestore_document_with_timeseries", "firestore_document_field",
-     "pandas_dataframe", "spark_dataframe",
-     "s3_bucket", "storage_bucket",
-     "folder", "file", "json_file", "csv_file", "pdf_file",
-     "unstructured_file", "image", "video", "audio", "text",
-     "api", "report", "dashboard", "webpage", "website", "web"
- }
-
- organisation_relations = {
-     "*",
-     "retail_customer",
-     "corporate_customer",
-     "parent",
-     "sister",
-     "self",
-     "partner",
-     "supplier",
-     "sponsor",
-     "investor",
-     "regulator",
-     "other"
- }
-
- organisation_industries = {
-     "*",
-     "data",
-     "government",
-     "media",
-     "academic",
-     "commercial",
-     "fund",
-     "finance",
-     "advisory",
-     "hedgefund",
-     "bank",
-     "vc",
-     "pe",
-     "construction",
-     "healthcare",
-     "technology",
-     "consulting",
-     "retail",
-     "non_profit",
-     "individual",
-     "freelancer",
-     "other"
- }
-
- licences_types = {
-     "*",
-     ######################################### OPEN or FULL Rights
-     "public",
-     "open",
-     "open_no_tandc",
-     "full_rights",
-     "full_rights_for_sale",
-     "commercial_licence_perpetual",
-     "customer_private_tac",
-     ######################################### SPECIAL CONDITIONS
-     "open_with_tandc",
-     "on_special_request",
-     "commercial_licence_limited_time",
-     "customer_owned_for_sale",
-     ######################################### Not for Commercial Use
-     "full_rights_not_for_sale",
-     "internal_only",
-     "academic_licence",
-     "not_for_commercial_use",
-     "customer_private",
-     ######################################### Unknown
-     "commercial_licence_not_purchased",
-     "web_scrapped",
-     "unknown"
- }
-
-
- actions = {
-     "GET",
-     "POST",
-     "DELETE",
-     "PUT",
-     "create",
-     "batch_create",
-     "read",
-     "batch_read",
-     "edit",
-     "batch_edit",
-     "add",
-     "batch_add",
-     "remove",
-     "batch_remove",
-     "delete",
-     "batch_delete",
-     "rename",
-     "batch_rename",
-     "move",
-     "batch_move",
-     "download",
-     "upload",
-     "share"
- }
-
- resource_readable_by = {
-     "*",
-     "all",
-     "authenticated",
-     "restricted",
-     "owner",
-     "selected_by_owner",
-     "admin",
-     "selected_by_admin",
-     "super_admin",
-     "super_admin_selected",
-     "system"
- }
-
- resource_updatable_by = {
-     "*",
-     "all",
-     "authenticated",
-     "restricted",
-     "owner",
-     "selected_by_owner",
-     "admin",
-     "selected_by_admin",
-     "super_admin",
-     "super_admin_selected",
-     "system"
- }
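
For orientation: the deleted pulse_enums.py defined plain Python sets used as controlled vocabularies rather than real Enum classes, so a value was valid only by membership. Below is a minimal sketch of how such sets could be used for validation; the `is_permitted` helper is hypothetical (not part of the package), and treating `"*"` as a wildcard is an assumption, not confirmed by the source.

```python
# Hypothetical validation helper; the set mirrors the deleted resource_readable_by.
resource_readable_by = {
    "*", "all", "authenticated", "restricted", "owner", "selected_by_owner",
    "admin", "selected_by_admin", "super_admin", "super_admin_selected", "system",
}

def is_permitted(value: str, vocabulary: set) -> bool:
    """Accept a value if the vocabulary lists it, or if it contains the '*' wildcard."""
    return "*" in vocabulary or value in vocabulary

print(is_permitted("owner", resource_readable_by))    # True
print(is_permitted("anonymous", {"owner", "admin"}))  # False
```
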
--- a/ipulse_shared_core_ftredge/utils/logs/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
- from .context_log import ContextLog
- from .get_logger import get_logger
--- a/ipulse_shared_core_ftredge/utils/logs/context_log.py
+++ /dev/null
@@ -1,210 +0,0 @@
-
- # pylint: disable=missing-module-docstring
- # pylint: disable=missing-function-docstring
- # pylint: disable=logging-fstring-interpolation
- # pylint: disable=line-too-long
- # pylint: disable=missing-class-docstring
- # pylint: disable=broad-exception-caught
- # pylint: disable=unused-variable
- import traceback
- import json
- from datetime import datetime, timezone
- from typing import List
- from ipulse_shared_core_ftredge import Status, LogLevel
-
- ############################################################################
- ##################### SETTING UP custom LOGGING format = DICT #############
- ### Cloud agnostic; can be used with any cloud provider, just use the to_dict() method to get the log in dict format
- class ContextLog:
-
-     def __init__(self, level: LogLevel, base_context: str = None, collector_id: str = None,
-                  context: str = None, description: str = None,
-                  e: Exception = None, e_type: str = None, e_message: str = None, e_traceback: str = None,
-                  log_status: Status = Status.OPEN, subject: str = None, systems_impacted: List[str] = None,
-                  ):
-
-         if e is not None:
-             e_type = type(e).__name__ if e_type is None else e_type
-             e_message = str(e) if e_message is None else e_message
-             e_traceback = traceback.format_exc() if e_traceback is None else e_traceback
-         elif (e_traceback is None or e_traceback == "") and (e_type or e_message):
-             e_traceback = traceback.format_exc()
-
-         self.level = level
-         self.subject = subject
-         self.description = description
-         self._base_context = base_context
-         self._context = context
-         self._systems_impacted = systems_impacted if systems_impacted else []
-         self.collector_id = collector_id
-         self.exception_type = e_type
-         self.exception_message = e_message
-         self.exception_traceback = e_traceback
-         self.log_status = log_status
-         self.timestamp = datetime.now(timezone.utc).isoformat()
-
-     @property
-     def base_context(self):
-         return self._base_context
-
-     @base_context.setter
-     def base_context(self, value):
-         self._base_context = value
-
-     @property
-     def context(self):
-         return self._context
-
-     @context.setter
-     def context(self, value):
-         self._context = value
-
-     @property
-     def systems_impacted(self):
-         return self._systems_impacted
-
-     @systems_impacted.setter
-     def systems_impacted(self, list_of_si: List[str]):
-         self._systems_impacted = list_of_si
-
-     def add_system_impacted(self, system_impacted: str):
-         if self._systems_impacted is None:
-             self._systems_impacted = []
-         self._systems_impacted.append(system_impacted)
-
-     def remove_system_impacted(self, system_impacted: str):
-         if self._systems_impacted is not None:
-             self._systems_impacted.remove(system_impacted)
-
-     def clear_systems_impacted(self):
-         self._systems_impacted = []
-
-     def _format_traceback(self, e_traceback, e_message, max_field_len: int, max_traceback_lines: int):
-         if not e_traceback or e_traceback == 'None\n':
-             return None
-
-         traceback_lines = e_traceback.splitlines()
-
-         # Check if the traceback is within the limits
-         if len(traceback_lines) <= max_traceback_lines and len(e_traceback) <= max_field_len:
-             return e_traceback
-
-         # Remove lines that are part of the exception message if they are present in the traceback
-         message_lines = e_message.splitlines() if e_message else []
-         if message_lines:
-             for message_line in message_lines:
-                 if message_line in traceback_lines:
-                     traceback_lines.remove(message_line)
-
-         # Filter out lines from third-party libraries (like site-packages)
-         filtered_lines = [line for line in traceback_lines if "site-packages" not in line]
-
-         # If filtering results in too few lines, revert to the original traceback
-         if len(filtered_lines) < 2:
-             filtered_lines = traceback_lines
-
-         # Combine standalone bracket lines with previous or next lines
-         combined_lines = []
-         for line in filtered_lines:
-             if line.strip() in {"(", ")", "{", "}", "[", "]"} and combined_lines:
-                 combined_lines[-1] += " " + line.strip()
-             else:
-                 combined_lines.append(line)
-
-         # Ensure the number of lines doesn't exceed max_traceback_lines
-         if len(combined_lines) > max_traceback_lines:
-             keep_lines_start = min(max_traceback_lines // 2, len(combined_lines))
-             keep_lines_end = min(max_traceback_lines // 2, len(combined_lines) - keep_lines_start)
-             combined_lines = (
-                 combined_lines[:keep_lines_start] +
-                 ['... (truncated) ...'] +
-                 combined_lines[-keep_lines_end:]
-             )
-
-         formatted_traceback = '\n'.join(combined_lines)
-
-         # Ensure the total length doesn't exceed max_field_len
-         if len(formatted_traceback) > max_field_len:
-             truncated_length = max_field_len - len('... (truncated) ...')
-             half_truncated_length = truncated_length // 2
-             formatted_traceback = (
-                 formatted_traceback[:half_truncated_length] +
-                 '\n... (truncated) ...\n' +
-                 formatted_traceback[-half_truncated_length:]
-             )
-         return formatted_traceback
-
-     def to_dict(self, max_field_len: int = 10000, size_limit: float = 256 * 1024 * 0.80, max_traceback_lines: int = 30):
-         size_limit = int(size_limit)  # Ensure size_limit is an integer
-
-         # Unified list of all fields
-         systems_impacted_str = f"{len(self.systems_impacted)} system(s): " + " ,,, ".join(self.systems_impacted) if self.systems_impacted else None
-         fields = [
-             ("log_status", str(self.log_status.name)),
-             ("level_code", self.level.value),
-             ("level_name", str(self.level.name)),
-             ("base_context", str(self.base_context)),
-             ("timestamp", str(self.timestamp)),
-             ("collector_id", str(self.collector_id)),
-             ("systems_impacted", systems_impacted_str),
-             ("context", str(self.context)),  # special sizing rules apply to it
-             ("subject", str(self.subject)),
-             ("description", str(self.description)),
-             ("exception_type", str(self.exception_type)),
-             ("exception_message", str(self.exception_message)),
-             ("exception_traceback", str(self._format_traceback(self.exception_traceback, self.exception_message, max_field_len, max_traceback_lines)))
-         ]
-
-         # Function to calculate the byte size of a JSON-encoded field
-         def field_size(key, value):
-             return len(json.dumps({key: value}).encode('utf-8'))
-
-         # Function to truncate a value based on its type
-         def truncate_value(value, max_size):
-             if isinstance(value, str):
-                 half_size = max_size // 2
-                 return value[:half_size] + '...' + value[-(max_size - half_size - 3):]
-             return value
-
-         # Ensure no field exceeds max_field_len
-         for i, (key, value) in enumerate(fields):
-             if isinstance(value, str) and len(value) > max_field_len:
-                 fields[i] = (key, truncate_value(value, max_field_len))
-
-         # Ensure the total size of the dict doesn't exceed size_limit
-         total_size = sum(field_size(key, value) for key, value in fields)
-         log_dict = {}
-         truncated = False
-
-         if total_size > size_limit:
-             truncated = True
-             remaining_size = size_limit
-             remaining_fields = len(fields)
-
-             for key, value in fields:
-                 if remaining_fields > 0:
-                     max_size_per_field = remaining_size // remaining_fields
-                 else:
-                     max_size_per_field = 0
-
-                 field_sz = field_size(key, value)
-                 if field_sz > max_size_per_field:
-                     value = truncate_value(value, max_size_per_field)
-                     field_sz = field_size(key, value)
-
-                 log_dict[key] = value
-                 remaining_size -= field_sz
-                 remaining_fields -= 1
-         else:
-             log_dict = dict(fields)
-
-         log_dict['trunc'] = truncated
-
-         return log_dict
-
-     def __str__(self):
-         return json.dumps(self.to_dict(), indent=4)
-
-     def __repr__(self):
-         return self.__str__()
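
To make the removed API concrete, here is a minimal usage sketch of ContextLog as it existed in 2.7.1. Import paths follow the deleted utils/logs/__init__.py above; `LogLevel.ERROR` is an assumed member name, since the LogLevel enum definition is not shown in this diff, and the contextual strings are illustrative.

```python
from ipulse_shared_core_ftredge import Status, LogLevel
from ipulse_shared_core_ftredge.utils.logs import ContextLog

try:
    1 / 0
except ZeroDivisionError as exc:
    log = ContextLog(
        level=LogLevel.ERROR,          # assumed member name
        base_context="nightly_batch",  # illustrative
        subject="price ingestion",
        description="normalisation step failed",
        e=exc,                         # type, message and traceback are derived from the exception
        log_status=Status.OPEN,
    )
    log.add_system_impacted("gcs: daily-prices bucket")
    print(log.to_dict())               # size-capped dict, suitable for structured cloud logging
```
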
--- a/ipulse_shared_core_ftredge/utils/logs/get_logger.py
+++ /dev/null
@@ -1,103 +0,0 @@
- # pylint: disable=missing-module-docstring
- # pylint: disable=missing-function-docstring
- # pylint: disable=missing-class-docstring
- # pylint: disable=broad-exception-caught
- # pylint: disable=line-too-long
- # pylint: disable=unused-variable
- # pylint: disable=broad-exception-raised
- import logging
- import os
- import json
- import traceback
- from typing import List, Union
- from ipulse_shared_core_ftredge.enums import LoggingHandlers
- from ipulse_shared_core_ftredge.utils.utils_cloud_gcp import add_gcp_cloud_logging, add_gcp_error_reporting
-
- ###################################################################################################
- ###################################################################################################
- ##################################### SETTING UP LOGGER ##########################################
-
- class CloudLogFormatter(logging.Formatter):
-     """Formats log records as structured JSON."""
-
-     def format(self, record):
-         log_entry = {
-             'message': record.msg,
-             'timestamp': self.formatTime(record, self.datefmt),
-             'name': record.name,
-             'severity': record.levelname,
-             'pathname': record.pathname,
-             'lineno': record.lineno,
-         }
-         if record.exc_info:
-             log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
-         if isinstance(record.msg, dict):
-             log_entry.update(record.msg)
-         return json.dumps(log_entry)
-
-
- class LocalLogFormatter(logging.Formatter):
-     """Formats log records for local output to the console."""
-
-     def format(self, record):
-         path_parts = record.pathname.split(os.sep)
-
-         # Get the last two parts of the path if they exist
-         if len(path_parts) >= 2:
-             short_path = os.path.join(path_parts[-2], path_parts[-1])
-         else:
-             short_path = record.pathname
-
-         # Format log messages differently based on the log level
-         if record.levelno == logging.INFO:
-             log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
-         elif record.levelno == logging.DEBUG:
-             log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
-         elif record.levelno == logging.ERROR:
-             log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
-             if record.exc_info:
-                 log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
-         else:
-             log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
-
-         return log_message
-
-
- def get_logger(logger_name: str, level=logging.INFO, logging_handler_providers: Union[LoggingHandlers, List[LoggingHandlers]] = LoggingHandlers.NONE):
-     """Creates and configures a logger with the specified handlers."""
-
-     logger = logging.getLogger(logger_name)
-     logger.setLevel(level)
-     cloud_formatter = CloudLogFormatter()
-
-     # Ensure logging_handler_providers is a list for consistent processing
-     if not isinstance(logging_handler_providers, list):
-         logging_handler_providers = [logging_handler_providers]
-
-     supported_handlers = [
-         LoggingHandlers.GCP_CLOUD_LOGGING,
-         LoggingHandlers.GCP_ERROR_REPORTING,
-         LoggingHandlers.LOCAL_STREAM,
-         LoggingHandlers.NONE,  # NONE configures no handler
-     ]
-
-     for handler_provider in logging_handler_providers:
-         if handler_provider in supported_handlers:
-             if handler_provider == LoggingHandlers.GCP_CLOUD_LOGGING:
-                 add_gcp_cloud_logging(logger, cloud_formatter)
-             elif handler_provider == LoggingHandlers.GCP_ERROR_REPORTING:
-                 add_gcp_error_reporting(logger)
-             elif handler_provider == LoggingHandlers.LOCAL_STREAM:  # Handle local stream
-                 local_handler = logging.StreamHandler()
-                 local_handler.setFormatter(LocalLogFormatter())
-                 logger.addHandler(local_handler)
-         else:
-             raise ValueError(
-                 f"Unsupported logging provider: {handler_provider}. "
-                 f"Supported providers: {[h.value for h in supported_handlers]}"
-             )
-     return logger
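
A short sketch of wiring up the removed get_logger. The enum member names are taken from the code above; the logger names and messages are illustrative, and the GCP variant additionally requires Google Cloud credentials at runtime.

```python
import logging
from ipulse_shared_core_ftredge.enums import LoggingHandlers
from ipulse_shared_core_ftredge.utils.logs import get_logger

# Console-only logger backed by LocalLogFormatter.
logger = get_logger("ingestion", level=logging.DEBUG,
                    logging_handler_providers=LoggingHandlers.LOCAL_STREAM)
logger.info("pipeline started")

# Handlers can also be combined by passing a list:
cloud_logger = get_logger("ingestion-cloud",
                          logging_handler_providers=[LoggingHandlers.LOCAL_STREAM,
                                                     LoggingHandlers.GCP_CLOUD_LOGGING])
```
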
--- a/ipulse_shared_core_ftredge/utils/utils_cloud.py
+++ /dev/null
@@ -1,53 +0,0 @@
- # pylint: disable=missing-module-docstring
- # pylint: disable=missing-function-docstring
- # pylint: disable=missing-class-docstring
- # pylint: disable=broad-exception-caught
- # pylint: disable=line-too-long
- # pylint: disable=unused-variable
- from ipulse_shared_core_ftredge.enums import CloudProvider, DataSourceType, DuplicationHandling, MatchConditionType
- from .utils_collector_pipelinemon import Pipelinemon
- from .utils_cloud_gcp import (write_json_to_gcs_extended,
-                               read_json_from_gcs)
-
- #######################################################################################################################
- #######################################################################################################################
- ################################################# cloud IO functions ##################################################
-
- # Central function that routes to the relevant cloud-specific implementation
- def write_json_to_cloud_storage_extended(cloud_storage: CloudProvider | DataSourceType, storage_client, data: dict | list | str, bucket_name: str, file_name: str,
-                                          duplication_handling: DuplicationHandling, duplication_match_condition_type: MatchConditionType, duplication_match_condition: str = "",
-                                          max_retries: int = 2, max_matched_deletable_files: int = 1,
-                                          pipelinemon: Pipelinemon = None, logger=None, print_out=False, raise_e=False):
-
-     supported_cloud_storage_values = [CloudProvider.GCP, DataSourceType.GCS]
-
-     if cloud_storage in supported_cloud_storage_values:
-         return write_json_to_gcs_extended(
-             pipelinemon=pipelinemon,
-             storage_client=storage_client,
-             data=data,
-             bucket_name=bucket_name,
-             file_name=file_name,
-             duplication_handling_enum=duplication_handling,
-             duplication_match_condition_type_enum=duplication_match_condition_type,
-             duplication_match_condition=duplication_match_condition,
-             max_retries=max_retries,
-             max_deletable_files=max_matched_deletable_files,
-             logger=logger,
-             print_out=print_out,
-             raise_e=raise_e
-         )
-
-     raise ValueError(f"Unsupported cloud storage: {cloud_storage}. Supported cloud storage values: {supported_cloud_storage_values}")
-
-
- def read_json_from_cloud_storage(cloud_storage: CloudProvider | DataSourceType, storage_client, bucket_name: str, file_name: str, logger=None, print_out: bool = False):
-
-     supported_cloud_storage_values = [CloudProvider.GCP, DataSourceType.GCS]
-
-     if cloud_storage in supported_cloud_storage_values:
-         return read_json_from_gcs(storage_client=storage_client, bucket_name=bucket_name, file_name=file_name, logger=logger, print_out=print_out)
-
-     raise ValueError(f"Unsupported cloud storage: {cloud_storage}. Supported cloud storage values: {supported_cloud_storage_values}")
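
Finally, a hedged sketch of calling the removed storage router. The bucket, file name, and the `DuplicationHandling`/`MatchConditionType` member names are assumptions (their enum definitions are not shown in this diff), and an authenticated google-cloud-storage client is assumed.

```python
from google.cloud import storage
from ipulse_shared_core_ftredge.enums import CloudProvider, DuplicationHandling, MatchConditionType
from ipulse_shared_core_ftredge.utils.utils_cloud import (
    write_json_to_cloud_storage_extended, read_json_from_cloud_storage)

client = storage.Client()

# Passing CloudProvider.GCP (or DataSourceType.GCS) routes to the GCS implementation;
# anything else raises ValueError.
write_json_to_cloud_storage_extended(
    cloud_storage=CloudProvider.GCP,
    storage_client=client,
    data={"symbol": "XAU", "price": 2400.5},                     # illustrative payload
    bucket_name="example-bucket",                                # hypothetical
    file_name="prices/2024-06-01.json",                          # hypothetical
    duplication_handling=DuplicationHandling.OVERWRITE,          # assumed member name
    duplication_match_condition_type=MatchConditionType.PREFIX,  # assumed member name
)

payload = read_json_from_cloud_storage(CloudProvider.GCP, client,
                                       "example-bucket", "prices/2024-06-01.json")
```
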