ipulse-shared-core-ftredge 2.52-py3-none-any.whl → 2.54-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

ipulse_shared_core_ftredge/__init__.py

@@ -4,10 +4,10 @@ from .utils_gcp import (setup_gcp_logger_and_error_report,
                         read_csv_from_gcs, read_json_from_gcs,
                         write_csv_to_gcs, write_json_to_gcs)
 from .utils_templates_and_schemas import (create_bigquery_schema_from_json,
-                                          update_check_with_schema_template)
-from .utils_common import (Notice, NoticesManager)
+                                          check_format_against_schema_template)
+from .utils_common import (ContextLog, PipelineWatcher)
 
-from .enums import (NoticeManagerCategory, NoticeLevel, Unit, Frequency,
+from .enums import (TargetLogs, LogLevel, Unit, Frequency,
                     Module, SubModule, BaseDataCategory,
                     FinCoreCategory, FincCoreSubCategory,
                     FinCoreRecordsCategory, ExchangeOrPublisher,
ipulse_shared_core_ftredge/enums/__init__.py

@@ -3,8 +3,8 @@
 # pylint: disable=missing-function-docstring
 # pylint: disable=missing-class-docstring
 
-from .enums_common_utils import (NoticeLevel,
-                                 NoticeManagerCategory,
+from .enums_common_utils import (LogLevel,
+                                 TargetLogs,
                                  Unit,
                                  Frequency)
 
ipulse_shared_core_ftredge/enums/enums_common_utils.py

@@ -6,11 +6,17 @@
 from enum import Enum
 
 
-class NoticeManagerCategory(Enum):
-    NOTICES = "notices"
-    WARN_ERRS = "warn_errs"
-    SUCCESSES = "successes"
-class NoticeLevel(Enum):
+class TargetLogs(Enum):
+    MIXED="mixed_logs"
+    SUCCESSES = "success_logs"
+    NOTICES = "notice_logs"
+    SUCCESSES_AND_NOTICES = "succs_n_notc_logs"
+    WARNINGS = "warning_logs"
+    WARNINGS_AND_ERRORS = "warn_n_err_logs"
+    ERRORS = "error_logs"
+
+
+class LogLevel(Enum):
     """
     Standardized notice levels for data engineering pipelines,
     designed for easy analysis and identification of manual
@@ -19,12 +25,16 @@ class NoticeLevel(Enum):
     DEBUG = 100 # Detailed debug information (for development/troubleshooting)
 
     INFO = 200
+    SUCCESS = 201
 
-    SUCCESS = 300 # Events requiring attention, but not necessarily errors
+    NOTICE = 300 # Maybe same file or data already fully or partially exists
+    NOTICE_ALREADY_EXISTS = 301 # Data already exists, no action required
+    NOTICE_PARTIAL_EXISTS = 302 # Partial data exists, no action required
+    NOTICE_CANCELLED = 303 # Data processing cancelled, no action required
 
     # Warnings indicate potential issues that might require attention:
     WARNING = 400 # General warning, no immediate action required
-    WARNING_NO_ACTION = 401 # Minor issue or Unexpected Behavior, no immediate action required (can be logged frequently)
+    # WARNING_NO_ACTION = 401 # Minor issue or Unexpected Behavior, no immediate action required (can be logged frequently)
     WARNING_REVIEW_RECOMMENDED = 402 # Action recommended to prevent potential future issues
     WARNING_FIX_RECOMMENDED = 403 # Action recommended to prevent potential future issues
     WARNING_FIX_REQUIRED = 404 # Action required, pipeline can likely continue
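
The renumbering above is what the rest of this release builds on: SUCCESS moves into the 2xx INFO band (201), the new NOTICE_* values occupy 3xx, and warnings stay at 4xx, so severity bands can be compared numerically. A minimal sketch of that band logic (not part of the diff; the codes are taken from this hunk, while the ERROR value is not visible here and is assumed to be 500):

    from enum import Enum

    class LogLevel(Enum):
        INFO = 200
        SUCCESS = 201
        NOTICE = 300
        NOTICE_ALREADY_EXISTS = 301
        WARNING = 400
        ERROR = 500  # assumption: the ERROR code is not shown in this diff

    def band(level: LogLevel) -> str:
        """Map a level to the bucket PipelineWatcher counts it under."""
        code = level.value
        if code >= LogLevel.ERROR.value:
            return "errors"
        if code >= LogLevel.WARNING.value:
            return "warnings"
        if code >= LogLevel.NOTICE.value:
            return "notices"
        if code >= LogLevel.SUCCESS.value:
            return "successes"
        return "other"

    print(band(LogLevel.NOTICE_ALREADY_EXISTS))  # -> "notices"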
@@ -46,16 +56,16 @@ class NoticeLevel(Enum):
     UNKNOWN=1001 # Unknown error, should not be used in normal operation
 
 
-class NoticeStatus(Enum):
+class LogStatus(Enum):
     OPEN = "open"
     ACKNOWLEDGED = "acknowledged"
     IN_PROGRESS = "in_progress"
     RESOLVED = "resolved"
     IGNORED = "ignored"
+    CANCELLED = "cancelled"
 
 
 
-
 ### Exception during full exection, partially saved
 # Exception during ensemble pipeline; modifications collected in local object , nothing persisted
 # Exception during ensemble pipeline; modifications persisted , metadata failed
@@ -88,17 +98,17 @@ class Unit(Enum):
     CURRENCY = "currency" # General currency, when specific currency is not needed
 
     # Stock Market and Investments
-    SHARES = "shars" # Number of shares
+    SHARES = "shares" # Number of shares
     PERCENT = "prcnt" # Percentage, used for rates and ratios
     BPS = "bps" # Basis points, often used for interest rates and financial ratios
 
     # Volume and Quantitative Measurements
-    VOLUME = "vol" # Trading volume in units
+    VOLUME = "volume" # Trading volume in units
     MILLIONS = "mills" # Millions, used for large quantities or sums
     BILLIONS = "bills" # Billions, used for very large quantities or sums
 
     # Commodity Specific Units
-    BARRELS = "barrls" # Barrels, specifically for oil and similar liquids
+    BARRELS = "barrels" # Barrels, specifically for oil and similar liquids
     TONNES = "tonnes" # Tonnes, for bulk materials like metals or grains
     TROY_OUNCES = "troy_oz" # Troy ounces, specifically for precious metals
 
ipulse_shared_core_ftredge/enums/enums_data_eng.py

@@ -4,11 +4,12 @@
 from enum import Enum
 
 class SourcingTriggerType(Enum):
-    HISTORIC_MANUAL = "historic_manual"
-    LIVE_SCHEDULED = "live_scheduled"
+    BULK_MANUAL = "bulk_manual"
+    BULK_SCHEDULED = "bulk_scheduled" # almost always historic bulk is manual
+    RECENT_SCHEDULED = "recent_scheduled"
+    RECENT_MANUAL = "recent_manual"
     ADHOC_MANUAL = "adhoc_manual"
     ADHOC_SCHEDULED = "adhoc_scheduled"
-    LIVE_MANUAL = "live_manual"
 
 class SourcingPipelineType(Enum):
     LOCAL_GET_API_TO_GCS = "local_get_api_to_gcs"
ipulse_shared_core_ftredge/enums/enums_module_fincore.py

@@ -5,14 +5,14 @@ from enum import Enum
 
 
 class FinCoreCategory(Enum):
-    MARKET="market"
-    ECONOMY="economy"
-    POLITICS="poltcs"
-    CORPORATE="corp"
+    MARKET="market" # Market prices data
+    CORPORATE="corp" # Corporate data such as financial statements and earnings, similar to fundamental data
     FUNDAMENTAL="fundam"
-    SENTIMENT="sntmnt"
+    ECONOMY="economy"
     NEWS="news"
+    SENTIMENT="sntmnt"
     SOCIAL="social"
+    POLITICS="poltcs"
     OTHER="other"
 
 class FincCoreSubCategory(Enum):
@@ -32,16 +32,16 @@ class FincCoreSubCategory(Enum):
 
 class FinCoreRecordsCategory(Enum):
     PRICE="pric"
-    PRICE_SPOT= "pric.s"
-    PRICE_OHLCVA="pric.ohlcva"
-    PRICE_OHLCV="pric.ohlcv"
-    PRICE_OPEN="pric.o"
-    PRICE_HIGH="pric.h"
-    PRICE_LOW="pric.l"
-    PRICE_CLOSE="pric.c"
-    PRICE_VOLUME="pric.v"
-    PRICE_ADJC="pric.a"
-    FUNDAMENTAL="fundam" # treat this differently
+    SPOT= "spot"
+    OHLCVA="ohlcva"
+    OHLCV="ohlcv"
+    OPEN="open"
+    HIGH="high"
+    LOW="low"
+    CLOSE="close"
+    VOLUME="volume"
+    ADJC="adjc"
+    FUNDAMENTAL="fundam" # treat this differently
     EARNINGS="earnings"
     CASH_FLOW="cashflw"
     BALANCE_SHEET="blnce_sht"
ipulse_shared_core_ftredge/utils_common.py

@@ -10,18 +10,18 @@ from datetime import datetime, timezone
 from contextlib import contextmanager
 from typing import List
 from google.cloud import logging as cloudlogging
-from ipulse_shared_core_ftredge.enums.enums_common_utils import NoticeLevel, NoticeManagerCategory, NoticeStatus
+from ipulse_shared_core_ftredge.enums.enums_common_utils import TargetLogs, LogLevel, LogStatus
 from ipulse_shared_core_ftredge.utils_gcp import write_json_to_gcs
 
 
 # ["data_import","data_quality", "data_processing","data_general","data_persistance","metadata_quality", "metadata_processing", "metadata_persistance","metadata_general"]
 
-class Notice:
+class ContextLog:
     MAX_TRACEBACK_LINES = 14 # Define the maximum number of traceback lines to include
-    def __init__(self, level: NoticeLevel, start_context: str = None, notice_manager_id: str = None,
+    def __init__(self, level: LogLevel, base_context: str = None, collector_id: str = None,
                  e: Exception = None, e_type: str = None, e_message: str = None, e_traceback: str = None,
                  subject: str = None, description: str = None, context: str = None,
-                 notice_status: NoticeStatus = NoticeStatus.OPEN):
+                 log_status: LogStatus = LogStatus.OPEN):
         if e is not None:
            e_type = type(e).__name__ if e_type is None else e_type
            e_message = str(e) if e_message is None else e_message
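
For readers tracking the rename: Notice becomes ContextLog, and its keyword arguments move from start_context/notice_manager_id/notice_status to base_context/collector_id/log_status. A hedged usage sketch against the signature above (not part of the diff; the subject and description strings are illustrative only):

    from ipulse_shared_core_ftredge.utils_common import ContextLog
    from ipulse_shared_core_ftredge.enums.enums_common_utils import LogLevel

    try:
        1 / 0
    except ZeroDivisionError as exc:
        # per the lines above, e_type and e_message are derived from `e` when omitted
        log = ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                         subject="daily_prices_batch",  # illustrative subject
                         description="Transform step failed on one row",
                         e=exc)
    print(log.to_dict()["exception_type"])  # -> "ZeroDivisionError"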
@@ -32,13 +32,13 @@ class Notice:
         self.level = level
         self.subject = subject
         self.description = description
-        self._start_context = start_context
+        self._base_context = base_context
         self._context = context
-        self.notice_manager_id = notice_manager_id
+        self.collector_id = collector_id
         self.exception_type = e_type
         self.exception_message = e_message
         self.exception_traceback = self._format_traceback(e_traceback,e_message)
-        self.notice_status = notice_status
+        self.log_status = log_status
         self.timestamp = datetime.now(timezone.utc).isoformat()
 
     def _format_traceback(self, e_traceback, e_message):
@@ -83,15 +83,15 @@ class Notice:
         else:
             formatted_traceback = '\n'.join(combined_lines)
 
-        return formatted_traceback
+        return formatted_traceback
 
     @property
-    def start_context(self):
-        return self._start_context
+    def base_context(self):
+        return self._base_context
 
-    @start_context.setter
-    def start_context(self, value):
-        self._start_context = value
+    @base_context.setter
+    def base_context(self, value):
+        self._base_context = value
 
     @property
     def context(self):
@@ -103,7 +103,7 @@ class Notice:
 
     def to_dict(self):
         return {
-            "start_context": self.start_context,
+            "base_context": self.base_context,
             "context": self.context,
             "level_code": self.level.value,
             "level_name": self.level.name,
@@ -112,27 +112,29 @@ class Notice:
             "exception_type": self.exception_type,
             "exception_message": self.exception_message,
             "exception_traceback": self.exception_traceback,
-            "notice_status": self.notice_status.value,
-            "notice_manager_id": self.notice_manager_id,
+            "log_status": self.log_status.value,
+            "collector_id": self.collector_id,
             "timestamp": self.timestamp
         }
 
-class NoticesManager:
-    ERROR_CODE_START_VALUE = NoticeLevel.ERROR.value
-    WARNING_CODE_START_VALUE = NoticeLevel.WARNING.value
-    SUCCESS_CODE_START_VALUE = NoticeLevel.SUCCESS.value
+class PipelineWatcher:
+    ERROR_START_CODE = LogLevel.ERROR.value
+    WARNING_START_CODE = LogLevel.WARNING.value
+    NOTICE_START_CODE = LogLevel.NOTICE.value
+    SUCCESS_START_CODE = LogLevel.SUCCESS.value
 
-    def __init__(self, start_context: str, category: NoticeManagerCategory = NoticeManagerCategory.NOTICES, logger_name=None):
+    def __init__(self, base_context: str, target_logs: TargetLogs = TargetLogs.MIXED, logger_name=None):
         self._id = str(uuid.uuid4())
-        self._notices = []
+        self._logs = []
         self._early_stop = False
         self._errors_count = 0
         self._warnings_count = 0
+        self._notices_count = 0
         self._successes_count = 0
-        self._level_counts = {level.name: 0 for level in NoticeLevel}
-        self._start_context = start_context
+        self._level_counts = {level.name: 0 for level in LogLevel}
+        self._base_context = base_context
         self._context_stack = []
-        self._category = category.value
+        self._target_logs = target_logs.value
         self._logger = self._initialize_logger(logger_name)
 
     def _initialize_logger(self, logger_name):
@@ -141,7 +143,6 @@ class NoticesManager:
             return logging_client.logger(logger_name)
         return None
 
-
     @contextmanager
     def context(self, context):
         self.push_context(context)
@@ -162,8 +163,8 @@ class NoticesManager:
         return " >> ".join(self._context_stack)
 
     @property
-    def start_context(self):
-        return self._start_context
+    def base_context(self):
+        return self._base_context
 
     @property
     def id(self):
@@ -173,14 +174,14 @@ class NoticesManager:
     def early_stop(self):
         return self._early_stop
 
-    def set_early_stop(self, max_errors_tolerance:int, create_error_notice=True,pop_context=False):
-        self.early_stop = True
-        if create_error_notice:
+    def set_early_stop(self, max_errors_tolerance: int, create_error_log=True, pop_context=False):
+        self._early_stop = True
+        if create_error_log:
             if pop_context:
                 self.pop_context()
-            self.add_notice(Notice(level=NoticeLevel.ERROR,
-                                   subject="EARLY_STOP",
-                                   description=f"Total MAX_ERRORS_TOLERANCE of {max_errors_tolerance} has been reached."))
+            self.add_log(ContextLog(level=LogLevel.ERROR,
+                                    subject="EARLY_STOP",
+                                    description=f"Total MAX_ERRORS_TOLERANCE of {max_errors_tolerance} has been reached."))
 
     def reset_early_stop(self):
         self._early_stop = False
@@ -188,49 +189,54 @@ class NoticesManager:
     def get_early_stop(self):
         return self._early_stop
 
-    def add_notice(self, notice: Notice):
-        if (self._category == NoticeManagerCategory.SUCCESSES.value and notice.level != NoticeLevel.SUCCESS) or \
-           (self._category == NoticeManagerCategory.WARN_ERRS.value and notice.level.value < self.WARNING_CODE_START_VALUE):
-            raise ValueError(f"Invalid notice level {notice.level.name} for category {self._category}")
-        notice.start_context = self.start_context
-        notice.context = self.current_context
-        notice.notice_manager_id = self.id
-        notice_dict = notice.to_dict()
-        self._notices.append(notice_dict)
-        self._update_counts(notice_dict)
+    def add_log(self, log: ContextLog):
+        if (self._target_logs == TargetLogs.SUCCESSES and log.level >=self.NOTICE_START_CODE) or \
+           (self._target_logs == TargetLogs.WARNINGS_AND_ERRORS and log.level.value < self.WARNING_START_CODE):
+            raise ValueError(f"Invalid log level {log.level.name} for Pipeline Watcher target logs setup: {self._target_logs}")
+        log.base_context = self.base_context
+        log.context = self.current_context
+        log.collector_id = self.id
+        log_dict = log.to_dict()
+        self._logs.append(log_dict)
+        self._update_counts(log_dict)
 
         if self._logger:
-            if notice.level.value >= self.WARNING_CODE_START_VALUE:
-                self._logger.log_struct(notice_dict, severity="WARNING")
+            # We specifically want to avoid having an ERROR log level for this structured Pipeline Watcher reporting, to ensure Errors are alerting on Critical Application Services.
+            # A single ERROR log level can be used for the entire pipeline, which shall be used at the end of the pipeline
+            if log.level.value >= self.WARNING_START_CODE:
+                self._logger.log_struct(log_dict, severity="WARNING")
+            elif log.level.value >= self.NOTICE_START_CODE:
+                self._logger.log_struct(log_dict, severity="NOTICE")
             else:
-                self._logger.log_struct(notice_dict, severity="INFO")
+                self._logger.log_struct(log_dict, severity="INFO")
 
-    def add_notices(self, notices: List[Notice]):
-        for notice in notices:
-            self.add_notice(notice)
+    def add_logs(self, logs: List[ContextLog]):
+        for log in logs:
+            self.add_log(log)
 
-    def clear_notices_and_counts(self):
-        self._notices = []
+    def clear_logs_and_counts(self):
+        self._logs = []
         self._errors_count = 0
         self._warnings_count = 0
+        self._notices_count = 0
         self._successes_count = 0
-        self._level_counts = {level.name: 0 for level in NoticeLevel}
+        self._level_counts = {level.name: 0 for level in LogLevel}
 
-    def clear_notices(self):
-        self._notices = []
+    def clear_logs(self):
+        self._logs = []
 
-    def get_all_notices(self):
-        return self._notices
+    def get_all_logs(self):
+        return self._logs
 
-    def get_notices_for_level(self, level: NoticeLevel):
-        return [notice for notice in self._notices if notice["level_code"] == level.value]
+    def get_logs_for_level(self, level: LogLevel):
+        return [log for log in self._logs if log["level_code"] == level.value]
 
-    def get_notices_by_str_in_context(self, context_substring: str):
+    def get_logs_by_str_in_context(self, context_substring: str):
         return [
-            notice for notice in self._notices
-            if context_substring in notice["context"]
+            log for log in self._logs
+            if context_substring in log["context"]
         ]
-
+
     def contains_errors(self):
         return self._errors_count > 0
 
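
Putting the renames together, a minimal sketch of the new collector (not part of the diff; the context strings are made up, and logger_name is left unset so nothing is sent to Cloud Logging):

    from ipulse_shared_core_ftredge.utils_common import ContextLog, PipelineWatcher
    from ipulse_shared_core_ftredge.enums.enums_common_utils import LogLevel, TargetLogs

    watcher = PipelineWatcher(base_context="daily_sourcing_run",  # illustrative context
                              target_logs=TargetLogs.MIXED)

    with watcher.context("download"):
        watcher.add_log(ContextLog(level=LogLevel.NOTICE_ALREADY_EXISTS,
                                   subject="prices_2024-07-01.json",
                                   description="Object already present, skipping"))

    print(watcher.count_notices())    # -> 1 (counted in the new 3xx notice band)
    print(watcher.contains_errors())  # -> False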
@@ -246,60 +252,69 @@ class NoticesManager:
     def count_warnings(self):
         return self._warnings_count
 
+    def count_notices(self):
+        return self._notices_count
+
     def count_successes(self):
         return self._successes_count
 
-    def count_all_notices(self):
-        return len(self._notices)
+    def count_all_logs(self):
+        return len(self._logs)
 
-    def count_notices_by_level(self, level: NoticeLevel):
+    def count_logs_by_level(self, level: LogLevel):
         return self._level_counts.get(level.name, 0)
 
-    def _count_notices(self, context_substring: str, exact_match=False, level_code_min=None, level_code_max=None):
+    def _count_logs(self, context_substring: str, exact_match=False, level_code_min=None, level_code_max=None):
         return sum(
-            1 for notice in self._notices
-            if (notice["context"] == context_substring if exact_match else context_substring in notice["context"]) and
-            (level_code_min is None or notice["level_code"] >= level_code_min) and
-            (level_code_max is None or notice["level_code"] <= level_code_max)
+            1 for log in self._logs
+            if (log["context"] == context_substring if exact_match else context_substring in log["context"]) and
+            (level_code_min is None or log["level_code"] >= level_code_min) and
+            (level_code_max is None or log["level_code"] <= level_code_max)
         )
 
-    def count_notices_for_current_context(self):
-        return self._count_notices(self.current_context, exact_match=True)
+    def count_logs_for_current_context(self):
+        return self._count_logs(self.current_context, exact_match=True)
 
-    def count_notices_for_current_and_nested_contexts(self):
-        return self._count_notices(self.current_context)
+    def count_logs_for_current_and_nested_contexts(self):
+        return self._count_logs(self.current_context)
 
-    def count_notices_by_level_for_current_context(self, level: NoticeLevel):
-        return self._count_notices(self.current_context, exact_match=True, level_code_min=level.value, level_code_max=level.value)
+    def count_logs_by_level_for_current_context(self, level: LogLevel):
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=level.value, level_code_max=level.value)
 
-    def count_notices_by_level_for_current_and_nested_contexts(self, level: NoticeLevel):
-        return self._count_notices(self.current_context, level_code_min=level.value, level_code_max=level.value)
+    def count_logs_by_level_for_current_and_nested_contexts(self, level: LogLevel):
+        return self._count_logs(self.current_context, level_code_min=level.value, level_code_max=level.value)
 
     def count_errors_for_current_context(self):
-        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.ERROR_CODE_START_VALUE)
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=self.ERROR_START_CODE)
 
     def count_errors_for_current_and_nested_contexts(self):
-        return self._count_notices(self.current_context, level_code_min=self.ERROR_CODE_START_VALUE)
+        return self._count_logs(self.current_context, level_code_min=self.ERROR_START_CODE)
 
     def count_warnings_and_errors_for_current_context(self):
-        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE)
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=self.WARNING_START_CODE)
 
     def count_warnings_and_errors_for_current_and_nested_contexts(self):
-        return self._count_notices(self.current_context, level_code_min=self.WARNING_CODE_START_VALUE)
+        return self._count_logs(self.current_context, level_code_min=self.WARNING_START_CODE)
 
     def count_warnings_for_current_context(self):
-        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=self.WARNING_START_CODE, level_code_max=self.ERROR_START_CODE - 1)
 
     def count_warnings_for_current_and_nested_contexts(self):
-        return self._count_notices(self.current_context, level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)
+        return self._count_logs(self.current_context, level_code_min=self.WARNING_START_CODE, level_code_max=self.ERROR_START_CODE - 1)
+
+    def count_notices_for_current_context(self):
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=self.NOTICE_START_CODE, level_code_max=self.WARNING_START_CODE-1)
+
+    def count_notices_for_current_and_nested_contexts(self):
+        return self._count_logs(self.current_context, level_code_min=self.NOTICE_START_CODE, level_code_max=self.WARNING_START_CODE-1)
 
     def count_successes_for_current_context(self):
-        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)
+        return self._count_logs(self.current_context, exact_match=True, level_code_min=self.SUCCESS_START_CODE, level_code_max=self.NOTICE_START_CODE-1)
 
     def count_successes_for_current_and_nested_contexts(self):
-        return self._count_notices(self.current_context, level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)
+        return self._count_logs(self.current_context, level_code_min=self.SUCCESS_START_CODE, level_code_max=self.NOTICE_START_CODE-1)
 
-    def export_notices_to_gcs_file(self, bucket_name, storage_client, file_prefix=None, file_name=None, top_level_context=None, save_locally=False, local_path=None, logger=None, max_retries=2):
+    def export_logs_to_gcs_file(self, bucket_name, storage_client, file_prefix=None, file_name=None, top_level_context=None, save_locally=False, local_path=None, logger=None, max_retries=2):
         def log_message(message):
             if logger:
                 logger.info(message)
@@ -309,34 +324,34 @@ class NoticesManager:
                 logger.error(message, exc_info=exc_info)
 
         if not file_prefix:
-            file_prefix = self._category
+            file_prefix = self._target_logs
        if not file_name:
             timestamp = datetime.now(timezone.utc).strftime("%Y%m%d_%H%M%S")
             if top_level_context:
-                file_name = f"{file_prefix}_{timestamp}_{top_level_context}_len{len(self._notices)}.json"
+                file_name = f"{file_prefix}_{timestamp}_{top_level_context}_len{len(self._logs)}.json"
             else:
-                file_name = f"{file_prefix}_{timestamp}_len{len(self._notices)}.json"
+                file_name = f"{file_prefix}_{timestamp}_len{len(self._logs)}.json"
 
-        result=None
+        result = None
         try:
-            result= write_json_to_gcs(
+            result = write_json_to_gcs(
                 bucket_name=bucket_name,
                 storage_client=storage_client,
-                data=self._notices,
+                data=self._logs,
                 file_name=file_name,
                 save_locally=save_locally,
                 local_path=local_path,
                 logger=logger,
                 max_retries=max_retries,
-                overwrite=True
+                overwrite_if_exists=False
             )
-            log_message(f"{file_prefix} successfully saved (ovewritten={result.get("ovewritten")}) to GCS at {result.get("gcs_path")} and locally at {result.get("local_path")}.")
+            log_message(f"{file_prefix} successfully saved (overwritten={result.get('gcs_file_overwritten')}) to GCS at {result.get('gcs_path')} and locally at {result.get('local_path')}.")
         except Exception as e:
-            log_error(f"Failed at export_notices_to_gcs_file for {file_prefix} for file {file_name} to bucket {bucket_name}: {type(e).__name__} - {str(e)}")
+            log_error(f"Failed at export_logs_to_gcs_file for {file_prefix} for file {file_name} to bucket {bucket_name}: {type(e).__name__} - {str(e)}")
 
         return result
 
-    def import_notices_from_json(self, json_or_file, logger=None):
+    def import_logs_from_json(self, json_or_file, logger=None):
         def log_message(message):
             if logger:
                 logger.info(message)
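
The exporter keeps its shape but is renamed and now writes through the stricter default shown above (overwrite_if_exists=False), so an existing object with the same name is no longer silently replaced. A sketch continuing the watcher from the earlier example (not part of the diff; the bucket name is hypothetical):

    from google.cloud import storage

    client = storage.Client()  # assumes default GCP credentials
    result = watcher.export_logs_to_gcs_file(bucket_name="my-pipeline-logs",  # hypothetical bucket
                                             storage_client=client,
                                             top_level_context="daily_sourcing_run")
    if result:
        print(result["gcs_path"], result["gcs_file_overwritten"])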
@@ -347,31 +362,35 @@ class NoticesManager:
 
         try:
             if isinstance(json_or_file, str): # Load from string
-                imported_notices = json.loads(json_or_file)
+                imported_logs = json.loads(json_or_file)
             elif hasattr(json_or_file, 'read'): # Load from file-like object
-                imported_notices = json.load(json_or_file)
-            self.add_notices(imported_notices)
-            log_message("Successfully imported notices from json.")
+                imported_logs = json.load(json_or_file)
+            self.add_logs(imported_logs)
+            log_message("Successfully imported logs from json.")
         except Exception as e:
-            log_warning(f"Failed to import notices from json: {type(e).__name__} - {str(e)}", exc_info=True)
+            log_warning(f"Failed to import logs from json: {type(e).__name__} - {str(e)}", exc_info=True)
 
-    def _update_counts(self, notice, remove=False):
-        level_code = notice["level_code"]
-        level_name = notice["level_name"]
+    def _update_counts(self, log, remove=False):
+        level_code = log["level_code"]
+        level_name = log["level_name"]
 
         if remove:
-            if level_code >= self.ERROR_CODE_START_VALUE:
+            if level_code >= self.ERROR_START_CODE:
                 self._errors_count -= 1
-            elif level_code >= self.WARNING_CODE_START_VALUE:
+            elif self.WARNING_START_CODE <= level_code < self.ERROR_START_CODE:
                 self._warnings_count -= 1
-            elif level_code >= self.SUCCESS_CODE_START_VALUE:
+            elif self.NOTICE_START_CODE <= level_code < self.WARNING_START_CODE:
+                self._notices_count -= 1
+            elif self.SUCCESS_START_CODE <= level_code < self.NOTICE_START_CODE:
                 self._successes_count -= 1
             self._level_counts[level_name] -= 1
         else:
-            if level_code >= self.ERROR_CODE_START_VALUE:
+            if level_code >= self.ERROR_START_CODE:
                 self._errors_count += 1
-            elif level_code >= self.WARNING_CODE_START_VALUE:
+            elif self.WARNING_START_CODE <= level_code < self.ERROR_START_CODE:
                 self._warnings_count += 1
-            elif level_code == self.SUCCESS_CODE_START_VALUE:
+            elif self.NOTICE_START_CODE <= level_code < self.WARNING_START_CODE:
+                self._notices_count += 1
+            elif self.SUCCESS_START_CODE <= level_code < self.NOTICE_START_CODE:
                 self._successes_count += 1
             self._level_counts[level_name] += 1
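
The counting change above is subtle but worth spelling out: the old chain used open-ended `level_code >= ...` comparisons, while the new one uses half-open bands, which is what keeps the 3xx notices out of the warnings bucket. A worked check (not part of the diff) with the codes from this release:

    SUCCESS_START_CODE, NOTICE_START_CODE, WARNING_START_CODE = 201, 300, 400

    level_code = 301  # LogLevel.NOTICE_ALREADY_EXISTS
    assert NOTICE_START_CODE <= level_code < WARNING_START_CODE        # counted as a notice
    assert not (SUCCESS_START_CODE <= level_code < NOTICE_START_CODE)  # not a success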
ipulse_shared_core_ftredge/utils_gcp.py

@@ -122,8 +122,9 @@ def read_csv_from_gcs(bucket_name, file_name, storage_client, logger):
 
 
 
-def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
-                      save_locally=False, local_path=None, logger=None, max_retries=3, overwrite=True):
+def write_json_to_gcs(bucket_name, storage_client, data, file_name,
+                      save_locally=False, local_path=None, logger=None, max_retries=2,
+                      overwrite_if_exists=False, increment_if_exists=False):
     """Saves data to Google Cloud Storage and optionally locally.
 
     This function attempts to upload data to GCS. If the upload fails after
@@ -132,24 +133,35 @@ def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
 
     Returns:
         dict: A dictionary containing the GCS path (or None if upload failed),
-        the local path (or None if not saved locally), and a boolean indicating if the file was overwritten.
+        the local path (or None if not saved locally), a boolean indicating if the file was overwritten,
+        a boolean indicating if the file already existed, and a boolean indicating if the file was saved with an incremented name.
     """
 
     def log_message(message):
         if logger:
             logger.info(message)
 
-    def log_error(message,exc_info=False):
+    def log_error(message, exc_info=False):
         if logger:
             logger.error(message, exc_info=exc_info)
 
+    def log_warning(message):
+        if logger:
+            logger.warning(message)
+
     attempts = 0
     success = False
     gcs_path = None
     local_path_final = None
-    overwritten = False
+    gcs_file_overwritten = False
+    gcs_file_already_exists = False
+    gcs_file_saved_with_increment = False
     gcs_upload_exception = None # Store potential GCS exception
 
+    # Check for conflicting options
+    if overwrite_if_exists and increment_if_exists:
+        raise ValueError("When writing JSON to GCS, both overwrite and increment_if_exists cannot be True at the same time.")
+
     if isinstance(data, (list, dict)):
         data_str = json.dumps(data, indent=2)
     elif isinstance(data, str):
@@ -157,17 +169,32 @@ def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
     else:
         raise ValueError("Unsupported data type. It should be a list, dict, or str.")
 
+    bucket = storage_client.bucket(bucket_name)
+    base_file_name, ext = os.path.splitext(file_name)
+    increment = 0
+
     while attempts < max_retries and not success:
         try:
-            bucket = storage_client.bucket(bucket_name)
-            blob = bucket.blob(file_name)
-
-            # Check if the file exists and if we should overwrite it
-            if blob.exists():
-                if not overwrite:
-                    raise FileExistsError(f"File {file_name} already exists in bucket {bucket_name} and overwrite is set to False.")
-                else:
-                    overwritten = True
+            if increment_if_exists:
+                while bucket.blob(file_name).exists():
+                    gcs_file_already_exists = True
+                    increment += 1
+                    file_name = f"{base_file_name}_{increment}{ext}"
+                    gcs_file_saved_with_increment = True
+                log_warning(f"File {file_name} already exists in bucket {bucket_name}. Writing with increment: {increment_if_exists}")
+            else:
+                blob = bucket.blob(file_name)
+
+                # Check if the file exists
+                if blob.exists():
+                    gcs_file_already_exists = True
+                    gcs_path = f"gs://{bucket_name}/{file_name}"
+                    log_message(f"File {file_name} already exists in bucket {bucket_name}. Overwriting: {overwrite_if_exists}")
+                    if not overwrite_if_exists:
+                        log_warning(f"File {file_name} already exists and overwrite is set to False. Skipping save to GCS.")
+                        break
+                    else:
+                        gcs_file_overwritten = True
 
             blob.upload_from_string(data_str, content_type='application/json')
             gcs_path = f"gs://{bucket_name}/{file_name}"
@@ -181,15 +208,31 @@ def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
             else:
                 log_error(f"Failed to write {file_name} to GCS bucket {bucket_name} after {max_retries} attempts: {e}")
 
-    if not success and (save_locally or local_path):
+    if not success or save_locally or local_path:
         try:
             if not local_path:
                 local_path_final = os.path.join("/tmp", file_name)
             else:
                 local_path_final = os.path.join(local_path, file_name)
-            with open(local_path_final, 'w', encoding='utf-8') as f:
-                f.write(data_str)
-            log_message(f"Saved {file_name} locally at {local_path_final}.")
+
+            if os.path.exists(local_path_final):
+                if increment_if_exists:
+                    increment = 0
+                    while os.path.exists(local_path_final):
+                        increment += 1
+                        local_path_final = os.path.join(local_path, f"{base_file_name}_{increment}{ext}")
+                    gcs_file_saved_with_increment = True
+                elif not overwrite_if_exists:
+                    log_message(f"File {file_name} already exists locally at {local_path_final} and overwrite is set to False. Skipping save.")
+                    success = True
+                else:
+                    log_message(f"File {file_name} already exists locally at {local_path_final}. Overwriting: {overwrite_if_exists}")
+
+            if not success:
+                with open(local_path_final, 'w', encoding='utf-8') as f:
+                    f.write(data_str)
+                log_message(f"Saved {file_name} locally at {local_path_final}. Overwritten: {overwrite_if_exists}")
+                success = True
         except Exception as local_e:
             log_error(f"Failed to write {file_name} locally: {local_e}", exc_info=True)
 
@@ -199,7 +242,9 @@ def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
     return {
         "gcs_path": gcs_path,
         "local_path": local_path_final,
-        "overwritten": overwritten
+        "gcs_file_already_exists": gcs_file_already_exists,
+        "gcs_file_overwritten": gcs_file_overwritten,
+        "gcs_file_saved_with_increment": gcs_file_saved_with_increment
     }
 
 
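
The new write_json_to_gcs contract in one sketch: file_name is now required, the mutually exclusive overwrite_if_exists/increment_if_exists flags raise a ValueError when both are set, and the result dict exposes the three new gcs_file_* booleans. Not part of the diff; the bucket and file names are hypothetical:

    from google.cloud import storage
    from ipulse_shared_core_ftredge.utils_gcp import write_json_to_gcs

    client = storage.Client()  # assumes default GCP credentials
    result = write_json_to_gcs(bucket_name="my-data-bucket",   # hypothetical bucket
                               storage_client=client,
                               data={"rows": 3},
                               file_name="prices_20240701.json",
                               overwrite_if_exists=True)       # replace an existing object
    print(result["gcs_path"],
          result["gcs_file_already_exists"],
          result["gcs_file_overwritten"],
          result["gcs_file_saved_with_increment"])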
ipulse_shared_core_ftredge/utils_templates_and_schemas.py

@@ -5,8 +5,8 @@
 
 import datetime
 from google.cloud import bigquery
-from ipulse_shared_core_ftredge.enums.enums_common_utils import NoticeLevel
-from ipulse_shared_core_ftredge.utils_common import Notice
+from ipulse_shared_core_ftredge.enums.enums_common_utils import LogLevel
+from ipulse_shared_core_ftredge.utils_common import ContextLog
 
 
 def create_bigquery_schema_from_json(json_schema):
@@ -19,55 +19,58 @@ def create_bigquery_schema_from_json(json_schema):
     return schema
 
 
-def update_check_with_schema_template(updates, schema, dt_ts_to_str=True, check_max_length=True):
-
+def check_format_against_schema_template(data_to_check, schema, dt_ts_to_str=True, check_max_length=True):
     """Ensure Update dict corresponds to the config schema, ensuring proper formats and lengths."""
-    valid_updates = {}
-    notices=[] ### THIS IS TO AVOID LOGGING A WARNING RANDOMLY, INSTEAD GROUPPING FOR A GIVEN RUN
-
-    # Process updates to conform to the schema
-    for field in schema:
-        field_name = field["name"]
-        field_type = field["type"]
-        mode = field["mode"]
-
-        # Initialize notice to None at the start of each field processing
-        notice = None
-
-        if field_name in updates:
-            value = updates[field_name]
-
-            # Handle date and timestamp formatting
-
-            # Validate and potentially convert date and timestamp fields
-            if field_type == "DATE":
-                value, notice = handle_date_fields(field_name, value, dt_ts_to_str)
-            elif field_type == "TIMESTAMP":
-                value, notice = handle_timestamp_fields(field_name, value, dt_ts_to_str)
-            elif field_type in ["STRING", "INT64", "FLOAT64", "BOOL"]:
-                value, notice = handle_type_conversion(field_type, field_name, value )
-
-            if notice:
-                notices.append(notice)
-
-            # Check and handle max length restriction
-            if check_max_length and "max_length" in field:
-                value,notice = check_and_truncate_length(field_name, value, field["max_length"])
-                if notice:
-                    notices.append(notice)
-
-            # Only add to the dictionary if value is not None or the field is required
-            if value is not None or mode == "REQUIRED":
-                valid_updates[field_name] = value
-
-        elif mode == "REQUIRED":
-            notice=Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
-                          subject=field_name,
-                          description=f"Required field '{field_name}' is missing in the updates.")
+    checked_data = {}
+    warnings_or_error = []  # Group warnings and errors for a given run
+
+    try:
+        # Process updates to conform to the schema
+        for field in schema:
+            field_name = field["name"]
+            field_type = field["type"]
+            mode = field["mode"]
+
+            # Initialize notice to None at the start of each field processing
+            warning = None
+
+            if field_name in data_to_check:
+                value = data_to_check[field_name]
+
+                # Handle date and timestamp formatting
+                if field_type == "DATE":
+                    value, warning = handle_date_fields(field_name, value, dt_ts_to_str)
+                elif field_type == "TIMESTAMP":
+                    value, warning = handle_timestamp_fields(field_name, value, dt_ts_to_str)
+                elif field_type in ["STRING", "INT64", "FLOAT64", "BOOL"]:
+                    value, warning = handle_type_conversion(field_type, field_name, value)
+
+                if warning:
+                    warnings_or_error.append(warning)
+
+                # Check and handle max length restriction
+                if check_max_length and "max_length" in field:
+                    value, warning = check_and_truncate_length(field_name, value, field["max_length"])
+                    if warning:
+                        warnings_or_error.append(warning)
+
+                # Only add to the dictionary if value is not None or the field is required
+                if value is not None or mode == "REQUIRED":
+                    checked_data[field_name] = value
+
+            elif mode == "REQUIRED":
+                warning = ContextLog(level=LogLevel.WARNING,
+                                     subject=field_name,
+                                     description=f"Required field '{field_name}' is missing in the updates.")
+                warnings_or_error.append(warning)
 
-            notices.append(notice)
+    except Exception as e:
+        error_log = ContextLog(level=LogLevel.ERROR_EXCEPTION_REDO,
+                               subject=data_to_check,
+                               description=f"An error occurred during update check: {str(e)}")
+        warnings_or_error.append(error_log)
 
-    return valid_updates, notices
+    return checked_data, warnings_or_error
 
 def handle_date_fields(field_name, value, dt_ts_to_str):
     """Handles date fields, ensuring they are in the correct format and optionally converts them to string."""
@@ -82,11 +85,11 @@ def handle_date_fields(field_name, value, dt_ts_to_str):
                 return value, None
             return parsed_date, None
         except ValueError:
-            return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                                 subject=field_name,
                                 description=f"Expected a DATE in YYYY-MM-DD format but got {value}.")
     else:
-        return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+        return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                             subject=field_name,
                             description= f"Expected a DATE or YYYY-MM-DD str format but got {value} of type {type(value).__name__}.")
 
@@ -104,11 +107,11 @@ def handle_timestamp_fields(field_name, value, dt_ts_to_str):
                 return value, None
             return parsed_datetime, None
         except ValueError:
-            return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                                 subject=field_name,
                                 description= f"Expected ISO format TIMESTAMP but got {value}.")
     else:
-        return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+        return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                             subject=field_name,
                             description= f"Expected ISO format TIMESTAMP but got {value} of type {type(value).__name__}.")
 
@@ -116,7 +119,7 @@ def handle_timestamp_fields(field_name, value, dt_ts_to_str):
 def check_and_truncate_length(field_name, value, max_length):
     """Checks and truncates the length of string fields if they exceed the max length."""
     if isinstance(value, str) and len(value) > max_length:
-        return value[:max_length], Notice(level=NoticeLevel.WARNING_FIX_RECOMMENDED,
+        return value[:max_length], ContextLog(level=LogLevel.WARNING_FIX_RECOMMENDED,
                                           subject= field_name,
                                           description= f"Field exceeds max length: {len(value)}/{max_length}. Truncating.")
 
@@ -126,7 +129,7 @@ def check_and_truncate_length(field_name, value, max_length):
 
 def handle_type_conversion(field_type, field_name, value):
     if field_type == "STRING" and not isinstance(value, str):
-        return str(value), Notice(level=NoticeLevel.WARNING_REVIEW_RECOMMENDED,
+        return str(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
                                   subject=field_name,
                                   description= f"Expected STRING but got {value} of type {type(value).__name__}.")
 
@@ -134,18 +137,18 @@ def handle_type_conversion(field_type, field_name, value):
         try:
             return int(value), None
         except ValueError:
-            return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                                 subject= field_name,
                                 description=f"Expected INTEGER, but got {value} of type {type(value).__name__}.")
     if field_type == "FLOAT64" and not isinstance(value, float):
         try:
             return float(value), None
         except ValueError:
-            return None, Notice(level=NoticeLevel.WARNING_FIX_REQUIRED,
+            return None, ContextLog(level=LogLevel.WARNING_FIX_REQUIRED,
                                 subject=field_name,
                                 description=f"Expected FLOAT, but got {value} of type {type(value).__name__}.")
     if field_type == "BOOL" and not isinstance(value, bool):
-        return bool(value), Notice(level=NoticeLevel.WARNING_REVIEW_RECOMMENDED,
+        return bool(value), ContextLog(level=LogLevel.WARNING_REVIEW_RECOMMENDED,
                                    subject=field_name,
                                    description=f"Expected BOOL, but got {value}. Converting as {bool(value)}.")
 
METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.52
+Version: 2.54
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
RECORD

@@ -1,11 +1,11 @@
-ipulse_shared_core_ftredge/__init__.py,sha256=g3PRhnLbDFPVAFPIpL6tQgiEJFbPGPmeleHoxPNaQzE,879
-ipulse_shared_core_ftredge/utils_common.py,sha256=AT8R7BfEbOvMztgxQKv6d6jwGfTCJMO5FDz9idTSE4A,15564
-ipulse_shared_core_ftredge/utils_gcp.py,sha256=KZSuugt-746lfSAlNi8ks1llxxPzTskmtZwnm18SnhQ,8873
-ipulse_shared_core_ftredge/utils_templates_and_schemas.py,sha256=OriQHxM4AU6T3kGwwhjRdMt3ZYGmMJe0B5PLcHyzgXk,7084
-ipulse_shared_core_ftredge/enums/__init__.py,sha256=Pg8LUhBb7PJAHULoM13TrFEzG9wgCmw-ZuOdN3Rw6Og,853
-ipulse_shared_core_ftredge/enums/enums_common_utils.py,sha256=oW0zhmJZfeYycVXWDCede1_Vaa0Q-KClp_KOK4kzIj8,5261
-ipulse_shared_core_ftredge/enums/enums_data_eng.py,sha256=2i6Qo6Yi_j_O9xxnOD6QA-r0Cv7mWAUaKUx907XMRio,1825
-ipulse_shared_core_ftredge/enums/enums_module_fincore.py,sha256=MuqQg249clrWUOBb1S-iPsoOldN2_F3ohRQizbjhwG0,1374
+ipulse_shared_core_ftredge/__init__.py,sha256=BBOoNtI-PdKen1nCcmfBEXJ40X0Jf6b8Yeo6ytJT5rQ,873
+ipulse_shared_core_ftredge/utils_common.py,sha256=r4jYBsu6TnAINTewk72CZebWH_P_oVf5LlkeDy9-ndU,16482
+ipulse_shared_core_ftredge/utils_gcp.py,sha256=8KgsOPkLe1-1i3M_UX5niKg_CjjiNoUhZXiWFIHJdmY,11286
+ipulse_shared_core_ftredge/utils_templates_and_schemas.py,sha256=AwGl9J-XQc_aO_VKWhR_TuA1ML8nWxHuzHtxBH8yfwE,7499
+ipulse_shared_core_ftredge/enums/__init__.py,sha256=reCHJE0j_QTGwag7uo3cQXMTRaGRR_YOwywweb5pFb8,839
+ipulse_shared_core_ftredge/enums/enums_common_utils.py,sha256=gYckmSAzhh5MA3CyV18Z9-YtHsFJlmEcKC-4nuqMXu4,5673
+ipulse_shared_core_ftredge/enums/enums_data_eng.py,sha256=7w3Jjmw84Wq22Bb5Qs09Z82Bdf-j8nhRiQJfw60_g80,1903
+ipulse_shared_core_ftredge/enums/enums_module_fincore.py,sha256=W1TkSLu3ryLf_aif2VcKsFznWz0igeMUR_buoGEG6w8,1406
 ipulse_shared_core_ftredge/enums/enums_modules.py,sha256=AyXUoNmR75DZLaEHi3snV6LngR25LeZRqzrLDaAupbY,1244
 ipulse_shared_core_ftredge/models/__init__.py,sha256=gE22Gzhil0RYQa7YLtdtT44_AsWqklcDfRtgLAQc1dI,200
 ipulse_shared_core_ftredge/models/audit_log_firestore.py,sha256=5AwO6NHuOncq65n400eqM8QPrS2EGGaP3Z_6l2rxdBE,261
@@ -18,8 +18,8 @@ ipulse_shared_core_ftredge/models/user_profile_update.py,sha256=oKK0XsQDKkgDvjFP
 ipulse_shared_core_ftredge/models/user_status.py,sha256=8TyRd8tBK9_xb0MPKbI5pn9-lX7ovKbeiuWYYPtIOiw,3202
 ipulse_shared_core_ftredge/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ipulse_shared_core_ftredge/tests/test.py,sha256=0lS8HP5Quo_BqNoscU40qOH9aJRaa1Pfam5VUBmdld8,682
-ipulse_shared_core_ftredge-2.52.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
-ipulse_shared_core_ftredge-2.52.dist-info/METADATA,sha256=TKMk8quvkJXQbkQmBUcANVJfow831f3dHL2QGqwU9IM,561
-ipulse_shared_core_ftredge-2.52.dist-info/WHEEL,sha256=rWxmBtp7hEUqVLOnTaDOPpR-cZpCDkzhhcBce-Zyd5k,91
-ipulse_shared_core_ftredge-2.52.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
-ipulse_shared_core_ftredge-2.52.dist-info/RECORD,,
+ipulse_shared_core_ftredge-2.54.dist-info/LICENCE,sha256=YBtYAXNqCCOo9Mr2hfkbSPAM9CeAr2j1VZBSwQTrNwE,1060
+ipulse_shared_core_ftredge-2.54.dist-info/METADATA,sha256=EB0Hd9nuPAFaIClcQR5RIGECC1L9G2ZpVXsZ7gYNFB4,561
+ipulse_shared_core_ftredge-2.54.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
+ipulse_shared_core_ftredge-2.54.dist-info/top_level.txt,sha256=8sgYrptpexkA_6_HyGvho26cVFH9kmtGvaK8tHbsGHk,27
+ipulse_shared_core_ftredge-2.54.dist-info/RECORD,,
WHEEL

@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (71.0.4)
+Generator: setuptools (71.1.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 