ipulse-shared-core-ftredge 2.51.tar.gz → 2.52.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. {ipulse_shared_core_ftredge-2.51/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.52}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/setup.py +1 -1
  3. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/__init__.py +1 -1
  4. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/utils_common.py +60 -52
  5. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/utils_gcp.py +25 -30
  6. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  7. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/LICENCE +0 -0
  8. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/README.md +0 -0
  9. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/pyproject.toml +0 -0
  10. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/setup.cfg +0 -0
  11. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/enums/__init__.py +0 -0
  12. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -0
  13. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -0
  14. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -0
  15. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/enums/enums_modules.py +0 -0
  16. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  17. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/audit_log_firestore.py +0 -0
  18. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
  19. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/pulse_enums.py +0 -0
  20. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
  21. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  22. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  23. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  24. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  25. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  26. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/tests/test.py +0 -0
  27. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge/utils_templates_and_schemas.py +0 -0
  28. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +0 -0
  29. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  30. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  31. {ipulse_shared_core_ftredge-2.51 → ipulse_shared_core_ftredge-2.52}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0

PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.51
+Version: 2.52
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar

setup.py

@@ -3,7 +3,7 @@ from setuptools import setup, find_packages

 setup(
     name='ipulse_shared_core_ftredge',
-    version='2.51',
+    version='2.52',
     package_dir={'': 'src'}, # Specify the source directory
     packages=find_packages(where='src'), # Look for packages in 'src'
     install_requires=[

src/ipulse_shared_core_ftredge/__init__.py

@@ -2,7 +2,7 @@ from .models import (Organisation, UserAuth, UserProfile,
                      UserStatus, UserProfileUpdate, pulse_enums)
 from .utils_gcp import (setup_gcp_logger_and_error_report,
                         read_csv_from_gcs, read_json_from_gcs,
-                        write_csv_to_gcs, write_data_to_gcs)
+                        write_csv_to_gcs, write_json_to_gcs)
 from .utils_templates_and_schemas import (create_bigquery_schema_from_json,
                                           update_check_with_schema_template)
 from .utils_common import (Notice, NoticesManager)
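
For downstream imports, the only package-level API change is the renamed GCS writer. A minimal sketch of the updated import (the 2.51 name is kept as a comment for reference):

    # 2.51 exposed write_data_to_gcs; 2.52 renames it:
    from ipulse_shared_core_ftredge import write_json_to_gcs
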

src/ipulse_shared_core_ftredge/utils_common.py

@@ -11,7 +11,7 @@ from contextlib import contextmanager
 from typing import List
 from google.cloud import logging as cloudlogging
 from ipulse_shared_core_ftredge.enums.enums_common_utils import NoticeLevel, NoticeManagerCategory, NoticeStatus
-from ipulse_shared_core_ftredge.utils_gcp import write_data_to_gcs
+from ipulse_shared_core_ftredge.utils_gcp import write_json_to_gcs


 # ["data_import","data_quality", "data_processing","data_general","data_persistance","metadata_quality", "metadata_processing", "metadata_persistance","metadata_general"]
@@ -72,7 +72,7 @@ class Notice:
         # Determine the number of lines to keep from the start and end
         keep_lines_start = min(self.MAX_TRACEBACK_LINES // 2, len(combined_lines))
         keep_lines_end = min(self.MAX_TRACEBACK_LINES // 2, len(combined_lines) - keep_lines_start)
-
+
         if len(combined_lines) > self.MAX_TRACEBACK_LINES:
             # Include the first few and last few lines, and an indicator of truncation
             formatted_traceback = '\n'.join(
@@ -82,8 +82,8 @@ class Notice:
             )
         else:
             formatted_traceback = '\n'.join(combined_lines)
-
-        return formatted_traceback
+
+        return formatted_traceback

     @property
     def start_context(self):
@@ -123,12 +123,12 @@ class NoticesManager:
     SUCCESS_CODE_START_VALUE = NoticeLevel.SUCCESS.value

     def __init__(self, start_context: str, category: NoticeManagerCategory = NoticeManagerCategory.NOTICES, logger_name=None):
-        self._notice_manager_id = str(uuid.uuid4())
+        self._id = str(uuid.uuid4())
         self._notices = []
         self._early_stop = False
-        self._error_count = 0
-        self._warning_count = 0
-        self._success_count = 0
+        self._errors_count = 0
+        self._warnings_count = 0
+        self._successes_count = 0
         self._level_counts = {level.name: 0 for level in NoticeLevel}
         self._start_context = start_context
         self._context_stack = []
@@ -157,23 +157,31 @@ class NoticesManager:
         if self._context_stack:
             self._context_stack.pop()

-    def get_current_context(self):
+    @property
+    def current_context(self):
         return " >> ".join(self._context_stack)

-    def get_start_context(self):
+    @property
+    def start_context(self):
         return self._start_context
-
-    def get_notice_manager_id(self):
-        return self._notice_manager_id
-
+
+    @property
+    def id(self):
+        return self._id
+
+    @property
+    def early_stop(self):
+        return self._early_stop
+
     def set_early_stop(self, max_errors_tolerance:int, create_error_notice=True,pop_context=False):
-        self._early_stop = True
+        self.early_stop = True
         if create_error_notice:
            if pop_context:
                self.pop_context()
            self.add_notice(Notice(level=NoticeLevel.ERROR,
                                   subject="EARLY_STOP",
                                   description=f"Total MAX_ERRORS_TOLERANCE of {max_errors_tolerance} has been reached."))
+
     def reset_early_stop(self):
         self._early_stop = False

@@ -184,9 +192,9 @@ class NoticesManager:
         if (self._category == NoticeManagerCategory.SUCCESSES.value and notice.level != NoticeLevel.SUCCESS) or \
            (self._category == NoticeManagerCategory.WARN_ERRS.value and notice.level.value < self.WARNING_CODE_START_VALUE):
             raise ValueError(f"Invalid notice level {notice.level.name} for category {self._category}")
-        notice.start_context = self.get_start_context()
-        notice.context = self.get_current_context()
-        notice.notice_manager_id = self._notice_manager_id
+        notice.start_context = self.start_context
+        notice.context = self.current_context
+        notice.notice_manager_id = self.id
         notice_dict = notice.to_dict()
         self._notices.append(notice_dict)
         self._update_counts(notice_dict)
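
In 2.52 the context and id getters become read-only properties, so callers switch from method calls to attribute access. A minimal migration sketch (the manager instance and its start context are placeholders, not part of the package):

    from ipulse_shared_core_ftredge import NoticesManager

    manager = NoticesManager(start_context="daily_pipeline")  # placeholder context

    # 2.51: manager.get_start_context(), manager.get_current_context(), manager.get_notice_manager_id()
    # 2.52: property access replaces the getter methods
    print(manager.start_context)    # the start_context passed to __init__
    print(manager.current_context)  # pushed contexts joined with " >> "
    print(manager.id)               # UUID string, formerly get_notice_manager_id()
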
@@ -203,9 +211,9 @@ class NoticesManager:

     def clear_notices_and_counts(self):
         self._notices = []
-        self._error_count = 0
-        self._warning_count = 0
-        self._success_count = 0
+        self._errors_count = 0
+        self._warnings_count = 0
+        self._successes_count = 0
         self._level_counts = {level.name: 0 for level in NoticeLevel}

     def clear_notices(self):
@@ -222,24 +230,24 @@ class NoticesManager:
             notice for notice in self._notices
             if context_substring in notice["context"]
         ]
-
+
     def contains_errors(self):
-        return self._error_count > 0
+        return self._errors_count > 0

     def count_errors(self):
-        return self._error_count
+        return self._errors_count

     def contains_warnings_or_errors(self):
-        return self._warning_count > 0 or self._error_count > 0
+        return self._warnings_count > 0 or self._errors_count > 0

     def count_warnings_and_errors(self):
-        return self._warning_count + self._error_count
+        return self._warnings_count + self._errors_count

     def count_warnings(self):
-        return self._warning_count
+        return self._warnings_count

     def count_successes(self):
-        return self._success_count
+        return self._successes_count

     def count_all_notices(self):
         return len(self._notices)
@@ -256,40 +264,40 @@ class NoticesManager:
         )

     def count_notices_for_current_context(self):
-        return self._count_notices(self.get_current_context(), exact_match=True)
+        return self._count_notices(self.current_context, exact_match=True)

     def count_notices_for_current_and_nested_contexts(self):
-        return self._count_notices(self.get_current_context())
+        return self._count_notices(self.current_context)

     def count_notices_by_level_for_current_context(self, level: NoticeLevel):
-        return self._count_notices(self.get_current_context(), exact_match=True, level_code_min=level.value, level_code_max=level.value)
+        return self._count_notices(self.current_context, exact_match=True, level_code_min=level.value, level_code_max=level.value)

     def count_notices_by_level_for_current_and_nested_contexts(self, level: NoticeLevel):
-        return self._count_notices(self.get_current_context(), level_code_min=level.value, level_code_max=level.value)
+        return self._count_notices(self.current_context, level_code_min=level.value, level_code_max=level.value)

     def count_errors_for_current_context(self):
-        return self._count_notices(self.get_current_context(), exact_match=True, level_code_min=self.ERROR_CODE_START_VALUE)
+        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.ERROR_CODE_START_VALUE)

     def count_errors_for_current_and_nested_contexts(self):
-        return self._count_notices(self.get_current_context(), level_code_min=self.ERROR_CODE_START_VALUE)
+        return self._count_notices(self.current_context, level_code_min=self.ERROR_CODE_START_VALUE)

     def count_warnings_and_errors_for_current_context(self):
-        return self._count_notices(self.get_current_context(), exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE)
+        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE)

     def count_warnings_and_errors_for_current_and_nested_contexts(self):
-        return self._count_notices(self.get_current_context(), level_code_min=self.WARNING_CODE_START_VALUE)
+        return self._count_notices(self.current_context, level_code_min=self.WARNING_CODE_START_VALUE)

     def count_warnings_for_current_context(self):
-        return self._count_notices(self.get_current_context(), exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)
+        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)

     def count_warnings_for_current_and_nested_contexts(self):
-        return self._count_notices(self.get_current_context(), level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)
+        return self._count_notices(self.current_context, level_code_min=self.WARNING_CODE_START_VALUE, level_code_max=self.ERROR_CODE_START_VALUE - 1)

     def count_successes_for_current_context(self):
-        return self._count_notices(self.get_current_context(), exact_match=True, level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)
+        return self._count_notices(self.current_context, exact_match=True, level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)

     def count_successes_for_current_and_nested_contexts(self):
-        return self._count_notices(self.get_current_context(), level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)
+        return self._count_notices(self.current_context, level_code_min=self.SUCCESS_CODE_START_VALUE, level_code_max=self.SUCCESS_CODE_START_VALUE)

     def export_notices_to_gcs_file(self, bucket_name, storage_client, file_prefix=None, file_name=None, top_level_context=None, save_locally=False, local_path=None, logger=None, max_retries=2):
         def log_message(message):
@@ -309,10 +317,9 @@ class NoticesManager:
         else:
             file_name = f"{file_prefix}_{timestamp}_len{len(self._notices)}.json"

-        cloud_path = None
-        local_path = None
+        result=None
         try:
-            cloud_path, local_path = write_data_to_gcs(
+            result= write_json_to_gcs(
                 bucket_name=bucket_name,
                 storage_client=storage_client,
                 data=self._notices,
@@ -320,13 +327,14 @@ class NoticesManager:
                 save_locally=save_locally,
                 local_path=local_path,
                 logger=logger,
-                max_retries=max_retries
+                max_retries=max_retries,
+                overwrite=True
             )
-            log_message(f"{file_prefix} successfully saved to GCS at {cloud_path} and locally at {local_path}.")
+            log_message(f"{file_prefix} successfully saved (ovewritten={result.get("ovewritten")}) to GCS at {result.get("gcs_path")} and locally at {result.get("local_path")}.")
         except Exception as e:
             log_error(f"Failed at export_notices_to_gcs_file for {file_prefix} for file {file_name} to bucket {bucket_name}: {type(e).__name__} - {str(e)}")

-        return cloud_path, local_path
+        return result

     def import_notices_from_json(self, json_or_file, logger=None):
         def log_message(message):
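
With this change export_notices_to_gcs_file no longer returns a (cloud_path, local_path) tuple; it passes through the dict returned by write_json_to_gcs, and returns None when the upload raised and was only logged. A minimal caller sketch (bucket and local paths are placeholders):

    from google.cloud import storage
    from ipulse_shared_core_ftredge import NoticesManager

    manager = NoticesManager(start_context="daily_pipeline")  # placeholder
    result = manager.export_notices_to_gcs_file(
        bucket_name="example-notices-bucket",  # placeholder bucket
        storage_client=storage.Client(),
        file_prefix="notices",
        save_locally=True,
        local_path="/tmp/notices",
    )
    if result is not None:
        print(result.get("gcs_path"), result.get("local_path"), result.get("overwritten"))
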
@@ -353,17 +361,17 @@ class NoticesManager:

         if remove:
             if level_code >= self.ERROR_CODE_START_VALUE:
-                self._error_count -= 1
+                self._errors_count -= 1
             elif level_code >= self.WARNING_CODE_START_VALUE:
-                self._warning_count -= 1
+                self._warnings_count -= 1
             elif level_code >= self.SUCCESS_CODE_START_VALUE:
-                self._success_count -= 1
+                self._successes_count -= 1
             self._level_counts[level_name] -= 1
         else:
             if level_code >= self.ERROR_CODE_START_VALUE:
-                self._error_count += 1
+                self._errors_count += 1
             elif level_code >= self.WARNING_CODE_START_VALUE:
-                self._warning_count += 1
+                self._warnings_count += 1
             elif level_code == self.SUCCESS_CODE_START_VALUE:
-                self._success_count += 1
+                self._successes_count += 1
             self._level_counts[level_name] += 1

src/ipulse_shared_core_ftredge/utils_gcp.py

@@ -122,47 +122,32 @@ def read_csv_from_gcs(bucket_name, file_name, storage_client, logger):



-def write_data_to_gcs(bucket_name, storage_client, data, file_name=None,
-                      save_locally=False, local_path=None, logger=None, max_retries=3):
+def write_json_to_gcs(bucket_name, storage_client, data, file_name=None,
+                      save_locally=False, local_path=None, logger=None, max_retries=3, overwrite=True):
     """Saves data to Google Cloud Storage and optionally locally.

     This function attempts to upload data to GCS. If the upload fails after
     retries and `save_locally` is True or `local_path` is provided, it attempts
     to save the data locally.

-    Args:
-        bucket_name (str): Name of the GCS bucket.
-        storage_client (google.cloud.storage.Client): GCS client object.
-        data (list, dict, or str): Data to be saved.
-        file_name (str, optional): File name for GCS and local. Defaults to None.
-        save_locally (bool, optional): Save locally if GCS fails. Defaults to False.
-        local_path (str, optional): Local directory to save. Defaults to None.
-        logger (logging.Logger, optional): Logger for messages. Defaults to None.
-        max_retries (int, optional): Number of GCS upload retries. Defaults to 3.
-
     Returns:
-        tuple: A tuple containing the GCS path (or None if upload failed) and
-               the local path (or None if not saved locally).
-
-    Raises:
-        ValueError: If data is not a list, dict, or str.
-        Exception: If GCS upload fails after retries and local saving fails or
-                   is not requested. If GCS upload fails after retries and
-                   local saving is requested but unsuccessful.
+        dict: A dictionary containing the GCS path (or None if upload failed),
+              the local path (or None if not saved locally), and a boolean indicating if the file was overwritten.
     """

     def log_message(message):
         if logger:
             logger.info(message)

-    def log_error(message, exc_info=False):
+    def log_error(message,exc_info=False):
         if logger:
             logger.error(message, exc_info=exc_info)

     attempts = 0
     success = False
-    cloud_path = None
+    gcs_path = None
     local_path_final = None
+    overwritten = False
     gcs_upload_exception = None # Store potential GCS exception

     if isinstance(data, (list, dict)):
@@ -176,9 +161,17 @@ def write_data_to_gcs(bucket_name, storage_client, data, file_name=None,
         try:
             bucket = storage_client.bucket(bucket_name)
             blob = bucket.blob(file_name)
+
+            # Check if the file exists and if we should overwrite it
+            if blob.exists():
+                if not overwrite:
+                    raise FileExistsError(f"File {file_name} already exists in bucket {bucket_name} and overwrite is set to False.")
+                else:
+                    overwritten = True
+
             blob.upload_from_string(data_str, content_type='application/json')
-            cloud_path = f"{bucket_name}/{file_name}"
-            log_message(f"Successfully saved file to GCS {cloud_path}.")
+            gcs_path = f"gs://{bucket_name}/{file_name}"
+            log_message(f"Successfully saved file to GCS {gcs_path}.")
             success = True
         except Exception as e:
             gcs_upload_exception = e
@@ -186,7 +179,7 @@ def write_data_to_gcs(bucket_name, storage_client, data, file_name=None,
             if attempts < max_retries:
                 time.sleep(2 ** attempts)
             else:
-                log_error(f"Failed to write {file_name} to GCS bucket {bucket_name} after {max_retries} attempts :{e}")
+                log_error(f"Failed to write {file_name} to GCS bucket {bucket_name} after {max_retries} attempts: {e}")

     if not success and (save_locally or local_path):
         try:
@@ -198,14 +191,16 @@ def write_data_to_gcs(bucket_name, storage_client, data, file_name=None,
                 f.write(data_str)
             log_message(f"Saved {file_name} locally at {local_path_final}.")
         except Exception as local_e:
-            log_error(f"Failed to write {file_name} locally: {local_e}",exc_info=True)
-
-    # If GCS upload failed, raise a single exception here
+            log_error(f"Failed to write {file_name} locally: {local_e}", exc_info=True)

     if gcs_upload_exception is not None:
-        raise gcs_upload_exception # Propagate without nesting
+        raise gcs_upload_exception  # Propagate without nesting

-    return cloud_path, local_path_final
+    return {
+        "gcs_path": gcs_path,
+        "local_path": local_path_final,
+        "overwritten": overwritten
+    }


 def write_csv_to_gcs(bucket_name, file_name, data, storage_client, logger,log_info_verbose=True):
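
Taken together, the utils_gcp.py changes rename write_data_to_gcs to write_json_to_gcs, return a result dict instead of a tuple, and add an overwrite flag; when overwrite=False and the target blob already exists, the FileExistsError is re-raised to the caller after the retry loop. A minimal usage sketch under those assumptions (client, bucket, and object names are placeholders):

    from google.cloud import storage
    from ipulse_shared_core_ftredge import write_json_to_gcs

    client = storage.Client()
    records = [{"symbol": "EXMPL", "close": 101.25}]  # any JSON-serialisable list or dict

    try:
        result = write_json_to_gcs(
            bucket_name="example-data-bucket",   # placeholder
            storage_client=client,
            data=records,
            file_name="prices/2024-01-01.json",  # placeholder object name
            overwrite=False,                     # new in 2.52: refuse to replace an existing blob
        )
        print(result["gcs_path"], result["overwritten"])
    except FileExistsError:
        print("Object already exists and overwrite=False; nothing was uploaded.")
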

src/ipulse_shared_core_ftredge.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.51
+Version: 2.52
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar