ipulse-shared-core-ftredge 2.19.tar.gz → 2.21.tar.gz

This diff shows the content of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (26)
  1. {ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.21}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/setup.py +1 -1
  3. ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge/__init__.py +5 -0
  4. ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge/gcp_utils.py +150 -0
  5. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  6. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +1 -1
  7. ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge/__init__.py +0 -5
  8. ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge/gcp_logger.py +0 -77
  9. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/LICENCE +0 -0
  10. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/README.md +0 -0
  11. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/pyproject.toml +0 -0
  12. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/setup.cfg +0 -0
  13. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  14. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/audit_log_firestore.py +0 -0
  15. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
  16. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/pulse_enums.py +0 -0
  17. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
  18. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  19. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  20. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  21. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  22. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/tests/__init__.py +0 -0
  23. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge/tests/test.py +0 -0
  24. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  25. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  26. {ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
{ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.21}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.19
+Version: 2.21
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
{ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='ipulse_shared_core_ftredge',
-    version='2.19',
+    version='2.21',
     package_dir={'': 'src'}, # Specify the source directory
     packages=find_packages(where='src'), # Look for packages in 'src'
     install_requires=[
ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge/__init__.py
@@ -0,0 +1,5 @@
+from .models import Organisation, UserAuth, UserProfile, UserStatus, UserProfileUpdate, pulse_enums
+from .gcp_utils import setup_gcp_logger_and_error_report, read_csv_from_gcs, read_json_from_gcs, write_csv_to_gcs, write_json_to_gcs
+
+
+
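With the new top-level __init__.py, everything the package exposes can be imported from the package root rather than from individual modules. A minimal sketch of that import surface follows (illustrative only, assuming the published package name ipulse_shared_core_ftredge):

```python
# Names re-exported by the 2.21 __init__.py (sketch, not part of the diff itself).
from ipulse_shared_core_ftredge import (
    Organisation,
    UserAuth,
    UserProfile,
    UserStatus,
    UserProfileUpdate,
    pulse_enums,                        # enums module, re-exported as-is
    setup_gcp_logger_and_error_report,  # logger + Error Reporting setup
    read_csv_from_gcs,
    read_json_from_gcs,
    write_csv_to_gcs,
    write_json_to_gcs,
)
```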
ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge/gcp_utils.py
@@ -0,0 +1,150 @@
+import json
+import csv
+from io import StringIO
+import logging
+import os
+import traceback
+from google.cloud import error_reporting, logging as cloud_logging
+from google.api_core.exceptions import NotFound
+
+
+############################################################################
+##################### SETTING UP LOGGER ##########################
+
+####DEPCREACATED: THIS APPROACH WAS GOOD, BUT ERRORS WERE NOT REPORTED TO ERROR REPORTING
+# logging.basicConfig(level=logging.INFO)
+# logging_client = google.cloud.logging.Client()
+# logging_client.setup_logging()
+###################################
+
+
+##### THIS APPROACH IS USED NOW ########
+## TODO Fix the issue with POST 0B Nan.... printed in Cloud Logging , which is referring to posting to Cloud Logging probably.
+ENV = os.getenv('ENV', 'LOCAL').strip("'")
+
+def setup_gcp_logger_and_error_report(logger_name):
+    """Sets up a logger with Error Reporting and Cloud Logging handlers.
+
+    Args:
+        logger_name: The name of the logger.
+
+    Returns:
+        logging.Logger: The configured logger instance.
+    """
+
+    class ErrorReportingHandler(logging.Handler):
+        def __init__(self, level=logging.ERROR):
+            super().__init__(level)
+            self.error_client = error_reporting.Client()
+            self.propagate = True
+
+        def emit(self, record):
+            try:
+                if record.levelno >= logging.ERROR:
+                    message = self.format(record)
+                    if record.exc_info:
+                        message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+                    if hasattr(record, 'pathname') and hasattr(record, 'lineno'):
+                        message += f"\nFile: {record.pathname}, Line: {record.lineno}"
+                    self.error_client.report(message)
+            except Exception as e:
+                # Ensure no exceptions are raised during logging
+                self.handleError(record)
+
+    logger = logging.getLogger(logger_name)
+    logger.setLevel(logging.INFO)
+
+    # Create Error Reporting handler
+    error_reporting_handler = ErrorReportingHandler()
+
+    # Create Google Cloud Logging handler
+    cloud_logging_client = cloud_logging.Client()
+    cloud_logging_handler = cloud_logging_client.get_default_handler()
+
+    # Add handlers to the logger
+    logger.addHandler(error_reporting_handler)
+    logger.addHandler(cloud_logging_handler)
+
+    # Add a console handler for local development
+    if ENV == "LOCAL":
+        formatter = logging.Formatter('%(levelname)s : %(name)s : %(asctime)s : %(message)s')
+        console_handler = logging.StreamHandler()
+        console_handler.setFormatter(formatter)
+        logger.addHandler(console_handler)
+
+    return logger
+############################################################################
+
+
+############################################################################
+##################### GOOGLE CLOUD STORAGE UTILS ##########################
+
+def read_json_from_gcs(bucket_name, file_name, stor_client, logger):
+    """ Helper function to read a JSON file from Google Cloud Storage """
+    try:
+        bucket = stor_client.bucket(bucket_name)
+        blob = bucket.blob(file_name)
+        data_string = blob.download_as_text()
+        data = json.loads(data_string)
+        return data
+    except NotFound:
+        logger.error(f"Error: The file {file_name} was not found in the bucket {bucket_name}.")
+        return None
+    except json.JSONDecodeError:
+        logger.error(f"Error: The file {file_name} could not be decoded as JSON.")
+        return None
+    except Exception as e:
+        logger.error(f"An unexpected error occurred: {e}", exc_info=True)
+        return None
+
+def read_csv_from_gcs(bucket_name, file_name, storage_client, logger):
+    """ Helper function to read a CSV file from Google Cloud Storage """
+    try:
+        bucket = storage_client.bucket(bucket_name)
+        blob = bucket.blob(file_name)
+        data_string = blob.download_as_text()
+        data_file = StringIO(data_string)
+        reader = csv.DictReader(data_file)
+        return list(reader)
+    except NotFound:
+        logger.error(f"Error: The file {file_name} was not found in the bucket {bucket_name}.")
+        return None
+    except csv.Error:
+        logger.error(f"Error: The file {file_name} could not be read as CSV.")
+        return None
+    except Exception as e:
+        logger.error(f"An unexpected error occurred: {e}", exc_info=True)
+        return None
+
+def write_json_to_gcs(bucket_name, file_name, data, stor_client, logger, log_info_verbose=True):
+    """ Helper function to write a JSON file to Google Cloud Storage """
+    try:
+        bucket = stor_client.bucket(bucket_name)
+        blob = bucket.blob(file_name)
+        data_string = json.dumps(data)
+        blob.upload_from_string(data_string, content_type='application/json')
+        if log_info_verbose:
+            logger.info(f"Successfully wrote JSON to {file_name} in bucket {bucket_name}.")
+    except Exception as e:
+        logger.error(f"An unexpected error occurred while writing JSON to GCS: {e}", exc_info=True)
+
+def write_csv_to_gcs(bucket_name, file_name, data, storage_client, logger,log_info_verbose=True):
+    """ Helper function to write a CSV file to Google Cloud Storage """
+    try:
+        bucket = storage_client.bucket(bucket_name)
+        blob = bucket.blob(file_name)
+        data_file = StringIO()
+        if data and isinstance(data, list) and isinstance(data[0], dict):
+            fieldnames = data[0].keys()
+            writer = csv.DictWriter(data_file, fieldnames=fieldnames)
+            writer.writeheader()
+            writer.writerows(data)
+        else:
+            raise ValueError("Data should be a list of dictionaries")
+        blob.upload_from_string(data_file.getvalue(), content_type='text/csv')
+        if log_info_verbose:
+            logger.info(f"Successfully wrote CSV to {file_name} in bucket {bucket_name}.")
+    except ValueError as e:
+        logger.error(f"ValueError: {e}")
+    except Exception as e:
+        logger.error(f"An unexpected error occurred while writing CSV to GCS: {e}", exc_info=True)
{ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.19
+Version: 2.21
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
{ipulse_shared_core_ftredge-2.19 → ipulse_shared_core_ftredge-2.21}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
@@ -3,7 +3,7 @@ README.md
 pyproject.toml
 setup.py
 src/ipulse_shared_core_ftredge/__init__.py
-src/ipulse_shared_core_ftredge/gcp_logger.py
+src/ipulse_shared_core_ftredge/gcp_utils.py
 src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
 src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
 src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge/__init__.py
@@ -1,5 +0,0 @@
-from .models import Organisation, UserAuth, UserProfile, UserStatus, UserProfileUpdate, pulse_enums
-from .gcp_logger import setup_gcp_logger_and_error_report
-
-
-
ipulse_shared_core_ftredge-2.19/src/ipulse_shared_core_ftredge/gcp_logger.py
@@ -1,77 +0,0 @@
-import logging
-import os
-import traceback
-from google.cloud import error_reporting, logging as cloud_logging
-
-
-############################################################################
-##################### SETTING UP LOGGER ##########################
-##########
-
-####DEPCREACATED: THIS APPROACH WAS GOOD, BUT ERRORS WERE NOT REPORTED TO ERROR REPORTING
-
-# logging.basicConfig(level=logging.INFO)
-# logging_client = google.cloud.logging.Client()
-# # Retrieves a Cloud Logging handler based on the environment
-# # you're running in and integrates the handler with the
-# # Python logging module. By default this captures all logs
-# # at INFO level and higher
-# logging_client.setup_logging()
-###################################
-
-##### THIS APPROACH IS USED NOW ########
-## TODO Fix the issue with POST 0B Nan.... printed in Cloud Logging , which is referring to posting to Cloud Logging probably.
-
-ENV = os.getenv('ENV', 'LOCAL').strip("'")
-
-def setup_gcp_logger_and_error_report(logger_name):
-    """Sets up a logger with Error Reporting and Cloud Logging handlers.
-
-    Args:
-        logger_name: The name of the logger.
-
-    Returns:
-        logging.Logger: The configured logger instance.
-    """
-
-    class ErrorReportingHandler(logging.Handler):
-        def __init__(self, level=logging.ERROR):
-            super().__init__(level)
-            self.error_client = error_reporting.Client()
-            self.propagate = True
-
-        def emit(self, record):
-            try:
-                if record.levelno >= logging.ERROR:
-                    message = self.format(record)
-                    if record.exc_info:
-                        message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
-                    if hasattr(record, 'pathname') and hasattr(record, 'lineno'):
-                        message += f"\nFile: {record.pathname}, Line: {record.lineno}"
-                    self.error_client.report(message)
-            except Exception as e:
-                # Ensure no exceptions are raised during logging
-                self.handleError(record)
-
-    logger = logging.getLogger(logger_name)
-    logger.setLevel(logging.INFO)
-
-    # Create Error Reporting handler
-    error_reporting_handler = ErrorReportingHandler()
-
-    # Create Google Cloud Logging handler
-    cloud_logging_client = cloud_logging.Client()
-    cloud_logging_handler = cloud_logging_client.get_default_handler()
-
-    # Add handlers to the logger
-    logger.addHandler(error_reporting_handler)
-    logger.addHandler(cloud_logging_handler)
-
-    # Add a console handler for local development
-    if ENV == "LOCAL":
-        formatter = logging.Formatter('%(levelname)s : %(name)s : %(asctime)s : %(message)s')
-        console_handler = logging.StreamHandler()
-        console_handler.setFormatter(formatter)
-        logger.addHandler(console_handler)
-
-    return logger
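Since gcp_logger.py is deleted in 2.21 and setup_gcp_logger_and_error_report moves to gcp_utils.py (and is re-exported from the package root), callers pinned to the old module path need an import change. A hedged sketch of that migration:

```python
# Before (2.19): this module path no longer exists in 2.21.
# from ipulse_shared_core_ftredge.gcp_logger import setup_gcp_logger_and_error_report

# After (2.21): import from gcp_utils, or from the package root via the new __init__.py.
from ipulse_shared_core_ftredge.gcp_utils import setup_gcp_logger_and_error_report

logger = setup_gcp_logger_and_error_report("my-service")
logger.info("logger configured via gcp_utils")
```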