ipulse-shared-core-ftredge 2.7.1__tar.gz → 2.8.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipulse-shared-core-ftredge might be problematic.

Files changed (47)
  1. {ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.8.1}/PKG-INFO +1 -2
  2. ipulse_shared_core_ftredge-2.8.1/README.md +7 -0
  3. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/setup.py +2 -3
  4. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/__init__.py +7 -12
  5. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/__init__.py +1 -0
  6. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py +144 -0
  7. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/utils_logging.py +72 -0
  8. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/__init__.py +3 -0
  9. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +10 -0
  10. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -2
  11. ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +24 -0
  12. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -1
  13. ipulse_shared_core_ftredge-2.7.1/README.md +0 -21
  14. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/__init__.py +0 -37
  15. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py +0 -107
  16. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -313
  17. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_logging.py +0 -108
  18. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py +0 -72
  19. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_modules.py +0 -31
  20. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_solution_providers.py +0 -24
  21. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/pulse_enums.py +0 -182
  22. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/__init__.py +0 -23
  23. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/logs/__init__.py +0 -2
  24. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/logs/context_log.py +0 -210
  25. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/logs/get_logger.py +0 -103
  26. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud.py +0 -53
  27. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +0 -442
  28. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +0 -166
  29. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +0 -27
  30. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +0 -356
  31. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +0 -180
  32. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +0 -151
  33. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +0 -38
  34. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/LICENCE +0 -0
  35. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/pyproject.toml +0 -0
  36. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/setup.cfg +0 -0
  37. {ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/logs → ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging}/audit_log_firestore.py +0 -0
  38. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  39. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
  40. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
  41. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  42. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  43. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  44. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  45. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  46. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
  47. {ipulse_shared_core_ftredge-2.7.1 → ipulse_shared_core_ftredge-2.8.1}/tests/test_utils_gcp.py +0 -0
--- ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
+++ ipulse_shared_core_ftredge-2.8.1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.7.1
+Version: 2.8.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -10,4 +10,3 @@ Requires-Dist: python-dateutil~=2.8
 Requires-Dist: pytest~=7.1
 Requires-Dist: google-cloud-logging~=3.10.0
 Requires-Dist: google-cloud-error-reporting~=1.11.0
-Requires-Dist: google-cloud-bigquery~=3.24.0
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/README.md
@@ -0,0 +1,7 @@
+# ipulse_shared_core
+Shared Models like User, Organisation etc. Also includes shared enum_sets
+
+
+### Enums
+
+Contains majority of all Enums used in Pulse
--- ipulse_shared_core_ftredge-2.7.1/setup.py
+++ ipulse_shared_core_ftredge-2.8.1/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages

 setup(
     name='ipulse_shared_core_ftredge',
-    version='2.7.1',
+    version='2.8.1',
     package_dir={'': 'src'}, # Specify the source directory
     packages=find_packages(where='src'), # Look for packages in 'src'
     install_requires=[
@@ -12,8 +12,7 @@ setup(
         'python-dateutil~=2.8',
         'pytest~=7.1',
         'google-cloud-logging~=3.10.0',
-        'google-cloud-error-reporting~=1.11.0',
-        'google-cloud-bigquery~=3.24.0'
+        'google-cloud-error-reporting~=1.11.0'
     ],
     author='Russlan Ramdowar',
     description='Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.',
--- ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/__init__.py
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/__init__.py
@@ -9,15 +9,10 @@ from .enums import (TargetLogs,LogLevel, Status, Unit, Frequency,
                     DataSourceType,PipelineTriggerType,DataOperationType,
                     MatchConditionType, DuplicationHandling, DuplicationHandlingStatus,
                     CodingLanguage, ExecutionLocation, ExecutionComputeType,
-                    CloudProvider,LoggingHandlers)
-from .utils import (get_logger,
-                    save_json_locally_extended,
-                    write_json_to_cloud_storage_extended,
-                    write_json_to_gcs_extended,
-                    write_csv_to_gcs,
-                    read_json_from_cloud_storage,
-                    read_csv_from_gcs,
-                    read_json_from_gcs,
-                    check_format_against_schema_template,
-                    create_bigquery_schema_from_json,
-                    Pipelinemon, ContextLog)
+                    CloudProvider,LoggingHandler)
+from .utils import (list_as_strings)
+
+from .logging import (get_logger,
+                      log_error,
+                      log_warning,
+                      log_info)
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/__init__.py
@@ -0,0 +1 @@
+from .utils_logging import get_logger, log_error, log_warning, log_info
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py
@@ -0,0 +1,144 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+# pylint: disable=missing-class-docstring
+# pylint: disable=broad-exception-caught
+
+import logging
+import traceback
+import json
+import os
+from google.cloud import error_reporting
+from google.cloud import logging as cloud_logging
+
+##########################################################################################################################
+#################################### Custom logging FORMATTERS #####################################################
+##########################################################################################################################
+
+
+class CloudLogFormatter(logging.Formatter):
+    """Formats log records as structured JSON."""
+
+    def format(self, record):
+        log_entry = {
+            'message': record.msg,
+            'timestamp': self.formatTime(record, self.datefmt),
+            'name': record.name,
+            'severity': record.levelname,
+            'pathname': record.pathname,
+            'lineno': record.lineno,
+        }
+        if record.exc_info:
+            log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
+        if isinstance(record.msg, dict):
+            log_entry.update(record.msg)
+        return json.dumps(log_entry)
+
+
+class LocalLogFormatter(logging.Formatter):
+    """Formats log records for local output to the console."""
+
+    def format(self, record): # Make sure you have the 'record' argument here!
+        path_parts = record.pathname.split(os.sep)
+
+        # Get the last two parts of the path if they exist
+        if len(path_parts) >= 2:
+            short_path = os.path.join(path_parts[-2], path_parts[-1])
+        else:
+            short_path = record.pathname
+
+        # Format log messages differently based on the log level
+        if record.levelno == logging.INFO:
+            log_message = f"[INFO] {self.formatTime(record, self.datefmt)} :: {record.msg}"
+        elif record.levelno == logging.DEBUG:
+            log_message = f"[DEBUG] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+        elif record.levelno == logging.ERROR:
+            log_message = f"[ERROR] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+            if record.exc_info:
+                log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))
+        else:
+            log_message = f"[{record.levelname}] {self.formatTime(record, self.datefmt)} :: {record.msg} :: {short_path} :: lineno {record.lineno} :: {record.name}"
+
+
+        return log_message
+
+#############################################################################################################################################
+######################################## Logging handlers for Google Cloud ########################################
+#############################################################################################################################################
+
+class CustomGCPLoggingHandler(cloud_logging.handlers.CloudLoggingHandler):
+    """Custom handler for Google Cloud Logging with a dynamic logName."""
+    def __init__(self, client, name, resource=None, labels=None):
+        super().__init__(client=client, name=name, resource=resource, labels=labels)
+        self.client = client # Ensure client is consistently used
+
+    def emit(self, record):
+        try:
+            # 1. Create the basic log entry dictionary
+            log_entry = {
+                'message': record.msg,
+                'severity': record.levelname,
+                'name': record.name,
+                'pathname': record.filename,
+                'lineno': record.lineno,
+            }
+            if record.exc_info:
+                log_entry['exception_traceback'] = ''.join(
+                    traceback.format_exception(*record.exc_info)
+                )
+
+            # 2. Apply the formatter to the 'message' field if it's a dictionary
+            if isinstance(record.msg, dict):
+                formatted_message = self.formatter.format(record)
+                try:
+                    log_entry['message'] = json.loads(formatted_message)
+                except json.JSONDecodeError:
+                    log_entry['message'] = formatted_message
+            else:
+                log_entry['message'] = record.msg
+
+            # 3. Set the custom logName
+            log_entry['logName'] = f"projects/{self.client.project}/logs/{record.name}"
+
+            # 4. Send to Google Cloud Logging
+            super().emit(record)
+        except Exception as e:
+            self.handleError(record)
+
+class CustomGCPErrorReportingHandler(logging.Handler):
+    def __init__(self, client=None, level=logging.ERROR):
+        super().__init__(level)
+        self.error_client = error_reporting.Client() if client is None else client
+        self.propagate = True
+
+    def emit(self, record):
+        try:
+            if record.levelno >= logging.ERROR:
+                log_struct = {
+                    'message': self.format(record),
+                    'severity': record.levelname,
+                    'pathname': getattr(record, 'pathname', None),
+                    'lineno': getattr(record, 'lineno', None)
+                }
+                if record.exc_info:
+                    log_struct['exception'] = ''.join(
+                        traceback.format_exception(*record.exc_info)
+                    )
+                self.error_client.report(str(log_struct))
+        except Exception as e:
+            self.handleError(record)
+
+
+def add_gcp_cloud_logging(logger, formatter, client=None):
+    """Sets up Google Cloud Logging for the logger."""
+    client = client or cloud_logging.Client()
+    handler = CustomGCPLoggingHandler(client, logger.name)
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+
+def add_gcp_error_reporting(logger, client=None):
+    """Sets up Google Cloud Error Reporting for the logger."""
+    client = client or error_reporting.Client()
+    handler = CustomGCPErrorReportingHandler(client=client)
+    logger.addHandler(handler)
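
Usage note (reviewer's sketch, not part of the diff): the helpers above are meant to be attached to a standard logging.Logger. The minimal wiring below is based only on the functions defined in this hunk; it assumes Google Cloud application-default credentials are available, and the logger name and message fields are placeholders.

import logging

from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (
    CloudLogFormatter,
    add_gcp_cloud_logging,
    add_gcp_error_reporting,
)

# Placeholder logger name; the GCP project is taken from ambient credentials.
logger = logging.getLogger("review-demo")
logger.setLevel(logging.INFO)

# CloudLogFormatter renders records (including dict messages) as JSON;
# the two helpers attach the Cloud Logging and Error Reporting handlers.
add_gcp_cloud_logging(logger, CloudLogFormatter())
add_gcp_error_reporting(logger)

logger.info({"event": "demo", "version": "2.8.1"})
logger.error("demo error routed to Error Reporting")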
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/logging/utils_logging.py
@@ -0,0 +1,72 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+# pylint: disable=missing-class-docstring
+# pylint: disable=broad-exception-caught
+import logging
+from typing import List, Union
+from ipulse_shared_core_ftredge.enums import LoggingHandler
+from ipulse_shared_core_ftredge.logging.logging_handlers_and_formatters import (CloudLogFormatter,
+                                                                                 LocalLogFormatter,
+                                                                                 add_gcp_cloud_logging,
+                                                                                 add_gcp_error_reporting)
+
+
+def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandler, List[LoggingHandler]] = LoggingHandler.NONE):
+
+    """Creates and configures a logger with the specified handlers."""
+
+    logger = logging.getLogger(logger_name)
+    logger.setLevel(level)
+    cloud_formatter = CloudLogFormatter()
+
+    # Ensure logging_handler_providers is a list for consistent processing
+    if not isinstance(logging_handler_providers, list):
+        logging_handler_providers = [logging_handler_providers]
+
+    supported_remote_handlers = [
+        LoggingHandler.GCP_CLOUD_LOGGING,
+        LoggingHandler.GCP_ERROR_REPORTING,
+        LoggingHandler.LOCAL_STREAM,
+        LoggingHandler.NONE, # If NONE is considered a remote handler
+    ]
+
+    # Remote handlers
+
+    for handler_provider in logging_handler_providers:
+        if handler_provider in supported_remote_handlers:
+            if handler_provider == LoggingHandler.GCP_CLOUD_LOGGING:
+                add_gcp_cloud_logging(logger, cloud_formatter)
+            elif handler_provider == LoggingHandler.GCP_ERROR_REPORTING:
+                add_gcp_error_reporting(logger)
+            elif handler_provider == LoggingHandler.LOCAL_STREAM: # Handle local stream
+                local_handler = logging.StreamHandler()
+                local_handler.setFormatter(LocalLogFormatter())
+                logger.addHandler(local_handler)
+        else:
+            raise ValueError(
+                f"Unsupported logging provider: {handler_provider}. "
+                f"Supported providers: {[h.value for h in supported_remote_handlers]}"
+            )
+    return logger
+
+
+def log_error(msg,logger=None , print_out=False, exc_info=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.error(msg, exc_info=exc_info)
+
+def log_warning(msg, logger=None, print_out=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.warning(msg)
+
+
+def log_info(msg, logger=None, print_out=False):
+    if print_out:
+        print(msg)
+    if logger:
+        logger.info(msg)
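
Usage note (reviewer's sketch, not part of the diff): get_logger is the new entry point and accepts either a single LoggingHandler or a list of them. The example below uses LOCAL_STREAM so it runs without GCP credentials; the logger name is a placeholder, and it assumes the LoggingHandler enum exposes the members referenced in this hunk.

import logging

from ipulse_shared_core_ftredge.enums import LoggingHandler
from ipulse_shared_core_ftredge.logging import get_logger, log_error, log_info

# LOCAL_STREAM attaches a StreamHandler with LocalLogFormatter, so no cloud setup is needed.
logger = get_logger(
    "pulse-local-demo",
    level=logging.DEBUG,
    logging_handler_providers=[LoggingHandler.LOCAL_STREAM],
)

log_info("pipeline started", logger=logger, print_out=True)
log_error("pipeline step failed", logger=logger, exc_info=False)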
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/__init__.py
@@ -0,0 +1,3 @@
+# pylint: disable=missing-module-docstring
+
+from .utils_common import (list_as_strings)
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge/utils/utils_common.py
@@ -0,0 +1,10 @@
+# pylint: disable=missing-module-docstring
+# pylint: disable=missing-function-docstring
+# pylint: disable=logging-fstring-interpolation
+# pylint: disable=line-too-long
+# pylint: disable=missing-class-docstring
+# pylint: disable=broad-exception-caught
+
+def list_as_strings(*enums):
+    """Converts a list of Enum members to their string values."""
+    return [str(enum) for enum in enums]
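
Usage note (reviewer's sketch, not part of the diff): despite its name, list_as_strings takes enum members as separate positional arguments rather than a single list. The Color enum below is a hypothetical stand-in, since the enums previously bundled with this package are removed in 2.8.1.

from enum import Enum

from ipulse_shared_core_ftredge.utils import list_as_strings

class Color(Enum):  # hypothetical example enum, not shipped with the package
    RED = "red"
    BLUE = "blue"

    def __str__(self):
        return self.value

print(list_as_strings(Color.RED, Color.BLUE))  # ['red', 'blue']
# Passing a list instead would stringify the list object itself:
# list_as_strings([Color.RED, Color.BLUE]) -> ["[<Color.RED: 'red'>, <Color.BLUE: 'blue'>]"]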
--- ipulse_shared_core_ftredge-2.7.1/PKG-INFO
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ipulse_shared_core_ftredge
-Version: 2.7.1
+Version: 2.8.1
 Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
 Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
 Author: Russlan Ramdowar
@@ -10,4 +10,3 @@ Requires-Dist: python-dateutil~=2.8
 Requires-Dist: pytest~=7.1
 Requires-Dist: google-cloud-logging~=3.10.0
 Requires-Dist: google-cloud-error-reporting~=1.11.0
-Requires-Dist: google-cloud-bigquery~=3.24.0
--- /dev/null
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
@@ -0,0 +1,24 @@
+LICENCE
+README.md
+pyproject.toml
+setup.py
+src/ipulse_shared_core_ftredge/__init__.py
+src/ipulse_shared_core_ftredge.egg-info/PKG-INFO
+src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt
+src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt
+src/ipulse_shared_core_ftredge.egg-info/requires.txt
+src/ipulse_shared_core_ftredge.egg-info/top_level.txt
+src/ipulse_shared_core_ftredge/logging/__init__.py
+src/ipulse_shared_core_ftredge/logging/audit_log_firestore.py
+src/ipulse_shared_core_ftredge/logging/logging_handlers_and_formatters.py
+src/ipulse_shared_core_ftredge/logging/utils_logging.py
+src/ipulse_shared_core_ftredge/models/__init__.py
+src/ipulse_shared_core_ftredge/models/organisation.py
+src/ipulse_shared_core_ftredge/models/resource_catalog_item.py
+src/ipulse_shared_core_ftredge/models/user_auth.py
+src/ipulse_shared_core_ftredge/models/user_profile.py
+src/ipulse_shared_core_ftredge/models/user_profile_update.py
+src/ipulse_shared_core_ftredge/models/user_status.py
+src/ipulse_shared_core_ftredge/utils/__init__.py
+src/ipulse_shared_core_ftredge/utils/utils_common.py
+tests/test_utils_gcp.py
--- ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge.egg-info/requires.txt
+++ ipulse_shared_core_ftredge-2.8.1/src/ipulse_shared_core_ftredge.egg-info/requires.txt
@@ -3,4 +3,3 @@ python-dateutil~=2.8
 pytest~=7.1
 google-cloud-logging~=3.10.0
 google-cloud-error-reporting~=1.11.0
-google-cloud-bigquery~=3.24.0
--- ipulse_shared_core_ftredge-2.7.1/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-# ipulse_shared_core
-Shared Models like User, Organisation etc. Also includes shared enum_sets
-
-
-### Enums
-
-Contains majority of all Enums used in Pulse
-
-
-
-### Collectors i.e. Pipelinemon
-
-Collectors are smart Objects which are added to long running functions or pipelines for which we want to collect an overall number of successes, notices, warnings or errors.
-
-We can wait until the full pipeline is finished in order to write off a single Summary file from a Collector, or we can attach to it a logger, which will be reporting major status along the journey, which is often times better. Because if a function crashes midway through , all logs will be lost, and it would be hard to investigate if anythign has bee persisted and has to be rolled back. THis will require a lot of manual effort to recollect.
-
-Pipelinemon , short of Pipeline Monitoring system is a type of very powerful Collector which Russlan created specifically for Pulse Data Engineering pipelines.
-
-Pipelinemon writes all observation logs to Google CLoud Logging, and you have to setup a Log Sink (Router) which will send the Pipelinemon's observation logs to BigQuery.
-
-Great thing about Pipelinemin is its "context" keeping feature.
--- ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=missing-class-docstring
-
-from .enums_common_utils import (Status,
-                                 Unit,
-                                 Frequency)
-
-
-from .enums_modules import(Module,
-                           Domain)
-
-
-from .enums_module_fincore import (FinCoreCategory,
-                                   FincCoreSubCategory,
-                                   FinCoreRecordsCategory,
-                                   FinancialExchangeOrPublisher)
-
-from .enums_logging import (TargetLogs,
-                            LogLevel,
-                            LoggingHandlers)
-
-from .enums_data_eng import (DataPrimaryCategory,
-                             DataState,
-                             DatasetScope,
-                             DataSourceType,
-                             PipelineTriggerType,
-                             DataOperationType,
-                             MatchConditionType,
-                             DuplicationHandling,
-                             DuplicationHandlingStatus,
-                             CodingLanguage,
-                             ExecutionLocation,
-                             ExecutionComputeType)
-
-from .enums_solution_providers import (CloudProvider)
--- ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py
+++ /dev/null
@@ -1,107 +0,0 @@
-
-# pylint: disable=missing-module-docstring
-# pylint: disable=missing-function-docstring
-# pylint: disable=missing-class-docstring
-# pylint: disable=line-too-long
-
-from enum import Enum
-
-
-class Status(Enum):
-    OPEN = "open"
-    ACKNOWLEDGED = "acknowledged"
-    ESCALATED = "escalated"
-    IN_PROGRESS = "in_progress"
-    IN_REVIEW = "in_review"
-    RESOLVED = "resolved"
-    IGNORED = "ignored"
-    CANCELLED = "cancelled"
-    CLOSED = "closed"
-
-    def __str__(self):
-        return self.value
-
-### Exception during full exection, partially saved
-# Exception during ensemble pipeline; modifications collected in local object , nothing persisted
-# Exception during ensemble pipeline; modifications persisted , metadata failed
-# Exception during ensemble pipeline; modifications persisted , metadata persisted
-# Exception during ensemble pipeline; modifications persisted , metadata persisted
-
-
-class Unit(Enum):
-    MIX="MIX"
-    # Currency and Financial Values
-    USD = "USD" # United States Dollar
-    EUR = "EUR" # Euro
-    JPY = "JPY" # Japanese Yen
-    GBP = "GBP" # British Pound Sterling
-    AUD = "AUD" # Australian Dollar
-    CAD = "CAD" # Canadian Dollar
-    CHF = "CHF" # Swiss Franc
-    CNY = "CNY" # Chinese Yuan Renminbi
-    SEK = "SEK" # Swedish Krona
-    NZD = "NZD" # New Zealand Dollar
-    MXN = "MXN" # Mexican Peso
-    SGD = "SGD" # Singapore Dollar
-    HKD = "HKD" # Hong Kong Dollar
-    NOK = "NOK" # Norwegian Krone
-    KRW = "KRW" # South Korean Won
-    RUB = "RUB" # Russian Ruble
-    INR = "INR" # Indian Rupee
-    BRL = "BRL" # Brazilian Real
-    ZAR = "ZAR" # South African Rand
-    CURRENCY = "currency" # General currency, when specific currency is not needed
-
-    # Stock Market and Investments
-    SHARES = "shares" # Number of shares
-    PERCENT = "prcnt" # Percentage, used for rates and ratios
-    BPS = "bps" # Basis points, often used for interest rates and financial ratios
-
-    # Volume and Quantitative Measurements
-    VOLUME = "volume" # Trading volume in units
-    MILLIONS = "mills" # Millions, used for large quantities or sums
-    BILLIONS = "bills" # Billions, used for very large quantities or sums
-
-    # Commodity Specific Units
-    BARRELS = "barrels" # Barrels, specifically for oil and similar liquids
-    TONNES = "tonnes" # Tonnes, for bulk materials like metals or grains
-    TROY_OUNCES = "troy_oz" # Troy ounces, specifically for precious metals
-
-    # Real Estate and Physical Properties
-    SQUARE_FEET = "sq_ft" # Square feet, for area measurement in real estate
-    METER_SQUARE = "m2" # Square meters, for area measurement in real estate
-    ACRES = "acres" # Acres, used for measuring large plots of land
-
-    # Miscellaneous and Other Measures
-    UNITS = "units" # Generic units, applicable when other specific units are not suitable
-    COUNT = "count" # Count, used for tallying items or events
-    INDEX_POINTS = "index_pnts" # Index points, used in measuring indices like stock market indices
-    RATIO = "ratio" # Ratio, for various financial ratios
-
-    def __str__(self):
-        return self.value
-
-class Frequency(Enum):
-    ONE_MIN = "1min"
-    FIVE_MIN="5min"
-    FIFTEEN_MIN="15min"
-    THIRTY_MIN = "30min"
-    ONE_H = "1h"
-    TWO_H = "2h"
-    SIX_H = "6h"
-    TWELVE_H = "12h"
-    FOUR_H = "4h"
-    EOD="eod"
-    ONE_D = "1d"
-    TWO_D = "2d"
-    THREE_D = "3d"
-    ONE_W = "1w"
-    ONE_M = "1m"
-    TWO_M="2m"
-    THREE_M="3m"
-    SIX_M="6m"
-    ONE_Y="1y"
-    THREE_Y="3y"
-
-    def __str__(self):
-        return self.value