ipulse-shared-core-ftredge 2.6.1__tar.gz → 2.7.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipulse-shared-core-ftredge might be problematic. Click here for more details.

Files changed (47) hide show
  1. {ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge.egg-info → ipulse_shared_core_ftredge-2.7.1}/PKG-INFO +1 -1
  2. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/setup.py +1 -1
  3. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/__init__.py +10 -9
  4. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/enums/__init__.py +12 -7
  5. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/enums/enums_common_utils.py +9 -0
  6. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +313 -0
  7. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/enums/enums_logs.py → ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_logging.py +30 -1
  8. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/enums/enums_module_fincore.py +16 -2
  9. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/enums/enums_modules.py +6 -0
  10. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/enums/enums_cloud.py → ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/enums/enums_solution_providers.py +11 -4
  11. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/__init__.py +11 -7
  12. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/logs/context_log.py +2 -3
  13. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/logs/get_logger.py +103 -0
  14. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud.py +53 -0
  15. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +442 -0
  16. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +166 -0
  17. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +27 -0
  18. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/utils_collector_pipelinemon.py +2 -2
  19. ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +180 -0
  20. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/utils_templates_and_schemas.py +2 -2
  21. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1/src/ipulse_shared_core_ftredge.egg-info}/PKG-INFO +1 -1
  22. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge.egg-info/SOURCES.txt +2 -2
  23. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/enums/enums_data_eng.py +0 -109
  24. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/logs/get_logger.py +0 -76
  25. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/utils_cloud.py +0 -44
  26. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp.py +0 -311
  27. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_gcp_with_collectors.py +0 -169
  28. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/utils_cloud_with_collectors.py +0 -26
  29. ipulse_shared_core_ftredge-2.6.1/src/ipulse_shared_core_ftredge/utils/utils_common.py +0 -145
  30. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/LICENCE +0 -0
  31. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/README.md +0 -0
  32. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/pyproject.toml +0 -0
  33. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/setup.cfg +0 -0
  34. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/enums/pulse_enums.py +0 -0
  35. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/__init__.py +0 -0
  36. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/organisation.py +0 -0
  37. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/resource_catalog_item.py +0 -0
  38. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/user_auth.py +0 -0
  39. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/user_profile.py +0 -0
  40. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/user_profile_update.py +0 -0
  41. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/models/user_status.py +0 -0
  42. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/logs/__init__.py +0 -0
  43. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge/utils/logs/audit_log_firestore.py +0 -0
  44. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge.egg-info/dependency_links.txt +0 -0
  45. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge.egg-info/requires.txt +0 -0
  46. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/src/ipulse_shared_core_ftredge.egg-info/top_level.txt +0 -0
  47. {ipulse_shared_core_ftredge-2.6.1 → ipulse_shared_core_ftredge-2.7.1}/tests/test_utils_gcp.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ipulse_shared_core_ftredge
3
- Version: 2.6.1
3
+ Version: 2.7.1
4
4
  Summary: Shared Core models and Logger util for the Pulse platform project. Using AI for financial advisory and investment management.
5
5
  Home-page: https://github.com/TheFutureEdge/ipulse_shared_core
6
6
  Author: Russlan Ramdowar
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
3
3
 
4
4
  setup(
5
5
  name='ipulse_shared_core_ftredge',
6
- version='2.6.1',
6
+ version='2.7.1',
7
7
  package_dir={'': 'src'}, # Specify the source directory
8
8
  packages=find_packages(where='src'), # Look for packages in 'src'
9
9
  install_requires=[
@@ -4,19 +4,20 @@ from .models import ( UserAuth, UserProfile,
4
4
 
5
5
  from .enums import (TargetLogs,LogLevel, Status, Unit, Frequency,
6
6
  Module, Domain, FinCoreCategory, FincCoreSubCategory,
7
- FinCoreRecordsCategory, ExchangeOrPublisher,
8
- DataPrimaryCategory, DataState, DatasetPortionType,
9
- DataSourceType,PipelineTriggerType, ExecutionLocation,
10
- DataEventType, ComputeType, CloudProvider)
7
+ FinCoreRecordsCategory, FinancialExchangeOrPublisher,
8
+ DataPrimaryCategory, DataState, DatasetScope,
9
+ DataSourceType,PipelineTriggerType,DataOperationType,
10
+ MatchConditionType, DuplicationHandling, DuplicationHandlingStatus,
11
+ CodingLanguage, ExecutionLocation, ExecutionComputeType,
12
+ CloudProvider,LoggingHandlers)
11
13
  from .utils import (get_logger,
12
14
  save_json_locally_extended,
13
15
  write_json_to_cloud_storage_extended,
14
- write_json_to_cloud_storage_with_pipelinemon_extended,
16
+ write_json_to_gcs_extended,
17
+ write_csv_to_gcs,
15
18
  read_json_from_cloud_storage,
19
+ read_csv_from_gcs,
20
+ read_json_from_gcs,
16
21
  check_format_against_schema_template,
17
22
  create_bigquery_schema_from_json,
18
- read_csv_from_gcs, read_json_from_gcs,
19
- write_csv_to_gcs,
20
- write_json_to_gcs_extended,
21
- write_json_to_gcs_with_pipelinemon_extended,
22
23
  Pipelinemon, ContextLog)
@@ -15,18 +15,23 @@ from .enums_modules import(Module,
15
15
  from .enums_module_fincore import (FinCoreCategory,
16
16
  FincCoreSubCategory,
17
17
  FinCoreRecordsCategory,
18
- ExchangeOrPublisher)
18
+ FinancialExchangeOrPublisher)
19
19
 
20
- from .enums_logs import (TargetLogs,
21
- LogLevel)
20
+ from .enums_logging import (TargetLogs,
21
+ LogLevel,
22
+ LoggingHandlers)
22
23
 
23
24
  from .enums_data_eng import (DataPrimaryCategory,
24
25
  DataState,
25
- DatasetPortionType,
26
+ DatasetScope,
26
27
  DataSourceType,
27
28
  PipelineTriggerType,
29
+ DataOperationType,
30
+ MatchConditionType,
31
+ DuplicationHandling,
32
+ DuplicationHandlingStatus,
33
+ CodingLanguage,
28
34
  ExecutionLocation,
29
- DataEventType,
30
- ComputeType)
35
+ ExecutionComputeType)
31
36
 
32
- from .enums_cloud import (CloudProvider)
37
+ from .enums_solution_providers import (CloudProvider)
@@ -18,6 +18,9 @@ class Status(Enum):
18
18
  CANCELLED = "cancelled"
19
19
  CLOSED = "closed"
20
20
 
21
+ def __str__(self):
22
+ return self.value
23
+
21
24
  ### Exception during full execution, partially saved
22
25
  # Exception during ensemble pipeline; modifications collected in local object , nothing persisted
23
26
  # Exception during ensemble pipeline; modifications persisted , metadata failed
@@ -75,6 +78,9 @@ class Unit(Enum):
75
78
  INDEX_POINTS = "index_pnts" # Index points, used in measuring indices like stock market indices
76
79
  RATIO = "ratio" # Ratio, for various financial ratios
77
80
 
81
+ def __str__(self):
82
+ return self.value
83
+
78
84
  class Frequency(Enum):
79
85
  ONE_MIN = "1min"
80
86
  FIVE_MIN="5min"
@@ -96,3 +102,6 @@ class Frequency(Enum):
96
102
  SIX_M="6m"
97
103
  ONE_Y="1y"
98
104
  THREE_Y="3y"
105
+
106
+ def __str__(self):
107
+ return self.value
@@ -0,0 +1,313 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=missing-class-docstring
4
+ from enum import Enum
5
+
6
+
7
+ class DataPrimaryCategory(Enum):
8
+ SIMULATION="simulation" # Simulation data, based on models and simulations
9
+ HISTORIC = "historic" # Historical data, usually accurate and complete
10
+ REALTIME="realtime" # Real-time data, not always certain, can have error
11
+ ANALYTICS="analytics" # Analytical data and modelling, derived from historical and prediction data. Normally intended to be human-readable, as opposed to Features
12
+ FEATURES="features" # Feature data, used for training models
13
+ PREDICTIVE="predictive" # Predictive data, based on models and simulations
14
+
15
+ def __str__(self):
16
+ return self.value
17
+
18
+ class DataState(Enum):
19
+ RAW = "raw"
20
+ FORMATTED= "formatted"
21
+ CLEANED = "cleaned"
22
+ PROCESSED = "processed"
23
+ SIMULATED = "simulated"
24
+ ANALYZED = "analyzed"
25
+ VALIDATED = "validated"
26
+ INVALID = "invalid"
27
+
28
+ def __str__(self):
29
+ return self.value
30
+
31
+ class DatasetScope(Enum):
32
+ FULL = "full_dataset"
33
+ INCREMENTAL = "incremental_dataset"
34
+ PARTIAL = "partial_dataset"
35
+ UNKNOWN = "unknown_dataset"
36
+
37
+ def __str__(self):
38
+ return self.value
39
+
40
+
41
+ class DataSourceType(Enum):
42
+ # --- General ---
43
+ API = "api"
44
+ RPC = "rpc"
45
+ GRPC = "grpc"
46
+ WEBSITE = "website"
47
+ # --SQL Databases--
48
+ ORACLE = "oracle"
49
+ POSTGRESQL = "postgresql"
50
+ SQLSERVER = "sqlserver"
51
+ MYSQL = "mysql"
52
+ BIGQUERY = "bigquery"
53
+ SNOWFLAKE = "snowflake"
54
+ REDSHIFT = "redshift"
55
+ ATHENA = "athena"
56
+ # --NOSQL Databases--
57
+ MONGODB = "mongodb"
58
+ REDIS = "redis"
59
+ CASSANDRA = "cassandra"
60
+ NEO4J = "neo4j"
61
+ FIRESTORE = "firestore"
62
+ DYNAMODB = "dynamodb"
63
+ # --NEWSQL Databases--
64
+ COCKROACHDB = "cockroachdb"
65
+ SPANNER = "spanner"
66
+ # --- Messaging ---
67
+ MESSAGING_KAFKA = "messaging_kafka"
68
+ MESSAGING_SQS = "messaging_sqs"
69
+ MESSAGING_PUBSUB = "messaging_pubsub"
70
+ # --- Real-time Communication ---
71
+ REALTIME_WEBSOCKET = "websocket"
72
+ # --- Notifications ---
73
+ NOTIFICATION_WEBHOOK = "webhook"
74
+ # --- Storage ---
75
+ LOCAL_STORAGE = "local_storage"
76
+ INMEMORY = "inmemory"
77
+ GCS = "gcs"
78
+ S3 = "s3"
79
+ AZURE_BLOB = "azure_blob"
80
+ HDFS = "hdfs"
81
+ # --- Files ---
82
+ FILE = "file"
83
+ FILE_CSV = "file_csv"
84
+ FILE_EXCEL = "file_excel"
85
+ FILE_JSON = "file_json"
86
+ FILE_PARQUET = "file_parquet"
87
+ FILE_ORC = "file_orc"
88
+ FILE_AVRO = "file_avro"
89
+ FILE_TEXT = "file_text"
90
+ FILE_IMAGE = "file_image"
91
+ FILE_VIDEO = "file_video"
92
+ FILE_AUDIO = "file_audio"
93
+ FILE_PDF = "file_pdf"
94
+ FILE_WORD = "file_word"
95
+ FILE_POWERPOINT = "file_powerpoint"
96
+ FILE_HTML = "file_html"
97
+ FILE_MARKDOWN = "file_markdown"
98
+ FILE_XML = "file_xml"
99
+ FILE_YAML = "file_yaml"
100
+ FILE_TOML = "file_toml"
101
+ FILE_OTHER = "file_other"
102
+
103
+ def __str__(self):
104
+ return self.value
105
+
106
+ class PipelineTriggerType(Enum):
107
+ MANUAL = "manual"
108
+ SCHEDULER = "scheduler"
109
+ SCHEDULER_MAIN = "scheduler_main"
110
+ SCHEDULER_FALLBACK = "scheduler_fallback"
111
+ SCHEDULER_RETRY = "scheduler_retry"
112
+ SCHEDULED_VERIFICATION = "scheduled_verification"
113
+ EVENT_GCS_UPLOAD= "event_gcs_upload"
114
+ EVENT_PUBSUB= "event_pubsub"
115
+ ANOTHER_PIPELINE = "another_pipeline"
116
+
117
+ def __str__(self):
118
+ return self.value
119
+
120
+
121
+ class DataOperationType(Enum):
122
+ # --- Read operations ---
123
+ SOURCE="source" # For reading data from source
124
+ QUERY = "query" # For databases or systems that support queries
125
+ SCAN = "scan" # For reading all data sequentially (e.g., files)
126
+ READ= "read" # For general read operations
127
+ GET= "get" # For getting a single record
128
+ IMPORT = "import"
129
+ # --- Transform operations ---
130
+ TRANSFORM = "transform"
131
+ PREPROCESS = "preprocess"
132
+ ENRICH = "enrich"
133
+ JOIN = "join"
134
+ AGGREGATE = "aggregate"
135
+ FILTER = "filter"
136
+ SORT = "sort"
137
+ GROUP = "group"
138
+ # --- Write operations ---
139
+ POST= "post" # For creating new records
140
+ PUT= "put"
141
+ PATCH= "patch"
142
+ WRITE = "write"
143
+ WRITE_TO_FILE = "write_to_file"
144
+ APPEND = "append"
145
+ UPSERT = "upsert"
146
+ INSERT = "insert"
147
+ OVERWRITE = "overwrite"
148
+ INCREMENT = "increment"
149
+ UPDATE = "update"
150
+ DELETE = "delete"
151
+ EXPORT = "export"
152
+ COPY = "copy"
153
+ MERGE = "merge" ## For merging data, combines INSERT, UPDATE, DELETE operations
154
+ MERGE_UPSERT = "merge_upsert" ## For merging data, combines INSERT, UPDATE, DELETE operations
155
+ BIGQUERY_WRITE_APPEND = "bigquery_write_append" # For appending data to the table if it exists, specific to BIGQUERY
156
+ BIGQUERY_WRITE_TRUNCATE = "bigquery_write_truncate" # For emptying the table first and then writing data, specific to BIGQUERY
157
+ BIGQUERY_WRITE_EMPTY = "bigquery_write_empty" # For writing data only if the table is empty, fails otherwise, specific to BIGQUERY
158
+ # --- Create operations ---
159
+ CREATE_TABLE = "create_table"
160
+ CREATE_DATABASE = "create_database"
161
+ CREATE_COLLECTION = "create_collection"
162
+ CREATE_INDEX = "create_index"
163
+ CREATE_SCHEMA = "create_schema"
164
+ CREATE_MODEL = "create_model"
165
+ CREATE_VIEW = "create_view"
166
+ # --- Alter operations ---
167
+ ALTER_TABLE = "alter_table"
168
+ ALTER_DATABASE = "alter_database"
169
+ ALTER_COLLECTION = "alter_collection"
170
+ ALTER_INDEX = "alter_index"
171
+ ALTER_SCHEMA = "alter_schema"
172
+ ALTER_MODEL = "alter_model"
173
+ ALTER_VIEW = "alter_view"
174
+ # --- Drop operations ---
175
+ DROP_TABLE = "drop_table"
176
+ DROP_DATABASE = "drop_database"
177
+ DROP_COLLECTION = "drop_collection"
178
+ DROP_INDEX = "drop_index"
179
+ DROP_SCHEMA = "drop_schema"
180
+ DROP_MODEL = "drop_model"
181
+ DROP_VIEW = "drop_view"
182
+ # --- Truncate operations ---
183
+ TRUNCATE_TABLE = "truncate_table"
184
+ TRUNCATE_COLLECTION = "truncate_collection"
185
+
186
+ def __str__(self):
187
+ return self.value
188
+ class MatchConditionType(Enum):
189
+ EXACT = "exact"
190
+ PREFIX = "prefix"
191
+ SUFFIX = "suffix"
192
+ CONTAINS = "contains"
193
+ REGEX = "regex"
194
+ IN_RANGE = "in_range"
195
+ NOT_IN_RANGE = "not_in_range"
196
+ GREATER_THAN = "greater_than"
197
+ LESS_THAN = "less_than"
198
+ GREATER_THAN_OR_EQUAL = "greater_than_or_equal"
199
+ LESS_THAN_OR_EQUAL = "less_than_or_equal"
200
+ IN_LIST = "in_list"
201
+ NOT_IN_LIST = "not_in_list"
202
+ ON_FIELD_MATCH = "on_field_match"
203
+ ON_FIELD_EQUAL = "on_field_equal"
204
+ ON_FIELDS_EQUAL_TO = "on_fields_equal_to"
205
+ ON_FIELDS_COMBINATION = "on_fields_combination"
206
+ NOT_APPLICABLE = "not_applicable"
207
+
208
+ def __str__(self):
209
+ return self.value
210
+
211
+
212
+ class DuplicationHandling(Enum):
213
+ RAISE_ERROR = "raise_error"
214
+ OVERWRITE = "overwrite"
215
+ INCREMENT = "increment"
216
+ SKIP = "skip"
217
+ SYSTEM_DEFAULT = "system_default"
218
+ ALLOW = "allow" ## applicable for databases allowing this operation, e.g. BigQuery
219
+ MERGE_DEFAULT = "merge_default"
220
+ MERGE_PRESERVE_SOURCE_ON_DUPLICATES = "merge_preserve_source_on_dups"
221
+ MERGE_PRESERVE_TARGET_ON_DUPLICATES = "merge_preserve_target_on_dups"
222
+ MERGE_PRESERVE_BOTH_ON_DUPLICATES = "merge_preserve_both_on_dups"
223
+ MERGE_RAISE_ERROR_ON_DUPLICATES = "merge_raise_error_on_dups"
224
+ MERGE_CUSTOM = "merge_custom"
225
+
226
+ def __str__(self):
227
+ return self.value
228
+
229
+
230
+ class DuplicationHandlingStatus(Enum):
231
+ ALLOWED = "allowed"
232
+ RAISED_ERROR = "raised_error"
233
+ SYSTEM_DEFAULT = "system_default"
234
+ OVERWRITTEN = "overwritten"
235
+ SKIPPED = "skipped"
236
+ INCREMENTED = "incremented"
237
+ OPERATION_CANCELLED = "operation_cancelled"
238
+ MERGED = "merged"
239
+ MERGED_PRESERVED_SOURCE = "merged_preserved_source"
240
+ MERGED_PRESERVED_TARGET = "merged_preserved_target"
241
+ MERGED_PRESERVED_BOTH = "merged_preserved_both"
242
+ MERGED_RAISED_ERROR = "merged_raised_error"
243
+ MERGED_CUSTOM = "merged_custom"
244
+ NO_DUPLICATES = "no_duplicates"
245
+ UNKNOWN = "unknown"
246
+ UNEXPECTED_ERROR= "unexpected_error"
247
+ CONDITIONAL_ERROR = "conditional_error"
248
+ NOT_APPLICABLE = "not_applicable"
249
+
250
+ def __str__(self):
251
+ return self.value
252
+
253
+ class CodingLanguage(Enum):
254
+ PYTHON = "python"
255
+ NODEJS = "nodejs"
256
+ JAVA = "java"
257
+ JAVASCRIPT = "javascript"
258
+ TYPESCRIPT = "typescript"
259
+ REACTJS = "reactjs"
260
+
261
+ def __str__(self):
262
+ return self.value
263
+
264
+
265
+ class ExecutionLocation(Enum):
266
+ # Add local execution environments
267
+ LOCAL_SCRIPT = "local_script"
268
+ LOCAL_JUPYTER_NOTEBOOK = "local_jupyter_notebook"
269
+ LOCAL_SERVER = "local_server"
270
+ LOCAL_DOCKER = "local_docker" # Add local Docker environment
271
+ LOCAL_KUBERNETES = "local_kubernetes" # Add local Kubernetes environment
272
+
273
+ LOCAL_GCP_CLOUD_FUNCTION = "local_gcp_cloud_function"
274
+ LOCAL_GCP_CLOUD_RUN = "local_gcp_cloud_run"
275
+
276
+ # Add GCP execution environments
277
+ CLOUD_GCP_JUPYTER_NOTEBOOK = "cloud_gcp_jupyter_notebook"
278
+ CLOUD_GCP_CLOUD_FUNCTION = "cloud_gcp_cloud_function"
279
+ CLOUD_GCP_CLOUD_RUN = "cloud_gcp_cloud_run"
280
+ CLOUD_GCP_COMPUTE_ENGINE = "cloud_gcp_compute_engine"
281
+ CLOUD_GCP_DATAPROC = "cloud_gcp_dataproc"
282
+ CLOUD_GCP_DATAFLOW = "cloud_gcp_dataflow"
283
+ CLOUD_GCP_BIGQUERY = "cloud_gcp_bigquery"
284
+ # Add AWS execution environments
285
+ CLOUD_AWS_LAMBDA = "cloud_aws_lambda"
286
+ CLOUD_AWS_EC2 = "cloud_aws_ec2"
287
+ CLOUD_AWS_EMR = "cloud_aws_emr"
288
+ CLOUD_AWS_GLUE = "cloud_aws_glue"
289
+ CLOUD_AWS_ATHENA = "cloud_aws_athena"
290
+ CLOUD_AWS_REDSHIFT = "cloud_aws_redshift"
291
+ # Add Azure execution environments
292
+ CLOUD_AZURE_FUNCTIONS = "cloud_azure_functions"
293
+ CLOUD_AZURE_VIRTUAL_MACHINES = "cloud_azure_virtual_machines"
294
+ CLOUD_AZURE_SYNAPSE_ANALYTICS = "cloud_azure_synapse_analytics"
295
+ CLOUD_AZURE_DATA_FACTORY = "cloud_azure_data_factory"
296
+
297
+ def __str__(self):
298
+ return self.value
299
+
300
+ class ExecutionComputeType(Enum):
301
+
302
+ CPU_INTEL = "cpu_intel"
303
+ CPU_AMD = "cpu_amd"
304
+ CPU_ARM = "cpu_arm"
305
+ GPU_NVIDIA = "gpu_nvidia"
306
+ GPU_AMD = "gpu_amd"
307
+ GPU_INTEL = "gpu_intel"
308
+ TPU_GOOGLE = "tpu_google"
309
+ TPU_INTEL = "tpu_intel"
310
+ TPU_AMD = "tpu_amd"
311
+
312
+ def __str__(self):
313
+ return self.value
@@ -4,6 +4,29 @@
4
4
  from enum import Enum
5
5
 
6
6
 
7
+ class LoggingHandlers(Enum):
8
+ NONE = "none" # No remote handler
9
+ LOCAL_STREAM = "local_stream" # Local stream handler
10
+ GCP_CLOUD_LOGGING = "gcp_cloud_logging"
11
+ GCP_ERROR_REPORTING = "gcp_error_reporting"
12
+ GCP_FIREBASE = "gcp_firebase"
13
+ AWS_CLOUD_WATCH = "aws_cloud_watch"
14
+ AZURE_MONITOR = "azure_monitor"
15
+ AZURE_APPLICATION_INSIGHTS = "azure_application_insights"
16
+ IBM_LOG_ANALYTICS = "ibm_log_analytics"
17
+ ALIBABA_LOG_SERVICE = "alibaba_log_service"
18
+ LOGGLY = "loggly"
19
+ DATADOG = "datadog"
20
+ NEW_RELIC = "new_relic"
21
+ SENTRY = "sentry"
22
+ SUMOLOGIC = "sumologic"
23
+ # --- Other ---
24
+ SYSLOG = "syslog" # For system logs
25
+ CUSTOM = "custom" # For a user-defined remote handler
26
+ OTHER = "other"
27
+
28
+ def __str__(self):
29
+ return self.value
7
30
 
8
31
  class TargetLogs(Enum):
9
32
  MIXED="mixed_logs"
@@ -14,6 +37,9 @@ class TargetLogs(Enum):
14
37
  WARNINGS_AND_ERRORS = "warn_n_err_logs"
15
38
  ERRORS = "error_logs"
16
39
 
40
+ def __str__(self):
41
+ return self.value
42
+
17
43
  class LogLevel(Enum):
18
44
  """
19
45
  Standardized notice levels for data engineering pipelines,
@@ -76,4 +102,7 @@ class LogLevel(Enum):
76
102
  CRITICAL=600 # General critical error, requires immediate action
77
103
  CRITICAL_SYSTEM_FAILURE = 601 # System-level failure (e.g., infrastructure, stackoverflow ), requires immediate action
78
104
 
79
- UNKNOWN=1001 # Unknown error, should not be used in normal operation
105
+ UNKNOWN=1001 # Unknown error, should not be used in normal operation
106
+
107
+ def __str__(self):
108
+ return self.value
@@ -15,6 +15,11 @@ class FinCoreCategory(Enum):
15
15
  POLITICS="poltcs"
16
16
  OTHER="other"
17
17
 
18
+ def __str__(self):
19
+ return self.value
20
+
21
+
22
+
18
23
  class FincCoreSubCategory(Enum):
19
24
  STOCKS = "stocks"
20
25
  BONDS = "bonds"
@@ -30,6 +35,9 @@ class FincCoreSubCategory(Enum):
30
35
  FUNDAMENTALS = "fundam"
31
36
  OTHER = "othr"
32
37
 
38
+ def __str__(self):
39
+ return self.value
40
+
33
41
  class FinCoreRecordsCategory(Enum):
34
42
  PRICE="pric"
35
43
  SPOT= "spot"
@@ -52,7 +60,13 @@ class FinCoreRecordsCategory(Enum):
52
60
  TWEET="tweet"
53
61
  OTHER="othr"
54
62
 
55
- class ExchangeOrPublisher(Enum):
63
+ def __str__(self):
64
+ return self.value
65
+
66
+ class FinancialExchangeOrPublisher(Enum):
56
67
  CC="cc"
57
68
  US="us"
58
- NASDAQ="nasdaq"
69
+ NASDAQ="nasdaq"
70
+
71
+ def __str__(self):
72
+ return self.value
@@ -14,6 +14,9 @@ class Module(Enum):
14
14
  TRADING="trading"
15
15
  SIMULATION="simulation"
16
16
 
17
+ def __str__(self):
18
+ return self.value
19
+
17
20
  class Domain(Enum):
18
21
  FINCORE="fincore"
19
22
  GYMCORE="gymcore"
@@ -23,3 +26,6 @@ class Domain(Enum):
23
26
  POLICORE="policore"
24
27
  CUSTOM="custom"
25
28
 
29
+ def __str__(self):
30
+ return self.value
31
+
@@ -6,12 +6,19 @@
6
6
 
7
7
  from enum import Enum
8
8
 
9
-
10
9
  class CloudProvider(Enum):
11
- GCP = "gcp"
12
- AWS = "aws"
13
- AZURE = "azure"
10
+ GCP = "cloud_gcp"
11
+ AWS = "cloud_aws"
12
+ AZURE = "cloud_azure"
13
+ IBM = "cloud_ibm"
14
+ ALIBABA = "cloud_alibaba"
14
15
  NO_CLOUD = "no_cloud"
15
16
  CLOUD_AGNOSTIC = "cloud_agnostic"
16
17
  OTHER = "other"
17
18
  UNKNWON = "unknown"
19
+
20
+ def __str__(self):
21
+ return self.value
22
+
23
+
24
+
@@ -1,19 +1,23 @@
1
1
  # pylint: disable=missing-module-docstring
2
- from .utils_common import (save_json_locally_extended)
2
+
3
+ from .logs import (ContextLog, get_logger)
4
+ from .utils_common import (save_json_locally_extended,
5
+ log_error,
6
+ log_warning,
7
+ log_info,
8
+ prepare_full_file_path)
9
+
3
10
  from .utils_collector_pipelinemon import ( Pipelinemon)
4
11
 
5
- from .utils_cloud_gcp import (setup_gcp_logging,
12
+ from .utils_cloud_gcp import (add_gcp_cloud_logging,
13
+ add_gcp_error_reporting,
6
14
  create_bigquery_schema_from_json,
7
15
  read_csv_from_gcs, read_json_from_gcs,
8
16
  write_csv_to_gcs,write_json_to_gcs_extended)
9
17
 
10
- from .utils_cloud_gcp_with_collectors import (write_json_to_gcs_with_pipelinemon_extended )
11
18
 
12
19
  from .utils_cloud import (write_json_to_cloud_storage_extended,
13
20
  read_json_from_cloud_storage)
14
- from .utils_cloud_with_collectors import (write_json_to_cloud_storage_with_pipelinemon_extended)
15
21
 
16
22
 
17
- from .utils_templates_and_schemas import (check_format_against_schema_template)
18
-
19
- from .logs import (ContextLog, get_logger)
23
+ from .utils_templates_and_schemas import (check_format_against_schema_template)
@@ -10,8 +10,7 @@ import traceback
10
10
  import json
11
11
  from datetime import datetime, timezone
12
12
  from typing import List
13
- from ipulse_shared_core_ftredge.enums.enums_common_utils import Status
14
- from ipulse_shared_core_ftredge.enums.enums_logs import LogLevel
13
+ from ipulse_shared_core_ftredge import Status, LogLevel
15
14
 
16
15
  ############################################################################
17
16
  ##################### SETTING UP custom LOGGING format= DICT ##########################
@@ -28,7 +27,7 @@ class ContextLog:
28
27
  e_type = type(e).__name__ if e_type is None else e_type
29
28
  e_message = str(e) if e_message is None else e_message
30
29
  e_traceback = traceback.format_exc() if e_traceback is None else e_traceback
31
- elif e_traceback is None and (e_type or e_message):
30
+ elif (e_traceback is None or e_traceback== "") and (e_type or e_message):
32
31
  e_traceback = traceback.format_exc()
33
32
 
34
33
  self.level = level