ipulse-shared-base-ftredge 2.2.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24) hide show
  1. ipulse_shared_base_ftredge-2.2.2/LICENCE +19 -0
  2. ipulse_shared_base_ftredge-2.2.2/PKG-INFO +9 -0
  3. ipulse_shared_base_ftredge-2.2.2/README.md +8 -0
  4. ipulse_shared_base_ftredge-2.2.2/pyproject.toml +3 -0
  5. ipulse_shared_base_ftredge-2.2.2/setup.cfg +4 -0
  6. ipulse_shared_base_ftredge-2.2.2/setup.py +17 -0
  7. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/__init__.py +32 -0
  8. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/__init__.py +37 -0
  9. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_common_utils.py +106 -0
  10. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_data_eng.py +335 -0
  11. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_logging.py +104 -0
  12. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_module_fincore.py +69 -0
  13. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_pulse.py +30 -0
  14. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/enums_solution_providers.py +21 -0
  15. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/enums/pulse_enums.py +182 -0
  16. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/logging/__init__.py +1 -0
  17. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/logging/audit_log_firestore.py +12 -0
  18. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/logging/logging_handlers_and_formatters.py +144 -0
  19. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge/logging/utils_logging.py +78 -0
  20. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge.egg-info/PKG-INFO +9 -0
  21. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge.egg-info/SOURCES.txt +22 -0
  22. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge.egg-info/dependency_links.txt +1 -0
  23. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge.egg-info/requires.txt +2 -0
  24. ipulse_shared_base_ftredge-2.2.2/src/ipulse_shared_base_ftredge.egg-info/top_level.txt +1 -0
@@ -0,0 +1,19 @@
1
+ Copyright (c) 2023 Future Edge Group
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy
4
+ of this software and associated documentation files (the "Software"), to deal
5
+ in the Software without restriction, including without limitation the rights
6
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7
+ copies of the Software, and to permit persons to whom the Software is
8
+ furnished to do so, subject to the following conditions:
9
+
10
+ The above copyright notice and this permission notice shall be included in all
11
+ copies or substantial portions of the Software.
12
+
13
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19
+ SOFTWARE.
@@ -0,0 +1,9 @@
1
+ Metadata-Version: 2.1
2
+ Name: ipulse_shared_base_ftredge
3
+ Version: 2.2.2
4
+ Summary: Shared Enums, Logger and other Base Utils for Pulse Platform . Using AI for Asset Management and Financial Advisory.
5
+ Home-page: https://github.com/TheFutureEdge/ipulse_shared_base
6
+ Author: Russlan Ramdowar
7
+ License-File: LICENCE
8
+ Requires-Dist: google-cloud-logging~=3.10.0
9
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
@@ -0,0 +1,8 @@
1
+ # ipulse_shared_enums
2
+ Shared Enums for the whole project
3
+
4
+
5
+
6
+ ### Enums
7
+
8
+ Contains the majority of the Enums used in Pulse
@@ -0,0 +1,3 @@
1
+ [build-system]
2
+ requires = ["setuptools", "wheel"]
3
+ build-backend = "setuptools.build_meta"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,17 @@
1
+ # pylint: disable=import-error
2
+ from setuptools import setup, find_packages
3
+
4
# Build/packaging configuration for the ipulse_shared_base_ftredge distribution.
# Uses the "src layout": importable packages live under src/.
setup(
    name='ipulse_shared_base_ftredge',
    version='2.2.2',
    package_dir={'': 'src'},              # root package namespace maps to src/
    packages=find_packages(where='src'),  # auto-discover packages under src/
    install_requires=[
        'google-cloud-logging~=3.10.0',
        'google-cloud-error-reporting~=1.11.0',
    ],
    author='Russlan Ramdowar',
    description='Shared Enums, Logger and other Base Utils for Pulse Platform . Using AI for Asset Management and Financial Advisory.',
    url='https://github.com/TheFutureEdge/ipulse_shared_base',
)
@@ -0,0 +1,32 @@
1
+ from .enums import (LogLevel,
2
+ LoggingHandler,
3
+ DataSourceType,
4
+ DuplicationHandling,
5
+ MatchConditionType,
6
+ Status,
7
+ Unit,
8
+ Frequency,
9
+ Module,
10
+ Sector,
11
+ AttributeType,
12
+ DataPrimaryCategory,
13
+ DataState,
14
+ DatasetScope,
15
+ PipelineTriggerType,
16
+ DataOperationType,
17
+ DuplicationHandlingStatus,
18
+ CodingLanguage,
19
+ ExecutionLocation,
20
+ ExecutionComputeType,
21
+ FinCoreCategory,
22
+ FincCoreSubCategory,
23
+ FinCoreRecordsCategory,
24
+ FinancialExchangeOrPublisher,
25
+ CloudProvider)
26
+
27
+ from .logging import (get_logger,
28
+ log_warning,
29
+ log_error,
30
+ log_info,
31
+ log_debug
32
+ )
@@ -0,0 +1,37 @@
1
+
2
+ # pylint: disable=missing-module-docstring
3
+ # pylint: disable=missing-function-docstring
4
+ # pylint: disable=missing-class-docstring
5
+
6
+ from .enums_common_utils import (Status,
7
+ Unit,
8
+ Frequency)
9
+
10
+
11
+ from .enums_pulse import (Module,
12
+ Sector)
13
+
14
+ from .enums_data_eng import (AttributeType,
15
+ DataPrimaryCategory,
16
+ DataState,
17
+ DatasetScope,
18
+ DataSourceType,
19
+ PipelineTriggerType,
20
+ DataOperationType,
21
+ MatchConditionType,
22
+ DuplicationHandling,
23
+ DuplicationHandlingStatus,
24
+ CodingLanguage,
25
+ ExecutionLocation,
26
+ ExecutionComputeType)
27
+
28
+
29
+ from .enums_logging import (LogLevel,
30
+ LoggingHandler)
31
+
32
+ from .enums_module_fincore import (FinCoreCategory,
33
+ FincCoreSubCategory,
34
+ FinCoreRecordsCategory,
35
+ FinancialExchangeOrPublisher)
36
+
37
+ from .enums_solution_providers import (CloudProvider)
@@ -0,0 +1,106 @@
1
+
2
+ # pylint: disable=missing-module-docstring
3
+ # pylint: disable=missing-function-docstring
4
+ # pylint: disable=missing-class-docstring
5
+ # pylint: disable=line-too-long
6
+
7
+ from enum import Enum
8
+
9
+
10
class Status(Enum):
    """Lifecycle states for items (issues, tasks, records) tracked in Pulse."""

    OPEN = "open"
    ACKNOWLEDGED = "acknowledged"
    ESCALATED = "escalated"
    IN_PROGRESS = "in_progress"
    IN_REVIEW = "in_review"
    RESOLVED = "resolved"
    IGNORED = "ignored"
    CANCELLED = "cancelled"
    CLOSED = "closed"

    def __str__(self) -> str:
        # Render as the bare value so the enum slots directly into logs/serialized text.
        return str(self.value)
23
+
24
+ ### Exception during full execution, partially saved
25
+ # Exception during ensemble pipeline; modifications collected in local object , nothing persisted
26
+ # Exception during ensemble pipeline; modifications persisted , metadata failed
27
+ # Exception during ensemble pipeline; modifications persisted , metadata persisted
28
+ # Exception during ensemble pipeline; modifications persisted , metadata persisted
29
+
30
class Unit(Enum):
    """Measurement units used across Pulse datasets (currencies, quantities, areas, ratios)."""

    MIX = "MIX"  # heterogeneous units within one dataset

    # --- Currencies (ISO 4217 codes) ---
    USD = "USD"  # United States Dollar
    EUR = "EUR"  # Euro
    JPY = "JPY"  # Japanese Yen
    GBP = "GBP"  # British Pound Sterling
    AUD = "AUD"  # Australian Dollar
    CAD = "CAD"  # Canadian Dollar
    CHF = "CHF"  # Swiss Franc
    CNY = "CNY"  # Chinese Yuan Renminbi
    SEK = "SEK"  # Swedish Krona
    NZD = "NZD"  # New Zealand Dollar
    MXN = "MXN"  # Mexican Peso
    SGD = "SGD"  # Singapore Dollar
    HKD = "HKD"  # Hong Kong Dollar
    NOK = "NOK"  # Norwegian Krone
    KRW = "KRW"  # South Korean Won
    RUB = "RUB"  # Russian Ruble
    INR = "INR"  # Indian Rupee
    BRL = "BRL"  # Brazilian Real
    ZAR = "ZAR"  # South African Rand
    CURRENCY = "currency"  # generic currency when a specific code is not needed

    # --- Stock market and investments ---
    SHARES = "shares"  # number of shares
    PERCENT = "prcnt"  # percentage, for rates and ratios
    BPS = "bps"        # basis points (interest rates, spreads)

    # --- Volume and quantitative measures ---
    VOLUME = "volume"    # trading volume in units
    MILLIONS = "mills"   # millions, for large quantities or sums
    BILLIONS = "bills"   # billions, for very large quantities or sums

    # --- Commodity-specific units ---
    BARRELS = "barrels"      # oil and similar liquids
    TONNES = "tonnes"        # bulk materials (metals, grains)
    TROY_OUNCES = "troy_oz"  # precious metals

    # --- Real estate / physical properties ---
    SQUARE_FEET = "sq_ft"  # area, imperial
    METER_SQUARE = "m2"    # area, metric
    ACRES = "acres"        # large land plots

    # --- Miscellaneous ---
    UNITS = "units"             # generic unit when nothing else fits
    COUNT = "count"             # tally of items or events
    INDEX_POINTS = "index_pnts" # index levels (e.g. stock market indices)
    RATIO = "ratio"             # dimensionless financial ratios

    def __str__(self) -> str:
        return str(self.value)
82
+
83
class Frequency(Enum):
    """Sampling/reporting frequencies for time-series data.

    NOTE: declaration order (FOUR_H after TWELVE_H) is preserved from the
    original definition, since Enum iteration order is part of the contract.
    """

    ONE_MIN = "1min"
    FIVE_MIN = "5min"
    FIFTEEN_MIN = "15min"
    THIRTY_MIN = "30min"
    ONE_H = "1h"
    TWO_H = "2h"
    SIX_H = "6h"
    TWELVE_H = "12h"
    FOUR_H = "4h"
    EOD = "eod"   # end-of-day snapshot
    ONE_D = "1d"
    TWO_D = "2d"
    THREE_D = "3d"
    ONE_W = "1w"
    ONE_M = "1m"
    TWO_M = "2m"
    THREE_M = "3m"
    SIX_M = "6m"
    ONE_Y = "1y"
    THREE_Y = "3y"

    def __str__(self) -> str:
        return str(self.value)
@@ -0,0 +1,335 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=missing-class-docstring
4
+ from enum import Enum
5
+
6
+
7
class AttributeType(Enum):
    """Kinds of summary/metadata attributes computed over a dataset
    (extreme dates, counts, aggregates, basic statistics).
    """

    RECENT_DATE = "recent_date"
    RECENT_TIMESTAMP = "recent_timestamp"
    RECENT_DATETIME = "recent_datetime"
    OLDEST_DATE = "oldest_date"
    OLDEST_TIMESTAMP = "oldest_timestamp"
    OLDEST_DATETIME = "oldest_datetime"
    MAX_VALUE = "max_value"
    MIN_VALUE = "min_value"
    TOTAL_COUNT = "total_count"
    TOTAL_SUM = "total_sum"
    MEAN = "mean"
    MEDIAN = "median"
    MODE = "mode"
    STANDARD_DEVIATION = "standard_deviation"
    NB_FIELDS_PER_RECORDS = "nb_fields_per_records"

    def __str__(self) -> str:
        # Added for consistency: every other enum in this module stringifies
        # to its value; AttributeType was the lone exception.
        return str(self.value)
23
+
24
+
25
+
26
+
27
class DataPrimaryCategory(Enum):
    """Top-level classification of a dataset's nature."""

    SIMULATION = "simulation"  # generated by models/simulations
    HISTORIC = "historic"      # historical data, usually accurate and complete
    REALTIME = "realtime"      # real-time feed; may contain errors
    ANALYTICS = "analytics"    # derived analytics/modelling output, human-readable (vs. features)
    FEATURES = "features"      # feature data used for model training
    PREDICTIVE = "predictive"  # model-produced predictions

    def __str__(self) -> str:
        return str(self.value)
37
+
38
class DataState(Enum):
    """Processing state a dataset has reached in the pipeline."""

    RAW = "raw"
    FORMATTED = "formatted"
    CLEANED = "cleaned"
    PROCESSED = "processed"
    SIMULATED = "simulated"
    ANALYZED = "analyzed"
    VALIDATED = "validated"
    INVALID = "invalid"

    def __str__(self) -> str:
        return str(self.value)
50
+
51
class DatasetScope(Enum):
    """How much of the underlying dataset an operation covers."""

    FULL = "full_dataset"
    INCREMENTAL = "incremental_dataset"
    PARTIAL = "partial_dataset"
    UNKNOWN = "unknown_dataset"

    def __str__(self) -> str:
        return str(self.value)
59
+
60
+
61
class DataSourceType(Enum):
    """Where data is read from / written to: protocols, databases, messaging,
    storage systems, and file formats."""

    # --- General access protocols ---
    API = "api"
    RPC = "rpc"
    GRPC = "grpc"
    WEBSITE = "website"
    # --- SQL / analytical databases ---
    ORACLE = "oracle"
    POSTGRESQL = "postgresql"
    SQLSERVER = "sqlserver"
    MYSQL = "mysql"
    BIGQUERY = "bigquery"
    SNOWFLAKE = "snowflake"
    REDSHIFT = "redshift"
    ATHENA = "athena"
    # --- NoSQL databases ---
    MONGODB = "mongodb"
    REDIS = "redis"
    CASSANDRA = "cassandra"
    NEO4J = "neo4j"
    FIRESTORE = "firestore"
    DYNAMODB = "dynamodb"
    # --- NewSQL databases ---
    COCKROACHDB = "cockroachdb"
    SPANNER = "spanner"
    # --- Messaging ---
    MESSAGING_KAFKA = "messaging_kafka"
    MESSAGING_SQS = "messaging_sqs"
    MESSAGING_PUBSUB = "messaging_pubsub"
    # --- Real-time communication ---
    REALTIME_WEBSOCKET = "websocket"
    # --- Notifications ---
    NOTIFICATION_WEBHOOK = "webhook"
    # --- Storage ---
    LOCAL_STORAGE = "local_storage"
    INMEMORY = "inmemory"
    GCS = "gcs"
    S3 = "s3"
    AZURE_BLOB = "azure_blob"
    HDFS = "hdfs"
    # --- File formats ---
    FILE = "file"
    FILE_CSV = "file_csv"
    FILE_EXCEL = "file_excel"
    FILE_JSON = "file_json"
    FILE_PARQUET = "file_parquet"
    FILE_ORC = "file_orc"
    FILE_AVRO = "file_avro"
    FILE_TEXT = "file_text"
    FILE_IMAGE = "file_image"
    FILE_VIDEO = "file_video"
    FILE_AUDIO = "file_audio"
    FILE_PDF = "file_pdf"
    FILE_WORD = "file_word"
    FILE_POWERPOINT = "file_powerpoint"
    FILE_HTML = "file_html"
    FILE_MARKDOWN = "file_markdown"
    FILE_XML = "file_xml"
    FILE_YAML = "file_yaml"
    FILE_TOML = "file_toml"
    FILE_OTHER = "file_other"

    def __str__(self) -> str:
        return str(self.value)
125
+
126
class PipelineTriggerType(Enum):
    """What initiated a pipeline run."""

    MANUAL = "manual"
    SCHEDULER = "scheduler"
    SCHEDULER_MAIN = "scheduler_main"
    SCHEDULER_FALLBACK = "scheduler_fallback"
    SCHEDULER_RETRY = "scheduler_retry"
    SCHEDULED_VERIFICATION = "scheduled_verification"
    EVENT_GCS_UPLOAD = "event_gcs_upload"
    EVENT_PUBSUB = "event_pubsub"
    ANOTHER_PIPELINE = "another_pipeline"

    def __str__(self) -> str:
        return str(self.value)
139
+
140
+
141
class DataOperationType(Enum):
    """Kinds of data operations a pipeline step can perform:
    read, transform, write, and DDL (create/alter/drop/truncate)."""

    # --- Read operations ---
    SOURCE = "source"  # reading data from its source
    QUERY = "query"    # systems that support queries
    SCAN = "scan"      # sequential read of all data (e.g. files)
    READ = "read"      # general read
    GET = "get"        # single-record fetch
    IMPORT = "import"
    # --- Transform operations ---
    NO_CHANGE = "no_change"
    TRANSFORM = "transform"
    PREPROCESS = "preprocess"
    ENRICH = "enrich"
    NORMALIZE = "normalize"
    JOIN = "join"
    AGGREGATE = "aggregate"
    FILTER = "filter"
    SORT = "sort"
    GROUP = "group"
    # --- Write operations ---
    POST = "post"  # create new records
    PUT = "put"
    PATCH = "patch"
    WRITE = "write"
    WRITE_TO_FILE = "write_to_file"
    APPEND = "append"
    UPSERT = "upsert"
    INSERT = "insert"
    OVERWRITE = "overwrite"
    INCREMENT = "increment"
    UPDATE = "update"
    DELETE = "delete"
    EXPORT = "export"
    COPY = "copy"
    MERGE = "merge"                # combines INSERT/UPDATE/DELETE in one operation
    MERGE_UPSERT = "merge_upsert"  # merge restricted to insert-or-update semantics
    # BigQuery write dispositions (see google.cloud.bigquery WriteDisposition):
    BIGQUERY_WRITE_APPEND = "bigquery_write_append"      # append rows to the existing table
    BIGQUERY_WRITE_TRUNCATE = "bigquery_write_truncate"  # overwrite: truncate table, then write
    BIGQUERY_WRITE_EMPTY = "bigquery_write_empty"        # write only if table is empty, else fail
    # --- Create operations ---
    CREATE_TABLE = "create_table"
    CREATE_DATABASE = "create_database"
    CREATE_COLLECTION = "create_collection"
    CREATE_INDEX = "create_index"
    CREATE_SCHEMA = "create_schema"
    CREATE_MODEL = "create_model"
    CREATE_VIEW = "create_view"
    # --- Alter operations ---
    ALTER_TABLE = "alter_table"
    ALTER_DATABASE = "alter_database"
    ALTER_COLLECTION = "alter_collection"
    ALTER_INDEX = "alter_index"
    ALTER_SCHEMA = "alter_schema"
    ALTER_MODEL = "alter_model"
    ALTER_VIEW = "alter_view"
    # --- Drop operations ---
    DROP_TABLE = "drop_table"
    DROP_DATABASE = "drop_database"
    DROP_COLLECTION = "drop_collection"
    DROP_INDEX = "drop_index"
    DROP_SCHEMA = "drop_schema"
    DROP_MODEL = "drop_model"
    DROP_VIEW = "drop_view"
    # --- Truncate operations ---
    TRUNCATE_TABLE = "truncate_table"
    TRUNCATE_COLLECTION = "truncate_collection"

    def __str__(self) -> str:
        return str(self.value)
210
class MatchConditionType(Enum):
    """Comparison/matching semantics used when locating or deduplicating records."""

    EXACT = "exact"
    PREFIX = "prefix"
    SUFFIX = "suffix"
    CONTAINS = "contains"
    REGEX = "regex"
    IN_RANGE = "in_range"
    NOT_IN_RANGE = "not_in_range"
    GREATER_THAN = "greater_than"
    LESS_THAN = "less_than"
    GREATER_THAN_OR_EQUAL = "greater_than_or_equal"
    LESS_THAN_OR_EQUAL = "less_than_or_equal"
    IN_LIST = "in_list"
    NOT_IN_LIST = "not_in_list"
    ON_FIELD_MATCH = "on_field_match"
    ON_FIELD_EQUAL = "on_field_equal"
    ON_FIELDS_EQUAL_TO = "on_fields_equal_to"
    ON_FIELDS_COMBINATION = "on_fields_combination"
    NOT_APPLICABLE = "not_applicable"

    def __str__(self) -> str:
        return str(self.value)
232
+
233
+
234
class DuplicationHandling(Enum):
    """Requested strategy for handling duplicate records during a write/merge."""

    RAISE_ERROR = "raise_error"
    OVERWRITE = "overwrite"
    INCREMENT = "increment"
    SKIP = "skip"
    SYSTEM_DEFAULT = "system_default"
    ALLOW = "allow"  # for databases that permit duplicates, e.g. BigQuery
    MERGE_DEFAULT = "merge_default"
    MERGE_PRESERVE_SOURCE_ON_DUPLICATES = "merge_preserve_source_on_dups"
    MERGE_PRESERVE_TARGET_ON_DUPLICATES = "merge_preserve_target_on_dups"
    MERGE_PRESERVE_BOTH_ON_DUPLICATES = "merge_preserve_both_on_dups"
    MERGE_RAISE_ERROR_ON_DUPLICATES = "merge_raise_error_on_dups"
    MERGE_CUSTOM = "merge_custom"

    def __str__(self) -> str:
        return str(self.value)
250
+
251
+
252
class DuplicationHandlingStatus(Enum):
    """Outcome actually applied for duplicates (the past-tense counterpart
    of DuplicationHandling)."""

    ALLOWED = "allowed"
    RAISED_ERROR = "raised_error"
    SYSTEM_DEFAULT = "system_default"
    OVERWRITTEN = "overwritten"
    SKIPPED = "skipped"
    INCREMENTED = "incremented"
    OPERATION_CANCELLED = "operation_cancelled"
    MERGED = "merged"
    MERGED_PRESERVED_SOURCE = "merged_preserved_source"
    MERGED_PRESERVED_TARGET = "merged_preserved_target"
    MERGED_PRESERVED_BOTH = "merged_preserved_both"
    MERGED_RAISED_ERROR = "merged_raised_error"
    MERGED_CUSTOM = "merged_custom"
    NO_DUPLICATES = "no_duplicates"
    UNKNOWN = "unknown"
    UNEXPECTED_ERROR = "unexpected_error"
    CONDITIONAL_ERROR = "conditional_error"
    NOT_APPLICABLE = "not_applicable"

    def __str__(self) -> str:
        return str(self.value)
274
+
275
class CodingLanguage(Enum):
    """Implementation language/runtime of a pipeline component."""

    PYTHON = "python"
    NODEJS = "nodejs"
    JAVA = "java"
    JAVASCRIPT = "javascript"
    TYPESCRIPT = "typescript"
    REACTJS = "reactjs"

    def __str__(self) -> str:
        return str(self.value)
285
+
286
+
287
class ExecutionLocation(Enum):
    """Where a pipeline/job physically runs: local environments or a
    specific cloud (GCP / AWS / Azure) service."""

    # --- Local execution environments ---
    LOCAL_SCRIPT = "local_script"
    LOCAL_JUPYTER_NOTEBOOK = "local_jupyter_notebook"
    LOCAL_SERVER = "local_server"
    LOCAL_DOCKER = "local_docker"
    LOCAL_KUBERNETES = "local_kubernetes"
    # Local emulators of GCP serverless runtimes
    LOCAL_GCP_CLOUD_FUNCTION = "local_gcp_cloud_function"
    LOCAL_GCP_CLOUD_RUN = "local_gcp_cloud_run"

    # --- GCP execution environments ---
    CLOUD_GCP_JUPYTER_NOTEBOOK = "cloud_gcp_jupyter_notebook"
    CLOUD_GCP_CLOUD_FUNCTION = "cloud_gcp_cloud_function"
    CLOUD_GCP_CLOUD_RUN = "cloud_gcp_cloud_run"
    CLOUD_GCP_COMPUTE_ENGINE = "cloud_gcp_compute_engine"
    CLOUD_GCP_DATAPROC = "cloud_gcp_dataproc"
    CLOUD_GCP_DATAFLOW = "cloud_gcp_dataflow"
    CLOUD_GCP_BIGQUERY = "cloud_gcp_bigquery"
    # --- AWS execution environments ---
    CLOUD_AWS_LAMBDA = "cloud_aws_lambda"
    CLOUD_AWS_EC2 = "cloud_aws_ec2"
    CLOUD_AWS_EMR = "cloud_aws_emr"
    CLOUD_AWS_GLUE = "cloud_aws_glue"
    CLOUD_AWS_ATHENA = "cloud_aws_athena"
    CLOUD_AWS_REDSHIFT = "cloud_aws_redshift"
    # --- Azure execution environments ---
    CLOUD_AZURE_FUNCTIONS = "cloud_azure_functions"
    CLOUD_AZURE_VIRTUAL_MACHINES = "cloud_azure_virtual_machines"
    CLOUD_AZURE_SYNAPSE_ANALYTICS = "cloud_azure_synapse_analytics"
    CLOUD_AZURE_DATA_FACTORY = "cloud_azure_data_factory"

    def __str__(self) -> str:
        return str(self.value)
321
+
322
class ExecutionComputeType(Enum):
    """Hardware class (vendor-qualified CPU/GPU/TPU) a job executes on."""

    CPU_INTEL = "cpu_intel"
    CPU_AMD = "cpu_amd"
    CPU_ARM = "cpu_arm"
    GPU_NVIDIA = "gpu_nvidia"
    GPU_AMD = "gpu_amd"
    GPU_INTEL = "gpu_intel"
    TPU_GOOGLE = "tpu_google"
    TPU_INTEL = "tpu_intel"
    TPU_AMD = "tpu_amd"

    def __str__(self) -> str:
        return str(self.value)
@@ -0,0 +1,104 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=missing-class-docstring
4
+ from enum import Enum
5
+
6
+
7
class LoggingHandler(Enum):
    """
    Standardized remote logging handlers for data engineering pipelines,
    designed for easy analysis and identification of remote logging
    requirements
    """

    NONE = "none"                  # no remote handler
    LOCAL_STREAM = "local_stream"  # local stream handler
    GCP_CLOUD_LOGGING = "gcp_cloud_logging"
    GCP_ERROR_REPORTING = "gcp_error_reporting"
    GCP_FIREBASE = "gcp_firebase"
    AWS_CLOUD_WATCH = "aws_cloud_watch"
    AZURE_MONITOR = "azure_monitor"
    AZURE_APPLICATION_INSIGHTS = "azure_application_insights"
    IBM_LOG_ANALYTICS = "ibm_log_analytics"
    ALIBABA_LOG_SERVICE = "alibaba_log_service"
    LOGGLY = "loggly"
    DATADOG = "datadog"
    NEW_RELIC = "new_relic"
    SENTRY = "sentry"
    SUMOLOGIC = "sumologic"
    # --- Other ---
    SYSLOG = "syslog"  # system logs
    CUSTOM = "custom"  # user-defined remote handler
    OTHER = "other"

    def __str__(self) -> str:
        return str(self.value)
37
+
38
+
39
class LogLevel(Enum):
    """
    Standardized notice levels for data engineering pipelines,
    designed for easy analysis and identification of manual
    intervention needs.

    Values are ints ordered by severity (10 = debug ... 1001 = unknown).
    NOTE: several member names carry legacy typos (PERSISTNACE, ERORR);
    they are public API and are kept for backward compatibility.
    """
    DEBUG = 10  # Detailed debug information (for development/troubleshooting)

    INFO = 100
    INFO_REMOTE_PERSISTNACE_COMPLETE = 101
    INFO_REMOTE_UPDATE_COMPLETE = 102
    INFO_REMOTE_DELETE_COMPLETE = 103

    INFO_REMOTE_BULK_PERSISTNACE_COMPLETE = 111
    INFO_REMOTE_BULK_UPDATE_COMPLETE = 112
    INFO_REMOTE_BULK_DELETE_COMPLETE = 113

    INFO_LOCAL_PERSISTNACE_COMPLETE = 121

    SUCCESS = 201
    SUCCESS_WITH_NOTICES = 211
    SUCCESS_WITH_WARNINGS = 212

    NOTICE = 300                   # e.g. same file or data already fully or partially exists
    NOTICE_ALREADY_EXISTS = 301    # Data already exists, no action required
    NOTICE_PARTIAL_EXISTS = 302    # Partial data exists, no action required
    NOTICE_ACTION_CANCELLED = 303  # Data processing cancelled, no action required

    # Warnings indicate potential issues that might require attention:
    WARNING = 400                     # General warning, no immediate action required
    WARNING_REVIEW_RECOMMENDED = 402  # Action recommended to prevent potential future issues
    WARNING_FIX_RECOMMENDED = 403     # Action recommended to prevent potential future issues
    WARNING_FIX_REQUIRED = 404        # Action required, pipeline can likely continue

    ERROR = 500  # General error, no immediate action required

    ERROR_EXCEPTION = 501
    ERROR_CUSTOM = 502  # Temporary error, automatic retry likely to succeed

    ERROR_OPERATION_PARTIALLY_FAILED = 511  # Partial or full failure, manual intervention required
    ERROR_OPERATION_FAILED = 512            # Operation failed, manual intervention required
    ERORR_OPERATION_WITH_WARNINGS = 513     # Partial or full failure, manual intervention required
    ERORR_OPERATION_WITH_ERRORS = 514       # Partial or full failure, manual intervention required
    ERORR_OPERATION_WITH_WARNINGS_OR_ERRORS = 515  # Partial or full failure, manual intervention required

    ERROR_PERSISTANCE_FAILED = 522       # Data persistance failed, manual intervention required
    ERROR_UPDATE_FAILED = 523            # Data update failed, manual intervention required
    ERROR_DELETE_FAILED = 524            # Data deletion failed, manual intervention required
    ERROR_PERSISTANCE_WITH_ERRORS = 525  # Data persistance failed, manual intervention required
    ERROR_UPDATE_WITH_ERRORS = 526       # Data update failed, manual intervention required
    ERROR_DELETE_WITH_ERRORS = 527       # Data deletion failed, manual intervention required

    ERROR_THRESHOLD_REACHED = 551
    ERROR_PIPELINE_THRESHOLD_REACHED = 552          # Error due to threshold reached
    ERROR_SUBTHRESHOLD_REACHED = 553                # Error due to threshold reached
    ERROR_DATA_QUALITY_THRESHOLD_REACHED = 554      # Error due to threshold reached
    ERROR_METADATA_QUALITY_THRESHOLD_REACHED = 555  # Error due to threshold reached
    # Critical errors indicate severe failures requiring immediate attention:
    CRITICAL = 600                 # General critical error, requires immediate action
    CRITICAL_SYSTEM_FAILURE = 601  # System-level failure (e.g., infrastructure), requires immediate action

    UNKNOWN = 1001  # Unknown error, should not be used in normal operation

    def __str__(self) -> str:
        # BUG FIX: values here are ints; returning self.value directly made
        # str(level) raise "TypeError: __str__ returned non-string (type int)".
        return str(self.value)
@@ -0,0 +1,69 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=missing-class-docstring
4
+ from enum import Enum
5
+
6
class FinCoreCategory(Enum):
    """Top-level FinCore data categories."""

    MARKET = "market"      # market prices data
    CORPORATE = "corp"     # corporate data (financial statements, earnings) akin to fundamentals
    FUNDAMENTAL = "fundam"
    ECONOMY = "economy"
    NEWS = "news"
    SENTIMENT = "sntmnt"
    SOCIAL = "social"
    POLITICS = "poltcs"
    OTHER = "other"

    def __str__(self) -> str:
        return str(self.value)
19
+
20
class FincCoreSubCategory(Enum):
    """FinCore asset-class / data-type subcategories.

    NOTE: class name carries a legacy typo ("Finc") — it is public API,
    so it is preserved for backward compatibility.
    """

    STOCKS = "stocks"
    BONDS = "bonds"
    COMMODITIES = "cmmdt"
    CURRENCIES = "crrncy"
    CRYPTOCURRENCIES = "crypto"
    REAL_ESTATE = "realest"
    EQUITY_INDICES = "eqindx"
    OPTIONS = "options"
    FUTURES = "futures"
    ETF = "etf"
    ECONOMIC_INDICATORS = "ecoind"
    FUNDAMENTALS = "fundam"
    OTHER = "othr"

    def __str__(self) -> str:
        return str(self.value)
37
+
38
class FinCoreRecordsCategory(Enum):
    """Record-level content types within FinCore datasets (price fields,
    fundamentals, news/social items)."""

    PRICE = "pric"
    SPOT = "spot"
    OHLCVA = "ohlcva"
    OHLCV = "ohlcv"
    OPEN = "open"
    HIGH = "high"
    LOW = "low"
    CLOSE = "close"
    VOLUME = "volume"
    ADJC = "adjc"
    FUNDAMENTAL = "fundam"  # treated differently from price records
    EARNINGS = "earnings"
    CASH_FLOW = "cashflw"
    BALANCE_SHEET = "blnce_sht"
    INTERNAL_TRANSACTIONS = "internaltrans"
    INDICATORS = "indic"
    ARTICLE = "article"
    INSTA_POST = "isntapost"  # NOTE: legacy value typo kept — stored data depends on it
    TWEET = "tweet"
    OTHER = "othr"

    def __str__(self) -> str:
        return str(self.value)
62
+
63
class FinancialExchangeOrPublisher(Enum):
    """Exchange or data-publisher codes."""

    CC = "cc"
    US = "us"
    NASDAQ = "nasdaq"

    def __str__(self) -> str:
        return str(self.value)
@@ -0,0 +1,30 @@
1
+
2
+ # pylint: disable=missing-module-docstring
3
+ # pylint: disable=missing-function-docstring
4
+ # pylint: disable=missing-class-docstring
5
+ # pylint: disable=line-too-long
6
+ from enum import Enum
7
+
8
class Module(Enum):
    """Pulse platform modules."""

    CORE = "core"
    ORACLE = "oracle"
    PORTFOLIO = "portfolio"
    RISK = "risk"
    RESEARCH = "research"
    TRADING = "trading"
    SIMULATION = "simulation"

    def __str__(self) -> str:
        return str(self.value)
19
+
20
class Sector(Enum):
    """Vertical sectors ("cores") the platform serves."""

    FINCORE = "fincore"
    GYMCORE = "gymcore"
    HEALTHCORE = "healthcore"
    ENVICORE = "envicore"
    SPORTSCORE = "sportscore"
    POLICORE = "policore"
    CUSTOM = "custom"

    def __str__(self) -> str:
        return str(self.value)
@@ -0,0 +1,21 @@
1
+
2
+ # pylint: disable=missing-module-docstring
3
+ # pylint: disable=missing-function-docstring
4
+ # pylint: disable=missing-class-docstring
5
+ # pylint: disable=line-too-long
6
+
7
+ from enum import Enum
8
+
9
class CloudProvider(Enum):
    """Cloud vendor (or absence thereof) hosting a resource.

    NOTE: member UNKNWON carries a legacy typo — it is public API and kept
    for backward compatibility (its value is the correct "unknown").
    """

    GCP = "cloud_gcp"
    AWS = "cloud_aws"
    AZURE = "cloud_azure"
    IBM = "cloud_ibm"
    ALIBABA = "cloud_alibaba"
    NO_CLOUD = "no_cloud"
    CLOUD_AGNOSTIC = "cloud_agnostic"
    OTHER = "other"
    UNKNWON = "unknown"

    def __str__(self) -> str:
        return str(self.value)
@@ -0,0 +1,182 @@
1
# Classification levels controlling who may access a resource; "*" is a wildcard.
resource_classifications = {
    "*",
    "childs_based",  # Meaning need to look into child fields to determine classifications

    "public",                 # Anyone Can Access ex: synthetic data
    "authuser_open",          # Any Authenticated Can Access ex: prices of gold, bitcoin etc.
    # "authuser_subscription",
    "authuser_confidential",  # Only User Owner Can Access and Specific Admin
    "authuser_limitedacl",    # Has to be in the ACL
    # FIX: a missing comma previously fused "authuser_owner" and
    # "internal_open" into one concatenated string literal.
    "authuser_owner",
    "internal_open",          # Any Internal employees only Can Access ex: public reports, emails etc.
    "internal_sensitive",     # Many Internal employees Can Access IF meet special condition ex: internal financials summary reports, web and app analytics, list of admin users etc.
    "internal_confidential",  # Few Internal employees Can Access. ex: internal user data, key financials, salaries and bonuses etc
    "internal_limitedacl",    # Has to be employee usertype and in the ACL
    "internal_owner",
}
17
+
18
+
19
# Logical data domains a resource can belong to; "*" is a wildcard.
resource_domain = {
    "*",
    ############### GYM #########
    "gym_domain",
    "gym_data_domain",
    "gym_ai_domain",
    ############## ORACLE #########
    "oracle_domain",
    "oracle_historic_prices_domain",
    "oracle_ai_domain",
    # NOTE(review): "assests" below looks misspelled ("assets"?); left
    # unchanged because stored data may already use this exact string.
    "oracle_assests_historic_info_domain",
    "oracle_historic_econometrics_domain",
    "oracle_news_historic_domain",
    "oracle_calendar_domain",
    "oracle_modelinfo_domain",
    "oracle_modelmetrics_domain",
    "oracle_modelpredictions_domain",
    ######### ORGANISATIONS #########
    "organisation_domain",
    ################### USER #########
    "user_domain",
    "user_management_domain",
    "user_portfolio_domain",
    "user_groups_and_roles_domain",
    ############### BUSINESS #########
    "business_domain",
    ############### ANALYTICS #########
    "analytics_domain",
    "system_domain"
}
49
+
50
# Kinds of resources the permission system can describe, from storage
# primitives (tables, documents, buckets) to delivery artefacts (reports,
# dashboards, web pages).
resource_types = {
    "db", "sql_db", "nosql_db", "dynamodb",
    "big_query", "big_query_project", "big_query_table", "big_query_column",
    "big_query_row", "big_query_cell",
    "firestore", "firestore_project", "firestore_collection",
    # FIX: a missing comma previously fused the next two entries into one
    # concatenated string literal.
    "firestore_document", "firestore_document_with_timeseries",
    "firestore_document_field",
    "pandas_dataframe", "spark_dataframe",
    "s3_bucket", "storage_bucket",
    "folder", "file", "json_file", "csv_file", "pdf_file",
    "unstructured_file", "image", "video", "audio", "text",
    "api", "report", "dashboard", "webpage", "website", "web",
}
62
+
63
# How an organisation relates to ours; "*" is a wildcard.
organisation_relations = {
    "*",
    "retail_customer",
    "corporate_customer",
    "parent",
    "sister",
    "self",
    "partner",
    "supplier",
    "sponsor",
    "investor",
    "regulator",
    "other"
}
77
+
78
# Industry labels an organisation can carry; "*" is a wildcard.
organisation_industries = {
    "*",
    "data",
    "government",
    "media",
    "academic",
    "commercial",
    "fund",
    "finance",
    "advisory",
    "hedgefund",
    "bank",
    "vc",
    "pe",
    "construction",
    "healthcare",
    "technology",
    "consulting",
    "retail",
    "non_profit",
    "individual",
    "freelancer",
    "other"
}
102
+
103
# Licence regimes a resource may be held under, grouped from most to least
# permissive; "*" is a wildcard.
licences_types = {
    "*",
    ######################################### OPEN or FULL Rights
    "public",
    "open",
    "open_no_tandc",
    "full_rights",
    "full_rights_for_sale",
    "commercial_licence_perpetual",
    "customer_private_tac",
    ######################################### SPECIAL CONDITIONS
    "open_with_tandc",
    "on_special_request",
    "commercial_licence_limited_time",
    "customer_owned_for_sale",
    ######################################### Not for Commercial Use
    "full_rights_not_for_sale",
    "internal_only",
    "academic_licence",
    "not_for_commercial_use",
    # FIX: a missing comma previously fused "customer_private" with
    # "commercial_licence_not_purchased" into one concatenated string.
    "customer_private",
    ######################################### Unknown
    "commercial_licence_not_purchased",
    "web_scrapped",
    "unknown"
}
129
+
130
+
131
# Actions recognised by the permission system: raw HTTP verbs plus
# application-level operations with their batch_* variants.
actions = {"GET",
    "POST",
    "DELETE",
    "PUT",
    "create",
    "batch_create",
    "read",
    "batch_read",
    "edit",
    "batch_edit",
    "add",
    "batch_add",
    "remove",
    "batch_remove",
    "delete",
    "batch_delete",
    "rename",
    "batch_rename",
    "move",
    "batch_move",
    "download",
    "upload",
    "share"
}
155
+
156
# Principals allowed to READ a resource; "*" is a wildcard.
resource_readable_by = {
    "*",
    "all",
    "authenticated",
    # FIX: correctly spelled entry added; the original misspelling is kept
    # so any stored data / callers using it continue to match.
    "restricted",
    "restircted",
    "owner",
    "selected_by_owner",
    "admin",
    "selected_by_admin",
    "super_admin",
    "super_admin_selected",
    "system"
}
169
+
170
# Principals allowed to UPDATE a resource; "*" is a wildcard.
resource_updatable_by = {
    "*",
    "all",
    "authenticated",
    # FIX: correctly spelled entry added; the original misspelling is kept
    # so any stored data / callers using it continue to match.
    "restricted",
    "restircted",
    "owner",
    "selected_by_owner",
    "admin",
    "selected_by_admin",
    "super_admin",
    "super_admin_selected",
    "system"
}
@@ -0,0 +1 @@
1
+ from .utils_logging import get_logger, log_error, log_warning, log_info, log_debug
@@ -0,0 +1,12 @@
1
+ from pydantic import BaseModel
2
+ from datetime import datetime
3
+
4
class AuditLogFirestore(BaseModel):
    """Audit-trail entry describing a single Firestore field change."""
    user_uid: str          # UID of the user who performed the action
    action: str            # action performed (free-form string)
    collection_name: str   # Firestore collection that was touched
    document_name: str     # document within the collection
    field_name: str        # field that changed
    old_value: str         # value before the change (stringified)
    new_value: str         # value after the change (stringified)
    timestamp: datetime    # when the change happened — tz-awareness not enforced here
@@ -0,0 +1,144 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=logging-fstring-interpolation
4
+ # pylint: disable=line-too-long
5
+ # pylint: disable=missing-class-docstring
6
+ # pylint: disable=broad-exception-caught
7
+
8
+ import logging
9
+ import traceback
10
+ import json
11
+ import os
12
+ from google.cloud import error_reporting
13
+ from google.cloud import logging as cloud_logging
14
+
15
+ ##########################################################################################################################
16
+ #################################### Custom logging FORMATTERS #####################################################
17
+ ##########################################################################################################################
18
+
19
+
20
class CloudLogFormatter(logging.Formatter):
    """Formats log records as structured JSON.

    The JSON object always carries message, timestamp, logger name,
    severity, pathname and line number; a traceback is attached when the
    record has exc_info, and dict messages are additionally merged into the
    top level so their keys become individually searchable fields.
    """

    def format(self, record):
        # FIX: use getMessage() so %-style lazy arguments are interpolated
        # (record.msg alone would emit the raw format string). Dict messages
        # are passed through unchanged so they can be merged below.
        message = record.msg if isinstance(record.msg, dict) else record.getMessage()
        log_entry = {
            'message': message,
            'timestamp': self.formatTime(record, self.datefmt),
            'name': record.name,
            'severity': record.levelname,
            'pathname': record.pathname,
            'lineno': record.lineno,
        }
        if record.exc_info:
            log_entry['exception_traceback'] = ''.join(traceback.format_exception(*record.exc_info))
        if isinstance(record.msg, dict):
            # Merge dict payload keys into the top-level entry (may override
            # 'message' and the defaults above, matching original behavior).
            log_entry.update(record.msg)
        return json.dumps(log_entry)
37
+
38
+
39
class LocalLogFormatter(logging.Formatter):
    """Formats log records for local, human-readable console output.

    INFO records are kept short ("[INFO] <time> :: <msg>"); every other
    level also carries the shortened source path, line number and logger
    name, and ERROR records append the traceback when exc_info is set.
    """

    def format(self, record):
        # Shorten the pathname to its last two components for readability.
        path_parts = record.pathname.split(os.sep)
        if len(path_parts) >= 2:
            short_path = os.path.join(path_parts[-2], path_parts[-1])
        else:
            short_path = record.pathname

        # FIX: use getMessage() so %-style lazy arguments are interpolated
        # (record.msg alone would emit the raw format string).
        message = record.getMessage()
        timestamp = self.formatTime(record, self.datefmt)

        if record.levelno == logging.INFO:
            log_message = f"[INFO] {timestamp} :: {message}"
        else:
            # DEBUG, ERROR and all other levels share the detailed layout
            # (the original duplicated this line per level verbatim).
            log_message = f"[{record.levelname}] {timestamp} :: {message} :: {short_path} :: lineno {record.lineno} :: {record.name}"
            if record.levelno == logging.ERROR and record.exc_info:
                log_message += "\n" + ''.join(traceback.format_exception(*record.exc_info))

        return log_message
65
+
66
+ #############################################################################################################################################
67
+ ######################################## Logging handlers for Google Cloud ########################################
68
+ #############################################################################################################################################
69
+
70
class CustomGCPLoggingHandler(cloud_logging.handlers.CloudLoggingHandler):
    """Handler for Google Cloud Logging that routes failures to handleError.

    FIX: the previous ``emit`` built a structured ``log_entry`` dict and a
    custom ``logName`` string but never sent either — it always delegated to
    ``super().emit(record)`` — so that dead code is removed. The base
    CloudLoggingHandler already writes to the log named after the ``name``
    passed at construction.
    """

    def __init__(self, client, name, resource=None, labels=None):
        super().__init__(client=client, name=name, resource=resource, labels=labels)
        # Keep an explicit reference so callers can reach the client directly.
        self.client = client

    def emit(self, record):
        try:
            # Delegate formatting and shipping to CloudLoggingHandler; any
            # failure is routed through logging's standard error handling
            # instead of propagating into application code.
            super().emit(record)
        except Exception:
            self.handleError(record)
108
+
109
class CustomGCPErrorReportingHandler(logging.Handler):
    """Forwards ERROR-and-above log records to Google Cloud Error Reporting.

    Pass a pre-built ``client`` (anything with a ``report(str)`` method) to
    avoid creating a real ``error_reporting.Client`` — useful in tests;
    otherwise a default client is constructed.
    """

    def __init__(self, client=None, level=logging.ERROR):
        super().__init__(level)
        self.error_client = error_reporting.Client() if client is None else client
        # NOTE(review): 'propagate' is a Logger attribute, not a Handler one,
        # so this has no effect on the logging machinery; kept in case
        # external code inspects it.
        self.propagate = True

    def emit(self, record):
        try:
            # Re-check the level so the handler stays ERROR-only even if its
            # level attribute is lowered after construction.
            if record.levelno >= logging.ERROR:
                log_struct = {
                    'message': self.format(record),
                    'severity': record.levelname,
                    'pathname': getattr(record, 'pathname', None),
                    'lineno': getattr(record, 'lineno', None)
                }
                if record.exc_info:
                    log_struct['exception'] = ''.join(
                        traceback.format_exception(*record.exc_info)
                    )
                # The whole structure is reported as one string payload.
                self.error_client.report(str(log_struct))
        except Exception:  # FIX: dropped unused 'as e' binding
            self.handleError(record)
131
+
132
+
133
def add_gcp_cloud_logging(logger, formatter, client=None):
    """Attach a Google Cloud Logging handler (using *formatter*) to *logger*.

    A fresh cloud_logging.Client is created when *client* is not supplied.
    """
    active_client = client or cloud_logging.Client()
    gcp_handler = CustomGCPLoggingHandler(active_client, logger.name)
    gcp_handler.setFormatter(formatter)
    logger.addHandler(gcp_handler)
139
+
140
def add_gcp_error_reporting(logger, client=None):
    """Attach a Google Cloud Error Reporting handler to *logger*.

    A fresh error_reporting.Client is created when *client* is not supplied.
    """
    active_client = client or error_reporting.Client()
    logger.addHandler(CustomGCPErrorReportingHandler(client=active_client))
@@ -0,0 +1,78 @@
1
+ # pylint: disable=missing-module-docstring
2
+ # pylint: disable=missing-function-docstring
3
+ # pylint: disable=logging-fstring-interpolation
4
+ # pylint: disable=line-too-long
5
+ # pylint: disable=missing-class-docstring
6
+ # pylint: disable=broad-exception-caught
7
+ import logging
8
+ from typing import List, Union
9
+ from ipulse_shared_base_ftredge import LoggingHandler
10
+ from .logging_handlers_and_formatters import (CloudLogFormatter,
11
+ LocalLogFormatter,
12
+ add_gcp_cloud_logging,
13
+ add_gcp_error_reporting)
14
+
15
+
16
+ def get_logger( logger_name:str ,level=logging.INFO, logging_handler_providers: Union[LoggingHandler, List[LoggingHandler]] = LoggingHandler.NONE):
17
+
18
+ """Creates and configures a logger with the specified handlers."""
19
+
20
+ logger = logging.getLogger(logger_name)
21
+ logger.setLevel(level)
22
+ cloud_formatter = CloudLogFormatter()
23
+
24
+ # Ensure logging_handler_providers is a list for consistent processing
25
+ if not isinstance(logging_handler_providers, list):
26
+ logging_handler_providers = [logging_handler_providers]
27
+
28
+ supported_remote_handlers = [
29
+ LoggingHandler.GCP_CLOUD_LOGGING,
30
+ LoggingHandler.GCP_ERROR_REPORTING,
31
+ LoggingHandler.LOCAL_STREAM,
32
+ LoggingHandler.NONE, # If NONE is considered a remote handler
33
+ ]
34
+
35
+ # Remote handlers
36
+
37
+ for handler_provider in logging_handler_providers:
38
+ if handler_provider in supported_remote_handlers:
39
+ if handler_provider == LoggingHandler.GCP_CLOUD_LOGGING:
40
+ add_gcp_cloud_logging(logger, cloud_formatter)
41
+ elif handler_provider == LoggingHandler.GCP_ERROR_REPORTING:
42
+ add_gcp_error_reporting(logger)
43
+ elif handler_provider == LoggingHandler.LOCAL_STREAM: # Handle local stream
44
+ local_handler = logging.StreamHandler()
45
+ local_handler.setFormatter(LocalLogFormatter())
46
+ logger.addHandler(local_handler)
47
+ else:
48
+ raise ValueError(
49
+ f"Unsupported logging provider: {handler_provider}. "
50
+ f"Supported providers: {[h.value for h in supported_remote_handlers]}"
51
+ )
52
+ return logger
53
+
54
+
55
def log_error(msg, logger=None, print_out=False, exc_info=False):
    """Log *msg* at ERROR level; echo it to stdout first when *print_out*.

    When *exc_info* is truthy the active exception info is attached.
    Both *logger* and printing are optional and independent.
    """
    if print_out:
        print(msg)
    if not logger:
        return
    logger.error(msg, exc_info=exc_info)
60
+
61
def log_warning(msg, logger=None, print_out=False):
    """Log *msg* at WARNING level; echo it to stdout first when *print_out*."""
    if print_out:
        print(msg)
    if not logger:
        return
    logger.warning(msg)
66
+
67
+
68
def log_info(msg, logger=None, print_out=False):
    """Log *msg* at INFO level; echo it to stdout first when *print_out*."""
    if print_out:
        print(msg)
    if not logger:
        return
    logger.info(msg)
73
+
74
def log_debug(msg, logger=None, print_out=False):
    """Log *msg* at DEBUG level; echo it to stdout first when *print_out*."""
    if print_out:
        print(msg)
    if not logger:
        return
    logger.debug(msg)
@@ -0,0 +1,9 @@
1
+ Metadata-Version: 2.1
2
+ Name: ipulse_shared_base_ftredge
3
+ Version: 2.2.2
4
+ Summary: Shared Enums, Logger and other Base Utils for Pulse Platform. Using AI for Asset Management and Financial Advisory.
5
+ Home-page: https://github.com/TheFutureEdge/ipulse_shared_base
6
+ Author: Russlan Ramdowar
7
+ License-File: LICENCE
8
+ Requires-Dist: google-cloud-logging~=3.10.0
9
+ Requires-Dist: google-cloud-error-reporting~=1.11.0
@@ -0,0 +1,22 @@
1
+ LICENCE
2
+ README.md
3
+ pyproject.toml
4
+ setup.py
5
+ src/ipulse_shared_base_ftredge/__init__.py
6
+ src/ipulse_shared_base_ftredge.egg-info/PKG-INFO
7
+ src/ipulse_shared_base_ftredge.egg-info/SOURCES.txt
8
+ src/ipulse_shared_base_ftredge.egg-info/dependency_links.txt
9
+ src/ipulse_shared_base_ftredge.egg-info/requires.txt
10
+ src/ipulse_shared_base_ftredge.egg-info/top_level.txt
11
+ src/ipulse_shared_base_ftredge/enums/__init__.py
12
+ src/ipulse_shared_base_ftredge/enums/enums_common_utils.py
13
+ src/ipulse_shared_base_ftredge/enums/enums_data_eng.py
14
+ src/ipulse_shared_base_ftredge/enums/enums_logging.py
15
+ src/ipulse_shared_base_ftredge/enums/enums_module_fincore.py
16
+ src/ipulse_shared_base_ftredge/enums/enums_pulse.py
17
+ src/ipulse_shared_base_ftredge/enums/enums_solution_providers.py
18
+ src/ipulse_shared_base_ftredge/enums/pulse_enums.py
19
+ src/ipulse_shared_base_ftredge/logging/__init__.py
20
+ src/ipulse_shared_base_ftredge/logging/audit_log_firestore.py
21
+ src/ipulse_shared_base_ftredge/logging/logging_handlers_and_formatters.py
22
+ src/ipulse_shared_base_ftredge/logging/utils_logging.py
@@ -0,0 +1,2 @@
1
+ google-cloud-logging~=3.10.0
2
+ google-cloud-error-reporting~=1.11.0