berryworld 1.0.0.197234__py3-none-any.whl → 1.0.0.199020__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
berryworld/__init__.py CHANGED
@@ -4,6 +4,7 @@ from .transportation_solver import TransportationAlgorithm
  from .allocation_solver import AllocationSolver
  from .pickle_management import PickleManagement
  from .email_logging import EmailLogging
+ from .python_logs import ErrorLogs
  from .verify_keys import Verify
  from .credentials import SnowflakeCredentials, SQLCredentials, WebServiceCredentials, MicrosoftTeamsCredentials
  from .persistent_storage import PersistentStorage
berryworld/logging.py CHANGED
@@ -1,6 +1,3 @@
-
-
-
  class PythonLogs:
      """ Register the Python Logs """
      def __init__(self, conn, batch_process):
berryworld/python_logs.py ADDED
@@ -0,0 +1,112 @@
+ import time
+ import math
+ import datetime
+ from uuid import uuid4
+ import pandas as pd
+ from threading import Thread
+
+
+ class ErrorLogs:
+     """ Register Python Error Logs """
+
+     def __init__(self, project_name, pipeline, ip_address, request_url, sql_con, timeout=30*60, print_sql=False):
+         """ Initialize the class
+         :param project_name: Name of the project being run. It must already be declared in PythonEmailProjectSeverity
+         :param pipeline: Pipeline name being run. It must identify the process being executed uniquely
+         :param ip_address: IP Address
+         :param request_url: URL requested by the client
+         :param sql_con: Connection to the Database to upload the Logs
+         :param timeout: Time in seconds after which an unsuccessful log will be sent
+         :param print_sql: Print the SQL statement sent to the server
+         """
+         self.log_df = pd.DataFrame({'ProjectName': [project_name], 'Successful': [0], 'Sent': [0],
+                                     'IPAddress': [str(ip_address).replace("'", '"')],
+                                     'RequestUrl': [str(request_url).replace("'", '"')],
+                                     'StartedDate': [datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")]})
+         self.sql_con = sql_con
+         self.timeout = timeout
+         self.print_sql = print_sql
+         self.guid = str(uuid4())
+         self.pipeline = pipeline
+         self.failure_type = []
+         Thread(target=self.start_threading).start()
+
+     def start_threading(self):
+         """ Start a thread to update on failure if the script breaks or the pipeline gets blocked
+         """
+         time_range = math.ceil(self.timeout / 10)
+         for times in range(time_range):
+             time.sleep(10)
+             if len(self.failure_type) > 0:
+                 break
+
+         if len(self.failure_type) == 0:
+             elapsed_time = str(datetime.timedelta(seconds=round(self.timeout)))[2:]
+             self.on_failure(error_message=f'The pipeline failed to succeed after running '
+                                           f'for {elapsed_time} minutes')
+
+     def on_success(self, pipeline=None):
+         """ Update log on success
+         :param pipeline: Pipeline name being run. It must identify the process being executed uniquely
+         """
+         if not any(self.failure_type):
+             if pipeline is not None:
+                 self.pipeline = pipeline
+             successful_columns = {'Successful': 1, 'Resolved': 1, 'Pipeline': self.pipeline, 'GuidKey': self.guid,
+                                   'FinishedDate': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")}
+             self.log_df = self.log_df.assign(**successful_columns)
+             self.sql_con.insert(self.log_df, 'Staging', 'Logs', print_sql=self.print_sql)
+             self.failure_type.append(True)
+
+             Thread(target=self.trigger_merge_stored_procedure).start()
+
+     def on_failure(self, error_message, pipeline=None, section=None, critical=True, proposed_solution=None):
+         """ Update log on failure
+         :param error_message: Error message to be sent in the Log
+         :param pipeline: Pipeline name being run. It must identify the process being executed uniquely
+         :param section: Indicate the script section. Useful to locate the error
+         :param critical: Indicate whether it should avoid sending successful logs
+         :param proposed_solution: Proposed solution to the error message
+         """
+         save_failure = True
+         if section is not None:
+             if ((('prod' not in str(self.sql_con.server)) | ('prd' not in str(self.sql_con.server)))
+                     & (('connection' in str(section).lower()) & ('dw' in str(section).lower()))):
+                 print('Avoiding to report a connection DW error in a non-production environment')
+                 save_failure = False
+
+         if save_failure:
+             if pipeline is not None:
+                 self.pipeline = pipeline
+             unsuccessful_columns = {'Successful': 0, 'Section': section, 'Pipeline': self.pipeline,
+                                     'GuidKey': self.guid, 'Critical': 1 if critical is True else 0,
+                                     'FinishedDate': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
+                                     'ErrorMessage': str(error_message).replace("'", '"')}
+             self.log_df = self.log_df.assign(**unsuccessful_columns)
+             if proposed_solution is not None:
+                 self.log_df = self.log_df.assign(**{'ProposedSolution': proposed_solution})
+             self.sql_con.insert(self.log_df, 'Staging', 'Logs', print_sql=self.print_sql)
+             self.failure_type.append(critical)
+
+             Thread(target=self.trigger_merge_stored_procedure).start()
+
+     def trigger_merge_stored_procedure(self):
+         """ Trigger the merge stored procedure to aggregate logs
+         """
+         stored_procedure = 'Logging.spLogsMerge'
+         self.sql_con.execute(f"EXEC {stored_procedure}")
+
+     @staticmethod
+     def register_failure(email_log, section, error_, solutions=None):
+         """ Register a failure in the email log
+         :param email_log: Instance of PythonEmailProjectSeverity
+         :param section: Indicate the script section. Useful to locate the error
+         :param error_: Error message to be sent in the Log
+         :param solutions: List of solutions for the error messages
+         """
+         solution_header = list(filter(lambda x: section.startswith(x), list(solutions.keys())))
+         if len(solution_header) > 0:
+             proposed_solution = solutions[solution_header[0]]
+             email_log.on_failure(error_, section=section, proposed_solution=proposed_solution)
+         else:
+             email_log.on_failure(error_, section=section)
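
For context, the following is a minimal usage sketch of the new ErrorLogs class, based only on the constructor and method signatures shown above. The project name, pipeline name, request details and the run_pipeline() workload are hypothetical, and sql_con stands for an existing berryworld SQL connection object exposing the insert(), execute() and server members the class relies on.

from berryworld import ErrorLogs

error_log = ErrorLogs(project_name='DemoProject',             # hypothetical project, assumed to exist in PythonEmailProjectSeverity
                      pipeline='demo-pipeline',               # hypothetical pipeline identifier
                      ip_address='10.0.0.1',
                      request_url='https://example.local/run',
                      sql_con=sql_con,                        # assumed pre-built berryworld SQL connection
                      timeout=15 * 60)                        # report failure if nothing is logged within 15 minutes
try:
    run_pipeline()                                            # hypothetical workload
    error_log.on_success()
except Exception as exc:
    error_log.on_failure(str(exc), section='Demo - main', critical=True)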
berryworld/sql_connenction.py CHANGED
@@ -2,7 +2,6 @@ import os
  import re
  import ast
  import math
- import time
  import pyodbc
  import traceback
  import numpy as np
@@ -10,7 +9,6 @@ import pandas as pd
  import sqlalchemy as sa
  from urllib import parse
  from numbers import Number
- from threading import Thread
  from sqlalchemy.pool import QueuePool


@@ -1029,7 +1027,7 @@ class SQLPoolEngine:
      """ Connect a Pool Engine to a Microsoft SQL """

      def __init__(self, db_reference, server, master=False, trusted_certificate=True, encrypt=True, multi_db=False,
-                  commit_as_transaction=True, pool_size=10, max_overflow=10, pool_timeout=30, timeout=300):
+                  commit_as_transaction=True, pool_size=10, max_overflow=10, pool_timeout=30):
          """ Initialize the class
          It requires the
          SQL-DBREFERENCE-PROD = 'server_name db_name user password'
@@ -1049,7 +1047,6 @@ class SQLPoolEngine:
          :param pool_size: Number of connections to keep in the pool
          :param max_overflow: Extra connections beyond pool_size
          :param pool_timeout: Timeout for getting a connection
-         :param timeout: Connection timeout in seconds
          """
          self.con_string_read = None
          self.con_string_write = None
@@ -1060,7 +1057,6 @@ class SQLPoolEngine:
          self.pool_size = pool_size
          self.max_overflow = max_overflow
          self.pool_timeout = pool_timeout
-         self.timeout = timeout

          self.db_reference = db_reference.replace("_", "") if "_" in db_reference else db_reference
          self.server = server
@@ -1103,9 +1099,6 @@ class SQLPoolEngine:

          self.create_write_engine(commit_as_transaction=self.commit_as_transaction)

-         # Dispose the engine after a certain timeout
-         Thread(target=self.dispose_engine, args=(self.timeout,)).start()
-
      def credentials(self):
          """ Return the credentials used to connect to the SQL Server
          :return: Dictionary with the credentials used to connect to the SQL Server
@@ -1160,8 +1153,7 @@ class SQLPoolEngine:
              poolclass=QueuePool,
              pool_size=self.pool_size,  # Number of connections to keep in the pool
              max_overflow=self.max_overflow,  # Extra connections beyond pool_size
-             pool_timeout=self.pool_timeout,  # Timeout for getting a connection
-             pool_recycle=self.timeout  # Recycle connections after X minutes
+             pool_timeout=self.pool_timeout  # Timeout for getting a connection
          )

          if not commit_as_transaction:
@@ -1182,8 +1174,7 @@ class SQLPoolEngine:
              poolclass=QueuePool,
              pool_size=10,  # Number of connections to keep in the pool
              max_overflow=10,  # Extra connections beyond pool_size
-             pool_timeout=30,  # Timeout for getting a connection
-             pool_recycle=self.timeout  # Recycle connections after X minutes
+             pool_timeout=30  # Timeout for getting a connection
          )

          if not commit_as_transaction:
@@ -1197,13 +1188,10 @@ class SQLPoolEngine:
          self.create_read_engine(commit_as_transaction=commit_as_transaction)
          self.con = self.engine_read.connect().connection

-     def dispose_engine(self, timeout=0):
+     def dispose_engine(self):
          """ Dispose any opened engines with the Server
          :return: None
          """
-         if timeout > 0:
-             time.sleep(timeout)
-
          if self.engine_read:
              self.engine_read.dispose()

@@ -1255,26 +1243,16 @@ class SQLConnectionPool:
          self.commit_as_transaction = pool_class.commit_as_transaction
          self.db_name = pool_class.db_name
          self.server = pool_class.server
-         self.timeout = pool_class.timeout
-
-         Thread(target=self.close_connection, args=(self.timeout,)).start()

-     def close_connection(self, timeout=0):
+     def close_connection(self):
          """ Close any opened connections with the Server
          :return: None
          """
-         if timeout > 0:
-             time.sleep(timeout)
-
          if self.con_read is not None:
              self.con_read.close()
-         if self.engine_read:
-             self.engine_read.dispose()

          if self.con_write is not None:
              self.con_write.close()
-         if self.engine_write:
-             self.engine_write.dispose()

      def query(self, sql_query, coerce_float=False):
          """ Read data from SQL according to the sql_query
berryworld-1.0.0.197234.dist-info/METADATA → berryworld-1.0.0.199020.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: berryworld
- Version: 1.0.0.197234
+ Version: 1.0.0.199020
  Summary: Handy classes to improve ETL processes
  Home-page: https://www.berryworld.com
  Author: BerryWorld ltd
berryworld-1.0.0.197234.dist-info/RECORD → berryworld-1.0.0.199020.dist-info/RECORD
@@ -1,4 +1,4 @@
- berryworld/__init__.py,sha256=dDnmPuE2CZfPzh08BfjJjmyeM0mjs5mIP0OS7RujDco,1140
+ berryworld/__init__.py,sha256=xYQwJeCZ2di85E0_O3Sx8AUsRhoT6USUxcPGJyCr-gY,1175
  berryworld/aks_logs.py,sha256=Gb2_cokiZbEX01Yoptd0MxpDociaug-GrXdwliyxFBo,2293
  berryworld/allocation_solver.py,sha256=asFtaCAze6-eHUGWXA0kAp67UBS-Upj1KKdrVLj_ttQ,8513
  berryworld/app_logs.py,sha256=MKzKPYd3JuPfOQNAapIgaeZeFHw1z_w2mbn9I6QCADE,4180
@@ -10,28 +10,29 @@ berryworld/email_con.py,sha256=CVyEDVnc_iVeRTwJgIU31ToFgyix7L2yn4Ih9Wu7I5s,15806
  berryworld/email_logging.py,sha256=LeSrTExhQhar49gJR2wGC1dS0lqsNpFl9pS3eYWqnuo,4936
  berryworld/generate_env.py,sha256=LrqUH8AjCI6P0uU6BMBRYC9cnmyKkYcpXF1KKIzxkZ8,15900
  berryworld/handy_mix.py,sha256=SqJ3UjIjmIOgjbp-_1eyKQJ3OJLN7OfpDQIASvjNZ_Q,9672
- berryworld/logging.py,sha256=tOxzaFALQm3aVVEc3I7t8MU6PwgUI9VSnzNCH9yRryo,1013
+ berryworld/logging.py,sha256=mFFueg1OKudoYFDVJIzCYNY7lRipoefllJiHzytkmzk,1010
  berryworld/logic_apps.py,sha256=a0uU4tNO3v2w7grdBv-OOx4hUf7VBIerJpwZ9U-29dQ,14591
  berryworld/microsoft_teams.py,sha256=8uPo0yku-euBj2VdzBoZCeX3IcsCCOqISLqaVZUVxfA,16030
  berryworld/persistent_storage.py,sha256=L15kLyzN42T6UB1WAg8rFXJq3Mdb1M8Sw4P5YQaUN84,8711
  berryworld/pickle_management.py,sha256=5o6UuXBpTj23Jgpz6sj9V-vdcdWBK1xMEckWxT-Whj4,2436
  berryworld/power_automate.py,sha256=V86QEGG9H36DrDvod9Q6yp8OUu307hfYcXJhw06pYrA,27912
+ berryworld/python_logs.py,sha256=mzRSO4sywKrr_4KC5_WetRs4XJ9uiD1OaUPOGwsEib0,5804
  berryworld/sharepoint_con.py,sha256=nmyZJIcaAKJ6Y-ti4gQbvzA_rRbrMGIxTDXe4eP-tiI,44950
  berryworld/snowflake_conn.py,sha256=L0ePgTKa3tvaxj88BZmsjS6cFp3ZU3rytw7S2jkuA-U,3161
  berryworld/sql_conn.py,sha256=6kUR3oLXweakz2IBc4zI1ZMqRoN8K6nbQneHhenM-7I,47668
- berryworld/sql_connenction.py,sha256=jqXaYZ204HdaIwaQjBlKvrAiJIj6QS_FASb-RYEkT-g,105053
+ berryworld/sql_connenction.py,sha256=IJAOsQ7p0tMFEEucazmmkS3lNonf_YFWftyZ_kGPHj8,104150
  berryworld/teams_logging.py,sha256=8NwXyWr4fLj7W6GzAm2nRQCGFDxibQpAHDHHD24FrP8,6997
  berryworld/transportation_solver.py,sha256=tNc1JJk71azIBccdWVHbqcvXWhalOdKffv6HmBD6tG0,5014
  berryworld/verify_keys.py,sha256=X7VUHqYDklWPDO0bGVHIOXeLq5Qi4fZRZbHYw5x7UnA,4356
  berryworld/vivantio.py,sha256=QfZo0UKqkzVRg_LyiwivNd3aEup4TH57x4KxLZkCJwc,10627
  berryworld/vivantio_logging.py,sha256=ciy7gA4u3FrgUIpEBnMgocbNPp6jcu9TPoy-kLcrTZU,5736
  berryworld/xml_parser.py,sha256=HWD71NaTN3DaIOGT6Wzxs4CEsroFhGQwe9iPLIL80Co,957
- berryworld-1.0.0.197234.dist-info/licenses/LICENSE,sha256=vtkVCJM6E2af2gnsi2XxKPr4WY-uIbvzVLXieFND0UU,1074
+ berryworld-1.0.0.199020.dist-info/licenses/LICENSE,sha256=vtkVCJM6E2af2gnsi2XxKPr4WY-uIbvzVLXieFND0UU,1074
  tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/test_allocation_config.py,sha256=e12l6fE9U57eSPS35g6ekJ_hol7-RHg89JV60_m1BlE,4633
  tests/test_handy_mix_config.py,sha256=Un56mz9KJmdn4K4OwzHAHLSRzDU1Xv2nFrONNuzOG04,2594
  tests/test_xml_parser.py,sha256=3QTlhFEd6KbK6nRFKZnc35tad6wqukTbe4QrFi8mr_8,859
- berryworld-1.0.0.197234.dist-info/METADATA,sha256=uTCAY4_FLQRCSQZ-WQDjJlCD7bqH11YSbVRKZltYhRA,1445
- berryworld-1.0.0.197234.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- berryworld-1.0.0.197234.dist-info/top_level.txt,sha256=GIZ5qy-P5oxfEH755vA1IMFeTVdX3-40JxMe6nOe5I8,17
- berryworld-1.0.0.197234.dist-info/RECORD,,
+ berryworld-1.0.0.199020.dist-info/METADATA,sha256=yk85pOBoxVb2NakviW8KMRrMNGzDmvE_-YHnYFnvy7E,1445
+ berryworld-1.0.0.199020.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ berryworld-1.0.0.199020.dist-info/top_level.txt,sha256=GIZ5qy-P5oxfEH755vA1IMFeTVdX3-40JxMe6nOe5I8,17
+ berryworld-1.0.0.199020.dist-info/RECORD,,