berryworld 1.0.0.179097__py3-none-any.whl → 1.0.0.197234__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
berryworld/__init__.py CHANGED
@@ -3,14 +3,12 @@ from .handy_mix import HandyMix
  from .transportation_solver import TransportationAlgorithm
  from .allocation_solver import AllocationSolver
  from .pickle_management import PickleManagement
- from .postgres_connection import Postgresql
  from .email_logging import EmailLogging
  from .verify_keys import Verify
- from .credentials import PostgresCredentials, SQLCredentials, BCCredentials, CDSCredentials, SharePointCredentials, \
- WebServiceCredentials, MicrosoftTeamsCredentials
+ from .credentials import SnowflakeCredentials, SQLCredentials, WebServiceCredentials, MicrosoftTeamsCredentials
  from .persistent_storage import PersistentStorage
  from .generate_env import EnvVariables
- from .sharepoint_con import SharepointConnection
+ from .sharepoint_con import SharepointConnection, SharepointConn
  from .email_con import EmailConnection
  from .app_logs import AppLogs
  from .app_logs_query import AppLogsQuery
@@ -23,3 +21,6 @@ from .microsoft_teams import MicrosoftTeams
  from .vivantio import Vivantio
  from .teams_logging import TeamsLogging
  from .vivantio_logging import VivantioLogging
+ from .snowflake_conn import SnowflakeConn
+ from .logging import PythonLogs
+ from .sql_connenction import SQLConnection, SQLConnectionPool, SQLPoolEngine
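For orientation, a minimal usage sketch of the reshuffled public API in 1.0.0.197234, importing only names that appear in the __init__.py above (the Postgres-related exports are gone); this is illustrative, not taken from the package's own documentation:

# Hypothetical import sketch; names come from berryworld/__init__.py in the new version.
from berryworld import (
    SnowflakeCredentials,   # replaces PostgresCredentials in the credentials module
    SnowflakeConn,          # new Snowflake connection helper
    PythonLogs,             # new batch-log helper from berryworld.logging
    SQLConnection,          # re-exported from the (misspelled) sql_connenction module
)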
berryworld/cache_data.py CHANGED
@@ -50,7 +50,7 @@ class CacheManagement:
  def dump_pickle(df, filepath):
  """ Save the data to a filepath
  :param df: DataFrame gathering the data
- :param path: Path and filename to the file destination
+ :param filepath: Path and filename to the file destination
  """
  pickle_out = open(filepath, 'wb')
  pickle.dump(df, pickle_out)
@@ -59,7 +59,7 @@ class CacheManagement:
  @staticmethod
  def retrieve_pickle(filepath):
  """ Get the data from the indicated filepath
- :param path: Path and filename to the file destination
+ :param filepath: Path and filename to the file destination
  """
  pickle_in = open(filepath, "rb")
  data_df = pickle.load(pickle_in)
@@ -69,7 +69,7 @@ class CacheManagement:
  @staticmethod
  def remove_pickle(filepath):
  """ Delete a file allocated in filepath
- :param path: Path and filename to the file destination
+ :param filepath: Path and filename to the file destination
  """
  if os.path.exists(filepath):
  os.remove(filepath)
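To illustrate the corrected docstrings, a hedged sketch of the three pickle helpers, assuming they remain static methods of CacheManagement as shown (the path is a placeholder and its directory must already exist):

import pandas as pd
from berryworld.cache_data import CacheManagement

df = pd.DataFrame({"a": [1, 2, 3]})
CacheManagement.dump_pickle(df, "sample.pkl")            # write the DataFrame to filepath
cached = CacheManagement.retrieve_pickle("sample.pkl")   # read it back
CacheManagement.remove_pickle("sample.pkl")              # delete the file if it exists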
berryworld/credentials.py CHANGED
@@ -71,133 +71,6 @@ class SQLCredentials:
  raise ValueError("Variable %s not found" % str(e))


- class PostgresCredentials:
- def __init__(self, db_name, server_type=None):
- if db_name is None:
- raise ValueError("Please provide a value for db_name")
- self.db_name = db_name
- self.server_type = server_type
-
- def simple_creds(self):
- if self.server_type is None:
- raise ValueError("Please provide a value for server_type")
-
- try:
- if os.name == 'nt':
- server_name = os.environ.get(
- "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_LOCAL_SERVER_NAME')
- else:
- server_name = os.environ.get(
- "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_SERVER_NAME')
-
- db_name = os.environ.get(
- "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_DB_NAME')
- user_name = os.environ.get(
- "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_USER_NAME')
- password = os.environ.get(
- "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_PASSWORD')
-
- return {'server_name': re.sub(r'(\\)\1*', r'\1', server_name),
- 'db_name': db_name,
- 'user_name': user_name,
- 'password': password}
- except ValueError as e:
- raise ValueError("Variable %s not found" % str(e))
-
-
- class BCCredentials:
- def __init__(self, db_name=None, auth=False):
- self.db_name = db_name
- self.auth = auth
-
- def simple_creds(self):
- try:
- if self.auth:
- scope = os.environ.get("BC_AUTH_SCOPE")
- client_id = os.environ.get("BC_AUTH_CLIENT_ID")
- client_secret = os.environ.get("BC_AUTH_CLIENT_SECRET")
-
- return {'scope': scope,
- 'client_id': client_id,
- 'client_secret': client_secret}
- elif self.db_name is not None:
- server_type = os.environ.get(f"BC_ENV_SERVER_{self.db_name.upper()}")
-
- return {'server_type': server_type}
- else:
- raise ValueError("Please provide a valid input")
-
- except ValueError as e:
- raise ValueError("Variable %s not found" % str(e))
-
-
- class CDSCredentials:
- def __init__(self, env_name, webhook=False, auth=False):
- self.env_name = env_name
- self.webhook = webhook
- self.auth = auth
-
- def simple_creds(self):
- try:
- if self.auth:
- scope = os.environ.get("CDS_AUTH_SCOPE")
- client_id = os.environ.get("CDS_AUTH_CLIENT_ID")
- client_secret = os.environ.get("CDS_AUTH_CLIENT_SECRET")
-
- return {'scope': scope,
- 'client_id': client_id,
- 'client_secret': client_secret}
- else:
- server = os.environ.get(f"CDS_ENV_SERVER_{self.env_name.upper()}")
- organisation_id = os.environ.get(f"CDS_ENV_ORG_{self.env_name.upper()}")
- environment_prefix = os.environ.get(f"CDS_ENV_PREFIX_{self.env_name.upper()}")
- environment_url = os.environ.get(f"CDS_ENV_URL_{self.env_name.upper()}")
- if self.webhook:
- environment_name = os.environ.get(f"CDS_ENV_NAME_{self.env_name.upper()}")
- else:
- environment_name = self.env_name
-
- return {'server': server,
- 'environment_name': environment_name,
- 'organisation_id': organisation_id,
- 'environment_prefix': environment_prefix,
- 'environment_url': environment_url}
-
- except ValueError as e:
- raise ValueError("Variable %s not found" % str(e))
-
-
- class SharePointCredentials:
- def __init__(self, site=None):
- self.site = site
-
- def simple_creds(self):
- try:
- if self.site is None:
- raise ValueError("Please provide a value for site")
-
- client_id = os.environ.get(f"SHAREPOINT_CLIENT_ID_{self.site.upper()}")
- scopes = os.environ.get(f"SHAREPOINT_SCOPES_{self.site.upper()}")
- organisation_id = os.environ.get(f"SHAREPOINT_ORG_{self.site.upper()}")
- username = os.environ.get(f"SHAREPOINT_USER_{self.site.upper()}")
- password = os.environ.get(f"SHAREPOINT_PASSWORD_{self.site.upper()}")
- site_id = os.environ.get(f"SHAREPOINT_SITE_ID_{self.site.upper()}")
- site_name = os.environ.get(f"SHAREPOINT_SITE_NAME_{self.site.upper()}")
- api_version = os.environ.get(f"SHAREPOINT_API_VERSION_{self.site.upper()}")
-
- return {'client_id': client_id,
- 'scopes': scopes,
- 'organisation_id': organisation_id,
- 'username': username,
- 'password': password,
- 'site_id': site_id,
- 'site_name': site_name,
- 'api_version': api_version}
-
- except ValueError as e:
- raise ValueError("Variable %s not found" % str(e))
-
-
  class WebServiceCredentials:
  def __init__(self, service=None):
  self.service = service
@@ -209,15 +82,18 @@ class WebServiceCredentials:

  try:
  user_name = os.environ.get(f"WEBSERVICE_USER_{self.service.upper()}")
- except Exception:
+ except Exception as e:
+ print(e)
  user_name = ''
  try:
  password = os.environ.get(f"WEBSERVICE_PASSWORD_{self.service.upper()}")
- except Exception:
+ except Exception as e:
+ print(e)
  password = ''
  try:
  access_token = os.environ.get(f"WEBSERVICE_ACCESS_TOKEN_{self.service.upper()}")
- except Exception:
+ except Exception as e:
+ print(e)
  access_token = ''

  return {'user_name': user_name,
@@ -250,3 +126,23 @@ class MicrosoftTeamsCredentials:

  except ValueError as e:
  raise ValueError("Variable %s not found" % str(e))
+
+
+ class SnowflakeCredentials:
+ def __init__(self, db_name):
+ if db_name is None:
+ raise ValueError("Please provide a value for db_name")
+ self.db_name = db_name
+
+ def simple_creds(self):
+ try:
+ account = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-ACCOUNT')
+ user_name = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-USERNAME')
+ password = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-PASSWORD')
+
+ return {
+ 'account': account,
+ 'user_name': user_name,
+ 'password': password}
+ except ValueError as e:
+ raise ValueError("Variable %s not found" % str(e))
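A hedged sketch of how the new SnowflakeCredentials class appears to be used; the environment variable names follow the hyphenated pattern in simple_creds() above, and the "sales" database name is purely illustrative:

import os
from berryworld import SnowflakeCredentials

# Illustrative environment; os.environ.get returns None for any variable that is missing.
os.environ["SNOWFLAKE-SALES-ACCOUNT"] = "my_account.eu-west-1"
os.environ["SNOWFLAKE-SALES-USERNAME"] = "svc_reporting"
os.environ["SNOWFLAKE-SALES-PASSWORD"] = "example-secret"

creds = SnowflakeCredentials("sales").simple_creds()
# creds == {'account': 'my_account.eu-west-1', 'user_name': 'svc_reporting', 'password': 'example-secret'}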
berryworld/devops.py CHANGED
@@ -231,7 +231,8 @@ class DevOps:
  }
  ],
  "commits": [{
- "comment": f"Adding {environment_name} PowerAutomate properties via API - {run_time}.",
+ "comment": f"Adding {environment_name} PowerAutomate properties via API - {run_time}. "
+ f"skip-secret-scanning:true",
  "changes": payload
  }]
  })
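For reference, a small illustration of how the amended commit comment renders once the two f-string pieces are concatenated (the environment name and timestamp are placeholders):

environment_name = "UAT"
run_time = "2024-01-01 00:00:00"
comment = (f"Adding {environment_name} PowerAutomate properties via API - {run_time}. "
           f"skip-secret-scanning:true")
print(comment)
# Adding UAT PowerAutomate properties via API - 2024-01-01 00:00:00. skip-secret-scanning:true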
berryworld/email_con.py CHANGED
@@ -1,3 +1,4 @@
+ import ast
  import base64
  import json
  import copy
@@ -12,12 +13,49 @@ from msal import ClientApplication
  class EmailConnection:
  """ Tooling to connect to Outlook and manage emails. Use the Graphql Outlook API """

- def __init__(self, client_id, authority, client_secret, email_user, email_password, days_back=10):
- self.client_id = client_id
- self.authority = authority
- self.client_secret = client_secret
- self.email_user = email_user
- self.email_password = email_password
+ def __init__(self, email_creds, days_back=10):
+ """ Initialize the class
+ :param email_creds: Dictionary containing the credentials to connect to the email account
+ :param days_back: Number of days back to query emails
+
+ email_creds = {
+ 'authority': '',
+ 'box': '',
+ 'client_id': '',
+ 'client_secret': '',
+ 'pwd': ''
+ }
+ """
+ try:
+ self.email_creds = ast.literal_eval(email_creds)
+ except Exception as e:
+ raise ValueError(f'Email credentials not properly formatted. ERROR: {e}')
+
+ if 'authority' not in self.email_creds.keys():
+ raise ValueError(f"Authority not provided in email credentials")
+ else:
+ self.authority = self.email_creds['authority']
+
+ if 'client_id' not in self.email_creds.keys():
+ raise ValueError(f"ClientId not provided in email credentials")
+ else:
+ self.client_id = self.email_creds['client_id']
+
+ if 'client_secret' not in self.email_creds.keys():
+ raise ValueError(f"Client Secret not provided in email credentials")
+ else:
+ self.client_secret = self.email_creds['client_secret']
+
+ if 'box' not in self.email_creds.keys():
+ raise ValueError(f"Email Box not provided in email credentials")
+ else:
+ self.email_user = self.email_creds['box']
+
+ if 'pwd' not in self.email_creds.keys():
+ raise ValueError(f"Password not provided in email credentials")
+ else:
+ self.email_password = self.email_creds['pwd']
+
  self.base_url = "https://graph.microsoft.com/v1.0"
  self.headers = self.get_headers()
  self.days_back = days_back
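Because __init__ now parses email_creds with ast.literal_eval, the credentials are passed as a single string containing a Python dict literal with exactly the keys listed in the docstring. A hedged sketch (all values are placeholders; note that constructing the object immediately requests a Graph token via get_headers()):

from berryworld import EmailConnection

email_creds = """{
    'authority': 'https://login.microsoftonline.com/<tenant-id>',
    'box': 'shared.mailbox@example.com',
    'client_id': '<app-client-id>',
    'client_secret': '<app-client-secret>',
    'pwd': '<mailbox-password>'
}"""

mail = EmailConnection(email_creds, days_back=5)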
berryworld/email_logging.py CHANGED
@@ -59,16 +59,24 @@ class EmailLogging:
  :param critical: Indicate whether it should avoid sending successful logs
  :param proposed_solution: Proposed solution to the error message
  """
- unsuccessful_columns = {'Successful': 0,
- 'FinishedDate': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
- 'Section': section,
- 'ErrorMessage': str(error_message).replace("'", '"'),
- 'Critical': 1 if critical is True else 0}
- self.log_df = self.log_df.assign(**unsuccessful_columns)
- if proposed_solution is not None:
- self.log_df = self.log_df.assign(**{'ProposedSolution': proposed_solution})
- self.sql_con.insert(self.log_df, 'Logging', 'PythonLogs', print_sql=self.print_sql)
- self.failure_type.append(critical)
+ save_failure = True
+ if section is not None:
+ if (('prod' not in str(self.sql_con.server)) | ('prd' not in str(self.sql_con.server))) \
+ & (('connection' in str(section).lower()) & ('dw' in str(section).lower())):
+ print('Avoiding to report a connection DW error in a non-production environment')
+ save_failure = False
+
+ if save_failure:
+ unsuccessful_columns = {'Successful': 0,
+ 'FinishedDate': datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
+ 'Section': section,
+ 'ErrorMessage': str(error_message).replace("'", '"'),
+ 'Critical': 1 if critical is True else 0}
+ self.log_df = self.log_df.assign(**unsuccessful_columns)
+ if proposed_solution is not None:
+ self.log_df = self.log_df.assign(**{'ProposedSolution': proposed_solution})
+ self.sql_con.insert(self.log_df, 'Logging', 'PythonLogs', print_sql=self.print_sql)
+ self.failure_type.append(critical)

  @staticmethod
  def register_failure(email_log, section, error_, solutions=None):
berryworld/generate_env.py CHANGED
@@ -1,10 +1,203 @@
- from .handy_mix import HandyMix
+ import os
+ import ast
  import yaml
+ import json
+ from .handy_mix import HandyMix
+ from azure.identity import ClientSecretCredential
+ from azure.keyvault.secrets import SecretClient


  class EnvVariables:
  """ Generate the environmental variables to be used by kubernetes """

+ def __init__(self, env=None):
+ """
+ :param env: Environment folder to upload the terraform .tfvars file generated
+ """
+ self.env = env
+
+ def extract_akv_config_from_tf(self, config_path):
+ """ Extract the AKV configuration from the Terraform file
+ :param config_path: Path to the Terraform file
+ """
+ with open(config_path, "r") as f:
+ lines = f.readlines()
+
+ start, end = None, None
+ for idx, line in enumerate(lines):
+ if "akv_config" in line and "=" in line and "{" in line:
+ start = idx + 1
+ if start and "}" in line:
+ end = idx
+ break
+
+ if start is None or end is None:
+ raise ValueError("akv_config block not found in the tf file")
+
+ kv_lines = lines[start:end]
+ config = {}
+ for line in kv_lines:
+ if "=" in line:
+ key, value = line.strip().split("=", 1)
+ key = key.strip()
+ value = value.strip()
+
+ # Use ast.literal_eval to safely evaluate lists or strings
+ try:
+ parsed_value = ast.literal_eval(value)
+ except:
+ # fallback if parsing fails
+ parsed_value = value.strip('"')
+
+ config[key] = parsed_value
+
+ return config
+
+ def get_secret_client(self, config):
+ """ Create a Key Vault client
+ :param config: Dictionary containing Azure Key Vault configuration
+ """
+ tenant_id = config["AZURE-TENANT-ID"]
+ client_id = config["AZURE-CLIENT-ID"]
+ client_secret = config["AZURE-CLIENT-SECRET"]
+ vault_url = f"https://{config['AZURE-KEY-VAULT-NAME']}.vault.azure.net/"
+ credential = ClientSecretCredential(tenant_id, client_id, client_secret)
+
+ return SecretClient(vault_url=vault_url, credential=credential)
+
+ def fetch_secrets_by_tag(self, config):
+ """ Fetch enabled secrets from Azure Key Vault based on tags
+ :param config: Dictionary containing Azure Key Vault configuration
+ """
+ client = self.get_secret_client(config)
+ project_tags = config.get("AZURE-KEY-VAULT-PROJECT-TAGS", [])
+ secrets = {}
+ for prop in client.list_properties_of_secrets():
+ if not prop.enabled:
+ continue
+
+ if prop.tags:
+ if any("project" in key and value in project_tags for key, value in prop.tags.items()):
+ secret = client.get_secret(prop.name)
+ key = str(prop.name)
+ secrets[key] = secret.value
+ return secrets
+
+ def fetch_all_secrets(self, config):
+ """ Fetch all enabled secrets from the Key Vault
+ :param config: Dictionary containing Azure Key Vault configuration
+ """
+ client = self.get_secret_client(config)
+ secrets = {}
+ for prop in client.list_properties_of_secrets():
+ if prop.enabled:
+ secret = client.get_secret(prop.name)
+ secrets[secret.name] = {
+ "value": secret.value,
+ "tags": prop.tags or {}
+ }
+ return secrets
+
+ def create_or_update_akv_secret(self, config, name=None, value=None, tags=None, tag_name=None, tag_value=None,
+ disable_old=True, secrets_dict=None):
+ """ Create or update a secret / all secrets in Key Vault :
+ :param config: Dictionary containing Azure Key Vault configuration
+ :param name: Name of the secret to be created / updated
+ :param value: Value of the secret to be created / updated
+ :param tags: Dictionary containing Azure Key Vault configuration
+ :param tag_name: Tag name of the secret to be created / updated
+ :param tag_value: Tag value of the secret to be created / updated
+ :param disable_old: Flag if previous version should be disabled
+ :param secrets_dict: Dictionary of all the secrets from AKV
+ """
+ client = self.get_secret_client(config)
+
+ # If secrets_dict is provided, loop through it
+ if secrets_dict is not None:
+ for s_name, s_data in secrets_dict.items():
+ self.create_or_update_akv_secret(
+ config,
+ name=s_name,
+ value=s_data["value"],
+ tags=s_data.get("tags"),
+ disable_old=disable_old
+ )
+ return
+
+ if (name is None and value is None) and secrets_dict is None:
+ raise ValueError("Must provide either secrets_dict or both name and value")
+
+ # Check the latest version of the secret
+ latest_secret = None
+ latest_tags = {}
+ try:
+ latest_secret = client.get_secret(name)
+ latest_tags = latest_secret.properties.tags or {}
+ except Exception:
+ pass
+
+ updated_tags = {**latest_tags}
+ if tags is not None:
+ updated_tags.update(tags)
+ elif tag_name is not None and tag_value is not None:
+ updated_tags[tag_name] = tag_value
+
+ # Check if anything changed in the value or tag
+ value_changed = latest_secret is None or latest_secret.value != value
+ tags_changed = updated_tags != latest_tags
+
+ if not value_changed and not tags_changed:
+ print(f"Secret {name} already up to date. Skipping.")
+ return
+
+ # Create new version
+ new_secret = client.set_secret(name, value, tags=updated_tags)
+ new_version = new_secret.properties.version
+ print(f"Secret {name} updated. New version: {new_version}.")
+
+ # Disable previous versions
+ if disable_old:
+ for secret_prop in client.list_properties_of_secret_versions(name):
+ if secret_prop.version != new_version and secret_prop.enabled:
+ client.update_secret_properties(secret_prop.name, secret_prop.version, enabled=False)
+
+ def generate_akv_env_file(self, config_path=None, env_path=None, fetch_all=False):
+ """ Generate the environment variables file from Azure Key Vault secrets
+ :param config_path: Path to the Terraform configuration file (optional)
+ :param env_path: Path to the destination .env file (optional)
+ :param fetch_all: Flag if all the secrets should be fetched
+ """
+ config = self.extract_akv_config_from_tf(config_path)
+ if fetch_all:
+ secrets = self.fetch_all_secrets(config)
+ else:
+ secrets = self.fetch_secrets_by_tag(config)
+
+ if fetch_all:
+ return secrets
+
+ if not fetch_all:
+ # Create .env file for UnitTest
+ with open(env_path, "w") as f:
+ for key, value in secrets.items():
+ f.write(f"{key}={value}\n")
+
+ # Create .env file for container apps
+ if os.name != "nt" and self.env is not None:
+ # Linux/Mac: generate terraform .tfvars file
+ tfvars_path = f"./environments/{self.env}/secrets.auto.tfvars.json"
+ os.makedirs(os.path.dirname(tfvars_path), exist_ok=True)
+ with open(tfvars_path, "w") as f:
+ json.dump({"keyvault_secrets": secrets}, f, indent=2)
+
+ def generate_env_tf(self, config_path='./environments/prod/step01.tf', env_path='.env', fetch_all=False):
+ """ Main function to parse arguments and generate AKV environment variables file.
+ :param config_path: Path to the Terraform configuration file
+ :param env_path: Path to the destination .env file
+ :param fetch_all: Flag if all the secrets should be fetched
+ """
+ self.generate_akv_env_file(config_path=config_path, env_path=env_path, fetch_all=fetch_all)
+
  @staticmethod
  def generate_env_file(yaml_path='./environments/prd1/step01.yaml', env_path='./.env'):
  """ Save the data from the yaml file into a .env file to be used by decouple
berryworld/handy_mix.py CHANGED
@@ -65,7 +65,21 @@ class HandyMix:
  return result

  @staticmethod
- def likely_match(srs, match_str, n_res=1, threshold=False):
+ def grab_n_elem(n, comp_):
+ max_list = heapq.nlargest(n, comp_)
+ like_matches = np.array([elem in max_list for elem in comp_]).sum()
+ if like_matches > n:
+ list_ = [elem in max_list for elem in comp_]
+ k = 0
+ for elem in list_:
+ if (elem is True) & (k > n - 1):
+ list_[k] = False
+ k = + 1
+ return list_
+ else:
+ return [elem in max_list for elem in comp_]
+
+ def likely_match(self, srs, match_str, n_res=1, threshold=False):
  """ Look for the more similar element in a Series to the one provided in match_str. If there is a draw,
  It will pick the first matching elements. Threshold only consider the elements if their likelihood is
  greater than that threshold variable
@@ -88,29 +102,15 @@ class HandyMix:
  aux = SequenceMatcher(None, element, match_str)
  comp.append(aux.ratio())

- def grab_n_elem(n, comp):
- max_list = heapq.nlargest(n, comp)
- like_matches = np.array([elem in max_list for elem in comp]).sum()
- if like_matches > n:
- list_ = [elem in max_list for elem in comp]
- k = 0
- for elem in list_:
- if (elem is True) & (k > n - 1):
- list_[k] = False
- k = + 1
- return list_
- else:
- return [elem in max_list for elem in comp]
-
  if threshold:
  # Return n elements above the threshold
  if any(elem >= threshold for elem in comp):
- return grab_n_elem(n_res, comp)
+ return self.grab_n_elem(n_res, comp)
  else:
  return [False] * comp.__len__()
  else:
  # Return n elements likely to match
- return grab_n_elem(n_res, comp)
+ return self.grab_n_elem(n_res, comp)

  @staticmethod
  def remove_line_chars(input_text, remove_duplicate_white_spaces=False):
@@ -226,11 +226,15 @@ class HandyMix:
  :param data: DataFrame to transform its integer columns to strings
  :return: DataFrame with its columns translated to strings
  """
- float_cols = data.columns[data.dtypes == 'float']
+ float_cols = data.select_dtypes(include=['float']).columns
+
  for float_col in float_cols:
- if data[float_col].dtype == float:
- if all(['.0' in val for val in data[float_col].loc[~data[float_col].isnull()].astype(str).values]):
- data.loc[~data[float_col].isnull(), float_col] = data.loc[
- ~data[float_col].isnull(), float_col].astype(str).str.replace('.0', '', regex=True)
+ mask = ~data[float_col].isnull()
+ values = data.loc[mask, float_col]
+
+ data[float_col] = data[float_col].astype(object)
+
+ if ((values % 1) == 0).all():
+ data.loc[mask, float_col] = values.astype(int).astype(str)

  return data
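Since grab_n_elem is now a class-level helper and likely_match is no longer a staticmethod, a hedged usage sketch, assuming HandyMix can be instantiated without arguments (the Series and search string are illustrative):

import pandas as pd
from berryworld import HandyMix

mix = HandyMix()
names = pd.Series(["Blueberry Ltd", "Blackberry Co", "Strawberry plc"])

# Boolean mask flagging the single closest match to "Blueberry"
mask = mix.likely_match(names, "Blueberry", n_res=1)
print(names[mask])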
berryworld/logging.py ADDED
@@ -0,0 +1,31 @@
+
+
+
+ class PythonLogs:
+ """ Register the Python Logs """
+ def __init__(self, conn, batch_process):
+ self.conn = conn
+ self.batch_process = batch_process
+ self.batch_log_id = None
+
+ def start(self, log_type):
+ """ Start the Python Log
+ :param log_type: Type of the log
+ """
+ batch_log = self.conn.run_statement(
+ f"EXEC d365bc.spPythonBatchLogInsert @DataType = {self.batch_process}, @LogType = '{log_type}'",
+ commit_as_transaction=False)
+ self.batch_log_id = batch_log['OUTPUT'][0]
+
+ def end(self, batch_log_id=None):
+ """ End the Python Log
+ :param batch_log_id: Batch Log Id
+ """
+ if batch_log_id is None:
+ batch_log_id = self.batch_log_id
+ if batch_log_id is None:
+ raise Exception('Batch Log Id is not set')
+
+ self.conn.run_statement(
+ f"EXEC d365bc.spPythonBatchLogUpdate @BatchLogId = {batch_log_id}",
+ commit_as_transaction=False)
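A hedged sketch of the intended PythonLogs lifecycle; the stand-in connection below only mimics the run_statement(sql, commit_as_transaction=...) interface that the class relies on, whereas in practice one of berryworld's SQL connection objects would be passed:

from berryworld import PythonLogs

class FakeConn:
    """Stand-in connection for illustration only."""
    def run_statement(self, sql, commit_as_transaction=True):
        print(sql)
        return {"OUTPUT": [42]}   # pretend the insert procedure returned batch-log id 42

log = PythonLogs(FakeConn(), batch_process=1)   # batch_process value is illustrative
log.start("SalesImport")   # EXEC d365bc.spPythonBatchLogInsert @DataType = 1, @LogType = 'SalesImport'
log.end()                  # EXEC d365bc.spPythonBatchLogUpdate @BatchLogId = 42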
berryworld/microsoft_teams.py CHANGED
@@ -254,11 +254,11 @@ class MicrosoftTeams:
  vivantio_ticket_id = vivantio_ticket['Id'].values[0]
  display_id = vivantio_ticket['DisplayId'].values[0]
  vivantio_ticket_url = f'https://poupart.flex.vivantio.com/item/Ticket/{vivantio_ticket_id}'
- html_message = html_message + f'<br><b>Vivantio Ticket Id:</b> {display_id}' + \
- f'<br><b>Vivantio Ticket URL:</b> {vivantio_ticket_url}<br>'
+ html_message = html_message + f'<br><b>Vivantio Ticket Id:</b> {display_id}' \
+ f'<br><b>Vivantio Ticket URL:</b> {vivantio_ticket_url}<br>'

- html_message = html_message + '<br><b>Error Status:</b> ' + status + f'<br><b>Error Count:</b> {count}' + \
- '<br><b>Error Content:</b> <br>' + message
+ html_message = html_message + '<br><b>Error Status:</b> ' + status + f'<br><b>Error Count:</b> {count}' \
+ f'<br><b>Error Content:</b> <br>' + message

  payload = {}
  if importance: