berryworld 1.0.0.151213__py3-none-any.whl → 1.0.0.197234__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- berryworld/__init__.py +12 -6
- berryworld/allocation_solver.py +2 -2
- berryworld/cache_data.py +3 -3
- berryworld/credentials.py +55 -131
- berryworld/devops.py +257 -0
- berryworld/email_con.py +56 -11
- berryworld/email_logging.py +35 -9
- berryworld/generate_env.py +194 -1
- berryworld/handy_mix.py +26 -22
- berryworld/logging.py +31 -0
- berryworld/logic_apps.py +307 -0
- berryworld/microsoft_teams.py +377 -0
- berryworld/persistent_storage.py +115 -47
- berryworld/pickle_management.py +1 -1
- berryworld/power_automate.py +557 -0
- berryworld/sharepoint_con.py +499 -23
- berryworld/snowflake_conn.py +87 -0
- berryworld/sql_conn.py +120 -106
- berryworld/sql_connenction.py +2199 -0
- berryworld/teams_logging.py +131 -0
- berryworld/transportation_solver.py +4 -4
- berryworld/verify_keys.py +3 -1
- berryworld/vivantio.py +293 -0
- berryworld/vivantio_logging.py +126 -0
- berryworld-1.0.0.197234.dist-info/METADATA +48 -0
- berryworld-1.0.0.197234.dist-info/RECORD +37 -0
- {berryworld-1.0.0.151213.dist-info → berryworld-1.0.0.197234.dist-info}/WHEEL +1 -1
- berryworld/postgres_connection.py +0 -194
- berryworld/sql_connection.py +0 -946
- berryworld/sql_connection_polars.py +0 -189
- berryworld-1.0.0.151213.dist-info/METADATA +0 -36
- berryworld-1.0.0.151213.dist-info/RECORD +0 -30
- {berryworld-1.0.0.151213.dist-info → berryworld-1.0.0.197234.dist-info/licenses}/LICENSE +0 -0
- {berryworld-1.0.0.151213.dist-info → berryworld-1.0.0.197234.dist-info}/top_level.txt +0 -0
berryworld/__init__.py
CHANGED
@@ -2,19 +2,25 @@ from .xml_parser import XMLparser
 from .handy_mix import HandyMix
 from .transportation_solver import TransportationAlgorithm
 from .allocation_solver import AllocationSolver
-from .sql_connection import SQLConnection
 from .pickle_management import PickleManagement
-from .postgres_connection import Postgresql
 from .email_logging import EmailLogging
 from .verify_keys import Verify
-from .credentials import
-    WebServiceCredentials
+from .credentials import SnowflakeCredentials, SQLCredentials, WebServiceCredentials, MicrosoftTeamsCredentials
 from .persistent_storage import PersistentStorage
 from .generate_env import EnvVariables
-from .sharepoint_con import SharepointConnection
+from .sharepoint_con import SharepointConnection, SharepointConn
 from .email_con import EmailConnection
 from .app_logs import AppLogs
 from .app_logs_query import AppLogsQuery
-from .sql_connection_polars import SQLConnectionPolars
 from .sql_conn import SQLConn
 from .aks_logs import Logs
+from .devops import DevOps
+from .power_automate import PowerAutomate
+from .logic_apps import LogicApps
+from .microsoft_teams import MicrosoftTeams
+from .vivantio import Vivantio
+from .teams_logging import TeamsLogging
+from .vivantio_logging import VivantioLogging
+from .snowflake_conn import SnowflakeConn
+from .logging import PythonLogs
+from .sql_connenction import SQLConnection, SQLConnectionPool, SQLPoolEngine
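Note on the import surface: SQLConnection is no longer provided by the removed sql_connection.py; it is now re-exported from the new sql_connenction.py module (note the doubled "n"), alongside SQLConnectionPool and SQLPoolEngine. A minimal, hypothetical sketch of the new top-level imports after installing this version:

# Hypothetical usage sketch; names taken from the __init__.py diff above.
from berryworld import DevOps, LogicApps, MicrosoftTeams, SnowflakeConn, SQLConnection

# SQLConnection now resolves to berryworld.sql_connenction.SQLConnection; code that
# imported berryworld.sql_connection, berryworld.sql_connection_polars or
# berryworld.postgres_connection directly will break against 1.0.0.197234.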
berryworld/allocation_solver.py
CHANGED
@@ -61,8 +61,8 @@ class AllocationSolver:
         # The one with less potential future Ranking Value will be used in the first instance
         ranking_df_gr = ranking_df.loc[(ranking_df[index_prod].isin(max_alloc[index_prod])) & (
             ranking_df[index_sales].isin(max_alloc[index_sales]))].sort_values(rank_col, ascending=False)[:5]
-        weak_index = ranking_df_gr.groupby(index_prod, as_index=False).sum(
-
+        weak_index = ranking_df_gr.groupby(index_prod, as_index=False).sum(
+            numeric_only=True).sort_values(rank_col, ascending=False)[-1:]
 
         # Get production element to be allocated
         max_prod = production_df.loc[production_df[index_prod].isin(weak_index[index_prod])]
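The only functional change here is the explicit numeric_only=True on the grouped sum. Recent pandas releases no longer default to numeric-only aggregation, so the keyword keeps non-numeric columns out of the sum. A small, self-contained illustration (made-up data, not from the package):

import pandas as pd

df = pd.DataFrame({"prod": ["a", "a", "b"],
                   "note": ["x", "y", "z"],   # non-numeric column
                   "rank": [10, 5, 7]})

# numeric_only=True sums only the numeric columns per group and ignores 'note'
weak = df.groupby("prod", as_index=False).sum(
    numeric_only=True).sort_values("rank", ascending=False)[-1:]
print(weak)   # the group with the lowest summed rank ('b', 7), mirroring the line above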
berryworld/cache_data.py
CHANGED
@@ -50,7 +50,7 @@ class CacheManagement:
     def dump_pickle(df, filepath):
         """ Save the data to a filepath
         :param df: DataFrame gathering the data
-        :param
+        :param filepath: Path and filename to the file destination
         """
         pickle_out = open(filepath, 'wb')
         pickle.dump(df, pickle_out)
@@ -59,7 +59,7 @@ class CacheManagement:
     @staticmethod
     def retrieve_pickle(filepath):
         """ Get the data from the indicated filepath
-        :param
+        :param filepath: Path and filename to the file destination
         """
         pickle_in = open(filepath, "rb")
         data_df = pickle.load(pickle_in)
@@ -69,7 +69,7 @@ class CacheManagement:
     @staticmethod
     def remove_pickle(filepath):
         """ Delete a file allocated in filepath
-        :param
+        :param filepath: Path and filename to the file destination
         """
         if os.path.exists(filepath):
             os.remove(filepath)
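These hunks only complete the ":param filepath:" docstrings; the pickle helpers themselves are unchanged. For context, a hedged round-trip sketch (hypothetical path; this assumes dump_pickle is a static method like the other two and that retrieve_pickle returns the loaded object, as the surrounding code suggests):

import pandas as pd
from berryworld.cache_data import CacheManagement

df = pd.DataFrame({"a": [1, 2, 3]})
CacheManagement.dump_pickle(df, "tmp_cache.pkl")           # write the DataFrame to the filepath
cached = CacheManagement.retrieve_pickle("tmp_cache.pkl")  # read it back
CacheManagement.remove_pickle("tmp_cache.pkl")             # delete the file if it exists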
berryworld/credentials.py
CHANGED
@@ -7,6 +7,8 @@ load_dotenv(os.path.join(os.getcwd(), '.env'))
 
 class SQLCredentials:
     def __init__(self, db_name, server_type=None, azure=None):
+        if db_name is None:
+            raise ValueError("Please provide a value for db_name")
         self.db_name = db_name
         self.server_type = server_type
         self.azure = azure
@@ -24,9 +26,9 @@ class SQLCredentials:
             else:
                 server_name = os.environ.get("SQL_" + self.db_name.upper() + '_' + self.server_type.upper())
 
-
+            if os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME_' + self.server_type.upper()) is not None:
                 db_name = os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME_' + self.server_type.upper())
-
+            else:
                 db_name = os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME')
 
             user_name = os.environ.get("SQL_" + self.db_name.upper() + '_USER_NAME')
@@ -48,9 +50,9 @@ class SQLCredentials:
         except Exception as e:
             print(e)
             dev_ = None
-
+        if os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME_' + self.server_type.upper()) is not None:
             db_name = os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME_' + self.server_type.upper())
-
+        else:
            db_name = os.environ.get("SQL_" + self.db_name.upper() + '_DB_NAME')
         user_name = os.environ.get("SQL_" + self.db_name.upper() + '_USER_NAME')
         password = os.environ.get("SQL_" + self.db_name.upper() + '_PASSWORD')
@@ -69,156 +71,78 @@ class SQLCredentials:
             raise ValueError("Variable %s not found" % str(e))
 
 
-class
-    def __init__(self,
-        self.
-        self.server_type = server_type
+class WebServiceCredentials:
+    def __init__(self, service=None):
+        self.service = service
 
     def simple_creds(self):
-        if self.server_type is None:
-            raise ValueError("Please provide a value for server_type")
-
         try:
-            if
-
-                    "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_LOCAL_SERVER_NAME')
-            else:
-                server_name = os.environ.get(
-                    "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_SERVER_NAME')
-
-            db_name = os.environ.get(
-                "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_DB_NAME')
-            user_name = os.environ.get(
-                "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_USER_NAME')
-            password = os.environ.get(
-                "POSTGRESQL_" + self.db_name.upper() + '_' + self.server_type.upper() + '_PASSWORD')
-
-            return {'server_name': re.sub(r'(\\)\1*', r'\1', server_name),
-                    'db_name': db_name,
-                    'user_name': user_name,
-                    'password': password}
-        except ValueError as e:
-            raise ValueError("Variable %s not found" % str(e))
-
+            if self.service is None:
+                raise ValueError("Please provide a value for site")
 
-
-
-
-
+            try:
+                user_name = os.environ.get(f"WEBSERVICE_USER_{self.service.upper()}")
+            except Exception as e:
+                print(e)
+                user_name = ''
+            try:
+                password = os.environ.get(f"WEBSERVICE_PASSWORD_{self.service.upper()}")
+            except Exception as e:
+                print(e)
+                password = ''
+            try:
+                access_token = os.environ.get(f"WEBSERVICE_ACCESS_TOKEN_{self.service.upper()}")
+            except Exception as e:
+                print(e)
+                access_token = ''
 
-
-
-
-            scope = os.environ.get("BC_AUTH_SCOPE")
-            client_id = os.environ.get("BC_AUTH_CLIENT_ID")
-            client_secret = os.environ.get("BC_AUTH_CLIENT_SECRET")
-
-            return {'scope': scope,
-                    'client_id': client_id,
-                    'client_secret': client_secret}
-        elif self.db_name is not None:
-            server_type = os.environ.get(f"BC_ENV_SERVER_{self.db_name.upper()}")
-
-            return {'server_type': server_type}
-        else:
-            raise ValueError("Please provide a valid input")
+            return {'user_name': user_name,
+                    'password': password,
+                    'access_token': access_token}
 
         except ValueError as e:
             raise ValueError("Variable %s not found" % str(e))
 
 
-class
-    def __init__(self,
-        self.
-        self.webhook = webhook
-        self.auth = auth
+class MicrosoftTeamsCredentials:
+    def __init__(self, organisation_id=None):
+        self.organisation_id = organisation_id
 
     def simple_creds(self):
         try:
-            if self.
-
-            client_id = os.environ.get("CDS_AUTH_CLIENT_ID")
-            client_secret = os.environ.get("CDS_AUTH_CLIENT_SECRET")
-
-            return {'scope': scope,
-                    'client_id': client_id,
-                    'client_secret': client_secret}
-            else:
-                server = os.environ.get(f"CDS_ENV_SERVER_{self.env_name.upper()}")
-                organisation_id = os.environ.get(f"CDS_ENV_ORG_{self.env_name.upper()}")
-                environment_prefix = os.environ.get(f"CDS_ENV_PREFIX_{self.env_name.upper()}")
-                environment_url = os.environ.get(f"CDS_ENV_URL_{self.env_name.upper()}")
-                if self.webhook:
-                    environment_name = os.environ.get(f"CDS_ENV_NAME_{self.env_name.upper()}")
-                else:
-                    environment_name = self.env_name
-
-                return {'server': server,
-                        'environment_name': environment_name,
-                        'organisation_id': organisation_id,
-                        'environment_prefix': environment_prefix,
-                        'environment_url': environment_url}
-
-        except ValueError as e:
-            raise ValueError("Variable %s not found" % str(e))
-
+            if self.organisation_id is None:
+                self.organisation_id = os.environ.get("POUPART_ORGANISATION_ID")
 
-
-
-
-    def simple_creds(self):
-        try:
-            if self.site is None:
-                raise ValueError("Please provide a value for site")
+            client_id = os.environ.get("MICROSOFT_TEAMS_APP_CLIENT_ID")
+            client_secret = os.environ.get("MICROSOFT_TEAMS_APP_CLIENT_SECRET")
+            username = os.environ.get("MICROSOFT_TEAMS_USERNAME")
+            password = os.environ.get("MICROSOFT_TEAMS_PASSWORD")
 
-
-
-            username = os.environ.get(f"SHAREPOINT_USER_{self.site.upper()}")
-            password = os.environ.get(f"SHAREPOINT_PASSWORD_{self.site.upper()}")
-            site_id = os.environ.get(f"SHAREPOINT_SITE_ID_{self.site.upper()}")
-            site_name = os.environ.get(f"SHAREPOINT_SITE_NAME_{self.site.upper()}")
-            api_version = os.environ.get(f"SHAREPOINT_API_VERSION_{self.site.upper()}")
-
-            return {'client_id': client_id,
-                    'scopes': scopes,
-                    'organisation_id': organisation_id,
+            return {'organisation_id': self.organisation_id,
+                    'client_id': client_id,
+                    'client_secret': client_secret,
                     'username': username,
-                    'password': password
-                    'site_id': site_id,
-                    'site_name': site_name,
-                    'api_version': api_version}
+                    'password': password}
 
         except ValueError as e:
             raise ValueError("Variable %s not found" % str(e))
 
 
-class
-    def __init__(self,
-
+class SnowflakeCredentials:
+    def __init__(self, db_name):
+        if db_name is None:
+            raise ValueError("Please provide a value for db_name")
+        self.db_name = db_name
 
     def simple_creds(self):
         try:
-
-
-
-
-
-
-            user_name
-
-                password = os.environ.get(f"WEBSERVICE_PASSWORD_{self.service.upper()}")
-            except Exception:
-                password = ''
-            try:
-                access_token = os.environ.get(f"WEBSERVICE_ACCESS_TOKEN_{self.service.upper()}")
-            except Exception:
-                access_token = ''
-
-            return {'user_name': user_name,
-                    'password': password,
-                    'access_token': access_token}
-
+            account = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-ACCOUNT')
+            user_name = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-USERNAME')
+            password = os.environ.get("SNOWFLAKE-" + self.db_name.upper() + '-PASSWORD')
+
+            return {
+                'account': account,
+                'user_name': user_name,
+                'password': password}
        except ValueError as e:
            raise ValueError("Variable %s not found" % str(e))
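The rewrite drops the old Postgres/BC/CDS/SharePoint credential helpers and leaves four environment-variable-backed classes: SQLCredentials, WebServiceCredentials, MicrosoftTeamsCredentials and SnowflakeCredentials, each resolving secrets in simple_creds(). A hedged usage sketch for two of the new classes (variable names are taken from the diff; the database and service names and all values are placeholders):

import os
from berryworld.credentials import SnowflakeCredentials, WebServiceCredentials

# Placeholder environment, mirroring the (hyphenated) names read by simple_creds() above
os.environ["SNOWFLAKE-SALES-ACCOUNT"] = "my-account"
os.environ["SNOWFLAKE-SALES-USERNAME"] = "svc_user"
os.environ["SNOWFLAKE-SALES-PASSWORD"] = "***"

snowflake = SnowflakeCredentials("sales").simple_creds()
# -> {'account': 'my-account', 'user_name': 'svc_user', 'password': '***'}

web = WebServiceCredentials(service="vivantio").simple_creds()
# reads WEBSERVICE_USER_VIVANTIO, WEBSERVICE_PASSWORD_VIVANTIO and
# WEBSERVICE_ACCESS_TOKEN_VIVANTIO; variables that are not set come back as None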
berryworld/devops.py
ADDED
@@ -0,0 +1,257 @@
+import requests as req
+import json
+import pandas as pd
+from requests.adapters import HTTPAdapter
+from urllib3.util.retry import Retry
+import base64
+import datetime
+
+
+class DevOps:
+    def __init__(self, token=None, api_version=None):
+        """ Connect to Azure DevOps and backup the flows data
+        :param token: Azure DevOps PAT token
+        :param api_version: Azure DevOps API version
+        """
+
+        if token is None:
+            raise Exception('Token is required to connect to DevOps')
+
+        self.headers = {"Content-Type": "application/json",
+                        "Authorization": f"Basic {base64.b64encode(str(f':{token}').encode('utf-8')).decode('utf-8')}"}
+
+        self.session = req.Session()
+        retry = Retry(total=3, status_forcelist=[429, 500, 502, 504], backoff_factor=30)
+        retry.BACKOFF_MAX = 190
+
+        adapter = HTTPAdapter(max_retries=retry)
+        self.session.mount('http://', adapter)
+        self.session.mount('https://', adapter)
+
+        self.base_url = 'https://dev.azure.com/BerryworldGroup'
+
+        if api_version is None:
+            self.api_version = 'api-version=7.0'
+        else:
+            self.api_version = f'api-version={api_version}'
+
+    def session_request(self, method, url, headers=None, data=None, content=False):
+        """ Make a request to Azure DevOps
+        :param method: Request method
+        :param url: Request URL
+        :param headers: Request headers
+        :param data: Request data
+        :param content: Content request
+        """
+
+        if headers is None:
+            headers = self.headers
+
+        if data is None:
+            response = self.session.request(method, url, headers=headers)
+        else:
+            response = self.session.request(method, url, headers=headers, data=data)
+
+        if response.status_code == 204:
+            return pd.DataFrame()
+        elif str(response.status_code).startswith('2'):
+            if content:
+                return response
+
+            response = (json.loads(response.text))
+            if 'value' in response:
+                response_df = pd.DataFrame(response['value'])
+            else:
+                response_df = pd.DataFrame([response])
+
+        else:
+            raise Exception(f'Status: {response.status_code} - {response.text}')
+
+        return response_df
+
+    def list_projects(self):
+        """ List all projects in the DevOps organisation
+        """
+        projects_url = f'{self.base_url}/_apis/projects'
+        projects_df = self.session_request('GET', projects_url)
+
+        return projects_df
+
+    def list_repositories(self, project_name):
+        """ List all repositories in a project
+        :param project_name: DevOps project name
+        """
+        repo_url = f"{self.base_url}/{project_name}/_apis/git/repositories"
+        repo_df = self.session_request('GET', repo_url)
+
+        return repo_df
+
+    def list_repository_items(self, project_name, repo_id, path=None, branch=None):
+        """ List all items in a repository
+        :param project_name: DevOps project name
+        :param repo_id: DevOps repository ID
+        :param path: Path to the repository
+        :param branch: Branch to list
+        """
+        repo_items_url = f"{self.base_url}/{project_name}/_apis/git/repositories/{repo_id}/itemsbatch?" \
+                         f"{self.api_version}"
+
+        if path is None:
+            path = "/"
+
+        if branch is None:
+            branch = "master"
+
+        body = json.dumps({
+            "itemDescriptors": [
+                {
+                    "path": path,
+                    "version": branch,
+                    "versionType": "branch",
+                    "versionOptions": "none",
+                    "recursionLevel": "full"
+                }
+            ],
+            "includeContentMetadata": "true"
+        })
+
+        repo_items_df = self.session_request('POST', repo_items_url, data=body)
+
+        return repo_items_df
+
+    def list_pipeline_releases(self, project_name):
+        """ List all pipeline releases in a project
+        :param project_name: DevOps project name
+        """
+        release_definitions_url = f"https://vsrm.dev.azure.com/BerryworldGroup/{project_name}/_apis/release/definitions"
+        release_definitions_df = self.session_request('GET', release_definitions_url)
+
+        return release_definitions_df
+
+    def list_release_revision(self, project_name, release_id, revision_name=None):
+        """ List all pipeline release revisions
+        :param project_name: DevOps project name
+        :param release_id: DevOps release ID
+        :param revision_name: DevOps release revision name
+        """
+        release_revision_url = f"https://vsrm.dev.azure.com/BerryworldGroup/{project_name}/_apis/release/" \
+                               f"definitions/{release_id}/revisions"
+
+        if revision_name is not None:
+            release_revision_url = f"{release_revision_url}/{revision_name}"
+
+        release_revision_df = self.session_request('GET', release_revision_url)
+
+        return release_revision_df
+
+    def list_pipeline_builds(self, project_name):
+        """ List all pipeline builds in a project
+        :param project_name: DevOps project name
+        """
+        build_definitions_url = f"{self.base_url}/{project_name}/_apis/build/definitions"
+        build_definitions_df = self.session_request('GET', build_definitions_url)
+
+        return build_definitions_df
+
+    def list_build_revision(self, project_name, build_id):
+        """ List all pipeline build revisions
+        :param project_name: DevOps project name
+        :param build_id: DevOps build ID
+        """
+        build_revision_url = f"{self.base_url}/{project_name}/_apis/build/definitions/{build_id}/revisions"
+        build_revision_df = self.session_request('GET', build_revision_url)
+
+        return build_revision_df
+
+    def list_artifact_feeds(self):
+        """ List all artifact feeds
+        """
+        artifact_feeds_url = f"https://feeds.dev.azure.com/BerryworldGroup/_apis/packaging/feeds"
+        artifact_feeds_df = self.session_request('GET', artifact_feeds_url)
+
+        return artifact_feeds_df
+
+    def list_feed_packages(self, feed_id):
+        """ List all packages in a feed
+        :param feed_id: DevOps feed ID
+        """
+        feed_packages_url = f"https://feeds.dev.azure.com/BerryworldGroup/_apis/packaging/feeds/{feed_id}/packages"
+        feed_packages_df = self.session_request('GET', feed_packages_url)
+
+        return feed_packages_df
+
+    def list_package_versions(self, feed_id, package_name):
+        """ List all versions of a package
+        :param feed_id: DevOps feed ID
+        :param package_name: DevOps package name
+        """
+        package_versions_url = f"https://feeds.dev.azure.com/BerryworldGroup/_apis/packaging/feeds/{feed_id}/" \
+                               f"packages/{package_name}/versions"
+        package_versions_df = self.session_request('GET', package_versions_url)
+
+        return package_versions_df
+
+    def get_package_version_content(self, feed_id, package_id, version_id):
+        """ Get the content of a package version
+        :param feed_id: DevOps feed ID
+        :param package_id: DevOps package ID
+        :param version_id: DevOps package version ID
+        """
+        package_content_url = f"https://feeds.dev.azure.com/BerryworldGroup/_apis/packaging/feeds/{feed_id}/" \
+                              f"packages/{package_id}/versions/{version_id}"
+        package_content_df = self.session_request('GET', package_content_url)
+
+        return package_content_df
+
+    def create_repo_files(self, project_name, repo_id, environment_name, payload, branch=None):
+        """ Create files in a repository
+        :param project_name: DevOps project name
+        :param repo_id: DevOps repository ID
+        :param environment_name: Environment name
+        :param payload: Payload to create the files
+        :param branch: Branch to create the files
+        """
+        commits_url = f"{self.base_url}/{project_name}/_apis/git/repositories/{repo_id}/" \
+                      f"commits?{self.api_version}"
+        commits_df = self.session_request("GET", commits_url)
+        if commits_df.shape[0] > 0:
+            last_commit_id = commits_df['commitId'][0]
+        else:
+            last_commit_id = '0000000000000000000000000000000000000000'
+
+        if branch is None:
+            branch = "refs/heads/master"
+
+        run_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+        repo_payload = json.dumps({
+            "refUpdates": [
+                {
+                    "name": branch,
+                    "oldObjectId": last_commit_id
+                }
+            ],
+            "commits": [{
+                "comment": f"Adding {environment_name} PowerAutomate properties via API - {run_time}. "
+                           f"skip-secret-scanning:true",
+                "changes": payload
+            }]
+        })
+
+        repo_pushes_url = f"{self.base_url}/{project_name}/_apis/git/repositories/{repo_id}/pushes?{self.api_version}"
+
+        create_repo_files = self.session_request("POST", repo_pushes_url, data=repo_payload)
+
+        return create_repo_files
+
+    def get_file_content(self, project_name, repo_id, file_path):
+        """ Get the content of a file in a repository
+        :param project_name: DevOps project name
+        :param repo_id: DevOps repository ID
+        :param file_path: Path to the file
+        """
+        file_content_url = f"{self.base_url}/{project_name}/_apis/git/repositories/{repo_id}/items?" \
+                           f"scopePath={file_path}&includeContent=True"
+
+        file_content = self.session_request("GET", file_content_url, content=True)
+
+        return file_content
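A brief, hedged sketch of how the new class would be driven, based only on the constructor and methods above. The PAT, project name and file path are placeholders; the organisation URL is hard-coded to dev.azure.com/BerryworldGroup inside the class, and most methods return pandas DataFrames built from the REST response's 'value' list (column names such as 'id' are assumed from the usual Azure DevOps REST payloads):

from berryworld import DevOps

ops = DevOps(token="<personal-access-token>")      # raises if no PAT is supplied

projects = ops.list_projects()                     # DataFrame, one row per project
repos = ops.list_repositories("MyProject")         # placeholder project name
items = ops.list_repository_items("MyProject", repos["id"][0], branch="master")

# content=True makes session_request return the raw requests.Response instead of a DataFrame
readme = ops.get_file_content("MyProject", repos["id"][0], "/README.md")
print(readme.text)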
berryworld/email_con.py
CHANGED
@@ -1,3 +1,4 @@
+import ast
 import base64
 import json
 import copy
@@ -12,12 +13,49 @@ from msal import ClientApplication
 class EmailConnection:
     """ Tooling to connect to Outlook and manage emails. Use the Graphql Outlook API """
 
-    def __init__(self,
-
-
-
-
-
+    def __init__(self, email_creds, days_back=10):
+        """ Initialize the class
+        :param email_creds: Dictionary containing the credentials to connect to the email account
+        :param days_back: Number of days back to query emails
+
+        email_creds = {
+            'authority': '',
+            'box': '',
+            'client_id': '',
+            'client_secret': '',
+            'pwd': ''
+        }
+        """
+        try:
+            self.email_creds = ast.literal_eval(email_creds)
+        except Exception as e:
+            raise ValueError(f'Email credentials not properly formatted. ERROR: {e}')
+
+        if 'authority' not in self.email_creds.keys():
+            raise ValueError(f"Authority not provided in email credentials")
+        else:
+            self.authority = self.email_creds['authority']
+
+        if 'client_id' not in self.email_creds.keys():
+            raise ValueError(f"ClientId not provided in email credentials")
+        else:
+            self.client_id = self.email_creds['client_id']
+
+        if 'client_secret' not in self.email_creds.keys():
+            raise ValueError(f"Client Secret not provided in email credentials")
+        else:
+            self.client_secret = self.email_creds['client_secret']
+
+        if 'box' not in self.email_creds.keys():
+            raise ValueError(f"Email Box not provided in email credentials")
+        else:
+            self.email_user = self.email_creds['box']
+
+        if 'pwd' not in self.email_creds.keys():
+            raise ValueError(f"Password not provided in email credentials")
+        else:
+            self.email_password = self.email_creds['pwd']
+
         self.base_url = "https://graph.microsoft.com/v1.0"
         self.headers = self.get_headers()
         self.days_back = days_back
@@ -267,20 +305,22 @@ class EmailConnection:
             response = requests.get(url=base_url, headers=header)
             json_resp = response.json()
 
+            if 'value' not in json_resp.keys():
+                return False
             emails_df = pd.DataFrame(json_resp['value'])
             if emails_df.shape[0] > 0:
                 emails_df = emails_df.loc[emails_df['isRead'] == False]
             return emails_df.shape[0] != 0
 
-        except Exception:
-            print(
-            raise Exception("An unknown error have happened")
+        except Exception as e:
+            print(e)
 
     @staticmethod
     def get_mimetype(file_path):
+        """ Get the mimetype of the file to be attached to the email
+        :param file_path: Path of the file to be attached
+        """
         # Get mimetype
-        mimetype = None
-        extension = None
         if ('.jpg' in file_path) | ('.jpeg' in file_path):
             mimetype = "image/jpeg"
             extension = '.jpeg'
@@ -308,5 +348,10 @@ class EmailConnection:
         elif '.xlsx' in file_path:
             mimetype = "application/vnd.ms-excel"
             extension = '.xlsx'
+        elif '.html' in file_path:
+            mimetype = "application/vnd.ms-excel"
+            extension = 'text/html'
+        else:
+            raise Exception('File mime type not found')
 
         return mimetype, extension