berryworld 1.0.0.161217__py3-none-any.whl → 1.0.0.161412__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- berryworld/power_automate.py +118 -58
- {berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/METADATA +1 -1
- {berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/RECORD +6 -6
- {berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/LICENSE +0 -0
- {berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/WHEEL +0 -0
- {berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/top_level.txt +0 -0
berryworld/power_automate.py
CHANGED
@@ -65,6 +65,9 @@ class PowerAutomate:
         # Base URL to get the flows
         self.base_flow_url = f'https://api.flow.microsoft.com/providers/Microsoft.ProcessSimple'

+        # Base URL to edit the flows
+        self.base_edit_url = 'https://make.powerautomate.com/'
+
         self.headers = self.generate_bearer_token(self.grant_type)

     def generate_bearer_token(self, grant_type, scope=None):
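The new `base_edit_url` constant is later combined with each flow's resource id to build a direct maker-portal link (see the `enrich_flow_url` method added further down). A minimal sketch of that transformation, using an invented resource id purely for illustration:

```python
# Illustrative sketch only: the resource id below is made up; real ids come
# from the Power Automate admin API response (the 'id' column of the flows frame).
base_edit_url = 'https://make.powerautomate.com/'
resource_id = '/providers/Microsoft.ProcessSimple/environments/Default-0000/flows/1234-abcd'

# Mirrors the enrich_flow_url change: strip the API prefix, prepend the maker URL
flow_url = base_edit_url + resource_id.replace('/providers/Microsoft.ProcessSimple/', '')
print(flow_url)  # https://make.powerautomate.com/environments/Default-0000/flows/1234-abcd
```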
@@ -193,7 +196,7 @@ class PowerAutomate:
         # Get Flow Owners
         flows_df['Owners'] = None
         for i, flow in flows_df.iterrows():
-            flow_user_ids, flow_group_ids = self.list_flow_owners(flow['
+            flow_user_ids, flow_group_ids = self.list_flow_owners(flow['FlowId'])
             flows_df.at[i, 'Owners'] = (flow_user_ids + flow_group_ids)

             user_ids = set(list(user_ids) + flow_user_ids)
@@ -215,33 +218,64 @@ class PowerAutomate:
             batch_group_ids = group_ids[batch * 15:(batch + 1) * 15]
             accounts_df = pd.concat([accounts_df, self.list_graph_accounts('groups', batch_group_ids)])

+        # Get Creator
+        flows_df['CreatedById'] = flows_df['FlowProperties'].apply(lambda x: x['creator']['userId'])
+        flows_df = flows_df.merge(
+            accounts_df[['displayName', 'id']].rename(columns={'id': 'CreatedById'}).drop_duplicates(),
+            how='left', on='CreatedById')
+        flows_df.rename(columns={'displayName': 'CreatedBy'}, inplace=True)
+
         if accounts_df.shape[0] > 0:
             flows_df = flows_df.explode('Owners')
             flows_df = flows_df.merge(accounts_df[['displayName', 'id']], how='left',
-                                      left_on='Owners',
-                                      right_on='id', suffixes=('', '_account'))
+                                      left_on='Owners', right_on='id', suffixes=('', '_account'))

             # Remove null owners
             null_account_mask = flows_df['displayName'].isnull()
-            flow_name_mask = flows_df.groupby('
+            flow_name_mask = flows_df.groupby('FlowId')['FlowId'].transform('count') > 1
             flows_df = flows_df[~(null_account_mask & flow_name_mask)]

             flows_df = flows_df[flows_df['displayName'].notna()]
-            flows_df = flows_df.groupby(['
-                'displayName': list,
-                'properties': list
-            }).reset_index()
+            flows_df = flows_df.groupby(['FlowId', 'id', 'type', 'CreatedBy']).agg({
+                'displayName': list, 'FlowProperties': list}).reset_index()
             flows_df.rename(columns={'displayName': 'Owners'}, inplace=True)

         return flows_df

+    @staticmethod
+    def enrich_info(flows_df):
+        """ Enrich the flows with the information
+        :param flows_df: DataFrame containing the flows information
+        """
+        if 'FlowProperties' in flows_df.columns:
+            flows_info_df = pd.DataFrame()
+            flows_list = []
+            for _, prop in flows_df[['FlowId', 'FlowProperties']].iterrows():
+                flow_info_response = prop['FlowProperties']
+                if isinstance(flow_info_response, list) & (len(flow_info_response) > 0):
+                    flow_info_response = flow_info_response[0]
+                flow_info = {'FlowId': prop['FlowId']}
+                for key in flow_info_response.keys():
+                    if isinstance(flow_info_response[key], (dict, list, bool)):
+                        flow_info[key] = [flow_info_response[key]]
+                    else:
+                        if len(flow_info_response[key]) > 0:
+                            flow_info[key] = flow_info_response[key]
+
+                flows_list.append(flow_info)
+            flows_info_df = pd.DataFrame(flows_list)
+
+            flows_df = pd.merge(flows_df, flows_info_df, on='FlowId', how='left')
+
+        return flows_df
+
     def enrich_run_history(self, flows_df):
         """ Enrich the flows with the run history
         :param flows_df: DataFrame containing the flows information
         """
         flows_runs_df = pd.DataFrame()
         for _, flow in flows_df.iterrows():
-            flow_url = f'{self.base_flow_url}/scopes/admin/environments/{self.environment_id}/flows/{flow["
+            flow_url = f'{self.base_flow_url}/scopes/admin/environments/{self.environment_id}/flows/{flow["FlowId"]}/' \
                        f'runs?{self.api_version}'

             run_info_df = self.session_request("GET", flow_url)
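The new `enrich_info` helper above flattens each flow's `FlowProperties` payload into top-level columns: dict, list, and bool values are wrapped in single-element lists so they survive the DataFrame merge, while non-empty scalar values are kept as-is. A toy illustration of that rule (the payload is invented, not taken from the package):

```python
import pandas as pd

# Invented payload for illustration; real payloads come from the flows API.
flows_df = pd.DataFrame({
    'FlowId': ['abc-123'],
    'FlowProperties': [{'displayName': 'Daily refresh',
                        'state': 'Started',
                        'creator': {'userId': 'u-1'}}],
})

rows = []
for _, prop in flows_df[['FlowId', 'FlowProperties']].iterrows():
    payload = prop['FlowProperties']
    info = {'FlowId': prop['FlowId']}
    for key, value in payload.items():
        if isinstance(value, (dict, list, bool)):
            # Containers and booleans are wrapped, as in enrich_info
            info[key] = [value]
        elif len(value) > 0:
            # Non-empty scalars are copied straight across
            info[key] = value
    rows.append(info)

print(pd.merge(flows_df, pd.DataFrame(rows), on='FlowId', how='left'))
```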
@@ -255,11 +289,9 @@ class PowerAutomate:
             run_info_df['ErrorMessage'] = run_info_df['RunProperties'].apply(
                 lambda x: x['error']['message'] if 'error' in x else None)

-            run_info_df['
-            run_info_df = run_info_df.groupby(['
-                'RunProperties': list,
-                'ErrorMessage': list
-            }).reset_index()
+            run_info_df['FlowId'] = flow['FlowId']
+            run_info_df = run_info_df.groupby(['FlowId', 'ErrorCount', 'RunCount', 'LastRun', 'LastRunStatus']).agg(
+                {'RunProperties': list, 'ErrorMessage': list}).reset_index()

             run_info_df['ErrorMessage'] = run_info_df['ErrorMessage'].apply(
                 lambda x: [e for e in x if not pd.isna(e)])
@@ -267,7 +299,7 @@ class PowerAutomate:
             flows_runs_df = pd.concat([flows_runs_df, run_info_df])

         if flows_runs_df.shape[0] > 0:
-            flows_df = pd.merge(flows_df, flows_runs_df, how='left', on='
+            flows_df = pd.merge(flows_df, flows_runs_df, how='left', on='FlowId')
             flows_df[['ErrorCount', 'RunCount']] = flows_df[['ErrorCount', 'RunCount']].fillna(0).astype(int)

         return flows_df
@@ -278,19 +310,18 @@ class PowerAutomate:
         """
         flows_connections_df = pd.DataFrame()
         for i, flow in flows_df.iterrows():
-            flow_url = f'{self.base_flow_url}/scopes/admin/environments/{self.environment_id}/flows/{flow["
+            flow_url = f'{self.base_flow_url}/scopes/admin/environments/{self.environment_id}/flows/{flow["FlowId"]}/' \
                        f'connections?{self.api_version}'

             connections_df = self.session_request("GET", flow_url)
             if connections_df.shape[0] > 0:
                 connections_df.rename(columns={'name': 'ConnectionName'}, inplace=True)
-                connections_df['
+                connections_df['FlowId'] = flow['FlowId']

                 if 'properties' in connections_df.columns:
-                    connections_df_properties = pd.concat(
-
-
-                        ignore_index=True)
+                    connections_df_properties = pd.concat(connections_df.apply(
+                        lambda row: pd.json_normalize(row['properties']).assign(name=row['FlowId']), axis=1).tolist(),
+                        ignore_index=True)

                     expanded_statuses_df = pd.concat(connections_df_properties.apply(
                         lambda row: pd.json_normalize(row['statuses']).assign(original_index=row.name),
@@ -337,7 +368,8 @@ class PowerAutomate:
             flows_connections_df = pd.concat([flows_connections_df, connections_df_properties_group])

         if flows_connections_df.shape[0] > 0:
-
+            flows_connections_df.rename(columns={'name': 'FlowId'}, inplace=True)
+            flows_df = pd.merge(flows_df, flows_connections_df, how='left', on='FlowId')
             flows_df['ConnectionsCount'] = flows_df['ConnectionsCount'].fillna(0).astype(int)

         if 'ConnectionErrorCode' in flows_df.columns:
@@ -348,67 +380,61 @@ class PowerAutomate:

         return flows_df

-    def 
+    def enrich_flow_url(self, flows_df):
+        """ Enrich the flows with the flow URL
+        :param flows_df: DataFrame containing the flows information
+        """
+        replacement_str = '/providers/Microsoft.ProcessSimple/'
+        flows_df.loc[:, 'FlowUrl'] = flows_df['id'].apply(lambda x: self.base_edit_url + x.replace(replacement_str, ''))
+
+        return flows_df
+
+    def list_flows(self, relevant_columns=True, **kwargs):
         """ List the flows of an environment
+        :param relevant_columns: Whether to return all the columns or only the relevant ones
         """
         enrich = False
         enrich_info = False
         enrich_owners = False
         enrich_run_history = False
         enrich_connections = False
+        enrich_flow_url = False
         if kwargs != {}:
             try:
                 if 'enrich' in kwargs.keys():
                     enrich = True

-                if 
-                    enrich_info = True
-
-                if ('enrich_owners' in kwargs.keys()) | (enrich is True):
+                if enrich:
                     enrich_owners = True
-
-                if ('enrich_run_history' in kwargs.keys()) | (enrich is True):
+                    enrich_info = True
                     enrich_run_history = True
-
-                if ('enrich_connections' in kwargs.keys()) | (enrich is True):
                     enrich_connections = True
+                    enrich_flow_url = True
+                else:
+                    if ('enrich_info' in kwargs.keys()) | (enrich is True):
+                        enrich_info = True
+                    if ('enrich_owners' in kwargs.keys()) | (enrich is True):
+                        enrich_owners = True
+                    if ('enrich_run_history' in kwargs.keys()) | (enrich is True):
+                        enrich_run_history = True
+                    if ('enrich_connections' in kwargs.keys()) | (enrich is True):
+                        enrich_connections = True
+                    if ('enrich_flow_url' in kwargs.keys()) | (enrich is True):
+                        enrich_flow_url = True

             except KeyError:
                 raise KeyError('Please provide a value to enrich the response with')

         flow_url = f'{self.base_flow_url}/scopes/admin/environments/{self.environment_id}/flows?{self.api_version}'
         flows_df = self.session_request("GET", flow_url)
-
-        if enrich:
-            enrich_owners = True
-            enrich_info = True
-            enrich_run_history = True
-            enrich_connections = True
+        flows_df.rename(columns={'name': 'FlowId', 'properties': 'FlowProperties'}, inplace=True)

         if enrich_owners:
             flows_df = self.enrich_owners(flows_df)
-            flows_df.drop_duplicates(subset=['
+            flows_df.drop_duplicates(subset=['FlowId'], inplace=True)

         if enrich_info:
-
-            flows_info_df = pd.DataFrame()
-            flows_list = []
-            for _, prop in flows_df[['name', 'properties']].iterrows():
-                flow_info_response = prop['properties']
-                if isinstance(flow_info_response, list) & (len(flow_info_response) > 0):
-                    flow_info_response = flow_info_response[0]
-                flow_info = {'name': prop['name']}
-                for key in flow_info_response.keys():
-                    if isinstance(flow_info_response[key], (dict, list, bool)):
-                        flow_info[key] = [flow_info_response[key]]
-                    else:
-                        if len(flow_info_response[key]) > 0:
-                            flow_info[key] = flow_info_response[key]
-
-                flows_list.append(flow_info)
-            flows_info_df = pd.DataFrame(flows_list)
-
-            flows_df = pd.merge(flows_df, flows_info_df, on='name', how='left')
+            flows_df = self.enrich_info(flows_df)

         if enrich_run_history:
             flows_df = self.enrich_run_history(flows_df)
@@ -416,7 +442,41 @@ class PowerAutomate:
         if enrich_connections:
             flows_df = self.enrich_connections(flows_df)

-
+        if enrich_flow_url:
+            flows_df = self.enrich_flow_url(flows_df)
+
+        flows_df.rename(
+            columns={
+                'displayName': 'FlowName', 'definition': 'JsonDefinition', 'state': 'State',
+                'createdTime': 'CreatedDate', 'lastModifiedTime': 'LastModifiedDate',
+                'flowSuspensionReason': 'FlowSuspensionReason', 'flowSuspensionTime': 'FlowSuspensionTime',
+                'flowSuspensionReasonDetails': 'FlowSuspensionReasonDetails',
+                'AuthenticatedUserName': 'ConnectionAuthenticatedUserName'
+            }, inplace=True
+        )
+
+        if relevant_columns:
+            flows_final_df = flows_df[['FlowId', 'FlowName', 'id', 'FlowUrl', 'JsonDefinition', 'State', 'CreatedBy',
+                                       'Owners', 'CreatedDate', 'LastModifiedDate', 'FlowSuspensionReason',
+                                       'ErrorCount', 'ErrorMessage', 'RunCount', 'RunProperties', 'LastRun',
+                                       'LastRunStatus', 'ConnectionAccountName', 'ConnectionStatus',
+                                       'ConnectionLastModifiedTime', 'ConnectionsCount',
+                                       'ConnectionAuthenticatedUserName', 'ConnectionErrorCode']]
+
+            if 'FlowSuspensionReasonDetails' in flows_df.columns:
+                flows_final_df.loc[:, 'FlowSuspensionReasonDetails'] = flows_df['FlowSuspensionReasonDetails']
+            if 'FlowSuspensionTime' in flows_df.columns:
+                flows_final_df.loc[:, 'FlowSuspensionTime'] = flows_df['FlowSuspensionTime']
+            if 'ConnectionErrorCode' in flows_df.columns:
+                flows_final_df.loc[:, 'ConnectionErrorCode'] = flows_df['ConnectionErrorCode']
+            if 'ConnectionErrorMessage' in flows_df.columns:
+                flows_final_df.loc[:, 'ConnectionErrorMessage'] = flows_df['ConnectionErrorMessage']
+        else:
+            flows_final_df = flows_df
+
+        flows_final_df.sort_values(by=['FlowName'], inplace=True)
+
+        return flows_final_df.reset_index(drop=True)

     def update_flow(self, flow_id, payload):
         """ Update a flow
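Taken together, the changes in this version rename the raw API columns (`name` → `FlowId`, `properties` → `FlowProperties`), move the inline property flattening into the `enrich_info` static method, and add an `enrich_flow_url` option alongside the existing enrichment flags. A hedged usage sketch; the constructor arguments are not part of this diff and are omitted:

```python
from berryworld.power_automate import PowerAutomate

# Constructor arguments (credentials, environment, grant type) are not shown in
# this diff, so they are left out of this sketch.
pa = PowerAutomate(...)

# enrich=True now switches on every enrichment, including the new FlowUrl column
flows = pa.list_flows(enrich=True)

# Individual flags can also be combined; relevant_columns=False keeps the full frame
flows = pa.list_flows(relevant_columns=False, enrich_info=True, enrich_flow_url=True)
print(flows[['FlowId', 'FlowUrl']].head())
```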
{berryworld-1.0.0.161217.dist-info → berryworld-1.0.0.161412.dist-info}/RECORD
CHANGED
@@ -14,7 +14,7 @@ berryworld/microsoft_teams.py,sha256=Mhj83-tQWRR58YfBUPUTQKQ-qJD1Y5FBPpjU70gP6UU
 berryworld/persistent_storage.py,sha256=_lGdXa7IyxfMF3xNF9y26X_z9RDb2Ah7R0oF61HR8Gc,5764
 berryworld/pickle_management.py,sha256=O49ojVtTqYCT510rVRTbZWWaur_-5q3HSVG03Azn8mQ,2393
 berryworld/postgres_connection.py,sha256=L4x1wM0g4HkkkaJhqOP4NV9klWsaQ_F8EucD0Q2Rr60,8788
-berryworld/power_automate.py,sha256=
+berryworld/power_automate.py,sha256=PY7Sj7t6Td8P54qUfnKSMpSkV9leITadtgpP9o5KZhM,25575
 berryworld/sharepoint_con.py,sha256=72CZSVpAoHxocAWFuSSeUkNHa4Vu8FdM09NX-QC5-eE,22011
 berryworld/sql_conn.py,sha256=_q7FSrVHUuNWmqLjMR4is2o9Tns2jWNxtmi3tahmkSw,45226
 berryworld/sql_connection.py,sha256=qxSMZKKUhjX-c2BOE14qJFfvHhDZ_CNM_w4XbLec-y0,44344
@@ -28,8 +28,8 @@ tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/test_allocation_config.py,sha256=e12l6fE9U57eSPS35g6ekJ_hol7-RHg89JV60_m1BlE,4633
 tests/test_handy_mix_config.py,sha256=Un56mz9KJmdn4K4OwzHAHLSRzDU1Xv2nFrONNuzOG04,2594
 tests/test_xml_parser.py,sha256=3QTlhFEd6KbK6nRFKZnc35tad6wqukTbe4QrFi8mr_8,859
-berryworld-1.0.0.
-berryworld-1.0.0.
-berryworld-1.0.0.
-berryworld-1.0.0.
-berryworld-1.0.0.
+berryworld-1.0.0.161412.dist-info/LICENSE,sha256=vtkVCJM6E2af2gnsi2XxKPr4WY-uIbvzVLXieFND0UU,1074
+berryworld-1.0.0.161412.dist-info/METADATA,sha256=jVfB9cUzbjuRoc2N6zPmhS5eHo0s395Vw9g8HOrfhqs,1107
+berryworld-1.0.0.161412.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+berryworld-1.0.0.161412.dist-info/top_level.txt,sha256=GIZ5qy-P5oxfEH755vA1IMFeTVdX3-40JxMe6nOe5I8,17
+berryworld-1.0.0.161412.dist-info/RECORD,,