illumio-pylo 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. illumio_pylo/API/APIConnector.py +1308 -0
  2. illumio_pylo/API/AuditLog.py +42 -0
  3. illumio_pylo/API/ClusterHealth.py +136 -0
  4. illumio_pylo/API/CredentialsManager.py +286 -0
  5. illumio_pylo/API/Explorer.py +1077 -0
  6. illumio_pylo/API/JsonPayloadTypes.py +240 -0
  7. illumio_pylo/API/RuleSearchQuery.py +128 -0
  8. illumio_pylo/API/__init__.py +0 -0
  9. illumio_pylo/AgentStore.py +139 -0
  10. illumio_pylo/Exception.py +44 -0
  11. illumio_pylo/Helpers/__init__.py +3 -0
  12. illumio_pylo/Helpers/exports.py +508 -0
  13. illumio_pylo/Helpers/functions.py +166 -0
  14. illumio_pylo/IPList.py +135 -0
  15. illumio_pylo/IPMap.py +285 -0
  16. illumio_pylo/Label.py +25 -0
  17. illumio_pylo/LabelCommon.py +48 -0
  18. illumio_pylo/LabelGroup.py +68 -0
  19. illumio_pylo/LabelStore.py +403 -0
  20. illumio_pylo/LabeledObject.py +25 -0
  21. illumio_pylo/Organization.py +258 -0
  22. illumio_pylo/Query.py +331 -0
  23. illumio_pylo/ReferenceTracker.py +41 -0
  24. illumio_pylo/Rule.py +671 -0
  25. illumio_pylo/Ruleset.py +306 -0
  26. illumio_pylo/RulesetStore.py +101 -0
  27. illumio_pylo/SecurityPrincipal.py +62 -0
  28. illumio_pylo/Service.py +256 -0
  29. illumio_pylo/SoftwareVersion.py +125 -0
  30. illumio_pylo/VirtualService.py +17 -0
  31. illumio_pylo/VirtualServiceStore.py +75 -0
  32. illumio_pylo/Workload.py +506 -0
  33. illumio_pylo/WorkloadStore.py +289 -0
  34. illumio_pylo/__init__.py +82 -0
  35. illumio_pylo/cli/NativeParsers.py +96 -0
  36. illumio_pylo/cli/__init__.py +134 -0
  37. illumio_pylo/cli/__main__.py +10 -0
  38. illumio_pylo/cli/commands/__init__.py +32 -0
  39. illumio_pylo/cli/commands/credential_manager.py +168 -0
  40. illumio_pylo/cli/commands/iplist_import_from_file.py +185 -0
  41. illumio_pylo/cli/commands/misc.py +7 -0
  42. illumio_pylo/cli/commands/ruleset_export.py +129 -0
  43. illumio_pylo/cli/commands/update_pce_objects_cache.py +44 -0
  44. illumio_pylo/cli/commands/ven_duplicate_remover.py +366 -0
  45. illumio_pylo/cli/commands/ven_idle_to_visibility.py +287 -0
  46. illumio_pylo/cli/commands/ven_upgrader.py +226 -0
  47. illumio_pylo/cli/commands/workload_export.py +251 -0
  48. illumio_pylo/cli/commands/workload_import.py +423 -0
  49. illumio_pylo/cli/commands/workload_relabeler.py +510 -0
  50. illumio_pylo/cli/commands/workload_reset_names_to_null.py +83 -0
  51. illumio_pylo/cli/commands/workload_used_in_rule_finder.py +80 -0
  52. illumio_pylo/docs/Doxygen +1757 -0
  53. illumio_pylo/tmp.py +104 -0
  54. illumio_pylo/utilities/__init__.py +0 -0
  55. illumio_pylo/utilities/cli.py +10 -0
  56. illumio_pylo/utilities/credentials.example.json +20 -0
  57. illumio_pylo/utilities/explorer_report_exporter.py +86 -0
  58. illumio_pylo/utilities/health_monitoring.py +102 -0
  59. illumio_pylo/utilities/iplist_analyzer.py +148 -0
  60. illumio_pylo/utilities/iplists_stats_duplicates_unused_finder.py +75 -0
  61. illumio_pylo/utilities/resources/iplists-import-example.csv +3 -0
  62. illumio_pylo/utilities/resources/iplists-import-example.xlsx +0 -0
  63. illumio_pylo/utilities/resources/workload-exporter-filter-example.csv +3 -0
  64. illumio_pylo/utilities/resources/workloads-import-example.csv +2 -0
  65. illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
  66. illumio_pylo/utilities/ven_compatibility_report_export.py +240 -0
  67. illumio_pylo/utilities/ven_idle_to_illumination.py +344 -0
  68. illumio_pylo/utilities/ven_reassign_pce.py +183 -0
  69. illumio_pylo-0.2.5.dist-info/LICENSE +176 -0
  70. illumio_pylo-0.2.5.dist-info/METADATA +197 -0
  71. illumio_pylo-0.2.5.dist-info/RECORD +73 -0
  72. illumio_pylo-0.2.5.dist-info/WHEEL +5 -0
  73. illumio_pylo-0.2.5.dist-info/top_level.txt +1 -0
@@ -0,0 +1,510 @@
1
+ import illumio_pylo as pylo
2
+ import argparse
3
+ import sys
4
+ import math
5
+ from .misc import make_filename_with_timestamp
6
+ from . import Command
7
+
8
# CLI registration metadata consumed by the command framework:
# the sub-command's name and the PCE object types it needs pre-loaded.
command_name = 'workload-relabeler'
objects_load_filter = ['workloads', 'labels']
10
+
11
+
12
def fill_parser(parser: argparse.ArgumentParser):
    """Register this command's CLI arguments on the given argparse parser."""
    parser.add_argument('--confirm', action='store_true',
                        help="No change will be implemented in the PCE until you use this function to confirm you're good with them after review")

    parser.add_argument('--input-file', '-i', type=str, required=True,
                        help='CSV or Excel input filename')
    parser.add_argument('--input-file-delimiter', type=str, required=False, default=',',
                        help='CSV field delimiter')

    parser.add_argument('--match-on-hostname', action='store_true',
                        help="In order to be relabeled, a workload must match a HOSTNAME entry from the CSV file")
    parser.add_argument('--match-on-ip', action='store_true',
                        help="In order to be relabeled, a workload must match an IP entry from the CSV file")
    parser.add_argument('--match-on-href', action='store_true',
                        help="In order to be relabeled, a workload must match a HREF entry from the CSV file")

    # BUGFIX: help texts corrected — loc/app filters previously described the
    # wrong label type (copy/paste leftovers).
    parser.add_argument('--filter-env-label', type=str, required=False, default=None,
                        help='Filter workloads by environment labels (separated by commas)')
    parser.add_argument('--filter-loc-label', type=str, required=False, default=None,
                        help='Filter workloads by location labels (separated by commas)')
    parser.add_argument('--filter-app-label', type=str, required=False, default=None,
                        help='Filter workloads by application labels (separated by commas)')
    parser.add_argument('--filter-role-label', type=str, required=False, default=None,
                        help='Filter workloads by role labels (separated by commas)')

    parser.add_argument('--batch-size', type=int, required=False, default=500,
                        help='Number of Workloads to update per API call')
39
+
40
+
41
def __main(args, org: pylo.Organization, **kwargs):
    """Relabel PCE Workloads from a CSV/Excel input file.

    Workloads are matched against input rows on hostname, IP and/or href
    (--match-on-* flags), optionally pre-filtered by role/app/env/loc label
    filters, then updated via the bulk API. A CSV and an Excel report of every
    decision is written. No change is pushed to the PCE unless --confirm is set.
    """

    input_file = args['input_file']
    input_file_delimiter = args['input_file_delimiter']
    batch_size = args['batch_size']
    confirmed_changes = args['confirm']

    input_match_on_hostname = args['match_on_hostname']
    input_match_on_ip = args['match_on_ip']
    input_match_on_href = args['match_on_href']

    output_file_prefix = make_filename_with_timestamp('workload-relabeler-results_')
    output_file_csv = output_file_prefix + '.csv'
    output_file_excel = output_file_prefix + '.xlsx'

    ignored_workloads_count = 0

    # Label columns are always optional; a matching column is mandatory only
    # when the corresponding --match-on-* flag was used.
    csv_expected_fields = [
        {'name': 'role', 'optional': True},
        {'name': 'app', 'optional': True},
        {'name': 'env', 'optional': True},
        {'name': 'loc', 'optional': True},
        {'name': 'ip', 'optional': not input_match_on_ip},
        {'name': 'hostname', 'optional': not input_match_on_hostname},
        {'name': 'href', 'optional': not input_match_on_href}
    ]

    if not input_match_on_ip and not input_match_on_hostname and not input_match_on_href:
        pylo.log.error('You must specify at least one (or several) property to match on for workloads vs input: href, ip or hostname')
        sys.exit(1)

    csv_report = pylo.ArrayToExport(['name', 'role', 'app', 'env', 'loc', 'new_role', 'new_app', 'new_env', 'new_loc', '**updated**', '**reason**', 'href'])

    print(" * Loading CSV input file '{}'...".format(input_file), flush=True, end='')
    CsvData = pylo.CsvExcelToObject(input_file, expected_headers=csv_expected_fields, csv_delimiter=input_file_delimiter)
    print('OK')
    print(" - CSV has {} columns and {} lines (headers don't count)".format(CsvData.count_columns(), CsvData.count_lines()))

    def workload_to_csv_report(workload: pylo.Workload, updated: bool, reason: str = '', new_labels=None):
        """Build one report record for a workload.

        new_labels maps a label type ('role'/'app'/'env'/'loc') to
        {'name':..., 'href':...} for each label that was changed; types absent
        from the map (or a None new_labels) are reported as '*unchanged*'.
        """
        labels = workload.get_labels_str_list()

        record = {
            'name': workload.get_name(),
            'href': workload.href,
            'role': labels[pylo.label_type_role],
            'app': labels[pylo.label_type_app],
            'env': labels[pylo.label_type_env],
            'loc': labels[pylo.label_type_loc],
            '**updated**': str(updated),
            '**reason**': reason
        }

        unchanged_str = '*unchanged*'

        for label_type in ('role', 'app', 'env', 'loc'):
            if new_labels is not None and label_type in new_labels:
                record['new_' + label_type] = new_labels[label_type]['name']
            else:
                record['new_' + label_type] = unchanged_str

        return record

    # <editor-fold desc="CSV basic checks">
    print(" * Performing basic checks on CSV input:")
    csv_ip_cache = {}
    csv_name_cache = {}  # keyed by lower-cased, FQDN-stripped hostname
    csv_href_cache = {}
    csv_check_failed = 0

    if input_match_on_ip:
        for csv_object in CsvData.objects():
            ips = csv_object['ip'].split(',')
            csv_object['**ip_array**'] = []

            for ip in ips:
                ip = ip.strip(" \r\n")
                if not pylo.is_valid_ipv4(ip) and not pylo.is_valid_ipv6(ip):
                    print(" - ERROR: CSV line #{} has invalid IP address defined".format(csv_object['*line*']), flush=True)
                    csv_check_failed += 1
                    continue

                csv_object['**ip_array**'].append(ip)

                if ip not in csv_ip_cache:
                    csv_ip_cache[ip] = csv_object
                    continue

                csv_check_failed += 1
                print(" - ERROR: CSV line #{} has a duplicate IP address with line #{}".format(csv_object['*line*'], csv_ip_cache[ip]['*line*']), flush=True)

            if len(csv_object['**ip_array**']) < 1:
                print(" - ERROR: CSV line #{} has no valid IP address defined".format(csv_object['*line*']), flush=True)

    if input_match_on_hostname:
        for csv_object in CsvData.objects():
            name = csv_object['hostname']
            name = pylo.Workload.static_name_stripped_fqdn(name)
            if name is None or len(name) < 1:
                print(" - ERROR: CSV line #{} has invalid hostname defined: '{}'".format(csv_object['*line*'], csv_object['hostname']), flush=True)
                csv_check_failed += 1
                continue

            # BUGFIX: membership test and insertion now both use the
            # lower-cased name; the original tested the raw name against a
            # cache keyed by the lower-cased one, so case-differing duplicates
            # were never detected.
            if name.lower() not in csv_name_cache:
                csv_name_cache[name.lower()] = csv_object
                continue

            print(" - ERROR: CSV line #{} has duplicate hostname defined from a previous line: '{}'".format(csv_object['*line*'], csv_object['hostname']), flush=True)
            csv_check_failed += 1

    if input_match_on_href:
        for csv_object in CsvData.objects():
            href = csv_object['href']
            if href is None or len(href) < 1:
                print(" - ERROR: CSV line #{} has invalid href defined: '{}'".format(csv_object['*line*'], csv_object['href']), flush=True)
                csv_check_failed += 1
                continue

            # BUGFIX: entries were inserted into csv_name_cache instead of
            # csv_href_cache, leaving csv_href_cache empty (duplicate hrefs
            # undetected) and polluting the hostname cache.
            if href not in csv_href_cache:
                csv_href_cache[href] = csv_object
                continue

            print(" - ERROR: CSV line #{} has duplicate href defined from a previous line: '{}'".format(csv_object['*line*'], csv_object['href']), flush=True)
            csv_check_failed += 1

    if csv_check_failed > 0:
        pylo.log.error("ERROR! Several ({}) inconsistencies were found in the CSV, please fix them before you continue!".format(csv_check_failed))
        sys.exit(1)

    print(" * Done")
    # </editor-fold>

    # <editor-fold desc="Parsing Label filters">
    print(" * Parsing filters")

    def parse_label_filter(arg_name: str, display_name: str, label_type: str) -> dict:
        """Resolve the comma-separated label names of CLI argument arg_name
        into a {Label: Label} dict; raise PyloEx on any unknown name."""
        label_list = {}
        if args[arg_name] is not None:
            print(" * {} Labels specified".format(display_name))
            for raw_label_name in args[arg_name].split(','):
                print(" - label named '{}' ".format(raw_label_name), end='', flush=True)
                label = org.LabelStore.find_label_by_name_and_type(raw_label_name, label_type)
                if label is None:
                    print("NOT FOUND!")
                    raise pylo.PyloEx("Cannot find label named '{}'".format(raw_label_name))
                print(" found")
                label_list[label] = label
        return label_list

    env_label_list = parse_label_filter('filter_env_label', 'Environment', pylo.label_type_env)
    loc_label_list = parse_label_filter('filter_loc_label', 'Location', pylo.label_type_loc)
    app_label_list = parse_label_filter('filter_app_label', 'Application', pylo.label_type_app)
    role_label_list = parse_label_filter('filter_role_label', 'Role', pylo.label_type_role)
    print(" * DONE")
    # </editor-fold>

    # <editor-fold desc="Filter the list of VENs to be relabeled">
    workloads_to_relabel = org.WorkloadStore.itemsByHRef.copy()
    print(" * PCE has {} managed workloads. Now applying requested filters:".format(len(workloads_to_relabel)))
    label_filters = [
        ('role_label', role_label_list, 'Role label did not match filters'),
        ('app_label', app_label_list, 'Application label did not match filters'),
        ('env_label', env_label_list, 'Environment label did not match filters'),
        ('loc_label', loc_label_list, 'Location label did not match filters'),
    ]
    for workload_href in list(workloads_to_relabel.keys()):
        workload = workloads_to_relabel[workload_href]
        for attr_name, label_filter, reason in label_filters:
            if len(label_filter) < 1:
                continue
            workload_label = getattr(workload, attr_name)
            if workload_label is None or workload_label not in label_filter:
                del workloads_to_relabel[workload_href]
                ignored_workloads_count += 1
                csv_report.add_line_from_object(workload_to_csv_report(workload, False, reason))
                break

    print(" * DONE! {} Managed Workloads remain to be relabeled".format(len(workloads_to_relabel)))
    # </editor-fold>

    # <editor-fold desc="Matching between CSV/Excel and Managed Workloads">
    print(" * Matching remaining Managed Workloads with CSV/Excel input:")
    count = 0
    workloads_to_relabel_match = {}  # type: dict[pylo.Workload,dict]

    for workload_href in list(workloads_to_relabel.keys()):
        count += 1
        workload = workloads_to_relabel[workload_href]
        print(" - Workload #{}/{} named '{}' href '{}' with {} IP addresses".format(count, len(workloads_to_relabel), workload.get_name(), workload.href, len(workload.interfaces)))
        this_workload_matched_on_ip = None
        this_workload_matched_on_name = None
        this_workload_matched_on_href = None
        this_workload_matched = None

        if input_match_on_ip:
            ip_matches = []
            for interface in workload.interfaces:
                print("   - ip {}...".format(interface.ip), flush=True, end='')
                csv_ip_record = csv_ip_cache.get(interface.ip)
                if csv_ip_record is None:
                    print(" not found in CSV/Excel")
                else:
                    print("found")
                    ip_matches.append(csv_ip_record)
            if len(ip_matches) < 1:
                print("   - No matching IP address found in CSV/Excel, this Workload will not be relabeled")
                del workloads_to_relabel[workload_href]
                ignored_workloads_count += 1
                csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'No IP match was found in CSV/Excel input'))
                continue
            if len(ip_matches) > 1:
                print("   - Found more than 1 IP matches in CSV/Excel, this Workload will not be relabeled")
                del workloads_to_relabel[workload_href]
                ignored_workloads_count += 1
                csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'Too many IP matches were found in CSV/Excel input'))
                continue
            this_workload_matched_on_ip = ip_matches[0]
            this_workload_matched = this_workload_matched_on_ip

        if input_match_on_hostname:
            name_match = csv_name_cache.get(workload.get_name_stripped_fqdn().lower())
            print("   - match on name '{}'...".format(workload.get_name_stripped_fqdn()), flush=True, end='')
            if name_match is None:
                del workloads_to_relabel[workload_href]
                print(" NOT FOUND")
                ignored_workloads_count += 1
                csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'No name match was found in CSV/Excel input'))
                continue

            print(" FOUND")
            this_workload_matched_on_name = name_match
            this_workload_matched = this_workload_matched_on_name

        if input_match_on_href:
            # BUGFIX: lookup now reads csv_href_cache (the original queried
            # csv_name_cache, which only "worked" because of the insertion bug
            # fixed in the CSV checks above).
            href_match = csv_href_cache.get(workload.href)
            print("   - match on href '{}'...".format(workload.href), flush=True, end='')
            if href_match is None:
                del workloads_to_relabel[workload_href]
                print(" NOT FOUND")
                ignored_workloads_count += 1
                csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'No href match was found in CSV/Excel input'))
                continue

            print(" FOUND")
            this_workload_matched_on_href = href_match
            this_workload_matched = this_workload_matched_on_href

        # Keep the workload only if every requested matching method agreed on
        # the same CSV line.
        if this_workload_matched is not None and\
                (not input_match_on_ip or this_workload_matched['*line*'] == this_workload_matched_on_ip['*line*']) and\
                (not input_match_on_hostname or this_workload_matched['*line*'] == this_workload_matched_on_name['*line*']) and \
                (not input_match_on_href or this_workload_matched['*line*'] == this_workload_matched_on_href['*line*']):
            workloads_to_relabel_match[workload] = this_workload_matched
            print("   - all filters matched, it's in!")
        else:
            # BUGFIX: the original left disagreeing workloads in
            # workloads_to_relabel, causing a KeyError in the label-diff loop
            # below; drop and report them instead.
            del workloads_to_relabel[workload_href]
            ignored_workloads_count += 1
            csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'Matching methods pointed to different CSV/Excel lines'))

    print(" * Done! After Filtering+CSV Match, {} workloads remain to be relabeled".format(len(workloads_to_relabel)))
    # </editor-fold>

    # <editor-fold desc="List missing Labels and Workloads which already have the right labels">
    print(" * Looking for any missing label which need to be created and Workloads which already have the right labels:")
    labels_to_be_created = {}
    count_workloads_with_right_labels = 0
    for workload in list(workloads_to_relabel.values()):

        csv_object = workloads_to_relabel_match[workload]

        def label_change_needed(label_type: str) -> bool:
            """Return True if the CSV wants a different label (or a removal)
            than the workload currently carries; side effect: record labels
            missing from the PCE into labels_to_be_created."""
            if csv_object[label_type] is not None and len(csv_object[label_type]) > 0:
                label_found = org.LabelStore.find_label_by_name(csv_object[label_type], label_type)
                if label_found is None:
                    temp_label_name = '**{}**{}'.format(label_type, csv_object[label_type].lower())
                    labels_to_be_created[temp_label_name] = {'name': csv_object[label_type], 'type': label_type}
                    return True
                return label_found is not workload.get_label(label_type)
            # Empty CSV cell: a change is needed only if the workload currently
            # has a label of that type (it will be removed).
            return workload.get_label(label_type) is not None

        workload_needs_label_change = False
        for label_type in ('role', 'app', 'env', 'loc'):
            # evaluate every type unconditionally so missing labels are
            # collected even after the first detected change
            workload_needs_label_change = label_change_needed(label_type) or workload_needs_label_change

        if not workload_needs_label_change:
            count_workloads_with_right_labels += 1
            del workloads_to_relabel[workload.href]
            del workloads_to_relabel_match[workload]
            csv_report.add_line_from_object(workload_to_csv_report(workload, False, 'Workload already has the right Labels'))
    print()
    print(" * DONE! Found {} missing labels to be created and {} Workloads which need an update".format(len(labels_to_be_created), len(workloads_to_relabel)), flush=True)
    # </editor-fold>

    # <editor-fold desc="Missing Labels creation">
    if len(labels_to_be_created) > 0:
        print(" * {} Labels need to created before Workloads can be imported, listing:".format(len(labels_to_be_created)), flush=True)
        for label_to_create in labels_to_be_created.values():
            print("   - '{}' type {}".format(label_to_create['name'], label_to_create['type']))

        if confirmed_changes:
            for label_to_create in labels_to_be_created.values():
                print("   - Pushing '{}' with type '{}' to the PCE... ".format(label_to_create['name'], label_to_create['type']), end='', flush=True)
                org.LabelStore.api_create_label(label_to_create['name'], label_to_create['type'])
                print("OK")
        else:
            # no --confirm: create the labels locally only, so the payload
            # generation below can still resolve them by name
            for label_to_create in labels_to_be_created.values():
                org.LabelStore.create_label(label_to_create['name'], label_to_create['type'])
    # </editor-fold>

    # <editor-fold desc="JSON Payloads generation">
    print(' * Preparing Workloads JSON payloads...', flush=True)
    workloads_to_relabel_fixed_index = list(workloads_to_relabel_match.keys())
    workloads_list_changed_labels_for_report = {}
    workloads_json_data = []
    for workload in workloads_to_relabel_fixed_index:
        data = workloads_to_relabel_match[workload]
        new_workload = {'href': workload.href}
        workloads_json_data.append(new_workload)
        changed_labels = {}
        workloads_list_changed_labels_for_report[workload] = changed_labels
        new_workload['labels'] = []

        def append_label_href(label_type: str):
            """Append the target label href for label_type to the payload.
            An empty CSV cell appends nothing, which removes any current label
            of that type since 'labels' replaces the full set."""
            if data[label_type] is not None and len(data[label_type]) > 0:
                # BUGFIX: the original passed the bogus attribute
                # 'pylo.LabelStore.label_type' instead of the local label_type.
                found_label = org.LabelStore.find_label_by_name(data[label_type], label_type)
                if found_label is None:
                    raise pylo.PyloEx('Cannot find a Label named "{}" in the PCE for CSV line #{}'.format(data[label_type], data['*line*']))
                workload_found_label = workload.get_label(label_type)
                if workload_found_label is not found_label:
                    new_workload['labels'].append({'href': found_label.href})
                    changed_labels[label_type] = {'name': found_label.name, 'href': found_label.href}
                else:
                    # label unchanged: re-send the current href because the
                    # 'labels' array replaces the workload's full label set
                    if workload_found_label is not None:
                        new_workload['labels'].append({'href': workload_found_label.href})

        append_label_href('role')
        append_label_href('app')
        append_label_href('env')
        append_label_href('loc')

    print(" * DONE")
    # </editor-fold>

    if confirmed_changes:
        # <editor-fold desc="Workloads update PUSH to API">
        print(" * Updating {} Workloads in batches of {}".format(len(workloads_json_data), batch_size), flush=True)
        batch_cursor = 0
        total_created_count = 0
        total_failed_count = 0
        # BUGFIX: '<' instead of '<=' (the original issued one extra, empty
        # API call at the end) and full batch_size slices (the original's
        # 'batch_size-1' silently dropped the last workload of every batch).
        while batch_cursor < len(workloads_json_data):
            print("  - batch #{} of {}".format(batch_cursor // batch_size + 1, math.ceil(len(workloads_json_data) / batch_size)), flush=True)
            batch_json_data = workloads_json_data[batch_cursor:batch_cursor + batch_size]
            results = org.connector.objects_workload_update_bulk(batch_json_data)
            created_count = 0
            failed_count = 0

            for i in range(len(batch_json_data)):
                workload = workloads_to_relabel_fixed_index[i + batch_cursor]
                result = results[i]
                if result['status'] != 'updated':
                    csv_report.add_line_from_object(workload_to_csv_report(workload, False, result['message']))
                    failed_count += 1
                    total_failed_count += 1
                else:
                    csv_report.add_line_from_object(workload_to_csv_report(workload, True, new_labels=workloads_list_changed_labels_for_report[workload]))
                    created_count += 1
                    total_created_count += 1

            print("  - {} updated with success, {} failures (read report to get reasons)".format(created_count, failed_count))
            # rewrite reports after each batch so progress survives a crash
            csv_report.write_to_csv(output_file_csv)
            csv_report.write_to_excel(output_file_excel)

            batch_cursor += batch_size
        print(" * DONE - {} workloads labels updated with success, {} failures and {} ignored. A report was created in {} and {}".format(total_created_count, total_failed_count, ignored_workloads_count, output_file_csv, output_file_excel))
        # </editor-fold>
    else:
        print("\n*************")
        print(" WARNING!!! --confirm option was not used so no Workloads were relabeled and no Labels were created")
        print("- {} Managed Workloads were in the queue for relabeling".format(len(workloads_to_relabel)))
        print("- {} Managed Workloads were marked as Ignored (read reports for details)".format(csv_report.lines_count()))
        print("- {} Labels were found to be missing and to be created".format(len(labels_to_be_created)))
        print("*************")
        # report the changes that WOULD have been made with --confirm
        for workload in workloads_to_relabel_fixed_index:
            csv_report.add_line_from_object(workload_to_csv_report(workload, True, new_labels=workloads_list_changed_labels_for_report[workload]))

        print(" * Writing report file '{}' ... ".format(output_file_csv), end='', flush=True)
        csv_report.write_to_csv(output_file_csv)
        print("DONE")
        print(" * Writing report file '{}' ... ".format(output_file_excel), end='', flush=True)
        csv_report.write_to_excel(output_file_excel)
        print("DONE")
508
+
509
+
510
+ command_object = Command(command_name, __main, fill_parser, objects_load_filter)
@@ -0,0 +1,83 @@
1
+ from typing import List
2
+ import illumio_pylo as pylo
3
+ import argparse
4
+ import sys
5
+ import math
6
+ import colorama
7
+ from .misc import make_filename_with_timestamp
8
+ from . import Command
9
+
10
# CLI registration metadata consumed by the command framework:
# the sub-command's name and the PCE object types it needs pre-loaded.
command_name = 'workload-reset-ven-names-to-null'
objects_load_filter = ['workloads', 'labels']
12
+
13
+
14
def fill_parser(parser: argparse.ArgumentParser):
    """Declare the command-line options understood by this command."""
    parser.add_argument(
        '--confirm',
        action='store_true',
        help="No change will be implemented in the PCE until you use this function to confirm you're good with them after review",
    )
    parser.add_argument(
        '--batch-size',
        type=int,
        required=False,
        default=500,
        help='Number of Workloads to update per API call',
    )
19
+
20
+
21
def __main(args, org: pylo.Organization, **kwargs):
    """Find managed Workloads whose forced (PCE-side) name does not match
    their VEN-reported hostname and push a name update for them in batches.

    Without --confirm the analysis is only printed; nothing is sent to the PCE.
    """
    batch_size = args['batch_size']
    confirmed_changes = args['confirm']

    count_managed_workloads = 0
    # NOTE: the original also declared two counter variables here that were
    # never incremented; the list lengths below are the actual counts.
    workloads_with_forced_names: List[pylo.Workload] = []
    workloads_with_mismatching_names: List[pylo.Workload] = []

    # iterate through each workload
    for wkl in org.WorkloadStore.itemsByHRef.values():
        # only care about Managed workloads (they carry a VEN-reported hostname)
        if wkl.unmanaged:
            continue

        count_managed_workloads += 1
        if wkl.forced_name is not None:
            workloads_with_forced_names.append(wkl)
            # compare case-insensitively with FQDN suffixes stripped
            short_forced_name = wkl.static_name_stripped_fqdn(wkl.forced_name).lower()
            short_hostname = wkl.static_name_stripped_fqdn(wkl.hostname).lower()
            if short_forced_name != short_hostname:
                workloads_with_mismatching_names.append(wkl)
                # BUGFIX: the message previously labelled the forced name as
                # 'hostname=' which made the output misleading
                print(f"Found mismatching forced name for {wkl.hostname} (forced_name={wkl.forced_name})")

    print()
    print(" * Summary of Analysis:")

    print(f"  - Found {count_managed_workloads} Managed Workloads")
    print(f"  - Found {len(workloads_with_forced_names)} Workloads with Forced Names")
    print(f"  - Found {len(workloads_with_mismatching_names)} Workloads with Mismatching Forced Names")

    # <editor-fold desc="JSON Payloads generation">

    # Each batch is a list of {"href": ..., "name": ...} objects sent to the
    # bulk-update API.
    # NOTE(review): despite the command name ('...-to-null'), the payload sets
    # 'name' to the stripped hostname rather than null — confirm this is the
    # intended behavior.

    if not confirmed_changes:
        print(colorama.Fore.YELLOW + "Changes have not been confirmed. Use the --confirm flag to confirm the changes and push to the PCE")
        # reset colorama colors
        print(colorama.Style.RESET_ALL)
        return

    # for loop for each batch of workloads
    batch_count = math.ceil(len(workloads_with_mismatching_names) / batch_size)
    for i in range(batch_count):
        # get the next batch of workloads
        batch = workloads_with_mismatching_names[i * batch_size: (i + 1) * batch_size]
        # create a list of objects with the structure described above
        payload = [{"href": wkl.href, "name": wkl.static_name_stripped_fqdn(wkl.hostname)} for wkl in batch]
        # debug display
        print(f"Sending payload for batch {i + 1} of {batch_count} ({len(payload)} workloads)")

        org.connector.objects_workload_update_bulk(payload)

    # </editor-fold>
81
+
82
+
83
+ command_object = Command(command_name, __main, fill_parser, objects_load_filter)
@@ -0,0 +1,80 @@
1
+ import illumio_pylo as pylo
2
+ import argparse
3
+ from typing import *
4
+ from . import Command
5
+
6
# CLI registration metadata consumed by the command framework:
# the sub-command's name and the PCE object types it needs pre-loaded.
command_name = "workload-used-in-rules-finder"
objects_load_filter = ['workloads', 'rules', 'rulesets']
8
+
9
+
10
def fill_parser(parser: argparse.ArgumentParser):
    """Declare this command's CLI flags on the given parser."""
    parser.add_argument(
        '--only-deleted',
        action='store_true',
        help='only look for deleted workloads',
    )
12
+
13
+
14
def __main(args, org: pylo.Organization, **kwargs):
    """List the Rulesets and Rules which directly reference Workloads.

    With --only-deleted, only deleted (or temporary) workloads are considered.
    """

    settings_only_delete_workloads = args['only_deleted']

    print()
    print("**** Now parsing workloads and rules.... ****")

    workloads_to_inspect = org.WorkloadStore.itemsByHRef.values()
    global_count_concerned_workloads = 0
    global_concerned_rulesets = {}   # used as an ordered set of Rulesets
    global_concerned_rules = {}      # used as an ordered set of Rules

    for workload in workloads_to_inspect:

        if settings_only_delete_workloads:
            # BUGFIX: the original condition was inverted — it skipped the
            # deleted/temporary workloads it was supposed to report on and
            # kept the live ones instead.
            if not (workload.temporary is True or workload.deleted is True):
                continue

        # Rulesets/Rules referencing this particular workload
        concerned_rulesets: Dict[pylo.Ruleset, Dict[pylo.Rule, pylo.Rule]] = {}
        count_concerned_rules = 0
        for referencer in workload.get_references():
            if type(referencer) is pylo.RuleHostContainer:
                concerned_rule = referencer.owner  # type: pylo.Rule
                concerned_ruleset = concerned_rule.owner

                global_concerned_rulesets[concerned_ruleset] = True
                global_concerned_rules[concerned_rule] = True

                # count each rule only once per workload
                if concerned_ruleset not in concerned_rulesets:
                    concerned_rulesets[concerned_ruleset] = {concerned_rule: concerned_rule}
                    count_concerned_rules += 1
                elif concerned_rule not in concerned_rulesets[concerned_ruleset]:
                    concerned_rulesets[concerned_ruleset][concerned_rule] = concerned_rule
                    count_concerned_rules += 1

        if len(concerned_rulesets) < 1:  # this workload was not used in any ruleset
            continue

        global_count_concerned_workloads += 1

        print(" - Workload {} HREF {} is used {} Rulesets and {} Rules".format(workload.get_name(), workload.href, len(concerned_rulesets), count_concerned_rules))
        for ruleset in concerned_rulesets:
            print("    - in ruleset '{}' HREF:{}".format(ruleset.name, ruleset.href))

    def get_name(obj):
        # sort key: object (Ruleset) name
        return obj.name

    tmp_rulesets = sorted(global_concerned_rulesets.keys(), key=get_name)

    print("\n* For convenience here is the consolidated list of Rulesets:")
    for ruleset in tmp_rulesets:
        ruleset_url = ruleset.get_ruleset_url()
        print("  - '{}' HREF: {} URL: {}".format(ruleset.name, ruleset.href, ruleset_url))

    print("\n***** DONE with workloads & rules parsing *****")
    print("** Total: {} Workloads used in {} Rulesets and {} Rules".format(global_count_concerned_workloads,
                                                                           len(global_concerned_rulesets),
                                                                           len(global_concerned_rules)))

    print("\n**** END OF SCRIPT ****\n")
75
+
76
+
77
# Register this command with the CLI dispatcher.
# NOTE(review): skip_pce_config_loading=True is set even though
# objects_load_filter asks for workloads/rules/rulesets — confirm the
# framework still loads those objects when PCE config loading is skipped.
command_object = Command(command_name, __main,
                         fill_parser,
                         load_specific_objects_only=objects_load_filter,
                         skip_pce_config_loading=True)