illumio-pylo 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- illumio_pylo/API/APIConnector.py +1308 -0
- illumio_pylo/API/AuditLog.py +42 -0
- illumio_pylo/API/ClusterHealth.py +136 -0
- illumio_pylo/API/CredentialsManager.py +286 -0
- illumio_pylo/API/Explorer.py +1077 -0
- illumio_pylo/API/JsonPayloadTypes.py +240 -0
- illumio_pylo/API/RuleSearchQuery.py +128 -0
- illumio_pylo/API/__init__.py +0 -0
- illumio_pylo/AgentStore.py +139 -0
- illumio_pylo/Exception.py +44 -0
- illumio_pylo/Helpers/__init__.py +3 -0
- illumio_pylo/Helpers/exports.py +508 -0
- illumio_pylo/Helpers/functions.py +166 -0
- illumio_pylo/IPList.py +135 -0
- illumio_pylo/IPMap.py +285 -0
- illumio_pylo/Label.py +25 -0
- illumio_pylo/LabelCommon.py +48 -0
- illumio_pylo/LabelGroup.py +68 -0
- illumio_pylo/LabelStore.py +403 -0
- illumio_pylo/LabeledObject.py +25 -0
- illumio_pylo/Organization.py +258 -0
- illumio_pylo/Query.py +331 -0
- illumio_pylo/ReferenceTracker.py +41 -0
- illumio_pylo/Rule.py +671 -0
- illumio_pylo/Ruleset.py +306 -0
- illumio_pylo/RulesetStore.py +101 -0
- illumio_pylo/SecurityPrincipal.py +62 -0
- illumio_pylo/Service.py +256 -0
- illumio_pylo/SoftwareVersion.py +125 -0
- illumio_pylo/VirtualService.py +17 -0
- illumio_pylo/VirtualServiceStore.py +75 -0
- illumio_pylo/Workload.py +506 -0
- illumio_pylo/WorkloadStore.py +289 -0
- illumio_pylo/__init__.py +82 -0
- illumio_pylo/cli/NativeParsers.py +96 -0
- illumio_pylo/cli/__init__.py +134 -0
- illumio_pylo/cli/__main__.py +10 -0
- illumio_pylo/cli/commands/__init__.py +32 -0
- illumio_pylo/cli/commands/credential_manager.py +168 -0
- illumio_pylo/cli/commands/iplist_import_from_file.py +185 -0
- illumio_pylo/cli/commands/misc.py +7 -0
- illumio_pylo/cli/commands/ruleset_export.py +129 -0
- illumio_pylo/cli/commands/update_pce_objects_cache.py +44 -0
- illumio_pylo/cli/commands/ven_duplicate_remover.py +366 -0
- illumio_pylo/cli/commands/ven_idle_to_visibility.py +287 -0
- illumio_pylo/cli/commands/ven_upgrader.py +226 -0
- illumio_pylo/cli/commands/workload_export.py +251 -0
- illumio_pylo/cli/commands/workload_import.py +423 -0
- illumio_pylo/cli/commands/workload_relabeler.py +510 -0
- illumio_pylo/cli/commands/workload_reset_names_to_null.py +83 -0
- illumio_pylo/cli/commands/workload_used_in_rule_finder.py +80 -0
- illumio_pylo/docs/Doxygen +1757 -0
- illumio_pylo/tmp.py +104 -0
- illumio_pylo/utilities/__init__.py +0 -0
- illumio_pylo/utilities/cli.py +10 -0
- illumio_pylo/utilities/credentials.example.json +20 -0
- illumio_pylo/utilities/explorer_report_exporter.py +86 -0
- illumio_pylo/utilities/health_monitoring.py +102 -0
- illumio_pylo/utilities/iplist_analyzer.py +148 -0
- illumio_pylo/utilities/iplists_stats_duplicates_unused_finder.py +75 -0
- illumio_pylo/utilities/resources/iplists-import-example.csv +3 -0
- illumio_pylo/utilities/resources/iplists-import-example.xlsx +0 -0
- illumio_pylo/utilities/resources/workload-exporter-filter-example.csv +3 -0
- illumio_pylo/utilities/resources/workloads-import-example.csv +2 -0
- illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
- illumio_pylo/utilities/ven_compatibility_report_export.py +240 -0
- illumio_pylo/utilities/ven_idle_to_illumination.py +344 -0
- illumio_pylo/utilities/ven_reassign_pce.py +183 -0
- illumio_pylo-0.2.5.dist-info/LICENSE +176 -0
- illumio_pylo-0.2.5.dist-info/METADATA +197 -0
- illumio_pylo-0.2.5.dist-info/RECORD +73 -0
- illumio_pylo-0.2.5.dist-info/WHEEL +5 -0
- illumio_pylo-0.2.5.dist-info/top_level.txt +1 -0
illumio_pylo/cli/commands/ven_duplicate_remover.py
@@ -0,0 +1,366 @@
import datetime

import click

import illumio_pylo as pylo
import argparse
from typing import Dict, List, Literal, Optional
from .misc import make_filename_with_timestamp
from . import Command

command_name = 'ven-duplicate-remover'
objects_load_filter = ['labels']


def fill_parser(parser: argparse.ArgumentParser):
    parser.add_argument('--verbose', '-v', action='store_true',
                        help='Enable verbose output')
    parser.add_argument('--proceed-with-deletion', action='store_true',
                        help='Actually perform the deletions. Considered a dry-run if not specified.')
    parser.add_argument('--do-not-require-deletion-confirmation', action='store_true',
                        help='Do not ask for confirmation before deleting workloads')
    parser.add_argument('--filter-label', '-fl', action='append',
                        help='Only look at workloads matching the specified labels')
    parser.add_argument('--ignore-unmanaged-workloads', '-iuw', action='store_true',
                        help='Do not touch unmanaged workloads nor include them when detecting duplicates')
    parser.add_argument('--report-format', '-rf', action='append', type=str, choices=['csv', 'xlsx'], default=None,
                        help='Which report formats to produce (repeat the option to get several)')
    parser.add_argument('--do-not-delete-the-most-recent-workload', '-nrc', action='store_true',
                        help='The most recently created workload will not be deleted')
    parser.add_argument('--do-not-delete-the-most-recently-heartbeating-workload', '-nrh', action='store_true',
                        help='The workload with the most recent heartbeat will not be deleted')
    parser.add_argument('--do-not-delete-if-last-heartbeat-is-more-recent-than', type=int, default=None,
                        help='Workloads will not be deleted if their last heartbeat is more recent than the specified number of days')
    parser.add_argument('--override-pce-offline-timer-to', type=int, default=None,
                        help='Override the PCE offline timer to the specified number of days')
    parser.add_argument('--limit-number-of-deleted-workloads', '-l', type=int, default=None,
                        help='Limit the number of workloads to be deleted, e.g. for a limited test run.')


def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
    report_wanted_format: List[Literal['csv', 'xlsx']] = args['report_format']
    if report_wanted_format is None:
        report_wanted_format = ['xlsx']

    arg_verbose = args['verbose']
    arg_proceed_with_deletion = args['proceed_with_deletion'] is True
    arg_do_not_require_deletion_confirmation = args['do_not_require_deletion_confirmation'] is True
    arg_ignore_unmanaged_workloads = args['ignore_unmanaged_workloads'] is True
    arg_do_not_delete_the_most_recent_workload = args['do_not_delete_the_most_recent_workload'] is True
    arg_do_not_delete_the_most_recently_heartbeating_workload = args['do_not_delete_the_most_recently_heartbeating_workload'] is True
    arg_do_not_delete_if_last_heartbeat_is_more_recent_than = args['do_not_delete_if_last_heartbeat_is_more_recent_than']
    arg_override_pce_offline_timer_to = args['override_pce_offline_timer_to']
    arg_limit_number_of_deleted_workloads = args['limit_number_of_deleted_workloads']

    output_file_prefix = make_filename_with_timestamp('ven-duplicate-removal_')
    output_file_csv = output_file_prefix + '.csv'
    output_file_excel = output_file_prefix + '.xlsx'

    csv_report_headers = [{'name': 'name'},
                          {'name': 'hostname'}]

    # insert all label dimensions
    for label_type in org.LabelStore.label_types:
        csv_report_headers.append({'name': 'label_' + label_type, 'wrap_text': False})

    csv_report_headers += [
        'online',
        {'name': 'last_heartbeat', 'max_width': 15, 'wrap_text': False},
        {'name': 'created_at', 'max_width': 15, 'wrap_text': False},
        'action',
        {'name': 'link_to_pce', 'max_width': 15, 'wrap_text': False, 'link_text': 'See in PCE', 'is_url': True},
        {'name': 'href', 'max_width': 15, 'wrap_text': False}]
    csv_report = pylo.ArraysToExcel()
    sheet: pylo.ArraysToExcel.Sheet = csv_report.create_sheet('duplicates', csv_report_headers, force_all_wrap_text=True, multivalues_cell_delimiter=',')
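
    # Editor's note: header entries may be plain strings or dicts; the dict keys
    # used above ('name', 'max_width', 'wrap_text', 'link_text', 'is_url') appear
    # to be rendering hints consumed by pylo.ArraysToExcel when the sheet is
    # written out -- inferred from this file alone, not from pylo documentation.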

    filter_labels: List[pylo.Label] = []  # the list of labels to filter the workloads against
    if args['filter_label'] is not None:
        for label_name in args['filter_label']:
            label = org.LabelStore.find_label_by_name(label_name)
            if label is None:
                raise pylo.PyloEx("Cannot find label '{}' in the PCE".format(label_name))
            filter_labels.append(label)

    # <editor-fold desc="Download workloads from PCE">
    if not pce_cache_was_used:
        print("* Downloading Workloads data from the PCE (it may take a moment for large amounts of workloads) ... ", flush=True, end='')
        if args['filter_label'] is None:
            workloads_json = org.connector.objects_workload_get(async_mode=True, max_results=1000000)
        else:
            filter_labels_list_of_list: List[List[pylo.Label]] = []
            # group the filter labels by type: one list of labels per label type
            for label_type, label_list in org.LabelStore.Utils.list_to_dict_by_type(filter_labels).items():
                filter_labels_list_of_list.append(label_list)

            # expand filter_labels_list_of_list into a matrix of all possible combinations
            # example: [[a,b],[c,d]] becomes [[a,c],[a,d],[b,c],[b,d]]
            filter_labels_matrix = [[]]
            for label_list in filter_labels_list_of_list:
                new_matrix = []
                for label in label_list:
                    for row in filter_labels_matrix:
                        new_row = row.copy()
                        new_row.append(label.href)
                        new_matrix.append(new_row)
                filter_labels_matrix = new_matrix

            workloads_json = org.connector.objects_workload_get(async_mode=False, max_results=1000000, filter_by_label=filter_labels_matrix)

        org.WorkloadStore.load_workloads_from_json(workloads_json)

        print("OK! {} workloads loaded".format(org.WorkloadStore.count_workloads()))
    # </editor-fold>
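
    # Editor's note: the nested loops above compute the cartesian product of the
    # per-type label lists. A minimal sketch of the same expansion using the
    # standard library (illustration only, not part of this package):
    #
    #   import itertools
    #   hrefs_by_type = [[label.href for label in label_list]
    #                    for label_list in filter_labels_list_of_list]
    #   filter_labels_matrix = [list(combo) for combo in itertools.product(*hrefs_by_type)]
    #
    # itertools.product([a, b], [c, d]) yields (a, c), (a, d), (b, c), (b, d):
    # the same combinations as the loops above, though in a different order.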

    all_workloads: List[pylo.Workload]  # the list of all workloads to be processed

    if pce_cache_was_used:
        # label filters (if any) were not applied to the cached data, so apply them now
        print("* Filtering workloads loaded from cache based on their labels... ", end='', flush=True)
        all_workloads = list(org.WorkloadStore.find_workloads_matching_all_labels(filter_labels).values())
        print("OK! {} workloads left after filtering".format(len(all_workloads)))
    else:
        # the label filter was already applied while downloading from the PCE
        all_workloads = org.WorkloadStore.workloads

    def add_workload_to_report(workload: pylo.Workload, action: str):
        url_link_to_pce = workload.get_pce_ui_url()
        new_row = {
            'name': workload.get_name(),
            'hostname': workload.hostname,
            'online': workload.online,
            'last_heartbeat': workload.ven_agent.get_last_heartbeat_date().strftime('%Y-%m-%d %H:%M'),
            'created_at': workload.created_at,
            'href': workload.href,
            'link_to_pce': url_link_to_pce,
            'action': action
        }

        for label_type in org.LabelStore.label_types:
            new_row['label_' + label_type] = workload.get_label_name(label_type, '')

        sheet.add_line_from_object(new_row)

    duplicated_hostnames = DuplicateRecordManager(arg_override_pce_offline_timer_to)

    print(" * Looking for VENs with duplicated hostnames")

    for workload in all_workloads:
        if workload.deleted:
            continue
        if workload.unmanaged and arg_ignore_unmanaged_workloads:
            continue

        duplicated_hostnames.add_workload(workload)

    print(" * Found {} duplicated hostnames".format(duplicated_hostnames.count_duplicates()))

    delete_tracker = org.connector.new_tracker_workload_multi_delete()

    for dup_hostname, dup_record in duplicated_hostnames._records.items():
        if not dup_record.has_duplicates():
            continue

        print("  - hostname '{}' has duplicates ({} online, {} offline, {} unmanaged)".format(dup_hostname,
                                                                                              len(dup_record.online),
                                                                                              len(dup_record.offline),
                                                                                              len(dup_record.unmanaged)))

        latest_created_workload = dup_record.find_latest_created_at()
        latest_heartbeat_workload = dup_record.find_latest_heartbeat()
        print("    - Latest created at {} and latest heartbeat at {}".format(latest_created_workload.created_at,
                                                                             latest_heartbeat_workload.ven_agent.get_last_heartbeat_date()))

        if dup_record.count_online() == 0:
            print("    - IGNORED: there is no VEN online")
            for wkl in dup_record.offline:
                add_workload_to_report(wkl, "ignored (no VEN online)")
            continue

        if dup_record.count_online() > 1:
            print("    - WARNING: more than one VEN is online")

        # Don't delete online workloads but still show them in the report
        for wkl in dup_record.online:
            add_workload_to_report(wkl, "ignored (VEN is online)")

        for wkl in dup_record.offline:
            if arg_do_not_delete_the_most_recent_workload and wkl is latest_created_workload:
                print("    - IGNORED: wkl {}/{} is the most recently created".format(wkl.get_name_stripped_fqdn(), wkl.href))
                add_workload_to_report(wkl, "ignored (it is the most recently created)")
            elif arg_do_not_delete_the_most_recently_heartbeating_workload and wkl is latest_heartbeat_workload:
                print("    - IGNORED: wkl {}/{} is the most recently heartbeating".format(wkl.get_name_stripped_fqdn(), wkl.href))
                add_workload_to_report(wkl, "ignored (it is the most recently heartbeating)")
            elif arg_do_not_delete_if_last_heartbeat_is_more_recent_than is not None \
                    and wkl.ven_agent.get_last_heartbeat_date() > datetime.datetime.now() - datetime.timedelta(days=arg_do_not_delete_if_last_heartbeat_is_more_recent_than):
                print("    - IGNORED: wkl {}/{} has a last heartbeat more recent than {} days".format(wkl.get_name_stripped_fqdn(), wkl.href, arg_do_not_delete_if_last_heartbeat_is_more_recent_than))
                add_workload_to_report(wkl, "ignored (last heartbeat is more recent than {} days)".format(arg_do_not_delete_if_last_heartbeat_is_more_recent_than))
            else:
                if arg_limit_number_of_deleted_workloads is not None and delete_tracker.count_entries() >= arg_limit_number_of_deleted_workloads:
                    print("    - IGNORED: wkl {}/{} because the limit of {} workloads to be deleted was reached".format(wkl.get_name_stripped_fqdn(), wkl.href, arg_limit_number_of_deleted_workloads))
                    add_workload_to_report(wkl, "ignored (limit of {} workloads to be deleted was reached)".format(arg_limit_number_of_deleted_workloads))
                else:
                    delete_tracker.add_workload(wkl)
                    print("    - added offline wkl {}/{} to the delete list".format(wkl.get_name_stripped_fqdn(), wkl.href))

        for wkl in dup_record.unmanaged:
            if arg_limit_number_of_deleted_workloads is not None and delete_tracker.count_entries() >= arg_limit_number_of_deleted_workloads:
                print("    - IGNORED: wkl {}/{} because the limit of {} workloads to be deleted was reached".format(wkl.get_name_stripped_fqdn(), wkl.href, arg_limit_number_of_deleted_workloads))
                add_workload_to_report(wkl, "ignored (limit of {} workloads to be deleted was reached)".format(arg_limit_number_of_deleted_workloads))
            else:
                delete_tracker.add_workload(wkl)
                print("    - added unmanaged wkl {}/{} to the delete list".format(wkl.get_name_stripped_fqdn(), wkl.href))

    print()

    if delete_tracker.count_entries() < 1:
        print(" * No workloads to be deleted")

    elif arg_proceed_with_deletion:
        print(" * Found {} workloads to be deleted. Listing:".format(delete_tracker.count_entries()))
        for wkl in delete_tracker.workloads:
            print("   - {} (href: {} url: {})".format(wkl.get_name_stripped_fqdn(), wkl.href, wkl.get_pce_ui_url()))

        print()

        if arg_do_not_require_deletion_confirmation:
            print(" * '--do-not-require-deletion-confirmation' option was used, no confirmation will be asked")
        else:
            confirm = click.confirm(" * Are you sure you want to proceed with the deletion of {} workloads?".format(delete_tracker.count_entries()), abort=True)
            if not confirm:
                print(" * Aborted by user")
                return

        print(" * Executing deletion requests ... ", end='', flush=True)
        delete_tracker.execute(unpair_agents=True)
        print("DONE")

        for wkl in delete_tracker.workloads:
            error_msg = delete_tracker.get_error_by_href(wkl.href)
            if error_msg is None:
                add_workload_to_report(wkl, "deleted")
            else:
                print("   - an error occurred when deleting workload {}/{}: {}".format(wkl.get_name_stripped_fqdn(), wkl.href, error_msg))
                add_workload_to_report(wkl, "API error: " + error_msg)

        print()
        print(" * {} workloads deleted / {} with errors".format(delete_tracker.count_entries() - delete_tracker.count_errors(), delete_tracker.count_errors()))
        print()
    else:
        print(" * Found {} workloads to be deleted BUT NO '--proceed-with-deletion' OPTION WAS USED".format(delete_tracker.count_entries()))
        for wkl in delete_tracker.workloads:
            add_workload_to_report(wkl, "TO BE DELETED (no confirm option used)")

    if sheet.lines_count() >= 1:
        if len(report_wanted_format) < 1:
            print(" * No report format was specified, no report will be generated")
        else:
            sheet.reorder_lines(['hostname'])  # sort by hostname for better readability
            for report_format in report_wanted_format:
                output_filename = output_file_prefix + '.' + report_format
                print(" * Writing report file '{}' ... ".format(output_filename), end='', flush=True)
                if report_format == 'csv':
                    sheet.write_to_csv(output_filename)
                elif report_format == 'xlsx':
                    csv_report.write_to_excel(output_filename)
                else:
                    raise pylo.PyloEx("Unknown format for report: '{}'".format(report_format))
                print("DONE")

    else:
        print("\n** WARNING: no entry matched your filters so reports were not generated!\n")


# make this command available to the CLI system
command_object = Command(command_name, __main, fill_parser, objects_load_filter)


class DuplicateRecordManager:
    class DuplicatedRecord:
        def __init__(self, pce_offline_timer_override: Optional[int] = None):
            self.offline = []
            self.online = []
            self.unmanaged = []
            self.all: List[pylo.Workload] = []
            self._pce_offline_timer_override: Optional[int] = pce_offline_timer_override

        def add_workload(self, workload: 'pylo.Workload'):
            self.all.append(workload)
            if workload.unmanaged:
                self.unmanaged.append(workload)
            elif self._pce_offline_timer_override is None:
                if workload.online:
                    self.online.append(workload)
                else:
                    self.offline.append(workload)
            else:
                # the override replaces the PCE's own online/offline flag: a VEN is
                # considered online if its last heartbeat is within the given number of days
                if workload.ven_agent.get_last_heartbeat_date() > datetime.datetime.now() - datetime.timedelta(days=self._pce_offline_timer_override):
                    self.online.append(workload)
                else:
                    self.offline.append(workload)

        def count_workloads(self):
            return len(self.unmanaged) + len(self.online) + len(self.offline)

        def count_online(self):
            return len(self.online)

        def count_offline(self):
            return len(self.offline)

        def count_unmanaged(self):
            return len(self.unmanaged)

        def has_duplicates(self):
            return len(self.offline) + len(self.online) + len(self.unmanaged) > 1

        def find_latest_created_at(self) -> 'pylo.Workload':
            latest: Optional[pylo.Workload] = None
            for wkl in self.all:
                if wkl.unmanaged:
                    continue
                if latest is None or wkl.created_at > latest.created_at:
                    latest = wkl
            return latest

        def find_latest_heartbeat(self) -> 'pylo.Workload':
            latest: Optional[pylo.Workload] = None
            for wkl in self.all:
                if wkl.unmanaged:
                    continue
                if latest is None or wkl.ven_agent.get_last_heartbeat_date() > latest.ven_agent.get_last_heartbeat_date():
                    latest = wkl
            return latest

    def __init__(self, pce_offline_timer_override: Optional[int] = None):
        self._records: Dict[str, DuplicateRecordManager.DuplicatedRecord] = {}
        self._pce_offline_timer_override: Optional[int] = pce_offline_timer_override

    def count_record(self):
        return len(self._records)

    def count_workloads(self):
        total = 0
        for record in self._records.values():
            total += record.count_workloads()
        return total

    def records(self) -> List['DuplicateRecordManager.DuplicatedRecord']:
        return list(self._records.values())

    def count_duplicates(self):
        count = 0
        for record in self._records.values():
            if record.has_duplicates():
                count += 1
        return count

    def add_workload(self, workload: pylo.Workload):
        # workloads are grouped by hostname, case-insensitive and FQDN-stripped
        lower_hostname = workload.get_name_stripped_fqdn().lower()

        if lower_hostname not in self._records:
            self._records[lower_hostname] = self.DuplicatedRecord(self._pce_offline_timer_override)
        record = self._records[lower_hostname]
        record.add_workload(workload)
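
A short sketch of the grouping rule implemented by the class above: workloads are keyed by their lower-cased, FQDN-stripped hostname, so 'WEB01' and 'web01.corp.example.com' land in the same record. The snippet below is an illustration only; it uses plain strings in place of pylo.Workload objects and assumes get_name_stripped_fqdn() simply drops the DNS suffix:

hostnames = ['web01.corp.example.com', 'WEB01', 'db02.corp.example.com']
records = {}
for h in hostnames:
    key = h.split('.', 1)[0].lower()  # FQDN-stripped + lower-cased, as in add_workload()
    records.setdefault(key, []).append(h)
duplicates = {k: v for k, v in records.items() if len(v) > 1}
# duplicates == {'web01': ['web01.corp.example.com', 'WEB01']}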

illumio_pylo/cli/commands/ven_idle_to_visibility.py
@@ -0,0 +1,287 @@
from typing import Dict, List, Any, Union
from dataclasses import dataclass
import sys
import argparse
import math
import illumio_pylo as pylo
from .misc import make_filename_with_timestamp
from . import Command
from ..NativeParsers import LabelParser

command_name = 'ven-idle-to-visibility'
objects_load_filter = ['workloads', 'labels']


def fill_parser(parser: argparse.ArgumentParser):
    parser.add_argument('--filter-on-href-from-file', type=str, required=False, default=None,
                        help='Filter agents on workload hrefs found in the specified CSV file')

    parser.add_argument('--ignore-incompatibilities', type=str, nargs='+', required=False, default=None,
                        help="Ignore specific incompatibilities and force the mode switch!")

    parser.add_argument('--ignore-all-incompatibilities', action='store_true',
                        help="Don't check the compatibility report and just do the change!")

    parser.add_argument('-c', '--confirm', action='store_true',
                        help='Request the mode change of the Agents')

    parser.add_argument('-m', '--mode', type=str.lower, required=True, choices=['build', 'test'],
                        help='Select whether you want to switch from IDLE to BUILD or TEST')


@dataclass
class MyBuiltInParser:
    filter_env_label = LabelParser('--filter-env-label', '-env', 'env', is_required=False, allow_multiple=True)
    filter_app_label = LabelParser('--filter-app-label', '-app', 'app', is_required=False, allow_multiple=True)
    filter_role_label = LabelParser('--filter-role-label', '-role', 'role', is_required=False, allow_multiple=True)
    filter_loc_label = LabelParser('--filter-loc-label', '-loc', 'loc', is_required=False, allow_multiple=True)


def __main(args, org: pylo.Organization, native_parsers: MyBuiltInParser, **kwargs):
    confirmed_updates = args['confirm']
    switch_to_mode = args['mode']
    href_filter_file = args['filter_on_href_from_file']
    options_ignore_all_incompatibilities = args['ignore_all_incompatibilities']
    option_ignore_incompatibilities: Union[None, Dict[str, bool]] = None
    if args['ignore_incompatibilities'] is not None:
        option_ignore_incompatibilities = {}
        for entry in args['ignore_incompatibilities']:
            option_ignore_incompatibilities[entry] = True

    minimum_supported_version = pylo.SoftwareVersion("18.2.0-0")

    output_file_prefix = make_filename_with_timestamp('ven-mode-update-results_')
    output_file_csv = output_file_prefix + '.csv'
    output_file_excel = output_file_prefix + '.xlsx'

    csv_report_headers = ['name', 'hostname', 'role', 'app', 'env', 'loc', 'status', 'changed_mode', 'details', 'href']
    csv_report = pylo.ArrayToExport(csv_report_headers)

    def add_workload_to_report(wkl: pylo.Workload, changed_mode: str, details: str):
        labels = wkl.get_labels_str_list()
        new_row = {
            'name': wkl.get_name(),
            'hostname': wkl.hostname,
            'role': labels[0],
            'app': labels[1],
            'env': labels[2],
            'loc': labels[3],
            'href': wkl.href,
            'status': wkl.get_status_string(),
            'changed_mode': changed_mode,
            'details': details
        }

        csv_report.add_line_from_object(new_row)

    href_filter_data = None
    if href_filter_file is not None:
        print(" * Loading CSV input file '{}'...".format(href_filter_file), flush=True, end='')
        href_filter_data = pylo.CsvExcelToObject(href_filter_file,
                                                 expected_headers=[{'name': 'href', 'optional': False}])
        print('OK')
        print("   - CSV has {} columns and {} lines (headers don't count)".format(href_filter_data.count_columns(),
                                                                                  href_filter_data.count_lines()),
              flush=True)

    agents = {}
    for agent in org.AgentStore.items_by_href.values():
        if agent.mode == 'idle':
            agents[agent.href] = agent
    print(" * Found {} IDLE Agents".format(len(agents)))
    count_idle_agents_total = len(agents)

    print(" * Parsing filters")

    env_label_list = native_parsers.filter_env_label.results
    if env_label_list is None:
        print("   * No Environment Labels specified")
    else:
        print("   * Environment Labels specified")
        for label in env_label_list:
            print("     - label named '{}'".format(label.name))

    loc_label_list = native_parsers.filter_loc_label.results
    if loc_label_list is None:
        print("   * No Location Labels specified")
    else:
        print("   * Location Labels specified")
        for label in loc_label_list:
            print("     - label named '{}'".format(label.name))

    app_label_list = native_parsers.filter_app_label.results
    if app_label_list is None:
        print("   * No Application Labels specified")
    else:
        print("   * Application Labels specified")
        for label in app_label_list:
            print("     - label named '{}'".format(label.name))

    role_label_list = native_parsers.filter_role_label.results
    if role_label_list is None:
        print("   * No Role Labels specified")
    else:
        print("   * Role Labels specified")
        for label in role_label_list:
            print("     - label named '{}'".format(label.name))

    print(" * DONE")

    print(" * Applying filters to the list of Agents...", flush=True, end='')

    # iterate over a snapshot of the keys so entries can be deleted while looping
    for agent_href in list(agents.keys()):
        agent = agents[agent_href]
        workload = agent.workload

        if env_label_list is not None and (workload.env_label is None or workload.env_label not in env_label_list):
            del agents[agent_href]
            continue
        if loc_label_list is not None and (workload.loc_label is None or workload.loc_label not in loc_label_list):
            del agents[agent_href]
            continue
        if app_label_list is not None and (workload.app_label is None or workload.app_label not in app_label_list):
            del agents[agent_href]
            continue
        if role_label_list is not None and (workload.role_label is None or workload.role_label not in role_label_list):
            del agents[agent_href]
            continue

        if href_filter_data is not None:
            workload_href_found = False
            for href_entry in href_filter_data.objects():
                workload_href = href_entry['href']
                if workload_href is not None and workload_href == workload.href:
                    workload_href_found = True
                    break
            if not workload_href_found:
                del agents[agent_href]
                continue

    print("OK! {} VENs are matching filters (from initial list of {} IDLE VENs).".format(len(agents), count_idle_agents_total))

    print()
    print(" ** Request Compatibility Report for each Agent in IDLE mode **")

    agent_count = 0
    agent_green_count = 0
    agent_mode_changed_count = 0
    agent_skipped_not_online = 0
    agent_has_no_report_count = 0
    agent_report_failed_count = 0

    try:
        for agent in agents.values():
            agent_count += 1
            print(" - Agent #{}/{}: wkl NAME:'{}' HREF:{} Labels:{}".format(agent_count, len(agents), agent.workload.get_name(),
                                                                            agent.workload.href,
                                                                            agent.workload.get_labels_str())
                  )
            if not agent.workload.online:
                print("    - Agent is not ONLINE so we're skipping it")
                agent_skipped_not_online += 1
                add_workload_to_report(agent.workload, 'no', 'VEN is not online')
                continue

            if options_ignore_all_incompatibilities:
                if not confirmed_updates:
                    print("    - ** SKIPPING Agent mode change process as option '--confirm' was not used")
                    add_workload_to_report(agent.workload, 'no', '--confirm option was not used')
                    continue
                print("    - Request Agent mode switch to BUILD/TEST...", end='', flush=True)
                org.connector.objects_agent_change_mode(agent.workload.href, switch_to_mode)
                print("OK")
                agent_mode_changed_count += 1
                add_workload_to_report(agent.workload, 'yes', '')
                continue

            print("    - Downloading report...", flush=True, end='')
            report = org.connector.agent_get_compatibility_report(agent_href=agent.href, return_raw_json=False)
            print('OK')
            if report.empty:
                print("    - ** SKIPPING: Report does not exist")
                agent_has_no_report_count += 1
                add_workload_to_report(agent.workload, 'no', 'Compatibility report does not exist')
                continue
            print("    - Report status is '{}'".format(report.global_status))
            if report.global_status == 'green':
                agent_green_count += 1
                if not confirmed_updates:
                    print("    - ** SKIPPING Agent mode change process as option '--confirm' was not used")
                    add_workload_to_report(agent.workload, 'no', '--confirm option was not used')
                    continue
                print("    - Request Agent mode switch to BUILD/TEST...", end='', flush=True)
                org.connector.objects_agent_change_mode(agent.workload.href, switch_to_mode)
                print("OK")
                agent_mode_changed_count += 1
                add_workload_to_report(agent.workload, 'yes', '')
            else:
                print("    - the following issues were found in the report:", flush=True)
                failed_items = report.get_failed_items()
                issues_remaining = False
                for failed_item in failed_items:
                    if option_ignore_incompatibilities is not None and failed_item in option_ignore_incompatibilities:
                        print("      -{} (ignored because it's part of --ignore-incompatibilities list)".format(failed_item))
                    else:
                        print("      -{}".format(failed_item))
                        issues_remaining = True

                if not issues_remaining:
                    agent_green_count += 1
                    if not confirmed_updates:
                        print("    - ** SKIPPING Agent mode change process as option '--confirm' was not used")
                        add_workload_to_report(agent.workload, 'no', '--confirm option was not used')
                        continue
                    print("    - Request Agent mode switch to BUILD/TEST...", end='', flush=True)
                    org.connector.objects_agent_change_mode(agent.workload.href, switch_to_mode)
                    print("OK")
                    agent_mode_changed_count += 1
                    add_workload_to_report(agent.workload, 'yes', '')
                    continue

                add_workload_to_report(agent.workload, 'no',
                                       'compatibility report has reported issues: {}'.format(pylo.string_list_to_text(failed_items.keys()))
                                       )
                agent_report_failed_count += 1

    except:
        pylo.log.error("An unexpected error happened, an intermediate report will be written and the original traceback displayed")
        pylo.log.error(" * Writing report file '{}' ... ".format(output_file_csv))
        csv_report.write_to_csv(output_file_csv)
        pylo.log.error("DONE")
        pylo.log.error(" * Writing report file '{}' ... ".format(output_file_excel))
        csv_report.write_to_excel(output_file_excel)
        pylo.log.error("DONE")

        raise

    def myformat(name, value):
        return "{:<42} {:>6}".format(name, value)
        # return "{:<18} {:>6}".format(name, "${:.2f}".format(value))

    print("\n\n*** Statistics ***")
    print(myformat(" - IDLE Agents count (after filters):", agent_count))
    if confirmed_updates:
        print(myformat(" - Agents mode changed count:", agent_mode_changed_count))
    else:
        print(myformat(" - Agents with successful report count:", agent_green_count))
    print(myformat(" - SKIPPED because not online count:", agent_skipped_not_online))
    print(myformat(" - SKIPPED because report was not found:", agent_has_no_report_count))
    print(myformat(" - Agents with failed reports:", agent_report_failed_count))

    print()
    print(" * Writing report file '{}' ... ".format(output_file_csv), end='', flush=True)
    csv_report.write_to_csv(output_file_csv)
    print("DONE")
    print(" * Writing report file '{}' ... ".format(output_file_excel), end='', flush=True)
    csv_report.write_to_excel(output_file_excel)
    print("DONE")

    if not confirmed_updates:
        print()
        print(" ***** No Agent mode was switched because the --confirm option was not used *****")
        print()


command_object = Command(command_name, __main, fill_parser,
                         load_specific_objects_only=objects_load_filter,
                         native_parsers_as_class=MyBuiltInParser()
                         )
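
For context, the long main loop above reduces to a single gate: a VEN's mode is switched only when it is online, '--confirm' was given, and either all incompatibilities are ignored or every failed item of a non-empty compatibility report is on the ignore list. A condensed restatement (hypothetical helper, not part of the package; the empty-report case, which the command skips outright, is left out):

from typing import Iterable, Set

def should_switch_mode(online: bool, confirmed: bool, ignore_all: bool,
                       failed_items: Iterable[str], ignored: Set[str]) -> bool:
    if not online or not confirmed:
        return False
    if ignore_all:
        return True  # mode is forced without even downloading the report
    # a 'green' report has no failed items, so all() is trivially True here
    return all(item in ignored for item in failed_items)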