illumio-pylo 0.3.8__py3-none-any.whl → 0.3.10__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in that registry.
- illumio_pylo/API/APIConnector.py +90 -54
- illumio_pylo/API/JsonPayloadTypes.py +10 -0
- illumio_pylo/Helpers/functions.py +8 -13
- illumio_pylo/IPList.py +5 -9
- illumio_pylo/IPMap.py +3 -3
- illumio_pylo/Label.py +0 -1
- illumio_pylo/LabelCommon.py +1 -1
- illumio_pylo/LabelStore.py +24 -25
- illumio_pylo/LabeledObject.py +4 -5
- illumio_pylo/Organization.py +1 -3
- illumio_pylo/ReferenceTracker.py +0 -3
- illumio_pylo/Rule.py +2 -2
- illumio_pylo/Ruleset.py +7 -7
- illumio_pylo/RulesetStore.py +1 -1
- illumio_pylo/SecurityPrincipal.py +0 -5
- illumio_pylo/Workload.py +4 -13
- illumio_pylo/WorkloadStoreSubClasses.py +7 -10
- illumio_pylo/__init__.py +1 -1
- illumio_pylo/cli/__init__.py +0 -2
- illumio_pylo/cli/commands/credential_manager.py +7 -18
- illumio_pylo/cli/commands/iplist_analyzer.py +3 -9
- illumio_pylo/cli/commands/iplist_import_from_file.py +1 -2
- illumio_pylo/cli/commands/ruleset_export.py +16 -20
- illumio_pylo/cli/commands/update_pce_objects_cache.py +0 -1
- illumio_pylo/cli/commands/utils/LabelCreation.py +2 -2
- illumio_pylo/cli/commands/utils/misc.py +1 -2
- illumio_pylo/cli/commands/ven_compatibility_report_export.py +4 -14
- illumio_pylo/cli/commands/ven_duplicate_remover.py +26 -32
- illumio_pylo/cli/commands/ven_idle_to_visibility.py +2 -4
- illumio_pylo/cli/commands/ven_upgrader.py +1 -2
- illumio_pylo/cli/commands/workload_import.py +12 -14
- illumio_pylo/cli/commands/workload_reset_names_to_null.py +12 -14
- illumio_pylo/cli/commands/workload_update.py +25 -30
- illumio_pylo/cli/commands/workload_used_in_rule_finder.py +5 -5
- illumio_pylo/tmp.py +1 -0
- illumio_pylo/utilities/resources/workloads-import-example.csv +1 -1
- illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
- {illumio_pylo-0.3.8.dist-info → illumio_pylo-0.3.10.dist-info}/METADATA +2 -2
- illumio_pylo-0.3.10.dist-info/RECORD +72 -0
- {illumio_pylo-0.3.8.dist-info → illumio_pylo-0.3.10.dist-info}/WHEEL +1 -1
- illumio_pylo-0.3.8.dist-info/RECORD +0 -72
- {illumio_pylo-0.3.8.dist-info → illumio_pylo-0.3.10.dist-info}/LICENSE +0 -0
- {illumio_pylo-0.3.8.dist-info → illumio_pylo-0.3.10.dist-info}/top_level.txt +0 -0
illumio_pylo/cli/commands/ven_duplicate_remover.py CHANGED

@@ -4,7 +4,7 @@ import click
 import argparse
 
 import illumio_pylo as pylo
-from illumio_pylo import
+from illumio_pylo import ExcelHeader
 
 from .utils.misc import make_filename_with_timestamp
 from . import Command

@@ -12,6 +12,7 @@ from . import Command
 command_name = 'ven-duplicate-remover'
 objects_load_filter = ['labels']
 
+
 def fill_parser(parser: argparse.ArgumentParser):
     parser.add_argument('--verbose', '-v', action='store_true',
                         help='')

@@ -40,14 +41,11 @@ def fill_parser(parser: argparse.ArgumentParser):
                         help='Directory where to write the report file(s)')
 
 
-
 def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
-
-    report_wanted_format: List[Literal['csv','xlsx']] = args['report_format']
+    report_wanted_format: List[Literal['csv', 'xlsx']] = args['report_format']
     if report_wanted_format is None:
         report_wanted_format = ['xlsx']
 
-
     arg_verbose = args['verbose']
     arg_proceed_with_deletion = args['proceed_with_deletion'] is True
     arg_do_not_require_deletion_confirmation = args['do_not_require_deletion_confirmation'] is True

@@ -59,32 +57,29 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     arg_limit_number_of_deleted_workloads = args['limit_number_of_deleted_workloads']
     arg_report_output_dir: str = args['output_dir']
 
-
     output_file_prefix = make_filename_with_timestamp('ven-duplicate-removal_', arg_report_output_dir)
     output_file_csv = output_file_prefix + '.csv'
     output_file_excel = output_file_prefix + '.xlsx'
 
-
     csv_report_headers = pylo.ExcelHeaderSet([
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='name', max_width=40),
+        ExcelHeader(name='hostname', max_width=40)
     ])
     # insert all label dimensions
     for label_type in org.LabelStore.label_types:
-        csv_report_headers.append(ExcelHeader(name=
+        csv_report_headers.append(ExcelHeader(name=f'label_{label_type}', wrap_text=False))
 
     csv_report_headers.extend([
         'online',
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='last_heartbeat', max_width=15, wrap_text=False),
+        ExcelHeader(name='created_at', max_width=15, wrap_text=False),
         'action',
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='link_to_pce', max_width=15, wrap_text=False, url_text='See in PCE', is_url=True),
+        ExcelHeader(name='href', max_width=15, wrap_text=False)
     ])
     csv_report = pylo.ArraysToExcel()
     sheet: pylo.ArraysToExcel.Sheet = csv_report.create_sheet('duplicates', csv_report_headers, force_all_wrap_text=True, multivalues_cell_delimiter=',')
 
-
     filter_labels: List[pylo.Label] = []  # the list of labels to filter the workloads against
     if args['filter_label'] is not None:
         for label_name in args['filter_label']:

@@ -138,17 +133,17 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     def add_workload_to_report(workload: pylo.Workload, action: str):
         url_link_to_pce = workload.get_pce_ui_url()
         new_row = {
-
-
-
-
-
-
-
+            'hostname': workload.hostname,
+            'online': workload.online,
+            'last_heartbeat': workload.ven_agent.get_last_heartbeat_date().strftime('%Y-%m-%d %H:%M'),
+            'created_at': workload.created_at_datetime().strftime('%Y-%m-%d %H:%M'),
+            'href': workload.href,
+            'link_to_pce': url_link_to_pce,
+            'action': action
         }
 
         for label_type in org.LabelStore.label_types:
-            new_row['label_'+label_type] = workload.get_label_name(label_type, '')
+            new_row['label_' + label_type] = workload.get_label_name(label_type, '')
 
         sheet.add_line_from_object(new_row)
 

@@ -173,9 +168,9 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
             continue
 
         print(" - hostname '{}' has duplicates. ({} online, {} offline, {} unmanaged)".format(dup_hostname,
-
-
-
+                                                                                              len(dup_record.online),
+                                                                                              len(dup_record.offline),
+                                                                                              len(dup_record.unmanaged)))
 
         latest_created_workload = dup_record.find_latest_created_at()
         latest_heartbeat_workload = dup_record.find_latest_heartbeat()

@@ -195,7 +190,7 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
             add_workload_to_report(wkl, "ignored (VEN is online)")
 
         for wkl in dup_record.offline:
-            if arg_do_not_delete_the_most_recent_workload and
+            if arg_do_not_delete_the_most_recent_workload and wkl is latest_created_workload:
                 print(" - IGNORED: wkl {}/{} is the most recent".format(wkl.get_name_stripped_fqdn(), wkl.href))
                 add_workload_to_report(wkl, "ignored (it is the most recently created)")
             elif arg_do_not_delete_the_most_recently_heartbeating_workload and wkl is latest_heartbeat_workload:

@@ -212,7 +207,6 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
                 delete_tracker.add_workload(wkl)
                 print(" - added offline wkl {}/{} to the delete list".format(wkl.get_name_stripped_fqdn(), wkl.href))
 
-
         for wkl in dup_record.unmanaged:
             if arg_limit_number_of_deleted_workloads is not None and delete_tracker.count_entries() >= arg_limit_number_of_deleted_workloads:
                 print(" - IGNORED: wkl {}/{} because the limit of {} workloads to be deleted was reached".format(wkl.get_name_stripped_fqdn(), wkl.href, arg_limit_number_of_deleted_workloads))

@@ -269,7 +263,7 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     if len(report_wanted_format) < 1:
         print(" * No report format was specified, no report will be generated")
     else:
-        sheet.reorder_lines(['hostname'])
+        sheet.reorder_lines(['hostname'])  # sort by hostname for better readability
         for report_format in report_wanted_format:
             output_filename = output_file_prefix + '.' + report_format
             print(" * Writing report file '{}' ... ".format(output_filename), end='', flush=True)

@@ -294,7 +288,7 @@ class DuplicateRecordManager:
     def __init__(self, pce_offline_timer_override: Optional[int] = None):
         self.offline = []
         self.online = []
-        self.unmanaged= []
+        self.unmanaged = []
         self.all: List[pylo.Workload] = []
         self._pce_offline_timer_override: Optional[int] = pce_offline_timer_override
 

@@ -330,7 +324,7 @@ class DuplicateRecordManager:
             return True
         return False
 
-    def find_latest_created_at(self)-> 'pylo.Workload':
+    def find_latest_created_at(self) -> 'pylo.Workload':
         latest: Optional[pylo.Workload] = None
         for wkl in self.all:
             if wkl.unmanaged:

@@ -339,7 +333,7 @@ class DuplicateRecordManager:
                 latest = wkl
         return latest
 
-    def find_latest_heartbeat(self)-> 'pylo.Workload':
+    def find_latest_heartbeat(self) -> 'pylo.Workload':
        latest: Optional[pylo.Workload] = None
        for wkl in self.all:
            if wkl.unmanaged:
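The logic restored above decides, per duplicated hostname, which VENs survive: online workloads are always ignored, and flags can protect the most recently created and the most recently heartbeating offline workload. Below is a minimal sketch of that keep/delete decision using a hypothetical dataclass instead of pylo.Workload (all names and fields here are illustrative, not the library's API):

    from dataclasses import dataclass
    from datetime import datetime
    from typing import List, Optional

    @dataclass
    class Wkl:  # hypothetical stand-in for pylo.Workload
        href: str
        online: bool
        created_at: datetime
        last_heartbeat: Optional[datetime] = None

    def pick_deletions(duplicates: List[Wkl],
                       keep_most_recent: bool = True,
                       keep_latest_heartbeat: bool = True) -> List[Wkl]:
        offline = [w for w in duplicates if not w.online]
        latest_created = max(offline, key=lambda w: w.created_at, default=None)
        with_beat = [w for w in offline if w.last_heartbeat is not None]
        latest_beat = max(with_beat, key=lambda w: w.last_heartbeat, default=None)

        to_delete = []
        for w in offline:  # online VENs are never candidates for deletion
            if keep_most_recent and w is latest_created:
                continue  # ignored (it is the most recently created)
            if keep_latest_heartbeat and w is latest_beat:
                continue  # ignored (it has the most recent heartbeat)
            to_delete.append(w)
        return to_delete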
illumio_pylo/cli/commands/ven_compatibility_report_export.py CHANGED

@@ -1,8 +1,6 @@
-from typing import Dict,
+from typing import Dict, Union
 from dataclasses import dataclass
-import sys
 import argparse
-import math
 import illumio_pylo as pylo
 from .utils.misc import make_filename_with_timestamp
 from . import Command

@@ -265,7 +263,7 @@ def __main(args, org: pylo.Organization, native_parsers: MyBuiltInParser, **kwar
     print(myformat(" - Agents with successful report count:", agent_green_count))
     print(myformat(" - SKIPPED because not online count:", agent_skipped_not_online))
     print(myformat(" - SKIPPED because report was not found:", agent_has_no_report_count))
-    print(myformat(" - Agents with failed reports:", agent_report_failed_count
+    print(myformat(" - Agents with failed reports:", agent_report_failed_count))
 
     print()
     print(" * Writing report file '{}' ... ".format(output_file_csv), end='', flush=True)
illumio_pylo/cli/commands/workload_import.py CHANGED

@@ -1,4 +1,4 @@
-from typing import Dict, List, Any
+from typing import Dict, List, Any
 from dataclasses import dataclass
 import sys
 import argparse

@@ -52,7 +52,6 @@ def fill_parser(parser: argparse.ArgumentParser):
                         help='If set, the script will proceed with the creation of the workloads and labels without asking for confirmation')
 
 
-
 def __main(args, org: pylo.Organization, **kwargs):
     input_file = args['input_file']
     input_file_delimiter: str = args['input_file_delimiter']

@@ -84,8 +83,7 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     # each label type/dimension is optional
     for label_type in org.LabelStore.label_types:
-        csv_expected_fields.append({'name': f"{settings_header_label_prefix}{label_type}"
-
+        csv_expected_fields.append({'name': f"{settings_header_label_prefix}{label_type}", 'optional': True})
 
     csv_report_headers = ExcelHeaderSet(['name', 'hostname', 'ip', 'description'])
     for label_type in org.LabelStore.label_types:

@@ -97,13 +95,12 @@ def __main(args, org: pylo.Organization, **kwargs):
     csv_report = ArraysToExcel()
     csv_sheet = csv_report.create_sheet('Workloads', csv_report_headers)
 
-
     print(" * Loading CSV input file '{}'...".format(input_file), flush=True, end='')
     csv_data = pylo.CsvExcelToObject(input_file, expected_headers=csv_expected_fields, csv_delimiter=input_file_delimiter)
     print('OK')
     print(" - CSV has {} columns and {} lines (headers don't count)".format(csv_data.count_columns(), csv_data.count_lines()))
 
-    #check if CSV has all headers for each labels types
+    # check if CSV has all headers for each labels types
     if not settings_ignore_missing_headers:
         for label_type in org.LabelStore.label_types:
             header_name = f"{settings_header_label_prefix}{label_type}".lower()

@@ -111,7 +108,6 @@ def __main(args, org: pylo.Organization, **kwargs):
                 raise pylo.PyloEx(f"CSV/Excel file is missing the column '{header_name}' for label type '{label_type}'. "
                                   "If this was intended use --ignore-missing-headers flag")
 
-
     detect_workloads_name_collisions(csv_data, org, settings_ignore_all_sorts_collisions, settings_ignore_hostname_collision)
 
     detect_ip_collisions(csv_data, org, settings_ignore_all_sorts_collisions, settings_ignore_empty_ip_entries, settings_ignore_ip_collision)

@@ -149,7 +145,7 @@ def __main(args, org: pylo.Organization, **kwargs):
         print(" * No Workloads to create, all were ignored due to collisions or missing data.")
         # still want to save the CSV/Excel files in the end so don't exit
     else:
-        if
+        if settings_proceed_with_creation is not True:
             print(" * No workload will be created because the --proceed-with-creation/-p flag was not set. Yet report will be generated")
             for object_to_create in csv_objects_to_create:
                 if '**not_created_reason**' not in object_to_create:

@@ -238,6 +234,7 @@ class WorkloadCollisionItem:
     workload_object: pylo.Workload = None
     csv_object: Dict[str, Any] = None
 
+
 def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_collisions, ignore_hostname_collision):
     print(" * Checking for name/hostname collisions inside the PCE:", flush=True)
     name_cache: Dict[str, WorkloadCollisionItem] = {}

@@ -278,14 +275,14 @@ def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_al
             else:
                 print(
                     " - WARNING: CSV has an entry for workload name '{}' at line #{} but it exists already in the PCE. It will be ignored.".format(
-
+                        lower_name, csv_object['*line*']))
 
         if csv_object['hostname'] is not None and len(csv_object['hostname']) > 0:
             lower_hostname = csv_object['hostname'].lower()
             if lower_name != lower_hostname:
                 if lower_hostname not in name_cache:
                     name_cache[lower_hostname] = WorkloadCollisionItem(from_pce=False, csv_object=csv_object,
-
+                                                                       managed=False)
                 else:
                     if not name_cache[lower_hostname].from_pce:
                         raise pylo.PyloEx('CSV contains workloads with duplicates name/hostname: {}'.format(lower_name))

@@ -295,7 +292,7 @@ def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_al
                     else:
                         print(
                             " - WARNING: CSV has an entry for workload hostname '{}' at line #{} but it exists already in the PCE. It will be ignored.".format(
-
+                                lower_hostname, csv_object['*line*']))
     print(" * DONE")
 
 

@@ -348,9 +345,10 @@ def detect_ip_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_coll
             else:
                 count_duplicate_ip_addresses_in_csv += 1
                 if not ignore_all_sorts_collisions and not settings_ignore_ip_collision:
-                    pylo.log.warn(
-
-
+                    pylo.log.warn(
+                        indent + "Duplicate IP address {} found in the PCE and CSV/Excel at line #{} (name={} hostname={}). "
+                                 "(look for --options to bypass this if you know what you are doing)"
+                        .format(ip, csv_object['*line*'], csv_object['name'], csv_object['hostname']))
                     csv_object['**not_created_reason**'] = "Duplicate IP address {} found in the PCE".format(ip)
 
                 if ignore_all_sorts_collisions or settings_ignore_ip_collision:
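The collision checks in this file funnel both existing PCE names and incoming CSV rows through one lowercase name_cache, so a single dictionary lookup catches collisions against the PCE as well as duplicates between CSV lines. A simplified sketch of that approach, assuming rows are plain dicts with 'name' and 'hostname' keys (not the pylo API):

    from typing import Dict, List

    def find_name_collisions(pce_names: List[str], csv_rows: List[dict]) -> List[str]:
        # seed the cache with names already known to the PCE
        cache: Dict[str, str] = {n.lower(): 'pce' for n in pce_names}
        collisions: List[str] = []
        for row in csv_rows:
            lower_name = (row.get('name') or '').lower()
            lower_hostname = (row.get('hostname') or '').lower()
            # a set, so name == hostname on the same row is not a collision
            for value in {lower_name, lower_hostname}:
                if not value:
                    continue
                if value in cache:
                    collisions.append(value)  # exists in the PCE or on an earlier CSV line
                else:
                    cache[value] = 'csv'
        return collisions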
illumio_pylo/cli/commands/workload_reset_names_to_null.py CHANGED

@@ -1,10 +1,8 @@
 from typing import List
 import illumio_pylo as pylo
 import argparse
-import sys
 import math
 import colorama
-from .utils.misc import make_filename_with_timestamp
 from . import Command
 
 command_name = 'workload-reset-ven-names-to-null'

@@ -29,9 +27,9 @@ def __main(args, org: pylo.Organization, **kwargs):
     workloads_with_forced_names: List[pylo.Workload] = []
     workloads_with_mismatching_names: List[pylo.Workload] = []
 
-    #iterate through each workload
+    # iterate through each workload
     for wkl in org.WorkloadStore.itemsByHRef.values():
-        #only care about Managed workloads
+        # only care about Managed workloads
         if wkl.unmanaged:
             continue
 

@@ -53,26 +51,26 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     # <editor-fold desc="JSON Payloads generation">
 
-    #for each batch of workloads, generate a JSON payload to send to the PCE to reset name to null
-    #the payload will be a list of objects with the following structure:
-    #
-    #
-    #
+    # for each batch of workloads, generate a JSON payload to send to the PCE to reset name to null
+    # the payload will be a list of objects with the following structure:
+    # {
+    #     "href": "string",
+    #     "name": null
     # }
 
     if not confirmed_changes:
         print(colorama.Fore.YELLOW + "Changes have not been confirmed. Use the --confirm flag to confirm the changes and push to the PCE")
-        #reset colorama colors
+        # reset colorama colors
         print(colorama.Style.RESET_ALL)
         return
 
     # for loop for each batch of workloads
     for i in range(math.ceil(len(workloads_with_mismatching_names) / batch_size)):
-        #get the next batch of workloads
+        # get the next batch of workloads
         batch = workloads_with_mismatching_names[i * batch_size: (i + 1) * batch_size]
-        #create a list of objects with the structure described above
+        # create a list of objects with the structure described above
         payload = [{"href": wkl.href, "name": wkl.static_name_stripped_fqdn(wkl.hostname)} for wkl in batch]
-        #debug display
+        # debug display
         print(f"Sending payload for batch {i + 1} of {math.ceil(len(workloads_with_mismatching_names) / batch_size)} ({len(payload)} workloads)")
 
         org.connector.objects_workload_update_bulk(payload)

@@ -80,4 +78,4 @@ def __main(args, org: pylo.Organization, **kwargs):
     # </editor-fold>
 
 
-command_object = Command(command_name, __main, fill_parser, objects_load_filter)
+command_object = Command(command_name, __main, fill_parser, objects_load_filter)
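The comment block above documents the bulk-update payload shape, and the loop slices the workload list into fixed-size batches before calling objects_workload_update_bulk(). A standalone sketch of the same batching arithmetic follows; the batch_size value and the always-null name are illustrative, since the real script sends the FQDN-stripped hostname rather than null:

    import math
    from typing import List

    def build_payload_batches(hrefs: List[str], batch_size: int = 500) -> List[List[dict]]:
        batches = []
        for i in range(math.ceil(len(hrefs) / batch_size)):
            chunk = hrefs[i * batch_size: (i + 1) * batch_size]  # next batch of workloads
            batches.append([{"href": href, "name": None} for href in chunk])
        return batches

    # e.g. 1201 hrefs with batch_size=500 yields batches of 500, 500 and 201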
illumio_pylo/cli/commands/workload_update.py CHANGED

@@ -4,7 +4,7 @@ import argparse
 import sys
 
 import illumio_pylo as pylo
-from illumio_pylo import ArraysToExcel,
+from illumio_pylo import ArraysToExcel, ExcelHeaderSet
 from .utils.LabelCreation import generate_list_of_labels_to_create, create_labels
 from .utils.misc import make_filename_with_timestamp, default_label_columns_prefix
 from . import Command

@@ -54,7 +54,7 @@ class ContextSingleton:
 
     def __init__(self, org: pylo.Organization):
         self.org: pylo.Organization = org
-        self.csv_data: List[Dict[str, Union[str,bool,int, None]]] = []
+        self.csv_data: List[Dict[str, Union[str, bool, int, None]]] = []
         self.settings_label_type_header_prefix: str = ''
         self.settings_blank_labels_means_remove: bool = False
         self.csv_ip_index: Dict[str, Dict] = {}  # ip -> csv_data

@@ -64,9 +64,10 @@ class ContextSingleton:
         self.csv_report_sheet: Optional[pylo.ArraysToExcel.Sheet] = None
         self.ignored_workloads_count = 0
         self.stats_count_csv_entries_with_no_match = 0
-        self.workloads_previous_labels: Dict[pylo.Workload,Dict[str, pylo.Label]] = {}
+        self.workloads_previous_labels: Dict[pylo.Workload, Dict[str, pylo.Label]] = {}
         self.csv_input_missing_label_types: List[str] = []
 
+
 def __main(args, org: pylo.Organization, **kwargs):
 
     context = ContextSingleton(org=org)

@@ -98,7 +99,6 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     context.ignored_workloads_count = 0
 
-
     csv_report_headers = ExcelHeaderSet(['name', 'hostname'])
     for label_type in org.LabelStore.label_types:
         csv_report_headers.append(f'{context.settings_label_type_header_prefix}{label_type}')

@@ -106,7 +106,7 @@ def __main(args, org: pylo.Organization, **kwargs):
         csv_report_headers.append(f'new_{label_type}')
     csv_report_headers.extend(['**updated**', '**reason**', 'href'])
 
-    context.csv_report =
+    context.csv_report = ArraysToExcel()
     context.csv_report_sheet = context.csv_report.create_sheet('Workloads Update Report', csv_report_headers)
 
     # <editor-fold desc="CSV input file data extraction">

@@ -120,7 +120,6 @@ def __main(args, org: pylo.Organization, **kwargs):
     for label_type in org.LabelStore.label_types:
         csv_expected_fields.append({'name': f'{context.settings_label_type_header_prefix}{label_type}', 'optional': True})
 
-
     print(" * Loading CSV input file '{}'...".format(settings_input_file), flush=True, end='')
     csv_input_object = pylo.CsvExcelToObject(settings_input_file, expected_headers=csv_expected_fields, csv_delimiter=settings_input_file_delimiter)
     for label_type in org.LabelStore.label_types:

@@ -132,7 +131,6 @@ def __main(args, org: pylo.Organization, **kwargs):
     context.csv_data = csv_input_object.objects()
     # </editor-fold desc="CSV input file data extraction">
 
-
     if not input_match_on_ip and not input_match_on_hostname and not input_match_on_href:
         pylo.log.error('You must specify at least one (or several) property to match on for workloads vs input: href, ip or hostname')
         sys.exit(1)

@@ -142,11 +140,11 @@ def __main(args, org: pylo.Organization, **kwargs):
     # </editor-fold desc="CSV input basic checks">
 
     # <editor-fold desc="Filter the list of Workloads to be edited">
-    workloads_to_update: Dict[str, pylo.Workload] = org.WorkloadStore.itemsByHRef.copy()
+    workloads_to_update: Dict[str, pylo.Workload] = org.WorkloadStore.itemsByHRef.copy()  # start with a list of all workloads from the PCE
     print(" * PCE has {} workloads. Now applying requested filters...".format(len(workloads_to_update)))
 
     context.ignored_workloads_count += filter_pce_workloads(context, workloads_to_update, settings_filter_managed_only,
-
+                                                            settings_filter_unmanaged_only)
 
     print(" * DONE! {} Workloads remain to be updated".format(len(workloads_to_update)))
     # </editor-fold>

@@ -154,14 +152,13 @@ def __main(args, org: pylo.Organization, **kwargs):
     # <editor-fold desc="Matching between CSV/Excel and Managed Workloads">
     workloads_to_update_match_csv: Dict[pylo.Workload, Dict]  # Workloads from the PCE and CSV data associated to it
     workloads_to_update_match_csv = match_pce_workloads_vs_csv(context,
-
-
-
-
+                                                               input_match_on_hostname,
+                                                               input_match_on_href,
+                                                               input_match_on_ip,
+                                                               workloads_to_update)
     add_unmatched_csv_lines_to_report(context, workloads_to_update_match_csv)
     # </editor-fold>
 
-
     # <editor-fold desc="List missing Labels and exclude Workloads which require no changes">
     print(" * Looking for any missing label which need to be created and Workloads which already have the right labels:")
     labels_to_be_created = generate_list_of_labels_to_create(workloads_to_update_match_csv.values(), org, context.settings_label_type_header_prefix)

@@ -172,14 +169,12 @@ def __main(args, org: pylo.Organization, **kwargs):
     # </editor-fold>
 
     # <editor-fold desc="Compare remaining workloads and CSV data to generate update payloads later">
-    print(" * Comparing remaining {} Workloads and CSV data to generate update payloads later...".format(len(workloads_to_update))
+    print(" * Comparing remaining {} Workloads and CSV data to generate update payloads later...".format(len(workloads_to_update)), flush=True)
     compare_workloads_vs_csv_data_to_generate_changes(context, workloads_to_update, workloads_to_update_match_csv)
 
-
     print(" * DONE - {} Workloads remain to be updated".format(len(workloads_to_update_match_csv)))
     # </editor-fold desc="Compare remaining workloads and CSV data to generate update payloads later">
 
-
     # <editor-fold desc="Workloads updates Push to API">
     if len(workloads_to_update) == 0:
         print(" * No Workloads to update")

@@ -227,7 +222,7 @@ def __main(args, org: pylo.Organization, **kwargs):
         print("*************")
         for workload in workloads_to_update.values():
             context.csv_report_sheet.add_line_from_object(workload_to_csv_report(context, workload, 'Potentially', reason='No confirmation was given to proceed with the update'))
-            #new_labels = workloads_list_changed_labels_for_report[workload]))
+            # new_labels = workloads_list_changed_labels_for_report[workload]))
     # </editor-fold>
 
     print(" * Writing report file '{}' ... ".format(output_file_csv), end='', flush=True)

@@ -238,7 +233,8 @@ def __main(args, org: pylo.Organization, **kwargs):
     print("DONE")
 
 
-def compare_workloads_vs_csv_data_to_generate_changes(context: ContextSingleton,workloads_to_update,
+def compare_workloads_vs_csv_data_to_generate_changes(context: ContextSingleton, workloads_to_update,
+                                                      workloads_to_update_match_csv):
     for workload, csv_data in workloads_to_update_match_csv.copy().items():
         workload.api_stacked_updates_start()
         if 'name' in csv_data:

@@ -264,7 +260,6 @@ def compare_workloads_vs_csv_data_to_generate_changes(context: ContextSingleton,
                     format(csv_data[csv_label_column_name], csv_data['*line*']))
             found_labels.append(found_label)
 
-
         context.workloads_previous_labels[workload] = workload.get_labels_dict()
         workload.api_update_labels(found_labels, missing_label_type_means_no_change=context.settings_blank_labels_means_remove)
 

@@ -385,14 +380,14 @@ def match_pce_workloads_vs_csv(context: ContextSingleton,
             print(" - No matching IP address found in CSV/Excel, this Workload will not be relabeled")
             del workloads_to_relabel[workload_href]
             context.ignored_workloads_count += 1
-            #context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
+            # context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
             #                                        'No IP match was found in CSV/Excel input'))
             continue
         if len(ip_matches) > 1:
             print(" - Found more than 1 IP matches in CSV/Excel, this Workload will not be relabeled")
             del workloads_to_relabel[workload_href]
             context.ignored_workloads_count += 1
-            #context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
+            # context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
             #                                        'Too many IP matches were found in CSV/Excel input'))
             continue
         this_workload_matched_on_ip = ip_matches[0]

@@ -405,7 +400,7 @@ def match_pce_workloads_vs_csv(context: ContextSingleton,
             del workloads_to_relabel[workload_href]
             print(" NOT FOUND")
             context.ignored_workloads_count += 1
-            #context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
+            # context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
             #                                        'No hostname match was found in CSV/Excel input'))
             continue
 

@@ -420,7 +415,7 @@ def match_pce_workloads_vs_csv(context: ContextSingleton,
             del workloads_to_relabel[workload_href]
             print(" NOT FOUND")
             context.ignored_workloads_count += 1
-            #context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
+            # context.csv_report.add_line_from_object(workload_to_csv_report(context, workload, False,
             #                                        'No href match was found in CSV/Excel input'))
             continue
 

@@ -463,7 +458,7 @@ def filter_pce_workloads(context: ContextSingleton, workloads_to_update: Dict[st
                 del workloads_to_update[workload_href]
                 ignored_workloads_count += 1
                 context.csv_report_sheet.add_line_from_object(workload_to_csv_report(context, workload, False,
-
+                                                                                     'Managed Workload was filtered out'))
     if filter_managed_only:
         print(" - Filtering out Unmanaged Workloads...")
         for workload_href in list(workloads_to_update.keys()):

@@ -472,13 +467,13 @@ def filter_pce_workloads(context: ContextSingleton, workloads_to_update: Dict[st
                 del workloads_to_update[workload_href]
                 ignored_workloads_count += 1
                 context.csv_report_sheet.add_line_from_object(workload_to_csv_report(context, workload, False,
-
+                                                                                     'Unmanaged Workload was filtered out'))
 
     print(" * DONE! {} Workloads were ignored".format(ignored_workloads_count))
     return ignored_workloads_count
 
 
-def workload_to_csv_report(context: ContextSingleton, workload: pylo.Workload, updated: Union[bool,str],
+def workload_to_csv_report(context: ContextSingleton, workload: pylo.Workload, updated: Union[bool, str],
                            reason: str = ''):
 
     record = {

@@ -488,7 +483,6 @@ def workload_to_csv_report(context: ContextSingleton, workload: pylo.Workload, u
         '**reason**': reason
     }
 
-
     for label_type in context.org.LabelStore.label_types:
         previous_label = context.workloads_previous_labels[workload].get(label_type)
         record[f'{context.settings_label_type_header_prefix}{label_type}'] = previous_label.name if previous_label is not None else ''

@@ -508,11 +502,13 @@ def workload_to_csv_report(context: ContextSingleton, workload: pylo.Workload, u
 
 command_object = Command(command_name, __main, fill_parser, objects_load_filter)
 
+
 class ChangedLabelRecord(TypedDict):
     name: Optional[str]
     href: Optional[str]
 
-
+
+ChangedLabelRecordCollection = Dict[pylo.Workload, Dict[str, ChangedLabelRecord]]
 
 
 def add_unmatched_csv_lines_to_report(context: ContextSingleton,

@@ -540,6 +536,5 @@ def add_unmatched_csv_lines_to_report(context: ContextSingleton,
             new_data['**reason**'] = 'No matching Workload was found in the PCE'
             context.csv_report_sheet.add_line_from_object(new_data)
 
-
     print(" * {} CSV lines were not matched with any Workload. They are added to the report now".
           format(context.stats_count_csv_entries_with_no_match))
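match_pce_workloads_vs_csv() above only accepts a workload/CSV pairing when exactly one row matches; zero hits or multiple hits cause the workload to be skipped and counted as ignored. A sketch of that exact-match rule over a simple ip -> rows index, mirroring the csv_ip_index comment in the diff (function and variable names are illustrative):

    from typing import Dict, List, Optional

    def index_csv_by_ip(csv_rows: List[dict]) -> Dict[str, List[dict]]:
        index: Dict[str, List[dict]] = {}
        for row in csv_rows:
            ip = row.get('ip')
            if ip:
                index.setdefault(ip, []).append(row)
        return index

    def match_workload_by_ip(workload_ips: List[str],
                             index: Dict[str, List[dict]]) -> Optional[dict]:
        matches = [row for ip in workload_ips for row in index.get(ip, [])]
        if len(matches) != 1:
            return None  # no match or ambiguous match: workload is skipped
        return matches[0]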
illumio_pylo/cli/commands/workload_used_in_rule_finder.py CHANGED

@@ -40,8 +40,8 @@ def __main(args, org: pylo.Organization, **kwargs):
         global_concerned_rules[concerned_rule] = True
 
         if concerned_ruleset not in concerned_rulesets:
-
-
+            concerned_rulesets[concerned_ruleset] = {concerned_rule: concerned_rule}
+            count_concerned_rules = count_concerned_rules + 1
         else:
             if concerned_rule not in concerned_rulesets[concerned_ruleset]:
                 concerned_rulesets[concerned_ruleset][concerned_rule] = concerned_rule

@@ -67,9 +67,9 @@ def __main(args, org: pylo.Organization, **kwargs):
         print(" - '{}' HREF: {} URL: {}".format(ruleset.name, ruleset.href, ruleset_url))
 
     print("\n***** DONE with workloads & rules parsing *****")
-    print("** Total: {} Workloads used in {} Rulesets and {} Rules".format(
-
-
+    print("** Total: {} Workloads used in {} Rulesets and {} Rules".format(global_count_concerned_workloads,
+                                                                           len(global_concerned_rulesets),
+                                                                           len(global_concerned_rules)))
 
     print("\n**** END OF SCRIPT ****\n")
 
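The grouping restored above keeps one dict per Ruleset whose keys are the Rules referencing a given workload, so the final totals are just dictionary sizes. A compact sketch of the same structure using string hrefs as hashable stand-ins for the pylo Ruleset/Rule objects (hypothetical names):

    from typing import Dict

    # ruleset href -> {rule href: rule href}
    concerned_rulesets: Dict[str, Dict[str, str]] = {}

    def record_rule(ruleset_href: str, rule_href: str) -> None:
        bucket = concerned_rulesets.setdefault(ruleset_href, {})
        bucket[rule_href] = rule_href  # the dict doubles as a de-duplicating set

    # totals: len(concerned_rulesets) rulesets,
    #         sum(len(v) for v in concerned_rulesets.values()) rules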
illumio_pylo/tmp.py CHANGED

illumio_pylo/utilities/resources/workloads-import-example.csv CHANGED

@@ -1,2 +1,2 @@
-name,hostname,ip,role,app,env,loc
+name,hostname,ip,label:role,label:app,label:env,label:loc
 VEN-KFMGF,hostdd,192.168.50.12,R_WEB,A_APPC,E_DEV,L_AMER
illumio_pylo/utilities/resources/workloads-import-example.xlsx CHANGED

Binary file
{illumio_pylo-0.3.8.dist-info → illumio_pylo-0.3.10.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: illumio_pylo
-Version: 0.3.8
+Version: 0.3.10
 Summary: A set of tools and library for working with Illumio PCE
 Home-page: https://github.com/cpainchaud/pylo
 Author: Christophe Painchaud

@@ -187,7 +187,7 @@ Requires-Python: >=3.11
 License-File: LICENSE
 Requires-Dist: click ==8.1.7
 Requires-Dist: colorama ~=0.4.4
-Requires-Dist: cryptography ==
+Requires-Dist: cryptography ==43.0.1
 Requires-Dist: openpyxl ~=3.1.3
 Requires-Dist: paramiko ~=3.4.0
 Requires-Dist: prettytable ~=3.10.0