illumio-pylo 0.3.7__py3-none-any.whl → 0.3.9__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the registry.
- illumio_pylo/API/APIConnector.py +90 -54
- illumio_pylo/API/JsonPayloadTypes.py +10 -0
- illumio_pylo/Helpers/functions.py +8 -13
- illumio_pylo/IPList.py +5 -9
- illumio_pylo/IPMap.py +3 -3
- illumio_pylo/Label.py +0 -1
- illumio_pylo/LabelCommon.py +1 -1
- illumio_pylo/LabelStore.py +24 -25
- illumio_pylo/LabeledObject.py +4 -5
- illumio_pylo/Organization.py +1 -3
- illumio_pylo/ReferenceTracker.py +0 -3
- illumio_pylo/Rule.py +2 -2
- illumio_pylo/Ruleset.py +7 -7
- illumio_pylo/RulesetStore.py +1 -1
- illumio_pylo/SecurityPrincipal.py +0 -5
- illumio_pylo/Workload.py +4 -13
- illumio_pylo/WorkloadStoreSubClasses.py +7 -10
- illumio_pylo/__init__.py +1 -1
- illumio_pylo/cli/__init__.py +0 -2
- illumio_pylo/cli/commands/credential_manager.py +7 -18
- illumio_pylo/cli/commands/iplist_analyzer.py +3 -9
- illumio_pylo/cli/commands/iplist_import_from_file.py +57 -34
- illumio_pylo/cli/commands/ruleset_export.py +16 -20
- illumio_pylo/cli/commands/update_pce_objects_cache.py +0 -1
- illumio_pylo/cli/commands/utils/LabelCreation.py +2 -2
- illumio_pylo/cli/commands/utils/misc.py +3 -2
- illumio_pylo/cli/commands/ven_compatibility_report_export.py +4 -14
- illumio_pylo/cli/commands/ven_duplicate_remover.py +26 -32
- illumio_pylo/cli/commands/ven_idle_to_visibility.py +2 -4
- illumio_pylo/cli/commands/ven_upgrader.py +1 -2
- illumio_pylo/cli/commands/workload_import.py +16 -18
- illumio_pylo/cli/commands/workload_reset_names_to_null.py +12 -14
- illumio_pylo/cli/commands/workload_update.py +28 -32
- illumio_pylo/cli/commands/workload_used_in_rule_finder.py +5 -5
- illumio_pylo/tmp.py +1 -0
- illumio_pylo/utilities/resources/workloads-import-example.csv +1 -1
- illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
- {illumio_pylo-0.3.7.dist-info → illumio_pylo-0.3.9.dist-info}/METADATA +4 -4
- illumio_pylo-0.3.9.dist-info/RECORD +72 -0
- {illumio_pylo-0.3.7.dist-info → illumio_pylo-0.3.9.dist-info}/WHEEL +1 -1
- illumio_pylo-0.3.7.dist-info/RECORD +0 -72
- {illumio_pylo-0.3.7.dist-info → illumio_pylo-0.3.9.dist-info}/LICENSE +0 -0
- {illumio_pylo-0.3.7.dist-info → illumio_pylo-0.3.9.dist-info}/top_level.txt +0 -0
illumio_pylo/cli/commands/utils/misc.py

@@ -1,6 +1,7 @@
 import os
 from datetime import datetime
-
+
+default_label_columns_prefix = 'label:'
 
 
 def make_filename_with_timestamp(prefix: str, output_directory: str = './') -> str:
@@ -13,4 +14,4 @@ def make_filename_with_timestamp(prefix: str, output_directory: str = './') -> str:
         os.makedirs(output_directory)
 
     now = datetime.now()
-    return output_directory + os.path.sep + prefix + now.strftime("%Y%m%d-%H%M%S")
+    return output_directory + os.path.sep + prefix + now.strftime("%Y%m%d-%H%M%S")
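
For reference, a minimal sketch of how the two helpers touched above are used by the CLI commands later in this diff (illustrative only; the absolute module path is inferred from the file listing, and 'loc' is just a sample label type):

    from illumio_pylo.cli.commands.utils.misc import make_filename_with_timestamp, default_label_columns_prefix

    # timestamped output path, e.g. "output/ven-compatibility-reports_20240101-120000.csv"
    report_path = make_filename_with_timestamp('ven-compatibility-reports_', 'output') + '.csv'

    # the new constant centralizes the column prefix used for label columns in CSV/Excel files, e.g. "label:loc"
    loc_column = default_label_columns_prefix + 'loc'
    print(report_path, loc_column)
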
illumio_pylo/cli/commands/ven_compatibility_report_export.py

@@ -1,6 +1,4 @@
 import argparse
-import sys
-from datetime import datetime
 from typing import Dict, List
 
 from prettytable import PrettyTable
@@ -23,19 +21,18 @@ def fill_parser(parser: argparse.ArgumentParser):
     parser.add_argument('--output-dir', '-o', type=str, required=False, default='output',
                         help='Directory where the output files will be saved')
 
+
 def __main(args, org: pylo.Organization, **kwargs):
 
     settings_output_dir: str = args['output_dir']
     settings_filter_labels: List[str] = args['filter_label']
     settings_limit: int = args['limit']
 
-
     # <editor-fold desc="Prepare the output files and CSV/Excel Object">
     output_file_prefix = make_filename_with_timestamp('ven-compatibility-reports_', settings_output_dir)
     output_filename_csv = output_file_prefix + '.csv'
     output_filename_xls = output_file_prefix + '.xlsx'
 
-
     # clean the files if they exist, also to check if we have write access to the directory/files
     pylo.file_clean(output_filename_csv)
     pylo.file_clean(output_filename_xls)
@@ -47,20 +44,19 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     # insert all label dimensions
     for label_type in org.LabelStore.label_types:
-        csv_report_headers.append(ExcelHeader(name=
+        csv_report_headers.append(ExcelHeader(name='label_' + label_type, wrap_text=False))
 
     csv_report_headers.extend([
         'operating_system',
         'report_failed',
         'details',
-        ExcelHeader(name
+        ExcelHeader(name='link_to_pce', max_width=15, wrap_text=False, url_text='See in PCE', is_url=True),
         ExcelHeader(name='href', max_width=15, wrap_text=False)
     ])
     csv_report = pylo.ArraysToExcel()
     sheet: pylo.ArraysToExcel.Sheet = csv_report.create_sheet('duplicates', csv_report_headers, force_all_wrap_text=True, multivalues_cell_delimiter=',')
     # </editor-fold desc="Prepare the output files and CSV/Excel Object">
 
-
     agents: Dict[str, pylo.VENAgent] = {}
     for agent in org.AgentStore.items_by_href.values():
         if agent.mode == 'idle':
@@ -86,7 +82,6 @@ def __main(args, org: pylo.Organization, **kwargs):
         if workload.get_label('loc') is not None and workload.get_label('loc').name == 'CN':
             print("hello")
 
-
         if len(filter_labels) > 0:
             if not workload.uses_all_labels(filter_labels):
                 pylo.log.info(" - Removing Agent '{}' because it does not match the filter".format(workload.get_name()))
@@ -99,7 +94,6 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     print("OK! {} Agents left after filtering".format(len(agents)))
 
-
     print()
     print(" ** Request Compatibility Report for each Agent in IDLE mode **", flush=True)
 
@@ -110,7 +104,6 @@ def __main(args, org: pylo.Organization, **kwargs):
     stats_agent_has_no_report_count = 0
     stats_agent_report_failed_count = 0
 
-
     for agent in agents.values():
         stats_agent_count += 1
 
@@ -135,7 +128,6 @@ def __main(args, org: pylo.Organization, **kwargs):
             label = agent.workload.get_label(label_type)
            export_row['label_'+label_type] = label.name if label else ''
 
-
        print(" - Downloading report (it may be delayed by API flood protection)...", flush=True, end='')
        report = org.connector.agent_get_compatibility_report(agent_href=agent.href, return_raw_json=False)
        print('OK')
@@ -163,7 +155,6 @@ def __main(args, org: pylo.Organization, **kwargs):
 
        sheet.add_line_from_object(export_row)
 
-
    print("\n**** Saving Compatibility Reports to '{}' ****".format(output_filename_csv), end='', flush=True)
    sheet.write_to_csv(output_filename_csv)
    print("OK!")
@@ -171,7 +162,6 @@ def __main(args, org: pylo.Organization, **kwargs):
    csv_report.write_to_excel(output_filename_xls)
    print("OK!")
 
-
    print("\n\n*** Statistics ***\n")
    table = PrettyTable()
    table.field_names = ["item", "Value"]
@@ -193,4 +183,4 @@ command_object = Command(
    main_func=__main,
    parser_func=fill_parser,
    load_specific_objects_only=objects_load_filter
-)
+)
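
Both this command and ven-duplicate-remover build their reports through the same pattern, which can be summarized as follows (a sketch assembled from the calls visible in the hunks above; the sheet name and row values are made-up placeholders):

    import illumio_pylo as pylo
    from illumio_pylo import ExcelHeader

    headers = pylo.ExcelHeaderSet(['operating_system', 'report_failed', 'details'])
    headers.append(ExcelHeader(name='link_to_pce', max_width=15, wrap_text=False, url_text='See in PCE', is_url=True))
    headers.append(ExcelHeader(name='href', max_width=15, wrap_text=False))

    report = pylo.ArraysToExcel()
    sheet = report.create_sheet('reports', headers, force_all_wrap_text=True, multivalues_cell_delimiter=',')

    # one dict per row, keyed by header name (placeholder values)
    sheet.add_line_from_object({'operating_system': 'linux', 'report_failed': 'no', 'details': '',
                                'link_to_pce': 'https://pce.example.com/#/workloads/...', 'href': '/orgs/1/workloads/...'})

    sheet.write_to_csv('report.csv')      # CSV output
    report.write_to_excel('report.xlsx')  # Excel output
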
illumio_pylo/cli/commands/ven_duplicate_remover.py

@@ -4,7 +4,7 @@ import click
 import argparse
 
 import illumio_pylo as pylo
-from illumio_pylo import
+from illumio_pylo import ExcelHeader
 
 from .utils.misc import make_filename_with_timestamp
 from . import Command
@@ -12,6 +12,7 @@ from . import Command
 command_name = 'ven-duplicate-remover'
 objects_load_filter = ['labels']
 
+
 def fill_parser(parser: argparse.ArgumentParser):
     parser.add_argument('--verbose', '-v', action='store_true',
                         help='')
@@ -40,14 +41,11 @@ def fill_parser(parser: argparse.ArgumentParser):
                         help='Directory where to write the report file(s)')
 
 
-
 def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
-
-    report_wanted_format: List[Literal['csv','xlsx']] = args['report_format']
+    report_wanted_format: List[Literal['csv', 'xlsx']] = args['report_format']
     if report_wanted_format is None:
         report_wanted_format = ['xlsx']
 
-
     arg_verbose = args['verbose']
     arg_proceed_with_deletion = args['proceed_with_deletion'] is True
     arg_do_not_require_deletion_confirmation = args['do_not_require_deletion_confirmation'] is True
@@ -59,32 +57,29 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     arg_limit_number_of_deleted_workloads = args['limit_number_of_deleted_workloads']
     arg_report_output_dir: str = args['output_dir']
 
-
     output_file_prefix = make_filename_with_timestamp('ven-duplicate-removal_', arg_report_output_dir)
     output_file_csv = output_file_prefix + '.csv'
     output_file_excel = output_file_prefix + '.xlsx'
 
-
     csv_report_headers = pylo.ExcelHeaderSet([
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='name', max_width=40),
+        ExcelHeader(name='hostname', max_width=40)
     ])
     # insert all label dimensions
     for label_type in org.LabelStore.label_types:
-        csv_report_headers.append(ExcelHeader(name=
+        csv_report_headers.append(ExcelHeader(name=f'label_{label_type}', wrap_text=False))
 
     csv_report_headers.extend([
         'online',
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='last_heartbeat', max_width=15, wrap_text=False),
+        ExcelHeader(name='created_at', max_width=15, wrap_text=False),
         'action',
-        ExcelHeader(name
-        ExcelHeader(name
+        ExcelHeader(name='link_to_pce', max_width=15, wrap_text=False, url_text='See in PCE', is_url=True),
+        ExcelHeader(name='href', max_width=15, wrap_text=False)
     ])
     csv_report = pylo.ArraysToExcel()
     sheet: pylo.ArraysToExcel.Sheet = csv_report.create_sheet('duplicates', csv_report_headers, force_all_wrap_text=True, multivalues_cell_delimiter=',')
 
-
     filter_labels: List[pylo.Label] = []  # the list of labels to filter the workloads against
     if args['filter_label'] is not None:
         for label_name in args['filter_label']:
@@ -138,17 +133,17 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     def add_workload_to_report(workload: pylo.Workload, action: str):
         url_link_to_pce = workload.get_pce_ui_url()
         new_row = {
-
-
-
-
-
-
-
+            'hostname': workload.hostname,
+            'online': workload.online,
+            'last_heartbeat': workload.ven_agent.get_last_heartbeat_date().strftime('%Y-%m-%d %H:%M'),
+            'created_at': workload.created_at_datetime().strftime('%Y-%m-%d %H:%M'),
+            'href': workload.href,
+            'link_to_pce': url_link_to_pce,
+            'action': action
         }
 
         for label_type in org.LabelStore.label_types:
-            new_row['label_'+label_type] = workload.get_label_name(label_type, '')
+            new_row['label_' + label_type] = workload.get_label_name(label_type, '')
 
         sheet.add_line_from_object(new_row)
 
@@ -173,9 +168,9 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
             continue
 
         print(" - hostname '{}' has duplicates. ({} online, {} offline, {} unmanaged)".format(dup_hostname,
-
-
-
+              len(dup_record.online),
+              len(dup_record.offline),
+              len(dup_record.unmanaged)))
 
         latest_created_workload = dup_record.find_latest_created_at()
         latest_heartbeat_workload = dup_record.find_latest_heartbeat()
@@ -195,7 +190,7 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
                 add_workload_to_report(wkl, "ignored (VEN is online)")
 
         for wkl in dup_record.offline:
-            if arg_do_not_delete_the_most_recent_workload and
+            if arg_do_not_delete_the_most_recent_workload and wkl is latest_created_workload:
                 print(" - IGNORED: wkl {}/{} is the most recent".format(wkl.get_name_stripped_fqdn(), wkl.href))
                 add_workload_to_report(wkl, "ignored (it is the most recently created)")
             elif arg_do_not_delete_the_most_recently_heartbeating_workload and wkl is latest_heartbeat_workload:
@@ -212,7 +207,6 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
                 delete_tracker.add_workload(wkl)
                 print(" - added offline wkl {}/{} to the delete list".format(wkl.get_name_stripped_fqdn(), wkl.href))
 
-
         for wkl in dup_record.unmanaged:
             if arg_limit_number_of_deleted_workloads is not None and delete_tracker.count_entries() >= arg_limit_number_of_deleted_workloads:
                 print(" - IGNORED: wkl {}/{} because the limit of {} workloads to be deleted was reached".format(wkl.get_name_stripped_fqdn(), wkl.href, arg_limit_number_of_deleted_workloads))
@@ -269,7 +263,7 @@ def __main(args, org: pylo.Organization, pce_cache_was_used: bool, **kwargs):
     if len(report_wanted_format) < 1:
         print(" * No report format was specified, no report will be generated")
     else:
-        sheet.reorder_lines(['hostname'])
+        sheet.reorder_lines(['hostname'])  # sort by hostname for better readability
         for report_format in report_wanted_format:
             output_filename = output_file_prefix + '.' + report_format
             print(" * Writing report file '{}' ... ".format(output_filename), end='', flush=True)
@@ -294,7 +288,7 @@ class DuplicateRecordManager:
     def __init__(self, pce_offline_timer_override: Optional[int] = None):
         self.offline = []
         self.online = []
-        self.unmanaged= []
+        self.unmanaged = []
         self.all: List[pylo.Workload] = []
         self._pce_offline_timer_override: Optional[int] = pce_offline_timer_override
 
@@ -330,7 +324,7 @@ class DuplicateRecordManager:
             return True
         return False
 
-    def find_latest_created_at(self)-> 'pylo.Workload':
+    def find_latest_created_at(self) -> 'pylo.Workload':
         latest: Optional[pylo.Workload] = None
         for wkl in self.all:
             if wkl.unmanaged:
@@ -339,7 +333,7 @@ class DuplicateRecordManager:
                 latest = wkl
         return latest
 
-    def find_latest_heartbeat(self)-> 'pylo.Workload':
+    def find_latest_heartbeat(self) -> 'pylo.Workload':
         latest: Optional[pylo.Workload] = None
         for wkl in self.all:
             if wkl.unmanaged:
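
The selection policy implemented above boils down to: group workloads by hostname, keep anything online and the most recently created (or most recently heartbeating) VEN, and flag the remaining offline duplicates for deletion. A plain-Python illustration of that policy (the FakeWorkload class is a hypothetical stand-in for pylo.Workload, not part of the package):

    from dataclasses import dataclass
    from datetime import datetime
    from typing import List, Optional

    @dataclass
    class FakeWorkload:                      # hypothetical stand-in for pylo.Workload
        hostname: str
        created_at: datetime
        last_heartbeat: Optional[datetime]   # None for unmanaged workloads
        online: bool = False

    def pick_deletions(duplicates: List[FakeWorkload]) -> List[FakeWorkload]:
        # keep the most recently created workload among the duplicates
        latest_created = max(duplicates, key=lambda w: w.created_at)
        to_delete = []
        for wkl in duplicates:
            if wkl.online or wkl is latest_created:
                continue                     # never delete online VENs or the most recent one
            to_delete.append(wkl)
        return to_delete
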
illumio_pylo/cli/commands/ven_idle_to_visibility.py

@@ -1,8 +1,6 @@
-from typing import Dict,
+from typing import Dict, Union
 from dataclasses import dataclass
-import sys
 import argparse
-import math
 import illumio_pylo as pylo
 from .utils.misc import make_filename_with_timestamp
 from . import Command
@@ -265,7 +263,7 @@ def __main(args, org: pylo.Organization, native_parsers: MyBuiltInParser, **kwargs):
     print(myformat(" - Agents with successful report count:", agent_green_count))
     print(myformat(" - SKIPPED because not online count:", agent_skipped_not_online))
     print(myformat(" - SKIPPED because report was not found:", agent_has_no_report_count))
-    print(myformat(" - Agents with failed reports:", agent_report_failed_count
+    print(myformat(" - Agents with failed reports:", agent_report_failed_count))
 
     print()
     print(" * Writing report file '{}' ... ".format(output_file_csv), end='', flush=True)
illumio_pylo/cli/commands/workload_import.py

@@ -1,4 +1,4 @@
-from typing import Dict, List, Any
+from typing import Dict, List, Any
 from dataclasses import dataclass
 import sys
 import argparse
@@ -8,7 +8,7 @@ import click
 import illumio_pylo as pylo
 from illumio_pylo import ArraysToExcel, ExcelHeaderSet, ExcelHeader
 from .utils.LabelCreation import generate_list_of_labels_to_create, create_labels
-from .utils.misc import make_filename_with_timestamp
+from .utils.misc import make_filename_with_timestamp, default_label_columns_prefix
 from . import Command
 
 command_name = 'workload-import'
@@ -27,7 +27,7 @@ def fill_parser(parser: argparse.ArgumentParser):
     parser.add_argument('--ignore-missing-headers', action='store_true',
                         help='Ignore missing headers in the CSV/Excel file for label types')
 
-    parser.add_argument('--label-type-header-prefix', type=str, required=False, default=
+    parser.add_argument('--label-type-header-prefix', type=str, required=False, default=default_label_columns_prefix,
                         help='Prefix for the label type headers in the CSV/Excel file')
 
     parser.add_argument('--ignore-hostname-collision', action='store_true',
@@ -52,7 +52,6 @@ def fill_parser(parser: argparse.ArgumentParser):
                         help='If set, the script will proceed with the creation of the workloads and labels without asking for confirmation')
 
 
-
 def __main(args, org: pylo.Organization, **kwargs):
     input_file = args['input_file']
     input_file_delimiter: str = args['input_file_delimiter']
@@ -84,12 +83,11 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     # each label type/dimension is optional
     for label_type in org.LabelStore.label_types:
-        csv_expected_fields.append({'name': f"{settings_header_label_prefix}{label_type}"
-
+        csv_expected_fields.append({'name': f"{settings_header_label_prefix}{label_type}", 'optional': True})
 
     csv_report_headers = ExcelHeaderSet(['name', 'hostname', 'ip', 'description'])
     for label_type in org.LabelStore.label_types:
-        csv_report_headers.append(f'
+        csv_report_headers.append(f'{settings_header_label_prefix}{label_type}')
 
     csv_report_headers.append(ExcelHeader(name='**not_created_reason**'))
     csv_report_headers.append(ExcelHeader(name='href', max_width=15))
@@ -97,13 +95,12 @@ def __main(args, org: pylo.Organization, **kwargs):
     csv_report = ArraysToExcel()
     csv_sheet = csv_report.create_sheet('Workloads', csv_report_headers)
 
-
     print(" * Loading CSV input file '{}'...".format(input_file), flush=True, end='')
     csv_data = pylo.CsvExcelToObject(input_file, expected_headers=csv_expected_fields, csv_delimiter=input_file_delimiter)
     print('OK')
     print(" - CSV has {} columns and {} lines (headers don't count)".format(csv_data.count_columns(), csv_data.count_lines()))
 
-    #check if CSV has all headers for each labels types
+    # check if CSV has all headers for each labels types
     if not settings_ignore_missing_headers:
         for label_type in org.LabelStore.label_types:
             header_name = f"{settings_header_label_prefix}{label_type}".lower()
@@ -111,7 +108,6 @@ def __main(args, org: pylo.Organization, **kwargs):
                 raise pylo.PyloEx(f"CSV/Excel file is missing the column '{header_name}' for label type '{label_type}'. "
                                   "If this was intended use --ignore-missing-headers flag")
 
-
     detect_workloads_name_collisions(csv_data, org, settings_ignore_all_sorts_collisions, settings_ignore_hostname_collision)
 
     detect_ip_collisions(csv_data, org, settings_ignore_all_sorts_collisions, settings_ignore_empty_ip_entries, settings_ignore_ip_collision)
@@ -124,7 +120,7 @@ def __main(args, org: pylo.Organization, **kwargs):
     if len(labels_to_be_created) > 0:
         print(" * {} Labels need to created before Workloads can be imported, listing:".format(len(labels_to_be_created)))
         for label in labels_to_be_created:
-            print(" - Label: {} (type={})".format(label
+            print(" - Label: {} (type={})".format(label['name'], label['type']))
         if not settings_no_confirmation_required:
             click.confirm("Do you want to proceed with the creation of these labels?", abort=True)
 
@@ -149,7 +145,7 @@ def __main(args, org: pylo.Organization, **kwargs):
         print(" * No Workloads to create, all were ignored due to collisions or missing data.")
         # still want to save the CSV/Excel files in the end so don't exit
     else:
-        if
+        if settings_proceed_with_creation is not True:
            print(" * No workload will be created because the --proceed-with-creation/-p flag was not set. Yet report will be generated")
            for object_to_create in csv_objects_to_create:
                if '**not_created_reason**' not in object_to_create:
@@ -238,6 +234,7 @@ class WorkloadCollisionItem:
     workload_object: pylo.Workload = None
     csv_object: Dict[str, Any] = None
 
+
 def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_collisions, ignore_hostname_collision):
     print(" * Checking for name/hostname collisions inside the PCE:", flush=True)
     name_cache: Dict[str, WorkloadCollisionItem] = {}
@@ -278,14 +275,14 @@ def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_collisions, ignore_hostname_collision):
             else:
                 print(
                     " - WARNING: CSV has an entry for workload name '{}' at line #{} but it exists already in the PCE. It will be ignored.".format(
-
+                        lower_name, csv_object['*line*']))
 
         if csv_object['hostname'] is not None and len(csv_object['hostname']) > 0:
             lower_hostname = csv_object['hostname'].lower()
             if lower_name != lower_hostname:
                 if lower_hostname not in name_cache:
                     name_cache[lower_hostname] = WorkloadCollisionItem(from_pce=False, csv_object=csv_object,
-
+                                                                       managed=False)
                 else:
                     if not name_cache[lower_hostname].from_pce:
                         raise pylo.PyloEx('CSV contains workloads with duplicates name/hostname: {}'.format(lower_name))
@@ -295,7 +292,7 @@ def detect_workloads_name_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_collisions, ignore_hostname_collision):
             else:
                 print(
                     " - WARNING: CSV has an entry for workload hostname '{}' at line #{} but it exists already in the PCE. It will be ignored.".format(
-
+                        lower_hostname, csv_object['*line*']))
     print(" * DONE")
 
 
@@ -348,9 +345,10 @@ def detect_ip_collisions(csv_data, org: pylo.Organization, ignore_all_sorts_coll
         else:
             count_duplicate_ip_addresses_in_csv += 1
             if not ignore_all_sorts_collisions and not settings_ignore_ip_collision:
-                pylo.log.warn(
-
-
+                pylo.log.warn(
+                    indent + "Duplicate IP address {} found in the PCE and CSV/Excel at line #{} (name={} hostname={}). "
+                    "(look for --options to bypass this if you know what you are doing)"
+                    .format(ip, csv_object['*line*'], csv_object['name'], csv_object['hostname']))
                 csv_object['**not_created_reason**'] = "Duplicate IP address {} found in the PCE".format(ip)
 
             if ignore_all_sorts_collisions or settings_ignore_ip_collision:
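
The header validation added in this file is easiest to see in isolation: every label type/dimension is expected to have a '<prefix><type>' column (the prefix defaults to 'label:') unless --ignore-missing-headers is passed. A standalone sketch with hypothetical sample values:

    settings_header_label_prefix = 'label:'              # default_label_columns_prefix
    label_types = ['role', 'app', 'env', 'loc']          # sample dimensions
    csv_headers = ['name', 'hostname', 'ip', 'label:role', 'label:app', 'label:loc']

    try:
        for label_type in label_types:
            header_name = f"{settings_header_label_prefix}{label_type}".lower()
            if header_name not in csv_headers:
                raise ValueError(f"CSV/Excel file is missing the column '{header_name}' for label type "
                                 f"'{label_type}'. If this was intended use --ignore-missing-headers flag")
    except ValueError as error:
        print(error)   # here: the 'label:env' column is missing
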
illumio_pylo/cli/commands/workload_reset_names_to_null.py

@@ -1,10 +1,8 @@
 from typing import List
 import illumio_pylo as pylo
 import argparse
-import sys
 import math
 import colorama
-from .utils.misc import make_filename_with_timestamp
 from . import Command
 
 command_name = 'workload-reset-ven-names-to-null'
@@ -29,9 +27,9 @@ def __main(args, org: pylo.Organization, **kwargs):
     workloads_with_forced_names: List[pylo.Workload] = []
     workloads_with_mismatching_names: List[pylo.Workload] = []
 
-    #iterate through each workload
+    # iterate through each workload
     for wkl in org.WorkloadStore.itemsByHRef.values():
-        #only care about Managed workloads
+        # only care about Managed workloads
         if wkl.unmanaged:
             continue
 
@@ -53,26 +51,26 @@ def __main(args, org: pylo.Organization, **kwargs):
 
     # <editor-fold desc="JSON Payloads generation">
 
-    #for each batch of workloads, generate a JSON payload to send to the PCE to reset name to null
-    #the payload will be a list of objects with the following structure:
-    #
-    #
-    #
+    # for each batch of workloads, generate a JSON payload to send to the PCE to reset name to null
+    # the payload will be a list of objects with the following structure:
+    # {
+    #     "href": "string",
+    #     "name": null
     # }
 
     if not confirmed_changes:
         print(colorama.Fore.YELLOW + "Changes have not been confirmed. Use the --confirm flag to confirm the changes and push to the PCE")
-        #reset colorama colors
+        # reset colorama colors
         print(colorama.Style.RESET_ALL)
         return
 
     # for loop for each batch of workloads
     for i in range(math.ceil(len(workloads_with_mismatching_names) / batch_size)):
-        #get the next batch of workloads
+        # get the next batch of workloads
         batch = workloads_with_mismatching_names[i * batch_size: (i + 1) * batch_size]
-        #create a list of objects with the structure described above
+        # create a list of objects with the structure described above
         payload = [{"href": wkl.href, "name": wkl.static_name_stripped_fqdn(wkl.hostname)} for wkl in batch]
-        #debug display
+        # debug display
         print(f"Sending payload for batch {i + 1} of {math.ceil(len(workloads_with_mismatching_names) / batch_size)} ({len(payload)} workloads)")
 
         org.connector.objects_workload_update_bulk(payload)
@@ -80,4 +78,4 @@ def __main(args, org: pylo.Organization, **kwargs):
     # </editor-fold>
 
 
-command_object = Command(command_name, __main, fill_parser, objects_load_filter)
+command_object = Command(command_name, __main, fill_parser, objects_load_filter)
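
Finally, the batching logic in the last hunk above, shown standalone (the workload dicts are placeholders and the connector call is left commented out; per the diff the real call is org.connector.objects_workload_update_bulk(payload)):

    import math

    workloads = [{'href': f'/orgs/1/workloads/{i}'} for i in range(25)]   # placeholder objects
    batch_size = 10

    for i in range(math.ceil(len(workloads) / batch_size)):
        batch = workloads[i * batch_size: (i + 1) * batch_size]
        # one entry per workload; the command derives the new "name" from the hostname (or null)
        payload = [{'href': wkl['href'], 'name': None} for wkl in batch]
        print(f"Sending payload for batch {i + 1} of {math.ceil(len(workloads) / batch_size)} ({len(payload)} workloads)")
        # org.connector.objects_workload_update_bulk(payload)
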