illumio-pylo 0.3.1__py3-none-any.whl → 0.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- illumio_pylo/API/APIConnector.py +42 -9
- illumio_pylo/API/CredentialsManager.py +9 -9
- illumio_pylo/API/JsonPayloadTypes.py +29 -0
- illumio_pylo/Helpers/exports.py +3 -4
- illumio_pylo/IPMap.py +9 -0
- illumio_pylo/LabeledObject.py +1 -1
- illumio_pylo/Organization.py +4 -1
- illumio_pylo/Rule.py +27 -1
- illumio_pylo/Ruleset.py +15 -27
- illumio_pylo/Service.py +49 -52
- illumio_pylo/__init__.py +1 -1
- illumio_pylo/cli/__init__.py +19 -6
- illumio_pylo/cli/commands/credential_manager.py +91 -26
- illumio_pylo/cli/commands/ruleset_export.py +44 -38
- illumio_pylo/cli/commands/utils/misc.py +4 -0
- illumio_pylo/cli/commands/ven_upgrader.py +10 -78
- illumio_pylo/cli/commands/workload_export.py +7 -4
- illumio_pylo/cli/commands/workload_import.py +44 -13
- illumio_pylo/tmp.py +8 -4
- {illumio_pylo-0.3.1.dist-info → illumio_pylo-0.3.3.dist-info}/METADATA +1 -1
- {illumio_pylo-0.3.1.dist-info → illumio_pylo-0.3.3.dist-info}/RECORD +24 -24
- {illumio_pylo-0.3.1.dist-info → illumio_pylo-0.3.3.dist-info}/LICENSE +0 -0
- {illumio_pylo-0.3.1.dist-info → illumio_pylo-0.3.3.dist-info}/WHEEL +0 -0
- {illumio_pylo-0.3.1.dist-info → illumio_pylo-0.3.3.dist-info}/top_level.txt +0 -0
illumio_pylo/API/APIConnector.py
CHANGED
```diff
@@ -2,7 +2,7 @@ import json
 import time
 import getpass
 
-from .CredentialsManager import is_api_key_encrypted, decrypt_api_key
+from .CredentialsManager import is_api_key_encrypted, decrypt_api_key, CredentialProfile
 from .JsonPayloadTypes import LabelGroupObjectJsonStructure, LabelObjectCreationJsonStructure, \
     LabelObjectJsonStructure, LabelObjectUpdateJsonStructure, PCEObjectsJsonStructure, \
     LabelGroupObjectUpdateJsonStructure, IPListObjectCreationJsonStructure, IPListObjectJsonStructure, \
@@ -12,7 +12,7 @@ from .JsonPayloadTypes import LabelGroupObjectJsonStructure, LabelObjectCreation
     LabelDimensionObjectStructure, AuditLogApiReplyEventJsonStructure, WorkloadsGetQueryLabelFilterJsonStructure, \
     NetworkDeviceObjectJsonStructure, NetworkDeviceEndpointObjectJsonStructure, HrefReference, \
     WorkloadObjectCreateJsonStructure, WorkloadObjectMultiCreateJsonRequestPayload, \
-    WorkloadBulkUpdateEntryJsonStructure, WorkloadBulkUpdateResponseEntry
+    WorkloadBulkUpdateEntryJsonStructure, WorkloadBulkUpdateResponseEntry, VenObjectJsonStructure
 
 try:
     import requests as requests
@@ -108,6 +108,12 @@ class APIConnector:
     def get_all_object_types():
         return all_object_types.copy()
 
+    @staticmethod
+    def create_from_credentials_object(credentials: CredentialProfile) -> Optional['APIConnector']:
+        return APIConnector(credentials.fqdn, credentials.port, credentials.api_user,
+                            credentials.api_key, skip_ssl_cert_check=not credentials.verify_ssl,
+                            org_id=credentials.org_id, name=credentials.name)
+
     @staticmethod
     def create_from_credentials_in_file(fqdn_or_profile_name: str, request_if_missing: bool = False,
                                         credential_file: Optional[str] = None) -> Optional['APIConnector']:
@@ -115,9 +121,7 @@
         credentials = pylo.get_credentials_from_file(fqdn_or_profile_name, credential_file)
 
         if credentials is not None:
-            return APIConnector(credentials.fqdn, credentials.port, credentials.api_user,
-                                credentials.api_key, skip_ssl_cert_check=not credentials.verify_ssl,
-                                org_id=credentials.org_id, name=credentials.name)
+            return APIConnector.create_from_credentials_object(credentials)
 
         if not request_if_missing:
             return None
@@ -362,7 +366,7 @@
         else:
             raise pylo.PyloEx("Unsupported object type '{}'".format(object_type))
 
-    def get_pce_objects(self, include_deleted_workloads=False, list_of_objects_to_load: Optional[List[str]] = None):
+    def get_pce_objects(self, include_deleted_workloads=False, list_of_objects_to_load: Optional[List[str]] = None, force_async_mode=False):
 
         objects_to_load = {}
         if list_of_objects_to_load is not None:
@@ -389,7 +393,7 @@
         errors = []
         thread_queue = Queue()
 
-        def get_objects(q: Queue, thread_num: int):
+        def get_objects(q: Queue, thread_num: int, force_async_mode=False):
             while True:
                 object_type, errors = q.get()
                 try:
@@ -397,7 +401,7 @@
                     q.task_done()
                     continue
                 if object_type == 'workloads':
-                    if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
+                    if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls or force_async_mode:
                         data['workloads'] = self.objects_workload_get(include_deleted=include_deleted_workloads)
                     else:
                         data['workloads'] = self.objects_workload_get(include_deleted=include_deleted_workloads, async_mode=False, max_results=default_max_objects_for_sync_calls)
@@ -456,7 +460,7 @@
             q.task_done()
 
         for i in range(threads_count):
-            worker = Thread(target=get_objects, args=(thread_queue, i))
+            worker = Thread(target=get_objects, args=(thread_queue, i, force_async_mode,))
             worker.daemon = True
             worker.start()
 
@@ -620,6 +624,35 @@
 
         return None
 
+    def objects_ven_get(self,
+                        include_deleted=False,
+                        filter_by_ip: str = None,
+                        filter_by_label: Optional[WorkloadsGetQueryLabelFilterJsonStructure] = None,
+                        filter_by_name: str = None,
+                        max_results: int = None,
+                        async_mode=True) -> List[VenObjectJsonStructure]:
+        path = '/vens'
+        data = {}
+
+        if include_deleted:
+            data['include_deleted'] = 'yes'
+
+        if filter_by_ip is not None:
+            data['ip_address'] = filter_by_ip
+
+        if filter_by_label is not None:
+            # filter_by_label must be converted to json text
+            data['labels'] = json.dumps(filter_by_label)
+
+        if filter_by_name is not None:
+            data['name'] = filter_by_name
+
+        if max_results is not None:
+            data['max_results'] = max_results
+
+        return self.do_get_call(path=path, async_call=async_mode, params=data)
+
+
     def objects_workload_get(self,
                              include_deleted=False,
                              filter_by_ip: str = None,
```
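A hedged usage sketch for the new APIConnector features above; the profile name "my-pce" is a placeholder and only calls shown in this diff are used:

```python
import illumio_pylo as pylo

# Build a connector from a saved credential profile (placeholder name "my-pce").
connector = pylo.APIConnector.create_from_credentials_in_file("my-pce", request_if_missing=False)

if connector is not None:
    # force_async_mode=True (new in 0.3.3) makes every object download use async
    # API queries, which helps on large PCEs that time out on synchronous calls.
    pce_objects = connector.get_pce_objects(force_async_mode=True)

    # objects_ven_get() (new in 0.3.3) lists VENs, optionally filtered and capped.
    vens = connector.objects_ven_get(filter_by_name="web", max_results=500)
    for ven in vens:
        print(ven['href'], ven.get('hostname'))
```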
illumio_pylo/API/CredentialsManager.py
CHANGED
```diff
@@ -14,7 +14,6 @@ except ImportError:
     paramiko = None
 
 
-
 class CredentialFileEntry(TypedDict):
     name: str
     fqdn: str
@@ -97,7 +96,7 @@ def get_all_credentials_from_file(credential_file: str ) -> List[CredentialProfi
 
 
 def get_credentials_from_file(fqdn_or_profile_name: str = None,
-                              credential_file: str = None) -> CredentialProfile:
+                              credential_file: str = None, fail_with_an_exception=True) -> Optional[CredentialProfile]:
 
     if fqdn_or_profile_name is None:
         log.debug("No fqdn_or_profile_name provided, profile_name=default will be used")
@@ -121,9 +120,12 @@ def get_credentials_from_file(fqdn_or_profile_name: str = None,
         if credential_profile.fqdn.lower() == fqdn_or_profile_name.lower():
             return credential_profile
 
-    raise PyloEx("No profile found in credential file '{}' with fqdn: {}".
+    if fail_with_an_exception:
+        raise PyloEx("No profile found in credential file '{}' with fqdn: {}".
                  format(credential_file, fqdn_or_profile_name))
 
+    return None
+
 
 def list_potential_credential_files() -> List[str]:
     """
@@ -204,9 +206,7 @@ def create_credential_in_default_file(data: CredentialFileEntry) -> str:
     return file_path
 
 
-def encrypt_api_key_with_paramiko_key(ssh_key: paramiko.AgentKey, api_key: str) -> str:
-
-
+def encrypt_api_key_with_paramiko_ssh_key_fernet(ssh_key: paramiko.AgentKey, api_key: str) -> str:
     def encrypt(raw: str, key: bytes) -> bytes:
         """
 
@@ -220,7 +220,7 @@ def encrypt_api_key_with_paramiko_key(ssh_key: paramiko.AgentKey, api_key: str)
 
 
     # generate a random 128bit key
-    session_key_to_sign = os.urandom(
+    session_key_to_sign = os.urandom(32)
 
     signed_message = ssh_key.sign_ssh_data(session_key_to_sign)
 
@@ -236,7 +236,7 @@ def encrypt_api_key_with_paramiko_key(ssh_key: paramiko.AgentKey, api_key: str)
     return api_key
 
 
-def
+def decrypt_api_key_with_paramiko_ssh_key_fernet(encrypted_api_key_payload: str) -> str:
     def decrypt(token_b64_encoded: str, key: bytes):
         f = Fernet(base64.urlsafe_b64encode(key))
         return f.decrypt(token_b64_encoded).decode('utf-8')
@@ -277,7 +277,7 @@ def decrypt_api_key(encrypted_api_key_payload: str) -> str:
     if not encrypted_api_key_payload.startswith("$encrypted$:"):
         raise PyloEx("Invalid encrypted API key format")
     if encrypted_api_key_payload.startswith("$encrypted$:ssh-Fernet:"):
-        return
+        return decrypt_api_key_with_paramiko_ssh_key_fernet(encrypted_api_key_payload)
 
     raise PyloEx("Unsupported encryption method: {}".format(encrypted_api_key_payload.split(":")[1]))
 
```
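A short sketch of the new non-raising credential lookup; "my-pce" is a placeholder profile name:

```python
import illumio_pylo as pylo

# fail_with_an_exception=False (new in 0.3.3) returns None instead of raising
# PyloEx when no matching profile exists in the credential file.
credentials = pylo.get_credentials_from_file("my-pce", fail_with_an_exception=False)

if credentials is None:
    print("No credential profile named 'my-pce' was found")
else:
    # create_from_credentials_object() is the new factory extracted from
    # create_from_credentials_in_file().
    connector = pylo.APIConnector.create_from_credentials_object(credentials)
```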
illumio_pylo/API/JsonPayloadTypes.py
CHANGED
```diff
@@ -121,6 +121,35 @@ class WorkloadBulkUpdateResponseEntry(TypedDict):
     message: NotRequired[str]
 
 
+class VenObjectWorkloadSummaryJsonStructure(TypedDict):
+    href: str
+    mode: str
+    online: bool
+
+
+class VenObjectJsonStructure(TypedDict):
+    created_at: str
+    created_by: Optional[HrefReferenceWithName]
+    description: Optional[str]
+    hostname: Optional[str]
+    href: str
+    labels: List[HrefReference]
+    name: Optional[str]
+    interfaces: List[WorkloadInterfaceObjectJsonStructure]
+    updated_at: str
+    updated_by: Optional[HrefReferenceWithName]
+    last_heartbeat_at: Optional[str]
+    last_goodbye_at: Optional[str]
+    ven_type: Literal['server', 'endpoint', 'containerized-ven']
+    active_pce_fqdn: Optional[str]
+    target_pce_fqdn: Optional[str]
+    workloads: List[VenObjectWorkloadSummaryJsonStructure]
+    version: Optional[str]
+    os_id: Optional[str]
+    os_version: Optional[str]
+    os_platform: Optional[str]
+    uid: Optional[str]
+
 
 class RuleServiceReferenceObjectJsonStructure(TypedDict):
     href: str
```
illumio_pylo/Helpers/exports.py
CHANGED
```diff
@@ -255,10 +255,9 @@ class ArraysToExcel:
                         new_line.append('=HYPERLINK("{}", "{}")'.format(item,self._headers[item_index].url_text))
                     else:
                         new_line.append(item)
-
-
-
-                        columns_max_width[item_index] = length
+                    length = find_length(new_line[item_index])
+                    if length > columns_max_width[item_index]:
+                        columns_max_width[item_index] = length
 
 
                 xls_data.append(new_line)
```
illumio_pylo/IPMap.py
CHANGED
```diff
@@ -71,6 +71,15 @@ class IP4Map:
         self._entries.append(new_entry)
         self.sort_and_recalculate()
 
+    def add_another_map(self, another_map: 'IP4Map', skip_recalculation=False):
+        for entry in another_map._entries:
+            self._entries.append(entry)
+
+        if skip_recalculation:
+            return
+
+        self.sort_and_recalculate()
+
     def intersection(self, another_map: 'IP4Map'):
 
         inverted_map = IP4Map()
```
illumio_pylo/LabeledObject.py
CHANGED
illumio_pylo/Organization.py
CHANGED
```diff
@@ -135,7 +135,10 @@ class Organization:
         object_to_load = pylo.APIConnector.get_all_object_types()
 
         if self.pce_version is None:
-
+            if 'pce_version' in data:
+                self.pce_version = pylo.SoftwareVersion(data['pce_version'])
+            else:
+                raise pylo.PyloEx('Organization has no "version" specified')
 
         self.LabelStore.load_label_dimensions(data.get('label_dimensions'))
 
```
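A hedged sketch of the loading flow this change affects, mirroring the CLI code further down; "my-pce" is a placeholder profile name:

```python
import illumio_pylo as pylo

connector = pylo.APIConnector.create_from_credentials_in_file("my-pce")
config_data = connector.get_pce_objects()

org = pylo.Organization(1)
org.connector = connector
# If org.pce_version was never set, load_from_json() now falls back to
# config_data['pce_version'] and raises PyloEx when neither is available.
org.load_from_json(config_data)
```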
illumio_pylo/Rule.py
CHANGED
```diff
@@ -4,7 +4,7 @@ from typing import Optional, List, Union, Dict, Any, NewType
 import illumio_pylo as pylo
 from .API.JsonPayloadTypes import RuleServiceReferenceObjectJsonStructure, RuleDirectServiceReferenceObjectJsonStructure
 from illumio_pylo import Workload, Label, LabelGroup, Ruleset, Referencer, SecurityPrincipal, PyloEx, \
-    Service, nice_json, string_list_to_text, find_connector_or_die, VirtualService, IPList
+    Service, nice_json, string_list_to_text, find_connector_or_die, VirtualService, IPList, PortMap
 
 RuleActorsAcceptableTypes = NewType('RuleActorsAcceptableTypes', Union[Workload, Label, LabelGroup, IPList, VirtualService])
 
@@ -250,6 +250,7 @@ class RuleServiceContainer(pylo.Referencer):
         self.owner = owner
         self._items: Dict[Service, Service] = {}
         self._direct_services: List[DirectServiceInRule] = []
+        self._cached_port_map: Optional[PortMap] = None
 
     def load_from_json(self, data_list: List[RuleServiceReferenceObjectJsonStructure|RuleDirectServiceReferenceObjectJsonStructure]):
         ss_store = self.owner.owner.owner.owner.ServiceStore  # make it a local variable for fast lookups
@@ -295,6 +296,8 @@ class RuleServiceContainer(pylo.Referencer):
         :param service:
         :return: True if the service was removed, False if it was not found
         """
+        self._cached_port_map = None
+
         for i in range(0, len(self._direct_services)):
             if self._direct_services[i] is service:
                 del(self._direct_services[i])
@@ -302,6 +305,8 @@
         return False
 
     def add_direct_service(self, service: DirectServiceInRule) -> bool:
+        self._cached_port_map = None
+
         for member in self._direct_services:
             if service is member:
                 return False
@@ -352,6 +357,27 @@
 
         self.owner.raw_json.update(data)
 
+    def get_port_map(self) -> PortMap:
+        """
+        Get a PortMap object with all ports and protocols from all services in this container
+        :return:
+        """
+        if self._cached_port_map is not None:
+            return self._cached_port_map
+
+        result = PortMap()
+        for service in self._items.values():
+            for entry in service.entries:
+                result.add(entry.protocol, entry.port, entry.to_port, skip_recalculation=True)
+        for direct in self._direct_services:
+            result.add(direct.protocol, direct.port, direct.to_port, skip_recalculation=True)
+
+        result.merge_overlapping_maps()
+
+        self._cached_port_map = result
+
+        return result
+
 
 class RuleHostContainer(pylo.Referencer):
     def __init__(self, owner: 'pylo.Rule', name: str):
```
illumio_pylo/Ruleset.py
CHANGED
```diff
@@ -101,29 +101,17 @@ class RulesetScopeEntry:
 
 
     def to_string(self, label_separator = '|', use_href=False):
-        string = '
-
-
-
-
-
-
-
-
-        else:
-            if use_href:
-                string += self.env_label.href + label_separator
-            else:
-                string += self.env_label.name + label_separator
-
-        if self.loc_label is None:
-            string += 'All'
-        else:
-            if use_href:
-                string += self.loc_label.href
+        string = ''
+        for label_type in self.owner.owner.owner.owner.LabelStore.label_types:
+            label = self._labels.get(label_type)
+            if len(string) > 0:
+                string += label_separator
+            if label is None:
+                string += 'All'
+            elif use_href:
+                string += label.href
             else:
-                string +=
-
+                string += label.name
         return string
 
     def is_all_all_all(self):
@@ -156,10 +144,6 @@
 
 class Ruleset:
 
-    name: str
-    href: Optional[str]
-    description: str
-
     def __init__(self, owner: 'pylo.RulesetStore'):
         self.owner: 'pylo.RulesetStore' = owner
         self.href: Optional[str] = None
@@ -169,6 +153,7 @@ class Ruleset:
         # must keep an ordered list of rules while the dict by href is there for quick searches
         self._rules_by_href: Dict[str, 'pylo.Rule'] = {}
         self._rules: List['pylo.Rule'] = []
+        self.disabled: bool = False
 
     @property
     def rules(self):
@@ -211,6 +196,9 @@
             raise pylo.PyloEx("Cannot find Ruleset href in JSON data: \n" + pylo.Helpers.nice_json(data))
         self.href = data['href']
 
+        if 'enabled' in data:
+            self.disabled = not data['enabled']
+
         scopes_json = data.get('scopes')
         if scopes_json is None:
             raise pylo.PyloEx("Cannot find Ruleset scope in JSON data: \n" + pylo.Helpers.nice_json(data))
@@ -287,7 +275,7 @@
         if pce_fqdn is None or pce_port is None:
             connector = pylo.find_connector_or_die(self)
             if pce_fqdn is None:
-                pce_fqdn = connector.
+                pce_fqdn = connector.fqdn
             if pce_port is None:
                 pce_port = connector.port
 
```
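A short sketch of the new disabled flag on Ruleset (same assumptions about the RulesetStore iteration as above):

```python
import illumio_pylo as pylo

org = pylo.Organization.get_from_cache_file("my-pce")  # placeholder cached profile

# Ruleset.disabled (new in 0.3.3) mirrors the inverse of the 'enabled' field
# found in the PCE JSON payload.
for ruleset in org.RulesetStore.rulesets:
    state = "DISABLED" if ruleset.disabled else "enabled"
    print("{} [{}] - {} rules".format(ruleset.name, state, len(ruleset.rules)))
```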
illumio_pylo/Service.py
CHANGED
```diff
@@ -7,10 +7,20 @@ from typing import *
 
 class PortMap:
     def __init__(self):
-        self._tcp_map = []
-        self._udp_map = []
+        self._tcp_map: List[List[2]] = []  # [start, end]
+        self._udp_map: List[List[2]] = []  # [start, end]
         self._protocol_map = {}
 
+    def copy(self) -> 'PortMap':
+        new_map = PortMap()
+        new_map._tcp_map = self._tcp_map.copy()
+        new_map._udp_map = self._udp_map.copy()
+        new_map._protocol_map = self._protocol_map.copy()
+        return new_map
+
+    def count(self) -> int:
+        return len(self._tcp_map) + len(self._udp_map) + len(self._protocol_map)
+
     def add(self, protocol, start_port: int, end_port: int = None, skip_recalculation=False):
 
         proto = None
@@ -31,67 +41,54 @@ class PortMap:
             return
 
         if start_port is None:
+            start_port = end_port
+
+        if end_port is None:
             end_port = start_port
 
-
+        if proto == 6:
+            self._tcp_map.append([start_port, end_port])
+        else:
+            self._udp_map.append([start_port, end_port])
 
         if not skip_recalculation:
             self.merge_overlapping_maps()
 
-    def
-
-
-
-
-        cur_entry = None
-
-        for original_entry in self._tcp_map:
-            if cur_entry is None:
-                cur_entry = original_entry
-                continue
-
-            cur_start = cur_entry[0]
-            cur_end = cur_entry[1]
-            new_start = original_entry[0]
-            new_end = original_entry[1]
+    def to_list_of_objects(self) -> List[Dict]:
+        result = []
+        for entry in self._tcp_map:
+            result.append({'proto': 6, 'port': entry[0], 'to_port': entry[1]})
 
-
-
-                continue
+        for entry in self._udp_map:
+            result.append({'proto': 17, 'port': entry[0], 'to_port': entry[1]})
 
-
-
+        for proto in self._protocol_map:
+            result.append({'proto': proto})
 
-
-            self._tcp_map = []
-            else:
-                new_map.append(cur_entry)
-        self._tcp_map = new_map
-
-        new_map = []
-
-        for original_entry in self._udp_map:
-            if cur_entry is None:
-                cur_entry = original_entry
-                continue
-
-            cur_start = cur_entry[0]
-            cur_end = cur_entry[1]
-            new_start = original_entry[0]
-            new_end = original_entry[1]
-
-            if new_start > cur_end + 1:
-                new_map.append(cur_entry)
-                continue
+        return result
 
-
-
+    def merge_overlapping_maps(self):
+        self._sort_maps()
 
-
-
-
-
-
+        def merge_maps(map_list):
+            new_list = []
+            current = None
+            for entry in map_list:
+                if current is None:
+                    current = entry
+                    continue
+
+                if entry[0] <= current[1] + 1:
+                    current[1] = entry[1]
+                else:
+                    new_list.append(current)
+                    current = entry
+            if current is not None:
+                new_list.append(current)
+            return new_list
+
+        self._tcp_map = merge_maps(self._tcp_map)
+        self._udp_map = merge_maps(self._udp_map)
 
     def _sort_maps(self):
         def first_entry(my_list):
```
illumio_pylo/__init__.py
CHANGED
illumio_pylo/cli/__init__.py
CHANGED
```diff
@@ -1,6 +1,7 @@
 import os
 from typing import Optional, Dict
-
+import time
+import datetime
 import sys
 import argparse
 from .NativeParsers import BaseParser
@@ -15,6 +16,8 @@ from illumio_pylo.cli import commands
 
 def run(forced_command_name: Optional[str] = None):
 
+    cli_start_time = datetime.datetime.now()
+
     def add_native_parser_to_argparse(parser: argparse.ArgumentParser, native_parsers: object):
         # each property of the native parser is an extension of BaseParser, we need to iterate over them and add them to the argparse parser
         for attr_name in dir(native_parsers):
@@ -33,9 +36,11 @@ def run(forced_command_name: Optional[str] = None):
             print(" * Native CLI arguments parsing...")
             attr.execute(args[attr.get_arg_name()], org, padding=' ')
 
-    parser = argparse.ArgumentParser(description='
+    parser = argparse.ArgumentParser(description='PYLO-CLI: Illumio API&More Command Line Interface')
     parser.add_argument('--pce', type=str, required=False,
                         help='hostname of the PCE')
+    parser.add_argument('--force-async-mode', action='store_true',
+                        help='Forces the command to run async API queries when required (large PCEs which timeout on specific queries)')
     parser.add_argument('--debug', action='store_true',
                         help='Enables extra debugging output in Pylo framework')
     parser.add_argument('--use-cache', action='store_true',
@@ -89,14 +94,16 @@ def run(forced_command_name: Optional[str] = None):
 
     print("* Started Pylo CLI version {}".format(pylo.__version__))
 
+
     if not selected_command.credentials_manager_mode:
+        timer_start = time.perf_counter()
         # credential_profile_name is required for all commands except the credential manager
         if credential_profile_name is None:
             raise pylo.PyloEx("The --pce argument is required for this command")
         if settings_use_cache:
             print(" * Loading objects from cached PCE '{}' data... ".format(credential_profile_name), end="", flush=True)
             org = pylo.Organization.get_from_cache_file(credential_profile_name)
-            print("OK!")
+            print("OK! (execution time: {:.2f} seconds)".format(time.perf_counter() - timer_start))
             connector = pylo.APIConnector.create_from_credentials_in_file(credential_profile_name, request_if_missing=False)
             if connector is not None:
                 org.connector = connector
@@ -106,8 +113,9 @@ def run(forced_command_name: Optional[str] = None):
             print("OK!")
 
             print(" * Downloading PCE objects from API... ".format(credential_profile_name), end="", flush=True)
-            config_data = connector.get_pce_objects(list_of_objects_to_load=selected_command.load_specific_objects_only)
-
+            config_data = connector.get_pce_objects(list_of_objects_to_load=selected_command.load_specific_objects_only, force_async_mode=args['force_async_mode'])
+            timer_download_finished = time.perf_counter()
+            print("OK! (execution time: {:.2f} seconds)".format(timer_download_finished - timer_start))
 
             org = pylo.Organization(1)
             org.connector = connector
@@ -116,7 +124,7 @@ def run(forced_command_name: Optional[str] = None):
             print(" * Loading objects from PCE '{}' via API... ".format(credential_profile_name), end="", flush=True)
             org.pce_version = connector.get_software_version()
             org.load_from_json(config_data, list_of_objects_to_load=selected_command.load_specific_objects_only)
-            print("OK!")
+            print("OK! (execution time: {:.2f} seconds)".format(time.perf_counter() - timer_download_finished))
 
         print()
         if not selected_command.skip_pce_config_loading:
@@ -126,6 +134,7 @@ def run(forced_command_name: Optional[str] = None):
     print(flush=True)
 
     print("**** {} UTILITY ****".format(selected_command.name.upper()), flush=True)
+    command_execution_time_start = time.perf_counter()
     if selected_command.native_parsers is None:
         native_parsers = None
     else:
@@ -138,7 +147,11 @@ def run(forced_command_name: Optional[str] = None):
     commands.available_commands[selected_command.name].main(args, org=org, config_data=config_data, connector=connector, pce_cache_was_used=settings_use_cache)
 
     print()
+    cli_end_time = datetime.datetime.now()
     print("**** END OF {} UTILITY ****".format(selected_command.name.upper()))
+    print("Command Specific Execution time: {:.2f} seconds".format(time.perf_counter() - command_execution_time_start))
+    print("CLI started at {} and finished at {}".format(cli_start_time, cli_end_time))
+    print("CLI Total Execution time: {}".format(cli_end_time - cli_start_time))
     print()
 
 
```
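A hedged sketch of driving the CLI with the new flag programmatically; the entry-point name, profile, and sub-command below are placeholders, not confirmed by this diff:

```python
import sys
import illumio_pylo.cli as cli

# --force-async-mode (new in 0.3.3) forces async API downloads for large PCEs;
# per-phase and total execution times are now printed at the end of the run.
sys.argv = ["pylo-cli", "--pce", "my-pce", "--force-async-mode", "workload-export"]
cli.run()
```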