illumio-pylo 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- illumio_pylo/API/APIConnector.py +1308 -0
- illumio_pylo/API/AuditLog.py +42 -0
- illumio_pylo/API/ClusterHealth.py +136 -0
- illumio_pylo/API/CredentialsManager.py +286 -0
- illumio_pylo/API/Explorer.py +1077 -0
- illumio_pylo/API/JsonPayloadTypes.py +240 -0
- illumio_pylo/API/RuleSearchQuery.py +128 -0
- illumio_pylo/API/__init__.py +0 -0
- illumio_pylo/AgentStore.py +139 -0
- illumio_pylo/Exception.py +44 -0
- illumio_pylo/Helpers/__init__.py +3 -0
- illumio_pylo/Helpers/exports.py +508 -0
- illumio_pylo/Helpers/functions.py +166 -0
- illumio_pylo/IPList.py +135 -0
- illumio_pylo/IPMap.py +285 -0
- illumio_pylo/Label.py +25 -0
- illumio_pylo/LabelCommon.py +48 -0
- illumio_pylo/LabelGroup.py +68 -0
- illumio_pylo/LabelStore.py +403 -0
- illumio_pylo/LabeledObject.py +25 -0
- illumio_pylo/Organization.py +258 -0
- illumio_pylo/Query.py +331 -0
- illumio_pylo/ReferenceTracker.py +41 -0
- illumio_pylo/Rule.py +671 -0
- illumio_pylo/Ruleset.py +306 -0
- illumio_pylo/RulesetStore.py +101 -0
- illumio_pylo/SecurityPrincipal.py +62 -0
- illumio_pylo/Service.py +256 -0
- illumio_pylo/SoftwareVersion.py +125 -0
- illumio_pylo/VirtualService.py +17 -0
- illumio_pylo/VirtualServiceStore.py +75 -0
- illumio_pylo/Workload.py +506 -0
- illumio_pylo/WorkloadStore.py +289 -0
- illumio_pylo/__init__.py +82 -0
- illumio_pylo/cli/NativeParsers.py +96 -0
- illumio_pylo/cli/__init__.py +134 -0
- illumio_pylo/cli/__main__.py +10 -0
- illumio_pylo/cli/commands/__init__.py +32 -0
- illumio_pylo/cli/commands/credential_manager.py +168 -0
- illumio_pylo/cli/commands/iplist_import_from_file.py +185 -0
- illumio_pylo/cli/commands/misc.py +7 -0
- illumio_pylo/cli/commands/ruleset_export.py +129 -0
- illumio_pylo/cli/commands/update_pce_objects_cache.py +44 -0
- illumio_pylo/cli/commands/ven_duplicate_remover.py +366 -0
- illumio_pylo/cli/commands/ven_idle_to_visibility.py +287 -0
- illumio_pylo/cli/commands/ven_upgrader.py +226 -0
- illumio_pylo/cli/commands/workload_export.py +251 -0
- illumio_pylo/cli/commands/workload_import.py +423 -0
- illumio_pylo/cli/commands/workload_relabeler.py +510 -0
- illumio_pylo/cli/commands/workload_reset_names_to_null.py +83 -0
- illumio_pylo/cli/commands/workload_used_in_rule_finder.py +80 -0
- illumio_pylo/docs/Doxygen +1757 -0
- illumio_pylo/tmp.py +104 -0
- illumio_pylo/utilities/__init__.py +0 -0
- illumio_pylo/utilities/cli.py +10 -0
- illumio_pylo/utilities/credentials.example.json +20 -0
- illumio_pylo/utilities/explorer_report_exporter.py +86 -0
- illumio_pylo/utilities/health_monitoring.py +102 -0
- illumio_pylo/utilities/iplist_analyzer.py +148 -0
- illumio_pylo/utilities/iplists_stats_duplicates_unused_finder.py +75 -0
- illumio_pylo/utilities/resources/iplists-import-example.csv +3 -0
- illumio_pylo/utilities/resources/iplists-import-example.xlsx +0 -0
- illumio_pylo/utilities/resources/workload-exporter-filter-example.csv +3 -0
- illumio_pylo/utilities/resources/workloads-import-example.csv +2 -0
- illumio_pylo/utilities/resources/workloads-import-example.xlsx +0 -0
- illumio_pylo/utilities/ven_compatibility_report_export.py +240 -0
- illumio_pylo/utilities/ven_idle_to_illumination.py +344 -0
- illumio_pylo/utilities/ven_reassign_pce.py +183 -0
- illumio_pylo-0.2.5.dist-info/LICENSE +176 -0
- illumio_pylo-0.2.5.dist-info/METADATA +197 -0
- illumio_pylo-0.2.5.dist-info/RECORD +73 -0
- illumio_pylo-0.2.5.dist-info/WHEEL +5 -0
- illumio_pylo-0.2.5.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1308 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
import getpass
|
|
4
|
+
|
|
5
|
+
from .CredentialsManager import is_api_key_encrypted, decrypt_api_key
|
|
6
|
+
from .JsonPayloadTypes import LabelGroupObjectJsonStructure, LabelObjectCreationJsonStructure, \
|
|
7
|
+
LabelObjectJsonStructure, LabelObjectUpdateJsonStructure, PCEObjectsJsonStructure, \
|
|
8
|
+
LabelGroupObjectUpdateJsonStructure, IPListObjectCreationJsonStructure, IPListObjectJsonStructure, \
|
|
9
|
+
VirtualServiceObjectJsonStructure, RuleCoverageQueryEntryJsonStructure, RulesetObjectUpdateStructure, \
|
|
10
|
+
WorkloadHrefRef, IPListHrefRef, VirtualServiceHrefRef, RuleDirectServiceReferenceObjectJsonStructure, \
|
|
11
|
+
RulesetObjectJsonStructure, WorkloadObjectJsonStructure, SecurityPrincipalObjectJsonStructure, \
|
|
12
|
+
LabelDimensionObjectStructure, AuditLogApiReplyEventJsonStructure, WorkloadsGetQueryLabelFilterJsonStructure, \
|
|
13
|
+
NetworkDeviceObjectJsonStructure, NetworkDeviceEndpointObjectJsonStructure, HrefReference
|
|
14
|
+
|
|
15
|
+
try:
|
|
16
|
+
import requests as requests
|
|
17
|
+
except ImportError:
|
|
18
|
+
import requests
|
|
19
|
+
|
|
20
|
+
from threading import Thread
|
|
21
|
+
from queue import Queue
|
|
22
|
+
import illumio_pylo as pylo
|
|
23
|
+
from illumio_pylo import log
|
|
24
|
+
from typing import Union, Dict, Any, List, Optional, Literal
|
|
25
|
+
|
|
26
|
+
requests.packages.urllib3.disable_warnings()
|
|
27
|
+
|
|
28
|
+
default_retry_count_if_api_call_limit_reached = 3
|
|
29
|
+
default_retry_wait_time_if_api_call_limit_reached = 10
|
|
30
|
+
default_max_objects_for_sync_calls = 99999
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_field_or_die(field_name: str, data):
|
|
34
|
+
if type(data) is not dict:
|
|
35
|
+
raise pylo.PyloEx("Data argument should of type DICT, '{}' was given".format(type(data)))
|
|
36
|
+
|
|
37
|
+
field = data.get(field_name, pylo.objectNotFound)
|
|
38
|
+
|
|
39
|
+
if field is pylo.objectNotFound:
|
|
40
|
+
raise pylo.PyloEx("Could not find field named '{}' in data".format(field_name), data)
|
|
41
|
+
return field
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
ObjectTypes = Literal['iplists', 'workloads', 'virtual_services', 'labels', 'labelgroups', 'services', 'rulesets',
|
|
45
|
+
'security_principals', 'label_dimensions']
|
|
46
|
+
|
|
47
|
+
all_object_types: Dict[ObjectTypes, ObjectTypes] = {
|
|
48
|
+
'iplists': 'iplists',
|
|
49
|
+
'workloads': 'workloads',
|
|
50
|
+
'virtual_services': 'virtual_services',
|
|
51
|
+
'labels': 'labels',
|
|
52
|
+
'labelgroups': 'labelgroups',
|
|
53
|
+
'services': 'services',
|
|
54
|
+
'rulesets': 'rulesets',
|
|
55
|
+
'security_principals': 'security_principals',
|
|
56
|
+
'label_dimensions': 'label_dimensions'
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class APIConnector:
|
|
61
|
+
"""docstring for APIConnector."""
|
|
62
|
+
|
|
63
|
+
def __init__(self, fqdn: str, port, apiuser: str, apikey: str, skip_ssl_cert_check=False, org_id=1, name='unnamed'):
|
|
64
|
+
self.name = name
|
|
65
|
+
self.fqdn: str = fqdn
|
|
66
|
+
if type(port) is int:
|
|
67
|
+
port = str(port)
|
|
68
|
+
self.port: int = port
|
|
69
|
+
self._api_key: str = apikey
|
|
70
|
+
self._decrypted_api_key: str = None
|
|
71
|
+
self.api_user: str = apiuser
|
|
72
|
+
self.orgID: int = org_id
|
|
73
|
+
self.skipSSLCertCheck: bool = skip_ssl_cert_check
|
|
74
|
+
self.version: Optional['pylo.SoftwareVersion'] = None
|
|
75
|
+
self.version_string: str = "Not Defined"
|
|
76
|
+
self._cached_session = requests.session()
|
|
77
|
+
|
|
78
|
+
@property
|
|
79
|
+
def api_key(self):
|
|
80
|
+
if self._decrypted_api_key is not None:
|
|
81
|
+
return self._decrypted_api_key
|
|
82
|
+
if is_api_key_encrypted(self._api_key):
|
|
83
|
+
self._decrypted_api_key = decrypt_api_key(self._api_key)
|
|
84
|
+
return self._decrypted_api_key
|
|
85
|
+
return self._api_key
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
@staticmethod
|
|
89
|
+
def get_all_object_types_names_except(exception_list: List[ObjectTypes]):
|
|
90
|
+
|
|
91
|
+
if len(exception_list) == 0:
|
|
92
|
+
return all_object_types.values()
|
|
93
|
+
|
|
94
|
+
# first let's check that all names in exception_list are valid (case mismatches and typos...)
|
|
95
|
+
for name in exception_list:
|
|
96
|
+
if name not in all_object_types:
|
|
97
|
+
raise pylo.PyloEx("object type named '{}' doesn't exist. The list of supported objects names is: {}".
|
|
98
|
+
format(name, pylo.string_list_to_text(all_object_types.values())))
|
|
99
|
+
|
|
100
|
+
object_names_list: List[str] = []
|
|
101
|
+
for name in all_object_types.values():
|
|
102
|
+
if name not in exception_list:
|
|
103
|
+
object_names_list.append(name)
|
|
104
|
+
|
|
105
|
+
@staticmethod
|
|
106
|
+
def get_all_object_types():
|
|
107
|
+
return all_object_types.copy()
|
|
108
|
+
|
|
109
|
+
@staticmethod
|
|
110
|
+
def create_from_credentials_in_file(fqdn_or_profile_name: str, request_if_missing: bool = False,
|
|
111
|
+
credential_file: Optional[str] = None) -> Optional['APIConnector']:
|
|
112
|
+
|
|
113
|
+
credentials = pylo.get_credentials_from_file(fqdn_or_profile_name, credential_file)
|
|
114
|
+
|
|
115
|
+
if credentials is not None:
|
|
116
|
+
return APIConnector(credentials.fqdn, credentials.port, credentials.api_user,
|
|
117
|
+
credentials.api_key, skip_ssl_cert_check=not credentials.verify_ssl,
|
|
118
|
+
org_id=credentials.org_id, name=credentials.name)
|
|
119
|
+
|
|
120
|
+
if not request_if_missing:
|
|
121
|
+
return None
|
|
122
|
+
|
|
123
|
+
print('Cannot find credentials for host "{}".\nPlease input an API user:'.format(fqdn_or_profile_name), end='')
|
|
124
|
+
user = input()
|
|
125
|
+
print('API password:', end='')
|
|
126
|
+
password = getpass.getpass()
|
|
127
|
+
print('Server port:', end='')
|
|
128
|
+
port = int(input())
|
|
129
|
+
print('A name for this connection (ie: MyCompany PROD')
|
|
130
|
+
name = input()
|
|
131
|
+
|
|
132
|
+
connector = pylo.APIConnector(fqdn_or_profile_name, port, user, password, skip_ssl_cert_check=True, name=name)
|
|
133
|
+
return connector
|
|
134
|
+
|
|
135
|
+
def _make_base_url(self, path: str='') -> str:
|
|
136
|
+
# remove leading '/' from path if exists
|
|
137
|
+
if len(path) > 0 and path[0] == '/':
|
|
138
|
+
path = path[1:]
|
|
139
|
+
url = "https://{0}:{1}/{2}".format(self.fqdn, self.port, path)
|
|
140
|
+
return url
|
|
141
|
+
|
|
142
|
+
def _make_api_url(self, path: str = '', include_org_id=False) -> str:
|
|
143
|
+
url = self._make_base_url('/api/v2')
|
|
144
|
+
if include_org_id:
|
|
145
|
+
url += '/orgs/' + str(self.orgID)
|
|
146
|
+
url += path
|
|
147
|
+
|
|
148
|
+
return url
|
|
149
|
+
|
|
150
|
+
def do_get_call(self, path, json_arguments=None, include_org_id=True, json_output_expected=True, async_call=False, params=None, skip_product_version_check=False,
|
|
151
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
152
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached,
|
|
153
|
+
return_headers: bool = False):
|
|
154
|
+
|
|
155
|
+
return self._do_call('GET', path, json_arguments=json_arguments, include_org_id=include_org_id,
|
|
156
|
+
json_output_expected=json_output_expected, async_call=async_call, skip_product_version_check=skip_product_version_check, params=params,
|
|
157
|
+
retry_count_if_api_call_limit_reached=retry_count_if_api_call_limit_reached,
|
|
158
|
+
retry_wait_time_if_api_call_limit_reached=retry_wait_time_if_api_call_limit_reached,
|
|
159
|
+
return_headers=return_headers)
|
|
160
|
+
|
|
161
|
+
def do_post_call(self, path, json_arguments=None, include_org_id=True, json_output_expected=True, async_call=False, params=None,
|
|
162
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
163
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached):
|
|
164
|
+
|
|
165
|
+
return self._do_call('POST', path, json_arguments=json_arguments, include_org_id=include_org_id,
|
|
166
|
+
json_output_expected=json_output_expected, async_call=async_call, params=params,
|
|
167
|
+
retry_count_if_api_call_limit_reached=retry_count_if_api_call_limit_reached,
|
|
168
|
+
retry_wait_time_if_api_call_limit_reached=retry_wait_time_if_api_call_limit_reached)
|
|
169
|
+
|
|
170
|
+
def do_put_call(self, path, json_arguments=None, include_org_id=True, json_output_expected=True, async_call=False, params=None,
|
|
171
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
172
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached):
|
|
173
|
+
|
|
174
|
+
return self._do_call('PUT', path, json_arguments=json_arguments, include_org_id=include_org_id,
|
|
175
|
+
json_output_expected=json_output_expected, async_call=async_call, params=params,
|
|
176
|
+
retry_count_if_api_call_limit_reached=retry_count_if_api_call_limit_reached,
|
|
177
|
+
retry_wait_time_if_api_call_limit_reached=retry_wait_time_if_api_call_limit_reached)
|
|
178
|
+
|
|
179
|
+
def do_delete_call(self, path, json_arguments=None, include_org_id=True, json_output_expected=True, async_call=False, params=None,
|
|
180
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
181
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached):
|
|
182
|
+
|
|
183
|
+
return self._do_call('DELETE', path, json_arguments=json_arguments, include_org_id=include_org_id,
|
|
184
|
+
json_output_expected=json_output_expected, async_call=async_call, params=params,
|
|
185
|
+
retry_count_if_api_call_limit_reached=retry_count_if_api_call_limit_reached,
|
|
186
|
+
retry_wait_time_if_api_call_limit_reached=retry_wait_time_if_api_call_limit_reached)
|
|
187
|
+
|
|
188
|
+
def _do_call(self, method, path, json_arguments=None, include_org_id=True, json_output_expected=True, async_call=False,
|
|
189
|
+
skip_product_version_check=False, params=None,
|
|
190
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
191
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached,
|
|
192
|
+
return_headers: bool = False):
|
|
193
|
+
|
|
194
|
+
if self.version is None and not skip_product_version_check:
|
|
195
|
+
self.collect_pce_infos()
|
|
196
|
+
|
|
197
|
+
url = self._make_api_url(path, include_org_id)
|
|
198
|
+
|
|
199
|
+
headers = {'Accept': 'application/json'}
|
|
200
|
+
|
|
201
|
+
if json_arguments is not None:
|
|
202
|
+
headers['Content-Type'] = 'application/json'
|
|
203
|
+
|
|
204
|
+
if async_call:
|
|
205
|
+
headers['Prefer'] = 'respond-async'
|
|
206
|
+
|
|
207
|
+
while True:
|
|
208
|
+
|
|
209
|
+
log.info("Request URL: " + url)
|
|
210
|
+
|
|
211
|
+
try:
|
|
212
|
+
req = self._cached_session.request(method, url, headers=headers, auth=(self.api_user, self.api_key),
|
|
213
|
+
verify=(not self.skipSSLCertCheck), json=json_arguments,
|
|
214
|
+
params=params)
|
|
215
|
+
except Exception as e:
|
|
216
|
+
raise pylo.PyloApiEx("PCE connectivity or low level issue: {}".format(e))
|
|
217
|
+
|
|
218
|
+
answer_size = len(req.content) / 1024
|
|
219
|
+
log.info("URL downloaded (size "+str( int(answer_size) )+"KB) Reply headers:\n" +
|
|
220
|
+
"HTTP " + method + " " + url + " STATUS " + str(req.status_code) + " " + req.reason)
|
|
221
|
+
log.info(req.headers)
|
|
222
|
+
# log.info("Request Body:" + pylo.nice_json(json_arguments))
|
|
223
|
+
# log.info("Request returned code "+ str(req.status_code) + ". Raw output:\n" + req.text[0:2000])
|
|
224
|
+
|
|
225
|
+
if async_call:
|
|
226
|
+
if (method == 'GET' or method == 'POST') and req.status_code != 202:
|
|
227
|
+
orig_request = req.request # type: requests.PreparedRequest
|
|
228
|
+
raise Exception("Status code for Async call should be 202 but " + str(req.status_code)
|
|
229
|
+
+ " " + req.reason + " was returned with the following body: " + req.text +
|
|
230
|
+
"\n\n Request was: " + orig_request.url + "\nHEADERS: " + str(orig_request.headers) +
|
|
231
|
+
"\nBODY:\n" + str(orig_request.body))
|
|
232
|
+
|
|
233
|
+
if 'Location' not in req.headers:
|
|
234
|
+
raise Exception('Header "Location" was not found in API answer!')
|
|
235
|
+
if 'Retry-After' not in req.headers:
|
|
236
|
+
raise Exception('Header "Retry-After" was not found in API answer!')
|
|
237
|
+
|
|
238
|
+
job_location = req.headers['Location']
|
|
239
|
+
retry_interval = int(req.headers['Retry-After'])
|
|
240
|
+
|
|
241
|
+
retry_loop_times = 0
|
|
242
|
+
|
|
243
|
+
while True:
|
|
244
|
+
log.info("Sleeping " + str(retry_interval) + " seconds before polling for job status, elapsed " + str(retry_interval*retry_loop_times) + " seconds so far" )
|
|
245
|
+
retry_loop_times += 1
|
|
246
|
+
time.sleep(retry_interval)
|
|
247
|
+
job_poll = self.do_get_call(job_location, include_org_id=False)
|
|
248
|
+
if 'status' not in job_poll:
|
|
249
|
+
raise Exception('Job polling request did not return a "status" field')
|
|
250
|
+
job_poll_status = job_poll['status']
|
|
251
|
+
|
|
252
|
+
if job_poll_status == 'failed':
|
|
253
|
+
if 'result' in job_poll and 'message' in job_poll['result']:
|
|
254
|
+
raise Exception('Job polling return with status "Failed": ' + job_poll['result']['message'])
|
|
255
|
+
else:
|
|
256
|
+
raise Exception('Job polling return with status "Failed": ' + job_poll)
|
|
257
|
+
|
|
258
|
+
if job_poll_status == 'done':
|
|
259
|
+
if 'result' not in job_poll:
|
|
260
|
+
raise Exception('Job is marked as done but has no "result"')
|
|
261
|
+
if 'href' not in job_poll['result']:
|
|
262
|
+
raise Exception("Job is marked as done but did not return a href to download resulting Dataset")
|
|
263
|
+
|
|
264
|
+
result_href = job_poll['result']['href']
|
|
265
|
+
break
|
|
266
|
+
|
|
267
|
+
log.info("Job status is " + job_poll_status)
|
|
268
|
+
|
|
269
|
+
log.info("Job is done, we will now download the resulting dataset")
|
|
270
|
+
dataset = self.do_get_call(result_href, include_org_id=False)
|
|
271
|
+
|
|
272
|
+
return dataset
|
|
273
|
+
|
|
274
|
+
if method == 'GET' and req.status_code != 200 \
|
|
275
|
+
or\
|
|
276
|
+
method == 'POST' and req.status_code != 201 and req.status_code != 204 and req.status_code != 200 and req.status_code != 202\
|
|
277
|
+
or\
|
|
278
|
+
method == 'DELETE' and req.status_code != 204 \
|
|
279
|
+
or \
|
|
280
|
+
method == 'PUT' and req.status_code != 204 and req.status_code != 200:
|
|
281
|
+
|
|
282
|
+
if req.status_code == 429:
|
|
283
|
+
# too many requests sent in short amount of time? [{"token":"too_many_requests_error", ....}]
|
|
284
|
+
json_out = req.json()
|
|
285
|
+
if len(json_out) > 0:
|
|
286
|
+
if "token" in json_out[0]:
|
|
287
|
+
if json_out[0]['token'] == 'too_many_requests_error':
|
|
288
|
+
if retry_count_if_api_call_limit_reached < 1:
|
|
289
|
+
raise pylo.PyloApiTooManyRequestsEx(
|
|
290
|
+
'API has hit DOS protection limit (X calls per minute)', json_out)
|
|
291
|
+
|
|
292
|
+
retry_count_if_api_call_limit_reached = retry_count_if_api_call_limit_reached - 1
|
|
293
|
+
log.info(
|
|
294
|
+
"API has returned 'too_many_requests_error', we will sleep for {} seconds and retry {} more times".format(
|
|
295
|
+
retry_wait_time_if_api_call_limit_reached,
|
|
296
|
+
retry_count_if_api_call_limit_reached))
|
|
297
|
+
time.sleep(retry_wait_time_if_api_call_limit_reached)
|
|
298
|
+
continue
|
|
299
|
+
|
|
300
|
+
if req.status_code == 403:
|
|
301
|
+
raise pylo.PyloApiRequestForbiddenEx('API returned error status "' + str(req.status_code) + ' ' + req.reason
|
|
302
|
+
+ '" and error message: ' + req.text)
|
|
303
|
+
|
|
304
|
+
raise pylo.PyloApiEx('API returned error status "' + str(req.status_code) + ' ' + req.reason
|
|
305
|
+
+ '" and error message: ' + req.text)
|
|
306
|
+
|
|
307
|
+
if return_headers:
|
|
308
|
+
return req.headers
|
|
309
|
+
|
|
310
|
+
if json_output_expected:
|
|
311
|
+
log.info("Parsing API answer to JSON (with a size of " + str( int(answer_size) ) + "KB)")
|
|
312
|
+
json_out = req.json()
|
|
313
|
+
log.info("Done!")
|
|
314
|
+
if answer_size < 5:
|
|
315
|
+
log.info("Resulting JSON object:")
|
|
316
|
+
log.info(json.dumps(json_out, indent=2, sort_keys=True))
|
|
317
|
+
else:
|
|
318
|
+
log.info("Answer is too large to be printed")
|
|
319
|
+
return json_out
|
|
320
|
+
|
|
321
|
+
return req.text
|
|
322
|
+
|
|
323
|
+
raise pylo.PyloApiEx("Unexpected API output or race condition")
|
|
324
|
+
|
|
325
|
+
def get_software_version(self) -> Optional['pylo.SoftwareVersion']:
|
|
326
|
+
self.collect_pce_infos()
|
|
327
|
+
return self.version
|
|
328
|
+
|
|
329
|
+
def get_software_version_string(self) -> str:
|
|
330
|
+
self.collect_pce_infos()
|
|
331
|
+
return self.version_string
|
|
332
|
+
|
|
333
|
+
def get_objects_count_by_type(self, object_type: str) -> int:
|
|
334
|
+
|
|
335
|
+
def extract_count(headers):
|
|
336
|
+
count = headers.get('x-total-count')
|
|
337
|
+
if count is None:
|
|
338
|
+
raise pylo.PyloApiEx('API didnt provide field "x-total-count"')
|
|
339
|
+
|
|
340
|
+
return int(count)
|
|
341
|
+
|
|
342
|
+
if object_type == 'workloads':
|
|
343
|
+
return extract_count(self.do_get_call('/workloads', async_call=False, return_headers=True))
|
|
344
|
+
elif object_type == 'virtual_services':
|
|
345
|
+
return extract_count(self.do_get_call('/sec_policy/draft/virtual_services', async_call=False, return_headers=True))
|
|
346
|
+
elif object_type == 'labels':
|
|
347
|
+
return extract_count(self.do_get_call('/labels', async_call=False, return_headers=True))
|
|
348
|
+
elif object_type == 'labelgroups':
|
|
349
|
+
return extract_count(self.do_get_call('/sec_policy/draft/label_groups', async_call=False, return_headers=True))
|
|
350
|
+
elif object_type == 'iplists':
|
|
351
|
+
return extract_count(self.do_get_call('/sec_policy/draft/ip_lists', async_call=False, return_headers=True))
|
|
352
|
+
elif object_type == 'services':
|
|
353
|
+
return extract_count(self.do_get_call('/sec_policy/draft/services', async_call=False, return_headers=True))
|
|
354
|
+
elif object_type == 'rulesets':
|
|
355
|
+
return extract_count(self.do_get_call('/sec_policy/draft/rule_sets', async_call=False, return_headers=True))
|
|
356
|
+
elif object_type == 'security_principals':
|
|
357
|
+
return extract_count(self.do_get_call('/security_principals', async_call=False, return_headers=True))
|
|
358
|
+
elif object_type == 'label_dimensions':
|
|
359
|
+
return extract_count(self.do_get_call('/label_dimensions', async_call=False, return_headers=True))
|
|
360
|
+
else:
|
|
361
|
+
raise pylo.PyloEx("Unsupported object type '{}'".format(object_type))
|
|
362
|
+
|
|
363
|
+
def get_pce_objects(self, include_deleted_workloads=False, list_of_objects_to_load: Optional[List[str]] = None):
|
|
364
|
+
|
|
365
|
+
objects_to_load = {}
|
|
366
|
+
if list_of_objects_to_load is not None:
|
|
367
|
+
all_types = pylo.APIConnector.get_all_object_types()
|
|
368
|
+
for object_type in list_of_objects_to_load:
|
|
369
|
+
if object_type not in all_types:
|
|
370
|
+
raise pylo.PyloEx("Unknown object type '{}'".format(object_type))
|
|
371
|
+
objects_to_load[object_type] = True
|
|
372
|
+
else:
|
|
373
|
+
objects_to_load = pylo.APIConnector.get_all_object_types()
|
|
374
|
+
|
|
375
|
+
self.get_software_version()
|
|
376
|
+
|
|
377
|
+
# whatever the request was, label dimensions are not optional if PCE is 22.2+
|
|
378
|
+
if self.version.is_greater_or_equal_than(pylo.SoftwareVersion("22.2.0")):
|
|
379
|
+
objects_to_load['label_dimensions'] = 'label_dimensions'
|
|
380
|
+
else:
|
|
381
|
+
if 'label_dimensions' in objects_to_load:
|
|
382
|
+
del objects_to_load['label_dimensions']
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
threads_count = 4
|
|
386
|
+
data: PCEObjectsJsonStructure = pylo.Organization.create_fake_empty_config()
|
|
387
|
+
errors = []
|
|
388
|
+
thread_queue = Queue()
|
|
389
|
+
|
|
390
|
+
def get_objects(q: Queue, thread_num: int):
|
|
391
|
+
while True:
|
|
392
|
+
object_type, errors = q.get()
|
|
393
|
+
try:
|
|
394
|
+
if len(errors) > 0:
|
|
395
|
+
q.task_done()
|
|
396
|
+
continue
|
|
397
|
+
if object_type == 'workloads':
|
|
398
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
399
|
+
data['workloads'] = self.objects_workload_get(include_deleted=include_deleted_workloads)
|
|
400
|
+
else:
|
|
401
|
+
data['workloads'] = self.objects_workload_get(include_deleted=include_deleted_workloads, async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
402
|
+
|
|
403
|
+
elif object_type == 'virtual_services':
|
|
404
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
405
|
+
data['virtual_services'] = self.objects_virtual_service_get()
|
|
406
|
+
else:
|
|
407
|
+
data['virtual_services'] = self.objects_virtual_service_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
408
|
+
|
|
409
|
+
elif object_type == 'labels':
|
|
410
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
411
|
+
data['labels'] = self.objects_label_get()
|
|
412
|
+
else:
|
|
413
|
+
data['labels'] = self.objects_label_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
414
|
+
|
|
415
|
+
elif object_type == 'labelgroups':
|
|
416
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
417
|
+
data['labelgroups'] = self.objects_labelgroup_get()
|
|
418
|
+
else:
|
|
419
|
+
data['labelgroups'] = self.objects_labelgroup_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
420
|
+
|
|
421
|
+
elif object_type == 'iplists':
|
|
422
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
423
|
+
data['iplists'] = self.objects_iplist_get()
|
|
424
|
+
else:
|
|
425
|
+
data['iplists'] = self.objects_iplist_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
426
|
+
|
|
427
|
+
elif object_type == 'services':
|
|
428
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
429
|
+
data['services'] = self.objects_service_get()
|
|
430
|
+
else:
|
|
431
|
+
data['services'] = self.objects_service_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
432
|
+
|
|
433
|
+
elif object_type == 'rulesets':
|
|
434
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
435
|
+
data['rulesets'] = self.objects_ruleset_get()
|
|
436
|
+
else:
|
|
437
|
+
data['rulesets'] = self.objects_ruleset_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
438
|
+
|
|
439
|
+
elif object_type == 'security_principals':
|
|
440
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
441
|
+
data['security_principals'] = self.objects_securityprincipal_get()
|
|
442
|
+
else:
|
|
443
|
+
data['security_principals'] = self.objects_securityprincipal_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
444
|
+
elif object_type == 'label_dimensions':
|
|
445
|
+
if self.get_objects_count_by_type(object_type) > default_max_objects_for_sync_calls:
|
|
446
|
+
data['label_dimensions'] = self.objects_label_dimension_get()
|
|
447
|
+
else:
|
|
448
|
+
data['label_dimensions'] = self.objects_label_dimension_get(async_mode=False, max_results=default_max_objects_for_sync_calls)
|
|
449
|
+
else:
|
|
450
|
+
raise pylo.PyloEx("Unsupported object type '{}'".format(object_type))
|
|
451
|
+
except Exception as e:
|
|
452
|
+
errors.append(e)
|
|
453
|
+
|
|
454
|
+
q.task_done()
|
|
455
|
+
|
|
456
|
+
for i in range(threads_count):
|
|
457
|
+
worker = Thread(target=get_objects, args=(thread_queue, i))
|
|
458
|
+
worker.daemon = True
|
|
459
|
+
worker.start()
|
|
460
|
+
|
|
461
|
+
for type in objects_to_load.keys():
|
|
462
|
+
thread_queue.put((type, errors,))
|
|
463
|
+
|
|
464
|
+
thread_queue.join()
|
|
465
|
+
|
|
466
|
+
if len(errors) > 0:
|
|
467
|
+
raise errors[0]
|
|
468
|
+
|
|
469
|
+
return data
|
|
470
|
+
|
|
471
|
+
def collect_pce_infos(self):
|
|
472
|
+
if self.version is not None: # Make sure we collect data only once
|
|
473
|
+
return
|
|
474
|
+
path = "/product_version"
|
|
475
|
+
json_output = self.do_get_call(path, include_org_id=False, skip_product_version_check=True)
|
|
476
|
+
|
|
477
|
+
self.version_string = json_output['version']
|
|
478
|
+
self.version = pylo.SoftwareVersion(json_output['long_display'])
|
|
479
|
+
|
|
480
|
+
def policy_check(self, protocol, port=None, src_ip=None, src_href=None, dst_ip=None, dst_href=None,
|
|
481
|
+
retry_count_if_api_call_limit_reached=default_retry_count_if_api_call_limit_reached,
|
|
482
|
+
retry_wait_time_if_api_call_limit_reached=default_retry_wait_time_if_api_call_limit_reached):
|
|
483
|
+
|
|
484
|
+
if type(port) is str:
|
|
485
|
+
lower = protocol.lower()
|
|
486
|
+
if lower == 'udp':
|
|
487
|
+
protocol = 17
|
|
488
|
+
elif lower == 'tcp':
|
|
489
|
+
protocol = 6
|
|
490
|
+
else:
|
|
491
|
+
raise pylo.PyloEx("Unsupported protocol '{}'".format(protocol))
|
|
492
|
+
|
|
493
|
+
if src_ip is None and src_href is None:
|
|
494
|
+
raise pylo.PyloEx('src_ip and src_href cannot be both null')
|
|
495
|
+
if dst_ip is None and dst_href is None:
|
|
496
|
+
raise pylo.PyloEx('dst_ip and dst_href cannot be both null')
|
|
497
|
+
|
|
498
|
+
path = "/sec_policy/draft/allow?protocol={}".format(protocol)
|
|
499
|
+
|
|
500
|
+
if port is not None:
|
|
501
|
+
path += "&port={}".format(port)
|
|
502
|
+
|
|
503
|
+
if src_ip is not None:
|
|
504
|
+
path += "&src_external_ip={}".format(src_ip)
|
|
505
|
+
if src_href is not None:
|
|
506
|
+
path += "&src_workload={}".format(src_href)
|
|
507
|
+
|
|
508
|
+
if src_ip is not None:
|
|
509
|
+
path += "&src_external_ip={}".format(src_ip)
|
|
510
|
+
if src_href is not None:
|
|
511
|
+
path += "&src_workload={}".format(src_href)
|
|
512
|
+
|
|
513
|
+
if dst_ip is not None:
|
|
514
|
+
path += "&dst_external_ip={}".format(dst_ip)
|
|
515
|
+
if dst_href is not None:
|
|
516
|
+
path += "&dst_workload={}".format(dst_href)
|
|
517
|
+
|
|
518
|
+
return self.do_get_call(path=path, async_call=False,
|
|
519
|
+
retry_count_if_api_call_limit_reached=retry_count_if_api_call_limit_reached,
|
|
520
|
+
retry_wait_time_if_api_call_limit_reached=retry_wait_time_if_api_call_limit_reached)
|
|
521
|
+
|
|
522
|
+
def rule_coverage_query(self, data: List[RuleCoverageQueryEntryJsonStructure], include_boundary_rules=True):
|
|
523
|
+
params = None
|
|
524
|
+
if include_boundary_rules is not None:
|
|
525
|
+
params = {'include_deny_rules': include_boundary_rules}
|
|
526
|
+
return self.do_post_call(path='/sec_policy/draft/rule_coverage', json_arguments=data, include_org_id=True, json_output_expected=True, async_call=False, params=params)
|
|
527
|
+
|
|
528
|
+
def objects_label_get(self, max_results: int = None, async_mode=True) -> List[LabelObjectJsonStructure]:
|
|
529
|
+
path = '/labels'
|
|
530
|
+
data = {}
|
|
531
|
+
|
|
532
|
+
if max_results is not None:
|
|
533
|
+
data['max_results'] = max_results
|
|
534
|
+
|
|
535
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
536
|
+
|
|
537
|
+
def objects_label_update(self, href: str, data: LabelObjectUpdateJsonStructure):
|
|
538
|
+
path = href
|
|
539
|
+
return self.do_put_call(path=path, json_arguments=data, json_output_expected=False, include_org_id=False)
|
|
540
|
+
|
|
541
|
+
def objects_label_delete(self, href: Union[str, 'pylo.Label']):
|
|
542
|
+
path = href
|
|
543
|
+
if type(href) is pylo.Label:
|
|
544
|
+
path = href.href
|
|
545
|
+
|
|
546
|
+
return self.do_delete_call(path=path, json_output_expected=False, include_org_id=False)
|
|
547
|
+
|
|
548
|
+
def objects_label_create(self, label_name: str, label_type: str):
|
|
549
|
+
path = '/labels'
|
|
550
|
+
if label_type != 'app' and label_type != 'env' and label_type != 'role' and label_type != 'loc':
|
|
551
|
+
raise Exception("Requested to create a Label '%s' with wrong type '%s'" % (label_name, label_type))
|
|
552
|
+
data: LabelObjectCreationJsonStructure = {'key': label_type, 'value': label_name}
|
|
553
|
+
return self.do_post_call(path=path, json_arguments=data)
|
|
554
|
+
|
|
555
|
+
def objects_labelgroup_get(self, max_results: int = None, async_mode=True) -> List[LabelGroupObjectJsonStructure]:
|
|
556
|
+
path = '/sec_policy/draft/label_groups'
|
|
557
|
+
data = {}
|
|
558
|
+
|
|
559
|
+
if max_results is not None:
|
|
560
|
+
data['max_results'] = max_results
|
|
561
|
+
|
|
562
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
563
|
+
|
|
564
|
+
def objects_labelgroup_update(self, href: str, data: LabelGroupObjectUpdateJsonStructure):
|
|
565
|
+
path = href
|
|
566
|
+
return self.do_put_call(path=path, json_arguments=data, json_output_expected=False, include_org_id=False)
|
|
567
|
+
|
|
568
|
+
def objects_label_dimension_get(self, max_results: int = None, async_mode=False) -> List[LabelDimensionObjectStructure]:
|
|
569
|
+
path = '/label_dimensions'
|
|
570
|
+
data = {}
|
|
571
|
+
|
|
572
|
+
if max_results is not None:
|
|
573
|
+
data['max_results'] = max_results
|
|
574
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
575
|
+
|
|
576
|
+
def objects_virtual_service_get(self, max_results: int = None, async_mode=True) -> List[VirtualServiceObjectJsonStructure]:
|
|
577
|
+
path = '/sec_policy/draft/virtual_services'
|
|
578
|
+
data = {}
|
|
579
|
+
|
|
580
|
+
if max_results is not None:
|
|
581
|
+
data['max_results'] = max_results
|
|
582
|
+
|
|
583
|
+
results = self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
584
|
+
# check type
|
|
585
|
+
if type(results) is list:
|
|
586
|
+
return results
|
|
587
|
+
raise pylo.PyloEx("Unexpected result type '{}' while expecting an array of Virtual Service objects".format(type(results)), results)
|
|
588
|
+
|
|
589
|
+
def objects_iplist_get(self, max_results: int = None, async_mode=True, search_name: str = None) -> List[IPListObjectJsonStructure]:
|
|
590
|
+
path = '/sec_policy/draft/ip_lists'
|
|
591
|
+
data = {}
|
|
592
|
+
|
|
593
|
+
if search_name is not None:
|
|
594
|
+
data['name'] = search_name
|
|
595
|
+
|
|
596
|
+
if max_results is not None:
|
|
597
|
+
data['max_results'] = max_results
|
|
598
|
+
|
|
599
|
+
results: List[IPListObjectJsonStructure] = self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
600
|
+
# check type
|
|
601
|
+
if type(results) is list:
|
|
602
|
+
return results
|
|
603
|
+
|
|
604
|
+
raise pylo.PyloEx("Unexpected result type '{}' while expecting an array of IP List objects".format(type(results)), results)
|
|
605
|
+
|
|
606
|
+
def objects_iplist_create(self, json_blob: IPListObjectCreationJsonStructure):
|
|
607
|
+
path = '/sec_policy/draft/ip_lists'
|
|
608
|
+
return self.do_post_call(path=path, json_arguments=json_blob)
|
|
609
|
+
|
|
610
|
+
def objects_iplists_get_default_any(self) -> Optional[str]:
|
|
611
|
+
"""
|
|
612
|
+
Returns the href of the default 'ANY' IP List or None (which is a bad sign!)
|
|
613
|
+
:return:
|
|
614
|
+
"""
|
|
615
|
+
response = self.objects_iplist_get(max_results=10, async_mode=False, search_name='0.0.0.0')
|
|
616
|
+
|
|
617
|
+
for item in response:
|
|
618
|
+
if item['created_by']['href'] == '/users/0':
|
|
619
|
+
return item['href']
|
|
620
|
+
|
|
621
|
+
return None
|
|
622
|
+
|
|
623
|
+
def objects_workload_get(self,
|
|
624
|
+
include_deleted=False,
|
|
625
|
+
filter_by_ip: str = None,
|
|
626
|
+
filter_by_label: WorkloadsGetQueryLabelFilterJsonStructure=None,
|
|
627
|
+
filter_by_name: str = None,
|
|
628
|
+
filter_by_managed: bool = None,
|
|
629
|
+
filer_by_policy_health: Literal['active', 'warning', 'error'] = None,
|
|
630
|
+
max_results: int = None,
|
|
631
|
+
async_mode=True) -> List[WorkloadObjectJsonStructure]:
|
|
632
|
+
path = '/workloads'
|
|
633
|
+
data = {}
|
|
634
|
+
|
|
635
|
+
if include_deleted:
|
|
636
|
+
data['include_deleted'] = 'yes'
|
|
637
|
+
|
|
638
|
+
if filter_by_ip is not None:
|
|
639
|
+
data['ip_address'] = filter_by_ip
|
|
640
|
+
|
|
641
|
+
if filter_by_label is not None:
|
|
642
|
+
# filter_by_label must be converted to json text
|
|
643
|
+
data['labels'] = json.dumps(filter_by_label)
|
|
644
|
+
|
|
645
|
+
if filter_by_name is not None:
|
|
646
|
+
data['name'] = filter_by_name
|
|
647
|
+
|
|
648
|
+
if filter_by_managed is not None:
|
|
649
|
+
data['managed'] = 'true' if filter_by_managed else 'false'
|
|
650
|
+
|
|
651
|
+
if filer_by_policy_health is not None:
|
|
652
|
+
data['policy_health'] = filer_by_policy_health
|
|
653
|
+
|
|
654
|
+
if max_results is not None:
|
|
655
|
+
data['max_results'] = max_results
|
|
656
|
+
|
|
657
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
658
|
+
|
|
659
|
+
def objects_workload_agent_upgrade(self, workload_href: str, target_version: str):
|
|
660
|
+
path = '{}/upgrade'.format(workload_href)
|
|
661
|
+
data = {"release": target_version}
|
|
662
|
+
|
|
663
|
+
return self.do_post_call(path=path, json_arguments=data, json_output_expected=False, include_org_id=False)
|
|
664
|
+
|
|
665
|
+
def objects_workload_update(self, href: str, data):
|
|
666
|
+
path = href
|
|
667
|
+
|
|
668
|
+
return self.do_put_call(path=path, json_arguments=data, json_output_expected=False, include_org_id=False)
|
|
669
|
+
|
|
670
|
+
def objects_workload_update_bulk(self, json_object):
|
|
671
|
+
path = '/workloads/bulk_update'
|
|
672
|
+
return self.do_put_call(path=path, json_arguments=json_object)
|
|
673
|
+
|
|
674
|
+
def objects_workload_delete(self, href):
|
|
675
|
+
"""
|
|
676
|
+
|
|
677
|
+
:type href: str|pylo.Workload
|
|
678
|
+
"""
|
|
679
|
+
path = href
|
|
680
|
+
if type(href) is pylo.Workload:
|
|
681
|
+
path = href.href
|
|
682
|
+
|
|
683
|
+
return self.do_delete_call(path=path, json_output_expected=False, include_org_id=False)
|
|
684
|
+
|
|
685
|
+
def object_workload_get_active_policies(self, workload_href: str):
|
|
686
|
+
path = '/sec_policy/active/policy_view'
|
|
687
|
+
data = {'workload': workload_href}
|
|
688
|
+
return self.do_get_call(path=path, async_call=False, params=data, include_org_id=True, json_output_expected=True)
|
|
689
|
+
|
|
690
|
+
class WorkloadMultiDeleteTracker:
|
|
691
|
+
_errors: Dict[str, str]
|
|
692
|
+
_hrefs: Dict[str, bool]
|
|
693
|
+
_workloads: Dict[str, 'pylo.Workload'] # dict of workloads by HREF
|
|
694
|
+
connector: 'pylo.APIConnector'
|
|
695
|
+
|
|
696
|
+
def __init__(self, connector: 'pylo.APIConnector'):
|
|
697
|
+
self.connector = connector
|
|
698
|
+
self._hrefs = {}
|
|
699
|
+
self._errors = {}
|
|
700
|
+
self._workloads = {}
|
|
701
|
+
|
|
702
|
+
@property
|
|
703
|
+
def workloads(self) -> List['pylo.Workload']:
|
|
704
|
+
"""
|
|
705
|
+
Return a copy of the list of workloads.
|
|
706
|
+
:return:
|
|
707
|
+
"""
|
|
708
|
+
return list(self._workloads.values())
|
|
709
|
+
|
|
710
|
+
@property
|
|
711
|
+
def workloads_by_href(self) -> Dict[str, 'pylo.Workload']:
|
|
712
|
+
"""
|
|
713
|
+
Return a copy of the dict of workloads by href
|
|
714
|
+
:return:
|
|
715
|
+
"""
|
|
716
|
+
return self._workloads.copy()
|
|
717
|
+
|
|
718
|
+
@property
|
|
719
|
+
def hrefs(self) -> List[str]:
|
|
720
|
+
"""
|
|
721
|
+
Return a copy of the list of hrefs
|
|
722
|
+
:return:
|
|
723
|
+
"""
|
|
724
|
+
return list(self._hrefs.keys())
|
|
725
|
+
|
|
726
|
+
def add_workload(self, wkl: 'pylo.Workload'):
|
|
727
|
+
self._hrefs[wkl.href] = True
|
|
728
|
+
self._workloads[wkl.href] = wkl
|
|
729
|
+
|
|
730
|
+
def add_href(self, href: str):
|
|
731
|
+
self._hrefs[href] = True
|
|
732
|
+
|
|
733
|
+
def add_error(self, href: str, message: str):
|
|
734
|
+
self._errors[href] = message
|
|
735
|
+
|
|
736
|
+
def get_error_by_wlk(self, wkl: 'pylo.Workload') -> Optional[str]:
|
|
737
|
+
found = self._errors.get(wkl.href, pylo.objectNotFound)
|
|
738
|
+
if found is pylo.objectNotFound:
|
|
739
|
+
return None
|
|
740
|
+
return found
|
|
741
|
+
|
|
742
|
+
def get_error_by_href(self, href: str) -> Union[str, None]:
|
|
743
|
+
return self._errors.get(href)
|
|
744
|
+
|
|
745
|
+
def execute(self, unpair_agents=False):
|
|
746
|
+
|
|
747
|
+
if len(self._hrefs) < 1:
|
|
748
|
+
raise pylo.PyloEx("WorkloadMultiDeleteTracker is empty")
|
|
749
|
+
|
|
750
|
+
try:
|
|
751
|
+
result = self.connector.objects_workload_delete_multi(list(self._hrefs.keys()))
|
|
752
|
+
except Exception as ex: #global exception means something really bad happened we log errors for all workloads
|
|
753
|
+
for href in self._hrefs.keys():
|
|
754
|
+
self._errors[href] = str(ex)
|
|
755
|
+
return
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
# print(pylo.nice_json(result))
|
|
759
|
+
if not type(result) is list:
|
|
760
|
+
raise pylo.PyloApiEx("API didnt return expected JSON format", result)
|
|
761
|
+
|
|
762
|
+
agents_to_unpair = []
|
|
763
|
+
|
|
764
|
+
for entry in result:
|
|
765
|
+
if not type(entry) is dict:
|
|
766
|
+
raise pylo.PyloApiEx("API didnt return expected JSON format", entry)
|
|
767
|
+
href = entry.get("href")
|
|
768
|
+
if href is None or type(href) is not str:
|
|
769
|
+
raise pylo.PyloApiEx("API didnt return expected JSON format", entry)
|
|
770
|
+
|
|
771
|
+
error = entry.get("errors")
|
|
772
|
+
error_string = json.dumps(error)
|
|
773
|
+
if unpair_agents and error is not None and error_string.find("method_not_allowed_error") > -1:
|
|
774
|
+
agents_to_unpair.append(href)
|
|
775
|
+
elif error is not None and len(error) > 0:
|
|
776
|
+
self._errors[href] = error_string
|
|
777
|
+
|
|
778
|
+
if len(agents_to_unpair) > 0:
|
|
779
|
+
self._unpair_agents(agents_to_unpair)
|
|
780
|
+
|
|
781
|
+
def _unpair_agents(self, workloads_hrefs: [str]):
|
|
782
|
+
for href in workloads_hrefs:
|
|
783
|
+
retry_count = 5
|
|
784
|
+
api_result = None
|
|
785
|
+
|
|
786
|
+
while retry_count >= 0:
|
|
787
|
+
retry_count -= 1
|
|
788
|
+
try:
|
|
789
|
+
api_result = self.connector.objects_workload_unpair_multi([href])
|
|
790
|
+
break
|
|
791
|
+
|
|
792
|
+
except pylo.PyloApiTooManyRequestsEx as ex:
|
|
793
|
+
if retry_count <= 0:
|
|
794
|
+
self._errors[href] = str(ex)
|
|
795
|
+
break
|
|
796
|
+
time.sleep(6)
|
|
797
|
+
|
|
798
|
+
except pylo.PyloApiEx as ex:
|
|
799
|
+
self._errors[href] = str(ex)
|
|
800
|
+
break
|
|
801
|
+
|
|
802
|
+
|
|
803
|
+
def count_entries(self):
|
|
804
|
+
return len(self._hrefs)
|
|
805
|
+
|
|
806
|
+
def count_errors(self):
|
|
807
|
+
return len(self._errors)
|
|
808
|
+
|
|
809
|
+
def new_tracker_workload_multi_delete(self):
|
|
810
|
+
return APIConnector.WorkloadMultiDeleteTracker(self)
|
|
811
|
+
|
|
812
|
+
def objects_workload_delete_multi(self, href_or_workload_array: Union[List['pylo.Workload'],List[str]]):
|
|
813
|
+
if len(href_or_workload_array) < 1:
|
|
814
|
+
return
|
|
815
|
+
|
|
816
|
+
json_data = []
|
|
817
|
+
|
|
818
|
+
if type(href_or_workload_array[0]) is str:
|
|
819
|
+
for href in href_or_workload_array:
|
|
820
|
+
json_data.append({"href": href})
|
|
821
|
+
else:
|
|
822
|
+
href: 'pylo.Workload'
|
|
823
|
+
for href in href_or_workload_array:
|
|
824
|
+
json_data.append({"href": href.href})
|
|
825
|
+
|
|
826
|
+
# print(json_data)
|
|
827
|
+
|
|
828
|
+
path = "/workloads/bulk_delete"
|
|
829
|
+
|
|
830
|
+
return self.do_put_call(path=path, json_arguments=json_data, json_output_expected=True)
|
|
831
|
+
|
|
832
|
+
def objects_workload_unpair_multi(self, href_or_workload_array):
|
|
833
|
+
"""
|
|
834
|
+
|
|
835
|
+
:type href_or_workload_array: list[str]|list[pylo.Workload]
|
|
836
|
+
"""
|
|
837
|
+
|
|
838
|
+
if len(href_or_workload_array) < 1:
|
|
839
|
+
return
|
|
840
|
+
|
|
841
|
+
json_data = {
|
|
842
|
+
"ip_table_restore": "disable",
|
|
843
|
+
"workloads": []
|
|
844
|
+
}
|
|
845
|
+
|
|
846
|
+
if type(href_or_workload_array[0]) is str:
|
|
847
|
+
for href in href_or_workload_array:
|
|
848
|
+
json_data['workloads'].append({"href": href})
|
|
849
|
+
else:
|
|
850
|
+
href: 'pylo.Workload'
|
|
851
|
+
for href in href_or_workload_array:
|
|
852
|
+
json_data['workloads'].append({"href": href.href})
|
|
853
|
+
|
|
854
|
+
# print(json_data)
|
|
855
|
+
|
|
856
|
+
path = "/workloads/unpair"
|
|
857
|
+
|
|
858
|
+
return self.do_put_call(path=path, json_arguments=json_data, json_output_expected=False)
|
|
859
|
+
|
|
860
|
+
def objects_workload_create_single_unmanaged(self, json_object):
|
|
861
|
+
path = '/workloads'
|
|
862
|
+
return self.do_post_call(path=path, json_arguments=json_object)
|
|
863
|
+
|
|
864
|
+
def objects_workload_create_bulk_unmanaged(self, json_object):
|
|
865
|
+
path = '/workloads/bulk_create'
|
|
866
|
+
return self.do_put_call(path=path, json_arguments=json_object)
|
|
867
|
+
|
|
868
|
+
def objects_service_get(self, max_results: int = None, async_mode=True):
|
|
869
|
+
path = '/sec_policy/draft/services'
|
|
870
|
+
data = {}
|
|
871
|
+
|
|
872
|
+
if max_results is not None:
|
|
873
|
+
data['max_results'] = max_results
|
|
874
|
+
|
|
875
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
876
|
+
|
|
877
|
+
def objects_service_delete(self, href):
|
|
878
|
+
"""
|
|
879
|
+
|
|
880
|
+
:type href: str|pylo.Workload
|
|
881
|
+
"""
|
|
882
|
+
path = href
|
|
883
|
+
if type(href) is pylo.Service:
|
|
884
|
+
path = href.href
|
|
885
|
+
|
|
886
|
+
return self.do_delete_call(path=path, json_output_expected=False, include_org_id=False)
|
|
887
|
+
|
|
888
|
+
def objects_network_device_get(self,
|
|
889
|
+
max_results: int = None) -> List[NetworkDeviceObjectJsonStructure]:
|
|
890
|
+
path = '/network_devices'
|
|
891
|
+
data = {}
|
|
892
|
+
|
|
893
|
+
if max_results is not None:
|
|
894
|
+
data['max_results'] = max_results
|
|
895
|
+
|
|
896
|
+
return self.do_get_call(path=path, async_call=False, params=data)
|
|
897
|
+
|
|
898
|
+
def object_network_device_endpoints_get(self, network_device_href: str) -> List[NetworkDeviceEndpointObjectJsonStructure]:
|
|
899
|
+
path = '{}/network_endpoints'.format(network_device_href)
|
|
900
|
+
data = {}
|
|
901
|
+
|
|
902
|
+
return self.do_get_call(path=path, async_call=False, include_org_id=False )
|
|
903
|
+
|
|
904
|
+
def object_network_device_endpoint_create(self, network_device_href: str, name: str, endpoint_type: Literal['switch_port'], workloads_href: List[str]) -> List[NetworkDeviceEndpointObjectJsonStructure]:
|
|
905
|
+
path = '{}/network_endpoints'.format(network_device_href)
|
|
906
|
+
|
|
907
|
+
worklaods_href_objects = []
|
|
908
|
+
for workload_href in workloads_href:
|
|
909
|
+
worklaods_href_objects.append({'href': workload_href})
|
|
910
|
+
|
|
911
|
+
data = { 'config': { 'name': name, 'endpoint_type': endpoint_type }, 'workloads': worklaods_href_objects}
|
|
912
|
+
|
|
913
|
+
return self.do_post_call(path=path, async_call=False, include_org_id=False, json_arguments=data, json_output_expected=True)
|
|
914
|
+
|
|
915
|
+
|
|
916
|
+
def objects_ruleset_get(self, max_results: int = None, async_mode=True) -> List[RulesetObjectJsonStructure]:
|
|
917
|
+
path = '/sec_policy/draft/rule_sets'
|
|
918
|
+
data = {}
|
|
919
|
+
|
|
920
|
+
if max_results is not None:
|
|
921
|
+
data['max_results'] = max_results
|
|
922
|
+
|
|
923
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
924
|
+
|
|
925
|
+
def objects_ruleset_create(self, name: str,
|
|
926
|
+
scope_app: 'pylo.Label' = None,
|
|
927
|
+
scope_env: 'pylo.Label' = None,
|
|
928
|
+
scope_loc: 'pylo.Label' = None,
|
|
929
|
+
description: str = '', enabled: bool = True) -> Dict:
|
|
930
|
+
path = '/sec_policy/draft/rule_sets'
|
|
931
|
+
|
|
932
|
+
scope = []
|
|
933
|
+
if scope_app is not None:
|
|
934
|
+
scope.append(scope_app.get_api_reference_json())
|
|
935
|
+
if scope_env is not None:
|
|
936
|
+
scope.append(scope_env.get_api_reference_json())
|
|
937
|
+
if scope_app is not None:
|
|
938
|
+
scope.append(scope_loc.get_api_reference_json())
|
|
939
|
+
|
|
940
|
+
data = {
|
|
941
|
+
'name': name,
|
|
942
|
+
'enabled': enabled,
|
|
943
|
+
'description': description,
|
|
944
|
+
'scopes': [scope]
|
|
945
|
+
}
|
|
946
|
+
|
|
947
|
+
return self.do_post_call(path=path, json_arguments=data, json_output_expected=True)
|
|
948
|
+
|
|
949
|
+
def objects_ruleset_update(self, ruleset_href: str, update_data: RulesetObjectUpdateStructure):
|
|
950
|
+
return self.do_put_call(path=ruleset_href,
|
|
951
|
+
json_arguments=update_data,
|
|
952
|
+
include_org_id=False,
|
|
953
|
+
json_output_expected=False
|
|
954
|
+
)
|
|
955
|
+
|
|
956
|
+
def objects_ruleset_delete(self, ruleset_href: str):
|
|
957
|
+
return self.do_delete_call(path=ruleset_href,
|
|
958
|
+
include_org_id=False,
|
|
959
|
+
json_output_expected=False
|
|
960
|
+
)
|
|
961
|
+
|
|
962
|
+
def objects_rule_update(self, rule_href: str, update_data):
|
|
963
|
+
return self.do_put_call(path=rule_href,
|
|
964
|
+
json_arguments=update_data,
|
|
965
|
+
include_org_id=False,
|
|
966
|
+
json_output_expected=False
|
|
967
|
+
)
|
|
968
|
+
|
|
969
|
+
def objects_rule_delete(self, rule_href: str):
|
|
970
|
+
return self.do_delete_call(path=rule_href,
|
|
971
|
+
include_org_id=False,
|
|
972
|
+
json_output_expected=False
|
|
973
|
+
)
|
|
974
|
+
|
|
975
|
+
def objects_rule_create(self, ruleset_href: str,
|
|
976
|
+
intra_scope: bool,
|
|
977
|
+
consumers: List[Union[WorkloadHrefRef, IPListHrefRef, VirtualServiceHrefRef, 'pylo.IPList', 'pylo.Label', 'pylo.LabelGroup']],
|
|
978
|
+
providers: List[Union[WorkloadHrefRef, IPListHrefRef, VirtualServiceHrefRef, 'pylo.IPList', 'pylo.Label', 'pylo.LabelGroup']],
|
|
979
|
+
services: List[Union['pylo.Service', 'pylo.DirectServiceInRule', RuleDirectServiceReferenceObjectJsonStructure]],
|
|
980
|
+
description='', machine_auth=False, secure_connect=False, enabled=True,
|
|
981
|
+
stateless=False, consuming_security_principals=None,
|
|
982
|
+
resolve_consumers_as_virtual_services=True, resolve_consumers_as_workloads=True,
|
|
983
|
+
resolve_providers_as_virtual_services=True, resolve_providers_as_workloads=True) \
|
|
984
|
+
-> Dict[str, Any]:
|
|
985
|
+
|
|
986
|
+
if consuming_security_principals is None:
|
|
987
|
+
consuming_security_principals = []
|
|
988
|
+
|
|
989
|
+
resolve_consumers = []
|
|
990
|
+
if resolve_consumers_as_virtual_services:
|
|
991
|
+
resolve_consumers.append('virtual_services')
|
|
992
|
+
if resolve_consumers_as_workloads:
|
|
993
|
+
resolve_consumers.append('workloads')
|
|
994
|
+
|
|
995
|
+
resolve_providers = []
|
|
996
|
+
if resolve_providers_as_virtual_services:
|
|
997
|
+
resolve_providers.append('virtual_services')
|
|
998
|
+
if resolve_providers_as_workloads:
|
|
999
|
+
resolve_providers.append('workloads')
|
|
1000
|
+
|
|
1001
|
+
consumers_json = []
|
|
1002
|
+
for item in consumers:
|
|
1003
|
+
if type(item) is dict:
|
|
1004
|
+
consumers_json.append(item)
|
|
1005
|
+
else:
|
|
1006
|
+
consumers_json.append(item.get_api_reference_json())
|
|
1007
|
+
|
|
1008
|
+
providers_json = []
|
|
1009
|
+
for item in providers:
|
|
1010
|
+
if type(item) is dict:
|
|
1011
|
+
providers_json.append(item)
|
|
1012
|
+
else:
|
|
1013
|
+
providers_json.append(item.get_api_reference_json())
|
|
1014
|
+
|
|
1015
|
+
services_json = []
|
|
1016
|
+
for item in services:
|
|
1017
|
+
if type(item) is dict:
|
|
1018
|
+
services_json.append(item)
|
|
1019
|
+
elif type(item) is pylo.DirectServiceInRule:
|
|
1020
|
+
services_json.append(item.get_api_json())
|
|
1021
|
+
else:
|
|
1022
|
+
services_json.append(item.get_api_reference_json())
|
|
1023
|
+
|
|
1024
|
+
data = {
|
|
1025
|
+
'unscoped_consumers': not intra_scope,
|
|
1026
|
+
'description': description,
|
|
1027
|
+
'machine_auth': machine_auth,
|
|
1028
|
+
'sec_connect': secure_connect,
|
|
1029
|
+
'enabled': enabled,
|
|
1030
|
+
'stateless': stateless,
|
|
1031
|
+
'consuming_security_principals': consuming_security_principals,
|
|
1032
|
+
'resolve_labels_as': {'providers': resolve_providers, 'consumers': resolve_consumers,},
|
|
1033
|
+
'consumers': consumers_json,
|
|
1034
|
+
'providers': providers_json,
|
|
1035
|
+
'ingress_services': services_json
|
|
1036
|
+
}
|
|
1037
|
+
|
|
1038
|
+
path = ruleset_href+'/sec_rules'
|
|
1039
|
+
|
|
1040
|
+
return self.do_post_call(path, json_arguments=data, json_output_expected=True, include_org_id=False)
|
|
1041
|
+
|
|
1042
|
+
def objects_securityprincipal_get(self, max_results: int = None, async_mode=True) -> List[SecurityPrincipalObjectJsonStructure]:
|
|
1043
|
+
path = '/security_principals'
|
|
1044
|
+
data = {}
|
|
1045
|
+
|
|
1046
|
+
if max_results is not None:
|
|
1047
|
+
data['max_results'] = max_results
|
|
1048
|
+
|
|
1049
|
+
return self.do_get_call(path=path, async_call=async_mode, params=data)
|
|
1050
|
+
|
|
1051
|
+
def objects_securityprincipal_create(self, name: str = None, sid: str = None, json_data=None) -> str:
|
|
1052
|
+
"""
|
|
1053
|
+
|
|
1054
|
+
:param name: friendly name for this object
|
|
1055
|
+
:param sid: Windows SID for that Group
|
|
1056
|
+
:param json_data:
|
|
1057
|
+
:return: HREF of the created Security Principal
|
|
1058
|
+
"""
|
|
1059
|
+
path = '/security_principals'
|
|
1060
|
+
|
|
1061
|
+
if json_data is not None and name is not None:
|
|
1062
|
+
raise pylo.PyloApiEx("You must either use json_data or name but you cannot use both they are mutually exclusive")
|
|
1063
|
+
|
|
1064
|
+
if json_data is not None:
|
|
1065
|
+
return get_field_or_die('href', self.do_post_call(path=path, json_arguments=json_data))
|
|
1066
|
+
|
|
1067
|
+
if name is None:
|
|
1068
|
+
raise pylo.PyloApiEx("You need to provide a group name")
|
|
1069
|
+
if sid is None:
|
|
1070
|
+
raise pylo.PyloApiEx("You need to provide a SID")
|
|
1071
|
+
|
|
1072
|
+
return get_field_or_die('href', self.do_post_call(path=path, json_arguments={'name': name, 'sid': sid}))
|
|
1073
|
+
|
|
1074
|
+
class ApiAgentCompatibilityReport:
|
|
1075
|
+
|
|
1076
|
+
class ApiAgentCompatibilityReportItem:
|
|
1077
|
+
def __init__(self, name, value, status, extra_debug_message=None):
|
|
1078
|
+
self.name = name
|
|
1079
|
+
self.value = value
|
|
1080
|
+
self.status = status
|
|
1081
|
+
self.extra_debug_message = extra_debug_message
|
|
1082
|
+
|
|
1083
|
+
def __init__(self, raw_json):
|
|
1084
|
+
self._items = {}
|
|
1085
|
+
self.empty = False
|
|
1086
|
+
|
|
1087
|
+
if len(raw_json) == 0:
|
|
1088
|
+
self.empty = True
|
|
1089
|
+
return
|
|
1090
|
+
|
|
1091
|
+
self.global_status = raw_json.get('qualify_status')
|
|
1092
|
+
if self.global_status is None:
|
|
1093
|
+
raise pylo.PyloEx('Cannot find Compatibility Report status in JSON', json_object=raw_json)
|
|
1094
|
+
|
|
1095
|
+
results = raw_json.get('results')
|
|
1096
|
+
if results is None:
|
|
1097
|
+
raise pylo.PyloEx('Cannot find Compatibility Report results in JSON', json_object=raw_json)
|
|
1098
|
+
|
|
1099
|
+
results = results.get('qualify_tests')
|
|
1100
|
+
if results is None:
|
|
1101
|
+
raise pylo.PyloEx('Cannot find Compatibility Report results in JSON', json_object=raw_json)
|
|
1102
|
+
|
|
1103
|
+
for result in results:
|
|
1104
|
+
status = result.get('status')
|
|
1105
|
+
if status is None:
|
|
1106
|
+
continue
|
|
1107
|
+
|
|
1108
|
+
for result_name in result.keys():
|
|
1109
|
+
if result_name == 'status':
|
|
1110
|
+
continue
|
|
1111
|
+
self._items[result_name] = APIConnector.ApiAgentCompatibilityReport.ApiAgentCompatibilityReportItem(result_name, result[result_name], status)
|
|
1112
|
+
if result_name == "required_packages_installed" and status != "green":
|
|
1113
|
+
for tmp in results:
|
|
1114
|
+
if "required_packages_missing" in tmp:
|
|
1115
|
+
extra_infos = 'missing packages:{}'.format(pylo.string_list_to_text(tmp["required_packages_missing"]))
|
|
1116
|
+
self._items[result_name].extra_debug_message = extra_infos
|
|
1117
|
+
break
|
|
1118
|
+
|
|
1119
|
+
def get_failed_items(self) -> Dict[str, 'APIConnector.ApiAgentCompatibilityReport.ApiAgentCompatibilityReportItem']:
|
|
1120
|
+
results: Dict[str, 'pylo.APIConnector.ApiAgentCompatibilityReport.ApiAgentCompatibilityReportItem'] = {}
|
|
1121
|
+
for infos in self._items.values():
|
|
1122
|
+
if infos.status != 'green':
|
|
1123
|
+
results[infos.name] = infos
|
|
1124
|
+
|
|
1125
|
+
return results
|
|
1126
|
+
|
|
+    def agent_get_compatibility_report(self, agent_href: str = None, agent_id: str = None, return_raw_json=True) \
+            -> Union['pylo.APIConnector.ApiAgentCompatibilityReport', Dict[str, Any]]:
+        if agent_href is None and agent_id is None:
+            raise pylo.PyloEx('you need to provide a HREF or an ID')
+        if agent_href is not None and agent_id is not None:
+            raise pylo.PyloEx('you need to provide a HREF or an ID but not BOTH')
+
+        include_org_id_in_api_query = False
+
+        if agent_href is None:
+            path = '/agents/{}/compatibility_report'.format(agent_id)
+            include_org_id_in_api_query = True
+        else:
+            path = '{}/compatibility_report'.format(agent_href)
+
+        if return_raw_json:
+            return self.do_get_call(path=path, include_org_id=include_org_id_in_api_query)
+
+        retry_count = 5
+        api_result = None
+
+        while retry_count >= 0:
+            retry_count -= 1
+            try:
+                api_result = self.do_get_call(path=path, include_org_id=include_org_id_in_api_query)
+                break
+
+            except pylo.PyloApiTooManyRequestsEx as ex:
+                if retry_count <= 0:
+                    raise ex
+                time.sleep(6)
+
+        return APIConnector.ApiAgentCompatibilityReport(api_result)
+
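A short usage sketch of the compatibility report call above, assuming `connector` is an already-initialized pylo.APIConnector and that the agent HREF below is a placeholder for a real VEN HREF from your PCE:

agent_href = '/orgs/1/agents/1234'  # placeholder HREF, not a real object
report = connector.agent_get_compatibility_report(agent_href=agent_href, return_raw_json=False)
if not report.empty:
    print("global status:", report.global_status)
    for name, item in report.get_failed_items().items():
        # each failed check exposes its raw value, status and optional extra debug details
        print(name, item.value, item.status, item.extra_debug_message)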
+    def objects_agent_change_mode(self, agent_href: str, mode: str):
+        path = agent_href
+
+        if mode != 'build' and mode != 'idle' and mode != 'test':
+            raise pylo.PyloEx("unsupported mode {}".format(mode))
+
+        log_traffic = False
+
+        if mode == 'build':
+            mode = 'illuminated'
+        elif mode == 'test':
+            mode = 'illuminated'
+            log_traffic = True
+
+        data = {"agent": {"config": {"mode": mode, 'log_traffic': log_traffic}}}
+
+        return self.do_put_call(path, json_arguments=data, include_org_id=False, json_output_expected=False)
+
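A hedged sketch of switching a VEN's mode with the helper above ('build' and 'test' both map to 'illuminated', with traffic logging only for 'test'); `connector` and the agent HREF are assumed placeholders:

connector.objects_agent_change_mode('/orgs/1/agents/1234', 'test')  # illuminated with log_traffic=True
connector.objects_agent_change_mode('/orgs/1/agents/1234', 'idle')  # 'idle' is passed through unchanged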
+    def objects_agent_reassign_pce(self, agent_href: str, target_pce: str):
+        """
+        Reassign an agent to a different PCE
+        :param agent_href: HREF of the agent (VEN) to reassign
+        :param target_pce: FQDN of the target PCE
+        :return: API call response
+        """
+        path = agent_href + '/update'
+        data = {"target_pce_fqdn": target_pce}
+        return self.do_put_call(path, json_arguments=data, include_org_id=False, json_output_expected=False)
+
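And a matching one-line sketch for PCE reassignment, again assuming an initialized `connector` and a placeholder agent HREF:

connector.objects_agent_reassign_pce('/orgs/1/agents/1234', target_pce='pce2.example.com')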
+    def explorer_async_queries_all_status_get(self):
+        """
+        Get the status of all async queries
+        """
+        return self.do_get_call('/traffic_flows/async_queries', json_output_expected=True, include_org_id=True)
+
+    def explorer_async_query_get_specific_request_status(self, request_href: str):
+        all_statuses = self.explorer_async_queries_all_status_get()
+        for status in all_statuses:
+            if status['href'] == request_href:
+                return status
+
+        raise pylo.PyloObjectNotFound("Request with HREF {} not found".format(request_href))
+
+    def explorer_search(self, filters: Union[Dict, 'pylo.ExplorerFilterSetV1'],
+                        max_running_time_seconds=1800,
+                        check_for_update_interval_seconds=10) -> 'pylo.ExplorerResultSetV1':
+        path = "/traffic_flows/async_queries"
+        if isinstance(filters, pylo.ExplorerFilterSetV1):
+            data = filters.generate_json_query()
+        else:
+            data = filters
+
+        query_queued_json_response = self.do_post_call(path, json_arguments=data, include_org_id=True,
+                                                       json_output_expected=True)
+
+        if 'status' not in query_queued_json_response:
+            raise pylo.PyloApiEx("Invalid response from API, missing 'status' property", query_queued_json_response)
+
+        if query_queued_json_response['status'] != "queued":
+            raise pylo.PyloApiEx("Invalid response from API, 'status' property is not 'queued'", query_queued_json_response)
+
+        if 'href' not in query_queued_json_response:
+            raise pylo.PyloApiEx("Invalid response from API, missing 'href' property", query_queued_json_response)
+
+        query_href = query_queued_json_response['href']
+        # check that query_href is a string
+        if not isinstance(query_href, str):
+            raise pylo.PyloApiEx("Invalid response from API, 'href' property is not a string", query_queued_json_response)
+
+        # record the start timestamp so we never wait longer than max_running_time_seconds
+        start_time = time.time()
+
+        query_status = None  # JSON response from the API for this specific query
+
+        while True:
+            # make sure we don't wait too long
+            if time.time() - start_time > max_running_time_seconds:
+                raise pylo.PyloApiEx("Timeout while waiting for query to complete", query_queued_json_response)
+
+            queries_status_json_response = self.explorer_async_query_get_specific_request_status(query_href)
+            if queries_status_json_response['status'] == "completed":
+                query_status = queries_status_json_response
+                break
+
+            if queries_status_json_response['status'] not in ["queued", "working"]:
+                raise pylo.PyloApiEx("Query failed with status {}".format(queries_status_json_response['status']),
+                                     queries_status_json_response)
+
+            time.sleep(check_for_update_interval_seconds)
+
+        if query_status is None:
+            raise pylo.PyloEx("Unexpected logic where query_status is None", query_queued_json_response)
+
+        query_json_response = self.do_get_call(query_href + "/download", json_output_expected=True, include_org_id=False)
+
+        result = pylo.ExplorerResultSetV1(query_json_response,
+                                          owner=self,
+                                          emulated_process_exclusion=filters.exclude_processes_emulate)
+
+        return result
+
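A minimal sketch of driving the async Explorer flow through explorer_search(), assuming `connector` is an initialized pylo.APIConnector; the filter set is built with the ExplorerFilterSetV1 helpers from illumio_pylo/API/Explorer.py, which are not part of this hunk, so the constructor call below is only indicative:

import illumio_pylo as pylo

filter_set = pylo.ExplorerFilterSetV1(max_results=500)  # indicative only; see Explorer.py for the actual constructor and filter helpers
result_set = connector.explorer_search(filter_set,
                                       max_running_time_seconds=600,
                                       check_for_update_interval_seconds=5)
# result_set is a pylo.ExplorerResultSetV1 built from the /download payload of the completed async query

Note that a plain dict is accepted for submitting the query, but the final ExplorerResultSetV1 wiring reads filters.exclude_processes_emulate, so an ExplorerFilterSetV1 instance is the safer input type.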
+    def cluster_health_get(self, return_object=False):
+        path = '/health'
+
+        if not return_object:
+            return self.do_get_call(path)
+
+        # build a dict of ClusterHealth objects instead of returning the raw list
+        json_output = self.do_get_call(path, include_org_id=False)
+        if type(json_output) is not list:
+            raise pylo.PyloEx("A list object was expected but we received a '{}' instead".format(type(json_output)))
+
+        dict_of_health_reports = {}
+
+        for single_output in json_output:
+            new_report = pylo.ClusterHealth(single_output)
+            dict_of_health_reports[new_report.fqdn] = new_report
+
+        return dict_of_health_reports
+
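A quick sketch of the two return shapes of cluster_health_get(), assuming an initialized `connector`:

raw_json = connector.cluster_health_get()                   # raw /health payload
reports = connector.cluster_health_get(return_object=True)  # dict of pylo.ClusterHealth keyed by PCE FQDN
for fqdn, report in reports.items():
    print(fqdn, report)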
+    def new_rule_search_query(self) -> 'pylo.RuleSearchQuery':
+        return pylo.RuleSearchQuery(self)
+
+    def new_explorer_query(self, max_results: int = 1500, max_running_time_seconds: int = 1800,
+                           check_for_update_interval_seconds: int = 10) -> 'pylo.ExplorerQuery':
+        return pylo.ExplorerQuery(self, max_results, max_running_time_seconds, check_for_update_interval_seconds)
+
+
+    def new_audit_log_query(self, max_results: int = 10000, max_running_time_seconds: int = 1800,
+                            check_for_update_interval_seconds: int = 10) -> 'pylo.AuditLogQuery':
+        return pylo.AuditLogQuery(self, max_results, max_running_time_seconds)
+
+    def audit_log_query(self, max_results=1000, event_type: Optional[str] = None) -> List[AuditLogApiReplyEventJsonStructure]:
+        url = '/events'
+        args = {'max_results': max_results}
+        if event_type is not None:
+            args['event_type'] = event_type
+
+        return self.do_get_call(path=url, params=args)
+
+
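A sketch of pulling PCE events directly with audit_log_query(), assuming an initialized `connector`; the event_type value and the keys read from each event are only examples of the AuditLogApiReplyEventJsonStructure payload, not guaranteed by this hunk:

events = connector.audit_log_query(max_results=100, event_type='user.login')  # event_type value is an example only
for event in events:
    print(event.get('href'), event.get('event_type'))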
+    def get_pce_ui_workload_url(self, href: str) -> str:
+        # extract UUID from workload HREF:
+        uuid = href.split('/')[-1]
+        return self._make_base_url('/#/workloads/' + uuid)
+
+
+
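Finally, a one-line sketch of the UI deep-link helper above, with a placeholder workload HREF:

ui_url = connector.get_pce_ui_workload_url('/orgs/1/workloads/00000000-0000-0000-0000-000000000000')
print(ui_url)  # resolved against the connector's base URL via _make_base_url()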