contentctl 4.4.7__py3-none-any.whl → 5.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- contentctl/__init__.py +1 -1
- contentctl/actions/build.py +102 -57
- contentctl/actions/deploy_acs.py +29 -24
- contentctl/actions/detection_testing/DetectionTestingManager.py +66 -42
- contentctl/actions/detection_testing/GitService.py +134 -76
- contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
- contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +192 -147
- contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
- contentctl/actions/detection_testing/progress_bar.py +9 -6
- contentctl/actions/detection_testing/views/DetectionTestingView.py +16 -19
- contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
- contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
- contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
- contentctl/actions/doc_gen.py +9 -5
- contentctl/actions/initialize.py +45 -33
- contentctl/actions/inspect.py +118 -61
- contentctl/actions/new_content.py +155 -108
- contentctl/actions/release_notes.py +276 -146
- contentctl/actions/reporting.py +23 -19
- contentctl/actions/test.py +33 -28
- contentctl/actions/validate.py +55 -34
- contentctl/api.py +54 -45
- contentctl/contentctl.py +124 -90
- contentctl/enrichments/attack_enrichment.py +112 -72
- contentctl/enrichments/cve_enrichment.py +34 -28
- contentctl/enrichments/splunk_app_enrichment.py +38 -36
- contentctl/helper/link_validator.py +101 -78
- contentctl/helper/splunk_app.py +69 -41
- contentctl/helper/utils.py +58 -53
- contentctl/input/director.py +68 -36
- contentctl/input/new_content_questions.py +27 -35
- contentctl/input/yml_reader.py +28 -18
- contentctl/objects/abstract_security_content_objects/detection_abstract.py +303 -259
- contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +115 -52
- contentctl/objects/alert_action.py +10 -9
- contentctl/objects/annotated_types.py +1 -1
- contentctl/objects/atomic.py +65 -54
- contentctl/objects/base_test.py +5 -3
- contentctl/objects/base_test_result.py +19 -11
- contentctl/objects/baseline.py +62 -30
- contentctl/objects/baseline_tags.py +30 -24
- contentctl/objects/config.py +790 -597
- contentctl/objects/constants.py +33 -56
- contentctl/objects/correlation_search.py +150 -136
- contentctl/objects/dashboard.py +55 -41
- contentctl/objects/data_source.py +16 -17
- contentctl/objects/deployment.py +43 -44
- contentctl/objects/deployment_email.py +3 -2
- contentctl/objects/deployment_notable.py +4 -2
- contentctl/objects/deployment_phantom.py +7 -6
- contentctl/objects/deployment_rba.py +3 -2
- contentctl/objects/deployment_scheduling.py +3 -2
- contentctl/objects/deployment_slack.py +3 -2
- contentctl/objects/detection.py +5 -2
- contentctl/objects/detection_metadata.py +1 -0
- contentctl/objects/detection_stanza.py +7 -2
- contentctl/objects/detection_tags.py +58 -103
- contentctl/objects/drilldown.py +66 -34
- contentctl/objects/enums.py +81 -100
- contentctl/objects/errors.py +16 -24
- contentctl/objects/integration_test.py +3 -3
- contentctl/objects/integration_test_result.py +1 -0
- contentctl/objects/investigation.py +59 -36
- contentctl/objects/investigation_tags.py +30 -19
- contentctl/objects/lookup.py +304 -101
- contentctl/objects/macro.py +55 -39
- contentctl/objects/manual_test.py +3 -3
- contentctl/objects/manual_test_result.py +1 -0
- contentctl/objects/mitre_attack_enrichment.py +17 -16
- contentctl/objects/notable_action.py +2 -1
- contentctl/objects/notable_event.py +1 -3
- contentctl/objects/playbook.py +37 -35
- contentctl/objects/playbook_tags.py +23 -13
- contentctl/objects/rba.py +96 -0
- contentctl/objects/risk_analysis_action.py +15 -11
- contentctl/objects/risk_event.py +110 -160
- contentctl/objects/risk_object.py +1 -0
- contentctl/objects/savedsearches_conf.py +9 -7
- contentctl/objects/security_content_object.py +5 -2
- contentctl/objects/story.py +54 -49
- contentctl/objects/story_tags.py +56 -45
- contentctl/objects/test_attack_data.py +2 -1
- contentctl/objects/test_group.py +5 -2
- contentctl/objects/threat_object.py +1 -0
- contentctl/objects/throttling.py +27 -18
- contentctl/objects/unit_test.py +3 -4
- contentctl/objects/unit_test_baseline.py +5 -5
- contentctl/objects/unit_test_result.py +6 -6
- contentctl/output/api_json_output.py +233 -220
- contentctl/output/attack_nav_output.py +21 -21
- contentctl/output/attack_nav_writer.py +29 -37
- contentctl/output/conf_output.py +235 -172
- contentctl/output/conf_writer.py +201 -125
- contentctl/output/data_source_writer.py +38 -26
- contentctl/output/doc_md_output.py +53 -27
- contentctl/output/jinja_writer.py +19 -15
- contentctl/output/json_writer.py +21 -11
- contentctl/output/svg_output.py +56 -38
- contentctl/output/templates/analyticstories_detections.j2 +2 -2
- contentctl/output/templates/analyticstories_stories.j2 +1 -1
- contentctl/output/templates/collections.j2 +1 -1
- contentctl/output/templates/doc_detections.j2 +0 -5
- contentctl/output/templates/es_investigations_investigations.j2 +1 -1
- contentctl/output/templates/es_investigations_stories.j2 +1 -1
- contentctl/output/templates/savedsearches_baselines.j2 +2 -2
- contentctl/output/templates/savedsearches_detections.j2 +10 -11
- contentctl/output/templates/savedsearches_investigations.j2 +2 -2
- contentctl/output/templates/transforms.j2 +6 -8
- contentctl/output/yml_writer.py +29 -20
- contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +16 -34
- contentctl/templates/stories/cobalt_strike.yml +1 -0
- {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/METADATA +5 -4
- contentctl-5.0.0.dist-info/RECORD +168 -0
- {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/WHEEL +1 -1
- contentctl/actions/initialize_old.py +0 -245
- contentctl/objects/event_source.py +0 -11
- contentctl/objects/observable.py +0 -37
- contentctl/output/detection_writer.py +0 -28
- contentctl/output/new_content_yml_output.py +0 -56
- contentctl/output/yml_output.py +0 -66
- contentctl-4.4.7.dist-info/RECORD +0 -173
- {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/LICENSE.md +0 -0
- {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/entry_points.txt +0 -0
contentctl/helper/splunk_app.py
CHANGED
@@ -39,6 +39,7 @@ class RetryConstant:
 
 class SplunkBaseError(requests.HTTPError):
     """An error raise in communicating with Splunkbase"""
+
     pass
 
 
@@ -50,6 +51,7 @@ class SplunkApp:
 
     class InitializationError(Exception):
         """An initialization error during SplunkApp setup"""
+
        pass
 
    @staticmethod
@@ -68,16 +70,16 @@ class SplunkApp:
             status_forcelist=status_forcelist,
         )
         adapter = HTTPAdapter(max_retries=retry)
-        session.mount('http://', adapter)
-        session.mount('https://', adapter)
+        session.mount("http://", adapter)
+        session.mount("https://", adapter)
         return session
 
     def __init__(
-            self,
-            app_uid: Optional[int] = None,
-            app_name_id: Optional[str] = None,
-            manual_setup: bool = False
-            ) -> None:
+        self,
+        app_uid: Optional[int] = None,
+        app_name_id: Optional[str] = None,
+        manual_setup: bool = False,
+    ) -> None:
         if app_uid is None and app_name_id is None:
             raise SplunkApp.InitializationError(
                 "Either app_uid (the numeric app UID e.g. 742) or app_name_id (the app name "
@@ -123,18 +125,22 @@ class SplunkApp:
         if self._app_info_cache is not None:
             return self._app_info_cache
         elif self.app_uid is None:
-            raise SplunkApp.InitializationError("app_uid must be set in order to fetch app info")
+            raise SplunkApp.InitializationError(
+                "app_uid must be set in order to fetch app info"
+            )
 
         # NOTE: auth not required
         # Get app info by uid
         try:
             response = self.requests_retry_session().get(
                 APIEndPoint.SPLUNK_BASE_APP_INFO.format(app_uid=self.app_uid),
-                timeout=RetryConstant.RETRY_INTERVAL
+                timeout=RetryConstant.RETRY_INTERVAL,
             )
             response.raise_for_status()
         except requests.exceptions.RequestException as e:
-            raise SplunkBaseError(f"Error fetching app info for app_uid {self.app_uid}: {str(e)}")
+            raise SplunkBaseError(
+                f"Error fetching app info for app_uid {self.app_uid}: {str(e)}"
+            )
 
         # parse JSON and set cache
         self._app_info_cache: dict = json.loads(response.content)
@@ -156,7 +162,9 @@ class SplunkApp:
         if "appid" in app_info:
             self.app_name_id = app_info["appid"]
         else:
-            raise SplunkBaseError(f"Invalid response from Splunkbase; missing key 'appid': {app_info}")
+            raise SplunkBaseError(
+                f"Invalid response from Splunkbase; missing key 'appid': {app_info}"
+            )
 
     def set_app_uid(self) -> None:
         """
@@ -166,19 +174,25 @@ class SplunkApp:
         if self.app_uid is not None:
             return
         elif self.app_name_id is None:
-            raise SplunkApp.InitializationError("app_name_id must be set in order to fetch app_uid")
+            raise SplunkApp.InitializationError(
+                "app_name_id must be set in order to fetch app_uid"
+            )
 
         # NOTE: auth not required
         # Get app_uid by app_name_id via a redirect
         try:
             response = self.requests_retry_session().get(
-                APIEndPoint.SPLUNK_BASE_GET_UID_REDIRECT.format(app_name_id=self.app_name_id),
+                APIEndPoint.SPLUNK_BASE_GET_UID_REDIRECT.format(
+                    app_name_id=self.app_name_id
+                ),
                 allow_redirects=False,
-                timeout=RetryConstant.RETRY_INTERVAL
+                timeout=RetryConstant.RETRY_INTERVAL,
             )
             response.raise_for_status()
         except requests.exceptions.RequestException as e:
-            raise SplunkBaseError(f"Error fetching app_uid for app_name_id '{self.app_name_id}': {str(e)}")
+            raise SplunkBaseError(
+                f"Error fetching app_uid for app_name_id '{self.app_name_id}': {str(e)}"
+            )
 
         # Extract the app_uid from the redirect path
         if "Location" in response.headers:
@@ -199,7 +213,9 @@ class SplunkApp:
         if "title" in app_info:
             self.app_title = app_info["title"]
         else:
-            raise SplunkBaseError(f"Invalid response from Splunkbase; missing key 'title': {app_info}")
+            raise SplunkBaseError(
+                f"Invalid response from Splunkbase; missing key 'title': {app_info}"
+            )
 
     def __fetch_url_latest_version_info(self) -> str:
         """
@@ -209,12 +225,16 @@ class SplunkApp:
         # retrieve app entries using the app_name_id
         try:
             response = self.requests_retry_session().get(
-                APIEndPoint.SPLUNK_BASE_FETCH_APP_BY_ENTRY_ID.format(app_name_id=self.app_name_id),
-                timeout=RetryConstant.RETRY_INTERVAL
+                APIEndPoint.SPLUNK_BASE_FETCH_APP_BY_ENTRY_ID.format(
+                    app_name_id=self.app_name_id
+                ),
+                timeout=RetryConstant.RETRY_INTERVAL,
             )
             response.raise_for_status()
         except requests.exceptions.RequestException as e:
-            raise SplunkBaseError(f"Error fetching app entries for app_name_id '{self.app_name_id}': {str(e)}")
+            raise SplunkBaseError(
+                f"Error fetching app entries for app_name_id '{self.app_name_id}': {str(e)}"
+            )
 
         # parse xml
         app_xml = xmltodict.parse(response.content)
@@ -231,7 +251,9 @@ class SplunkApp:
                 return entry.get("link").get("@href")
 
         # raise if no entry was found
-        raise SplunkBaseError(f"No app entry found with 'islatest' tag set to True: {self.app_name_id}")
+        raise SplunkBaseError(
+            f"No app entry found with 'islatest' tag set to True: {self.app_name_id}"
+        )
 
     def __fetch_url_latest_version_download(self, info_url: str) -> str:
         """
@@ -241,10 +263,14 @@ class SplunkApp:
         """
         # fetch download info
         try:
-            response = self.requests_retry_session().get(info_url, timeout=RetryConstant.RETRY_INTERVAL)
+            response = self.requests_retry_session().get(
+                info_url, timeout=RetryConstant.RETRY_INTERVAL
+            )
             response.raise_for_status()
         except requests.exceptions.RequestException as e:
-            raise SplunkBaseError(f"Error fetching download info for app_name_id '{self.app_name_id}': {str(e)}")
+            raise SplunkBaseError(
+                f"Error fetching download info for app_name_id '{self.app_name_id}': {str(e)}"
+            )
 
         # parse XML and extract download URL
         build_xml = xmltodict.parse(response.content)
@@ -254,14 +280,18 @@ class SplunkApp:
     def set_latest_version_info(self) -> None:
         # raise if app_name_id not set
         if self.app_name_id is None:
-            raise SplunkApp.InitializationError("app_name_id must be set in order to fetch latest version info")
+            raise SplunkApp.InitializationError(
+                "app_name_id must be set in order to fetch latest version info"
+            )
 
         # fetch the info URL
         info_url = self.__fetch_url_latest_version_info()
 
         # parse out the version number and fetch the download URL
         self.latest_version = info_url.split("/")[-1]
-        self.latest_version_download_url = self.__fetch_url_latest_version_download(info_url)
+        self.latest_version_download_url = self.__fetch_url_latest_version_download(
+            info_url
+        )
 
     def __get_splunk_base_session_token(self, username: str, password: str) -> str:
         """
@@ -309,12 +339,12 @@ class SplunkApp:
         return token_value
 
     def download(
-            self,
-            out: Path,
-            username: str,
-            password: str,
-            is_dir: bool = False,
-            overwrite: bool = False
+        self,
+        out: Path,
+        username: str,
+        password: str,
+        is_dir: bool = False,
+        overwrite: bool = False,
     ) -> Path:
         """
         Given an output path, download the app to the specified location
@@ -336,11 +366,7 @@ class SplunkApp:
         # Get the Splunkbase session token
         token = self.__get_splunk_base_session_token(username, password)
         response = requests.request(
-            "GET",
-            self.latest_version_download_url,
-            cookies={
-                "sessionid": token
-            }
+            "GET", self.latest_version_download_url, cookies={"sessionid": token}
         )
 
         # If the provided output path was a directory we need to try and pull the filename from the
@@ -348,17 +374,21 @@ class SplunkApp:
         if is_dir:
             try:
                 # Pull 'Content-Disposition' from the headers
-                content_disposition: str = response.headers['Content-Disposition']
+                content_disposition: str = response.headers["Content-Disposition"]
 
                 # Attempt to parse the filename as a KV
                 key, value = content_disposition.strip().split("=")
                 if key != "attachment;filename":
-                    raise ValueError(f"Unexpected key in 'Content-Disposition' KV pair: {key}")
+                    raise ValueError(
+                        f"Unexpected key in 'Content-Disposition' KV pair: {key}"
+                    )
 
                 # Validate the filename is the expected .tgz file
                 filename = Path(value.strip().strip('"'))
                 if filename.suffixes != [".tgz"]:
-                    raise ValueError(f"Filename has unexpected extension(s): {filename.suffixes}")
+                    raise ValueError(
+                        f"Filename has unexpected extension(s): {filename.suffixes}"
+                    )
                 out = Path(out, filename)
             except KeyError as e:
                 raise KeyError(
@@ -371,9 +401,7 @@ class SplunkApp:
 
         # Ensure the output path is not already occupied
         if out.exists() and not overwrite:
-            msg = (
-                f"File already exists at {out}, cannot download the app."
-            )
+            msg = f"File already exists at {out}, cannot download the app."
             raise Exception(msg)
 
         # Make any parent directories as needed
contentctl/helper/utils.py
CHANGED
@@ -12,6 +12,7 @@ import tqdm
 from math import ceil
 
 from typing import TYPE_CHECKING
+
 if TYPE_CHECKING:
     from contentctl.objects.security_content_object import SecurityContentObject
     from contentctl.objects.security_content_object import SecurityContentObject
@@ -24,26 +25,29 @@ ALWAYS_PULL = True
 class Utils:
     @staticmethod
     def get_all_yml_files_from_directory(path: str) -> list[pathlib.Path]:
-        listOfFiles:list[pathlib.Path] = []
+        listOfFiles: list[pathlib.Path] = []
         base_path = pathlib.Path(path)
         if not base_path.exists():
             return listOfFiles
-        for dirpath,dirnames,filenames in os.walk(path):
+        for dirpath, dirnames, filenames in os.walk(path):
             for file in filenames:
                 if file.endswith(".yml"):
                     listOfFiles.append(pathlib.Path(os.path.join(dirpath, file)))
-
+
         return sorted(listOfFiles)
-
+
     @staticmethod
-    def get_security_content_files_from_directory(
-            path: pathlib.Path, allowedFileExtensions: list[str] = [".yml"], fileExtensionsToReturn: list[str] = [".yml"]) -> list[pathlib.Path]:
+    def get_security_content_files_from_directory(
+        path: pathlib.Path,
+        allowedFileExtensions: list[str] = [".yml"],
+        fileExtensionsToReturn: list[str] = [".yml"],
+    ) -> list[pathlib.Path]:
         """
         Get all of the Security Content Object Files rooted in a given directory. These will almost
         certain be YML files, but could be other file types as specified by the user
 
         Args:
-            path (pathlib.Path): The root path at which to enumerate all Security Content Files. All directories will be traversed.
+            path (pathlib.Path): The root path at which to enumerate all Security Content Files. All directories will be traversed.
             allowedFileExtensions (set[str], optional): File extensions which are allowed to be present in this directory. In most cases, we do not want to allow the presence of non-YML files. Defaults to [".yml"].
             fileExtensionsToReturn (set[str], optional): Filenames with extensions that should be returned from this function. For example, the lookups/ directory contains YML, CSV, and MLMODEL directories, but only the YMLs are Security Content Objects for constructing Lookyps. Defaults to[".yml"].
@@ -56,14 +60,18 @@ class Utils:
             list[pathlib.Path]: list of files with an extension in fileExtensionsToReturn found in path
         """
         if not set(fileExtensionsToReturn).issubset(set(allowedFileExtensions)):
-            raise Exception(
-                f"allowedFileExtensions {allowedFileExtensions} MUST be a subset of fileExtensionsToReturn {fileExtensionsToReturn}, but it is not")
+            raise Exception(
+                f"allowedFileExtensions {allowedFileExtensions} MUST be a subset of fileExtensionsToReturn {fileExtensionsToReturn}, but it is not"
+            )
+
         if not path.exists() or not path.is_dir():
-            raise Exception(
-                f"Unable to get security_content files, required directory '{str(path)}' does not exist or is not a directory")
-        allowedFiles:list[pathlib.Path] = []
-        erroneousFiles:list[pathlib.Path] = []
-        # Get every single file extension
+            raise Exception(
+                f"Unable to get security_content files, required directory '{str(path)}' does not exist or is not a directory"
+            )
+
+        allowedFiles: list[pathlib.Path] = []
+        erroneousFiles: list[pathlib.Path] = []
+        # Get every single file extension
         for filePath in path.glob("**/*.*"):
             if filePath.suffix in allowedFileExtensions:
                 # Yes these are allowed
@@ -73,58 +81,75 @@ class Utils:
                 erroneousFiles.append(filePath)
 
         if len(erroneousFiles):
-            raise Exception(
-                f"The following files are not allowed in the directory '{path}'. Only files with the extensions {allowedFileExtensions} are allowed:{[str(filePath) for filePath in erroneousFiles]}")
+            raise Exception(
+                f"The following files are not allowed in the directory '{path}'. Only files with the extensions {allowedFileExtensions} are allowed:{[str(filePath) for filePath in erroneousFiles]}"
+            )
+
         # There were no errorneous files, so return the requested files
-        return sorted([filePath for filePath in allowedFiles if filePath.suffix in fileExtensionsToReturn])
+        return sorted(
+            [
+                filePath
+                for filePath in allowedFiles
+                if filePath.suffix in fileExtensionsToReturn
+            ]
+        )
 
     @staticmethod
-    def get_all_yml_files_from_directory_one_layer_deep(path: str) -> list[pathlib.Path]:
+    def get_all_yml_files_from_directory_one_layer_deep(
+        path: str,
+    ) -> list[pathlib.Path]:
         listOfFiles: list[pathlib.Path] = []
         base_path = pathlib.Path(path)
         if not base_path.exists():
             return listOfFiles
         # Check the base directory
         for item in base_path.iterdir():
-            if item.is_file() and item.suffix == '.yml':
+            if item.is_file() and item.suffix == ".yml":
                 listOfFiles.append(item)
         # Check one subfolder level deep
         for subfolder in base_path.iterdir():
             if subfolder.is_dir() and subfolder.name != "cim":
                 for item in subfolder.iterdir():
-                    if item.is_file() and item.suffix == '.yml':
+                    if item.is_file() and item.suffix == ".yml":
                         listOfFiles.append(item)
         return sorted(listOfFiles)
 
-
     @staticmethod
-    def add_id(id_dict: dict[str, list[pathlib.Path]], obj: SecurityContentObject, path: pathlib.Path) -> None:
+    def add_id(
+        id_dict: dict[str, list[pathlib.Path]],
+        obj: SecurityContentObject,
+        path: pathlib.Path,
+    ) -> None:
         if hasattr(obj, "id"):
             obj_id = obj.id
             if obj_id in id_dict:
                 id_dict[obj_id].append(path)
             else:
                 id_dict[obj_id] = [path]
+
         # Otherwise, no ID so nothing to add....
 
     @staticmethod
-    def check_ids_for_duplicates(
-            id_dict: dict[str, list[pathlib.Path]]) -> list[Tuple[pathlib.Path, ValueError]]:
-        validation_errors: list[Tuple[pathlib.Path, ValueError]] = []
+    def check_ids_for_duplicates(
+        id_dict: dict[str, list[pathlib.Path]],
+    ) -> list[Tuple[pathlib.Path, ValueError]]:
+        validation_errors: list[Tuple[pathlib.Path, ValueError]] = []
+
         for key, values in id_dict.items():
             if len(values) > 1:
                 error_file_path = pathlib.Path("MULTIPLE")
-                all_files = '\n\t'.join(str(pathlib.Path(p)) for p in values)
-                exception = ValueError(
-                    f"Error validating id [{key}] - duplicate ID was used in the following files: \n\t{all_files}")
+                all_files = "\n\t".join(str(pathlib.Path(p)) for p in values)
+                exception = ValueError(
+                    f"Error validating id [{key}] - duplicate ID was used in the following files: \n\t{all_files}"
+                )
                 validation_errors.append((error_file_path, exception))
-
+
         return validation_errors
 
     @staticmethod
     def validate_git_hash(
         repo_path: str, repo_url: str, commit_hash: str, branch_name: Union[str, None]
     ) -> bool:
-
         # Get a list of all branches
         repo = git.Repo(repo_path)
         if commit_hash is None:
@@ -141,14 +166,14 @@ class Utils:
             # Note, of course, that a hash can be in 0, 1, more branches!
             for branch_string in all_branches_containing_hash:
                 if branch_string.split(" ")[0] == "*" and (
-                    branch_string.split(" ")[-1] == branch_name or branch_name == None
+                    branch_string.split(" ")[-1] == branch_name or branch_name is None
                 ):
                     # Yes, the hash exists in the branch (or branch_name was None and it existed in at least one branch)!
                     return True
             # If we get here, it does not exist in the given branch
             raise (Exception("Does not exist in branch"))
 
-        except Exception as e:
+        except Exception:
             if branch_name is None:
                 branch_name = "ANY_BRANCH"
             if ALWAYS_PULL:
@@ -247,25 +272,10 @@ class Utils:
 
         return hash
 
-    # @staticmethod
-    # def check_required_fields(
-    #     thisField: str, definedFields: dict, requiredFields: list[str]
-    # ):
-    #     missing_fields = [
-    #         field for field in requiredFields if field not in definedFields
-    #     ]
-    #     if len(missing_fields) > 0:
-    #         raise (
-    #             ValueError(
-    #                 f"Could not validate - please resolve other errors resulting in missing fields {missing_fields}"
-    #             )
-    #         )
-
     @staticmethod
     def verify_file_exists(
         file_path: str, verbose_print=False, timeout_seconds: int = 10
     ) -> None:
-
         try:
             if pathlib.Path(file_path).is_file():
                 # This is a file and we know it exists
@@ -275,18 +285,13 @@ class Utils:
 
         # Try to make a head request to verify existence of the file
         try:
-
             req = requests.head(
                 file_path, timeout=timeout_seconds, verify=True, allow_redirects=True
             )
             if req.status_code > 400:
                 raise (Exception(f"Return code={req.status_code}"))
         except Exception as e:
-            raise (
-                Exception(
-                    f"HTTP Resolution Failed: {str(e)}"
-                )
-            )
+            raise (Exception(f"HTTP Resolution Failed: {str(e)}"))
 
     @staticmethod
     def copy_local_file(
@@ -390,7 +395,7 @@ class Utils:
         )
 
         try:
-            start_time = default_timer()
+            default_timer()
             bytes_written = 0
             file_to_download = requests.get(file_path, stream=True)
             file_to_download.raise_for_status()
contentctl/input/director.py
CHANGED
@@ -14,7 +14,7 @@ from contentctl.objects.investigation import Investigation
 from contentctl.objects.playbook import Playbook
 from contentctl.objects.deployment import Deployment
 from contentctl.objects.macro import Macro
-from contentctl.objects.lookup import Lookup
+from contentctl.objects.lookup import LookupAdapter, Lookup
 from contentctl.objects.atomic import AtomicEnrichment
 from contentctl.objects.security_content_object import SecurityContentObject
 from contentctl.objects.data_source import DataSource
@@ -29,7 +29,7 @@ from contentctl.helper.utils import Utils
 
 @dataclass
 class DirectorOutputDto:
-    # Atomic Tests are first because parsing them 
+    # Atomic Tests are first because parsing them
     # is far quicker than attack_enrichment
     atomic_enrichment: AtomicEnrichment
     attack_enrichment: AttackEnrichment
@@ -50,15 +50,14 @@ class DirectorOutputDto:
 
     def addContentToDictMappings(self, content: SecurityContentObject):
         content_name = content.name
-
-
+
         if content_name in self.name_to_content_map:
             raise ValueError(
                 f"Duplicate name '{content_name}' with paths:\n"
                 f"  - {content.file_path}\n"
                 f"  - {self.name_to_content_map[content_name].file_path}"
             )
-
+
         if content.id in self.uuid_to_content_map:
             raise ValueError(
                 f"Duplicate id '{content.id}' with paths:\n"
@@ -83,7 +82,7 @@ class DirectorOutputDto:
         elif isinstance(content, Detection):
             self.detections.append(content)
         elif isinstance(content, Dashboard):
-            self.dashboards.append(content)            
+            self.dashboards.append(content)
 
         elif isinstance(content, DataSource):
             self.data_sources.append(content)
@@ -94,7 +93,7 @@ class DirectorOutputDto:
             self.uuid_to_content_map[content.id] = content
 
 
-class Director():
+class Director:
     input_dto: validate
     output_dto: DirectorOutputDto
 
@@ -113,13 +112,18 @@ class Director():
         self.createSecurityContent(SecurityContentType.playbooks)
         self.createSecurityContent(SecurityContentType.detections)
         self.createSecurityContent(SecurityContentType.dashboards)
-        
-        from contentctl.objects.abstract_security_content_objects.detection_abstract import MISSING_SOURCES
+
+        from contentctl.objects.abstract_security_content_objects.detection_abstract import (
+            MISSING_SOURCES,
+        )
+
         if len(MISSING_SOURCES) > 0:
             missing_sources_string = "\n 🟡 ".join(sorted(list(MISSING_SOURCES)))
-            print(
-                f"WARNING: The following data_sources have been used in detections, but are not yet defined.\nThis is not yet an error since not all data_sources have been defined, but will be convered to an error soon:\n 🟡 {missing_sources_string}"
-            )
+            print(
+                "WARNING: The following data_sources have been used in detections, but are not yet defined.\n"
+                "This is not yet an error since not all data_sources have been defined, but will be convered to an error soon:\n 🟡 "
+                f"{missing_sources_string}"
+            )
         else:
             print("No missing data_sources!")
 
@@ -134,18 +138,20 @@ class Director():
                 SecurityContentType.playbooks,
                 SecurityContentType.detections,
                 SecurityContentType.data_sources,
-                SecurityContentType.dashboards
+                SecurityContentType.dashboards,
             ]:
                 files = Utils.get_all_yml_files_from_directory(
                     os.path.join(self.input_dto.path, str(contentType.name))
                 )
-                security_content_files = [
-                    f for f in files
-                ]
+                security_content_files = [f for f in files]
             else:
-                raise (Exception(f"Cannot createSecurityContent for unknown product {contentType}."))
+                raise (
+                    Exception(
+                        f"Cannot createSecurityContent for unknown product {contentType}."
+                    )
+                )
 
-            validation_errors:list[tuple[Path,ValueError]] = []
+            validation_errors: list[tuple[Path, ValueError]] = []
 
             already_ran = False
             progress_percent = 0
@@ -157,40 +163,67 @@ class Director():
                     modelDict = YmlReader.load_file(file)
 
                     if contentType == SecurityContentType.lookups:
-                        lookup = Lookup.model_validate(modelDict, context={"output_dto":self.output_dto, "config":self.input_dto})
+                        lookup = LookupAdapter.validate_python(
+                            modelDict,
+                            context={
+                                "output_dto": self.output_dto,
+                                "config": self.input_dto,
+                            },
+                        )
+                        # lookup = Lookup.model_validate(modelDict, context={"output_dto":self.output_dto, "config":self.input_dto})
                         self.output_dto.addContentToDictMappings(lookup)
-
+
                     elif contentType == SecurityContentType.macros:
-                        macro = Macro.model_validate(modelDict, context={"output_dto":self.output_dto})
+                        macro = Macro.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
                         self.output_dto.addContentToDictMappings(macro)
-
+
                     elif contentType == SecurityContentType.deployments:
-                        deployment = Deployment.model_validate(modelDict, context={"output_dto":self.output_dto})
+                        deployment = Deployment.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
                        self.output_dto.addContentToDictMappings(deployment)
 
                     elif contentType == SecurityContentType.playbooks:
-                        playbook = Playbook.model_validate(modelDict, context={"output_dto":self.output_dto})
-                        self.output_dto.addContentToDictMappings(playbook)
-
+                        playbook = Playbook.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
+                        self.output_dto.addContentToDictMappings(playbook)
+
                     elif contentType == SecurityContentType.baselines:
-                        baseline = Baseline.model_validate(modelDict, context={"output_dto":self.output_dto})
+                        baseline = Baseline.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
                         self.output_dto.addContentToDictMappings(baseline)
-
+
                     elif contentType == SecurityContentType.investigations:
-                        investigation = Investigation.model_validate(modelDict, context={"output_dto":self.output_dto})
+                        investigation = Investigation.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
                         self.output_dto.addContentToDictMappings(investigation)
 
                     elif contentType == SecurityContentType.stories:
-                        story = Story.model_validate(modelDict, context={"output_dto":self.output_dto})
+                        story = Story.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
                         self.output_dto.addContentToDictMappings(story)
-
+
                     elif contentType == SecurityContentType.detections:
-                        detection = Detection.model_validate(modelDict, context={"output_dto":self.output_dto, "app":self.input_dto.app})
+                        detection = Detection.model_validate(
+                            modelDict,
+                            context={
+                                "output_dto": self.output_dto,
+                                "app": self.input_dto.app,
+                            },
+                        )
                         self.output_dto.addContentToDictMappings(detection)
-
+
                     elif contentType == SecurityContentType.dashboards:
-                        dashboard = Dashboard.model_validate(modelDict, context={"output_dto":self.output_dto})
-                        self.output_dto.addContentToDictMappings(dashboard)
+                        dashboard = Dashboard.model_validate(
+                            modelDict, context={"output_dto": self.output_dto}
+                        )
+                        self.output_dto.addContentToDictMappings(dashboard)
 
                     elif contentType == SecurityContentType.data_sources:
                         data_source = DataSource.model_validate(
@@ -237,4 +270,3 @@ class Director():
                 raise Exception(
                     f"The following {len(validation_errors)} error(s) were found during validation:\n\n{errors_string}\n\nVALIDATION FAILED"
                 )
-
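The substantive change in this file is the lookup path: Lookup.model_validate(...) is replaced by LookupAdapter.validate_python(...), in line with the large rewrite of contentctl/objects/lookup.py (+304 -101) in this release. A plausible shape for such an adapter in pydantic v2 is a TypeAdapter over a discriminated union of lookup models; the sketch below is an assumption about that pattern, and the CSVLookup/KVStoreLookup classes and discriminator field are illustrative, not contentctl's actual definitions.

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter


class CSVLookup(BaseModel):
    lookup_type: Literal["csv"]  # hypothetical discriminator value
    name: str


class KVStoreLookup(BaseModel):
    lookup_type: Literal["kvstore"]  # hypothetical discriminator value
    name: str


# The discriminator field decides which concrete model validates the dict
LookupAdapter = TypeAdapter(
    Annotated[Union[CSVLookup, KVStoreLookup], Field(discriminator="lookup_type")]
)

lookup = LookupAdapter.validate_python({"lookup_type": "csv", "name": "my_lookup"})
assert isinstance(lookup, CSVLookup)

Like model_validate on a single model, validate_python accepts a context keyword argument, which is how the director threads output_dto and config through to the model validators, exactly as the old call did.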