contentctl-4.3.4-py3-none-any.whl → contentctl-4.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. contentctl/actions/build.py +1 -0
  2. contentctl/actions/detection_testing/GitService.py +10 -10
  3. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +68 -38
  4. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +5 -1
  5. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +10 -8
  6. contentctl/actions/initialize.py +28 -12
  7. contentctl/actions/inspect.py +191 -91
  8. contentctl/actions/new_content.py +10 -2
  9. contentctl/actions/validate.py +3 -6
  10. contentctl/api.py +1 -1
  11. contentctl/contentctl.py +3 -0
  12. contentctl/enrichments/attack_enrichment.py +49 -81
  13. contentctl/enrichments/cve_enrichment.py +6 -7
  14. contentctl/helper/splunk_app.py +141 -10
  15. contentctl/input/director.py +19 -24
  16. contentctl/input/new_content_questions.py +9 -42
  17. contentctl/objects/abstract_security_content_objects/detection_abstract.py +155 -13
  18. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +17 -9
  19. contentctl/objects/atomic.py +51 -77
  20. contentctl/objects/base_test_result.py +7 -7
  21. contentctl/objects/baseline.py +12 -18
  22. contentctl/objects/baseline_tags.py +2 -5
  23. contentctl/objects/config.py +154 -26
  24. contentctl/objects/constants.py +34 -1
  25. contentctl/objects/correlation_search.py +79 -114
  26. contentctl/objects/dashboard.py +100 -0
  27. contentctl/objects/deployment.py +20 -5
  28. contentctl/objects/detection_metadata.py +71 -0
  29. contentctl/objects/detection_stanza.py +79 -0
  30. contentctl/objects/detection_tags.py +28 -26
  31. contentctl/objects/drilldown.py +70 -0
  32. contentctl/objects/enums.py +26 -24
  33. contentctl/objects/errors.py +187 -0
  34. contentctl/objects/investigation.py +23 -15
  35. contentctl/objects/investigation_tags.py +4 -3
  36. contentctl/objects/lookup.py +8 -1
  37. contentctl/objects/macro.py +16 -7
  38. contentctl/objects/notable_event.py +6 -5
  39. contentctl/objects/risk_analysis_action.py +4 -4
  40. contentctl/objects/risk_event.py +8 -7
  41. contentctl/objects/savedsearches_conf.py +196 -0
  42. contentctl/objects/story.py +4 -16
  43. contentctl/objects/throttling.py +46 -0
  44. contentctl/output/conf_output.py +4 -0
  45. contentctl/output/conf_writer.py +24 -4
  46. contentctl/output/new_content_yml_output.py +4 -9
  47. contentctl/output/templates/analyticstories_detections.j2 +2 -2
  48. contentctl/output/templates/analyticstories_investigations.j2 +5 -5
  49. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  50. contentctl/output/templates/savedsearches_baselines.j2 +2 -3
  51. contentctl/output/templates/savedsearches_detections.j2 +12 -7
  52. contentctl/output/templates/savedsearches_investigations.j2 +3 -4
  53. contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +10 -1
  54. {contentctl-4.3.4.dist-info → contentctl-4.4.0.dist-info}/METADATA +6 -5
  55. {contentctl-4.3.4.dist-info → contentctl-4.4.0.dist-info}/RECORD +58 -57
  56. {contentctl-4.3.4.dist-info → contentctl-4.4.0.dist-info}/WHEEL +1 -1
  57. contentctl/objects/ssa_detection.py +0 -157
  58. contentctl/objects/ssa_detection_tags.py +0 -138
  59. contentctl/objects/unit_test_old.py +0 -10
  60. contentctl/objects/unit_test_ssa.py +0 -31
  61. contentctl/output/templates/finding_report.j2 +0 -30
  62. {contentctl-4.3.4.dist-info → contentctl-4.4.0.dist-info}/LICENSE.md +0 -0
  63. {contentctl-4.3.4.dist-info → contentctl-4.4.0.dist-info}/entry_points.txt +0 -0

contentctl/enrichments/attack_enrichment.py

@@ -1,14 +1,13 @@
 
  from __future__ import annotations
- import csv
- import os
  import sys
  from attackcti import attack_client
  import logging
- from pydantic import BaseModel, Field
+ from pydantic import BaseModel
  from dataclasses import field
- from typing import Annotated,Any
- from contentctl.objects.mitre_attack_enrichment import MitreAttackEnrichment
+ from typing import Any
+ from pathlib import Path
+ from contentctl.objects.mitre_attack_enrichment import MitreAttackEnrichment, MitreTactics
  from contentctl.objects.config import validate
  from contentctl.objects.annotated_types import MITRE_ATTACK_ID_TYPE
  logging.getLogger('taxii2client').setLevel(logging.CRITICAL)

@@ -21,12 +20,12 @@ class AttackEnrichment(BaseModel):
      @staticmethod
      def getAttackEnrichment(config:validate)->AttackEnrichment:
          enrichment = AttackEnrichment(use_enrichment=config.enrichments)
-         _ = enrichment.get_attack_lookup(str(config.path))
+         _ = enrichment.get_attack_lookup(config.mitre_cti_repo_path, config.enrichments)
          return enrichment
 
      def getEnrichmentByMitreID(self, mitre_id:MITRE_ATTACK_ID_TYPE)->MitreAttackEnrichment:
          if not self.use_enrichment:
-             raise Exception(f"Error, trying to add Mitre Enrichment, but use_enrichment was set to False")
+             raise Exception("Error, trying to add Mitre Enrichment, but use_enrichment was set to False")
 
          enrichment = self.data.get(mitre_id, None)
          if enrichment is not None:

@@ -34,71 +33,69 @@ class AttackEnrichment(BaseModel):
          else:
              raise Exception(f"Error, Unable to find Mitre Enrichment for MitreID {mitre_id}")
 
-     def addMitreIDViaGroupNames(self, technique:dict, tactics:list[str], groupNames:list[str])->None:
+     def addMitreIDViaGroupNames(self, technique:dict[str,Any], tactics:list[str], groupNames:list[str])->None:
          technique_id = technique['technique_id']
          technique_obj = technique['technique']
          tactics.sort()
 
          if technique_id in self.data:
              raise Exception(f"Error, trying to redefine MITRE ID '{technique_id}'")
-         self.data[technique_id] = MitreAttackEnrichment(mitre_attack_id=technique_id,
-                                                         mitre_attack_technique=technique_obj,
-                                                         mitre_attack_tactics=tactics,
-                                                         mitre_attack_groups=groupNames,
-                                                         mitre_attack_group_objects=[])
+         self.data[technique_id] = MitreAttackEnrichment.model_validate({'mitre_attack_id':technique_id,
+                                                                         'mitre_attack_technique':technique_obj,
+                                                                         'mitre_attack_tactics':tactics,
+                                                                         'mitre_attack_groups':groupNames,
+                                                                         'mitre_attack_group_objects':[]})
 
-     def addMitreIDViaGroupObjects(self, technique:dict, tactics:list[str], groupObjects:list[dict[str,Any]])->None:
+     def addMitreIDViaGroupObjects(self, technique:dict[str,Any], tactics:list[MitreTactics], groupDicts:list[dict[str,Any]])->None:
          technique_id = technique['technique_id']
          technique_obj = technique['technique']
          tactics.sort()
 
-         groupNames:list[str] = sorted([group['group'] for group in groupObjects])
+         groupNames:list[str] = sorted([group['group'] for group in groupDicts])
 
          if technique_id in self.data:
              raise Exception(f"Error, trying to redefine MITRE ID '{technique_id}'")
-         self.data[technique_id] = MitreAttackEnrichment(mitre_attack_id=technique_id,
-                                                         mitre_attack_technique=technique_obj,
-                                                         mitre_attack_tactics=tactics,
-                                                         mitre_attack_groups=groupNames,
-                                                         mitre_attack_group_objects=groupObjects)
+ 
+         self.data[technique_id] = MitreAttackEnrichment.model_validate({'mitre_attack_id': technique_id,
+                                                                         'mitre_attack_technique': technique_obj,
+                                                                         'mitre_attack_tactics': tactics,
+                                                                         'mitre_attack_groups': groupNames,
+                                                                         'mitre_attack_group_objects': groupDicts})
 
 
-     def get_attack_lookup(self, input_path: str, store_csv: bool = False, force_cached_or_offline: bool = False, skip_enrichment:bool = False) -> dict:
-         if not self.use_enrichment:
-             return {}
-         print("Getting MITRE Attack Enrichment Data. This may take some time...")
-         attack_lookup = dict()
-         file_path = os.path.join(input_path, "app_template", "lookups", "mitre_enrichment.csv")
-
-         if skip_enrichment is True:
-             print("Skipping enrichment")
+     def get_attack_lookup(self, input_path: Path, enrichments:bool = False) -> dict[str,MitreAttackEnrichment]:
+         attack_lookup:dict[str,MitreAttackEnrichment] = {}
+         if not enrichments:
              return attack_lookup
+ 
          try:
-
-             if force_cached_or_offline is True:
-                 raise(Exception("WARNING - Using cached MITRE Attack Enrichment. Attack Enrichment may be out of date. Only use this setting for offline environments and development purposes."))
-             print(f"\r{'Client'.rjust(23)}: [{0:3.0f}%]...", end="", flush=True)
-             lift = attack_client()
-             print(f"\r{'Client'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
+             print(f"Performing MITRE Enrichment using the repository at {input_path}...",end="", flush=True)
+             # The existence of the input_path is validated during cli argument validation, but it is
+             # possible that the repo is in the wrong format. If the following directories do not
+             # exist, then attack_client will fall back to resolving via REST API. We do not
+             # want this as it is slow and error prone, so we will force an exception to
+             # be generated.
+             enterprise_path = input_path/"enterprise-attack"
+             mobile_path = input_path/"ics-attack"
+             ics_path = input_path/"mobile-attack"
+             if not (enterprise_path.is_dir() and mobile_path.is_dir() and ics_path.is_dir()):
+                 raise FileNotFoundError("One or more of the following paths does not exist: "
+                                         f"{[str(enterprise_path),str(mobile_path),str(ics_path)]}. "
+                                         f"Please ensure that the {input_path} directory "
+                                         "has been git cloned correctly.")
+             lift = attack_client(
+                 local_paths= {
+                     "enterprise":str(enterprise_path),
+                     "mobile":str(mobile_path),
+                     "ics":str(ics_path)
+                 }
+             )
 
-             print(f"\r{'Techniques'.rjust(23)}: [{0.0:3.0f}%]...", end="", flush=True)
              all_enterprise_techniques = lift.get_enterprise_techniques(stix_format=False)
-
-             print(f"\r{'Techniques'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
-             print(f"\r{'Relationships'.rjust(23)}: [{0.0:3.0f}%]...", end="", flush=True)
              enterprise_relationships = lift.get_enterprise_relationships(stix_format=False)
-             print(f"\r{'Relationships'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
-             print(f"\r{'Groups'.rjust(23)}: [{0:3.0f}%]...", end="", flush=True)
              enterprise_groups = lift.get_enterprise_groups(stix_format=False)
-             print(f"\r{'Groups'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
 
-             for index, technique in enumerate(all_enterprise_techniques):
-                 progress_percent = ((index+1)/len(all_enterprise_techniques)) * 100
-                 if (sys.stdout.isatty() and sys.stdin.isatty() and sys.stderr.isatty()):
-                     print(f"\r\t{'MITRE Technique Progress'.rjust(23)}: [{progress_percent:3.0f}%]...", end="", flush=True)
+             for technique in all_enterprise_techniques:
                  apt_groups:list[dict[str,Any]] = []
                  for relationship in enterprise_relationships:
                      if (relationship['target_object'] == technique['id']) and relationship['source_object'].startswith('intrusion-set'):

@@ -115,39 +112,10 @@ class AttackEnrichment(BaseModel):
                  self.addMitreIDViaGroupObjects(technique, tactics, apt_groups)
                  attack_lookup[technique['technique_id']] = {'technique': technique['technique'], 'tactics': tactics, 'groups': apt_groups}
 
-             if store_csv:
-                 f = open(file_path, 'w')
-                 writer = csv.writer(f)
-                 writer.writerow(['mitre_id', 'technique', 'tactics' ,'groups'])
-                 for key in attack_lookup.keys():
-                     if len(attack_lookup[key]['groups']) == 0:
-                         groups = 'no'
-                     else:
-                         groups = '|'.join(attack_lookup[key]['groups'])
-
-                     writer.writerow([
-                         key,
-                         attack_lookup[key]['technique'],
-                         '|'.join(attack_lookup[key]['tactics']),
-                         groups
-                     ])
-
-                 f.close()
 
+ 
          except Exception as err:
-             print(f'\nError: {str(err)}')
-             print('Use local copy app_template/lookups/mitre_enrichment.csv')
-             with open(file_path, mode='r') as inp:
-                 reader = csv.reader(inp)
-                 attack_lookup = {rows[0]:{'technique': rows[1], 'tactics': rows[2].split('|'), 'groups': rows[3].split('|')} for rows in reader}
-             attack_lookup.pop('mitre_id')
-             for key in attack_lookup.keys():
-                 technique_input = {'technique_id': key , 'technique': attack_lookup[key]['technique'] }
-                 tactics_input = attack_lookup[key]['tactics']
-                 groups_input = attack_lookup[key]['groups']
-                 self.addMitreIDViaGroupNames(technique=technique_input, tactics=tactics_input, groups=groups_input)
-
-
-
+             raise Exception(f"Error getting MITRE Enrichment: {str(err)}")
+ 
          print("Done!")
          return attack_lookup
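
Note on this hunk: getAttackEnrichment now passes config.mitre_cti_repo_path, so enrichment is built entirely from a local clone of the MITRE CTI repository instead of the old bundled CSV fallback, and any failure raises instead of silently degrading. A minimal sketch of the new lookup flow, using only the attack_client calls that appear in the hunk above; the ./cti clone location is hypothetical:

from pathlib import Path
from attackcti import attack_client

cti_repo = Path("cti")  # assumed location of `git clone https://github.com/mitre/cti`
lift = attack_client(
    local_paths={
        "enterprise": str(cti_repo / "enterprise-attack"),
        "mobile": str(cti_repo / "mobile-attack"),
        "ics": str(cti_repo / "ics-attack"),
    }
)
# With local_paths supplied, the STIX bundles on disk are parsed directly
# (per the comment in the hunk, the REST fallback is what we want to avoid).
techniques = lift.get_enterprise_techniques(stix_format=False)
groups = lift.get_enterprise_groups(stix_format=False)
print(f"Loaded {len(techniques)} techniques and {len(groups)} groups offline")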

contentctl/enrichments/cve_enrichment.py

@@ -5,7 +5,7 @@ import os
  import shelve
  import time
  from typing import Annotated, Any, Union, TYPE_CHECKING
- from pydantic import BaseModel,Field, computed_field
+ from pydantic import ConfigDict, BaseModel,Field, computed_field
  from decimal import Decimal
  from requests.exceptions import ReadTimeout
  from contentctl.objects.annotated_types import CVE_TYPE

@@ -32,13 +32,12 @@ class CveEnrichmentObj(BaseModel):
  class CveEnrichment(BaseModel):
      use_enrichment: bool = True
      cve_api_obj: Union[CVESearch,None] = None
- 
 
-     class Config:
-         # Arbitrary_types are allowed to let us use the CVESearch Object
-         arbitrary_types_allowed = True
-         frozen = True
- 
+     # Arbitrary_types are allowed to let us use the CVESearch Object
+     model_config = ConfigDict(
+         arbitrary_types_allowed=True,
+         frozen=True
+     )
 
      @staticmethod
      def getCveEnrichment(config:validate, timeout_seconds:int=10, force_disable_enrichment:bool=True)->CveEnrichment:
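
Note on this hunk: the Pydantic v1 style inner `class Config` is replaced by the v2 idiom, `model_config = ConfigDict(...)`. The same pattern on a toy model, illustrative only (CVESearch itself is not used here):

from pydantic import BaseModel, ConfigDict

class EnrichmentWrapper(BaseModel):
    # arbitrary_types_allowed permits fields whose types are not Pydantic models
    # (contentctl uses this for the CVESearch client); frozen makes instances immutable.
    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
    api_obj: object | None = None

w = EnrichmentWrapper(api_obj=None)
# w.api_obj = object()  # would raise a ValidationError because the model is frozen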

contentctl/helper/splunk_app.py

@@ -1,20 +1,20 @@
- import os
- import time
  import json
+ from typing import Optional, Collection
+ from pathlib import Path
  import xml.etree.ElementTree as ET
- from typing import List, Tuple, Optional
  from urllib.parse import urlencode
 
  import requests
  import urllib3
  import xmltodict
  from requests.adapters import HTTPAdapter
- from requests.packages.urllib3.util.retry import Retry
+ from urllib3.util.retry import Retry
 
  urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
  MAX_RETRY = 3
 
+ 
  class APIEndPoint:
      """
      Class which contains Static Endpoint

@@ -27,6 +27,7 @@ class APIEndPoint:
      SPLUNK_BASE_GET_UID_REDIRECT = "https://apps.splunk.com/apps/id/{app_name_id}"
      SPLUNK_BASE_APP_INFO = "https://splunkbase.splunk.com/api/v1/app/{app_uid}"
 
+ 
  class RetryConstant:
      """
      Class which contains Retry Constant

@@ -53,11 +54,11 @@ class SplunkApp:
 
      @staticmethod
      def requests_retry_session(
-         retries=RetryConstant.RETRY_COUNT,
-         backoff_factor=1,
-         status_forcelist=(500, 502, 503, 504),
-         session=None,
-     ):
+         retries: int = RetryConstant.RETRY_COUNT,
+         backoff_factor: int = 1,
+         status_forcelist: Collection[int] = (500, 502, 503, 504),
+         session: requests.Session | None = None,
+     ) -> requests.Session:
          session = session or requests.Session()
          retry = Retry(
              total=retries,

@@ -260,4 +261,134 @@ class SplunkApp:
 
          # parse out the version number and fetch the download URL
          self.latest_version = info_url.split("/")[-1]
-         self.latest_version_download_url = self.__fetch_url_latest_version_download(info_url)
+         self.latest_version_download_url = self.__fetch_url_latest_version_download(info_url)
+ 
+     def __get_splunk_base_session_token(self, username: str, password: str) -> str:
+         """
+         This method will generate Splunk base session token
+ 
+         :param username: Splunkbase username
+         :type username: str
+         :param password: Splunkbase password
+         :type password: str
+ 
+         :return: Splunk base session token
+         :rtype: str
+         """
+         # Data payload for fetch splunk base session token
+         payload = urlencode(
+             {
+                 "username": username,
+                 "password": password,
+             }
+         )
+ 
+         headers = {
+             "content-type": "application/x-www-form-urlencoded",
+             "cache-control": "no-cache",
+         }
+ 
+         response = requests.request(
+             "POST",
+             APIEndPoint.SPLUNK_BASE_AUTH_URL,
+             data=payload,
+             headers=headers,
+         )
+ 
+         token_value = ""
+ 
+         if response.status_code != 200:
+             msg = (
+                 f"Error occurred while executing the rest call for splunk base authentication api,"
+                 f"{response.content}"
+             )
+             raise Exception(msg)
+         else:
+             root = ET.fromstring(response.content)
+             token_value = root.find("{http://www.w3.org/2005/Atom}id").text.strip()
+         return token_value
+ 
+     def download(
+         self,
+         out: Path,
+         username: str,
+         password: str,
+         is_dir: bool = False,
+         overwrite: bool = False
+     ) -> Path:
+         """
+         Given an output path, download the app to the specified location
+ 
+         :param out: the Path to download the app to
+         :type out: :class:`pathlib.Path`
+         :param username: Splunkbase username
+         :type username: str
+         :param password: Splunkbase password
+         :type password: str
+         :param is_dir: a flag indicating whether out is directory, otherwise a file (default: False)
+         :type is_dir: bool
+         :param overwrite: a flag indicating whether we can overwrite the file at out or not
+         :type overwrite: bool
+ 
+         :returns path: the Path the download was written to (needed when is_dir is True)
+         :rtype: :class:`pathlib.Path`
+         """
+         # Get the Splunkbase session token
+         token = self.__get_splunk_base_session_token(username, password)
+         response = requests.request(
+             "GET",
+             self.latest_version_download_url,
+             cookies={
+                 "sessionid": token
+             }
+         )
+ 
+         # If the provided output path was a directory we need to try and pull the filename from the
+         # response headers
+         if is_dir:
+             try:
+                 # Pull 'Content-Disposition' from the headers
+                 content_disposition: str = response.headers['Content-Disposition']
+ 
+                 # Attempt to parse the filename as a KV
+                 key, value = content_disposition.strip().split("=")
+                 if key != "attachment;filename":
+                     raise ValueError(f"Unexpected key in 'Content-Disposition' KV pair: {key}")
+ 
+                 # Validate the filename is the expected .tgz file
+                 filename = Path(value.strip().strip('"'))
+                 if filename.suffixes != [".tgz"]:
+                     raise ValueError(f"Filename has unexpected extension(s): {filename.suffixes}")
+                 out = Path(out, filename)
+             except KeyError as e:
+                 raise KeyError(
+                     f"Unable to properly extract 'Content-Disposition' from response headers: {e}"
+                 ) from e
+             except ValueError as e:
+                 raise ValueError(
+                     f"Unable to parse filename from 'Content-Disposition' header: {e}"
+                 ) from e
+ 
+         # Ensure the output path is not already occupied
+         if out.exists() and not overwrite:
+             msg = (
+                 f"File already exists at {out}, cannot download the app."
+             )
+             raise Exception(msg)
+ 
+         # Make any parent directories as needed
+         out.parent.mkdir(parents=True, exist_ok=True)
+ 
+         # Check for HTTP errors
+         if response.status_code != 200:
+             msg = (
+                 f"Error occurred while executing the rest call for splunk base authentication api,"
+                 f"{response.content}"
+             )
+             raise Exception(msg)
+ 
+         # Write the app to disk
+         with open(out, "wb") as file:
+             file.write(response.content)
+ 
+         return out
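
Note on this hunk: besides the new Splunkbase authentication and download() methods, requests_retry_session gains full type annotations and Retry is now imported from urllib3.util.retry directly rather than through the deprecated requests.packages shim. The body of that helper is truncated above; a self-contained sketch of the pattern it wraps, under the assumption that the adapter is mounted on the session in the usual way:

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def retry_session(retries: int = 3,
                  backoff_factor: int = 1,
                  status_forcelist=(500, 502, 503, 504)) -> requests.Session:
    # Build a Session whose HTTP(S) adapters transparently retry transient
    # server errors, backing off between attempts.
    session = requests.Session()
    retry = Retry(
        total=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session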

contentctl/input/director.py

@@ -1,41 +1,29 @@
  import os
  import sys
- import pathlib
- from typing import Union
+ from pathlib import Path
  from dataclasses import dataclass, field
  from pydantic import ValidationError
  from uuid import UUID
  from contentctl.input.yml_reader import YmlReader
 
- 
  from contentctl.objects.detection import Detection
  from contentctl.objects.story import Story
 
- from contentctl.objects.enums import SecurityContentProduct
  from contentctl.objects.baseline import Baseline
  from contentctl.objects.investigation import Investigation
  from contentctl.objects.playbook import Playbook
  from contentctl.objects.deployment import Deployment
  from contentctl.objects.macro import Macro
  from contentctl.objects.lookup import Lookup
- from contentctl.objects.ssa_detection import SSADetection
- from contentctl.objects.atomic import AtomicTest
+ from contentctl.objects.atomic import AtomicEnrichment
  from contentctl.objects.security_content_object import SecurityContentObject
  from contentctl.objects.data_source import DataSource
- from contentctl.objects.event_source import EventSource
- 
+ from contentctl.objects.dashboard import Dashboard
  from contentctl.enrichments.attack_enrichment import AttackEnrichment
  from contentctl.enrichments.cve_enrichment import CveEnrichment
 
  from contentctl.objects.config import validate
  from contentctl.objects.enums import SecurityContentType
- 
- from contentctl.objects.enums import DetectionStatus
- from contentctl.helper.utils import Utils
- 
- from contentctl.objects.enums import SecurityContentType
- 
- from contentctl.objects.enums import DetectionStatus
  from contentctl.helper.utils import Utils
 
 

@@ -43,7 +31,7 @@ from contentctl.helper.utils import Utils
  class DirectorOutputDto:
      # Atomic Tests are first because parsing them
      # is far quicker than attack_enrichment
-     atomic_tests: None | list[AtomicTest]
+     atomic_enrichment: AtomicEnrichment
      attack_enrichment: AttackEnrichment
      cve_enrichment: CveEnrichment
      detections: list[Detection]

@@ -54,16 +42,15 @@ class DirectorOutputDto:
      macros: list[Macro]
      lookups: list[Lookup]
      deployments: list[Deployment]
+     dashboards: list[Dashboard]
+ 
      data_sources: list[DataSource]
      name_to_content_map: dict[str, SecurityContentObject] = field(default_factory=dict)
      uuid_to_content_map: dict[UUID, SecurityContentObject] = field(default_factory=dict)
 
      def addContentToDictMappings(self, content: SecurityContentObject):
          content_name = content.name
-         if isinstance(content, SSADetection):
-             # Since SSA detections may have the same name as ESCU detection,
-             # for this function we prepend 'SSA ' to the name.
-             content_name = f"SSA {content_name}"
+ 
 
          if content_name in self.name_to_content_map:
              raise ValueError(

@@ -95,6 +82,9 @@ class DirectorOutputDto:
              self.stories.append(content)
          elif isinstance(content, Detection):
              self.detections.append(content)
+         elif isinstance(content, Dashboard):
+             self.dashboards.append(content)
+ 
          elif isinstance(content, DataSource):
              self.data_sources.append(content)
          else:

@@ -122,7 +112,7 @@ class Director():
          self.createSecurityContent(SecurityContentType.data_sources)
          self.createSecurityContent(SecurityContentType.playbooks)
          self.createSecurityContent(SecurityContentType.detections)
- 
+         self.createSecurityContent(SecurityContentType.dashboards)
 
          from contentctl.objects.abstract_security_content_objects.detection_abstract import MISSING_SOURCES
          if len(MISSING_SOURCES) > 0:

@@ -144,17 +134,18 @@
              SecurityContentType.playbooks,
              SecurityContentType.detections,
              SecurityContentType.data_sources,
+             SecurityContentType.dashboards
          ]:
              files = Utils.get_all_yml_files_from_directory(
                  os.path.join(self.input_dto.path, str(contentType.name))
              )
              security_content_files = [
-                 f for f in files if not f.name.startswith("ssa___")
+                 f for f in files
              ]
          else:
-             raise (Exception(f"Cannot createSecurityContent for unknown product."))
+             raise (Exception(f"Cannot createSecurityContent for unknown product {contentType}."))
 
-         validation_errors = []
+         validation_errors:list[tuple[Path,ValueError]] = []
 
          already_ran = False
          progress_percent = 0

@@ -196,6 +187,10 @@
              elif contentType == SecurityContentType.detections:
                  detection = Detection.model_validate(modelDict, context={"output_dto":self.output_dto, "app":self.input_dto.app})
                  self.output_dto.addContentToDictMappings(detection)
+ 
+             elif contentType == SecurityContentType.dashboards:
+                 dashboard = Dashboard.model_validate(modelDict,context={"output_dto":self.output_dto})
+                 self.output_dto.addContentToDictMappings(dashboard)
 
              elif contentType == SecurityContentType.data_sources:
                  data_source = DataSource.model_validate(
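
Note on this hunk: dashboards become a first-class content type (a dashboards list on DirectorOutputDto, a Dashboard branch in addContentToDictMappings, and a dashboards pass in createSecurityContent), while the SSA-specific handling is dropped. A toy sketch of the isinstance-dispatch pattern involved; the classes below are simplified stand-ins, not the real contentctl objects:

from dataclasses import dataclass, field

class Detection: ...
class Dashboard: ...

@dataclass
class OutputDto:
    detections: list = field(default_factory=list)
    dashboards: list = field(default_factory=list)
    name_to_content_map: dict = field(default_factory=dict)

    def add(self, name: str, content: object) -> None:
        # Names must be unique across every content type
        if name in self.name_to_content_map:
            raise ValueError(f"Duplicate content name '{name}'")
        self.name_to_content_map[name] = content
        # Route the object to the matching typed list
        if isinstance(content, Detection):
            self.detections.append(content)
        elif isinstance(content, Dashboard):  # new content type in 4.4.0
            self.dashboards.append(content)
        else:
            raise ValueError(f"Unknown content type: {type(content).__name__}")

dto = OutputDto()
dto.add("Example Dashboard", Dashboard())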

contentctl/input/new_content_questions.py

@@ -1,7 +1,11 @@
+ from typing import Any
+ from contentctl.objects.enums import DataSource
+ 
+ 
  class NewContentQuestions:
 
      @classmethod
-     def get_questions_detection(self) -> list:
+     def get_questions_detection(cls) -> list[dict[str,Any]]:
          questions = [
              {
                  "type": "text",

@@ -45,46 +49,9 @@
                  'type': 'checkbox',
                  'message': 'Your data source',
                  'name': 'data_source',
-                 'choices': [
-                     "OSQuery ES Process Events",
-                     "Powershell 4104",
-                     "Sysmon Event ID 1",
-                     "Sysmon Event ID 3",
-                     "Sysmon Event ID 5",
-                     "Sysmon Event ID 6",
-                     "Sysmon Event ID 7",
-                     "Sysmon Event ID 8",
-                     "Sysmon Event ID 9",
-                     "Sysmon Event ID 10",
-                     "Sysmon Event ID 11",
-                     "Sysmon Event ID 13",
-                     "Sysmon Event ID 15",
-                     "Sysmon Event ID 20",
-                     "Sysmon Event ID 21",
-                     "Sysmon Event ID 22",
-                     "Sysmon Event ID 23",
-                     "Windows Security 4624",
-                     "Windows Security 4625",
-                     "Windows Security 4648",
-                     "Windows Security 4663",
-                     "Windows Security 4688",
-                     "Windows Security 4698",
-                     "Windows Security 4703",
-                     "Windows Security 4720",
-                     "Windows Security 4732",
-                     "Windows Security 4738",
-                     "Windows Security 4741",
-                     "Windows Security 4742",
-                     "Windows Security 4768",
-                     "Windows Security 4769",
-                     "Windows Security 4771",
-                     "Windows Security 4776",
-                     "Windows Security 4781",
-                     "Windows Security 4798",
-                     "Windows Security 5136",
-                     "Windows Security 5145",
-                     "Windows System 7045"
-                 ]
+                 #In the future, we should dynamically populate this from the DataSource Objects we have parsed from the data_sources directory
+                 'choices': sorted(DataSource._value2member_map_ )
+ 
              },
              {
                  "type": "text",

@@ -116,7 +83,7 @@
          return questions
 
      @classmethod
-     def get_questions_story(self) -> list:
+     def get_questions_story(cls)-> list[dict[str,Any]]:
          questions = [
              {
                  "type": "text",