contentctl 4.3.4__py3-none-any.whl → 4.3.5__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
@@ -1,14 +1,13 @@
 
  from __future__ import annotations
- import csv
- import os
  import sys
  from attackcti import attack_client
  import logging
- from pydantic import BaseModel, Field
+ from pydantic import BaseModel
  from dataclasses import field
- from typing import Annotated,Any
- from contentctl.objects.mitre_attack_enrichment import MitreAttackEnrichment
+ from typing import Any
+ from pathlib import Path
+ from contentctl.objects.mitre_attack_enrichment import MitreAttackEnrichment, MitreTactics
  from contentctl.objects.config import validate
  from contentctl.objects.annotated_types import MITRE_ATTACK_ID_TYPE
  logging.getLogger('taxii2client').setLevel(logging.CRITICAL)
@@ -21,12 +20,12 @@ class AttackEnrichment(BaseModel):
  @staticmethod
  def getAttackEnrichment(config:validate)->AttackEnrichment:
  enrichment = AttackEnrichment(use_enrichment=config.enrichments)
- _ = enrichment.get_attack_lookup(str(config.path))
+ _ = enrichment.get_attack_lookup(config.mitre_cti_repo_path, config.enrichments)
  return enrichment
 
  def getEnrichmentByMitreID(self, mitre_id:MITRE_ATTACK_ID_TYPE)->MitreAttackEnrichment:
  if not self.use_enrichment:
- raise Exception(f"Error, trying to add Mitre Enrichment, but use_enrichment was set to False")
+ raise Exception("Error, trying to add Mitre Enrichment, but use_enrichment was set to False")
 
  enrichment = self.data.get(mitre_id, None)
  if enrichment is not None:
@@ -34,71 +33,69 @@ class AttackEnrichment(BaseModel):
  else:
  raise Exception(f"Error, Unable to find Mitre Enrichment for MitreID {mitre_id}")
 
- def addMitreIDViaGroupNames(self, technique:dict, tactics:list[str], groupNames:list[str])->None:
+ def addMitreIDViaGroupNames(self, technique:dict[str,Any], tactics:list[str], groupNames:list[str])->None:
  technique_id = technique['technique_id']
  technique_obj = technique['technique']
  tactics.sort()
 
  if technique_id in self.data:
  raise Exception(f"Error, trying to redefine MITRE ID '{technique_id}'")
- self.data[technique_id] = MitreAttackEnrichment(mitre_attack_id=technique_id,
- mitre_attack_technique=technique_obj,
- mitre_attack_tactics=tactics,
- mitre_attack_groups=groupNames,
- mitre_attack_group_objects=[])
+ self.data[technique_id] = MitreAttackEnrichment.model_validate({'mitre_attack_id':technique_id,
+ 'mitre_attack_technique':technique_obj,
+ 'mitre_attack_tactics':tactics,
+ 'mitre_attack_groups':groupNames,
+ 'mitre_attack_group_objects':[]})
 
- def addMitreIDViaGroupObjects(self, technique:dict, tactics:list[str], groupObjects:list[dict[str,Any]])->None:
+ def addMitreIDViaGroupObjects(self, technique:dict[str,Any], tactics:list[MitreTactics], groupDicts:list[dict[str,Any]])->None:
  technique_id = technique['technique_id']
  technique_obj = technique['technique']
  tactics.sort()
 
- groupNames:list[str] = sorted([group['group'] for group in groupObjects])
+ groupNames:list[str] = sorted([group['group'] for group in groupDicts])
 
  if technique_id in self.data:
  raise Exception(f"Error, trying to redefine MITRE ID '{technique_id}'")
- self.data[technique_id] = MitreAttackEnrichment(mitre_attack_id=technique_id,
- mitre_attack_technique=technique_obj,
- mitre_attack_tactics=tactics,
- mitre_attack_groups=groupNames,
- mitre_attack_group_objects=groupObjects)
+
+ self.data[technique_id] = MitreAttackEnrichment.model_validate({'mitre_attack_id': technique_id,
+ 'mitre_attack_technique': technique_obj,
+ 'mitre_attack_tactics': tactics,
+ 'mitre_attack_groups': groupNames,
+ 'mitre_attack_group_objects': groupDicts})
 
 
- def get_attack_lookup(self, input_path: str, store_csv: bool = False, force_cached_or_offline: bool = False, skip_enrichment:bool = False) -> dict:
- if not self.use_enrichment:
- return {}
- print("Getting MITRE Attack Enrichment Data. This may take some time...")
- attack_lookup = dict()
- file_path = os.path.join(input_path, "app_template", "lookups", "mitre_enrichment.csv")
-
- if skip_enrichment is True:
- print("Skipping enrichment")
+ def get_attack_lookup(self, input_path: Path, enrichments:bool = False) -> dict[str,MitreAttackEnrichment]:
+ attack_lookup:dict[str,MitreAttackEnrichment] = {}
+ if not enrichments:
  return attack_lookup
+
  try:
-
- if force_cached_or_offline is True:
- raise(Exception("WARNING - Using cached MITRE Attack Enrichment. Attack Enrichment may be out of date. Only use this setting for offline environments and development purposes."))
- print(f"\r{'Client'.rjust(23)}: [{0:3.0f}%]...", end="", flush=True)
- lift = attack_client()
- print(f"\r{'Client'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
+ print(f"Performing MITRE Enrichment using the repository at {input_path}...",end="", flush=True)
+ # The existence of the input_path is validated during cli argument validation, but it is
+ # possible that the repo is in the wrong format. If the following directories do not
+ # exist, then attack_client will fall back to resolving via REST API. We do not
+ # want this as it is slow and error prone, so we will force an exception to
+ # be generated.
+ enterprise_path = input_path/"enterprise-attack"
+ mobile_path = input_path/"ics-attack"
+ ics_path = input_path/"mobile-attack"
+ if not (enterprise_path.is_dir() and mobile_path.is_dir() and ics_path.is_dir()):
+ raise FileNotFoundError("One or more of the following paths does not exist: "
+ f"{[str(enterprise_path),str(mobile_path),str(ics_path)]}. "
+ f"Please ensure that the {input_path} directory "
+ "has been git cloned correctly.")
+ lift = attack_client(
+ local_paths= {
+ "enterprise":str(enterprise_path),
+ "mobile":str(mobile_path),
+ "ics":str(ics_path)
+ }
+ )
 
- print(f"\r{'Techniques'.rjust(23)}: [{0.0:3.0f}%]...", end="", flush=True)
  all_enterprise_techniques = lift.get_enterprise_techniques(stix_format=False)
-
- print(f"\r{'Techniques'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
- print(f"\r{'Relationships'.rjust(23)}: [{0.0:3.0f}%]...", end="", flush=True)
  enterprise_relationships = lift.get_enterprise_relationships(stix_format=False)
- print(f"\r{'Relationships'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
- print(f"\r{'Groups'.rjust(23)}: [{0:3.0f}%]...", end="", flush=True)
  enterprise_groups = lift.get_enterprise_groups(stix_format=False)
- print(f"\r{'Groups'.rjust(23)}: [{100:3.0f}%]...Done!", end="\n", flush=True)
-
 
- for index, technique in enumerate(all_enterprise_techniques):
- progress_percent = ((index+1)/len(all_enterprise_techniques)) * 100
- if (sys.stdout.isatty() and sys.stdin.isatty() and sys.stderr.isatty()):
- print(f"\r\t{'MITRE Technique Progress'.rjust(23)}: [{progress_percent:3.0f}%]...", end="", flush=True)
+ for technique in all_enterprise_techniques:
  apt_groups:list[dict[str,Any]] = []
  for relationship in enterprise_relationships:
  if (relationship['target_object'] == technique['id']) and relationship['source_object'].startswith('intrusion-set'):
@@ -115,39 +112,10 @@ class AttackEnrichment(BaseModel):
  self.addMitreIDViaGroupObjects(technique, tactics, apt_groups)
  attack_lookup[technique['technique_id']] = {'technique': technique['technique'], 'tactics': tactics, 'groups': apt_groups}
 
- if store_csv:
- f = open(file_path, 'w')
- writer = csv.writer(f)
- writer.writerow(['mitre_id', 'technique', 'tactics' ,'groups'])
- for key in attack_lookup.keys():
- if len(attack_lookup[key]['groups']) == 0:
- groups = 'no'
- else:
- groups = '|'.join(attack_lookup[key]['groups'])
-
- writer.writerow([
- key,
- attack_lookup[key]['technique'],
- '|'.join(attack_lookup[key]['tactics']),
- groups
- ])
-
- f.close()
 
+
  except Exception as err:
- print(f'\nError: {str(err)}')
- print('Use local copy app_template/lookups/mitre_enrichment.csv')
- with open(file_path, mode='r') as inp:
- reader = csv.reader(inp)
- attack_lookup = {rows[0]:{'technique': rows[1], 'tactics': rows[2].split('|'), 'groups': rows[3].split('|')} for rows in reader}
- attack_lookup.pop('mitre_id')
- for key in attack_lookup.keys():
- technique_input = {'technique_id': key , 'technique': attack_lookup[key]['technique'] }
- tactics_input = attack_lookup[key]['tactics']
- groups_input = attack_lookup[key]['groups']
- self.addMitreIDViaGroupNames(technique=technique_input, tactics=tactics_input, groups=groups_input)
-
-
-
+ raise Exception(f"Error getting MITRE Enrichment: {str(err)}")
+
  print("Done!")
  return attack_lookup
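
The reworked get_attack_lookup above drops the CSV cache entirely: it requires a local clone of the MITRE CTI repository and raises if the expected enterprise-attack, mobile-attack, and ics-attack directories are missing. A minimal usage sketch (illustrative, not part of the package), assuming `config` is contentctl's `validate` object with `enrichments=True` and `mitre_cti_repo_path` pointing at a `git clone` of https://github.com/mitre/cti:

    # Illustrative only; `config` construction is handled by contentctl's CLI and is not shown here.
    from contentctl.enrichments.attack_enrichment import AttackEnrichment  # import path assumed

    enrichment = AttackEnrichment.getAttackEnrichment(config)

    # Raises if the ID is unknown or if enrichment was disabled.
    cred_dumping = enrichment.getEnrichmentByMitreID("T1003.001")
    print(cred_dumping.mitre_attack_tactics)
    print(cred_dumping.mitre_attack_groups)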
@@ -1,20 +1,20 @@
- import os
- import time
  import json
+ from typing import Optional, Collection
+ from pathlib import Path
  import xml.etree.ElementTree as ET
- from typing import List, Tuple, Optional
  from urllib.parse import urlencode
 
  import requests
  import urllib3
  import xmltodict
  from requests.adapters import HTTPAdapter
- from requests.packages.urllib3.util.retry import Retry
+ from urllib3.util.retry import Retry
 
  urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
 
  MAX_RETRY = 3
 
+
  class APIEndPoint:
  """
  Class which contains Static Endpoint
@@ -27,6 +27,7 @@ class APIEndPoint:
  SPLUNK_BASE_GET_UID_REDIRECT = "https://apps.splunk.com/apps/id/{app_name_id}"
  SPLUNK_BASE_APP_INFO = "https://splunkbase.splunk.com/api/v1/app/{app_uid}"
 
+
  class RetryConstant:
  """
  Class which contains Retry Constant
@@ -53,11 +54,11 @@ class SplunkApp:
 
  @staticmethod
  def requests_retry_session(
- retries=RetryConstant.RETRY_COUNT,
- backoff_factor=1,
- status_forcelist=(500, 502, 503, 504),
- session=None,
- ):
+ retries: int = RetryConstant.RETRY_COUNT,
+ backoff_factor: int = 1,
+ status_forcelist: Collection[int] = (500, 502, 503, 504),
+ session: requests.Session | None = None,
+ ) -> requests.Session:
  session = session or requests.Session()
  retry = Retry(
  total=retries,
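
The only functional change in this hunk is that Retry is now imported from urllib3.util.retry (the requests.packages.urllib3 path is a deprecated re-export) and the helper gains type annotations. For reference, a standalone sketch of the same retry-session pattern (illustrative, not the package's code):

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry  # canonical module; requests.packages.* is a legacy alias

    def retry_session(retries: int = 3, backoff_factor: int = 1) -> requests.Session:
        # Retry transient 5xx responses with exponential backoff on every mounted scheme.
        session = requests.Session()
        retry = Retry(
            total=retries,
            backoff_factor=backoff_factor,
            status_forcelist=(500, 502, 503, 504),
        )
        adapter = HTTPAdapter(max_retries=retry)
        session.mount("http://", adapter)
        session.mount("https://", adapter)
        return session

    # e.g. retry_session().get("https://splunkbase.splunk.com", timeout=30)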
@@ -260,4 +261,134 @@ class SplunkApp:
 
  # parse out the version number and fetch the download URL
  self.latest_version = info_url.split("/")[-1]
- self.latest_version_download_url = self.__fetch_url_latest_version_download(info_url)
+ self.latest_version_download_url = self.__fetch_url_latest_version_download(info_url)
+
+ def __get_splunk_base_session_token(self, username: str, password: str) -> str:
+ """
+ This method will generate Splunk base session token
+
+ :param username: Splunkbase username
+ :type username: str
+ :param password: Splunkbase password
+ :type password: str
+
+ :return: Splunk base session token
+ :rtype: str
+ """
+ # Data payload for fetch splunk base session token
+ payload = urlencode(
+ {
+ "username": username,
+ "password": password,
+ }
+ )
+
+ headers = {
+ "content-type": "application/x-www-form-urlencoded",
+ "cache-control": "no-cache",
+ }
+
+ response = requests.request(
+ "POST",
+ APIEndPoint.SPLUNK_BASE_AUTH_URL,
+ data=payload,
+ headers=headers,
+ )
+
+ token_value = ""
+
+ if response.status_code != 200:
+ msg = (
+ f"Error occurred while executing the rest call for splunk base authentication api,"
+ f"{response.content}"
+ )
+ raise Exception(msg)
+ else:
+ root = ET.fromstring(response.content)
+ token_value = root.find("{http://www.w3.org/2005/Atom}id").text.strip()
+ return token_value
+
+ def download(
+ self,
+ out: Path,
+ username: str,
+ password: str,
+ is_dir: bool = False,
+ overwrite: bool = False
+ ) -> Path:
+ """
+ Given an output path, download the app to the specified location
+
+ :param out: the Path to download the app to
+ :type out: :class:`pathlib.Path`
+ :param username: Splunkbase username
+ :type username: str
+ :param password: Splunkbase password
+ :type password: str
+ :param is_dir: a flag indicating whether out is directory, otherwise a file (default: False)
+ :type is_dir: bool
+ :param overwrite: a flag indicating whether we can overwrite the file at out or not
+ :type overwrite: bool
+
+ :returns path: the Path the download was written to (needed when is_dir is True)
+ :rtype: :class:`pathlib.Path`
+ """
+ # Get the Splunkbase session token
+ token = self.__get_splunk_base_session_token(username, password)
+ response = requests.request(
+ "GET",
+ self.latest_version_download_url,
+ cookies={
+ "sessionid": token
+ }
+ )
+
+ # If the provided output path was a directory we need to try and pull the filename from the
+ # response headers
+ if is_dir:
+ try:
+ # Pull 'Content-Disposition' from the headers
+ content_disposition: str = response.headers['Content-Disposition']
+
+ # Attempt to parse the filename as a KV
+ key, value = content_disposition.strip().split("=")
+ if key != "attachment;filename":
+ raise ValueError(f"Unexpected key in 'Content-Disposition' KV pair: {key}")
+
+ # Validate the filename is the expected .tgz file
+ filename = Path(value.strip().strip('"'))
+ if filename.suffixes != [".tgz"]:
+ raise ValueError(f"Filename has unexpected extension(s): {filename.suffixes}")
+ out = Path(out, filename)
+ except KeyError as e:
+ raise KeyError(
+ f"Unable to properly extract 'Content-Disposition' from response headers: {e}"
+ ) from e
+ except ValueError as e:
+ raise ValueError(
+ f"Unable to parse filename from 'Content-Disposition' header: {e}"
+ ) from e
+
+ # Ensure the output path is not already occupied
+ if out.exists() and not overwrite:
+ msg = (
+ f"File already exists at {out}, cannot download the app."
+ )
+ raise Exception(msg)
+
+ # Make any parent directories as needed
+ out.parent.mkdir(parents=True, exist_ok=True)
+
+ # Check for HTTP errors
+ if response.status_code != 200:
+ msg = (
+ f"Error occurred while executing the rest call for splunk base authentication api,"
+ f"{response.content}"
+ )
+ raise Exception(msg)
+
+ # Write the app to disk
+ with open(out, "wb") as file:
+ file.write(response.content)
+
+ return out
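
The new download method authenticates against Splunkbase, fetches the latest release of the app, and, when is_dir is True, derives the .tgz filename from the Content-Disposition response header. A hedged usage sketch, assuming `app` is an already-constructed SplunkApp (its constructor is outside this diff) and that the hypothetical SPLUNKBASE_USERNAME and SPLUNKBASE_PASSWORD environment variables hold valid credentials:

    import os
    from pathlib import Path

    # `app` is assumed to be a SplunkApp whose latest_version_download_url has been resolved.
    downloaded = app.download(
        out=Path("downloaded_apps"),                 # treated as a directory because is_dir=True
        username=os.environ["SPLUNKBASE_USERNAME"],  # hypothetical variable names
        password=os.environ["SPLUNKBASE_PASSWORD"],
        is_dir=True,
        overwrite=True,                              # allow replacing an existing file
    )
    print(f"App written to {downloaded}")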
@@ -18,8 +18,7 @@ from contentctl.objects.playbook import Playbook
  from contentctl.objects.deployment import Deployment
  from contentctl.objects.macro import Macro
  from contentctl.objects.lookup import Lookup
- from contentctl.objects.ssa_detection import SSADetection
- from contentctl.objects.atomic import AtomicTest
+ from contentctl.objects.atomic import AtomicEnrichment
  from contentctl.objects.security_content_object import SecurityContentObject
  from contentctl.objects.data_source import DataSource
  from contentctl.objects.event_source import EventSource
@@ -33,17 +32,14 @@ from contentctl.objects.enums import SecurityContentType
  from contentctl.objects.enums import DetectionStatus
  from contentctl.helper.utils import Utils
 
- from contentctl.objects.enums import SecurityContentType
 
- from contentctl.objects.enums import DetectionStatus
- from contentctl.helper.utils import Utils
 
 
  @dataclass
  class DirectorOutputDto:
  # Atomic Tests are first because parsing them
  # is far quicker than attack_enrichment
- atomic_tests: None | list[AtomicTest]
+ atomic_enrichment: AtomicEnrichment
  attack_enrichment: AttackEnrichment
  cve_enrichment: CveEnrichment
  detections: list[Detection]
@@ -60,10 +56,7 @@ class DirectorOutputDto:
 
  def addContentToDictMappings(self, content: SecurityContentObject):
  content_name = content.name
- if isinstance(content, SSADetection):
- # Since SSA detections may have the same name as ESCU detection,
- # for this function we prepend 'SSA ' to the name.
- content_name = f"SSA {content_name}"
+
 
  if content_name in self.name_to_content_map:
  raise ValueError(
@@ -149,10 +142,10 @@ class Director():
  os.path.join(self.input_dto.path, str(contentType.name))
  )
  security_content_files = [
- f for f in files if not f.name.startswith("ssa___")
+ f for f in files
  ]
  else:
- raise (Exception(f"Cannot createSecurityContent for unknown product."))
+ raise (Exception(f"Cannot createSecurityContent for unknown product {contentType}."))
 
  validation_errors = []
 
@@ -83,15 +83,13 @@ class Detection_Abstract(SecurityContentObject):
 
 
  Args:
- value (Union[str, dict[str,Any]]): The search. It can either be a string (and should be
- SPL or a dict, in which case it is Sigma-formatted.
+ value (str): The SPL search. It must be an SPL-formatted string.
  info (ValidationInfo): The validation info can contain a number of different objects.
  Today it only contains the director.
 
  Returns:
- Union[str, dict[str,Any]]: The search, either in sigma or SPL format.
- """
-
+ str: The search, as an SPL formatted string.
+ """
 
  # Otherwise, the search is SPL.
 
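
With the Sigma dict form removed, the validated search value is always an SPL string. A minimal sketch of the narrowed contract (not contentctl's actual validator), assuming Pydantic v2:

    from pydantic import BaseModel, field_validator

    class SearchModel(BaseModel):
        # Hypothetical stand-in for the detection's `search` field.
        search: str

        @field_validator("search", mode="before")
        @classmethod
        def search_must_be_spl(cls, value: object) -> str:
            # A dict used to mean a Sigma-formatted search; that path no longer exists.
            if not isinstance(value, str):
                raise ValueError("search must be an SPL-formatted string")
            return value

    SearchModel(search="| tstats count from datamodel=Endpoint.Processes")  # ok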
@@ -390,7 +388,11 @@ class Detection_Abstract(SecurityContentObject):
  # NOTE: we ignore the type error around self.status because we are using Pydantic's
  # use_enum_values configuration
  # https://docs.pydantic.dev/latest/api/config/#pydantic.config.ConfigDict.populate_by_name
-
+
+ # NOTE: The `inspect` action is HIGHLY sensitive to the structure of the metadata line in
+ # the detection stanza in savedsearches.conf. Additive operations (e.g. a new field in the
+ # dict below) should not have any impact, but renaming or removing any of these fields will
+ # break the `inspect` action.
  return {
  'detection_id': str(self.id),
  'deprecated': '1' if self.status == DetectionStatus.deprecated.value else '0', # type: ignore
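
Because the inspect action parses this metadata line by key, a small guard in a test suite can catch accidental renames before they reach Splunk. A hypothetical check (not part of contentctl), pinning only the two keys visible in this hunk:

    # Hypothetical guard; the real metadata dict contains more keys than are shown in this hunk.
    REQUIRED_METADATA_KEYS = {"detection_id", "deprecated"}

    def assert_inspect_compatible(metadata: dict[str, str]) -> None:
        missing = REQUIRED_METADATA_KEYS - metadata.keys()
        if missing:
            raise AssertionError(
                f"metadata is missing keys the inspect action depends on: {sorted(missing)}"
            )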
@@ -1,12 +1,15 @@
  from __future__ import annotations
+ from typing import TYPE_CHECKING
+ if TYPE_CHECKING:
+ from contentctl.objects.config import validate
+
  from contentctl.input.yml_reader import YmlReader
  from pydantic import BaseModel, model_validator, ConfigDict, FilePath, UUID4
+ import dataclasses
  from typing import List, Optional, Dict, Union, Self
  import pathlib
-
-
  from enum import StrEnum, auto
-
+ import uuid
 
  class SupportedPlatform(StrEnum):
  windows = auto()
@@ -84,15 +87,6 @@ class AtomicTest(BaseModel):
  dependencies: Optional[List[AtomicDependency]] = None
  dependency_executor_name: Optional[DependencyExecutorType] = None
 
- @staticmethod
- def AtomicTestWhenEnrichmentIsDisabled(auto_generated_guid: UUID4) -> AtomicTest:
- return AtomicTest(name="Placeholder Atomic Test (enrichment disabled)",
- auto_generated_guid=auto_generated_guid,
- description="This is a placeholder AtomicTest. Because enrichments were not enabled, it has not been validated against the real Atomic Red Team Repo.",
- supported_platforms=[],
- executor=AtomicExecutor(name="Placeholder Executor (enrichment disabled)",
- command="Placeholder command (enrichment disabled)"))
-
  @staticmethod
  def AtomicTestWhenTestIsMissing(auto_generated_guid: UUID4) -> AtomicTest:
  return AtomicTest(name="Missing Atomic",
@@ -100,31 +94,16 @@ class AtomicTest(BaseModel):
  description="This is a placeholder AtomicTest. Either the auto_generated_guid is incorrect or it there was an exception while parsing its AtomicFile.",
  supported_platforms=[],
  executor=AtomicExecutor(name="Placeholder Executor (failed to find auto_generated_guid)",
- command="Placeholder command (failed to find auto_generated_guid)"))
-
-
- @classmethod
- def getAtomicByAtomicGuid(cls, guid: UUID4, all_atomics:list[AtomicTest] | None)->AtomicTest:
- if all_atomics is None:
- return AtomicTest.AtomicTestWhenEnrichmentIsDisabled(guid)
- matching_atomics = [atomic for atomic in all_atomics if atomic.auto_generated_guid == guid]
- if len(matching_atomics) == 0:
- raise ValueError(f"Unable to find atomic_guid {guid} in {len(all_atomics)} atomic_tests from ART Repo")
- elif len(matching_atomics) > 1:
- raise ValueError(f"Found {len(matching_atomics)} matching tests for atomic_guid {guid} in {len(all_atomics)} atomic_tests from ART Repo")
-
- return matching_atomics[0]
+ command="Placeholder command (failed to find auto_generated_guid)"))
 
  @classmethod
- def parseArtRepo(cls, repo_path:pathlib.Path)->List[AtomicFile]:
- if not repo_path.is_dir():
- print(f"WARNING: Atomic Red Team repo does NOT exist at {repo_path.absolute()}. You can check it out with:\n * git clone --single-branch https://github.com/redcanaryco/atomic-red-team. This will ONLY throw a validation error if you reference atomid_guids in your detection(s).")
- return []
+ def parseArtRepo(cls, repo_path:pathlib.Path)->dict[uuid.UUID, AtomicTest]:
+ test_mapping: dict[uuid.UUID, AtomicTest] = {}
  atomics_path = repo_path/"atomics"
  if not atomics_path.is_dir():
- print(f"WARNING: Atomic Red Team repo exists at {repo_path.absolute}, but atomics directory does NOT exist at {atomics_path.absolute()}. Was it deleted or renamed? This will ONLY throw a validation error if you reference atomid_guids in your detection(s).")
- return []
-
+ raise FileNotFoundError(f"WARNING: Atomic Red Team repo exists at {repo_path}, "
+ f"but atomics directory does NOT exist at {atomics_path}. "
+ "Was it deleted or renamed?")
 
  atomic_files:List[AtomicFile] = []
  error_messages:List[str] = []
@@ -133,6 +112,7 @@ class AtomicTest(BaseModel):
  atomic_files.append(cls.constructAtomicFile(obj_path))
  except Exception as e:
  error_messages.append(f"File [{obj_path}]\n{str(e)}")
+
  if len(error_messages) > 0:
  exceptions_string = '\n\n'.join(error_messages)
  print(f"WARNING: The following [{len(error_messages)}] ERRORS were generated when parsing the Atomic Red Team Repo.\n"
@@ -140,38 +120,28 @@ class AtomicTest(BaseModel):
  "Note that this is only a warning and contentctl will ignore Atomics contained in these files.\n"
  f"However, if you have written a detection that references them, 'contentctl build --enrichments' will fail:\n\n{exceptions_string}")
 
- return atomic_files
+ # Now iterate over all the files, collect all the tests, and return the dict mapping
+ redefined_guids:set[uuid.UUID] = set()
+ for atomic_file in atomic_files:
+ for atomic_test in atomic_file.atomic_tests:
+ if atomic_test.auto_generated_guid in test_mapping:
+ redefined_guids.add(atomic_test.auto_generated_guid)
+ else:
+ test_mapping[atomic_test.auto_generated_guid] = atomic_test
+ if len(redefined_guids) > 0:
+ guids_string = '\n\t'.join([str(guid) for guid in redefined_guids])
+ raise Exception(f"The following [{len(redefined_guids)}] Atomic Test"
+ " auto_generated_guid(s) were defined more than once. "
+ f"auto_generated_guids MUST be unique:\n\t{guids_string}")
+
+ print(f"Successfully parsed [{len(test_mapping)}] Atomic Red Team Tests!")
+ return test_mapping
 
  @classmethod
  def constructAtomicFile(cls, file_path:pathlib.Path)->AtomicFile:
  yml_dict = YmlReader.load_file(file_path)
  atomic_file = AtomicFile.model_validate(yml_dict)
  return atomic_file
-
- @classmethod
- def getAtomicTestsFromArtRepo(cls, repo_path:pathlib.Path, enabled:bool=True)->list[AtomicTest] | None:
- # Get all the atomic files. Note that if the ART repo is not found, we will not throw an error,
- # but will not have any atomics. This means that if atomic_guids are referenced during validation,
- # validation for those detections will fail
- if not enabled:
- return None
-
- atomic_files = cls.getAtomicFilesFromArtRepo(repo_path)
-
- atomic_tests:List[AtomicTest] = []
- for atomic_file in atomic_files:
- atomic_tests.extend(atomic_file.atomic_tests)
- print(f"Found [{len(atomic_tests)}] Atomic Simulations in the Atomic Red Team Repo!")
- return atomic_tests
-
-
- @classmethod
- def getAtomicFilesFromArtRepo(cls, repo_path:pathlib.Path)->List[AtomicFile]:
- return cls.parseArtRepo(repo_path)
-
-
-
-
 
 
  class AtomicFile(BaseModel):
@@ -182,27 +152,31 @@ class AtomicFile(BaseModel):
  atomic_tests: List[AtomicTest]
 
 
+ class AtomicEnrichment(BaseModel):
+ data: dict[uuid.UUID,AtomicTest] = dataclasses.field(default_factory = dict)
+ use_enrichment: bool = False
 
+ @classmethod
+ def getAtomicEnrichment(cls, config:validate)->AtomicEnrichment:
+ enrichment = AtomicEnrichment(use_enrichment=config.enrichments)
+ if config.enrichments:
+ enrichment.data = AtomicTest.parseArtRepo(config.atomic_red_team_repo_path)
+
+ return enrichment
+
+ def getAtomic(self, atomic_guid: uuid.UUID)->AtomicTest:
+ if self.use_enrichment:
+ if atomic_guid in self.data:
+ return self.data[atomic_guid]
+ else:
+ raise Exception(f"Atomic with GUID {atomic_guid} not found.")
+ else:
+ # If enrichment is not enabled, for the sake of compatability
+ # return a stub test with no useful or meaningful information.
+ return AtomicTest.AtomicTestWhenTestIsMissing(atomic_guid)
 
- # ATOMICS_PATH = pathlib.Path("./atomics")
- # atomic_objects = []
- # atomic_simulations = []
- # for obj_path in ATOMICS_PATH.glob("**/T*.yaml"):
- # try:
- # with open(obj_path, 'r', encoding="utf-8") as obj_handle:
- # obj_data = yaml.load(obj_handle, Loader=yaml.CSafeLoader)
- # atomic_obj = AtomicFile.model_validate(obj_data)
- # except Exception as e:
- # print(f"Error parsing object at path {obj_path}: {str(e)}")
- # print(f"We have successfully parsed {len(atomic_objects)}, however!")
- # sys.exit(1)
-
- # print(f"Successfully parsed {obj_path}!")
- # atomic_objects.append(atomic_obj)
- # atomic_simulations += atomic_obj.atomic_tests
+
 
- # print(f"Successfully parsed all {len(atomic_objects)} files!")
- # print(f"Successfully parsed all {len(atomic_simulations)} simulations!")
 
 
 
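
The new AtomicEnrichment model replaces the old list-based helpers: parseArtRepo now returns a GUID-keyed mapping and duplicate auto_generated_guids are a hard error. When enrichment is disabled every lookup returns the "Missing Atomic" stub; when it is enabled, an unknown GUID raises. A usage sketch (illustrative, not part of the package), assuming `config` is contentctl's `validate` object with `enrichments=True` and `atomic_red_team_repo_path` pointing at a clone of https://github.com/redcanaryco/atomic-red-team:

    import uuid

    from contentctl.objects.atomic import AtomicEnrichment

    # Parse the ART repo once; with enrichments disabled the mapping stays empty
    # and getAtomic() returns the "Missing Atomic" stub instead of raising.
    atomic_enrichment = AtomicEnrichment.getAtomicEnrichment(config)

    # Hypothetical GUID, as referenced by a detection's atomic_guid list.
    guid = uuid.UUID("00000000-0000-0000-0000-000000000000")
    test = atomic_enrichment.getAtomic(guid)
    print(test.name, test.executor.name)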