contentctl 4.4.7__py3-none-any.whl → 5.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. contentctl/__init__.py +1 -1
  2. contentctl/actions/build.py +102 -57
  3. contentctl/actions/deploy_acs.py +29 -24
  4. contentctl/actions/detection_testing/DetectionTestingManager.py +66 -42
  5. contentctl/actions/detection_testing/GitService.py +134 -76
  6. contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
  7. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +192 -147
  8. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
  9. contentctl/actions/detection_testing/progress_bar.py +9 -6
  10. contentctl/actions/detection_testing/views/DetectionTestingView.py +16 -19
  11. contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
  12. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
  13. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
  14. contentctl/actions/doc_gen.py +9 -5
  15. contentctl/actions/initialize.py +45 -33
  16. contentctl/actions/inspect.py +118 -61
  17. contentctl/actions/new_content.py +155 -108
  18. contentctl/actions/release_notes.py +276 -146
  19. contentctl/actions/reporting.py +23 -19
  20. contentctl/actions/test.py +33 -28
  21. contentctl/actions/validate.py +55 -34
  22. contentctl/api.py +54 -45
  23. contentctl/contentctl.py +124 -90
  24. contentctl/enrichments/attack_enrichment.py +112 -72
  25. contentctl/enrichments/cve_enrichment.py +34 -28
  26. contentctl/enrichments/splunk_app_enrichment.py +38 -36
  27. contentctl/helper/link_validator.py +101 -78
  28. contentctl/helper/splunk_app.py +69 -41
  29. contentctl/helper/utils.py +58 -53
  30. contentctl/input/director.py +68 -36
  31. contentctl/input/new_content_questions.py +27 -35
  32. contentctl/input/yml_reader.py +28 -18
  33. contentctl/objects/abstract_security_content_objects/detection_abstract.py +303 -259
  34. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +115 -52
  35. contentctl/objects/alert_action.py +10 -9
  36. contentctl/objects/annotated_types.py +1 -1
  37. contentctl/objects/atomic.py +65 -54
  38. contentctl/objects/base_test.py +5 -3
  39. contentctl/objects/base_test_result.py +19 -11
  40. contentctl/objects/baseline.py +62 -30
  41. contentctl/objects/baseline_tags.py +30 -24
  42. contentctl/objects/config.py +790 -597
  43. contentctl/objects/constants.py +33 -56
  44. contentctl/objects/correlation_search.py +150 -136
  45. contentctl/objects/dashboard.py +55 -41
  46. contentctl/objects/data_source.py +16 -17
  47. contentctl/objects/deployment.py +43 -44
  48. contentctl/objects/deployment_email.py +3 -2
  49. contentctl/objects/deployment_notable.py +4 -2
  50. contentctl/objects/deployment_phantom.py +7 -6
  51. contentctl/objects/deployment_rba.py +3 -2
  52. contentctl/objects/deployment_scheduling.py +3 -2
  53. contentctl/objects/deployment_slack.py +3 -2
  54. contentctl/objects/detection.py +5 -2
  55. contentctl/objects/detection_metadata.py +1 -0
  56. contentctl/objects/detection_stanza.py +7 -2
  57. contentctl/objects/detection_tags.py +58 -103
  58. contentctl/objects/drilldown.py +66 -34
  59. contentctl/objects/enums.py +81 -100
  60. contentctl/objects/errors.py +16 -24
  61. contentctl/objects/integration_test.py +3 -3
  62. contentctl/objects/integration_test_result.py +1 -0
  63. contentctl/objects/investigation.py +59 -36
  64. contentctl/objects/investigation_tags.py +30 -19
  65. contentctl/objects/lookup.py +304 -101
  66. contentctl/objects/macro.py +55 -39
  67. contentctl/objects/manual_test.py +3 -3
  68. contentctl/objects/manual_test_result.py +1 -0
  69. contentctl/objects/mitre_attack_enrichment.py +17 -16
  70. contentctl/objects/notable_action.py +2 -1
  71. contentctl/objects/notable_event.py +1 -3
  72. contentctl/objects/playbook.py +37 -35
  73. contentctl/objects/playbook_tags.py +23 -13
  74. contentctl/objects/rba.py +96 -0
  75. contentctl/objects/risk_analysis_action.py +15 -11
  76. contentctl/objects/risk_event.py +110 -160
  77. contentctl/objects/risk_object.py +1 -0
  78. contentctl/objects/savedsearches_conf.py +9 -7
  79. contentctl/objects/security_content_object.py +5 -2
  80. contentctl/objects/story.py +54 -49
  81. contentctl/objects/story_tags.py +56 -45
  82. contentctl/objects/test_attack_data.py +2 -1
  83. contentctl/objects/test_group.py +5 -2
  84. contentctl/objects/threat_object.py +1 -0
  85. contentctl/objects/throttling.py +27 -18
  86. contentctl/objects/unit_test.py +3 -4
  87. contentctl/objects/unit_test_baseline.py +5 -5
  88. contentctl/objects/unit_test_result.py +6 -6
  89. contentctl/output/api_json_output.py +233 -220
  90. contentctl/output/attack_nav_output.py +21 -21
  91. contentctl/output/attack_nav_writer.py +29 -37
  92. contentctl/output/conf_output.py +235 -172
  93. contentctl/output/conf_writer.py +201 -125
  94. contentctl/output/data_source_writer.py +38 -26
  95. contentctl/output/doc_md_output.py +53 -27
  96. contentctl/output/jinja_writer.py +19 -15
  97. contentctl/output/json_writer.py +21 -11
  98. contentctl/output/svg_output.py +56 -38
  99. contentctl/output/templates/analyticstories_detections.j2 +2 -2
  100. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  101. contentctl/output/templates/collections.j2 +1 -1
  102. contentctl/output/templates/doc_detections.j2 +0 -5
  103. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  104. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  105. contentctl/output/templates/savedsearches_baselines.j2 +2 -2
  106. contentctl/output/templates/savedsearches_detections.j2 +10 -11
  107. contentctl/output/templates/savedsearches_investigations.j2 +2 -2
  108. contentctl/output/templates/transforms.j2 +6 -8
  109. contentctl/output/yml_writer.py +29 -20
  110. contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +16 -34
  111. contentctl/templates/stories/cobalt_strike.yml +1 -0
  112. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/METADATA +5 -4
  113. contentctl-5.0.0.dist-info/RECORD +168 -0
  114. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/WHEEL +1 -1
  115. contentctl/actions/initialize_old.py +0 -245
  116. contentctl/objects/event_source.py +0 -11
  117. contentctl/objects/observable.py +0 -37
  118. contentctl/output/detection_writer.py +0 -28
  119. contentctl/output/new_content_yml_output.py +0 -56
  120. contentctl/output/yml_output.py +0 -66
  121. contentctl-4.4.7.dist-info/RECORD +0 -173
  122. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/LICENSE.md +0 -0
  123. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/entry_points.txt +0 -0
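The added/removed portion of the inventory above can be reproduced locally by comparing the member lists of the two wheels. A minimal sketch using only the standard library; the local wheel filenames are assumptions, so download both wheels from PyPI and adjust the paths first.

import zipfile

# Compare the file manifests of the two wheels (paths are assumed local downloads).
old = zipfile.ZipFile("contentctl-4.4.7-py3-none-any.whl")
new = zipfile.ZipFile("contentctl-5.0.0-py3-none-any.whl")
old_names, new_names = set(old.namelist()), set(new.namelist())
print("removed:", sorted(old_names - new_names))  # e.g. contentctl/objects/observable.py
print("added:  ", sorted(new_names - old_names))  # e.g. contentctl/objects/rba.py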
contentctl/enrichments/cve_enrichment.py

@@ -1,64 +1,70 @@
 from __future__ import annotations
 from pycvesearch import CVESearch
-import functools
-import os
-import shelve
-import time
-from typing import Annotated, Any, Union, TYPE_CHECKING
-from pydantic import ConfigDict, BaseModel,Field, computed_field
+from typing import Annotated, Union, TYPE_CHECKING
+from pydantic import ConfigDict, BaseModel, Field, computed_field
 from decimal import Decimal
-from requests.exceptions import ReadTimeout
 from contentctl.objects.annotated_types import CVE_TYPE
+
 if TYPE_CHECKING:
     from contentctl.objects.config import validate
 
 
-
-CVESSEARCH_API_URL = 'https://cve.circl.lu'
+CVESSEARCH_API_URL = "https://cve.circl.lu"
 
 
 class CveEnrichmentObj(BaseModel):
     id: CVE_TYPE
-    cvss: Annotated[Decimal, Field(ge=.1, le=10, decimal_places=1)]
+    cvss: Annotated[Decimal, Field(ge=0.1, le=10, decimal_places=1)]
     summary: str
-
+
     @computed_field
     @property
-    def url(self)->str:
+    def url(self) -> str:
         BASE_NVD_URL = "https://nvd.nist.gov/vuln/detail/"
         return f"{BASE_NVD_URL}{self.id}"
 
 
 class CveEnrichment(BaseModel):
     use_enrichment: bool = True
-    cve_api_obj: Union[CVESearch,None] = None
+    cve_api_obj: Union[CVESearch, None] = None
 
     # Arbitrary_types are allowed to let us use the CVESearch Object
-    model_config = ConfigDict(
-        arbitrary_types_allowed=True,
-        frozen=True
-    )
+    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
 
     @staticmethod
-    def getCveEnrichment(config:validate, timeout_seconds:int=10, force_disable_enrichment:bool=True)->CveEnrichment:
+    def getCveEnrichment(
+        config: validate,
+        timeout_seconds: int = 10,
+        force_disable_enrichment: bool = True,
+    ) -> CveEnrichment:
         if force_disable_enrichment:
-            return CveEnrichment(use_enrichment=False, cve_api_obj=None)
-
+            return CveEnrichment(use_enrichment=False, cve_api_obj=None)
+
         if config.enrichments:
             try:
                 cve_api_obj = CVESearch(CVESSEARCH_API_URL, timeout=timeout_seconds)
                 return CveEnrichment(use_enrichment=True, cve_api_obj=cve_api_obj)
             except Exception as e:
-                raise Exception(f"Error setting CVE_SEARCH API to: {CVESSEARCH_API_URL}: {str(e)}")
-
-        return CveEnrichment(use_enrichment=False, cve_api_obj=None)
-
+                raise Exception(
+                    f"Error setting CVE_SEARCH API to: {CVESSEARCH_API_URL}: {str(e)}"
+                )
 
-    def enrich_cve(self, cve_id:str, raise_exception_on_failure:bool=True)->CveEnrichmentObj:
+        return CveEnrichment(use_enrichment=False, cve_api_obj=None)
 
+    def enrich_cve(
+        self, cve_id: str, raise_exception_on_failure: bool = True
+    ) -> CveEnrichmentObj:
         if not self.use_enrichment:
-            return CveEnrichmentObj(id=cve_id,cvss=Decimal(5.0),summary="SUMMARY NOT AVAILABLE! ONLY THE LINK WILL BE USED AT THIS TIME")
+            return CveEnrichmentObj(
+                id=cve_id,
+                cvss=Decimal(5.0),
+                summary="SUMMARY NOT AVAILABLE! ONLY THE LINK WILL BE USED AT THIS TIME",
+            )
         else:
             print("WARNING - Dynamic enrichment not supported at this time.")
-            return CveEnrichmentObj(id=cve_id,cvss=Decimal(5.0),summary="SUMMARY NOT AVAILABLE! ONLY THE LINK WILL BE USED AT THIS TIME")
-            # Depending on needs, we may add dynamic enrichment functionality back to the tool
+            return CveEnrichmentObj(
+                id=cve_id,
+                cvss=Decimal(5.0),
+                summary="SUMMARY NOT AVAILABLE! ONLY THE LINK WILL BE USED AT THIS TIME",
+            )
+            # Depending on needs, we may add dynamic enrichment functionality back to the tool
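The reshaped CveEnrichment API above keeps its two entry points, getCveEnrichment() and enrich_cve(). A minimal sketch of the call shape, assuming enrichment stays disabled so no config object or network access is needed; the CVE id is illustrative.

from decimal import Decimal

from contentctl.enrichments.cve_enrichment import CveEnrichment

# Sketch only: with use_enrichment=False, enrich_cve() returns the placeholder
# record shown in the diff rather than querying cve.circl.lu.
enrichment = CveEnrichment(use_enrichment=False, cve_api_obj=None)
cve = enrichment.enrich_cve("CVE-2021-44228")  # illustrative CVE id
print(cve.url)      # -> https://nvd.nist.gov/vuln/detail/CVE-2021-44228
print(cve.cvss)     # -> 5, the placeholder score used when enrichment is off
print(cve.summary)  # -> "SUMMARY NOT AVAILABLE! ..."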
contentctl/enrichments/splunk_app_enrichment.py

@@ -1,11 +1,8 @@
 import requests
 import xmltodict
-import json
 import functools
-import pickle
 import shelve
 import os
-import time
 
 SPLUNKBASE_API_URL = "https://apps.splunk.com/api/apps/entriesbyid/"
 
@@ -13,15 +10,16 @@ APP_ENRICHMENT_CACHE_FILENAME = "lookups/APP_ENRICHMENT_CACHE.db"
 
 NON_PERSISTENT_CACHE = {}
 
+
 @functools.cache
-def requests_get_helper(url:str, force_cached_or_offline:bool = False)->bytes:
+def requests_get_helper(url: str, force_cached_or_offline: bool = False) -> bytes:
     if force_cached_or_offline:
         if not os.path.exists(APP_ENRICHMENT_CACHE_FILENAME):
             print(f"Cache at {APP_ENRICHMENT_CACHE_FILENAME} not found - Creating it.")
-        cache = shelve.open(APP_ENRICHMENT_CACHE_FILENAME, flag='c', writeback=True)
+        cache = shelve.open(APP_ENRICHMENT_CACHE_FILENAME, flag="c", writeback=True)
     else:
         cache = NON_PERSISTENT_CACHE
-
+
     if url in cache:
         req_content = cache[url]
     else:
@@ -29,62 +27,66 @@ def requests_get_helper(url:str, force_cached_or_offline:bool = False)->bytes:
             req = requests.get(url)
             req_content = req.content
             cache[url] = req_content
-        except Exception as e:
-            raise(Exception(f"ERROR - Failed to get Splunk App Enrichment at {SPLUNKBASE_API_URL}"))
-
+        except Exception:
+            raise (
+                Exception(
+                    f"ERROR - Failed to get Splunk App Enrichment at {SPLUNKBASE_API_URL}"
+                )
+            )
+
     if isinstance(cache, shelve.Shelf):
-        #close the cache if it is a shelf
+        # close the cache if it is a shelf
         cache.close()
-
+
     return req_content
 
-class SplunkAppEnrichment():
 
+class SplunkAppEnrichment:
     @classmethod
-    def enrich_splunk_app(self, splunk_ta: str, force_cached_or_offline: bool = False) -> dict:
-
+    def enrich_splunk_app(
+        self, splunk_ta: str, force_cached_or_offline: bool = False
+    ) -> dict:
         appurl = SPLUNKBASE_API_URL + splunk_ta
         splunk_app_enriched = dict()
-
+
         try:
-
             content = requests_get_helper(appurl, force_cached_or_offline)
             response_dict = xmltodict.parse(content)
-
+
             # check if list since data changes depending on answer
             url, results = self._parse_splunkbase_response(response_dict)
             # grab the app name
             for i in results:
-                if i['@name'] == 'appName':
-                    splunk_app_enriched['name'] = i['#text']
-            # grab out the splunkbase url
-            if 'entriesbyid' in url:
+                if i["@name"] == "appName":
+                    splunk_app_enriched["name"] = i["#text"]
+            # grab out the splunkbase url
+            if "entriesbyid" in url:
                 content = requests_get_helper(url, force_cached_or_offline)
                 response_dict = xmltodict.parse(content)
-
-                #print(json.dumps(response_dict, indent=2))
+
+                # print(json.dumps(response_dict, indent=2))
                 url, results = self._parse_splunkbase_response(response_dict)
                 # chop the url so we grab the splunkbase portion but not direct download
-                splunk_app_enriched['url'] = url.rsplit('/', 4)[0]
+                splunk_app_enriched["url"] = url.rsplit("/", 4)[0]
         except requests.exceptions.ConnectionError as connErr:
             print(f"There was a connErr for ta {splunk_ta}: {connErr}")
             # there was a connection error lets just capture the name
-            splunk_app_enriched['name'] = splunk_ta
-            splunk_app_enriched['url'] = ''
+            splunk_app_enriched["name"] = splunk_ta
+            splunk_app_enriched["url"] = ""
         except Exception as e:
-            print(f"There was an unknown error enriching the Splunk TA [{splunk_ta}]: {str(e)}")
-            splunk_app_enriched['name'] = splunk_ta
-            splunk_app_enriched['url'] = ''
-
+            print(
+                f"There was an unknown error enriching the Splunk TA [{splunk_ta}]: {str(e)}"
+            )
+            splunk_app_enriched["name"] = splunk_ta
+            splunk_app_enriched["url"] = ""
 
         return splunk_app_enriched
 
     def _parse_splunkbase_response(response_dict):
-        if isinstance(response_dict['feed']['entry'], list):
-            url = response_dict['feed']['entry'][0]['link']['@href']
-            results = response_dict['feed']['entry'][0]['content']['s:dict']['s:key']
+        if isinstance(response_dict["feed"]["entry"], list):
+            url = response_dict["feed"]["entry"][0]["link"]["@href"]
+            results = response_dict["feed"]["entry"][0]["content"]["s:dict"]["s:key"]
         else:
-            url = response_dict['feed']['entry']['link']['@href']
-            results = response_dict['feed']['entry']['content']['s:dict']['s:key']
+            url = response_dict["feed"]["entry"]["link"]["@href"]
+            results = response_dict["feed"]["entry"]["content"]["s:dict"]["s:key"]
         return url, results
-
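SplunkAppEnrichment.enrich_splunk_app() keeps its signature through this reformat: it still returns a plain dict with name and url keys and falls back to the TA id plus an empty URL on any error. A hedged sketch of a lookup, assuming network access to apps.splunk.com; the TA id is illustrative.

from contentctl.enrichments.splunk_app_enrichment import SplunkAppEnrichment

# Sketch only: the TA id is an assumption. Pass force_cached_or_offline=True to
# reuse lookups/APP_ENRICHMENT_CACHE.db instead of querying Splunkbase each run
# (the lookups/ directory must already exist for the shelve cache to be created).
app_info = SplunkAppEnrichment.enrich_splunk_app("Splunk_TA_microsoft_sysmon")
print(app_info.get("name"), app_info.get("url", ""))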
contentctl/helper/link_validator.py

@@ -1,7 +1,8 @@
 from pydantic import BaseModel, model_validator
 from typing import Union, Callable, Any
 import requests
-import urllib3, urllib3.exceptions
+import urllib3
+import urllib3.exceptions
 import time
 import abc
 
@@ -10,88 +11,96 @@ import shelve
 
 DEFAULT_USER_AGENT_STRING = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.41 Safari/537.36"
 ALLOWED_HTTP_CODES = [200]
-class LinkStats(BaseModel):
 
-    #Static Values
+
+class LinkStats(BaseModel):
+    # Static Values
     method: Callable = requests.get
-    allowed_http_codes: list[int] = ALLOWED_HTTP_CODES
-    access_count: int = 1 #when constructor is called, it has been accessed once!
+    allowed_http_codes: list[int] = ALLOWED_HTTP_CODES
+    access_count: int = 1  # when constructor is called, it has been accessed once!
     timeout_seconds: int = 15
     allow_redirects: bool = True
     headers: dict = {"User-Agent": DEFAULT_USER_AGENT_STRING}
     verify_ssl: bool = False
     if verify_ssl is False:
         urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
-    #Values generated at runtime.
-    #We need to assign these some default values to get the
-    #validation working since ComputedField has not yet been
-    #introduced to Pydantic
+
+    # Values generated at runtime.
+    # We need to assign these some default values to get the
+    # validation working since ComputedField has not yet been
+    # introduced to Pydantic
     reference: str
     referencing_files: set[str]
-    redirect: Union[str,None] = None
+    redirect: Union[str, None] = None
     status_code: int = 0
     valid: bool = False
-    resolution_time: float = 0
-
-
-    def is_link_valid(self, referencing_file:str)->bool:
+    resolution_time: float = 0
+
+    def is_link_valid(self, referencing_file: str) -> bool:
         self.access_count += 1
         self.referencing_files.add(referencing_file)
         return self.valid
-
+
     @model_validator(mode="before")
-    def check_reference(cls, data:Any)->Any:
+    def check_reference(cls, data: Any) -> Any:
         start_time = time.time()
-        #Get out all the fields names to make them easier to reference
-        method = data['method']
-        reference = data['reference']
-        timeout_seconds = data['timeout_seconds']
-        headers = data['headers']
-        allow_redirects = data['allow_redirects']
-        verify_ssl = data['verify_ssl']
-        allowed_http_codes = data['allowed_http_codes']
+        # Get out all the fields names to make them easier to reference
+        method = data["method"]
+        reference = data["reference"]
+        timeout_seconds = data["timeout_seconds"]
+        headers = data["headers"]
+        allow_redirects = data["allow_redirects"]
+        verify_ssl = data["verify_ssl"]
+        allowed_http_codes = data["allowed_http_codes"]
         if not (reference.startswith("http://") or reference.startswith("https://")):
-            raise(ValueError(f"Reference {reference} does not begin with http(s). Only http(s) references are supported"))
-
+            raise (
+                ValueError(
+                    f"Reference {reference} does not begin with http(s). Only http(s) references are supported"
+                )
+            )
+
         try:
-            get = method(reference, timeout=timeout_seconds,
-                headers = headers,
-                allow_redirects=allow_redirects, verify=verify_ssl)
+            get = method(
+                reference,
+                timeout=timeout_seconds,
+                headers=headers,
+                allow_redirects=allow_redirects,
+                verify=verify_ssl,
+            )
             resolution_time = time.time() - start_time
-            data['status_code'] = get.status_code
-            data['resolution_time'] = resolution_time
+            data["status_code"] = get.status_code
+            data["resolution_time"] = resolution_time
             if reference != get.url:
-                data['redirect'] = get.url
+                data["redirect"] = get.url
             else:
-                data['redirect'] = None #None is also already the default
+                data["redirect"] = None  # None is also already the default
 
-            #Returns the updated values and sets them for the object
+            # Returns the updated values and sets them for the object
             if get.status_code in allowed_http_codes:
-                data['valid'] = True
+                data["valid"] = True
             else:
-                #print(f"Unacceptable HTTP Status Code {get.status_code} received for {reference}")
-                data['valid'] = False
-            return data
+                # print(f"Unacceptable HTTP Status Code {get.status_code} received for {reference}")
+                data["valid"] = False
+            return data
 
-        except Exception as e:
+        except Exception:
             resolution_time = time.time() - start_time
-            #print(f"Reference {reference} was not reachable after {resolution_time:.2f} seconds")
-            data['status_code'] = 0
-            data['valid'] = False
-            data['redirect'] = None
-            data['resolution_time'] = resolution_time
+            # print(f"Reference {reference} was not reachable after {resolution_time:.2f} seconds")
+            data["status_code"] = 0
+            data["valid"] = False
+            data["redirect"] = None
+            data["resolution_time"] = resolution_time
             return data
 
 
 class LinkValidator(abc.ABC):
-    cache: Union[dict[str,LinkStats], shelve.Shelf] = {}
+    cache: Union[dict[str, LinkStats], shelve.Shelf] = {}
     uncached_checks: int = 0
     total_checks: int = 0
-    #cache: dict[str,LinkStats] = {}
+    # cache: dict[str,LinkStats] = {}
 
     use_file_cache: bool = False
-    reference_cache_file: str ="lookups/REFERENCE_CACHE.db"
+    reference_cache_file: str = "lookups/REFERENCE_CACHE.db"
 
     @staticmethod
     def initialize_cache(use_file_cache: bool = False):
@@ -99,74 +108,88 @@ class LinkValidator(abc.ABC):
         if use_file_cache is False:
             return
         if not os.path.exists(LinkValidator.reference_cache_file):
-            print(f"Cache at {LinkValidator.reference_cache_file} not found - Creating it.")
-
+            print(
+                f"Cache at {LinkValidator.reference_cache_file} not found - Creating it."
+            )
+
         try:
-            LinkValidator.cache = shelve.open(LinkValidator.reference_cache_file, flag='c', writeback=True)
-        except:
-            print(f"Failed to create the cache file {LinkValidator.reference_cache_file}. Reference info will not be cached.")
+            LinkValidator.cache = shelve.open(
+                LinkValidator.reference_cache_file, flag="c", writeback=True
+            )
+        except Exception:
+            print(
+                f"Failed to create the cache file {LinkValidator.reference_cache_file}. Reference info will not be cached."
+            )
             LinkValidator.cache = {}
 
-        #Remove all of the failures to force those resources to be resolved again
+        # Remove all of the failures to force those resources to be resolved again
         failed_refs = []
         for ref in LinkValidator.cache.keys():
             if LinkValidator.cache[ref].status_code not in ALLOWED_HTTP_CODES:
                 failed_refs.append(ref)
-                #can't remove it here because this will throw an error:
-                #cannot change size of dictionary while iterating over it
+                # can't remove it here because this will throw an error:
+                # cannot change size of dictionary while iterating over it
             else:
-                #Set the reference count to 0 and referencing files to empty set
+                # Set the reference count to 0 and referencing files to empty set
                 LinkValidator.cache[ref].access_count = 0
                 LinkValidator.cache[ref].referencing_files = set()
-
-        for ref in failed_refs:
-            del(LinkValidator.cache[ref])
 
+        for ref in failed_refs:
+            del LinkValidator.cache[ref]
 
-
-
     @staticmethod
     def close_cache():
         if LinkValidator.use_file_cache:
             LinkValidator.cache.close()
 
     @staticmethod
-    def validate_reference(reference: str, referencing_file:str, raise_exception_if_failure: bool = False) -> bool:
+    def validate_reference(
+        reference: str, referencing_file: str, raise_exception_if_failure: bool = False
+    ) -> bool:
         LinkValidator.total_checks += 1
         if reference not in LinkValidator.cache:
             LinkValidator.uncached_checks += 1
-            LinkValidator.cache[reference] = LinkStats(reference=reference, referencing_files = set([referencing_file]))
+            LinkValidator.cache[reference] = LinkStats(
+                reference=reference, referencing_files=set([referencing_file])
+            )
         result = LinkValidator.cache[reference].is_link_valid(referencing_file)
 
-        #print(f"Total Checks: {LinkValidator.total_checks}, Percent Cached: {100*(1 - LinkValidator.uncached_checks / LinkValidator.total_checks):.2f}")
+        # print(f"Total Checks: {LinkValidator.total_checks}, Percent Cached: {100*(1 - LinkValidator.uncached_checks / LinkValidator.total_checks):.2f}")
 
         if result is True:
             return True
         elif raise_exception_if_failure is True:
-            raise(Exception(f"Reference Link Failed: {reference}"))
+            raise (Exception(f"Reference Link Failed: {reference}"))
         else:
             return False
+
     @staticmethod
     def print_link_validation_errors():
-        failures = [LinkValidator.cache[k] for k in LinkValidator.cache if LinkValidator.cache[k].valid is False]
+        failures = [
+            LinkValidator.cache[k]
+            for k in LinkValidator.cache
+            if LinkValidator.cache[k].valid is False
+        ]
         failures.sort(key=lambda d: d.status_code)
         for failure in failures:
-            print(f"Link {failure.reference} invalid with HTTP Status Code [{failure.status_code}] and referenced by the following files:")
+            print(
+                f"Link {failure.reference} invalid with HTTP Status Code [{failure.status_code}] and referenced by the following files:"
+            )
             for ref in failure.referencing_files:
                 print(f"\t* {ref}")
 
     @staticmethod
-    def SecurityContentObject_validate_references(v:list, values: dict)->list:
-        if 'check_references' not in values:
-            raise(Exception("Member 'check_references' missing from Baseline!"))
-        elif values['check_references'] is False:
-            #Reference checking is enabled
+    def SecurityContentObject_validate_references(v: list, values: dict) -> list:
+        if "check_references" not in values:
+            raise (Exception("Member 'check_references' missing from Baseline!"))
+        elif values["check_references"] is False:
+            # Reference checking is enabled
             pass
-        elif values['check_references'] is True:
+        elif values["check_references"] is True:
             for reference in v:
-                LinkValidator.validate_reference(reference, values['name'])
-        #Remove the check_references key from the values dict so that it is not
-        #output by the serialization code
-        del values['check_references']
+                LinkValidator.validate_reference(reference, values["name"])
+        # Remove the check_references key from the values dict so that it is not
+        # output by the serialization code
+        del values["check_references"]
 
         return v
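The link_validator.py changes are formatting and lint fixes (split urllib3 imports, except Exception instead of a bare except, a del statement instead of del()), so the request defaults that LinkStats encodes are unchanged. A standalone sketch of what a single reference check effectively does under those defaults; the URL is illustrative and network access is assumed.

import requests
import urllib3
import urllib3.exceptions

# Mirrors the LinkStats defaults shown in the diff: custom User-Agent, 15 s timeout,
# follow redirects, verify_ssl=False with the InsecureRequestWarning silenced.
DEFAULT_USER_AGENT_STRING = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.41 Safari/537.36"
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

resp = requests.get(
    "https://attack.mitre.org/techniques/T1059/",  # illustrative reference URL
    timeout=15,
    headers={"User-Agent": DEFAULT_USER_AGENT_STRING},
    allow_redirects=True,
    verify=False,
)
# A reference counts as valid when the status code is in ALLOWED_HTTP_CODES ([200]).
print(resp.status_code, resp.url)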