contentctl 5.0.0a2__py3-none-any.whl → 5.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. contentctl/__init__.py +1 -1
  2. contentctl/actions/build.py +88 -55
  3. contentctl/actions/deploy_acs.py +29 -24
  4. contentctl/actions/detection_testing/DetectionTestingManager.py +66 -41
  5. contentctl/actions/detection_testing/GitService.py +2 -4
  6. contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
  7. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +163 -124
  8. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
  9. contentctl/actions/detection_testing/progress_bar.py +3 -0
  10. contentctl/actions/detection_testing/views/DetectionTestingView.py +15 -18
  11. contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
  12. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
  13. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
  14. contentctl/actions/doc_gen.py +9 -5
  15. contentctl/actions/initialize.py +45 -33
  16. contentctl/actions/inspect.py +118 -61
  17. contentctl/actions/new_content.py +83 -53
  18. contentctl/actions/release_notes.py +276 -146
  19. contentctl/actions/reporting.py +23 -19
  20. contentctl/actions/test.py +31 -25
  21. contentctl/actions/validate.py +54 -34
  22. contentctl/api.py +54 -45
  23. contentctl/contentctl.py +10 -10
  24. contentctl/enrichments/attack_enrichment.py +112 -72
  25. contentctl/enrichments/cve_enrichment.py +34 -28
  26. contentctl/enrichments/splunk_app_enrichment.py +38 -36
  27. contentctl/helper/link_validator.py +101 -78
  28. contentctl/helper/splunk_app.py +69 -41
  29. contentctl/helper/utils.py +58 -39
  30. contentctl/input/director.py +69 -37
  31. contentctl/input/new_content_questions.py +26 -34
  32. contentctl/input/yml_reader.py +22 -17
  33. contentctl/objects/abstract_security_content_objects/detection_abstract.py +255 -323
  34. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +111 -46
  35. contentctl/objects/alert_action.py +8 -8
  36. contentctl/objects/annotated_types.py +1 -1
  37. contentctl/objects/atomic.py +64 -54
  38. contentctl/objects/base_test.py +2 -1
  39. contentctl/objects/base_test_result.py +16 -8
  40. contentctl/objects/baseline.py +47 -35
  41. contentctl/objects/baseline_tags.py +29 -22
  42. contentctl/objects/config.py +1 -1
  43. contentctl/objects/constants.py +32 -58
  44. contentctl/objects/correlation_search.py +75 -55
  45. contentctl/objects/dashboard.py +55 -41
  46. contentctl/objects/data_source.py +13 -13
  47. contentctl/objects/deployment.py +44 -37
  48. contentctl/objects/deployment_email.py +1 -1
  49. contentctl/objects/deployment_notable.py +2 -1
  50. contentctl/objects/deployment_phantom.py +5 -5
  51. contentctl/objects/deployment_rba.py +1 -1
  52. contentctl/objects/deployment_scheduling.py +1 -1
  53. contentctl/objects/deployment_slack.py +1 -1
  54. contentctl/objects/detection.py +5 -2
  55. contentctl/objects/detection_metadata.py +1 -0
  56. contentctl/objects/detection_stanza.py +7 -2
  57. contentctl/objects/detection_tags.py +54 -64
  58. contentctl/objects/drilldown.py +66 -35
  59. contentctl/objects/enums.py +61 -43
  60. contentctl/objects/errors.py +16 -24
  61. contentctl/objects/integration_test.py +3 -3
  62. contentctl/objects/integration_test_result.py +1 -0
  63. contentctl/objects/investigation.py +53 -31
  64. contentctl/objects/investigation_tags.py +29 -17
  65. contentctl/objects/lookup.py +234 -113
  66. contentctl/objects/macro.py +55 -38
  67. contentctl/objects/manual_test.py +3 -3
  68. contentctl/objects/manual_test_result.py +1 -0
  69. contentctl/objects/mitre_attack_enrichment.py +17 -16
  70. contentctl/objects/notable_action.py +2 -1
  71. contentctl/objects/notable_event.py +1 -3
  72. contentctl/objects/playbook.py +37 -35
  73. contentctl/objects/playbook_tags.py +22 -16
  74. contentctl/objects/rba.py +68 -11
  75. contentctl/objects/risk_analysis_action.py +15 -11
  76. contentctl/objects/risk_event.py +27 -20
  77. contentctl/objects/risk_object.py +1 -0
  78. contentctl/objects/savedsearches_conf.py +9 -7
  79. contentctl/objects/security_content_object.py +5 -2
  80. contentctl/objects/story.py +54 -49
  81. contentctl/objects/story_tags.py +56 -44
  82. contentctl/objects/test_group.py +5 -2
  83. contentctl/objects/threat_object.py +1 -0
  84. contentctl/objects/throttling.py +27 -18
  85. contentctl/objects/unit_test.py +3 -4
  86. contentctl/objects/unit_test_baseline.py +4 -5
  87. contentctl/objects/unit_test_result.py +6 -6
  88. contentctl/output/api_json_output.py +22 -22
  89. contentctl/output/attack_nav_output.py +21 -21
  90. contentctl/output/attack_nav_writer.py +29 -37
  91. contentctl/output/conf_output.py +230 -174
  92. contentctl/output/data_source_writer.py +38 -25
  93. contentctl/output/doc_md_output.py +53 -27
  94. contentctl/output/jinja_writer.py +19 -15
  95. contentctl/output/json_writer.py +20 -8
  96. contentctl/output/svg_output.py +56 -38
  97. contentctl/output/templates/analyticstories_detections.j2 +1 -1
  98. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  99. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  100. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  101. contentctl/output/templates/savedsearches_baselines.j2 +2 -2
  102. contentctl/output/templates/savedsearches_detections.j2 +2 -8
  103. contentctl/output/templates/savedsearches_investigations.j2 +2 -2
  104. contentctl/output/templates/transforms.j2 +2 -4
  105. contentctl/output/yml_writer.py +18 -24
  106. contentctl/templates/stories/cobalt_strike.yml +1 -0
  107. {contentctl-5.0.0a2.dist-info → contentctl-5.0.1.dist-info}/METADATA +1 -1
  108. contentctl-5.0.1.dist-info/RECORD +168 -0
  109. contentctl/actions/initialize_old.py +0 -245
  110. contentctl/objects/observable.py +0 -39
  111. contentctl-5.0.0a2.dist-info/RECORD +0 -170
  112. {contentctl-5.0.0a2.dist-info → contentctl-5.0.1.dist-info}/LICENSE.md +0 -0
  113. {contentctl-5.0.0a2.dist-info → contentctl-5.0.1.dist-info}/WHEEL +0 -0
  114. {contentctl-5.0.0a2.dist-info → contentctl-5.0.1.dist-info}/entry_points.txt +0 -0
contentctl/actions/release_notes.py
@@ -1,4 +1,3 @@
-import os
 from contentctl.objects.config import release_notes
 from git import Repo
 import re
@@ -7,234 +6,365 @@ import pathlib
 from typing import List, Union
 
 
-
 class ReleaseNotes:
-    def create_notes(self,repo_path:pathlib.Path, file_paths:List[pathlib.Path], header:str)->dict[str,Union[List[str], str]]:
-        updates:List[str] = []
-        warnings:List[str] = []
+    def create_notes(
+        self, repo_path: pathlib.Path, file_paths: List[pathlib.Path], header: str
+    ) -> dict[str, Union[List[str], str]]:
+        updates: List[str] = []
+        warnings: List[str] = []
         for file_path in file_paths:
             # Check if the file exists
             if file_path.exists() and file_path.is_file():
                 # Check if the file is a YAML file
-                if file_path.suffix in ['.yaml', '.yml']:
+                if file_path.suffix in [".yaml", ".yml"]:
                     # Read and parse the YAML file
-                    with open(file_path, 'r') as file:
+                    with open(file_path, "r") as file:
                         try:
                             data = yaml.safe_load(file)
                             # Check and create story link
-                            if 'name' in data and 'stories' in file_path.parts:
-                                story_link = "https://research.splunk.com/stories/" + data['name']
-                                story_link=story_link.replace(" ","_")
+                            if "name" in data and "stories" in file_path.parts:
+                                story_link = (
+                                    "https://research.splunk.com/stories/"
+                                    + data["name"]
+                                )
+                                story_link = story_link.replace(" ", "_")
                                 story_link = story_link.lower()
-                                updates.append("- "+"["+f"{data['name']}"+"]"+"("+story_link+")")
-
-                            if 'name' in data and'playbooks' in file_path.parts:
-                                playbook_link = "https://research.splunk.com/" + str(file_path).replace(str(repo_path),"")
-                                playbook_link=playbook_link.replace(".yml","/").lower()
-                                updates.append("- "+"["+f"{data['name']}"+"]"+"("+playbook_link+")")
-
-                            if 'name' in data and'macros' in file_path.parts:
+                                updates.append(
+                                    "- "
+                                    + "["
+                                    + f"{data['name']}"
+                                    + "]"
+                                    + "("
+                                    + story_link
+                                    + ")"
+                                )
+
+                            if "name" in data and "playbooks" in file_path.parts:
+                                playbook_link = "https://research.splunk.com/" + str(
+                                    file_path
+                                ).replace(str(repo_path), "")
+                                playbook_link = playbook_link.replace(
+                                    ".yml", "/"
+                                ).lower()
+                                updates.append(
+                                    "- "
+                                    + "["
+                                    + f"{data['name']}"
+                                    + "]"
+                                    + "("
+                                    + playbook_link
+                                    + ")"
+                                )
+
+                            if "name" in data and "macros" in file_path.parts:
                                 updates.append("- " + f"{data['name']}")
 
-                            if 'name' in data and'lookups' in file_path.parts:
+                            if "name" in data and "lookups" in file_path.parts:
                                 updates.append("- " + f"{data['name']}")
 
                             # Create only SSA link when its production
-                            if 'name' in data and 'id' in data and 'ssa_detections' in file_path.parts:
-                                if data['status'] == "production":
-                                    temp_link = "https://research.splunk.com/" + str(file_path).replace(str(repo_path),"")
-                                    pattern = r'(?<=/)[^/]*$'
-                                    detection_link = re.sub(pattern, data['id'], temp_link)
-                                    detection_link = detection_link.replace("detections","" )
-                                    detection_link = detection_link.replace("ssa_/","" )
-                                    updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")
-
-                                if data['status'] == "validation":
-                                    updates.append("- "+f"{data['name']}"+" (Validation Mode)")
-
+                            if (
+                                "name" in data
+                                and "id" in data
+                                and "ssa_detections" in file_path.parts
+                            ):
+                                if data["status"] == "production":
+                                    temp_link = "https://research.splunk.com/" + str(
+                                        file_path
+                                    ).replace(str(repo_path), "")
+                                    pattern = r"(?<=/)[^/]*$"
+                                    detection_link = re.sub(
+                                        pattern, data["id"], temp_link
+                                    )
+                                    detection_link = detection_link.replace(
+                                        "detections", ""
+                                    )
+                                    detection_link = detection_link.replace("ssa_/", "")
+                                    updates.append(
+                                        "- "
+                                        + "["
+                                        + f"{data['name']}"
+                                        + "]"
+                                        + "("
+                                        + detection_link
+                                        + ")"
+                                    )
+
+                                if data["status"] == "validation":
+                                    updates.append(
+                                        "- " + f"{data['name']}" + " (Validation Mode)"
+                                    )
 
                             # Check and create detection link
-                            if 'name' in data and 'id' in data and 'detections' in file_path.parts and not 'ssa_detections' in file_path.parts and 'detections/deprecated' not in file_path.parts:
-
-                                if data['status'] == "production":
-                                    temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path),"")
-                                    pattern = r'(?<=/)[^/]*$'
-                                    detection_link = re.sub(pattern, data['id'], temp_link)
-                                    detection_link = detection_link.replace("detections","" )
-                                    detection_link = detection_link.replace(".com//",".com/" )
-                                    updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")
-
-                                if data['status'] == "deprecated":
-                                    temp_link = "https://research.splunk.com" + str(file_path).replace(str(repo_path),"")
-                                    pattern = r'(?<=/)[^/]*$'
-                                    detection_link = re.sub(pattern, data['id'], temp_link)
-                                    detection_link = detection_link.replace("detections","" )
-                                    detection_link = detection_link.replace(".com//",".com/" )
-                                    updates.append("- "+"["+f"{data['name']}"+"]"+"("+detection_link+")")
-
+                            if (
+                                "name" in data
+                                and "id" in data
+                                and "detections" in file_path.parts
+                                and "ssa_detections" not in file_path.parts
+                                and "detections/deprecated" not in file_path.parts
+                            ):
+                                if data["status"] == "production":
+                                    temp_link = "https://research.splunk.com" + str(
+                                        file_path
+                                    ).replace(str(repo_path), "")
+                                    pattern = r"(?<=/)[^/]*$"
+                                    detection_link = re.sub(
+                                        pattern, data["id"], temp_link
+                                    )
+                                    detection_link = detection_link.replace(
+                                        "detections", ""
+                                    )
+                                    detection_link = detection_link.replace(
+                                        ".com//", ".com/"
+                                    )
+                                    updates.append(
+                                        "- "
+                                        + "["
+                                        + f"{data['name']}"
+                                        + "]"
+                                        + "("
+                                        + detection_link
+                                        + ")"
+                                    )
+
+                                if data["status"] == "deprecated":
+                                    temp_link = "https://research.splunk.com" + str(
+                                        file_path
+                                    ).replace(str(repo_path), "")
+                                    pattern = r"(?<=/)[^/]*$"
+                                    detection_link = re.sub(
+                                        pattern, data["id"], temp_link
+                                    )
+                                    detection_link = detection_link.replace(
+                                        "detections", ""
+                                    )
+                                    detection_link = detection_link.replace(
+                                        ".com//", ".com/"
+                                    )
+                                    updates.append(
+                                        "- "
+                                        + "["
+                                        + f"{data['name']}"
+                                        + "]"
+                                        + "("
+                                        + detection_link
+                                        + ")"
+                                    )
+
                         except yaml.YAMLError as exc:
-                            raise Exception(f"Error parsing YAML file for release_notes {file_path}: {str(exc)}")
+                            raise Exception(
+                                f"Error parsing YAML file for release_notes {file_path}: {str(exc)}"
+                            )
             else:
-                warnings.append(f"Error parsing YAML file for release_notes. File not found or is not a file: {file_path}")
-        #print out all updates at once
-        success_header = f'### {header} - [{len(updates)}]'
-        warning_header = f'### {header} - [{len(warnings)}]'
-        return {'header': success_header, 'changes': sorted(updates),
-                'warning_header': warning_header, 'warnings': warnings}
-
-
-    def release_notes(self, config:release_notes) -> None:
+                warnings.append(
+                    f"Error parsing YAML file for release_notes. File not found or is not a file: {file_path}"
+                )
+        # print out all updates at once
+        success_header = f"### {header} - [{len(updates)}]"
+        warning_header = f"### {header} - [{len(warnings)}]"
+        return {
+            "header": success_header,
+            "changes": sorted(updates),
+            "warning_header": warning_header,
+            "warnings": warnings,
+        }
 
+    def release_notes(self, config: release_notes) -> None:
         ### Remove hard coded path
-        directories = ['detections/','stories/','macros/','lookups/','playbooks/','ssa_detections/']
-
+        directories = [
+            "detections/",
+            "stories/",
+            "macros/",
+            "lookups/",
+            "playbooks/",
+            "ssa_detections/",
+        ]
+
         repo = Repo(config.path)
         # Ensure the new tag is in the tags if tags are supplied
-
-        if config.new_tag:
+
+        if config.new_tag:
             if config.new_tag not in repo.tags:
-                raise Exception(f"new_tag {config.new_tag} does not exist in the repository. Make sure your branch nameis ")
+                raise Exception(
+                    f"new_tag {config.new_tag} does not exist in the repository. Make sure your branch nameis "
+                )
             if config.old_tag is None:
-                #Old tag was not supplied, so find the index of the new tag, then get the tag before it
-                tags_sorted = sorted(repo.tags, key=lambda t: t.commit.committed_datetime, reverse=True)
-                tags_names_sorted = [tag.name for tag in tags_sorted]
+                # Old tag was not supplied, so find the index of the new tag, then get the tag before it
+                tags_sorted = sorted(
+                    repo.tags, key=lambda t: t.commit.committed_datetime, reverse=True
+                )
+                tags_names_sorted = [tag.name for tag in tags_sorted]
                 new_tag_index = tags_names_sorted.index(config.new_tag)
                 try:
-                    config.old_tag = tags_names_sorted[new_tag_index+1]
+                    config.old_tag = tags_names_sorted[new_tag_index + 1]
                 except Exception:
-                    raise Exception(f"old_tag cannot be inferred. {config.new_tag} is the oldest tag in the repo!")
+                    raise Exception(
+                        f"old_tag cannot be inferred. {config.new_tag} is the oldest tag in the repo!"
+                    )
             latest_tag = config.new_tag
-            previous_tag = config.old_tag
+            previous_tag = config.old_tag
             commit1 = repo.commit(latest_tag)
-            commit2 = repo.commit(previous_tag)
+            commit2 = repo.commit(previous_tag)
             diff_index = commit2.diff(commit1)
 
-        # Ensure the branch is in the repo
+        # Ensure the branch is in the repo
         if config.latest_branch:
-            #If a branch name is supplied, compare against develop
+            # If a branch name is supplied, compare against develop
             if config.latest_branch not in repo.branches:
-                raise ValueError(f"latest branch {config.latest_branch} does not exist in the repository. Make sure your branch name is correct")
+                raise ValueError(
+                    f"latest branch {config.latest_branch} does not exist in the repository. Make sure your branch name is correct"
+                )
             if config.compare_against not in repo.branches:
-                raise ValueError(f"compare_against branch {config.compare_against} does not exist in the repository. Make sure your branch name is correct")
-
+                raise ValueError(
+                    f"compare_against branch {config.compare_against} does not exist in the repository. Make sure your branch name is correct"
+                )
+
             commit1 = repo.commit(config.latest_branch)
-            commit2 = repo.commit(config.compare_against)
+            commit2 = repo.commit(config.compare_against)
             diff_index = commit2.diff(commit1)
-
-        modified_files:List[pathlib.Path] = []
-        added_files:List[pathlib.Path] = []
+
+        modified_files: List[pathlib.Path] = []
+        added_files: List[pathlib.Path] = []
         for diff in diff_index:
             file_path = pathlib.Path(diff.a_path)
 
             # Check if the file is in the specified directories
             if any(str(file_path).startswith(directory) for directory in directories):
                 # Check if a file is Modified
-                if diff.change_type == 'M':
+                if diff.change_type == "M":
                     modified_files.append(file_path)
 
-
                 # Check if a file is Added
-                elif diff.change_type == 'A':
+                elif diff.change_type == "A":
                     added_files.append(file_path)
         # print(added_files)
-        detections_added:List[pathlib.Path] = []
-        ba_detections_added:List[pathlib.Path] = []
-        stories_added:List[pathlib.Path] = []
-        macros_added:List[pathlib.Path] = []
-        lookups_added:List[pathlib.Path] = []
-        playbooks_added:List[pathlib.Path] = []
-        detections_modified:List[pathlib.Path] = []
-        ba_detections_modified:List[pathlib.Path] = []
-        stories_modified:List[pathlib.Path] = []
-        macros_modified:List[pathlib.Path] = []
-        lookups_modified:List[pathlib.Path] = []
-        playbooks_modified:List[pathlib.Path] = []
-        detections_deprecated:List[pathlib.Path] = []
+        detections_added: List[pathlib.Path] = []
+        ba_detections_added: List[pathlib.Path] = []
+        stories_added: List[pathlib.Path] = []
+        macros_added: List[pathlib.Path] = []
+        lookups_added: List[pathlib.Path] = []
+        playbooks_added: List[pathlib.Path] = []
+        detections_modified: List[pathlib.Path] = []
+        ba_detections_modified: List[pathlib.Path] = []
+        stories_modified: List[pathlib.Path] = []
+        macros_modified: List[pathlib.Path] = []
+        lookups_modified: List[pathlib.Path] = []
+        playbooks_modified: List[pathlib.Path] = []
+        detections_deprecated: List[pathlib.Path] = []
 
         for file in modified_files:
-            file= config.path / file
-            if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' not in file.parts:
+            file = config.path / file
+            if (
+                "detections" in file.parts
+                and "ssa_detections" not in file.parts
+                and "deprecated" not in file.parts
+            ):
                 detections_modified.append(file)
-            if 'detections' in file.parts and 'ssa_detections' not in file.parts and 'deprecated' in file.parts:
+            if (
+                "detections" in file.parts
+                and "ssa_detections" not in file.parts
+                and "deprecated" in file.parts
+            ):
                 detections_deprecated.append(file)
-            if 'stories' in file.parts:
+            if "stories" in file.parts:
                 stories_modified.append(file)
-            if 'macros' in file.parts:
+            if "macros" in file.parts:
                 macros_modified.append(file)
-            if 'lookups' in file.parts:
+            if "lookups" in file.parts:
                 lookups_modified.append(file)
-            if 'playbooks' in file.parts:
+            if "playbooks" in file.parts:
                 playbooks_modified.append(file)
-            if 'ssa_detections' in file.parts:
+            if "ssa_detections" in file.parts:
                 ba_detections_modified.append(file)
 
         for file in added_files:
-            file=config.path / file
-            if 'detections' in file.parts and 'ssa_detections' not in file.parts:
+            file = config.path / file
+            if "detections" in file.parts and "ssa_detections" not in file.parts:
                 detections_added.append(file)
-            if 'stories' in file.parts:
+            if "stories" in file.parts:
                 stories_added.append(file)
-            if 'macros' in file.parts:
+            if "macros" in file.parts:
                 macros_added.append(file)
-            if 'lookups' in file.parts:
+            if "lookups" in file.parts:
                 lookups_added.append(file)
-            if 'playbooks' in file.parts:
+            if "playbooks" in file.parts:
                 playbooks_added.append(file)
-            if 'ssa_detections' in file.parts:
+            if "ssa_detections" in file.parts:
                 ba_detections_added.append(file)
 
         if config.new_tag:
-
             print(f"Generating release notes - \033[92m{latest_tag}\033[0m")
             print(f"Compared against - \033[92m{previous_tag}\033[0m")
             print("\n## Release notes for ESCU " + latest_tag)
 
         if config.latest_branch:
-            print(f"Generating release notes - \033[92m{config.latest_branch}\033[0m")
-            print(f"Compared against - \033[92m{config.compare_against}\033[0m")
+            print(
+                f"Generating release notes - \033[92m{config.latest_branch}\033[0m"
+            )
+            print(
+                f"Compared against - \033[92m{config.compare_against}\033[0m"
+            )
             print("\n## Release notes for ESCU " + config.latest_branch)
 
-        notes = [self.create_notes(config.path, stories_added, header="New Analytic Story"),
-                 self.create_notes(config.path,stories_modified, header="Updated Analytic Story"),
-                 self.create_notes(config.path,detections_added, header="New Analytics"),
-                 self.create_notes(config.path,detections_modified, header="Updated Analytics"),
-                 self.create_notes(config.path,macros_added, header="Macros Added"),
-                 self.create_notes(config.path,macros_modified, header="Macros Updated"),
-                 self.create_notes(config.path,lookups_added, header="Lookups Added"),
-                 self.create_notes(config.path,lookups_modified, header="Lookups Updated"),
-                 self.create_notes(config.path,playbooks_added, header="Playbooks Added"),
-                 self.create_notes(config.path,playbooks_modified, header="Playbooks Updated"),
-                 self.create_notes(config.path,detections_deprecated, header="Deprecated Analytics")]
-
-        #generate and show ba_notes in a different section
-        ba_notes = [self.create_notes(config.path,ba_detections_added, header="New BA Analytics"),
-                    self.create_notes(config.path,ba_detections_modified, header="Updated BA Analytics") ]
-
-
-        def printNotes(notes:List[dict[str,Union[List[str], str]]], outfile:Union[pathlib.Path,None]=None):
-            num_changes = sum([len(note['changes']) for note in notes])
-            num_warnings = sum([len(note['warnings']) for note in notes])
-            lines:List[str] = []
+        notes = [
+            self.create_notes(config.path, stories_added, header="New Analytic Story"),
+            self.create_notes(
+                config.path, stories_modified, header="Updated Analytic Story"
+            ),
+            self.create_notes(config.path, detections_added, header="New Analytics"),
+            self.create_notes(
+                config.path, detections_modified, header="Updated Analytics"
+            ),
+            self.create_notes(config.path, macros_added, header="Macros Added"),
+            self.create_notes(config.path, macros_modified, header="Macros Updated"),
+            self.create_notes(config.path, lookups_added, header="Lookups Added"),
+            self.create_notes(config.path, lookups_modified, header="Lookups Updated"),
+            self.create_notes(config.path, playbooks_added, header="Playbooks Added"),
+            self.create_notes(
+                config.path, playbooks_modified, header="Playbooks Updated"
+            ),
+            self.create_notes(
+                config.path, detections_deprecated, header="Deprecated Analytics"
+            ),
+        ]
+
+        # generate and show ba_notes in a different section
+        ba_notes = [
+            self.create_notes(
+                config.path, ba_detections_added, header="New BA Analytics"
+            ),
+            self.create_notes(
+                config.path, ba_detections_modified, header="Updated BA Analytics"
+            ),
+        ]
+
+        def printNotes(
+            notes: List[dict[str, Union[List[str], str]]],
+            outfile: Union[pathlib.Path, None] = None,
+        ):
+            num_changes = sum([len(note["changes"]) for note in notes])
+            num_warnings = sum([len(note["warnings"]) for note in notes])
+            lines: List[str] = []
             lines.append(f"Total New and Updated Content: [{num_changes}]")
             for note in notes:
                 lines.append("")
-                lines.append(note['header'])
-                lines+=(note['changes'])
-
+                lines.append(note["header"])
+                lines += note["changes"]
+
             lines.append(f"\n\nTotal Warnings: [{num_warnings}]")
             for note in notes:
-                if len(note['warnings']) > 0:
-                    lines.append(note['warning_header'])
-                    lines+=note['warnings']
-            text_blob = '\n'.join(lines)
+                if len(note["warnings"]) > 0:
+                    lines.append(note["warning_header"])
+                    lines += note["warnings"]
+            text_blob = "\n".join(lines)
             print(text_blob)
             if outfile is not None:
-                with open(outfile,'w') as writer:
+                with open(outfile, "w") as writer:
                     writer.write(text_blob)
-
-        printNotes(notes, config.releaseNotesFilename(f"release_notes.txt"))
+
+        printNotes(notes, config.releaseNotesFilename("release_notes.txt"))
 
         print("\n\n### Other Updates\n-\n")
         print("\n## BA Release Notes")
         printNotes(ba_notes, config.releaseNotesFilename("ba_release_notes.txt"))
-        print(f"Release notes completed succesfully")
+        print("Release notes completed succesfully")
contentctl/actions/reporting.py
@@ -1,5 +1,3 @@
-import os
-
 from dataclasses import dataclass
 
 from contentctl.input.director import DirectorOutputDto
@@ -7,38 +5,44 @@ from contentctl.output.svg_output import SvgOutput
 from contentctl.output.attack_nav_output import AttackNavOutput
 from contentctl.objects.config import report
 
+
 @dataclass(frozen=True)
 class ReportingInputDto:
     director_output_dto: DirectorOutputDto
     config: report
 
-class Reporting:
 
+class Reporting:
     def execute(self, input_dto: ReportingInputDto) -> None:
-
-
-        #Ensure the reporting path exists
+        # Ensure the reporting path exists
        try:
-            input_dto.config.getReportingPath().mkdir(exist_ok=True,parents=True)
+            input_dto.config.getReportingPath().mkdir(exist_ok=True, parents=True)
         except Exception as e:
             if input_dto.config.getReportingPath().is_file():
-                raise Exception(f"Error writing reporting: '{input_dto.config.getReportingPath()}' is a file, not a directory.")
+                raise Exception(
+                    f"Error writing reporting: '{input_dto.config.getReportingPath()}' is a file, not a directory."
+                )
             else:
-                raise Exception(f"Error writing reporting : '{input_dto.config.getReportingPath()}': {str(e)}")
+                raise Exception(
+                    f"Error writing reporting : '{input_dto.config.getReportingPath()}': {str(e)}"
+                )
 
         print("Creating GitHub Badges...")
-        #Generate GitHub Badges
+        # Generate GitHub Badges
         svg_output = SvgOutput()
         svg_output.writeObjects(
-            input_dto.director_output_dto.detections,
-            input_dto.config.getReportingPath())
-
-        #Generate coverage json
+            input_dto.director_output_dto.detections,
+            input_dto.config.getReportingPath(),
+        )
+
+        # Generate coverage json
         print("Generating coverage.json...")
-        attack_nav_output = AttackNavOutput()
+        attack_nav_output = AttackNavOutput()
         attack_nav_output.writeObjects(
-            input_dto.director_output_dto.detections,
-            input_dto.config.getReportingPath()
+            input_dto.director_output_dto.detections,
+            input_dto.config.getReportingPath(),
+        )
+
+        print(
+            f"Reporting successfully written to '{input_dto.config.getReportingPath()}'"
        )
-
-        print(f"Reporting successfully written to '{input_dto.config.getReportingPath()}'")