contentctl 4.4.7__py3-none-any.whl → 5.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (123)
  1. contentctl/__init__.py +1 -1
  2. contentctl/actions/build.py +102 -57
  3. contentctl/actions/deploy_acs.py +29 -24
  4. contentctl/actions/detection_testing/DetectionTestingManager.py +66 -42
  5. contentctl/actions/detection_testing/GitService.py +134 -76
  6. contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
  7. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +192 -147
  8. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
  9. contentctl/actions/detection_testing/progress_bar.py +9 -6
  10. contentctl/actions/detection_testing/views/DetectionTestingView.py +16 -19
  11. contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
  12. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
  13. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
  14. contentctl/actions/doc_gen.py +9 -5
  15. contentctl/actions/initialize.py +45 -33
  16. contentctl/actions/inspect.py +118 -61
  17. contentctl/actions/new_content.py +155 -108
  18. contentctl/actions/release_notes.py +276 -146
  19. contentctl/actions/reporting.py +23 -19
  20. contentctl/actions/test.py +33 -28
  21. contentctl/actions/validate.py +55 -34
  22. contentctl/api.py +54 -45
  23. contentctl/contentctl.py +124 -90
  24. contentctl/enrichments/attack_enrichment.py +112 -72
  25. contentctl/enrichments/cve_enrichment.py +34 -28
  26. contentctl/enrichments/splunk_app_enrichment.py +38 -36
  27. contentctl/helper/link_validator.py +101 -78
  28. contentctl/helper/splunk_app.py +69 -41
  29. contentctl/helper/utils.py +58 -53
  30. contentctl/input/director.py +68 -36
  31. contentctl/input/new_content_questions.py +27 -35
  32. contentctl/input/yml_reader.py +28 -18
  33. contentctl/objects/abstract_security_content_objects/detection_abstract.py +303 -259
  34. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +115 -52
  35. contentctl/objects/alert_action.py +10 -9
  36. contentctl/objects/annotated_types.py +1 -1
  37. contentctl/objects/atomic.py +65 -54
  38. contentctl/objects/base_test.py +5 -3
  39. contentctl/objects/base_test_result.py +19 -11
  40. contentctl/objects/baseline.py +62 -30
  41. contentctl/objects/baseline_tags.py +30 -24
  42. contentctl/objects/config.py +790 -597
  43. contentctl/objects/constants.py +33 -56
  44. contentctl/objects/correlation_search.py +150 -136
  45. contentctl/objects/dashboard.py +55 -41
  46. contentctl/objects/data_source.py +16 -17
  47. contentctl/objects/deployment.py +43 -44
  48. contentctl/objects/deployment_email.py +3 -2
  49. contentctl/objects/deployment_notable.py +4 -2
  50. contentctl/objects/deployment_phantom.py +7 -6
  51. contentctl/objects/deployment_rba.py +3 -2
  52. contentctl/objects/deployment_scheduling.py +3 -2
  53. contentctl/objects/deployment_slack.py +3 -2
  54. contentctl/objects/detection.py +5 -2
  55. contentctl/objects/detection_metadata.py +1 -0
  56. contentctl/objects/detection_stanza.py +7 -2
  57. contentctl/objects/detection_tags.py +58 -103
  58. contentctl/objects/drilldown.py +66 -34
  59. contentctl/objects/enums.py +81 -100
  60. contentctl/objects/errors.py +16 -24
  61. contentctl/objects/integration_test.py +3 -3
  62. contentctl/objects/integration_test_result.py +1 -0
  63. contentctl/objects/investigation.py +59 -36
  64. contentctl/objects/investigation_tags.py +30 -19
  65. contentctl/objects/lookup.py +304 -101
  66. contentctl/objects/macro.py +55 -39
  67. contentctl/objects/manual_test.py +3 -3
  68. contentctl/objects/manual_test_result.py +1 -0
  69. contentctl/objects/mitre_attack_enrichment.py +17 -16
  70. contentctl/objects/notable_action.py +2 -1
  71. contentctl/objects/notable_event.py +1 -3
  72. contentctl/objects/playbook.py +37 -35
  73. contentctl/objects/playbook_tags.py +23 -13
  74. contentctl/objects/rba.py +96 -0
  75. contentctl/objects/risk_analysis_action.py +15 -11
  76. contentctl/objects/risk_event.py +110 -160
  77. contentctl/objects/risk_object.py +1 -0
  78. contentctl/objects/savedsearches_conf.py +9 -7
  79. contentctl/objects/security_content_object.py +5 -2
  80. contentctl/objects/story.py +54 -49
  81. contentctl/objects/story_tags.py +56 -45
  82. contentctl/objects/test_attack_data.py +2 -1
  83. contentctl/objects/test_group.py +5 -2
  84. contentctl/objects/threat_object.py +1 -0
  85. contentctl/objects/throttling.py +27 -18
  86. contentctl/objects/unit_test.py +3 -4
  87. contentctl/objects/unit_test_baseline.py +5 -5
  88. contentctl/objects/unit_test_result.py +6 -6
  89. contentctl/output/api_json_output.py +233 -220
  90. contentctl/output/attack_nav_output.py +21 -21
  91. contentctl/output/attack_nav_writer.py +29 -37
  92. contentctl/output/conf_output.py +235 -172
  93. contentctl/output/conf_writer.py +201 -125
  94. contentctl/output/data_source_writer.py +38 -26
  95. contentctl/output/doc_md_output.py +53 -27
  96. contentctl/output/jinja_writer.py +19 -15
  97. contentctl/output/json_writer.py +21 -11
  98. contentctl/output/svg_output.py +56 -38
  99. contentctl/output/templates/analyticstories_detections.j2 +2 -2
  100. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  101. contentctl/output/templates/collections.j2 +1 -1
  102. contentctl/output/templates/doc_detections.j2 +0 -5
  103. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  104. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  105. contentctl/output/templates/savedsearches_baselines.j2 +2 -2
  106. contentctl/output/templates/savedsearches_detections.j2 +10 -11
  107. contentctl/output/templates/savedsearches_investigations.j2 +2 -2
  108. contentctl/output/templates/transforms.j2 +6 -8
  109. contentctl/output/yml_writer.py +29 -20
  110. contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +16 -34
  111. contentctl/templates/stories/cobalt_strike.yml +1 -0
  112. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/METADATA +5 -4
  113. contentctl-5.0.0.dist-info/RECORD +168 -0
  114. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/WHEEL +1 -1
  115. contentctl/actions/initialize_old.py +0 -245
  116. contentctl/objects/event_source.py +0 -11
  117. contentctl/objects/observable.py +0 -37
  118. contentctl/output/detection_writer.py +0 -28
  119. contentctl/output/new_content_yml_output.py +0 -56
  120. contentctl/output/yml_output.py +0 -66
  121. contentctl-4.4.7.dist-info/RECORD +0 -173
  122. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/LICENSE.md +0 -0
  123. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/entry_points.txt +0 -0
contentctl/actions/detection_testing/GitService.py
@@ -1,44 +1,39 @@
 import logging
 import os
 import pathlib
+from typing import TYPE_CHECKING, List, Optional
+
 import pygit2
-from pygit2.enums import DeltaStatus
-from typing import List, Optional
 from pydantic import BaseModel, FilePath
-from typing import TYPE_CHECKING
+from pygit2.enums import DeltaStatus
+
 if TYPE_CHECKING:
     from contentctl.input.director import DirectorOutputDto
-
 
-from contentctl.objects.macro import Macro
-from contentctl.objects.lookup import Lookup
-from contentctl.objects.detection import Detection
+from contentctl.input.director import DirectorOutputDto
+from contentctl.objects.config import All, Changes, Selected, test_common
 from contentctl.objects.data_source import DataSource
+from contentctl.objects.detection import Detection
+from contentctl.objects.lookup import CSVLookup, Lookup
+from contentctl.objects.macro import Macro
 from contentctl.objects.security_content_object import SecurityContentObject
-from contentctl.objects.config import test_common, All, Changes, Selected
 
 # Logger
 logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
 LOGGER = logging.getLogger(__name__)
 
 
-
-from contentctl.input.director import DirectorOutputDto
-
-
-
 class GitService(BaseModel):
     director: DirectorOutputDto
     config: test_common
     gitHash: Optional[str] = None
-
-    def getHash(self)->str:
+
+    def getHash(self) -> str:
         if self.gitHash is None:
             raise Exception("Cannot get hash of repo, it was not set")
         return self.gitHash
 
-
-    def getContent(self)->List[Detection]:
+    def getContent(self) -> List[Detection]:
         if isinstance(self.config.mode, Selected):
             return self.getSelected(self.config.mode.files)
         elif isinstance(self.config.mode, Changes):
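The import reshuffle above keeps the if TYPE_CHECKING: guard from typing. For readers unfamiliar with the pattern, here is a minimal standalone sketch with hypothetical module names (not contentctl code): the guarded import is visible to type checkers but never executed at runtime, which avoids circular or expensive imports while still allowing the name to appear in annotations.

# Sketch of the TYPE_CHECKING pattern; heavy_module is a hypothetical module.
from __future__ import annotations  # annotations become strings, evaluated lazily

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only type checkers (mypy, pyright) follow this import; it never runs.
    from heavy_module import ExpensiveClass


def describe(obj: ExpensiveClass) -> str:
    # At runtime the annotation is just the string "ExpensiveClass",
    # so heavy_module is never actually imported here.
    return f"got {obj!r}"

Note that the new GitService.py also imports DirectorOutputDto unconditionally a few lines below the guard, so the guarded import now mainly documents the type dependency.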
@@ -46,142 +41,205 @@ class GitService(BaseModel):
         if isinstance(self.config.mode, All):
             return self.getAll()
         else:
-            raise Exception(f"Could not get content to test. Unsupported test mode '{self.config.mode}'")
-    def getAll(self)->List[Detection]:
+            raise Exception(
+                f"Could not get content to test. Unsupported test mode '{self.config.mode}'"
+            )
+
+    def getAll(self) -> List[Detection]:
         return self.director.detections
-
-    def getChanges(self, target_branch:str)->List[Detection]:
+
+    def getChanges(self, target_branch: str) -> List[Detection]:
         repo = pygit2.Repository(path=str(self.config.path))
 
         try:
             target_tree = repo.revparse_single(target_branch).tree
             self.gitHash = target_tree.id
             diffs = repo.index.diff_to_tree(target_tree)
-        except Exception as e:
-            raise Exception(f"Error parsing diff target_branch '{target_branch}'. Are you certain that it exists?")
-
-        #Get the uncommitted changes in the current directory
+        except Exception:
+            raise Exception(
+                f"Error parsing diff target_branch '{target_branch}'. Are you certain that it exists?"
+            )
+
+        # Get the uncommitted changes in the current directory
         diffs2 = repo.index.diff_to_workdir()
-
-        #Combine the uncommitted changes with the committed changes
+
+        # Combine the uncommitted changes with the committed changes
         all_diffs = list(diffs) + list(diffs2)
 
-        #Make a filename to content map
-        filepath_to_content_map = { obj.file_path:obj for (_,obj) in self.director.name_to_content_map.items()}
+        # Make a filename to content map
+        filepath_to_content_map = {
+            obj.file_path: obj for (_, obj) in self.director.name_to_content_map.items()
+        }
 
         updated_detections: set[Detection] = set()
         updated_macros: set[Macro] = set()
         updated_lookups: set[Lookup] = set()
         updated_datasources: set[DataSource] = set()
 
-
         for diff in all_diffs:
-            if type(diff) == pygit2.Patch:
-                if diff.delta.status in (DeltaStatus.ADDED, DeltaStatus.MODIFIED, DeltaStatus.RENAMED):
-                    #print(f"{DeltaStatus(diff.delta.status).name:<8}:{diff.delta.new_file.raw_path}")
-                    decoded_path = pathlib.Path(diff.delta.new_file.raw_path.decode('utf-8'))
+            if type(diff) is pygit2.Patch:
+                if diff.delta.status in (
+                    DeltaStatus.ADDED,
+                    DeltaStatus.MODIFIED,
+                    DeltaStatus.RENAMED,
+                ):
+                    # print(f"{DeltaStatus(diff.delta.status).name:<8}:{diff.delta.new_file.raw_path}")
+                    decoded_path = pathlib.Path(
+                        diff.delta.new_file.raw_path.decode("utf-8")
+                    )
                     # Note that we only handle updates to detections, lookups, and macros at this time. All other changes are ignored.
-                    if decoded_path.is_relative_to(self.config.path/"detections") and decoded_path.suffix == ".yml":
-                        detectionObject = filepath_to_content_map.get(decoded_path, None)
+                    if (
+                        decoded_path.is_relative_to(self.config.path / "detections")
+                        and decoded_path.suffix == ".yml"
+                    ):
+                        detectionObject = filepath_to_content_map.get(
+                            decoded_path, None
+                        )
                         if isinstance(detectionObject, Detection):
                             updated_detections.add(detectionObject)
                         else:
-                            raise Exception(f"Error getting detection object for file {str(decoded_path)}")
-
-                    elif decoded_path.is_relative_to(self.config.path/"macros") and decoded_path.suffix == ".yml":
+                            raise Exception(
+                                f"Error getting detection object for file {str(decoded_path)}"
+                            )
+
+                    elif (
+                        decoded_path.is_relative_to(self.config.path / "macros")
+                        and decoded_path.suffix == ".yml"
+                    ):
                         macroObject = filepath_to_content_map.get(decoded_path, None)
                         if isinstance(macroObject, Macro):
                             updated_macros.add(macroObject)
                         else:
-                            raise Exception(f"Error getting macro object for file {str(decoded_path)}")
-
-                    elif decoded_path.is_relative_to(self.config.path/"data_sources") and decoded_path.suffix == ".yml":
-                        datasourceObject = filepath_to_content_map.get(decoded_path, None)
+                            raise Exception(
+                                f"Error getting macro object for file {str(decoded_path)}"
+                            )
+
+                    elif (
+                        decoded_path.is_relative_to(self.config.path / "data_sources")
+                        and decoded_path.suffix == ".yml"
+                    ):
+                        datasourceObject = filepath_to_content_map.get(
+                            decoded_path, None
+                        )
                         if isinstance(datasourceObject, DataSource):
                             updated_datasources.add(datasourceObject)
                         else:
-                            raise Exception(f"Error getting data source object for file {str(decoded_path)}")
+                            raise Exception(
+                                f"Error getting data source object for file {str(decoded_path)}"
+                            )
 
-                    elif decoded_path.is_relative_to(self.config.path/"lookups"):
+                    elif decoded_path.is_relative_to(self.config.path / "lookups"):
                         # We need to convert this to a yml. This means we will catch
                         # both changes to a csv AND changes to the YML that uses it
                         if decoded_path.suffix == ".yml":
-                            updatedLookup = filepath_to_content_map.get(decoded_path, None)
-                            if not isinstance(updatedLookup,Lookup):
-                                raise Exception(f"Expected {decoded_path} to be type {type(Lookup)}, but instead if was {(type(updatedLookup))}")
+                            updatedLookup = filepath_to_content_map.get(
+                                decoded_path, None
+                            )
+                            if not isinstance(updatedLookup, Lookup):
+                                raise Exception(
+                                    f"Expected {decoded_path} to be type {type(Lookup)}, but instead if was {(type(updatedLookup))}"
+                                )
                             updated_lookups.add(updatedLookup)
 
                         elif decoded_path.suffix == ".csv":
-                            # If the CSV was updated, we want to make sure that we 
+                            # If the CSV was updated, we want to make sure that we
                             # add the correct corresponding Lookup object.
-                            #Filter to find the Lookup Object the references this CSV
-                            matched = list(filter(lambda x: x.filename is not None and x.filename == decoded_path, self.director.lookups))
+                            # Filter to find the Lookup Object the references this CSV
+                            matched = list(
+                                filter(
+                                    lambda x: isinstance(x, CSVLookup)
+                                    and x.filename == decoded_path,
+                                    self.director.lookups,
+                                )
+                            )
                             if len(matched) == 0:
-                                raise Exception(f"Failed to find any lookups that reference the modified CSV file '{decoded_path}'")
+                                raise Exception(
+                                    f"Failed to find any lookups that reference the modified CSV file '{decoded_path}'"
+                                )
                             elif len(matched) > 1:
-                                raise Exception(f"More than 1 Lookup reference the modified CSV file '{decoded_path}': {[l.file_path for l in matched ]}")
+                                raise Exception(
+                                    f"More than 1 Lookup reference the modified CSV file '{decoded_path}': {[match.file_path for match in matched]}"
+                                )
                             else:
                                 updatedLookup = matched[0]
                         elif decoded_path.suffix == ".mlmodel":
-                            # Detected a changed .mlmodel file. However, since we do not have testing for these detections at 
+                            # Detected a changed .mlmodel file. However, since we do not have testing for these detections at
                             # this time, we will ignore this change.
                             updatedLookup = None
 
                         else:
-                            raise Exception(f"Detected a changed file in the lookups/ directory '{str(decoded_path)}'.\n"
-                                "Only files ending in .csv, .yml, or .mlmodel are supported in this "
-                                "directory. This file must be removed from the lookups/ directory.")
-
-                        if updatedLookup is not None and updatedLookup not in updated_lookups:
+                            raise Exception(
+                                f"Detected a changed file in the lookups/ directory '{str(decoded_path)}'.\n"
+                                "Only files ending in .csv, .yml, or .mlmodel are supported in this "
+                                "directory. This file must be removed from the lookups/ directory."
+                            )
+
+                        if (
+                            updatedLookup is not None
+                            and updatedLookup not in updated_lookups
+                        ):
                             # It is possible that both the CSV and YML have been modified for the same lookup,
-                            # and we do not want to add it twice. 
+                            # and we do not want to add it twice.
                             updated_lookups.add(updatedLookup)
 
                     else:
                         pass
-                        #print(f"Ignore changes to file {decoded_path} since it is not a detection, macro, or lookup.")
+                        # print(f"Ignore changes to file {decoded_path} since it is not a detection, macro, or lookup.")
             else:
                 raise Exception(f"Unrecognized diff type {type(diff)}")
 
-
         # If a detection has at least one dependency on changed content,
         # then we must test it again
 
-        changed_macros_and_lookups_and_datasources:set[SecurityContentObject] = updated_macros.union(updated_lookups, updated_datasources)
-
+        changed_macros_and_lookups_and_datasources: set[Macro | Lookup | DataSource] = (
+            updated_macros.union(updated_lookups, updated_datasources)
+        )
+
         for detection in self.director.detections:
             if detection in updated_detections:
-                # we are already planning to test it, don't need 
+                # we are already planning to test it, don't need
                 # to add it again
                 continue
 
             for obj in changed_macros_and_lookups_and_datasources:
                 if obj in detection.get_content_dependencies():
-                    updated_detections.add(detection)
-                    break
+                    updated_detections.add(detection)
+                    break
 
-        #Print out the names of all modified/new content
-        modifiedAndNewContentString = "\n - ".join(sorted([d.name for d in updated_detections]))
+        # Print out the names of all modified/new content
+        modifiedAndNewContentString = "\n - ".join(
+            sorted([d.name for d in updated_detections])
+        )
 
-        print(f"[{len(updated_detections)}] Pieces of modifed and new content (this may include experimental/deprecated/manual_test content):\n - {modifiedAndNewContentString}")
+        print(
+            f"[{len(updated_detections)}] Pieces of modifed and new content (this may include experimental/deprecated/manual_test content):\n - {modifiedAndNewContentString}"
+        )
         return sorted(list(updated_detections))
 
     def getSelected(self, detectionFilenames: List[FilePath]) -> List[Detection]:
         filepath_to_content_map: dict[FilePath, SecurityContentObject] = {
-            obj.file_path: obj for (_, obj) in self.director.name_to_content_map.items() if obj.file_path is not None
-        }
+            obj.file_path: obj
+            for (_, obj) in self.director.name_to_content_map.items()
+            if obj.file_path is not None
+        }
         errors = []
         detections: List[Detection] = []
         for name in detectionFilenames:
             obj = filepath_to_content_map.get(name, None)
             if obj is None:
-                errors.append(f"There is no detection file or security_content_object at '{name}'")
+                errors.append(
+                    f"There is no detection file or security_content_object at '{name}'"
+                )
             elif not isinstance(obj, Detection):
-                errors.append(f"The security_content_object at '{name}' is of type '{type(obj).__name__}', NOT '{Detection.__name__}'")
+                errors.append(
+                    f"The security_content_object at '{name}' is of type '{type(obj).__name__}', NOT '{Detection.__name__}'"
+                )
             else:
                 detections.append(obj)
 
         if errors:
             errorsString = "\n - ".join(errors)
-            raise Exception(f"The following errors were encountered while getting selected detections to test:\n - {errorsString}")
-        return detections
+            raise Exception(
+                f"The following errors were encountered while getting selected detections to test:\n - {errorsString}"
+            )
+        return detections
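Taken together, the getChanges() diff above implements contentctl's changed-content test selection on top of pygit2. A minimal standalone sketch of the same pattern follows; the repository path and branch name are assumptions, and the mapping back to Detection/Macro/Lookup/DataSource objects is reduced to collecting changed detection YAML paths.

# Standalone sketch of the pygit2 change-detection pattern used by GitService.
# The repo path and target branch below are assumptions for illustration.
import pathlib

import pygit2
from pygit2.enums import DeltaStatus

repo_path = pathlib.Path(".")  # assumed: root of a content pack checkout
repo = pygit2.Repository(path=str(repo_path))

# Committed changes relative to a target branch...
target_tree = repo.revparse_single("main").tree  # assumed branch name
diffs = repo.index.diff_to_tree(target_tree)
# ...plus uncommitted changes in the working directory.
diffs2 = repo.index.diff_to_workdir()

changed_detection_ymls: set[pathlib.Path] = set()
for patch in list(diffs) + list(diffs2):
    if patch.delta.status in (DeltaStatus.ADDED, DeltaStatus.MODIFIED, DeltaStatus.RENAMED):
        path = pathlib.Path(patch.delta.new_file.raw_path.decode("utf-8"))
        # GitService maps changed paths back to content objects; here we only
        # collect changed detection YAML files as a simplified stand-in.
        if path.is_relative_to(repo_path / "detections") and path.suffix == ".yml":
            changed_detection_ymls.add(path)

print(f"{len(changed_detection_ymls)} changed detection files")

GitService then also re-tests any detection whose get_content_dependencies() overlaps the changed macros, lookups, or data sources, in addition to detections whose own YAML changed.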
contentctl/actions/detection_testing/generate_detection_coverage_badge.py
@@ -2,7 +2,7 @@ import argparse
 import json
 import sys
 
-RAW_BADGE_SVG = '''<?xml version="1.0"?>
+RAW_BADGE_SVG = """<?xml version="1.0"?>
 <svg xmlns="http://www.w3.org/2000/svg" width="100" height="20">
 <linearGradient id="a" x2="0" y2="100%">
 <stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
@@ -19,47 +19,65 @@ RAW_BADGE_SVG = '''<?xml version="1.0"?>
 <text x="30" y="14">{}</text>
 <text x="80" y="14">{}</text>
 </g>
-</svg>'''
-
-
-parser = argparse.ArgumentParser(description='Use a summary.json file to generate a test coverage badge')
-parser.add_argument('-i', "--input_summary_file", type=argparse.FileType('r'), required = True,
-                    help='Summary file to use to generate the pass percentage badge')
-parser.add_argument('-o', "--output_badge_file", type=argparse.FileType('w'), required = True,
-                    help='Name of the badge to output')
-parser.add_argument('-s', "--badge_string", type=str, required = True,
-                    help='Name of the badge to output')
-
+</svg>"""
+
+
+parser = argparse.ArgumentParser(
+    description="Use a summary.json file to generate a test coverage badge"
+)
+parser.add_argument(
+    "-i",
+    "--input_summary_file",
+    type=argparse.FileType("r"),
+    required=True,
+    help="Summary file to use to generate the pass percentage badge",
+)
+parser.add_argument(
+    "-o",
+    "--output_badge_file",
+    type=argparse.FileType("w"),
+    required=True,
+    help="Name of the badge to output",
+)
+parser.add_argument(
+    "-s", "--badge_string", type=str, required=True, help="Name of the badge to output"
+)
 
 
 try:
-    results = parser.parse_args()
+    results = parser.parse_args()
 except Exception as e:
-    print(f"Error parsing arguments: {str(e)}")
-    exit(1)
+    print(f"Error parsing arguments: {str(e)}")
+    exit(1)
 
 try:
-    summary_info = json.loads(results.input_summary_file.read())
+    summary_info = json.loads(results.input_summary_file.read())
 except Exception as e:
-    print(f"Error loading {results.input_summary_file.name} JSON file: {str(e)}")
-    sys.exit(1)
-
-if 'summary' not in summary_info:
-    print("Missing 'summary' key in {results.input_summary_file.name}")
-    sys.exit(1)
-elif 'PASS_RATE' not in summary_info['summary'] or 'TESTS_PASSED' not in summary_info['summary']:
-    print(f"Missing PASS_RATE in 'summary' section of {results.input_summary_file.name}")
-    sys.exit(1)
-pass_percent = 100 * summary_info['summary']['PASS_RATE']
+    print(f"Error loading {results.input_summary_file.name} JSON file: {str(e)}")
+    sys.exit(1)
+
+if "summary" not in summary_info:
+    print("Missing 'summary' key in {results.input_summary_file.name}")
+    sys.exit(1)
+elif (
+    "PASS_RATE" not in summary_info["summary"]
+    or "TESTS_PASSED" not in summary_info["summary"]
+):
+    print(
+        f"Missing PASS_RATE in 'summary' section of {results.input_summary_file.name}"
+    )
+    sys.exit(1)
+pass_percent = 100 * summary_info["summary"]["PASS_RATE"]
 
 
 try:
-    results.output_badge_file.write(RAW_BADGE_SVG.format(results.badge_string, "{:2.1f}%".format(pass_percent)))
+    results.output_badge_file.write(
+        RAW_BADGE_SVG.format(results.badge_string, "{:2.1f}%".format(pass_percent))
+    )
 except Exception as e:
-    print(f"Error generating badge: {str(e)}")
-    sys.exit(1)
+    print(f"Error generating badge: {str(e)}")
+    sys.exit(1)
 
 
 print(f"Badge {results.output_badge_file.name} successfully generated!")
 sys.exit(0)
-
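The badge generator above is a small module-level argparse script. A minimal sketch of how it could be driven end to end: the summary.json keys (summary, PASS_RATE, TESTS_PASSED) and the -i/-o/-s flags come from the script itself, while the module-style invocation and the temporary paths are assumptions.

# Sketch: write a summary.json with the keys the badge script checks,
# then run the script as a module. Invocation style and paths are assumed.
import json
import pathlib
import subprocess
import sys
import tempfile

workdir = pathlib.Path(tempfile.mkdtemp())
summary_file = workdir / "summary.json"
summary_file.write_text(json.dumps({"summary": {"PASS_RATE": 0.9, "TESTS_PASSED": 45}}))

subprocess.run(
    [
        sys.executable,
        "-m",
        "contentctl.actions.detection_testing.generate_detection_coverage_badge",
        "-i", str(summary_file),
        "-o", str(workdir / "coverage.svg"),
        "-s", "coverage",
    ],
    check=True,
)

On success the script prints a confirmation and writes a 100x20 SVG whose right-hand label is the pass rate formatted as a percentage ("{:2.1f}%").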