contentctl 4.2.5__py3-none-any.whl → 4.3.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
@@ -1,436 +0,0 @@
- import os
- import sys
- import copy
- import pathlib
-
- from dataclasses import dataclass
- from jinja2 import Environment, FileSystemLoader
-
- from sigma.processing.conditions import LogsourceCondition, IncludeFieldCondition, ExcludeFieldCondition, RuleProcessingItemAppliedCondition
- from sigma.processing.transformations import AddConditionTransformation, FieldMappingTransformation, DetectionItemFailureTransformation, RuleFailureTransformation, SetStateTransformation
- from sigma.collection import SigmaCollection
- from sigma.backends.splunk import SplunkBackend
- from sigma.processing.pipeline import ProcessingItem, ProcessingPipeline
-
- from contentctl.input.yml_reader import YmlReader
- from contentctl.objects.detection import Detection
- from contentctl.objects.data_source import DataSource
- from contentctl.objects.unit_test import UnitTest
- from contentctl.objects.enums import *
- from contentctl.helper.utils import Utils
- from contentctl.input.backend_splunk_ba import SplunkBABackend
-
-
- @dataclass(frozen=True)
- class SigmaConverterInputDto:
-     data_model: SigmaConverterTarget
-     detection_path: str
-     detection_folder: str
-     input_path: str
-     log_source: str
-
-
- @dataclass(frozen=True)
- class SigmaConverterOutputDto:
-     detections: list
-
-
- class SigmaConverter:
-     output_dto: SigmaConverterOutputDto
-
-     def __init__(self, output_dto: SigmaConverterOutputDto) -> None:
-         self.output_dto = output_dto
-
-
-     def execute(self, input_dto: SigmaConverterInputDto) -> None:
-
-         detection_files = []
-         errors = []
-
-         if input_dto.detection_path:
-             detection_files.append(input_dto.detection_path)
-         elif input_dto.detection_folder:
-             detection_files = Utils.get_all_yml_files_from_directory(input_dto.detection_folder)
-         else:
-             print("ERROR: --detection_path or --detection_folder needed.")
-             sys.exit(1)
-
-         for detection_file in detection_files:
-             try:
-                 detection = self.read_detection(str(detection_file))
-                 print("Converting detection: " + detection.name)
-                 data_source = self.load_data_source(input_dto.input_path, detection.data_source[0])
-                 if not data_source:
-                     print("ERROR: Couldn't find data source with name " + detection.data_source[0] + " for detection " + detection.name)
-                     sys.exit(1)
-
-                 file_name = detection.name.replace(' ', '_').replace('-', '_').replace('.', '_').replace('/', '_').lower()
-
-                 if input_dto.data_model == SigmaConverterTarget.RAW:
-                     if input_dto.log_source and input_dto.log_source != detection.data_source[0]:
-                         try:
-                             field_mapping = self.find_mapping(data_source.convert_to_log_source, 'data_source', input_dto.log_source)
-                         except Exception as e:
-                             print(e)
-                             print("ERROR: Couldn't find data source mapping for log source " + input_dto.log_source + " for detection: " + detection.name)
-                             sys.exit(1)
-
-                         detection = self.convert_detection_fields(detection, field_mapping)
-
-                         logsource_condition = self.get_logsource_condition(data_source)
-                         processing_item = self.get_field_transformation_processing_item(
-                             field_mapping['mapping'],
-                             logsource_condition
-                         )
-                         sigma_processing_pipeline = self.get_pipeline_from_processing_items([processing_item])
-                         splunk_backend = SplunkBackend(processing_pipeline=sigma_processing_pipeline)
-                         data_source = self.load_data_source(input_dto.input_path, input_dto.log_source)
-
-                     else:
-                         splunk_backend = SplunkBackend()
-
-                     sigma_rule = self.get_sigma_rule(detection, data_source)
-                     search = splunk_backend.convert(sigma_rule)[0]
-                     search = self.add_source_macro(search, data_source.type)
-                     search = self.add_stats_count(search, data_source.raw_fields)
-                     search = self.add_timeformat_conversion(search)
-                     search = self.add_filter_macro(search, file_name)
-
-                     detection.file_path = file_name + '.yml'
-
-                 elif input_dto.data_model == SigmaConverterTarget.CIM:
-                     logsource_condition = self.get_logsource_condition(data_source)
-                     try:
-                         field_mapping = self.find_mapping(data_source.field_mappings, 'data_model', 'cim')
-                     except Exception as e:
-                         print(e)
-                         print("ERROR: Couldn't find data source mapping to cim for log source " + detection.data_source[0] + " and detection " + detection.name)
-                         sys.exit(1)
-
-                     detection = self.convert_detection_fields(detection, field_mapping)
-                     sigma_rule = self.get_sigma_rule(detection, data_source)
-
-                     sigma_transformation_processing_item = self.get_field_transformation_processing_item(
-                         field_mapping['mapping'],
-                         logsource_condition
-                     )
-
-                     sigma_state_fields_processing_item = self.get_state_fields_processing_item(
-                         field_mapping['mapping'].values(),
-                         logsource_condition
-                     )
-                     sigma_state_data_model_processing_item = self.get_state_data_model_processing_item(
-                         field_mapping['data_set'],
-                         logsource_condition
-                     )
-                     sigma_processing_pipeline = self.get_pipeline_from_processing_items([
-                         sigma_transformation_processing_item,
-                         sigma_state_fields_processing_item,
-                         sigma_state_data_model_processing_item
-                     ])
-                     splunk_backend = SplunkBackend(processing_pipeline=sigma_processing_pipeline)
-                     search = splunk_backend.convert(sigma_rule, "data_model")[0]
-                     search = self.add_filter_macro(search, file_name)
-
-                     detection.file_path = file_name + '.yml'
-
-                 elif input_dto.data_model == SigmaConverterTarget.OCSF:
-
-                     processing_items = list()
-                     logsource_condition = self.get_logsource_condition(data_source)
-                     if input_dto.log_source and input_dto.log_source != detection.data_source[0]:
-                         data_source_new = self.load_data_source(input_dto.input_path, input_dto.log_source)
-
-                         try:
-                             field_mapping = self.get_mapping_converted_data_source(
-                                 data_source,
-                                 "data_source",
-                                 input_dto.log_source,
-                                 data_source_new,
-                                 "data_model",
-                                 "ocsf"
-                             )
-                         except Exception as e:
-                             print(e)
-                             print("ERROR: Couldn't find data source mapping for log source " + input_dto.log_source + " and detection " + detection.name)
-                             sys.exit(1)
-
-                         cim_to_ocsf_mapping = self.get_cim_to_ocsf_mapping(data_source_new)
-
-                     else:
-                         field_mapping = self.find_mapping(data_source.field_mappings, 'data_model', 'ocsf')
-                         cim_to_ocsf_mapping = self.get_cim_to_ocsf_mapping(data_source)
-
-                     field_mapping_underline = copy.deepcopy(field_mapping)
-                     for field in field_mapping_underline["mapping"].keys():
-                         field_mapping_underline["mapping"][field] = field_mapping_underline["mapping"][field].replace(".", "_")
-
-                     self.add_required_fields(cim_to_ocsf_mapping, detection)
-                     self.add_mappings(cim_to_ocsf_mapping, detection)
-
-                     self.update_observables(detection)
-
-                     processing_items.append(
-                         self.get_field_transformation_processing_item(
-                             field_mapping_underline['mapping'],
-                             logsource_condition
-                         )
-                     )
-                     processing_items.append(
-                         self.get_state_fields_processing_item(
-                             field_mapping_underline['mapping'].values(),
-                             logsource_condition
-                         )
-                     )
-
-                     detection = self.convert_detection_fields(detection)
-                     sigma_rule = self.get_sigma_rule(detection, data_source)
-                     sigma_processing_pipeline = self.get_pipeline_from_processing_items(processing_items)
-
-                     splunk_backend = SplunkBABackend(processing_pipeline=sigma_processing_pipeline, detection=detection, field_mapping=field_mapping)
-
-                     search = splunk_backend.convert(sigma_rule, "data_model")[0]
-
-                     search = search + ' --finding_report--'
-                     detection.file_path = 'ssa___' + file_name + '.yml'
-
-                 detection.search = search
-
-                 self.output_dto.detections.append(detection)
-
-             except Exception as e:
-                 errors.append(f"ERROR: Converting detection file '{detection_file}': {str(e)}")
-
-         if errors:
-             errors_string = '\n\t'.join(errors)
-             raise Exception(f"The following errors were encountered during conversion:\n\t{errors_string}")
-
-     def read_detection(self, detection_path: str) -> Detection:
-         yml_dict = YmlReader.load_file(detection_path)
-
-         # SSA detections are ALLOWED to have names longer than 67 characters,
-         # unlike Splunk App detections. Because we still want to use the
-         # Detection object (and its validations), we arbitrarily truncate
-         # the name of a detection if it is too long so that it passes
-         # validation, then restore the full name after the object is
-         # constructed. Because Pydantic is not configured to validate
-         # each new field assignment, this does not raise an error.
-         name = yml_dict.get("name", "")
-         yml_dict["name"] = name[:67]
-         detection = Detection.parse_obj(yml_dict)
-         # Remove any integration tests. IntegrationTests are only relevant
-         # for ESCU content and NOT for BA content. Instead of filtering OUT
-         # IntegrationTest, we ONLY include UnitTest. This supports the
-         # introduction of additional ESCU test types in the future.
-         detection.tests = list(filter(lambda t: isinstance(t, UnitTest), detection.tests))
-
-         detection.name = name
-
-         return detection
-
-
-     def load_data_source(self, input_path: str, data_source_name: str) -> DataSource:
-         data_sources = list()
-         files = Utils.get_all_yml_files_from_directory(os.path.join(input_path, 'data_sources'))
-         for file in files:
-             data_sources.append(DataSource.parse_obj(YmlReader.load_file(str(file))))
-
-         for obj in data_sources:
-             if obj.name == data_source_name:
-                 return obj
-
-         return None
-
-
-     def get_sigma_rule(self, detection: Detection, data_source: DataSource) -> SigmaCollection:
-         return SigmaCollection.from_dicts([{
-             "title": detection.name,
-             "status": "experimental",
-             "logsource": {
-                 "category": data_source.category,
-                 "product": data_source.product
-             },
-             "detection": detection.search
-         }])
-
-
-     def convert_detection_fields(self, detection: Detection, mappings: dict = None) -> Detection:
-         for selection in detection.search.keys():
-             if selection != "condition":
-                 new_selection = copy.deepcopy(detection.search[selection])
-                 for field in detection.search[selection].keys():
-                     if mappings:
-                         # Rename the field according to the data source mapping, if one exists.
-                         if field in mappings["mapping"]:
-                             new_selection[mappings["mapping"][field]] = detection.search[selection][field]
-                             new_selection.pop(field)
-                     else:
-                         # Default behavior: rewrite dotted field names with underscores.
-                         new_field_name = field.replace(".", "_")
-                         if new_field_name != field:
-                             new_selection[new_field_name] = detection.search[selection][field]
-                             new_selection.pop(field)
-                 detection.search[selection] = new_selection
-
-         return detection
-
-
-     def get_logsource_condition(self, data_source: DataSource) -> LogsourceCondition:
-         return LogsourceCondition(
-             category=data_source.category,
-             product=data_source.product,
-         )
-
-
-     def get_field_transformation_processing_item(self, data_source_mapping: dict, logsource_condition: LogsourceCondition) -> ProcessingItem:
-         return ProcessingItem(
-             identifier="field_mapping_transformation",
-             transformation=FieldMappingTransformation(data_source_mapping),
-             rule_conditions=[
-                 logsource_condition
-             ]
-         )
-
-
-     def get_state_fields_processing_item(self, fields: list, logsource_condition: LogsourceCondition) -> ProcessingItem:
-         return ProcessingItem(
-             identifier="fields",
-             transformation=SetStateTransformation("fields", fields),
-             rule_conditions=[
-                 logsource_condition
-             ]
-         )
-
-
-     def get_state_data_model_processing_item(self, data_model: str, logsource_condition: LogsourceCondition) -> ProcessingItem:
-         return ProcessingItem(
-             identifier="data_model",
-             transformation=SetStateTransformation("data_model_set", data_model),
-             rule_conditions=[
-                 logsource_condition
-             ]
-         )
-
-
-     def get_pipeline_from_processing_items(self, processing_items: list) -> ProcessingPipeline:
-         return ProcessingPipeline(
-             name="Splunk Sigma",
-             priority=10,
-             items=processing_items
-         )
-
-     def add_source_macro(self, search: str, data_source_type: str) -> str:
-         return "`" + data_source_type + "` " + search
-
-     def add_stats_count(self, search: str, fields: list) -> str:
-         return search + " | fillnull | stats count min(_time) as firstTime max(_time) as lastTime by " + " ".join(fields)
-
-     def add_timeformat_conversion(self, search: str) -> str:
-         return search + '| convert timeformat="%Y-%m-%dT%H:%M:%S" ctime(firstTime) | convert timeformat="%Y-%m-%dT%H:%M:%S" ctime(lastTime) '
-
-     def add_filter_macro(self, search: str, file_name: str) -> str:
-         return search + '| `' + file_name + '_filter`'
-
-     def find(self, name: str, path: str) -> str:
-         for root, dirs, files in os.walk(path):
-             if name in files:
-                 return os.path.join(root, name)
-         return None
-
-     def find_mapping(self, field_mappings: list, key: str, value: str) -> dict:
-         # Return the first mapping whose `key` entry equals `value`,
-         # e.g. key="data_model", value="cim".
-         for mapping in field_mappings:
-             if mapping[key] == value:
-                 return mapping
-
-         raise AttributeError("ERROR: Couldn't find mapping.")
-
-
-     def add_required_fields(self, field_mapping: dict, detection: Detection) -> None:
-         required_fields = list()
-         for mapping in field_mapping["mapping"].keys():
-             required_fields.append(field_mapping["mapping"][mapping])
-
-         detection.tags.required_fields = required_fields
-
-
-     def add_mappings(self, field_mapping: dict, detection: Detection) -> None:
-         mappings = list()
-         for mapping in field_mapping["mapping"].keys():
-             mappings.append({
-                 "ocsf": field_mapping["mapping"][mapping],
-                 "cim": mapping
-             })
-         detection.tags.mappings = mappings
-
-     def update_observables(self, detection: Detection) -> None:
-         mapping_field_to_type = {
-             "process.user.name": "User Name",
-             "actor.user.name": "User Name",
-             "device.hostname": "Hostname",
-             "actor.process.file.name": "File Name",
-             "actor.process.file.path": "File Name",
-             "actor.process.cmd_line": "Process",
-             "actor.user.uid": "Other",
-             "process.cmd_line": "Other",
-             "process.file.path": "File",
-             "process.file.name": "File",
-             "process.uid": "Other",
-             "process.pid": "Other",
-             "actor.process.pid": "Other"
-         }
-
-         observables = list()
-
-         for field in detection.tags.required_fields:
-             observables.append({
-                 "name": field,
-                 "type": mapping_field_to_type[field]
-             })
-
-         detection.tags.observable = observables
-
-
-     def get_cim_to_ocsf_mapping(self, data_source: DataSource) -> dict:
-         cim_to_ocsf_mapping = dict()
-         cim_to_ocsf_mapping["mapping"] = dict()
-         cim_mapping = self.find_mapping(data_source.field_mappings, "data_model", "cim")
-         ocsf_mapping = self.find_mapping(data_source.field_mappings, "data_model", "ocsf")
-
-         for key in cim_mapping["mapping"].keys():
-             cim_field = cim_mapping["mapping"][key].split(".")[1]
-             cim_to_ocsf_mapping["mapping"][cim_field] = ocsf_mapping["mapping"][key]
-
-         return cim_to_ocsf_mapping
-
-
-     def get_mapping_converted_data_source(self, det_ds: DataSource, det_ds_obj: str, det_ds_dm: str, con_ds: DataSource, con_ds_obj: str, con_ds_dm: str) -> dict:
-         mapping = dict()
-         mapping["mapping"] = dict()
-         det_ds_mapping = self.find_mapping(det_ds.convert_to_log_source, det_ds_obj, det_ds_dm)
-         con_ds_mapping = self.find_mapping(con_ds.field_mappings, con_ds_obj, con_ds_dm)
-
-         for key in det_ds_mapping["mapping"].keys():
-             mapped_field = con_ds_mapping["mapping"][det_ds_mapping["mapping"][key]]
-             mapping["mapping"][key] = mapped_field
-
-         return mapping
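
Note: below is a minimal, self-contained sketch of the conversion flow the removed SigmaConverter implemented, for context. A one-item pySigma processing pipeline renames fields for rules matching a log source, and the Splunk backend then emits SPL. The rule content, the "Image" to "process_path" mapping, and the log source values are illustrative assumptions, not values taken from contentctl; running it requires the pysigma and pysigma-backend-splunk packages.

from sigma.collection import SigmaCollection
from sigma.backends.splunk import SplunkBackend
from sigma.processing.conditions import LogsourceCondition
from sigma.processing.pipeline import ProcessingItem, ProcessingPipeline
from sigma.processing.transformations import FieldMappingTransformation

# Build a Sigma rule from a plain dict, the same way get_sigma_rule() does.
rule = SigmaCollection.from_dicts([{
    "title": "Example Detection",
    "status": "experimental",
    "logsource": {"category": "process_creation", "product": "windows"},
    "detection": {
        "selection": {"Image|endswith": "\\powershell.exe"},
        "condition": "selection",
    },
}])

# A one-item pipeline that renames "Image" for matching rules, mirroring
# get_field_transformation_processing_item() and
# get_pipeline_from_processing_items() above.
pipeline = ProcessingPipeline(
    name="Splunk Sigma",
    priority=10,
    items=[
        ProcessingItem(
            identifier="field_mapping_transformation",
            transformation=FieldMappingTransformation({"Image": "process_path"}),
            rule_conditions=[
                LogsourceCondition(category="process_creation", product="windows")
            ],
        )
    ],
)

# The backend returns one SPL query string per converted rule.
print(SplunkBackend(processing_pipeline=pipeline).convert(rule)[0])
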
@@ -1,169 +0,0 @@
- import sys
- import re
- import os
-
- from pydantic import ValidationError
- from typing import List
- from contentctl.input.yml_reader import YmlReader
- from contentctl.objects.detection import Detection
- from contentctl.objects.security_content_object import SecurityContentObject
- from contentctl.objects.macro import Macro
- from contentctl.objects.mitre_attack_enrichment import MitreAttackEnrichment
- from contentctl.enrichments.cve_enrichment import CveEnrichment
- from contentctl.enrichments.splunk_app_enrichment import SplunkAppEnrichment
- from contentctl.objects.ssa_detection import SSADetection
- from contentctl.objects.constants import *
- from contentctl.enrichments.attack_enrichment import AttackEnrichment
-
- class SSADetectionBuilder:
-     security_content_obj: SSADetection
-
-
-     def setObject(self, path: str) -> None:
-         yml_dict = YmlReader.load_file(path)
-         self.security_content_obj = SSADetection.parse_obj(yml_dict)
-         self.security_content_obj.source = os.path.split(os.path.dirname(self.security_content_obj.file_path))[-1]
-
-     def addProvidingTechnologies(self) -> None:
-         if self.security_content_obj:
-             if 'Endpoint' in str(self.security_content_obj.search):
-                 self.security_content_obj.providing_technologies = ["Sysmon", "Microsoft Windows", "Carbon Black Response", "CrowdStrike Falcon", "Symantec Endpoint Protection"]
-             if "`cloudtrail`" in str(self.security_content_obj.search):
-                 self.security_content_obj.providing_technologies = ["Amazon Web Services - Cloudtrail"]
-             if '`wineventlog_security`' in self.security_content_obj.search or '`powershell`' in self.security_content_obj.search:
-                 self.security_content_obj.providing_technologies = ["Microsoft Windows"]
-
-
-     def addMappings(self) -> None:
-         if self.security_content_obj:
-             keys = ['mitre_attack', 'kill_chain_phases', 'cis20', 'nist']
-             mappings = {}
-             for key in keys:
-                 if key == 'mitre_attack':
-                     if getattr(self.security_content_obj.tags, 'mitre_attack_id'):
-                         mappings[key] = getattr(self.security_content_obj.tags, 'mitre_attack_id')
-                 elif getattr(self.security_content_obj.tags, key):
-                     mappings[key] = getattr(self.security_content_obj.tags, key)
-             self.security_content_obj.mappings = mappings
-
-
-     def addAnnotations(self) -> None:
-         if self.security_content_obj:
-             annotations = {}
-             annotation_keys = ['mitre_attack', 'kill_chain_phases', 'cis20', 'nist',
-                                'analytic_story', 'context', 'impact', 'confidence', 'cve']
-             for key in annotation_keys:
-                 if key == 'mitre_attack':
-                     if getattr(self.security_content_obj.tags, 'mitre_attack_id'):
-                         annotations[key] = getattr(self.security_content_obj.tags, 'mitre_attack_id')
-                     continue
-                 try:
-                     if getattr(self.security_content_obj.tags, key):
-                         annotations[key] = getattr(self.security_content_obj.tags, key)
-                 except AttributeError:
-                     continue
-             self.security_content_obj.annotations = annotations
-
-
-     def addUnitTest(self) -> None:
-         if self.security_content_obj:
-             if self.security_content_obj.tests:
-                 self.security_content_obj.test = self.security_content_obj.tests[0]
-
-
-     def addMitreAttackEnrichment(self, attack_enrichment: dict) -> None:
-         if self.security_content_obj:
-             if attack_enrichment:
-                 if self.security_content_obj.tags.mitre_attack_id:
-                     self.security_content_obj.tags.mitre_attack_enrichments = []
-
-                     for mitre_attack_id in self.security_content_obj.tags.mitre_attack_id:
-                         if mitre_attack_id in attack_enrichment:
-                             mitre_attack_enrichment = MitreAttackEnrichment(
-                                 mitre_attack_id=mitre_attack_id,
-                                 mitre_attack_technique=attack_enrichment[mitre_attack_id]["technique"],
-                                 mitre_attack_tactics=sorted(attack_enrichment[mitre_attack_id]["tactics"]),
-                                 mitre_attack_groups=sorted(attack_enrichment[mitre_attack_id]["groups"])
-                             )
-                             self.security_content_obj.tags.mitre_attack_enrichments.append(mitre_attack_enrichment)
-                         else:
-                             raise ValueError("mitre_attack_id " + mitre_attack_id + " doesn't exist for detection " + self.security_content_obj.name)
-
-     def addMitreAttackEnrichmentNew(self, attack_enrichment: AttackEnrichment) -> None:
-         # Skip enrichment entirely when it is disabled in the configuration.
-         if attack_enrichment.use_enrichment:
-             if self.security_content_obj and self.security_content_obj.tags.mitre_attack_id:
-                 self.security_content_obj.tags.mitre_attack_enrichments = []
-                 for mitre_attack_id in self.security_content_obj.tags.mitre_attack_id:
-                     enrichment_obj = attack_enrichment.getEnrichmentByMitreID(mitre_attack_id)
-                     if enrichment_obj is not None:
-                         self.security_content_obj.tags.mitre_attack_enrichments.append(enrichment_obj)
-
-
-     def addCIS(self) -> None:
-         if self.security_content_obj:
-             if self.security_content_obj.tags.security_domain == "network":
-                 self.security_content_obj.tags.cis20 = ["CIS 13"]
-             else:
-                 self.security_content_obj.tags.cis20 = ["CIS 10"]
-
-
-     def addKillChainPhase(self) -> None:
-         if self.security_content_obj:
-             if not self.security_content_obj.tags.kill_chain_phases:
-                 kill_chain_phases = list()
-                 if self.security_content_obj.tags.mitre_attack_enrichments:
-                     for mitre_attack_enrichment in self.security_content_obj.tags.mitre_attack_enrichments:
-                         for mitre_attack_tactic in mitre_attack_enrichment.mitre_attack_tactics:
-                             kill_chain_phases.append(ATTACK_TACTICS_KILLCHAIN_MAPPING[mitre_attack_tactic])
-                 self.security_content_obj.tags.kill_chain_phases = list(dict.fromkeys(kill_chain_phases))
-
-
-     def addNist(self) -> None:
-         if self.security_content_obj:
-             if self.security_content_obj.type == "TTP":
-                 self.security_content_obj.tags.nist = ["DE.CM"]
-             else:
-                 self.security_content_obj.tags.nist = ["DE.AE"]
-
-
-     def addDatamodel(self) -> None:
-         if self.security_content_obj:
-             self.security_content_obj.datamodel = []
-             data_models = [
-                 "Authentication",
-                 "Change",
-                 "Change_Analysis",
-                 "Email",
-                 "Endpoint",
-                 "Network_Resolution",
-                 "Network_Sessions",
-                 "Network_Traffic",
-                 "Risk",
-                 "Splunk_Audit",
-                 "UEBA",
-                 "Updates",
-                 "Vulnerabilities",
-                 "Web"
-             ]
-             for data_model in data_models:
-                 if data_model in self.security_content_obj.search:
-                     self.security_content_obj.datamodel.append(data_model)
-
-
-     def addRBA(self) -> None:
-         if self.security_content_obj:
-             if self.security_content_obj.tags.risk_score >= 80:
-                 self.security_content_obj.tags.risk_severity = 'high'
-             elif self.security_content_obj.tags.risk_score >= 50:
-                 self.security_content_obj.tags.risk_severity = 'medium'
-             else:
-                 self.security_content_obj.tags.risk_severity = 'low'
-
-
-     def reset(self) -> None:
-         self.security_content_obj = None
-
-
-     def getObject(self) -> SSADetection:
-         return self.security_content_obj
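
Note: below is a minimal usage sketch of the removed builder, written as assumed driver code (the module path and YAML path are hypothetical; contentctl's real director code lives elsewhere). Ordering matters: addKillChainPhase() derives kill chain phases from tags.mitre_attack_enrichments, so it only has an effect after one of the MITRE enrichment methods has run.

from contentctl.input.ssa_detection_builder import SSADetectionBuilder  # hypothetical module path

builder = SSADetectionBuilder()
builder.setObject("detections/example_detection.yml")  # hypothetical YAML path
builder.addProvidingTechnologies()
builder.addMappings()
builder.addAnnotations()
builder.addUnitTest()
builder.addCIS()
builder.addKillChainPhase()  # no-op here: no MITRE enrichments were added first
builder.addNist()
builder.addDatamodel()
builder.addRBA()
detection = builder.getObject()
builder.reset()  # clear state before building the next detection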