contentctl 4.1.5__py3-none-any.whl → 4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,8 @@ from contentctl.output.conf_output import ConfOutput
  from contentctl.output.conf_writer import ConfWriter
  from contentctl.output.ba_yml_output import BAYmlOutput
  from contentctl.output.api_json_output import ApiJsonOutput
+ from contentctl.output.data_source_writer import DataSourceWriter
+ from contentctl.objects.lookup import Lookup
  import pathlib
  import json
  import datetime
@@ -28,9 +30,20 @@ class Build:


  def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
- if input_dto.config.build_app:
+ if input_dto.config.build_app:
+
  updated_conf_files:set[pathlib.Path] = set()
  conf_output = ConfOutput(input_dto.config)
+
+ # Construct a special lookup whose CSV is created at runtime and
+ # written directly into the output folder. It is created with model_construct,
+ # not model_validate, because the CSV does not exist yet.
+ data_sources_lookup_csv_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.csv"
+ DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
+ input_dto.director_output_dto.addContentToDictMappings(Lookup.model_construct(description= "A lookup file that will contain the data source objects for detections.",
+ filename=data_sources_lookup_csv_path,
+ name="data_sources"))
+
  updated_conf_files.update(conf_output.writeHeaders())
  updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.detections, SecurityContentType.detections))
  updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.stories, SecurityContentType.stories))
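
For context on the model_construct/model_validate distinction the comment above relies on, a minimal sketch (assuming Pydantic v2; CsvLookup is a hypothetical stand-in for contentctl's Lookup):

    from pydantic import BaseModel, FilePath

    class CsvLookup(BaseModel):  # hypothetical stand-in for contentctl's Lookup
        name: str
        description: str
        filename: FilePath

    # model_validate enforces that a FilePath points at an existing file on disk;
    # model_construct skips validation entirely, so a runtime-generated CSV path
    # can be registered without the filesystem check.
    lookup = CsvLookup.model_construct(
        name="data_sources",
        description="A lookup file that will contain the data source objects for detections.",
        filename="lookups/data_sources.csv",  # illustrative path only
    )
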
@@ -28,6 +28,7 @@ class Initialize:
  ('../templates/app_template/', 'app_template'),
  ('../templates/deployments/', 'deployments'),
  ('../templates/detections/', 'detections'),
+ ('../templates/data_sources/', 'data_sources'),
  ('../templates/macros/','macros'),
  ('../templates/stories/', 'stories'),
  ]:
@@ -28,7 +28,6 @@ class Validate:
  [],
  [],
  [],
- [],
  )

  director = Director(director_output_dto)
@@ -58,7 +58,6 @@ class DirectorOutputDto:
  deployments: list[Deployment]
  ssa_detections: list[SSADetection]
  data_sources: list[DataSource]
- event_sources: list[EventSource]
  name_to_content_map: dict[str, SecurityContentObject] = field(default_factory=dict)
  uuid_to_content_map: dict[UUID, SecurityContentObject] = field(default_factory=dict)

@@ -68,17 +67,19 @@ class DirectorOutputDto:
  # Since SSA detections may have the same name as ESCU detection,
  # for this function we prepend 'SSA ' to the name.
  content_name = f"SSA {content_name}"
+
  if content_name in self.name_to_content_map:
  raise ValueError(
  f"Duplicate name '{content_name}' with paths:\n"
  f" - {content.file_path}\n"
  f" - {self.name_to_content_map[content_name].file_path}"
  )
- elif content.id in self.uuid_to_content_map:
+
+ if content.id in self.uuid_to_content_map:
  raise ValueError(
  f"Duplicate id '{content.id}' with paths:\n"
  f" - {content.file_path}\n"
- f" - {self.name_to_content_map[content_name].file_path}"
+ f" - {self.uuid_to_content_map[content.id].file_path}"
  )

  if isinstance(content, Lookup):
@@ -99,9 +100,10 @@ class DirectorOutputDto:
  self.detections.append(content)
  elif isinstance(content, SSADetection):
  self.ssa_detections.append(content)
+ elif isinstance(content, DataSource):
+ self.data_sources.append(content)
  else:
- raise Exception(f"Unknown security content type: {type(content)}")
-
+ raise Exception(f"Unknown security content type: {type(content)}")

  self.name_to_content_map[content_name] = content
  self.uuid_to_content_map[content.id] = content
@@ -124,41 +126,27 @@ class Director():
  self.createSecurityContent(SecurityContentType.stories)
  self.createSecurityContent(SecurityContentType.baselines)
  self.createSecurityContent(SecurityContentType.investigations)
- self.createSecurityContent(SecurityContentType.event_sources)
  self.createSecurityContent(SecurityContentType.data_sources)
  self.createSecurityContent(SecurityContentType.playbooks)
  self.createSecurityContent(SecurityContentType.detections)
  self.createSecurityContent(SecurityContentType.ssa_detections)

+
+ from contentctl.objects.abstract_security_content_objects.detection_abstract import MISSING_SOURCES
+ if len(MISSING_SOURCES) > 0:
+ missing_sources_string = "\n 🟡 ".join(sorted(list(MISSING_SOURCES)))
+ print("WARNING: The following data_sources have been used in detections, but are not yet defined.\n"
+ "This is not yet an error since not all data_sources have been defined, but will be converted to an error soon:\n 🟡 "
+ f"{missing_sources_string}")
+ else:
+ print("No missing data_sources!")
+
  def createSecurityContent(self, contentType: SecurityContentType) -> None:
  if contentType == SecurityContentType.ssa_detections:
  files = Utils.get_all_yml_files_from_directory(
  os.path.join(self.input_dto.path, "ssa_detections")
  )
  security_content_files = [f for f in files if f.name.startswith("ssa___")]
-
- elif contentType == SecurityContentType.data_sources:
- security_content_files = (
- Utils.get_all_yml_files_from_directory_one_layer_deep(
- os.path.join(self.input_dto.path, "data_sources")
- )
- )
-
- elif contentType == SecurityContentType.event_sources:
- security_content_files = Utils.get_all_yml_files_from_directory(
- os.path.join(self.input_dto.path, "data_sources", "cloud", "event_sources")
- )
- security_content_files.extend(
- Utils.get_all_yml_files_from_directory(
- os.path.join(self.input_dto.path, "data_sources", "endpoint", "event_sources")
- )
- )
- security_content_files.extend(
- Utils.get_all_yml_files_from_directory(
- os.path.join(self.input_dto.path, "data_sources", "network", "event_sources")
- )
- )
-
  elif contentType in [
  SecurityContentType.deployments,
  SecurityContentType.lookups,
@@ -168,6 +156,7 @@ class Director():
  SecurityContentType.investigations,
  SecurityContentType.playbooks,
  SecurityContentType.detections,
+ SecurityContentType.data_sources,
  ]:
  files = Utils.get_all_yml_files_from_directory(
  os.path.join(self.input_dto.path, str(contentType.name))
@@ -190,54 +179,48 @@ class Director():
  modelDict = YmlReader.load_file(file)

  if contentType == SecurityContentType.lookups:
- lookup = Lookup.model_validate(modelDict,context={"output_dto":self.output_dto, "config":self.input_dto})
- self.output_dto.addContentToDictMappings(lookup)
+ lookup = Lookup.model_validate(modelDict,context={"output_dto":self.output_dto, "config":self.input_dto})
+ self.output_dto.addContentToDictMappings(lookup)

  elif contentType == SecurityContentType.macros:
- macro = Macro.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(macro)
+ macro = Macro.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(macro)

  elif contentType == SecurityContentType.deployments:
- deployment = Deployment.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(deployment)
+ deployment = Deployment.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(deployment)

  elif contentType == SecurityContentType.playbooks:
- playbook = Playbook.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(playbook)
+ playbook = Playbook.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(playbook)

  elif contentType == SecurityContentType.baselines:
- baseline = Baseline.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(baseline)
+ baseline = Baseline.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(baseline)

  elif contentType == SecurityContentType.investigations:
- investigation = Investigation.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(investigation)
+ investigation = Investigation.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(investigation)

  elif contentType == SecurityContentType.stories:
- story = Story.model_validate(modelDict,context={"output_dto":self.output_dto})
- self.output_dto.addContentToDictMappings(story)
+ story = Story.model_validate(modelDict,context={"output_dto":self.output_dto})
+ self.output_dto.addContentToDictMappings(story)

  elif contentType == SecurityContentType.detections:
- detection = Detection.model_validate(modelDict,context={"output_dto":self.output_dto, "app":self.input_dto.app})
- self.output_dto.addContentToDictMappings(detection)
+ detection = Detection.model_validate(modelDict,context={"output_dto":self.output_dto, "app":self.input_dto.app})
+ self.output_dto.addContentToDictMappings(detection)

  elif contentType == SecurityContentType.ssa_detections:
- self.constructSSADetection(self.ssa_detection_builder, self.output_dto,str(file))
- ssa_detection = self.ssa_detection_builder.getObject()
- if ssa_detection.status in [DetectionStatus.production.value, DetectionStatus.validation.value]:
- self.output_dto.addContentToDictMappings(ssa_detection)
+ self.constructSSADetection(self.ssa_detection_builder, self.output_dto,str(file))
+ ssa_detection = self.ssa_detection_builder.getObject()
+ if ssa_detection.status in [DetectionStatus.production.value, DetectionStatus.validation.value]:
+ self.output_dto.addContentToDictMappings(ssa_detection)

  elif contentType == SecurityContentType.data_sources:
  data_source = DataSource.model_validate(
  modelDict, context={"output_dto": self.output_dto}
  )
- self.output_dto.data_sources.append(data_source)
-
- elif contentType == SecurityContentType.event_sources:
- event_source = EventSource.model_validate(
- modelDict, context={"output_dto": self.output_dto}
- )
- self.output_dto.event_sources.append(event_source)
+ self.output_dto.addContentToDictMappings(data_source)

  else:
  raise Exception(f"Unsupported type: [{contentType}]")
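
Every branch above follows the same pattern: load the YAML into a dict, then hand it to model_validate together with a context dict so validators can see the content built so far. A minimal sketch of that mechanism (assuming Pydantic v2; Example is a hypothetical class, not part of contentctl):

    from pydantic import BaseModel, ValidationInfo, field_validator

    class Example(BaseModel):  # hypothetical
        name: str

        @field_validator("name")
        @classmethod
        def check_with_director(cls, value: str, info: ValidationInfo) -> str:
            # the dict passed as context= is visible to every validator
            output_dto = (info.context or {}).get("output_dto")
            return value

    Example.model_validate({"name": "demo"}, context={"output_dto": None})
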
@@ -40,6 +40,8 @@ class YmlReader():
  if add_fields == False:
  return yml_obj

+
  yml_obj['file_path'] = str(file_path)
+

  return yml_obj
@@ -22,12 +22,14 @@ from contentctl.objects.deployment import Deployment
  from contentctl.objects.unit_test import UnitTest
  from contentctl.objects.test_group import TestGroup
  from contentctl.objects.integration_test import IntegrationTest
-
+ from contentctl.objects.event_source import EventSource
+ from contentctl.objects.data_source import DataSource

  #from contentctl.objects.playbook import Playbook
- from contentctl.objects.enums import DataSource,ProvidingTechnology
+ from contentctl.objects.enums import ProvidingTechnology
  from contentctl.enrichments.cve_enrichment import CveEnrichmentObj

+ MISSING_SOURCES:set[str] = set()

  class Detection_Abstract(SecurityContentObject):
  model_config = ConfigDict(use_enum_values=True)
@@ -35,12 +37,11 @@ class Detection_Abstract(SecurityContentObject):
  #contentType: SecurityContentType = SecurityContentType.detections
  type: AnalyticsType = Field(...)
  status: DetectionStatus = Field(...)
- data_source: Optional[List[str]] = None
+ data_source: list[str] = []
  tags: DetectionTags = Field(...)
  search: Union[str, dict[str,Any]] = Field(...)
  how_to_implement: str = Field(..., min_length=4)
  known_false_positives: str = Field(..., min_length=4)
- data_source_objects: Optional[List[DataSource]] = None

  enabled_by_default: bool = False
  file_path: FilePath = Field(...)
@@ -53,6 +54,8 @@ class Detection_Abstract(SecurityContentObject):
  # A list of groups of tests, relying on the same data
  test_groups: Union[list[TestGroup], None] = Field(None,validate_default=True)

+ data_source_objects: list[DataSource] = []
+

  @field_validator("search", mode="before")
  @classmethod
@@ -138,6 +141,7 @@ class Detection_Abstract(SecurityContentObject):
  else:
  return []

+
  @computed_field
  @property
  def source(self)->str:
@@ -161,10 +165,12 @@ class Detection_Abstract(SecurityContentObject):
  annotations_dict["type"] = self.type
  #annotations_dict["version"] = self.version

+ annotations_dict["data_source"] = self.data_source
+
  #The annotations object is a superset of the mappings object.
  # So start with the mapping object.
  annotations_dict.update(self.mappings)
-
+
  #Make sure that the results are sorted for readability/easier diffs
  return dict(sorted(annotations_dict.items(), key=lambda item: item[0]))

@@ -384,23 +390,37 @@ class Detection_Abstract(SecurityContentObject):
  raise ValueError(f"Error, failed to replace detection reference in Baseline '{baseline.name}' to detection '{self.name}'")
  baseline.tags.detections = new_detections

- self.data_source_objects = []
- for data_source_obj in director.data_sources:
- for detection_data_source in self.data_source:
- if data_source_obj.name in detection_data_source:
- self.data_source_objects.append(data_source_obj)
-
- # Remove duplicate data source objects based on their 'name' property
- unique_data_sources = {}
- for data_source_obj in self.data_source_objects:
- if data_source_obj.name not in unique_data_sources:
- unique_data_sources[data_source_obj.name] = data_source_obj
- self.data_source_objects = list(unique_data_sources.values())
+ # Data source may be defined 1 on each line, OR they may be defined as
+ # SOURCE_1 AND ANOTHERSOURCE AND A_THIRD_SOURCE
+ # if more than 1 data source is required for a detection (for example, because it includes a join)
+ # Parse and update the list to resolve individual names and remove potential duplicates
+ updated_data_source_names:set[str] = set()
+
+ for ds in self.data_source:
+ split_data_sources = {d.strip() for d in ds.split('AND')}
+ updated_data_source_names.update(split_data_sources)
+
+ sources = sorted(list(updated_data_source_names))
+
+ matched_data_sources:list[DataSource] = []
+ missing_sources:list[str] = []
+ for source in sources:
+ try:
+ matched_data_sources += DataSource.mapNamesToSecurityContentObjects([source], director)
+ except Exception as data_source_mapping_exception:
+ # We gobble this up and add it to a global set so that we
+ # can print it ONCE at the end of the build of datasources.
+ # This will be removed later as per the note below
+ MISSING_SOURCES.add(source)
+
+ if len(missing_sources) > 0:
+ # This will be changed to ValueError when we have a complete list of data sources
+ print(f"WARNING: The following exception occurred when mapping the data_source field to DataSource objects:{missing_sources}")
+
+ self.data_source_objects = matched_data_sources

  for story in self.tags.analytic_story:
- story.detections.append(self)
- story.data_sources.extend(self.data_source_objects)
-
+ story.detections.append(self)
  return self

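
The parsing introduced above can be exercised in isolation; a small sketch with illustrative values:

    # Compound entries joined with 'AND' are split, stripped, and de-duplicated
    # before each resolved name is mapped to a DataSource object.
    data_source = ["Sysmon EventID 1 AND Sysmon EventID 3", "Sysmon EventID 1"]

    updated_data_source_names: set[str] = set()
    for ds in data_source:
        updated_data_source_names.update(d.strip() for d in ds.split("AND"))

    print(sorted(updated_data_source_names))
    # ['Sysmon EventID 1', 'Sysmon EventID 3']
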
@@ -424,14 +444,16 @@ class Detection_Abstract(SecurityContentObject):
  raise ValueError("Error, baselines are constructed automatically at runtime. Please do not include this field.")


- name:Union[str,dict] = info.data.get("name",None)
+ name:Union[str,None] = info.data.get("name",None)
  if name is None:
  raise ValueError("Error, cannot get Baselines because the Detection does not have a 'name' defined.")
-
+
  director:DirectorOutputDto = info.context.get("output_dto",None)
  baselines:List[Baseline] = []
  for baseline in director.baselines:
- if name in baseline.tags.detections:
+ # This matching is a bit strange, because baseline.tags.detections starts as a list of strings, but
+ # is eventually updated to a list of Detections as we construct all of the detection objects.
+ if name in [detection_name for detection_name in baseline.tags.detections if isinstance(detection_name,str)]:
  baselines.append(baseline)

  return baselines
@@ -125,9 +125,9 @@ class SecurityContentObject_Abstract(BaseModel, abc.ABC):
  errors:list[str] = []
  if len(missing_objects) > 0:
  errors.append(f"Failed to find the following '{cls.__name__}': {missing_objects}")
- if len(missing_objects) > 0:
+ if len(mistyped_objects) > 0:
  for mistyped_object in mistyped_objects:
- errors.append(f"'{mistyped_object.name}' expected to have type '{type(Self)}', but actually had type '{type(mistyped_object)}'")
+ errors.append(f"'{mistyped_object.name}' expected to have type '{cls}', but actually had type '{type(mistyped_object)}'")

  if len(errors) > 0:
  error_string = "\n - ".join(errors)
@@ -194,6 +194,33 @@ class SecurityContentObject_Abstract(BaseModel, abc.ABC):

  def __str__(self)->str:
  return(self.__repr__())
+
+ def __lt__(self, other:object)->bool:
+ if not isinstance(other,SecurityContentObject_Abstract):
+ raise Exception(f"SecurityContentObject can only be compared to each other, not to {type(other)}")
+ return self.name < other.name
+
+ def __eq__(self, other:object)->bool:
+ if not isinstance(other,SecurityContentObject_Abstract):
+ raise Exception(f"SecurityContentObject can only be compared to each other, not to {type(other)}")
+
+ if id(self) == id(other) and self.name == other.name and self.id == other.id:
+ # Yes, this is the same object
+ return True
+
+ elif id(self) == id(other) or self.name == other.name or self.id == other.id:
+ raise Exception("Attempted to compare two SecurityContentObjects, but their fields indicate they were not globally unique:"
+ f"\n\tid(obj1) : {id(self)}"
+ f"\n\tid(obj2) : {id(other)}"
+ f"\n\tobj1.name : {self.name}"
+ f"\n\tobj2.name : {other.name}"
+ f"\n\tobj1.id : {self.id}"
+ f"\n\tobj2.id : {other.id}")
+ else:
+ return False
+
+ def __hash__(self) -> NonNegativeInt:
+ return id(self)



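
A minimal, self-contained sketch of the comparison contract added here (Item is hypothetical, not a contentctl class): objects hash by identity and order by name, which is what lets later code de-duplicate content in a set and then sort it by name.

    class Item:  # hypothetical stand-in for a SecurityContentObject
        def __init__(self, name: str) -> None:
            self.name = name
        def __lt__(self, other: "Item") -> bool:
            return self.name < other.name   # ordering by name
        def __hash__(self) -> int:
            return id(self)                 # hashing by object identity

    a, b = Item("Sysmon EventID 3"), Item("Sysmon EventID 1")
    assert [item.name for item in sorted({a, b})] == ["Sysmon EventID 1", "Sysmon EventID 3"]
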
@@ -1,28 +1,42 @@
  from __future__ import annotations
- from pydantic import BaseModel
+ from typing import Optional, Any
+ from pydantic import Field, FilePath, model_serializer
+ from contentctl.objects.security_content_object import SecurityContentObject
+ from contentctl.objects.event_source import EventSource

+ class DataSource(SecurityContentObject):
+ source: str = Field(...)
+ sourcetype: str = Field(...)
+ separator: Optional[str] = None
+ configuration: Optional[str] = None
+ supported_TA: Optional[list] = None
+ fields: Optional[list] = None
+ field_mappings: Optional[list] = None
+ convert_to_log_source: Optional[list] = None
+ example_log: Optional[str] = None

- class DataSource(BaseModel):
- name: str
- id: str
- author: str
- source: str
- sourcetype: str
- separator: str = None
- configuration: str = None
- supported_TA: dict
- event_names: list = None
- event_sources: list = None
- fields: list = None
- example_log: str = None

- def model_post_init(self, ctx:dict[str,Any]):
- context = ctx.get("output_dto")
+ @model_serializer
+ def serialize_model(self):
+ #Call serializer for parent
+ super_fields = super().serialize_model()

- if self.event_names:
- self.event_sources = []
- for event_source in context.event_sources:
- if any(event['event_name'] == event_source.event_name for event in self.event_names):
- self.event_sources.append(event_source)
-
- return self
+ #All fields custom to this model
+ model:dict[str,Any] = {
+ "source": self.source,
+ "sourcetype": self.sourcetype,
+ "separator": self.separator,
+ "configuration": self.configuration,
+ "supported_TA": self.supported_TA,
+ "fields": self.fields,
+ "field_mappings": self.field_mappings,
+ "convert_to_log_source": self.convert_to_log_source,
+ "example_log":self.example_log
+ }
+
+
+ #Combine fields from this model with fields from parent
+ super_fields.update(model)
+
+ #return the model
+ return super_fields
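
The serializer pattern used here (call the parent's serializer, then merge in the subclass's own fields) in a minimal sketch, assuming Pydantic v2; Base and Child are hypothetical classes, not contentctl's:

    from typing import Any
    from pydantic import BaseModel, model_serializer

    class Base(BaseModel):  # hypothetical
        name: str

        @model_serializer
        def serialize_model(self) -> dict[str, Any]:
            return {"name": self.name}

    class Child(Base):  # hypothetical
        source: str

        @model_serializer
        def serialize_model(self) -> dict[str, Any]:
            fields = super().serialize_model()        # parent fields first
            fields.update({"source": self.source})    # then fields specific to this model
            return fields

    print(Child(name="demo", source="XmlWinEventLog").model_dump())
    # {'name': 'demo', 'source': 'XmlWinEventLog'}
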
@@ -56,7 +56,6 @@ class SecurityContentType(enum.Enum):
  unit_tests = 9
  ssa_detections = 10
  data_sources = 11
- event_sources = 12

  # Bringing these changes back in line will take some time after
  # the initial merge is complete
@@ -1,10 +1,11 @@
  from __future__ import annotations
- from pydantic import BaseModel
+ from typing import Union, Optional, List
+ from pydantic import BaseModel, Field

+ from contentctl.objects.security_content_object import SecurityContentObject

- class EventSource(BaseModel):
- event_name: str
- fields: list[str]
- field_mappings: list[dict] = None
- convert_to_log_source: list[dict] = None
- example_log: str = None
+ class EventSource(SecurityContentObject):
+ fields: Optional[list[str]] = None
+ field_mappings: Optional[list[dict]] = None
+ convert_to_log_source: Optional[list[dict]] = None
+ example_log: Optional[str] = None
@@ -33,7 +33,18 @@ class Story(SecurityContentObject):
  detections:List[Detection] = []
  investigations: List[Investigation] = []
  baselines: List[Baseline] = []
- data_sources: List[DataSource] = []
+
+
+ @computed_field
+ @property
+ def data_sources(self)-> list[DataSource]:
+ # Only add a data_source if it does not already exist in the story
+ data_source_objects:set[DataSource] = set()
+ for detection in self.detections:
+ data_source_objects.update(set(detection.data_source_objects))
+
+ return sorted(list(data_source_objects))
+

  def storyAndInvestigationNamesWithApp(self, app_name:str)->List[str]:
  return [f"{app_name} - {name} - Rule" for name in self.detection_names] + \
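
data_sources moves from a stored list to a computed_field, so it is derived from the story's detections on access and still appears in serialized output. A minimal sketch of that behaviour (assuming Pydantic v2; StorySketch is hypothetical):

    from pydantic import BaseModel, computed_field

    class StorySketch(BaseModel):  # hypothetical
        detection_names: list[str] = []

        @computed_field
        @property
        def detection_count(self) -> int:
            # recomputed on every access and included by model_dump()
            return len(self.detection_names)

    print(StorySketch(detection_names=["a", "b"]).model_dump())
    # {'detection_names': ['a', 'b'], 'detection_count': 2}
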
@@ -141,7 +152,3 @@ class Story(SecurityContentObject):
  def baseline_names(self)->List[str]:
  return [baseline.name for baseline in self.baselines]

-
-
-
-
@@ -0,0 +1,40 @@
+ import csv
+ from contentctl.objects.data_source import DataSource
+ from contentctl.objects.event_source import EventSource
+ from typing import List
+ import pathlib
+
+ class DataSourceWriter:
+
+ @staticmethod
+ def writeDataSourceCsv(data_source_objects: List[DataSource], file_path: pathlib.Path):
+ with open(file_path, mode='w', newline='') as file:
+ writer = csv.writer(file)
+ # Write the header
+ writer.writerow([
+ "name", "id", "author", "source", "sourcetype", "separator",
+ "supported_TA_name", "supported_TA_version", "supported_TA_url",
+ "description"
+ ])
+ # Write the data
+ for data_source in data_source_objects:
+ if data_source.supported_TA and isinstance(data_source.supported_TA, list) and len(data_source.supported_TA) > 0:
+ supported_TA_name = data_source.supported_TA[0].get('name', '')
+ supported_TA_version = data_source.supported_TA[0].get('version', '')
+ supported_TA_url = data_source.supported_TA[0].get('url', '')
+ else:
+ supported_TA_name = ''
+ supported_TA_version = ''
+ supported_TA_url = ''
+ writer.writerow([
+ data_source.name,
+ data_source.id,
+ data_source.author,
+ data_source.source,
+ data_source.sourcetype,
+ data_source.separator,
+ supported_TA_name,
+ supported_TA_version,
+ supported_TA_url,
+ data_source.description,
+ ])
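
A hedged usage sketch: build.py (earlier in this diff) calls writeDataSourceCsv with the director's DataSource objects and a path under the package's lookups/ folder. Here a SimpleNamespace stands in for a DataSource purely for illustration:

    import pathlib
    from types import SimpleNamespace
    from contentctl.output.data_source_writer import DataSourceWriter

    fake_source = SimpleNamespace(  # illustrative only; real callers pass DataSource models
        name="Sysmon EventID 1",
        id="b375f4d1-d7ca-4bc0-9103-294825c0af17",
        author="Patrick Bareiss, Splunk",
        source="XmlWinEventLog:Microsoft-Windows-Sysmon/Operational",
        sourcetype="xmlwineventlog",
        separator="EventID",
        supported_TA=[{"name": "Splunk Add-on for Sysmon", "version": "4.0.0",
                       "url": "https://splunkbase.splunk.com/app/5709/"}],
        description="Data source object for Sysmon EventID 1",
    )
    DataSourceWriter.writeDataSourceCsv([fake_source], pathlib.Path("data_sources.csv"))
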
@@ -0,0 +1,171 @@
+ name: Sysmon EventID 1
+ id: b375f4d1-d7ca-4bc0-9103-294825c0af17
+ version: 1
+ date: '2024-07-18'
+ author: Patrick Bareiss, Splunk
+ description: Data source object for Sysmon EventID 1
+ source: XmlWinEventLog:Microsoft-Windows-Sysmon/Operational
+ sourcetype: xmlwineventlog
+ separator: EventID
+ supported_TA:
+ - name: Splunk Add-on for Sysmon
+ url: https://splunkbase.splunk.com/app/5709/
+ version: 4.0.0
+ fields:
+ - _time
+ - Channel
+ - CommandLine
+ - Company
+ - Computer
+ - CurrentDirectory
+ - Description
+ - EventChannel
+ - EventCode
+ - EventData_Xml
+ - EventDescription
+ - EventID
+ - EventRecordID
+ - FileVersion
+ - Guid
+ - Hashes
+ - IMPHASH
+ - Image
+ - IntegrityLevel
+ - Keywords
+ - Level
+ - LogonGuid
+ - LogonId
+ - MD5
+ - Name
+ - Opcode
+ - OriginalFileName
+ - ParentCommandLine
+ - ParentImage
+ - ParentProcessGuid
+ - ParentProcessId
+ - ProcessGuid
+ - ProcessID
+ - ProcessId
+ - Product
+ - RecordID
+ - RecordNumber
+ - RuleName
+ - SHA256
+ - SecurityID
+ - SystemTime
+ - System_Props_Xml
+ - Task
+ - TerminalSessionId
+ - ThreadID
+ - TimeCreated
+ - User
+ - UserID
+ - UtcTime
+ - Version
+ - action
+ - date_hour
+ - date_mday
+ - date_minute
+ - date_month
+ - date_second
+ - date_wday
+ - date_year
+ - date_zone
+ - dest
+ - dvc_nt_host
+ - event_id
+ - eventtype
+ - host
+ - id
+ - index
+ - linecount
+ - original_file_name
+ - os
+ - parent_process
+ - parent_process_exec
+ - parent_process_guid
+ - parent_process_id
+ - parent_process_name
+ - parent_process_path
+ - process
+ - process_current_directory
+ - process_exec
+ - process_guid
+ - process_hash
+ - process_id
+ - process_integrity_level
+ - process_name
+ - process_path
+ - punct
+ - signature
+ - signature_id
+ - source
+ - sourcetype
+ - splunk_server
+ - tag
+ - tag::eventtype
+ - timeendpos
+ - timestartpos
+ - user
+ - user_id
+ - vendor_product
+ field_mappings:
+ - data_model: cim
+ data_set: Endpoint.Processes
+ mapping:
+ ProcessGuid: Processes.process_guid
+ ProcessId: Processes.process_id
+ Image: Processes.process_path
+ Image|endswith: Processes.process_name
+ CommandLine: Processes.process
+ CurrentDirectory: Processes.process_current_directory
+ User: Processes.user
+ IntegrityLevel: Processes.process_integrity_level
+ Hashes: Processes.process_hash
+ ParentProcessGuid: Processes.parent_process_guid
+ ParentProcessId: Processes.parent_process_id
+ ParentImage: Processes.parent_process_name
+ ParentCommandLine: Processes.parent_process
+ Computer: Processes.dest
+ OriginalFileName: Processes.original_file_name
+ convert_to_log_source:
+ - data_source: Windows Event Log Security 4688
+ mapping:
+ ProcessId: NewProcessId
+ Image: NewProcessName
+ Image|endswith: NewProcessName|endswith
+ CommandLine: Process_Command_Line
+ User: SubjectUserSid
+ ParentProcessId: ProcessId
+ ParentImage: ParentProcessName
+ ParentImage|endswith: ParentProcessName|endswith
+ Computer: Computer
+ OriginalFileName: NewProcessName|endswith
+ - data_source: Crowdstrike Process
+ mapping:
+ ProcessId: RawProcessId
+ Image: ImageFileName
+ CommandLine: CommandLine
+ User: UserSid
+ ParentProcessId: ParentProcessId
+ ParentImage: ParentBaseFileName
+ example_log: "<Event xmlns='http://schemas.microsoft.com/win/2004/08/events/event'><System><Provider\
+ \ Name='Microsoft-Windows-Sysmon' Guid='{5770385F-C22A-43E0-BF4C-06F5698FFBD9}'/><EventID>1</EventID><Version>5</Version><Level>4</Level><Task>1</Task><Opcode>0</Opcode><Keywords>0x8000000000000000</Keywords><TimeCreated\
+ \ SystemTime='2020-10-08T11:03:46.617920300Z'/><EventRecordID>4522</EventRecordID><Correlation/><Execution\
+ \ ProcessID='2912' ThreadID='3424'/><Channel>Microsoft-Windows-Sysmon/Operational</Channel><Computer>win-dc-6764986.attackrange.local</Computer><Security\
+ \ UserID='S-1-5-18'/></System><EventData><Data Name='RuleName'>-</Data><Data Name='UtcTime'>2020-10-08\
+ \ 11:03:46.615</Data><Data Name='ProcessGuid'>{96128EA2-F212-5F7E-E400-000000007F01}</Data><Data\
+ \ Name='ProcessId'>2296</Data><Data Name='Image'>C:\\Windows\\System32\\cmd.exe</Data><Data\
+ \ Name='FileVersion'>10.0.14393.0 (rs1_release.160715-1616)</Data><Data Name='Description'>Windows\
+ \ Command Processor</Data><Data Name='Product'>Microsoft\xAE Windows\xAE Operating\
+ \ System</Data><Data Name='Company'>Microsoft Corporation</Data><Data Name='OriginalFileName'>Cmd.Exe</Data><Data\
+ \ Name='CommandLine'>\"C:\\Windows\\system32\\cmd.exe\" /c \"reg save HKLM\\sam\
+ \ %%temp%%\\sam &amp; reg save HKLM\\system %%temp%%\\system &amp; reg save HKLM\\\
+ security %%temp%%\\security\" </Data><Data Name='CurrentDirectory'>C:\\Users\\ADMINI~1\\\
+ AppData\\Local\\Temp\\</Data><Data Name='User'>ATTACKRANGE\\Administrator</Data><Data\
+ \ Name='LogonGuid'>{96128EA2-F210-5F7E-ACD4-080000000000}</Data><Data Name='LogonId'>0x8d4ac</Data><Data\
+ \ Name='TerminalSessionId'>0</Data><Data Name='IntegrityLevel'>High</Data><Data\
+ \ Name='Hashes'>MD5=F4F684066175B77E0C3A000549D2922C,SHA256=935C1861DF1F4018D698E8B65ABFA02D7E9037D8F68CA3C2065B6CA165D44AD2,IMPHASH=3062ED732D4B25D1C64F084DAC97D37A</Data><Data\
+ \ Name='ParentProcessGuid'>{96128EA2-F211-5F7E-DF00-000000007F01}</Data><Data Name='ParentProcessId'>4624</Data><Data\
+ \ Name='ParentImage'>C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe</Data><Data\
+ \ Name='ParentCommandLine'>\"powershell.exe\" -noninteractive -encodedcommand WwBDAG8AbgBzAG8AbABlAF0AOgA6AEkAbgBwAHUAdABFAG4AYwBvAGQAaQBuAGcAIAA9ACAATgBlAHcALQBPAGIAagBlAGMAdAAgAFQAZQB4AHQALgBVAFQARgA4AEUAbgBjAG8AZABpAG4AZwAgACQAZgBhAGwAcwBlADsAIABJAG0AcABvAHIAdAAtAE0AbwBkAHUAbABlACAAIgBDADoAXABBAHQAbwBtAGkAYwBSAGUAZABUAGUAYQBtAFwAaQBuAHYAbwBrAGUALQBhAHQAbwBtAGkAYwByAGUAZAB0AGUAYQBtAFwASQBuAHYAbwBrAGUALQBBAHQAbwBtAGkAYwBSAGUAZABUAGUAYQBtAC4AcABzAGQAMQAiACAALQBGAG8AcgBjAGUACgBJAG4AdgBvAGsAZQAtAEEAdABvAG0AaQBjAFQAZQBzAHQAIAAiAFQAMQAwADAAMwAuADAAMAAyACIAIAAtAEMAbwBuAGYAaQByAG0AOgAkAGYAYQBsAHMAZQAgAC0AVABpAG0AZQBvAHUAdABTAGUAYwBvAG4AZABzACAAMwAwADAAIAAtAEUAeABlAGMAdQB0AGkAbwBuAEwAbwBnAFAAYQB0AGgAIABDADoAXABBAHQAbwBtAGkAYwBSAGUAZABUAGUAYQBtAFwAYQB0AGMAXwBlAHgAZQBjAHUAdABpAG8AbgAuAGMAcwB2AA==</Data></EventData></Event>"
@@ -13,7 +13,7 @@ description: The following detection identifies a 7z.exe spawned from `Rundll32.
  any files written to disk and analyze as needed. Review parallel processes for additional
  behaviors. Typically, archiving files will result in exfiltration.
  data_source:
- - Sysmon Event ID 1
+ - Sysmon EventID 1
  search: '| tstats `security_content_summariesonly` count min(_time) as firstTime max(_time)
  as lastTime from datamodel=Endpoint.Processes where Processes.parent_process_name
  IN ("rundll32.exe", "dllhost.exe") Processes.process_name=*7z* by Processes.dest
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: contentctl
- Version: 4.1.5
+ Version: 4.2.0
  Summary: Splunk Content Control Tool
  License: Apache 2.0
  Author: STRT
@@ -1,5 +1,5 @@
  contentctl/__init__.py,sha256=IMjkMO3twhQzluVTo8Z6rE7Eg-9U79_LGKMcsWLKBkY,22
- contentctl/actions/build.py,sha256=BVc-1E63zeUQ9wWAHTC_fLNvfEK5YT3Z6_QLiE72TQs,4765
+ contentctl/actions/build.py,sha256=mGm1F8jWdj547uJVSEWZBZcEyjoO4QpPKWhJOpRwR94,5739
  contentctl/actions/convert.py,sha256=0KBWLxvP1hSPXpExePqpOQPRvlQLamvPLyQqeTIWNbk,704
  contentctl/actions/deploy_acs.py,sha256=mf3uk495H1EU_LNN-TiOsYCo18HMGoEBMb6ojeTr0zw,1418
  contentctl/actions/detection_testing/DetectionTestingManager.py,sha256=zg8JasDjCpSC-yhseEyUwO8qbDJIUJbhlus9Li9ZAnA,8818
@@ -14,14 +14,14 @@ contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py,sha256=Mos
  contentctl/actions/detection_testing/views/DetectionTestingViewFile.py,sha256=OJgmQgoVnzy7p1MN9bDyKGUhFWKzQc6ejc4F87uZG1I,1123
  contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py,sha256=6mecacXFoTJxcHiRZSnlHos5Hca1jdedEEZfiIAhaJg,4706
  contentctl/actions/doc_gen.py,sha256=YNc1VYA0ikL1hWDHYjfEOmUkfhy8PEIdvTyC4ZLxQRY,863
- contentctl/actions/initialize.py,sha256=2h3_A68mNWcyZjbrKF-OeQXBi5p4Zu3z74K7QxEtII4,1749
+ contentctl/actions/initialize.py,sha256=BRKmvLr50dnL4SnUEGM6jRfAghfZAmk0hFFWIZcKpxg,1809
  contentctl/actions/initialize_old.py,sha256=0qXbW_fNDvkcnEeL6Zpte8d-hpTu1REyzHsXOCY-YB8,9333
  contentctl/actions/inspect.py,sha256=6gVVKmV5CUUYOkNNVTMPKj9bM1uXVthgGCoFKZGDeS8,12628
  contentctl/actions/new_content.py,sha256=o5ZYBQ216RN6TnW_wRxVGJybx2SsJ7ht4PAi1dw45Yg,6076
  contentctl/actions/release_notes.py,sha256=akkFfLhsJuaPUyjsb6dLlKt9cUM-JApAjTFQMbYoXeM,13115
  contentctl/actions/reporting.py,sha256=MJEmvmoA1WnSFZEU9QM6daL_W94oOX0WXAcX1qAM2As,1583
  contentctl/actions/test.py,sha256=dx7f750_MrlvysxOmOdIro1bH0iVKF4K54TSwhvU2MU,5146
- contentctl/actions/validate.py,sha256=E6bQ0lZkFiFyC4hyRuypKiMybZSE4EXvzd94B0fQUFo,3590
+ contentctl/actions/validate.py,sha256=HfHfUTaRNx8eItociAEgQt8BEOVy9jb2yc8bKAGSsFA,3574
  contentctl/api.py,sha256=FBOpRhbBCBdjORmwe_8MPQ3PRZ6T0KrrFcfKovVFkug,6343
  contentctl/contentctl.py,sha256=Vr2cuvaPjpJpYvD9kVoYq7iD6rhLQEpTKmcGoq4emhA,10470
  contentctl/enrichments/attack_enrichment.py,sha256=EkEloG3hMmPTloPyYiVkhq3iT_BieXaJmprJ5stfyRw,6732
@@ -31,13 +31,13 @@ contentctl/helper/link_validator.py,sha256=-XorhxfGtjLynEL1X4hcpRMiyemogf2JEnvLw
  contentctl/helper/logger.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  contentctl/helper/utils.py,sha256=8ICRvE7DUiNL9BK4Hw71hCLFbd3R2u86OwKeDOdaBTY,19454
  contentctl/input/backend_splunk_ba.py,sha256=Y70tJqgaUM0nzfm2SiGMof4HkhY84feqf-xnRx1xPb4,5861
- contentctl/input/director.py,sha256=BR1RvBD0U_JtHtrM3jM_FpcvaaNlME7nc-gNO4RJLM8,13323
+ contentctl/input/director.py,sha256=36Licm8TV62oDk1s97Y7EFGvTKAP1ryXi7NL4BXP9kU,12603
  contentctl/input/new_content_questions.py,sha256=o4prlBoUhEMxqpZukquI9WKbzfFJfYhEF7a8m2q_BEE,5565
  contentctl/input/sigma_converter.py,sha256=ATFNW7boNngp5dmWM7Gr4rMZrUKjvKW2_qu28--FdiU,19391
  contentctl/input/ssa_detection_builder.py,sha256=dke9mPn2VQVSpYiaGWjZn3PkqVJTe58gcT2Vifv9_yc,8159
- contentctl/input/yml_reader.py,sha256=oaal24UP8rDXkCmN5I3GnIheZrsgkhbKOlzXtyhB474,1475
- contentctl/objects/abstract_security_content_objects/detection_abstract.py,sha256=XhyMpghvizU37sOHKE009la1wo__EZqGdemXt9En5wc,34039
- contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py,sha256=IVr26xFrIlTvsqQoqwYl4cmcfaP9BeYt9I0QTriKmwE,8451
+ contentctl/input/yml_reader.py,sha256=hyVUYhx4Ka8C618kP2D_E3sDUKEQGC6ty_QZQArHKd4,1489
+ contentctl/objects/abstract_security_content_objects/detection_abstract.py,sha256=gpyl-hnDNAxxMAHDMBZJMK10yZOq78Y6-ZMCStvrgQM,35354
+ contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py,sha256=zpteipbBAwBe90SOmq5ky0KIztS806jnrg1vsBr5PsM,9778
  contentctl/objects/alert_action.py,sha256=E9gjCn5C31h0sN7k90KNe4agRxFFSnMW_Z-Ri_3YQss,1335
  contentctl/objects/atomic.py,sha256=a_G_iliAm86BunpAAG86aAL3LAEGpd9Crp7t7-PxYvI,8979
  contentctl/objects/base_test.py,sha256=6hCL9K-N_jJx1zLbuZQCsB93_XWj6JcGGs2PbbjzJWo,1028
@@ -47,7 +47,7 @@ contentctl/objects/baseline_tags.py,sha256=JLdlCUc_DEccMQD6f-sa2qD8pcxYiwMUT_sRZ
  contentctl/objects/config.py,sha256=tK0BY4A9Go5jp8tpOSwgczuOAyu9dMPvC0nyOHeO-74,43642
  contentctl/objects/constants.py,sha256=1LjiK9A7t0aHHkJz2mrW-DImdW1P98GPssTwmwNNI_M,3468
  contentctl/objects/correlation_search.py,sha256=B97vCt2Ew7PGgqd5Y9l6RD3DJdy51Eh7Gzkxxs2xqZ0,36891
- contentctl/objects/data_source.py,sha256=UoI1zLyrwwTtKqvXf_K-TifJEY5HaVBItR3vR4J43iw,768
+ contentctl/objects/data_source.py,sha256=ThZivI3NoQybD0C0fS_f-FvhjhD5_n09IML913e1fEY,1459
  contentctl/objects/deployment.py,sha256=Qc6M4yeOvxjqFKR8sfjd4CG06AbVheTOqP1mwqo4t8s,2651
  contentctl/objects/deployment_email.py,sha256=Zu9cXZdfOP6noa_mZpiK1GrYCTgi3Mim94iLGjE674c,147
  contentctl/objects/deployment_notable.py,sha256=QhOI7HEkUuuqk0fum9SD8IpYBlbwIsJUff8s3kCKKj4,198
@@ -57,8 +57,8 @@ contentctl/objects/deployment_scheduling.py,sha256=bQjbJHNaUGdU1VAGV8-nFOHzHutbI
  contentctl/objects/deployment_slack.py,sha256=P6z8OLHDKcDWx7nbKWasqBc3dFRatGcpO2GtmxzVV8I,135
  contentctl/objects/detection.py,sha256=3W41cXf3ECjWuPqWrseqSLC3PAA7O5_nENWWM6MPK0Y,620
  contentctl/objects/detection_tags.py,sha256=nAHRuBtltx4Rsx9htPtxizRlmQOSypYysbzqn3CQZ_I,10321
- contentctl/objects/enums.py,sha256=cW-orYfVBgMdZKVS8ANAkSZ-zygbrhJZX6FP4TxNGgg,14075
- contentctl/objects/event_source.py,sha256=oOCCSQpfpSbYw6_v103I4VxwqjpXP4gsTbds06qiEa0,251
+ contentctl/objects/enums.py,sha256=37v7w8xCg5j5hxP3kod0S3HQ9BY-CqZulPiwhnTtEvs,14052
+ contentctl/objects/event_source.py,sha256=G9P7rtcN5hcBNQx6DG37mR3QyQufx--T6kgQGNqQuKk,415
  contentctl/objects/integration_test.py,sha256=W_VksBN_cRo7DTXdr1aLujjS9mgkEp0uvoNpmL0dVnQ,1273
  contentctl/objects/integration_test_result.py,sha256=DrIZRRlILSHGcsK_Rlm3KJLnbKPtIen8uEPFi4ZdJ8s,370
  contentctl/objects/investigation.py,sha256=JRoZxc_qi1fu_VFTRaxOc3B7zzSzCfEURsNzWPUCrtY,2620
@@ -75,7 +75,7 @@ contentctl/objects/risk_object.py,sha256=yY4NmEwEKaRl4sLzCRZb1n8kdpV3HzYbQVQ1ClQ
  contentctl/objects/security_content_object.py,sha256=j8KNDwSMfZsSIzJucC3NuZo0SlFVpqHfDc6y3-YHjHI,234
  contentctl/objects/ssa_detection.py,sha256=-G6tXfVVlZgPWS64hIIy3M-aMePANAuQvdpXPlgUyUs,5873
  contentctl/objects/ssa_detection_tags.py,sha256=u8annjzo3MYZ-16wyFnuR8qJJzRa4LEhdprMIrQ47G0,5224
- contentctl/objects/story.py,sha256=GMcMAaDcX16B2mbSAzrzpVD8IMyOILAyIo2rR8jySto,4618
+ contentctl/objects/story.py,sha256=FXe11LV19xJTtCgx7DKdvV9cL0gKeryUnE3yjpnDmrU,4957
  contentctl/objects/story_tags.py,sha256=0oF1OePLBxa-RQPb438tXrrfosa939CP8UbNV0_S8XY,2225
  contentctl/objects/test_group.py,sha256=Yb1sqGom6SkVL8B3czPndz8w3CK8WdwZ39V_cn0_JZQ,2600
  contentctl/objects/threat_object.py,sha256=S8B7RQFfLxN_g7yKPrDTuYhIy9JvQH3YwJ_T5LUZIa4,711
@@ -91,6 +91,7 @@ contentctl/output/attack_nav_writer.py,sha256=64ILZLmNbh2XLmbopgENkeo6t-4SRRG8xZ
  contentctl/output/ba_yml_output.py,sha256=Lrk13Q9-f71i3c0oNrT50G94PxdogG4k4-MI-rTMOAo,5950
  contentctl/output/conf_output.py,sha256=qCRT77UKNFCe4AufeBV8Uz9lkPqgpGzU1Y149RuEnis,10147
  contentctl/output/conf_writer.py,sha256=2TaCAPEtU-bMa7A2m7xOxh93PMpzIdhwiHiPLUCeCB4,8281
+ contentctl/output/data_source_writer.py,sha256=55gi6toAMcjj0AxOmYMwkTHcANCfK6dezQs5aIQTW4k,1737
  contentctl/output/detection_writer.py,sha256=AzxbssNLmsNIOaYKotew5-ONoyq1cQpKSGy3pe191B0,960
  contentctl/output/doc_md_output.py,sha256=gf7osH1uSrC6js3D_I72g4uDe9TaB3tsvtqCHi5znp0,3238
  contentctl/output/finding_report_writer.py,sha256=bjJR7NAxLE8vt8uU3zSDhazQzqzOdtCsUu95lVdzU_w,3939
@@ -148,6 +149,7 @@ contentctl/templates/app_template/static/appIcon.png,sha256=jcJ1PNdkBX7Kl_y9Tf0S
  contentctl/templates/app_template/static/appIconAlt.png,sha256=uRXjoHQQjs0-BxcK-3KNBEdck1adDNTHMvV14xR4W0g,2656
  contentctl/templates/app_template/static/appIconAlt_2x.png,sha256=I0m-CPRqq7ak9NJQZGGmz6Ac4pmzFV_SonOUxOEDOFs,7442
  contentctl/templates/app_template/static/appIcon_2x.png,sha256=XEpqQzDvzuEV5StzD05XRgxwySqHHLes1hMPy2v5Vdk,3657
+ contentctl/templates/data_sources/sysmon_eventid_1.yml,sha256=7PIcLr1e9Ql-wu_Dk9D4JAZs1OWDby-tY77nDDUZ1CQ,6079
  contentctl/templates/datamodels_cim.conf,sha256=RB_SCtpQG_KaC_0lKTCKexVOlEq_ShGwpGlg95aqOfs,9381
  contentctl/templates/datamodels_custom.conf,sha256=6BANthXdqg3fYpYmEqiGZnv4cWheNfXz1uQ_I1JePXc,480
  contentctl/templates/deployments/escu_default_configuration_anomaly.yml,sha256=j_H2wovWBj1EKxVwj3mMoJVQnVm-2Imt7xnB9U1Tun4,418
@@ -157,14 +159,14 @@ contentctl/templates/deployments/escu_default_configuration_hunting.yml,sha256=h
  contentctl/templates/deployments/escu_default_configuration_ttp.yml,sha256=1D-pvzaH1v3_yCZXaY6njmdvV4S2_Ak8uzzCOsnj9XY,548
  contentctl/templates/detections/application/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  contentctl/templates/detections/cloud/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml,sha256=hkN214ZOqbQPWyYrqgbOrYb4iA0DroG1AnFRhSC_m0M,3323
+ contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml,sha256=-jp7CC3shnA9Te_0Zw6jLbLT8JnrVQvOfEUkNCQbCNo,3322
  contentctl/templates/detections/network/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  contentctl/templates/detections/web/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  contentctl/templates/macros/security_content_ctime.yml,sha256=Gg1YNllHVsX_YB716H1SJLWzxXZEfuJlnsgB2fuyoHU,159
  contentctl/templates/macros/security_content_summariesonly.yml,sha256=9BYUxAl2E4Nwh8K19F3AJS8Ka7ceO6ZDBjFiO3l3LY0,162
  contentctl/templates/stories/cobalt_strike.yml,sha256=rlaXxMN-5k8LnKBLPafBoksyMtlmsPMHPJOjTiMiZ-M,3063
- contentctl-4.1.5.dist-info/LICENSE.md,sha256=hQWUayRk-pAiOZbZnuy8djmoZkjKBx8MrCFpW-JiOgo,11344
- contentctl-4.1.5.dist-info/METADATA,sha256=dzhmOQMe0mFoHlZ12p7FsQzHGF-jSKvzbAosP5fTsC8,19706
- contentctl-4.1.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- contentctl-4.1.5.dist-info/entry_points.txt,sha256=5bjZ2NkbQfSwK47uOnA77yCtjgXhvgxnmCQiynRF_-U,57
- contentctl-4.1.5.dist-info/RECORD,,
+ contentctl-4.2.0.dist-info/LICENSE.md,sha256=hQWUayRk-pAiOZbZnuy8djmoZkjKBx8MrCFpW-JiOgo,11344
+ contentctl-4.2.0.dist-info/METADATA,sha256=Mwn05R0o74IKd0Z0KdayxzX3E7wJGJQSTfu4fzQxjQA,19706
+ contentctl-4.2.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ contentctl-4.2.0.dist-info/entry_points.txt,sha256=5bjZ2NkbQfSwK47uOnA77yCtjgXhvgxnmCQiynRF_-U,57
+ contentctl-4.2.0.dist-info/RECORD,,