contentctl 3.6.0__py3-none-any.whl → 4.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. contentctl/actions/build.py +89 -0
  2. contentctl/actions/detection_testing/DetectionTestingManager.py +48 -49
  3. contentctl/actions/detection_testing/GitService.py +148 -230
  4. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +14 -24
  5. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +43 -17
  6. contentctl/actions/detection_testing/views/DetectionTestingView.py +3 -2
  7. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +0 -8
  8. contentctl/actions/doc_gen.py +1 -1
  9. contentctl/actions/initialize.py +28 -65
  10. contentctl/actions/inspect.py +260 -0
  11. contentctl/actions/new_content.py +106 -13
  12. contentctl/actions/release_notes.py +168 -144
  13. contentctl/actions/reporting.py +24 -13
  14. contentctl/actions/test.py +39 -20
  15. contentctl/actions/validate.py +25 -48
  16. contentctl/contentctl.py +196 -754
  17. contentctl/enrichments/attack_enrichment.py +69 -19
  18. contentctl/enrichments/cve_enrichment.py +28 -13
  19. contentctl/helper/link_validator.py +24 -26
  20. contentctl/helper/utils.py +7 -3
  21. contentctl/input/director.py +139 -201
  22. contentctl/input/new_content_questions.py +63 -61
  23. contentctl/input/sigma_converter.py +1 -2
  24. contentctl/input/ssa_detection_builder.py +16 -7
  25. contentctl/input/yml_reader.py +4 -3
  26. contentctl/objects/abstract_security_content_objects/detection_abstract.py +487 -154
  27. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +155 -51
  28. contentctl/objects/alert_action.py +40 -0
  29. contentctl/objects/atomic.py +212 -0
  30. contentctl/objects/baseline.py +44 -43
  31. contentctl/objects/baseline_tags.py +69 -20
  32. contentctl/objects/config.py +857 -125
  33. contentctl/objects/constants.py +0 -1
  34. contentctl/objects/correlation_search.py +1 -1
  35. contentctl/objects/data_source.py +2 -4
  36. contentctl/objects/deployment.py +61 -21
  37. contentctl/objects/deployment_email.py +2 -2
  38. contentctl/objects/deployment_notable.py +4 -4
  39. contentctl/objects/deployment_phantom.py +2 -2
  40. contentctl/objects/deployment_rba.py +3 -4
  41. contentctl/objects/deployment_scheduling.py +2 -3
  42. contentctl/objects/deployment_slack.py +2 -2
  43. contentctl/objects/detection.py +1 -5
  44. contentctl/objects/detection_tags.py +210 -119
  45. contentctl/objects/enums.py +312 -24
  46. contentctl/objects/integration_test.py +1 -1
  47. contentctl/objects/integration_test_result.py +0 -2
  48. contentctl/objects/investigation.py +62 -53
  49. contentctl/objects/investigation_tags.py +30 -6
  50. contentctl/objects/lookup.py +80 -31
  51. contentctl/objects/macro.py +29 -45
  52. contentctl/objects/mitre_attack_enrichment.py +29 -5
  53. contentctl/objects/observable.py +3 -7
  54. contentctl/objects/playbook.py +60 -30
  55. contentctl/objects/playbook_tags.py +45 -8
  56. contentctl/objects/security_content_object.py +1 -5
  57. contentctl/objects/ssa_detection.py +8 -4
  58. contentctl/objects/ssa_detection_tags.py +19 -26
  59. contentctl/objects/story.py +142 -44
  60. contentctl/objects/story_tags.py +46 -33
  61. contentctl/objects/unit_test.py +7 -2
  62. contentctl/objects/unit_test_attack_data.py +10 -19
  63. contentctl/objects/unit_test_baseline.py +1 -1
  64. contentctl/objects/unit_test_old.py +4 -3
  65. contentctl/objects/unit_test_result.py +5 -3
  66. contentctl/objects/unit_test_ssa.py +31 -0
  67. contentctl/output/api_json_output.py +202 -130
  68. contentctl/output/attack_nav_output.py +20 -9
  69. contentctl/output/attack_nav_writer.py +3 -3
  70. contentctl/output/ba_yml_output.py +3 -3
  71. contentctl/output/conf_output.py +125 -391
  72. contentctl/output/conf_writer.py +169 -31
  73. contentctl/output/jinja_writer.py +2 -2
  74. contentctl/output/json_writer.py +17 -5
  75. contentctl/output/new_content_yml_output.py +8 -7
  76. contentctl/output/svg_output.py +17 -27
  77. contentctl/output/templates/analyticstories_detections.j2 +8 -4
  78. contentctl/output/templates/analyticstories_investigations.j2 +1 -1
  79. contentctl/output/templates/analyticstories_stories.j2 +6 -6
  80. contentctl/output/templates/app.conf.j2 +2 -2
  81. contentctl/output/templates/app.manifest.j2 +2 -2
  82. contentctl/output/templates/detection_coverage.j2 +6 -8
  83. contentctl/output/templates/doc_detection_page.j2 +2 -2
  84. contentctl/output/templates/doc_detections.j2 +2 -2
  85. contentctl/output/templates/doc_stories.j2 +1 -1
  86. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  87. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  88. contentctl/output/templates/header.j2 +2 -1
  89. contentctl/output/templates/macros.j2 +6 -10
  90. contentctl/output/templates/savedsearches_baselines.j2 +5 -5
  91. contentctl/output/templates/savedsearches_detections.j2 +36 -33
  92. contentctl/output/templates/savedsearches_investigations.j2 +4 -4
  93. contentctl/output/templates/transforms.j2 +4 -4
  94. contentctl/output/yml_writer.py +2 -2
  95. contentctl/templates/app_template/README.md +7 -0
  96. contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/nav/default.xml +1 -0
  97. contentctl/templates/app_template/lookups/mitre_enrichment.csv +638 -0
  98. contentctl/templates/deployments/{00_default_anomaly.yml → escu_default_configuration_anomaly.yml} +1 -2
  99. contentctl/templates/deployments/{00_default_baseline.yml → escu_default_configuration_baseline.yml} +1 -2
  100. contentctl/templates/deployments/{00_default_correlation.yml → escu_default_configuration_correlation.yml} +2 -2
  101. contentctl/templates/deployments/{00_default_hunting.yml → escu_default_configuration_hunting.yml} +2 -2
  102. contentctl/templates/deployments/{00_default_ttp.yml → escu_default_configuration_ttp.yml} +1 -2
  103. contentctl/templates/detections/anomalous_usage_of_7zip.yml +0 -1
  104. contentctl/templates/stories/cobalt_strike.yml +0 -1
  105. {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/METADATA +36 -15
  106. contentctl-4.0.2.dist-info/RECORD +168 -0
  107. contentctl/actions/detection_testing/DataManipulation.py +0 -149
  108. contentctl/actions/generate.py +0 -91
  109. contentctl/helper/config_handler.py +0 -75
  110. contentctl/input/baseline_builder.py +0 -66
  111. contentctl/input/basic_builder.py +0 -58
  112. contentctl/input/detection_builder.py +0 -370
  113. contentctl/input/investigation_builder.py +0 -42
  114. contentctl/input/new_content_generator.py +0 -95
  115. contentctl/input/playbook_builder.py +0 -68
  116. contentctl/input/story_builder.py +0 -106
  117. contentctl/objects/app.py +0 -214
  118. contentctl/objects/repo_config.py +0 -163
  119. contentctl/objects/test_config.py +0 -630
  120. contentctl/output/templates/macros_detections.j2 +0 -7
  121. contentctl/output/templates/splunk_app/README.md +0 -7
  122. contentctl-3.6.0.dist-info/RECORD +0 -176
  123. /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_story_detail.txt +0 -0
  124. /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_summary.txt +0 -0
  125. /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_usage_dashboard.txt +0 -0
  126. /contentctl/{output/templates/splunk_app → templates/app_template}/default/analytic_stories.conf +0 -0
  127. /contentctl/{output/templates/splunk_app → templates/app_template}/default/app.conf +0 -0
  128. /contentctl/{output/templates/splunk_app → templates/app_template}/default/commands.conf +0 -0
  129. /contentctl/{output/templates/splunk_app → templates/app_template}/default/content-version.conf +0 -0
  130. /contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/views/escu_summary.xml +0 -0
  131. /contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/views/feedback.xml +0 -0
  132. /contentctl/{output/templates/splunk_app → templates/app_template}/default/distsearch.conf +0 -0
  133. /contentctl/{output/templates/splunk_app → templates/app_template}/default/usage_searches.conf +0 -0
  134. /contentctl/{output/templates/splunk_app → templates/app_template}/default/use_case_library.conf +0 -0
  135. /contentctl/{output/templates/splunk_app → templates/app_template}/metadata/default.meta +0 -0
  136. /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIcon.png +0 -0
  137. /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIconAlt.png +0 -0
  138. /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIconAlt_2x.png +0 -0
  139. /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIcon_2x.png +0 -0
  140. {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/LICENSE.md +0 -0
  141. {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/WHEEL +0 -0
  142. {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/entry_points.txt +0 -0
@@ -10,11 +10,11 @@ import pathlib
10
10
  from tempfile import TemporaryDirectory, mktemp
11
11
  from ssl import SSLEOFError, SSLZeroReturnError
12
12
  from sys import stdout
13
- from dataclasses import dataclass
13
+ #from dataclasses import dataclass
14
14
  from shutil import copyfile
15
15
  from typing import Union, Optional
16
16
 
17
- from pydantic import BaseModel, PrivateAttr, Field
17
+ from pydantic import BaseModel, PrivateAttr, Field, dataclasses
18
18
  import requests # type: ignore
19
19
  import splunklib.client as client # type: ignore
20
20
  from splunklib.binding import HTTPError # type: ignore
@@ -23,6 +23,7 @@ import splunklib.results
23
23
  from urllib3 import disable_warnings
24
24
  import urllib.parse
25
25
 
26
+ from contentctl.objects.config import test_common, Infrastructure
26
27
  from contentctl.objects.enums import PostTestBehavior, AnalyticsType
27
28
  from contentctl.objects.detection import Detection
28
29
  from contentctl.objects.base_test import BaseTest
@@ -31,14 +32,10 @@ from contentctl.objects.integration_test import IntegrationTest
31
32
  from contentctl.objects.unit_test_attack_data import UnitTestAttackData
32
33
  from contentctl.objects.unit_test_result import UnitTestResult
33
34
  from contentctl.objects.integration_test_result import IntegrationTestResult
34
- from contentctl.objects.test_config import TestConfig, Infrastructure
35
35
  from contentctl.objects.test_group import TestGroup
36
36
  from contentctl.objects.base_test_result import TestResultStatus
37
37
  from contentctl.objects.correlation_search import CorrelationSearch, PbarData
38
38
  from contentctl.helper.utils import Utils
39
- from contentctl.actions.detection_testing.DataManipulation import (
40
- DataManipulation,
41
- )
42
39
  from contentctl.actions.detection_testing.progress_bar import (
43
40
  format_pbar_string,
44
41
  TestReportingType,
@@ -66,8 +63,8 @@ class ContainerStoppedException(Exception):
66
63
  pass
67
64
 
68
65
 
69
- @dataclass(frozen=False)
70
- class DetectionTestingManagerOutputDto:
66
+ @dataclasses.dataclass(frozen=False)
67
+ class DetectionTestingManagerOutputDto():
71
68
  inputQueue: list[Detection] = Field(default_factory=list)
72
69
  outputQueue: list[Detection] = Field(default_factory=list)
73
70
  skippedQueue: list[Detection] = Field(default_factory=list)
@@ -81,7 +78,7 @@ class DetectionTestingManagerOutputDto:
81
78
 
82
79
  class DetectionTestingInfrastructure(BaseModel, abc.ABC):
83
80
  # thread: threading.Thread = threading.Thread()
84
- global_config: TestConfig
81
+ global_config: test_common
85
82
  infrastructure: Infrastructure
86
83
  sync_obj: DetectionTestingManagerOutputDto
87
84
  hec_token: str = ""
@@ -239,6 +236,7 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
239
236
  self.pbar.write(
240
237
  f"Error getting API connection (not quitting) '{type(e).__name__}': {str(e)}"
241
238
  )
239
+ print("wow")
242
240
  # self.pbar.write(
243
241
  # f"Unhandled exception getting connection to splunk server: {str(e)}"
244
242
  # )
@@ -397,7 +395,7 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
397
395
  try:
398
396
  self.test_detection(detection)
399
397
  except ContainerStoppedException:
400
- self.pbar.write(f"Stopped container [{self.get_name()}]")
398
+ self.pbar.write(f"Warning - container was stopped when trying to execute detection [{self.get_name()}]")
401
399
  self.finish()
402
400
  return
403
401
  except Exception as e:
@@ -1196,14 +1194,12 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
1196
1194
  ):
1197
1195
  tempfile = mktemp(dir=tmp_dir)
1198
1196
 
1199
- if not (
1200
- attack_data_file.data.startswith("https://")
1201
- or attack_data_file.data.startswith("http://")
1202
- ):
1203
- if pathlib.Path(attack_data_file.data).is_file():
1197
+ if not (str(attack_data_file.data).startswith("http://") or
1198
+ str(attack_data_file.data).startswith("https://")) :
1199
+ if pathlib.Path(str(attack_data_file.data)).is_file():
1204
1200
  self.format_pbar_string(TestReportingType.GROUP, test_group.name, "Copying Data", test_group_start_time)
1205
1201
  try:
1206
- copyfile(attack_data_file.data, tempfile)
1202
+ copyfile(str(attack_data_file.data), tempfile)
1207
1203
  except Exception as e:
1208
1204
  raise Exception(
1209
1205
  f"Error copying local Attack Data File for [{test_group.name}] - [{attack_data_file.data}]: "
@@ -1229,7 +1225,7 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
1229
1225
  )
1230
1226
 
1231
1227
  Utils.download_file_from_http(
1232
- attack_data_file.data, tempfile, self.pbar, overwrite_file=True
1228
+ str(attack_data_file.data), tempfile, self.pbar, overwrite_file=True
1233
1229
  )
1234
1230
  except Exception as e:
1235
1231
  raise (
@@ -1238,12 +1234,6 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
1238
1234
  )
1239
1235
  )
1240
1236
 
1241
- # Update timestamps before replay
1242
- if attack_data_file.update_timestamp:
1243
- data_manipulation = DataManipulation()
1244
- data_manipulation.manipulate_timestamp(
1245
- tempfile, attack_data_file.sourcetype, attack_data_file.source
1246
- )
1247
1237
 
1248
1238
  # Upload the data
1249
1239
  self.format_pbar_string(
@@ -1366,7 +1356,7 @@ class DetectionTestingInfrastructure(BaseModel, abc.ABC):
1366
1356
  pass
1367
1357
 
1368
1358
  def finish(self):
1369
- self.pbar.bar_format = f"Stopped container [{self.get_name()}]"
1359
+ self.pbar.bar_format = f"Finished running tests on instance: [{self.get_name()}]"
1370
1360
  self.pbar.update()
1371
1361
  self.pbar.close()
1372
1362
 
@@ -1,21 +1,19 @@
1
1
  from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
2
2
  DetectionTestingInfrastructure,
3
3
  )
4
+ from contentctl.objects.config import test
4
5
  import docker.models.resource
5
6
  import docker.models.containers
6
7
  import docker
7
8
  import docker.types
8
- from contentctl.objects.test_config import (
9
- CONTAINER_APP_DIR,
10
- LOCAL_APP_DIR,
11
- )
12
9
 
13
10
 
14
11
  class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
12
+ global_config: test
15
13
  container: docker.models.resource.Model = None
16
14
 
17
15
  def start(self):
18
- if self.global_config.infrastructure_config.persist_and_reuse_container:
16
+ if self.global_config.container_settings.leave_running:
19
17
  # If we are configured to use the persistent container, then check and see if it's already
20
18
  # running. If so, just use it without additional configuration.
21
19
  try:
@@ -76,8 +74,8 @@ class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
76
74
 
77
75
  mounts = [
78
76
  docker.types.Mount(
79
- source=str(LOCAL_APP_DIR.absolute()),
80
- target=str(CONTAINER_APP_DIR.absolute()),
77
+ source=str(self.global_config.getLocalAppDir()),
78
+ target=str(self.global_config.getContainerAppDir()),
81
79
  type="bind",
82
80
  read_only=True,
83
81
  )
@@ -86,18 +84,32 @@ class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
86
84
  environment = {}
87
85
  environment["SPLUNK_START_ARGS"] = "--accept-license"
88
86
  environment["SPLUNK_PASSWORD"] = self.infrastructure.splunk_app_password
89
- environment["SPLUNK_APPS_URL"] = ",".join(
90
- p.environment_path for p in self.global_config.apps
91
- )
87
+ # Files have already been staged by the time that we call this. Files must only be staged
88
+ # once, not staged by every container
89
+ environment["SPLUNK_APPS_URL"] = self.global_config.getContainerEnvironmentString(stage_file=False)
92
90
  if (
93
- self.global_config.splunkbase_password is not None
94
- and self.global_config.splunkbase_username is not None
91
+ self.global_config.splunk_api_username is not None
92
+ and self.global_config.splunk_api_password is not None
95
93
  ):
96
- environment["SPLUNKBASE_USERNAME"] = self.global_config.splunkbase_username
97
- environment["SPLUNKBASE_PASSWORD"] = self.global_config.splunkbase_password
98
-
94
+ environment["SPLUNKBASE_USERNAME"] = self.global_config.splunk_api_username
95
+ environment["SPLUNKBASE_PASSWORD"] = self.global_config.splunk_api_password
96
+
97
+
98
+
99
+ def emit_docker_run_equivalent():
100
+ environment_string = " ".join([f'-e "{k}={environment.get(k)}"' for k in environment.keys()])
101
+ print(f"\n\ndocker run -d "\
102
+ f"-p {self.infrastructure.web_ui_port}:8000 "
103
+ f"-p {self.infrastructure.hec_port}:8088 "
104
+ f"-p {self.infrastructure.api_port}:8089 "
105
+ f"{environment_string} "
106
+ f" --name {self.get_name()} "
107
+ f"--platform linux/amd64 "
108
+ f"{self.global_config.container_settings.full_image_path}\n\n")
109
+ #emit_docker_run_equivalent()
110
+
99
111
  container = self.get_docker_client().containers.create(
100
- self.global_config.infrastructure_config.full_image_path,
112
+ self.global_config.container_settings.full_image_path,
101
113
  ports=ports_dict,
102
114
  environment=environment,
103
115
  name=self.get_name(),
@@ -105,6 +117,18 @@ class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
105
117
  detach=True,
106
118
  platform="linux/amd64"
107
119
  )
120
+
121
+ if self.global_config.enterpriseSecurityInApps():
122
+ #ES sets up https, so make sure it is included in the link
123
+ address = f"https://{self.infrastructure.instance_address}:{self.infrastructure.web_ui_port}"
124
+ else:
125
+ address = f"http://{self.infrastructure.instance_address}:{self.infrastructure.web_ui_port}"
126
+ print(f"\nStarted container with the following information:\n"
127
+ f"\tname : [{self.get_name()}]\n"
128
+ f"\taddress : [{address}]\n"
129
+ f"\tusername: [{self.infrastructure.splunk_app_username}]\n"
130
+ f"\tpassword: [{self.infrastructure.splunk_app_password}]\n"
131
+ )
108
132
 
109
133
  return container
110
134
 
@@ -119,13 +143,15 @@ class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
119
143
  try:
120
144
  # If the user wants to persist the container (or use a previously configured container), then DO NOT remove it.
121
145
  # Emit the following message, which they will see on initial setup and teardown at the end of the test.
122
- if self.global_config.infrastructure_config.persist_and_reuse_container:
146
+ if self.global_config.container_settings.leave_running:
123
147
  print(f"\nContainer [{self.get_name()}] has NOT been terminated because 'contentctl_test.yml ---> infrastructure_config ---> persist_and_reuse_container = True'")
124
148
  print(f"To remove it, please manually run the following at the command line: `docker container rm -fv {self.get_name()}`\n")
125
149
  return
126
150
  # container was found, so now we try to remove it
127
151
  # v also removes volumes linked to the container
128
152
  container.remove(v=removeVolumes, force=forceRemove)
153
+ print(f"container [{self.get_name()}] successfully removed")
154
+
129
155
  # remove it even if it is running. remove volumes as well
130
156
  # No need to print that the container has been removed, it is expected behavior
131
157
 
@@ -3,7 +3,8 @@ import datetime
3
3
 
4
4
  from pydantic import BaseModel
5
5
 
6
- from contentctl.objects.test_config import TestConfig
6
+ from contentctl.objects.config import test_common
7
+
7
8
  from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
8
9
  DetectionTestingManagerOutputDto,
9
10
  )
@@ -12,7 +13,7 @@ from contentctl.objects.enums import DetectionStatus
12
13
 
13
14
 
14
15
  class DetectionTestingView(BaseModel, abc.ABC):
15
- config: TestConfig
16
+ config: test_common
16
17
  sync_obj: DetectionTestingManagerOutputDto
17
18
 
18
19
  interval: float = 10
@@ -1,11 +1,3 @@
1
- from pydantic import BaseModel
2
- import abc
3
- from typing import Callable
4
- from contentctl.objects.test_config import TestConfig
5
- from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
6
- DetectionTestingManagerOutputDto,
7
- )
8
-
9
1
  from contentctl.actions.detection_testing.views.DetectionTestingView import (
10
2
  DetectionTestingView,
11
3
  )
@@ -13,7 +13,7 @@ class DocGenInputDto:
13
13
  class DocGen:
14
14
 
15
15
  def execute(self, input_dto: DocGenInputDto) -> None:
16
- director_output_dto = DirectorOutputDto([],[],[],[],[],[],[],[])
16
+ director_output_dto = DirectorOutputDto([],[],[],[],[],[],[],[],[],[])
17
17
  director = Director(director_output_dto)
18
18
  director.execute(input_dto.director_input_dto)
19
19
 
@@ -2,81 +2,44 @@
2
2
  import shutil
3
3
  import os
4
4
  import pathlib
5
- from dataclasses import dataclass
6
- from contentctl.objects.config import Config, TestConfig, PASSWORD
7
- from contentctl.output.yml_writer import YmlWriter
8
- import json
9
5
 
10
- @dataclass(frozen=True)
11
- class InitializeInputDto:
12
- path: pathlib.Path
13
- demo: bool = False
6
+ from pydantic import RootModel
7
+ from contentctl.objects.config import test
8
+ from contentctl.output.yml_writer import YmlWriter
14
9
 
15
10
 
16
11
  class Initialize:
17
12
 
18
- def execute(self, input_dto: InitializeInputDto) -> None:
19
-
20
- c = Config()
13
+ def execute(self, config: test) -> None:
14
+ # construct a test object from the init object
15
+ # This way we can easily populate a yml with ALL the important
16
+ # fields for validating, building, and testing your app.
21
17
 
22
- t = TestConfig.construct() #Disable validation for default object
18
+ YmlWriter.writeYmlFile(str(config.path/'contentctl.yml'), config.model_dump())
23
19
 
24
- config_as_dict = c.dict()
25
- config_as_dict.pop("test")
26
- YmlWriter.writeYmlFile(os.path.join(input_dto.path, 'contentctl.yml'), config_as_dict)
27
-
28
-
29
- # This field serialization hack is required to get
30
- # enums declared in Pydantic Models serialized properly
31
- # without emitting tags that make them hard to read in yml
20
+ #Create the following empty directories:
21
+ for emptyDir in ['lookups', 'baselines', 'docs', 'reporting', 'investigations']:
22
+ #Throw an error if this directory already exists
23
+ (config.path/emptyDir).mkdir(exist_ok=False)
32
24
 
33
- j = json.dumps(t.dict(),sort_keys=False)
34
- obj=json.loads(j)
35
- YmlWriter.writeYmlFile(os.path.join(input_dto.path, 'contentctl_test.yml'), dict(obj))
36
-
37
25
 
38
- folders = ['detections', 'stories', 'lookups', 'macros', 'baselines', 'dist', 'docs', 'reporting', 'investigations']
39
- for folder in folders:
40
- os.makedirs(os.path.join(input_dto.path, folder))
41
-
42
- # Working Detection
43
- source_path = pathlib.Path(os.path.join(os.path.dirname(__file__), '../templates/detections/'))
44
- dest_path = pathlib.Path(os.path.join(input_dto.path, 'detections'))
45
- detections_to_populate = ['anomalous_usage_of_7zip.yml']
46
- if input_dto.demo:
47
- detections_to_populate += ['anomalous_usage_of_7zip_validation_fail.yml',
48
- 'anomalous_usage_of_7zip_test_fail.yml']
49
-
50
- for detection_name in detections_to_populate:
51
- shutil.copyfile(
52
- source_path/detection_name,
53
- dest_path/detection_name)
26
+ #copy the contents of all template directories
27
+ for templateDir, targetDir in [
28
+ ('../templates/app_template/', 'app_template'),
29
+ ('../templates/deployments/', 'deployments'),
30
+ ('../templates/detections/', 'detections'),
31
+ ('../templates/macros/','macros'),
32
+ ('../templates/stories/', 'stories'),
33
+ ]:
34
+ source_directory = pathlib.Path(os.path.dirname(__file__))/templateDir
35
+ target_directory = config.path/targetDir
36
+ #Throw an exception if the target exists
37
+ shutil.copytree(source_directory, target_directory, dirs_exist_ok=False)
54
38
 
39
+ #Create the config file as well
40
+ shutil.copyfile(pathlib.Path(os.path.dirname(__file__))/'../templates/README','README')
55
41
 
56
- shutil.copytree(
57
- os.path.join(os.path.dirname(__file__), '../templates/deployments'),
58
- os.path.join(input_dto.path, 'deployments')
59
- )
60
-
61
- shutil.copyfile(
62
- os.path.join(os.path.dirname(__file__), '../templates/stories/cobalt_strike.yml'),
63
- os.path.join(input_dto.path, 'stories', 'cobalt_strike.yml')
64
- )
65
-
66
- shutil.copyfile(
67
- os.path.join(os.path.dirname(__file__), '../templates/macros/security_content_ctime.yml'),
68
- os.path.join(input_dto.path, 'macros', 'security_content_ctime.yml')
69
- )
70
-
71
- shutil.copyfile(
72
- os.path.join(os.path.dirname(__file__), '../templates/macros/security_content_summariesonly.yml'),
73
- os.path.join(input_dto.path, 'macros', 'security_content_summariesonly.yml')
74
- )
75
-
76
- shutil.copyfile(
77
- os.path.join(os.path.dirname(__file__), '../templates/README'),
78
- os.path.join(input_dto.path, 'README')
79
- )
80
42
 
81
- print('The following folders were created: {0} under {1}.\nContent pack has been initialized, please run `new` to create new content.'.format(folders, input_dto.path))
43
+ print(f"The app '{config.app.title}' has been initialized. "
44
+ "Please run 'contentctl new --type {detection,story}' to create new content")
82
45
 
@@ -0,0 +1,260 @@
1
+ import sys
2
+
3
+
4
+ from dataclasses import dataclass
5
+
6
+ import pathlib
7
+ import json
8
+ import datetime
9
+
10
+
11
+ from contentctl.objects.config import inspect
12
+ from requests import Session, post, get
13
+ from requests.auth import HTTPBasicAuth
14
+ import timeit
15
+ import time
16
+ @dataclass(frozen=True)
17
+ class InspectInputDto:
18
+ config:inspect
19
+
20
+
21
+ class Inspect:
22
+
23
+ def execute(self, config: inspect) -> str:
24
+ if config.build_app or config.build_api:
25
+
26
+ self.inspectAppCLI(config)
27
+ appinspect_token = self.inspectAppAPI(config)
28
+
29
+
30
+ return appinspect_token
31
+
32
+ else:
33
+ raise Exception("Inspect only supported for app and api build targets")
34
+
35
+ def getElapsedTime(self, startTime:float)->datetime.timedelta:
36
+ return datetime.timedelta(seconds=round(timeit.default_timer() - startTime))
37
+
38
+
39
+ def inspectAppAPI(self, config: inspect)->str:
40
+ session = Session()
41
+ session.auth = HTTPBasicAuth(config.splunk_api_username, config.splunk_api_password)
42
+ if config.stack_type not in ['victoria', 'classic']:
43
+ raise Exception(f"stack_type MUST be either 'classic' or 'victoria', NOT '{config.stack_type}'")
44
+
45
+ APPINSPECT_API_LOGIN = "https://api.splunk.com/2.0/rest/login/splunk"
46
+
47
+
48
+
49
+ res = session.get(APPINSPECT_API_LOGIN)
50
+ #If login failed or other failure, raise an exception
51
+ res.raise_for_status()
52
+
53
+ authorization_bearer = res.json().get("data",{}).get("token",None)
54
+ APPINSPECT_API_VALIDATION_REQUEST = "https://appinspect.splunk.com/v1/app/validate"
55
+ headers = {
56
+ "Authorization": f"bearer {authorization_bearer}",
57
+ "Cache-Control": "no-cache"
58
+ }
59
+
60
+ package_path = config.getPackageFilePath(include_version=False)
61
+ if not package_path.is_file():
62
+ raise Exception(f"Cannot run Appinspect API on App '{config.app.title}' - "
63
+ f"no package exists as expected path '{package_path}'.\nAre you "
64
+ "trying to 'contentctl acs_deploy' the package BEFORE running 'contentctl build'?")
65
+
66
+ files = {
67
+ "app_package": open(package_path,"rb"),
68
+ "included_tags":(None,"cloud")
69
+ }
70
+
71
+ res = post(APPINSPECT_API_VALIDATION_REQUEST, headers=headers, files=files)
72
+
73
+ res.raise_for_status()
74
+
75
+ request_id = res.json().get("request_id",None)
76
+ APPINSPECT_API_VALIDATION_STATUS = f"https://appinspect.splunk.com/v1/app/validate/status/{request_id}?included_tags=private_{config.stack_type}"
77
+ headers = headers = {
78
+ "Authorization": f"bearer {authorization_bearer}"
79
+ }
80
+ startTime = timeit.default_timer()
81
+ # the first time, wait for 40 seconds. subsequent times, wait for less.
82
+ # this is because appinspect takes some time to return, so there is no sense
83
+ # checking many times when we know it will take at least 40 seconds to run.
84
+ iteration_wait_time = 40
85
+ while True:
86
+
87
+ res = get(APPINSPECT_API_VALIDATION_STATUS, headers=headers)
88
+ res.raise_for_status()
89
+ status = res.json().get("status",None)
90
+ if status in ["PROCESSING", "PREPARING"]:
91
+ print(f"[{self.getElapsedTime(startTime)}] Appinspect API is {status}...")
92
+ time.sleep(iteration_wait_time)
93
+ iteration_wait_time = 1
94
+ continue
95
+ elif status == "SUCCESS":
96
+ print(f"[{self.getElapsedTime(startTime)}] Appinspect API has finished!")
97
+ break
98
+ else:
99
+ raise Exception(f"Error - Unknown Appinspect API status '{status}'")
100
+
101
+
102
+
103
+ #We have finished running appinspect, so get the report
104
+ APPINSPECT_API_REPORT = f"https://appinspect.splunk.com/v1/app/report/{request_id}?included_tags=private_{config.stack_type}"
105
+ #Get human-readable HTML report
106
+ headers = headers = {
107
+ "Authorization": f"bearer {authorization_bearer}",
108
+ "Content-Type": "text/html"
109
+ }
110
+ res = get(APPINSPECT_API_REPORT, headers=headers)
111
+ res.raise_for_status()
112
+ report_html = res.content
113
+
114
+ #Get JSON report for processing
115
+ headers = headers = {
116
+ "Authorization": f"bearer {authorization_bearer}",
117
+ "Content-Type": "application/json"
118
+ }
119
+ res = get(APPINSPECT_API_REPORT, headers=headers)
120
+ res.raise_for_status()
121
+ report_json = res.json()
122
+
123
+ # Just get app path here to avoid long function calls in the open() calls below
124
+ appPath = config.getPackageFilePath(include_version=True)
125
+ appinpect_html_path = appPath.with_suffix(appPath.suffix+".appinspect_api_results.html")
126
+ appinspect_json_path = appPath.with_suffix(appPath.suffix+".appinspect_api_results.json")
127
+ #Use the full path of the app, but update the suffix to include info about appinspect
128
+ with open(appinpect_html_path, "wb") as report:
129
+ report.write(report_html)
130
+ with open(appinspect_json_path, "w") as report:
131
+ json.dump(report_json, report)
132
+
133
+
134
+ self.parseAppinspectJsonLogFile(appinspect_json_path)
135
+
136
+
137
+ return authorization_bearer
138
+
139
+
140
def inspectAppCLI(self, config:inspect)-> None:
    """
    Run AppInspect locally using the splunk-appinspect CLI.

    Currently DISABLED: the guard Exception at the top of the try block is
    always raised (and immediately caught), so this method only prints an
    explanatory message pointing at the AppInspect API / CLI docs and then
    returns. The code after the guard is retained for when local inspection
    is re-enabled.

    Args:
        config: the inspect action configuration (unused while disabled).

    Returns:
        None. While disabled, always returns after printing the guard message.
    """
    try:
        # Deliberate guard: local CLI inspection is unsupported at this time.
        # Raising here skips the splunk_appinspect import and everything below.
        # (Fixed typos in the user-facing message: "spunk" -> "splunk" and a
        # missing space that previously rendered as "your app withPython 3.7".)
        raise Exception("Local splunk-appinspect Not Supported at this time (you may use the appinspect api). If you would like to locally inspect your app with "
                        "Python 3.7, 3.8, or 3.9 (with limited support), please refer to:\n"
                        "\t - https://dev.splunk.com/enterprise/docs/developapps/testvalidate/appinspect/useappinspectclitool/")
        from splunk_appinspect.main import (
            validate, MODE_OPTION, APP_PACKAGE_ARGUMENT, OUTPUT_FILE_OPTION,
            LOG_FILE_OPTION, INCLUDED_TAGS_OPTION, EXCLUDED_TAGS_OPTION,
            PRECERT_MODE, TEST_MODE)
    except Exception as e:
        print(e)
        # print("******WARNING******")
        # if sys.version_info.major == 3 and sys.version_info.minor > 9:
        #     print("The package splunk-appinspect was not installed due to a current issue with the library on Python3.10+. "
        #           "Please use the following commands to set up a virtualenvironment in a different folder so you may run appinspect manually (if desired):"
        #           "\n\tpython3.9 -m venv .venv"
        #           "\n\tsource .venv/bin/activate"
        #           "\n\tpython3 -m pip install splunk-appinspect"
        #           f"\n\tsplunk-appinspect inspect {self.getPackagePath(include_version=False).relative_to(pathlib.Path('.').absolute())} --mode precert")
        # else:
        #     print("splunk-appinspect is only compatible with Python3.9 at this time. Please see the following open issue here: https://github.com/splunk/contentctl/issues/28")
        # print("******WARNING******")
        return

    # Note that all tags are available and described here:
    # https://dev.splunk.com/enterprise/reference/appinspect/appinspecttagreference/
    # By default, precert mode will run ALL checks. Explicitly included or excluding tags will
    # change this behavior. To give the most thorough inspection, we leave these empty so that
    # ALL checks are run
    included_tags = []
    excluded_tags = []

    appinspect_output = self.dist/f"{self.config.build.title}-{self.config.build.version}.appinspect_cli_results.json"
    appinspect_logging = self.dist/f"{self.config.build.title}-{self.config.build.version}.appinspect_cli_logging.log"
    try:
        arguments_list = [(APP_PACKAGE_ARGUMENT, str(self.getPackagePath(include_version=False)))]
        options_list = []
        options_list += [MODE_OPTION, TEST_MODE]
        options_list += [OUTPUT_FILE_OPTION, str(appinspect_output)]
        options_list += [LOG_FILE_OPTION, str(appinspect_logging)]

        # If there are any tags defined, then include them here
        for opt in included_tags:
            options_list += [INCLUDED_TAGS_OPTION, opt]
        for opt in excluded_tags:
            options_list += [EXCLUDED_TAGS_OPTION, opt]

        cmdline = options_list + [arg[1] for arg in arguments_list]
        validate(cmdline)

    except SystemExit as e:
        if e.code == 0:
            # The sys.exit called inside of appinspect validate closes stdin. We need to
            # reopen it.
            sys.stdin = open("/dev/stdin","r")
            print(f"AppInspect passed! Please check [ {appinspect_output} , {appinspect_logging} ] for verbose information.")
        else:
            if sys.version.startswith('3.11') or sys.version.startswith('3.12'):
                raise Exception("At this time, AppInspect may fail on valid apps under Python>=3.11 with "
                                "the error 'global flags not at the start of the expression at position 1'. "
                                "If you encounter this error, please run AppInspect on a version of Python "
                                "<3.11. This issue is currently tracked. Please review the appinspect "
                                "report output above for errors.")
            else:
                raise Exception("AppInspect Failure - Please review the appinspect report output above for errors.")
    finally:
        # appinspect outputs the log in json format, but does not format it to be easier
        # to read (it is all in one line). Read back that file and write it so it
        # is easier to understand

        # Note that this may raise an exception itself!
        self.parseAppinspectJsonLogFile(appinspect_output)
def parseAppinspectJsonLogFile(self, logfile_path:pathlib.Path,
                               status_types:list[str] = ["error", "failure", "manual_check", "warning"],
                               exception_types = ["error","failure","manual_check"] )->None:
    """
    Parse an AppInspect JSON report, pretty-print it back to disk in place,
    and summarize any findings.

    Args:
        logfile_path: path to the JSON report produced by AppInspect
            (API or CLI).
        status_types: check result values worth reporting. NOTE: the mutable
            list defaults are never mutated by this method, so the
            shared-default pitfall does not apply; they are kept for
            signature compatibility.
        exception_types: subset of status_types that are fatal; if the
            report summary counts any of these, an Exception is raised.

    Raises:
        Exception: if exception_types is not a subset of status_types, if the
            report is malformed (not exactly one report, or a missing
            summary), or if any fatal finding is present.
    """
    if not set(exception_types).issubset(set(status_types)):
        raise Exception(f"Error - exception_types {exception_types} MUST be a subset of status_types {status_types}, but it is not")

    # AppInspect emits the JSON report on a single line. Rewrite it with
    # indentation so it is easier for a human to read.
    with open(logfile_path, "r+") as logfile:
        j = json.load(logfile)
        # Seek back to the beginning of the file, then truncate after the
        # dump. The indented dump is at least as long as the compact
        # original, but truncating makes the rewrite safe regardless of
        # the prior contents.
        logfile.seek(0)
        json.dump(j, logfile, indent=3)
        logfile.truncate()

    reports = j.get("reports", [])
    if len(reports) != 1:
        # Bug fix: the old message always claimed "found 0" even when more
        # than one report was present; report the actual count.
        raise Exception(f"Expected to find exactly one appinspect report but found {len(reports)}")

    # One human-readable line per check whose result is an interesting status.
    verbose_errors = []
    for group in reports[0].get("groups", []):
        for check in group.get("checks", []):
            if check.get("result", "") in status_types:
                verbose_errors.append(f" - {check.get('result','')} [{group.get('name','NONAME')}: {check.get('name', 'NONAME')}]")
    verbose_errors.sort()

    summary = j.get("summary", None)
    if summary is None:
        raise Exception("Missing summary from appinspect report")

    msgs = []
    generated_exception = False
    for key in status_types:
        if summary.get(key, 0) > 0:
            msgs.append(f" - {summary.get(key,0)} {key}s")
            if key in exception_types:
                generated_exception = True

    if len(msgs) > 0 or len(verbose_errors):
        # Use a distinct name rather than shadowing the summary dict above.
        summary_text = '\n'.join(msgs)
        details = '\n'.join(verbose_errors)
        summary_text = f"{summary_text}\nDetails:\n{details}"
        if generated_exception:
            raise Exception(f"AppInspect found [{','.join(exception_types)}] that MUST be addressed to pass AppInspect API:\n{summary_text}")
        else:
            print(f"AppInspect found [{','.join(status_types)}] that MAY cause a failure during AppInspect API:\n{summary_text}")
    else:
        print("AppInspect was successful!")

    return