contentctl 4.4.7-py3-none-any.whl → 5.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. contentctl/__init__.py +1 -1
  2. contentctl/actions/build.py +102 -57
  3. contentctl/actions/deploy_acs.py +29 -24
  4. contentctl/actions/detection_testing/DetectionTestingManager.py +66 -42
  5. contentctl/actions/detection_testing/GitService.py +134 -76
  6. contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
  7. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +192 -147
  8. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
  9. contentctl/actions/detection_testing/progress_bar.py +9 -6
  10. contentctl/actions/detection_testing/views/DetectionTestingView.py +16 -19
  11. contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
  12. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
  13. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
  14. contentctl/actions/doc_gen.py +9 -5
  15. contentctl/actions/initialize.py +45 -33
  16. contentctl/actions/inspect.py +118 -61
  17. contentctl/actions/new_content.py +155 -108
  18. contentctl/actions/release_notes.py +276 -146
  19. contentctl/actions/reporting.py +23 -19
  20. contentctl/actions/test.py +33 -28
  21. contentctl/actions/validate.py +55 -34
  22. contentctl/api.py +54 -45
  23. contentctl/contentctl.py +124 -90
  24. contentctl/enrichments/attack_enrichment.py +112 -72
  25. contentctl/enrichments/cve_enrichment.py +34 -28
  26. contentctl/enrichments/splunk_app_enrichment.py +38 -36
  27. contentctl/helper/link_validator.py +101 -78
  28. contentctl/helper/splunk_app.py +69 -41
  29. contentctl/helper/utils.py +58 -53
  30. contentctl/input/director.py +68 -36
  31. contentctl/input/new_content_questions.py +27 -35
  32. contentctl/input/yml_reader.py +28 -18
  33. contentctl/objects/abstract_security_content_objects/detection_abstract.py +303 -259
  34. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +115 -52
  35. contentctl/objects/alert_action.py +10 -9
  36. contentctl/objects/annotated_types.py +1 -1
  37. contentctl/objects/atomic.py +65 -54
  38. contentctl/objects/base_test.py +5 -3
  39. contentctl/objects/base_test_result.py +19 -11
  40. contentctl/objects/baseline.py +62 -30
  41. contentctl/objects/baseline_tags.py +30 -24
  42. contentctl/objects/config.py +790 -597
  43. contentctl/objects/constants.py +33 -56
  44. contentctl/objects/correlation_search.py +150 -136
  45. contentctl/objects/dashboard.py +55 -41
  46. contentctl/objects/data_source.py +16 -17
  47. contentctl/objects/deployment.py +43 -44
  48. contentctl/objects/deployment_email.py +3 -2
  49. contentctl/objects/deployment_notable.py +4 -2
  50. contentctl/objects/deployment_phantom.py +7 -6
  51. contentctl/objects/deployment_rba.py +3 -2
  52. contentctl/objects/deployment_scheduling.py +3 -2
  53. contentctl/objects/deployment_slack.py +3 -2
  54. contentctl/objects/detection.py +5 -2
  55. contentctl/objects/detection_metadata.py +1 -0
  56. contentctl/objects/detection_stanza.py +7 -2
  57. contentctl/objects/detection_tags.py +58 -103
  58. contentctl/objects/drilldown.py +66 -34
  59. contentctl/objects/enums.py +81 -100
  60. contentctl/objects/errors.py +16 -24
  61. contentctl/objects/integration_test.py +3 -3
  62. contentctl/objects/integration_test_result.py +1 -0
  63. contentctl/objects/investigation.py +59 -36
  64. contentctl/objects/investigation_tags.py +30 -19
  65. contentctl/objects/lookup.py +304 -101
  66. contentctl/objects/macro.py +55 -39
  67. contentctl/objects/manual_test.py +3 -3
  68. contentctl/objects/manual_test_result.py +1 -0
  69. contentctl/objects/mitre_attack_enrichment.py +17 -16
  70. contentctl/objects/notable_action.py +2 -1
  71. contentctl/objects/notable_event.py +1 -3
  72. contentctl/objects/playbook.py +37 -35
  73. contentctl/objects/playbook_tags.py +23 -13
  74. contentctl/objects/rba.py +96 -0
  75. contentctl/objects/risk_analysis_action.py +15 -11
  76. contentctl/objects/risk_event.py +110 -160
  77. contentctl/objects/risk_object.py +1 -0
  78. contentctl/objects/savedsearches_conf.py +9 -7
  79. contentctl/objects/security_content_object.py +5 -2
  80. contentctl/objects/story.py +54 -49
  81. contentctl/objects/story_tags.py +56 -45
  82. contentctl/objects/test_attack_data.py +2 -1
  83. contentctl/objects/test_group.py +5 -2
  84. contentctl/objects/threat_object.py +1 -0
  85. contentctl/objects/throttling.py +27 -18
  86. contentctl/objects/unit_test.py +3 -4
  87. contentctl/objects/unit_test_baseline.py +5 -5
  88. contentctl/objects/unit_test_result.py +6 -6
  89. contentctl/output/api_json_output.py +233 -220
  90. contentctl/output/attack_nav_output.py +21 -21
  91. contentctl/output/attack_nav_writer.py +29 -37
  92. contentctl/output/conf_output.py +235 -172
  93. contentctl/output/conf_writer.py +201 -125
  94. contentctl/output/data_source_writer.py +38 -26
  95. contentctl/output/doc_md_output.py +53 -27
  96. contentctl/output/jinja_writer.py +19 -15
  97. contentctl/output/json_writer.py +21 -11
  98. contentctl/output/svg_output.py +56 -38
  99. contentctl/output/templates/analyticstories_detections.j2 +2 -2
  100. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  101. contentctl/output/templates/collections.j2 +1 -1
  102. contentctl/output/templates/doc_detections.j2 +0 -5
  103. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  104. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  105. contentctl/output/templates/savedsearches_baselines.j2 +2 -2
  106. contentctl/output/templates/savedsearches_detections.j2 +10 -11
  107. contentctl/output/templates/savedsearches_investigations.j2 +2 -2
  108. contentctl/output/templates/transforms.j2 +6 -8
  109. contentctl/output/yml_writer.py +29 -20
  110. contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +16 -34
  111. contentctl/templates/stories/cobalt_strike.yml +1 -0
  112. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/METADATA +5 -4
  113. contentctl-5.0.0.dist-info/RECORD +168 -0
  114. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/WHEEL +1 -1
  115. contentctl/actions/initialize_old.py +0 -245
  116. contentctl/objects/event_source.py +0 -11
  117. contentctl/objects/observable.py +0 -37
  118. contentctl/output/detection_writer.py +0 -28
  119. contentctl/output/new_content_yml_output.py +0 -56
  120. contentctl/output/yml_output.py +0 -66
  121. contentctl-4.4.7.dist-info/RECORD +0 -173
  122. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/LICENSE.md +0 -0
  123. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/entry_points.txt +0 -0
@@ -1,31 +1,39 @@
  from __future__ import annotations

- from os import environ
- from datetime import datetime, UTC
- from typing import Optional, Any, List, Union, Self
- import random
- from enum import StrEnum, auto
  import pathlib
- from urllib.parse import urlparse
+ import random
  from abc import ABC, abstractmethod
+ from datetime import UTC, datetime
+ from enum import StrEnum, auto
  from functools import partialmethod
+ from os import environ
+ from typing import Any, List, Optional, Self, Union
+ from urllib.parse import urlparse

- import tqdm
  import semantic_version
+ import tqdm
  from pydantic import (
- BaseModel, Field, field_validator,
- field_serializer, ConfigDict, DirectoryPath,
- PositiveInt, FilePath, HttpUrl, AnyUrl, model_validator,
- ValidationInfo
+ AnyUrl,
+ BaseModel,
+ ConfigDict,
+ DirectoryPath,
+ Field,
+ FilePath,
+ HttpUrl,
+ PositiveInt,
+ ValidationInfo,
+ field_serializer,
+ field_validator,
+ model_validator,
  )

- from contentctl.objects.constants import DOWNLOADS_DIRECTORY
- from contentctl.output.yml_writer import YmlWriter
+ from contentctl.helper.splunk_app import SplunkApp
  from contentctl.helper.utils import Utils
- from contentctl.objects.enums import PostTestBehavior, DetectionTestingMode
- from contentctl.objects.detection import Detection
  from contentctl.objects.annotated_types import APPID_TYPE
- from contentctl.helper.splunk_app import SplunkApp
+ from contentctl.objects.constants import DOWNLOADS_DIRECTORY
+ from contentctl.objects.detection import Detection
+ from contentctl.objects.enums import PostTestBehavior
+ from contentctl.output.yml_writer import YmlWriter

  ENTERPRISE_SECURITY_UID = 263
  COMMON_INFORMATION_MODEL_UID = 1621
@@ -33,59 +41,71 @@ COMMON_INFORMATION_MODEL_UID = 1621
  SPLUNKBASE_URL = "https://splunkbase.splunk.com/app/{uid}/release/{version}/download"


- # TODO (#266): disable the use_enum_values configuration
- class App_Base(BaseModel,ABC):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
+ class App_Base(BaseModel, ABC):
+ model_config = ConfigDict(
+ validate_default=True, arbitrary_types_allowed=True, extra="forbid"
+ )
  uid: Optional[int] = Field(default=None)
- title: str = Field(description="Human-readable name used by the app. This can have special characters.")
- appid: Optional[APPID_TYPE]= Field(default=None,description="Internal name used by your app. "
- "It may ONLY have characters, numbers, and underscores. No other characters are allowed.")
- version: str = Field(description="The version of your Content Pack. This must follow semantic versioning guidelines.")
- description: Optional[str] = Field(default="description of app",description="Free text description of the Content Pack.")
-
-
-
+ title: str = Field(
+ description="Human-readable name used by the app. This can have special characters."
+ )
+ appid: Optional[APPID_TYPE] = Field(
+ default=None,
+ description="Internal name used by your app. "
+ "It may ONLY have characters, numbers, and underscores. No other characters are allowed.",
+ )
+ version: str = Field(
+ description="The version of your Content Pack. This must follow semantic versioning guidelines."
+ )
+ description: Optional[str] = Field(
+ default="description of app",
+ description="Free text description of the Content Pack.",
+ )

- def getSplunkbasePath(self)->HttpUrl:
+ def getSplunkbasePath(self) -> HttpUrl:
  return HttpUrl(SPLUNKBASE_URL.format(uid=self.uid, release=self.version))

  @abstractmethod
- def getApp(self, config:test, stage_file:bool=False)->str:
- ...
+ def getApp(self, config: test, stage_file: bool = False) -> str: ...

- def ensureAppPathExists(self, config:test, stage_file:bool=False):
+ def ensureAppPathExists(self, config: test, stage_file: bool = False):
  if stage_file:
  if not config.getLocalAppDir().exists():
  config.getLocalAppDir().mkdir(parents=True)


- # TODO (#266): disable the use_enum_values configuration
  class TestApp(App_Base):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- hardcoded_path: Optional[Union[FilePath,HttpUrl]] = Field(default=None, description="This may be a relative or absolute link to a file OR an HTTP URL linking to your app.")
-
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ hardcoded_path: Optional[Union[FilePath, HttpUrl]] = Field(
+ default=None,
+ description="This may be a relative or absolute link to a file OR an HTTP URL linking to your app.",
+ )

- @field_serializer('hardcoded_path',when_used='always')
- def serialize_path(path: Union[AnyUrl, pathlib.Path])->str:
+ @field_serializer("hardcoded_path", when_used="always")
+ def serialize_path(path: Union[AnyUrl, pathlib.Path]) -> str:
  return str(path)

- def getApp(self, config:test,stage_file:bool=False)->str:
- #If the apps directory does not exist, then create it
- self.ensureAppPathExists(config,stage_file)
+ def getApp(self, config: test, stage_file: bool = False) -> str:
+ # If the apps directory does not exist, then create it
+ self.ensureAppPathExists(config, stage_file)

- if config.splunk_api_password is not None and config.splunk_api_username is not None:
+ if (
+ config.splunk_api_password is not None
+ and config.splunk_api_username is not None
+ ):
  if self.version is not None and self.uid is not None:
- return str(self.getSplunkbasePath())
+ return str(self.getSplunkbasePath())
  if self.version is None or self.uid is None:
- print(f"Not downloading {self.title} from Splunkbase since uid[{self.uid}] AND version[{self.version}] MUST be defined")
-
-
+ print(
+ f"Not downloading {self.title} from Splunkbase since uid[{self.uid}] AND version[{self.version}] MUST be defined"
+ )
+
  elif isinstance(self.hardcoded_path, pathlib.Path):
  destination = config.getLocalAppDir() / self.hardcoded_path.name
  if stage_file:
- Utils.copy_local_file(str(self.hardcoded_path),
- str(destination),
- verbose_print=True)
+ Utils.copy_local_file(
+ str(self.hardcoded_path), str(destination), verbose_print=True
+ )

  elif isinstance(self.hardcoded_path, AnyUrl):
  file_url_string = str(self.hardcoded_path)
@@ -95,24 +115,42 @@ class TestApp(App_Base):
  Utils.download_file_from_http(file_url_string, str(destination))
  else:
  raise Exception(f"Unknown path for app '{self.title}'")
-
+
  return str(destination)


- # TODO (#266): disable the use_enum_values configuration
  class CustomApp(App_Base):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
  # Fields required for app.conf based on
  # https://docs.splunk.com/Documentation/Splunk/9.0.4/Admin/Appconf
- uid: int = Field(ge=2, lt=100000, default_factory=lambda:random.randint(20000,100000))
- title: str = Field(default="Content Pack",description="Human-readable name used by the app. This can have special characters.")
- appid: APPID_TYPE = Field(default="ContentPack",description="Internal name used by your app. "
- "It may ONLY have characters, numbers, and underscores. No other characters are allowed.")
- version: str = Field(default="0.0.1",description="The version of your Content Pack. This must follow semantic versioning guidelines.", validate_default=True)
-
- prefix: str = Field(default="ContentPack",description="A short prefix to easily identify all your content.")
- build: int = Field(exclude=True, default=int(datetime.now(UTC).strftime("%Y%m%d%H%M%S")), validate_default=True,
- description="Build number for your app. This will always be a number that corresponds to the time of the build in the format YYYYMMDDHHMMSS")
+ uid: int = Field(
+ ge=2, lt=100000, default_factory=lambda: random.randint(20000, 100000)
+ )
+ title: str = Field(
+ default="Content Pack",
+ description="Human-readable name used by the app. This can have special characters.",
+ )
+ appid: APPID_TYPE = Field(
+ default="ContentPack",
+ description="Internal name used by your app. "
+ "It may ONLY have characters, numbers, and underscores. No other characters are allowed.",
+ )
+ version: str = Field(
+ default="0.0.1",
+ description="The version of your Content Pack. This must follow semantic versioning guidelines.",
+ validate_default=True,
+ )
+
+ prefix: str = Field(
+ default="ContentPack",
+ description="A short prefix to easily identify all your content.",
+ )
+ build: int = Field(
+ exclude=True,
+ default=int(datetime.now(UTC).strftime("%Y%m%d%H%M%S")),
+ validate_default=True,
+ description="Build number for your app. This will always be a number that corresponds to the time of the build in the format YYYYMMDDHHMMSS",
+ )
  # id has many restrictions:
  # * Omit this setting for apps that are for internal use only and not intended
  # for upload to Splunkbase.
@@ -128,164 +166,217 @@ class CustomApp(App_Base):
  # * must not be any of the following names: CON, PRN, AUX, NUL,
  # COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9,
  # LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, LPT9
-
- label: str = Field(default="Custom Splunk Content Pack",description="This is the app name that shows in the launcher.")
- author_name: str = Field(default="author name",description="Name of the Content Pack Author.")
- author_email: str = Field(default="author@contactemailaddress.com",description="Contact email for the Content Pack Author")
- author_company: str = Field(default="author company",description="Name of the company who has developed the Content Pack")
- description: str = Field(default="description of app",description="Free text description of the Content Pack.")

+ label: str = Field(
+ default="Custom Splunk Content Pack",
+ description="This is the app name that shows in the launcher.",
+ )
+ author_name: str = Field(
+ default="author name", description="Name of the Content Pack Author."
+ )
+ author_email: str = Field(
+ default="author@contactemailaddress.com",
+ description="Contact email for the Content Pack Author",
+ )
+ author_company: str = Field(
+ default="author company",
+ description="Name of the company who has developed the Content Pack",
+ )
+ description: str = Field(
+ default="description of app",
+ description="Free text description of the Content Pack.",
+ )

- @field_validator('version')
+ @field_validator("version")
  def validate_version(cls, v, values):
  try:
  _ = semantic_version.Version(v)
  except Exception as e:
- raise(ValueError(f"The specified version does not follow the semantic versioning spec (https://semver.org/). {str(e)}"))
+ raise (
+ ValueError(
+ f"The specified version does not follow the semantic versioning spec (https://semver.org/). {str(e)}"
+ )
+ )
  return v
-
- #Build will ALWAYS be the current utc timestamp
- @field_validator('build')
+
+ # Build will ALWAYS be the current utc timestamp
+ @field_validator("build")
  def validate_build(cls, v, values):
  return int(datetime.utcnow().strftime("%Y%m%d%H%M%S"))
-
- def getApp(self, config:test, stage_file=True)->str:
- self.ensureAppPathExists(config,stage_file)
-
- destination = config.getLocalAppDir() / (config.getPackageFilePath(include_version=True).name)
+
+ def getApp(self, config: test, stage_file=True) -> str:
+ self.ensureAppPathExists(config, stage_file)
+
+ destination = config.getLocalAppDir() / (
+ config.getPackageFilePath(include_version=True).name
+ )
  if stage_file:
- Utils.copy_local_file(str(config.getPackageFilePath(include_version=True)),
- str(destination),
- verbose_print=True)
+ Utils.copy_local_file(
+ str(config.getPackageFilePath(include_version=True)),
+ str(destination),
+ verbose_print=True,
+ )
  return str(destination)
-
- # TODO (#266): disable the use_enum_values configuration
+
+
  class Config_Base(BaseModel):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
-
- path: DirectoryPath = Field(default=DirectoryPath("."), description="The root of your app.")
- app:CustomApp = Field(default_factory=CustomApp)
- verbose:bool = Field(default=False, description="Enable verbose error logging, including a stacktrace. "
- "This option makes debugging contentctl errors much easier, but produces way more "
- "output than is useful under most uses cases. "
- "Please use this flag if you are submitting a bug report or issue on GitHub.")
-
- @field_serializer('path',when_used='always')
- def serialize_path(path: DirectoryPath)->str:
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+
+ path: DirectoryPath = Field(
+ default=DirectoryPath("."), description="The root of your app."
+ )
+ app: CustomApp = Field(default_factory=CustomApp)
+ verbose: bool = Field(
+ default=False,
+ description="Enable verbose error logging, including a stacktrace. "
+ "This option makes debugging contentctl errors much easier, but produces way more "
+ "output than is useful under most uses cases. "
+ "Please use this flag if you are submitting a bug report or issue on GitHub.",
+ )
+
+ @field_serializer("path", when_used="always")
+ def serialize_path(path: DirectoryPath) -> str:
  return str(path)

+
  class init(Config_Base):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- bare: bool = Field(default=False, description="contentctl normally provides some some example content "
- "(macros, stories, data_sources, and/or analytic stories). This option disables "
- "initialization with that additional contnet. Note that even if --bare is used, it "
- "init will still create the directory structure of the app, "
- "include the app_template directory with default content, and content in "
- "the deployment/ directory (since it is not yet easily customizable).")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ bare: bool = Field(
+ default=False,
+ description="contentctl normally provides some some example content "
+ "(macros, stories, data_sources, and/or analytic stories). This option disables "
+ "initialization with that additional contnet. Note that even if --bare is used, it "
+ "init will still create the directory structure of the app, "
+ "include the app_template directory with default content, and content in "
+ "the deployment/ directory (since it is not yet easily customizable).",
+ )


- # TODO (#266): disable the use_enum_values configuration
  class validate(Config_Base):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- enrichments: bool = Field(default=False, description="Enable MITRE, APP, and CVE Enrichments. "\
- "This is useful when outputting a release build "\
- "and validating these values, but should otherwise "\
- "be avoided for performance reasons.")
- build_app: bool = Field(default=True, description="Should an app be built and output in the build_path?")
- build_api: bool = Field(default=False, description="Should api objects be built and output in the build_path?")
- data_source_TA_validation: bool = Field(default=False, description="Validate latest TA information from Splunkbase")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ enrichments: bool = Field(
+ default=False,
+ description="Enable MITRE, APP, and CVE Enrichments. "
+ "This is useful when outputting a release build "
+ "and validating these values, but should otherwise "
+ "be avoided for performance reasons.",
+ )
+ build_app: bool = Field(
+ default=True, description="Should an app be built and output in the build_path?"
+ )
+ build_api: bool = Field(
+ default=False,
+ description="Should api objects be built and output in the build_path?",
+ )
+ data_source_TA_validation: bool = Field(
+ default=False, description="Validate latest TA information from Splunkbase"
+ )

  @property
- def external_repos_path(self)->pathlib.Path:
- return self.path/"external_repos"
+ def external_repos_path(self) -> pathlib.Path:
+ return self.path / "external_repos"

- @property
- def mitre_cti_repo_path(self)->pathlib.Path:
- return self.external_repos_path/"cti"
+ @property
+ def mitre_cti_repo_path(self) -> pathlib.Path:
+ return self.external_repos_path / "cti"

  @property
  def atomic_red_team_repo_path(self):
- return self.external_repos_path/"atomic-red-team"
+ return self.external_repos_path / "atomic-red-team"

  @model_validator(mode="after")
- def ensureEnrichmentReposPresent(self)->Self:
- '''
- Ensures that the enrichments repos, the atomic red team repo and the
+ def ensureEnrichmentReposPresent(self) -> Self:
+ """
+ Ensures that the enrichments repos, the atomic red team repo and the
  mitre attack enrichment repo, are present at the inded path.
  Raises a detailed exception if either of these are not present
  when enrichments are enabled.
- '''
+ """
  if not self.enrichments:
  return self
  # If enrichments are enabled, ensure that all of the
  # enrichment directories exist
- missing_repos:list[str] = []
+ missing_repos: list[str] = []
  if not self.atomic_red_team_repo_path.is_dir():
- missing_repos.append(f"https://github.com/redcanaryco/atomic-red-team {self.atomic_red_team_repo_path}")
+ missing_repos.append(
+ f"https://github.com/redcanaryco/atomic-red-team {self.atomic_red_team_repo_path}"
+ )

  if not self.mitre_cti_repo_path.is_dir():
- missing_repos.append(f"https://github.com/mitre/cti {self.mitre_cti_repo_path}")
-
- if len(missing_repos) > 0:
- msg_list = ["The following repositories, which are required for enrichment, have not "
- f"been checked out to the {self.external_repos_path} directory. "
- "Please check them out using the following commands:"]
- msg_list.extend([f"git clone --single-branch {repo_string}" for repo_string in missing_repos])
- msg = '\n\t'.join(msg_list)
+ missing_repos.append(
+ f"https://github.com/mitre/cti {self.mitre_cti_repo_path}"
+ )
+
+ if len(missing_repos) > 0:
+ msg_list = [
+ "The following repositories, which are required for enrichment, have not "
+ f"been checked out to the {self.external_repos_path} directory. "
+ "Please check them out using the following commands:"
+ ]
+ msg_list.extend(
+ [
+ f"git clone --single-branch {repo_string}"
+ for repo_string in missing_repos
+ ]
+ )
+ msg = "\n\t".join(msg_list)
  raise FileNotFoundError(msg)
  return self

+
  class report(validate):
- #reporting takes no extra args, but we define it here so that it can be a mode on the command line
- def getReportingPath(self)->pathlib.Path:
- return self.path/"reporting/"
+ # reporting takes no extra args, but we define it here so that it can be a mode on the command line
+ def getReportingPath(self) -> pathlib.Path:
+ return self.path / "reporting/"


- # TODO (#266): disable the use_enum_values configuration
  class build(validate):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- build_path: DirectoryPath = Field(default=DirectoryPath("dist/"), title="Target path for all build outputs")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ build_path: DirectoryPath = Field(
+ default=DirectoryPath("dist/"), title="Target path for all build outputs"
+ )

- @field_serializer('build_path',when_used='always')
- def serialize_build_path(path: DirectoryPath)->str:
+ @field_serializer("build_path", when_used="always")
+ def serialize_build_path(path: DirectoryPath) -> str:
  return str(path)

- @field_validator('build_path',mode='before')
+ @field_validator("build_path", mode="before")
  @classmethod
- def ensure_build_path(cls, v:Union[str,DirectoryPath]):
- '''
+ def ensure_build_path(cls, v: Union[str, DirectoryPath]):
+ """
  If the build path does not exist, then create it.
  If the build path is actually a file, then raise a descriptive
  exception.
- '''
- if isinstance(v,str):
+ """
+ if isinstance(v, str):
  v = pathlib.Path(v)
  if v.is_dir():
  return v
  elif v.is_file():
- raise ValueError(f"Build path {v} must be a directory, but instead it is a file")
+ raise ValueError(
+ f"Build path {v} must be a directory, but instead it is a file"
+ )
  elif not v.exists():
  v.mkdir(parents=True)
  return v
-
- def getBuildDir(self)->pathlib.Path:
+
+ def getBuildDir(self) -> pathlib.Path:
  return self.path / self.build_path

- def getPackageDirectoryPath(self)->pathlib.Path:
- return self.getBuildDir() / f"{self.app.appid}"
-
+ def getPackageDirectoryPath(self) -> pathlib.Path:
+ return self.getBuildDir() / f"{self.app.appid}"

- def getPackageFilePath(self, include_version:bool=False)->pathlib.Path:
+ def getPackageFilePath(self, include_version: bool = False) -> pathlib.Path:
  if include_version:
  return self.getBuildDir() / f"{self.app.appid}-{self.app.version}.tar.gz"
  else:
  return self.getBuildDir() / f"{self.app.appid}-latest.tar.gz"

- def getAPIPath(self)->pathlib.Path:
+ def getAPIPath(self) -> pathlib.Path:
  return self.getBuildDir() / "api"

- def getAppTemplatePath(self)->pathlib.Path:
- return self.path/"app_template"
+ def getAppTemplatePath(self) -> pathlib.Path:
+ return self.path / "app_template"


  class StackType(StrEnum):
@@ -294,20 +385,19 @@ class StackType(StrEnum):


  class inspect(build):
-
  splunk_api_username: str = Field(
  description="Splunk API username used for appinspect and Splunkbase downloads."
  )
  splunk_api_password: str = Field(
  exclude=True,
- description="Splunk API password used for appinspect and Splunkbase downloads."
+ description="Splunk API password used for appinspect and Splunkbase downloads.",
  )
  enable_metadata_validation: bool = Field(
  default=False,
  description=(
  "Flag indicating whether detection metadata validation and versioning enforcement "
  "should be enabled."
- )
+ ),
  )
  suppress_missing_content_exceptions: bool = Field(
  default=False,
@@ -317,15 +407,15 @@ class inspect(build):
  "is not accidentally removed. In order to support testing both public and private "
  "content, this warning can be suppressed. If it is suppressed, it will still be "
  "printed out as a warning."
- )
+ ),
  )
  enrichments: bool = Field(
  default=True,
  description=(
  "[NOTE: enrichments must be ENABLED for inspect to run. Please adjust your config "
  f"or CLI invocation appropriately] {validate.model_fields['enrichments'].description}"
- )
- )
+ ),
+ )
  # TODO (cmcginley): wording should change here if we want to be able to download any app from
  # Splunkbase
  previous_build: str | None = Field(
@@ -333,13 +423,15 @@ class inspect(build):
  description=(
  "Local path to the previous app build for metatdata validation and versioning "
  "enforcement (defaults to the latest release of the app published on Splunkbase)."
- )
+ ),
  )
  stack_type: StackType = Field(description="The type of your Splunk Cloud Stack")

  @field_validator("enrichments", mode="after")
  @classmethod
- def validate_needed_flags_metadata_validation(cls, v: bool, info: ValidationInfo) -> bool:
+ def validate_needed_flags_metadata_validation(
+ cls, v: bool, info: ValidationInfo
+ ) -> bool:
  """
  Validates that `enrichments` is True for the inspect action

@@ -353,7 +445,9 @@ class inspect(build):
  """
  # Enforce that `enrichments` is True for the inspect action
  if v is False:
- raise ValueError("Field `enrichments` must be True for the `inspect` action")
+ raise ValueError(
+ "Field `enrichments` must be True for the `inspect` action"
+ )

  return v

@@ -379,9 +473,11 @@ class inspect(build):
  username=self.splunk_api_username,
  password=self.splunk_api_password,
  is_dir=True,
- overwrite=True
+ overwrite=True,
+ )
+ print(
+ f"Latest release downloaded from Splunkbase to: {previous_build_path}"
  )
- print(f"Latest release downloaded from Splunkbase to: {previous_build_path}")
  self.previous_build = str(previous_build_path)
  return pathlib.Path(previous_build_path)

@@ -392,467 +488,517 @@ class NewContentType(StrEnum):


  class new(Config_Base):
- type: NewContentType = Field(default=NewContentType.detection, description="Specify the type of content you would like to create.")
+ type: NewContentType = Field(
+ default=NewContentType.detection,
+ description="Specify the type of content you would like to create.",
+ )


- # TODO (#266): disable the use_enum_values configuration
  class deploy_acs(inspect):
- model_config = ConfigDict(use_enum_values=True,validate_default=False, arbitrary_types_allowed=True)
- #ignore linter error
- splunk_cloud_jwt_token: str = Field(exclude=True, description="Splunk JWT used for performing ACS operations on a Splunk Cloud Instance")
+ model_config = ConfigDict(validate_default=False, arbitrary_types_allowed=True)
+ # ignore linter error
+ splunk_cloud_jwt_token: str = Field(
+ exclude=True,
+ description="Splunk JWT used for performing ACS operations on a Splunk Cloud Instance",
+ )
  splunk_cloud_stack: str = Field(description="The name of your Splunk Cloud Stack")


- # TODO (#266): disable the use_enum_values configuration
  class Infrastructure(BaseModel):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- splunk_app_username:str = Field(default="admin", description="Username for logging in to your Splunk Server")
- splunk_app_password:str = Field(exclude=True, default="password", description="Password for logging in to your Splunk Server.")
- instance_address:str = Field(..., description="Address of your splunk server.")
- hec_port: int = Field(default=8088, gt=1, lt=65536, title="HTTP Event Collector Port")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ splunk_app_username: str = Field(
+ default="admin", description="Username for logging in to your Splunk Server"
+ )
+ splunk_app_password: str = Field(
+ exclude=True,
+ default="password",
+ description="Password for logging in to your Splunk Server.",
+ )
+ instance_address: str = Field(..., description="Address of your splunk server.")
+ hec_port: int = Field(
+ default=8088, gt=1, lt=65536, title="HTTP Event Collector Port"
+ )
  web_ui_port: int = Field(default=8000, gt=1, lt=65536, title="Web UI Port")
  api_port: int = Field(default=8089, gt=1, lt=65536, title="REST API Port")
  instance_name: str = Field(...)


- # TODO (#266): disable the use_enum_values configuration
  class Container(Infrastructure):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- instance_address:str = Field(default="localhost", description="Address of your splunk server.")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ instance_address: str = Field(
+ default="localhost", description="Address of your splunk server."
+ )


- # TODO (#266): disable the use_enum_values configuration
  class ContainerSettings(BaseModel):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- leave_running: bool = Field(default=True, description="Leave container running after it is first "
- "set up to speed up subsequent test runs.")
- num_containers: PositiveInt = Field(default=1, description="Number of containers to start in parallel. "
- "Please note that each container is quite expensive to run. It is not "
- "recommended to run more than 4 containers unless you have a very "
- "well-resourced environment.")
- full_image_path:str = Field(default="registry.hub.docker.com/splunk/splunk:latest",
- title="Full path to the container image to be used")
-
- def getContainers(self)->List[Container]:
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ leave_running: bool = Field(
+ default=True,
+ description="Leave container running after it is first "
+ "set up to speed up subsequent test runs.",
+ )
+ num_containers: PositiveInt = Field(
+ default=1,
+ description="Number of containers to start in parallel. "
+ "Please note that each container is quite expensive to run. It is not "
+ "recommended to run more than 4 containers unless you have a very "
+ "well-resourced environment.",
+ )
+ full_image_path: str = Field(
+ default="registry.hub.docker.com/splunk/splunk:9.3",
+ title="Full path to the container image to be used. We are currently pinned to 9.3 as we resolve an issue with waiting to run until app installation completes.",
+ )
+
+ def getContainers(self) -> List[Container]:
  containers = []
  for i in range(self.num_containers):
- containers.append(Container(instance_name="contentctl_{}".format(i),
- web_ui_port=8000+i, hec_port=8088+(i*2), api_port=8089+(i*2)))
+ containers.append(
+ Container(
+ instance_name="contentctl_{}".format(i),
+ web_ui_port=8000 + i,
+ hec_port=8088 + (i * 2),
+ api_port=8089 + (i * 2),
+ )
+ )

  return containers


  class All(BaseModel):
- #Doesn't need any extra logic
+ # Doesn't need any extra logic
+ mode_name: str = "All"
  pass


- # TODO (#266): disable the use_enum_values configuration
  class Changes(BaseModel):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- target_branch:str = Field(...,description="The target branch to diff against. Note that this includes uncommitted changes in the working directory as well.")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ mode_name: str = "Changes"
+ target_branch: str = Field(
+ ...,
+ description="The target branch to diff against. Note that this includes uncommitted changes in the working directory as well.",
+ )


- # TODO (#266): disable the use_enum_values configuration
  class Selected(BaseModel):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- files:List[FilePath] = Field(...,description="List of detection files to test, separated by spaces.")
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ mode_name: str = "Selected"
+ files: List[FilePath] = Field(
+ ..., description="List of detection files to test, separated by spaces."
+ )

- @field_serializer('files',when_used='always')
- def serialize_path(paths: List[FilePath])->List[str]:
+ @field_serializer("files", when_used="always")
+ def serialize_path(paths: List[FilePath]) -> List[str]:
  return [str(path) for path in paths]

- DEFAULT_APPS:List[TestApp] = [
- TestApp(
- uid=1621,
- appid="Splunk_SA_CIM",
- title="Splunk Common Information Model (CIM)",
- version="5.2.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-common-information-model-cim_520.tgz"
- ),
- ),
- TestApp(
- uid=6553,
- appid="Splunk_TA_okta_identity_cloud",
- title="Splunk Add-on for Okta Identity Cloud",
- version="2.1.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-okta-identity-cloud_210.tgz"
- ),
+
+ DEFAULT_APPS: List[TestApp] = [
+ TestApp(
+ uid=1621,
+ appid="Splunk_SA_CIM",
+ title="Splunk Common Information Model (CIM)",
+ version="5.2.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-common-information-model-cim_520.tgz"
+ ),
+ ),
+ TestApp(
+ uid=6553,
+ appid="Splunk_TA_okta_identity_cloud",
+ title="Splunk Add-on for Okta Identity Cloud",
+ version="2.1.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-okta-identity-cloud_210.tgz"
  ),
- TestApp(
- uid=6176,
- appid="Splunk_TA_linux_sysmon",
- title="Add-on for Linux Sysmon",
- version="1.0.4",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/add-on-for-linux-sysmon_104.tgz"
- ),
+ ),
+ TestApp(
+ uid=6176,
+ appid="Splunk_TA_linux_sysmon",
+ title="Add-on for Linux Sysmon",
+ version="1.0.4",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/add-on-for-linux-sysmon_104.tgz"
  ),
- TestApp(
- appid="Splunk_FIX_XMLWINEVENTLOG_HEC_PARSING",
- title="Splunk Fix XmlWinEventLog HEC Parsing",
- version="0.1",
- description="This TA is required for replaying Windows Data into the Test Environment. The Default TA does not include logic for properly splitting multiple log events in a single file. In production environments, this logic is applied by the Universal Forwarder.",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/Splunk_TA_fix_windows.tgz"
- ),
+ ),
+ TestApp(
+ appid="Splunk_FIX_XMLWINEVENTLOG_HEC_PARSING",
+ title="Splunk Fix XmlWinEventLog HEC Parsing",
+ version="0.1",
+ description="This TA is required for replaying Windows Data into the Test Environment. The Default TA does not include logic for properly splitting multiple log events in a single file. In production environments, this logic is applied by the Universal Forwarder.",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/Splunk_TA_fix_windows.tgz"
  ),
- TestApp(
- uid=742,
- appid="SPLUNK_ADD_ON_FOR_MICROSOFT_WINDOWS",
- title="Splunk Add-on for Microsoft Windows",
- version="8.8.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-windows_880.tgz"
- ),
+ ),
+ TestApp(
+ uid=742,
+ appid="SPLUNK_ADD_ON_FOR_MICROSOFT_WINDOWS",
+ title="Splunk Add-on for Microsoft Windows",
+ version="8.8.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-windows_880.tgz"
  ),
- TestApp(
- uid=5709,
- appid="Splunk_TA_microsoft_sysmon",
- title="Splunk Add-on for Sysmon",
- version="4.0.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-sysmon_400.tgz"
- ),
+ ),
+ TestApp(
+ uid=5709,
+ appid="Splunk_TA_microsoft_sysmon",
+ title="Splunk Add-on for Sysmon",
+ version="4.0.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-sysmon_400.tgz"
  ),
- TestApp(
- uid=833,
- appid="Splunk_TA_nix",
- title="Splunk Add-on for Unix and Linux",
- version="9.0.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-unix-and-linux_900.tgz"
- ),
+ ),
+ TestApp(
+ uid=833,
+ appid="Splunk_TA_nix",
+ title="Splunk Add-on for Unix and Linux",
+ version="9.0.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-unix-and-linux_900.tgz"
  ),
- TestApp(
- uid=5579,
- appid="Splunk_TA_CrowdStrike_FDR",
- title="Splunk Add-on for CrowdStrike FDR",
- version="1.5.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-crowdstrike-fdr_150.tgz"
- ),
+ ),
+ TestApp(
+ uid=5579,
+ appid="Splunk_TA_CrowdStrike_FDR",
+ title="Splunk Add-on for CrowdStrike FDR",
+ version="1.5.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-crowdstrike-fdr_150.tgz"
  ),
- TestApp(
- uid=3185,
- appid="SPLUNK_TA_FOR_IIS",
- title="Splunk Add-on for Microsoft IIS",
- version="1.3.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-iis_130.tgz"
- ),
+ ),
+ TestApp(
+ uid=3185,
+ appid="SPLUNK_TA_FOR_IIS",
+ title="Splunk Add-on for Microsoft IIS",
+ version="1.3.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-iis_130.tgz"
  ),
- TestApp(
- uid=4242,
- appid="SPLUNK_TA_FOR_SURICATA",
- title="TA for Suricata",
- version="2.3.4",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-suricata_234.tgz"
- ),
+ ),
+ TestApp(
+ uid=4242,
+ appid="SPLUNK_TA_FOR_SURICATA",
+ title="TA for Suricata",
+ version="2.3.4",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-suricata_234.tgz"
  ),
- TestApp(
- uid=5466,
- appid="SPLUNK_TA_FOR_ZEEK",
- title="TA for Zeek",
- version="1.0.6",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-zeek_106.tgz"
- ),
+ ),
+ TestApp(
+ uid=5466,
+ appid="SPLUNK_TA_FOR_ZEEK",
+ title="TA for Zeek",
+ version="1.0.6",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-zeek_106.tgz"
  ),
- TestApp(
- uid=3258,
- appid="SPLUNK_ADD_ON_FOR_NGINX",
- title="Splunk Add-on for NGINX",
- version="3.2.2",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-nginx_322.tgz"
- ),
+ ),
+ TestApp(
+ uid=3258,
+ appid="SPLUNK_ADD_ON_FOR_NGINX",
+ title="Splunk Add-on for NGINX",
+ version="3.2.2",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-nginx_322.tgz"
  ),
- TestApp(
- uid=5238,
- appid="SPLUNK_ADD_ON_FOR_STREAM_FORWARDERS",
- title="Splunk Add-on for Stream Forwarders",
- version="8.1.1",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-forwarders_811.tgz"
- ),
+ ),
+ TestApp(
+ uid=5238,
+ appid="SPLUNK_ADD_ON_FOR_STREAM_FORWARDERS",
+ title="Splunk Add-on for Stream Forwarders",
+ version="8.1.1",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-forwarders_811.tgz"
  ),
- TestApp(
- uid=5234,
- appid="SPLUNK_ADD_ON_FOR_STREAM_WIRE_DATA",
- title="Splunk Add-on for Stream Wire Data",
- version="8.1.1",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-wire-data_811.tgz"
- ),
+ ),
+ TestApp(
+ uid=5234,
+ appid="SPLUNK_ADD_ON_FOR_STREAM_WIRE_DATA",
+ title="Splunk Add-on for Stream Wire Data",
+ version="8.1.1",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-wire-data_811.tgz"
  ),
- TestApp(
- uid=2757,
- appid="PALO_ALTO_NETWORKS_ADD_ON_FOR_SPLUNK",
- title="Palo Alto Networks Add-on for Splunk",
- version="8.1.1",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/palo-alto-networks-add-on-for-splunk_811.tgz"
- ),
+ ),
+ TestApp(
+ uid=2757,
+ appid="PALO_ALTO_NETWORKS_ADD_ON_FOR_SPLUNK",
+ title="Palo Alto Networks Add-on for Splunk",
+ version="8.1.1",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/palo-alto-networks-add-on-for-splunk_811.tgz"
  ),
- TestApp(
- uid=3865,
- appid="Zscaler_CIM",
- title="Zscaler Technical Add-On for Splunk",
- version="4.0.3",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/zscaler-technical-add-on-for-splunk_403.tgz"
- ),
+ ),
+ TestApp(
+ uid=3865,
+ appid="Zscaler_CIM",
+ title="Zscaler Technical Add-On for Splunk",
+ version="4.0.3",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/zscaler-technical-add-on-for-splunk_403.tgz"
  ),
- TestApp(
- uid=3719,
- appid="SPLUNK_ADD_ON_FOR_AMAZON_KINESIS_FIREHOSE",
- title="Splunk Add-on for Amazon Kinesis Firehose",
- version="1.3.2",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-kinesis-firehose_132.tgz"
- ),
+ ),
+ TestApp(
+ uid=3719,
+ appid="SPLUNK_ADD_ON_FOR_AMAZON_KINESIS_FIREHOSE",
+ title="Splunk Add-on for Amazon Kinesis Firehose",
+ version="1.3.2",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-kinesis-firehose_132.tgz"
  ),
- TestApp(
- uid=1876,
- appid="Splunk_TA_aws",
- title="Splunk Add-on for AWS",
- version="7.5.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-web-services-aws_750.tgz"
- ),
+ ),
+ TestApp(
+ uid=1876,
+ appid="Splunk_TA_aws",
+ title="Splunk Add-on for AWS",
+ version="7.5.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-web-services-aws_750.tgz"
  ),
- TestApp(
- uid=3088,
- appid="SPLUNK_ADD_ON_FOR_GOOGLE_CLOUD_PLATFORM",
- title="Splunk Add-on for Google Cloud Platform",
- version="4.4.0",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-cloud-platform_440.tgz"
- ),
+ ),
+ TestApp(
+ uid=3088,
+ appid="SPLUNK_ADD_ON_FOR_GOOGLE_CLOUD_PLATFORM",
+ title="Splunk Add-on for Google Cloud Platform",
+ version="4.4.0",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-cloud-platform_440.tgz"
  ),
- TestApp(
- uid=5556,
- appid="SPLUNK_ADD_ON_FOR_GOOGLE_WORKSPACE",
- title="Splunk Add-on for Google Workspace",
- version="2.6.3",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-workspace_263.tgz"
- ),
+ ),
+ TestApp(
+ uid=5556,
+ appid="SPLUNK_ADD_ON_FOR_GOOGLE_WORKSPACE",
+ title="Splunk Add-on for Google Workspace",
+ version="2.6.3",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-workspace_263.tgz"
  ),
- TestApp(
- uid=3110,
- appid="SPLUNK_TA_MICROSOFT_CLOUD_SERVICES",
- title="Splunk Add-on for Microsoft Cloud Services",
- version="5.2.2",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-cloud-services_522.tgz"
- ),
+ ),
+ TestApp(
+ uid=3110,
+ appid="SPLUNK_TA_MICROSOFT_CLOUD_SERVICES",
+ title="Splunk Add-on for Microsoft Cloud Services",
+ version="5.2.2",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-cloud-services_522.tgz"
  ),
- TestApp(
- uid=4055,
- appid="SPLUNK_ADD_ON_FOR_MICROSOFT_OFFICE_365",
- title="Splunk Add-on for Microsoft Office 365",
- version="4.5.1",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-office-365_451.tgz"
- ),
+ ),
+ TestApp(
+ uid=4055,
+ appid="SPLUNK_ADD_ON_FOR_MICROSOFT_OFFICE_365",
+ title="Splunk Add-on for Microsoft Office 365",
+ version="4.5.1",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-office-365_451.tgz"
  ),
- TestApp(
- uid=2890,
- appid="SPLUNK_MACHINE_LEARNING_TOOLKIT",
- title="Splunk Machine Learning Toolkit",
- version="5.4.1",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-machine-learning-toolkit_541.tgz"
- ),
+ ),
+ TestApp(
+ uid=2890,
+ appid="SPLUNK_MACHINE_LEARNING_TOOLKIT",
+ title="Splunk Machine Learning Toolkit",
+ version="5.4.1",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-machine-learning-toolkit_541.tgz"
  ),
- TestApp(
- uid=2734,
- appid="URL_TOOLBOX",
- title="URL Toolbox",
- version="1.9.2",
- hardcoded_path=HttpUrl(
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/url-toolbox_192.tgz"
- ),
+ ),
+ TestApp(
+ uid=2734,
+ appid="URL_TOOLBOX",
+ title="URL Toolbox",
+ version="1.9.2",
+ hardcoded_path=HttpUrl(
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/url-toolbox_192.tgz"
  ),
- ]
+ ),
+ ]
+
683
813
 
  class test_common(build):
- mode:Union[Changes, Selected, All] = Field(All(), union_mode='left_to_right')
- post_test_behavior: PostTestBehavior = Field(default=PostTestBehavior.pause_on_failure, description="Controls what to do when a test completes.\n\n"
- f"'{PostTestBehavior.always_pause.value}' - the state of "
- "the test will always pause after a test, allowing the user to log into the "
- "server and experiment with the search and data before it is removed.\n\n"
- f"'{PostTestBehavior.pause_on_failure.value}' - pause execution ONLY when a test fails. The user may press ENTER in the terminal "
- "running the test to move on to the next test.\n\n"
- f"'{PostTestBehavior.never_pause.value}' - never stop testing, even if a test fails.\n\n"
- "***SPECIAL NOTE FOR CI/CD*** 'never_pause' MUST be used for a test to "
- "run in an unattended manner or in a CI/CD system - otherwise a single failed test "
- "will result in the testing never finishing as the tool waits for input.")
- test_instances:List[Infrastructure] = Field(...)
- enable_integration_testing: bool = Field(default=False, description="Enable integration testing, which REQUIRES Splunk Enterprise Security "
- "to be installed on the server. This checks for a number of different things including generation "
- "of appropriate notables and messages. Please note that this will increase testing time "
- "considerably (by approximately 2-3 minutes per detection).")
- plan_only:bool = Field(default=False, exclude=True, description="WARNING - This is an advanced feature and currently intended for widespread use. "
- "This flag is useful for building your app and generating a test plan to run on different infrastructure. "
- "This flag does not actually perform the test. Instead, it builds validates all content and builds the app(s). "
- "It MUST be used with mode.changes and must run in the context of a git repo.")
- disable_tqdm:bool = Field(default=False, exclude=True, description="The tdqm library (https://github.com/tqdm/tqdm) is used to facilitate a richer,"
- " interactive command line workflow that can display progress bars and status information frequently. "
- "Unfortunately it is incompatible with, or may cause poorly formatted logs, in many CI/CD systems or other unattended environments. "
- "If you are running contentctl in CI/CD, then please set this argument to True. Note that if you are running in a CI/CD context, "
- f"you also MUST set post_test_behavior to {PostTestBehavior.never_pause.value}. Otherwiser, a failed detection will cause"
- "the CI/CD running to pause indefinitely.")
-
- apps: List[TestApp] = Field(default=DEFAULT_APPS, exclude=False, description="List of apps to install in test environment")
-
-
- def dumpCICDPlanAndQuit(self, githash: str, detections:List[Detection]):
+ mode: Union[Changes, Selected, All] = Field(All(), union_mode="left_to_right")
+ post_test_behavior: PostTestBehavior = Field(
+ default=PostTestBehavior.pause_on_failure,
+ description="Controls what to do when a test completes.\n\n"
+ f"'{PostTestBehavior.always_pause}' - the state of "
+ "the test will always pause after a test, allowing the user to log into the "
+ "server and experiment with the search and data before it is removed.\n\n"
+ f"'{PostTestBehavior.pause_on_failure}' - pause execution ONLY when a test fails. The user may press ENTER in the terminal "
+ "running the test to move on to the next test.\n\n"
+ f"'{PostTestBehavior.never_pause}' - never stop testing, even if a test fails.\n\n"
+ "***SPECIAL NOTE FOR CI/CD*** 'never_pause' MUST be used for a test to "
+ "run in an unattended manner or in a CI/CD system - otherwise a single failed test "
+ "will result in the testing never finishing as the tool waits for input.",
+ )
+ test_instances: List[Infrastructure] = Field(...)
+ enable_integration_testing: bool = Field(
+ default=False,
+ description="Enable integration testing, which REQUIRES Splunk Enterprise Security "
+ "to be installed on the server. This checks for a number of different things including generation "
+ "of appropriate notables and messages. Please note that this will increase testing time "
+ "considerably (by approximately 2-3 minutes per detection).",
+ )
+ plan_only: bool = Field(
+ default=False,
+ exclude=True,
+ description="WARNING - This is an advanced feature and currently intended for widespread use. "
+ "This flag is useful for building your app and generating a test plan to run on different infrastructure. "
+ "This flag does not actually perform the test. Instead, it builds validates all content and builds the app(s). "
+ "It MUST be used with mode.changes and must run in the context of a git repo.",
+ )
+ disable_tqdm: bool = Field(
+ default=False,
+ exclude=True,
+ description="The tdqm library (https://github.com/tqdm/tqdm) is used to facilitate a richer,"
+ " interactive command line workflow that can display progress bars and status information frequently. "
+ "Unfortunately it is incompatible with, or may cause poorly formatted logs, in many CI/CD systems or other unattended environments. "
+ "If you are running contentctl in CI/CD, then please set this argument to True. Note that if you are running in a CI/CD context, "
+ f"you also MUST set post_test_behavior to {PostTestBehavior.never_pause}. Otherwiser, a failed detection will cause"
+ "the CI/CD running to pause indefinitely.",
+ )
+
+ apps: List[TestApp] = Field(
+ default=DEFAULT_APPS,
+ exclude=False,
+ description="List of apps to install in test environment",
+ )
+
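Several of the fields above (plan_only, disable_tqdm) are declared with exclude=True. A small, self-contained pydantic v2 sketch of what that buys: the field is still usable at runtime, but it is dropped from model_dump() and therefore from any serialized test plan. The class name below is illustrative only, not a contentctl class.

from pydantic import BaseModel, Field

class ExampleTestSettings(BaseModel):  # illustrative stand-in, not contentctl's test_common
    enable_integration_testing: bool = Field(default=False)
    plan_only: bool = Field(default=False, exclude=True)

settings = ExampleTestSettings(plan_only=True)
print(settings.plan_only)     # True - still available to code that reads the config
print(settings.model_dump())  # {'enable_integration_testing': False} - plan_only is excluded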
+ def dumpCICDPlanAndQuit(self, githash: str, detections: List[Detection]):
  output_file = self.path / "test_plan.yml"
- self.mode = Selected(files=sorted([detection.file_path for detection in detections], key=lambda path: str(path)))
- self.post_test_behavior = PostTestBehavior.never_pause.value
- #required so that CI/CD does not get too much output or hang
+ self.mode = Selected(
+ files=sorted(
+ [detection.file_path for detection in detections],
+ key=lambda path: str(path),
+ )
+ )
+ self.post_test_behavior = PostTestBehavior.never_pause
+ # required so that CI/CD does not get too much output or hang
  self.disable_tqdm = True

- # We will still parse the app, but no need to do enrichments or
+ # We will still parse the app, but no need to do enrichments or
  # output to dist. We have already built it!
  self.build_app = False
  self.build_api = False
  self.enrichments = False
-
+
  self.enable_integration_testing = True

  data = self.model_dump()
-
- #Add the hash of the current commit
- data['githash'] = str(githash)
-
- #Remove some fields that are not relevant
- for k in ['container_settings', 'test_instances']:
+
+ # Add the hash of the current commit
+ data["githash"] = str(githash)
+
+ # Remove some fields that are not relevant
+ for k in ["container_settings", "test_instances"]:
  if k in data:
- del(data[k])
-
-
+ del data[k]

  try:
  YmlWriter.writeYmlFile(str(output_file), data)
- print(f"Successfully wrote a test plan for [{len(self.mode.files)} detections] using [{len(self.apps)} apps] to [{output_file}]")
+ print(
+ f"Successfully wrote a test plan for [{len(self.mode.files)} detections] using [{len(self.apps)} apps] to [{output_file}]"
+ )
  except Exception as e:
  raise Exception(f"Error writing test plan file [{output_file}]: {str(e)}")

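dumpCICDPlanAndQuit above forces CI/CD-safe settings, serializes the model, stamps it with the git hash, prunes irrelevant keys, and writes the result to test_plan.yml. A hedged, standalone sketch of that final write step, using PyYAML directly in place of contentctl's YmlWriter (the helper name and sample data below are illustrative):

import yaml  # stand-in for contentctl's YmlWriter

def write_test_plan(output_file: str, data: dict, githash: str) -> None:
    data = dict(data)
    data["githash"] = str(githash)  # record the commit the plan was built from
    for k in ("container_settings", "test_instances"):
        data.pop(k, None)  # drop fields that are not relevant to the plan
    with open(output_file, "w") as f:
        yaml.safe_dump(data, f, sort_keys=False)

write_test_plan("test_plan.yml", {"mode": {"files": []}, "test_instances": []}, "abc1234")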
-
- def getLocalAppDir(self)->pathlib.Path:
+ def getLocalAppDir(self) -> pathlib.Path:
  # docker really wants absolute paths
  path = self.path / "apps"
  return path.absolute()
-
- def getContainerAppDir(self)->pathlib.Path:
+
+ def getContainerAppDir(self) -> pathlib.Path:
  # docker really wants absolute paths
  return pathlib.Path("/tmp/apps")

- def enterpriseSecurityInApps(self)->bool:
-
+ def enterpriseSecurityInApps(self) -> bool:
  for app in self.apps:
  if app.uid == ENTERPRISE_SECURITY_UID:
  return True
  return False
-
- def commonInformationModelInApps(self)->bool:
+
+ def commonInformationModelInApps(self) -> bool:
  for app in self.apps:
  if app.uid == COMMON_INFORMATION_MODEL_UID:
  return True
- return False
+ return False

- @model_validator(mode='after')
- def ensureCommonInformationModel(self)->Self:
+ @model_validator(mode="after")
+ def ensureCommonInformationModel(self) -> Self:
  if self.commonInformationModelInApps():
  return self
- print(f"INFO: Common Information Model/CIM "
- f"(uid: [{COMMON_INFORMATION_MODEL_UID}]) is not listed in apps.\n"
- f"contentctl test MUST include Common Information Model.\n"
- f"Please note this message is only informational.")
+ print(
+ f"INFO: Common Information Model/CIM "
+ f"(uid: [{COMMON_INFORMATION_MODEL_UID}]) is not listed in apps.\n"
+ f"contentctl test MUST include Common Information Model.\n"
+ f"Please note this message is only informational."
+ )
  return self
-
- @model_validator(mode='after')
- def suppressTQDM(self)->Self:
+
+ @model_validator(mode="after")
+ def suppressTQDM(self) -> Self:
  if self.disable_tqdm:
  tqdm.tqdm.__init__ = partialmethod(tqdm.tqdm.__init__, disable=True)
- if self.post_test_behavior != PostTestBehavior.never_pause.value:
- raise ValueError(f"You have disabled tqdm, presumably because you are "
- f"running in CI/CD or another unattended context.\n"
- f"However, post_test_behavior is set to [{self.post_test_behavior}].\n"
- f"If that is the case, then you MUST set post_test_behavior "
- f"to [{PostTestBehavior.never_pause.value}].\n"
- "Otherwise, if a detection fails in CI/CD, your CI/CD runner will hang forever.")
+ if self.post_test_behavior != PostTestBehavior.never_pause:
+ raise ValueError(
+ f"You have disabled tqdm, presumably because you are "
+ f"running in CI/CD or another unattended context.\n"
+ f"However, post_test_behavior is set to [{self.post_test_behavior}].\n"
+ f"If that is the case, then you MUST set post_test_behavior "
+ f"to [{PostTestBehavior.never_pause}].\n"
+ "Otherwise, if a detection fails in CI/CD, your CI/CD runner will hang forever."
+ )
  return self
-
-
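The suppressTQDM validator above silences every progress bar in the process by patching tqdm's constructor. A minimal sketch of that trick on its own (requires the tqdm package):

from functools import partialmethod

import tqdm

# After this patch, every tqdm progress bar is constructed with disable=True
# and renders nothing, which keeps CI/CD logs clean.
tqdm.tqdm.__init__ = partialmethod(tqdm.tqdm.__init__, disable=True)

for _ in tqdm.tqdm(range(3)):
    pass  # no progress bar output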
 
- @model_validator(mode='after')
- def ensureEnterpriseSecurityForIntegrationTesting(self)->Self:
+ @model_validator(mode="after")
+ def ensureEnterpriseSecurityForIntegrationTesting(self) -> Self:
  if not self.enable_integration_testing:
  return self
  if self.enterpriseSecurityInApps():
  return self
-
- print(f"INFO: enable_integration_testing is [{self.enable_integration_testing}], "
- f"but the Splunk Enterprise Security "
- f"App (uid: [{ENTERPRISE_SECURITY_UID}]) is not listed in apps.\n"
- f"Integration Testing MUST include Enterprise Security.\n"
- f"Please note this message is only informational.")
- return self
-

+ print(
+ f"INFO: enable_integration_testing is [{self.enable_integration_testing}], "
+ f"but the Splunk Enterprise Security "
+ f"App (uid: [{ENTERPRISE_SECURITY_UID}]) is not listed in apps.\n"
+ f"Integration Testing MUST include Enterprise Security.\n"
+ f"Please note this message is only informational."
+ )
+ return self

- @model_validator(mode='after')
- def checkPlanOnlyUse(self)->Self:
- #Ensure that mode is CHANGES
+ @model_validator(mode="after")
+ def checkPlanOnlyUse(self) -> Self:
+ # Ensure that mode is CHANGES
  if self.plan_only and not isinstance(self.mode, Changes):
- raise ValueError("plan_only MUST be used with --mode:changes")
+ raise ValueError("plan_only MUST be used with --mode:changes")
  return self


- def getModeName(self)->str:
- if isinstance(self.mode, All):
- return DetectionTestingMode.all.value
- elif isinstance(self.mode, Changes):
- return DetectionTestingMode.changes.value
- else:
- return DetectionTestingMode.selected.value
-
-
- # TODO (#266): disable the use_enum_values configuration
  class test(test_common):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- container_settings:ContainerSettings = ContainerSettings()
- test_instances: List[Container] = Field([], exclude = True, validate_default=True)
- splunk_api_username: Optional[str] = Field(default=None, exclude = True,description="Splunk API username used for running appinspect or installating apps from Splunkbase")
- splunk_api_password: Optional[str] = Field(default=None, exclude = True, description="Splunk API password used for running appinspect or installaing apps from Splunkbase")
-
-
- def getContainerInfrastructureObjects(self)->Self:
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ container_settings: ContainerSettings = ContainerSettings()
+ test_instances: List[Container] = Field([], exclude=True, validate_default=True)
+ splunk_api_username: Optional[str] = Field(
+ default=None,
+ exclude=True,
+ description="Splunk API username used for running appinspect or installating apps from Splunkbase",
+ )
+ splunk_api_password: Optional[str] = Field(
+ default=None,
+ exclude=True,
+ description="Splunk API password used for running appinspect or installaing apps from Splunkbase",
+ )
+
+ def getContainerInfrastructureObjects(self) -> Self:
  try:
  self.test_instances = self.container_settings.getContainers()
  return self
-
+
  except Exception as e:
  raise ValueError(f"Error constructing container test_instances: {str(e)}")
-
-
-
-
- @model_validator(mode='after')
- def ensureAppsAreGood(self)->Self:
+
+ @model_validator(mode="after")
+ def ensureAppsAreGood(self) -> Self:
  """
- This function ensures that, after the rest of the configuration
+ This function ensures that, after the rest of the configuration
  has been validated, all of the apps are able to be correctly resolved.
  This includes apps that may be sourced from local files, HTTP files,
- and/or Splunkbase.
+ and/or Splunkbase.

  This is NOT a model_post_init function because it does perform some validation,
  even though it does not change the object
@@ -862,29 +1008,34 @@ class test(test_common):

  Returns:
  Self: The test object. No modifications are made during this call.
- """
+ """
  try:
- _ = self.getContainerEnvironmentString(stage_file=False, include_custom_app=False)
+ _ = self.getContainerEnvironmentString(
+ stage_file=False, include_custom_app=False
+ )
  except Exception as e:
  raise Exception(f"Error validating test apps: {str(e)}")
  return self

-
- def getContainerEnvironmentString(self,stage_file:bool=False, include_custom_app:bool=True)->str:
- apps:List[App_Base] = self.apps
+ def getContainerEnvironmentString(
+ self, stage_file: bool = False, include_custom_app: bool = True
+ ) -> str:
+ apps: List[App_Base] = self.apps
  if include_custom_app:
  apps.append(self.app)

- paths = [app.getApp(self,stage_file=stage_file) for app in apps]
+ paths = [app.getApp(self, stage_file=stage_file) for app in apps]

  container_paths = []
  for path in paths:
  if path.startswith(SPLUNKBASE_URL):
  container_paths.append(path)
  else:
- container_paths.append((self.getContainerAppDir()/pathlib.Path(path).name).as_posix())
-
- return ','.join(container_paths)
+ container_paths.append(
+ (self.getContainerAppDir() / pathlib.Path(path).name).as_posix()
+ )
+
+ return ",".join(container_paths)
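getContainerEnvironmentString above passes Splunkbase URLs through untouched and rewrites local app archives to the container's /tmp/apps directory before joining everything into one comma-separated string. An illustrative, standalone version of that mapping; the SPLUNKBASE_URL value and the sample paths are assumptions made for this example only.

import pathlib

SPLUNKBASE_URL = "https://splunkbase.splunk.com/app/"  # assumed prefix, for illustration
CONTAINER_APP_DIR = pathlib.Path("/tmp/apps")

def container_environment_string(paths: list[str]) -> str:
    container_paths = []
    for path in paths:
        if path.startswith(SPLUNKBASE_URL):
            container_paths.append(path)  # Splunkbase downloads are installed by URL
        else:
            # local archives are mounted into the container's app directory
            container_paths.append((CONTAINER_APP_DIR / pathlib.Path(path).name).as_posix())
    return ",".join(container_paths)

print(container_environment_string([
    "https://splunkbase.splunk.com/app/2734/release/1.9.2/download",
    "/local/apps/my_custom_app.tgz",
]))
# https://splunkbase.splunk.com/app/2734/release/1.9.2/download,/tmp/apps/my_custom_app.tgz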
 
  def getAppFilePath(self):
  return self.path / "apps.yml"
@@ -893,101 +1044,143 @@ class test(test_common):
  TEST_ARGS_ENV = "CONTENTCTL_TEST_INFRASTRUCTURES"


- # TODO (#266): disable the use_enum_values configuration
  class test_servers(test_common):
- model_config = ConfigDict(use_enum_values=True,validate_default=True, arbitrary_types_allowed=True)
- test_instances:List[Infrastructure] = Field([],description="Test against one or more preconfigured servers.", validate_default=True)
- server_info:Optional[str] = Field(None, validate_default=True, description='String of pre-configured servers to use for testing. The list MUST be in the format:\n'
- 'address,username,web_ui_port,hec_port,api_port;address_2,username_2,web_ui_port_2,hec_port_2,api_port_2'
- '\nFor example, the following string will use 2 preconfigured test instances:\n'
- '127.0.0.1,firstUser,firstUserPassword,8000,8088,8089;1.2.3.4,secondUser,secondUserPassword,8000,8088,8089\n'
- 'Note that these test_instances may be hosted on the same system, such as localhost/127.0.0.1 or a docker server, or different hosts.\n'
- f'This value may also be passed by setting the environment variable [{TEST_ARGS_ENV}] with the value above.')
-
- @model_validator(mode='before')
+ model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
+ test_instances: List[Infrastructure] = Field(
+ [],
+ description="Test against one or more preconfigured servers.",
+ validate_default=True,
+ )
+ server_info: Optional[str] = Field(
+ None,
+ validate_default=True,
+ description="String of pre-configured servers to use for testing. The list MUST be in the format:\n"
+ "address,username,web_ui_port,hec_port,api_port;address_2,username_2,web_ui_port_2,hec_port_2,api_port_2"
+ "\nFor example, the following string will use 2 preconfigured test instances:\n"
+ "127.0.0.1,firstUser,firstUserPassword,8000,8088,8089;1.2.3.4,secondUser,secondUserPassword,8000,8088,8089\n"
+ "Note that these test_instances may be hosted on the same system, such as localhost/127.0.0.1 or a docker server, or different hosts.\n"
+ f"This value may also be passed by setting the environment variable [{TEST_ARGS_ENV}] with the value above.",
+ )
+
+ @model_validator(mode="before")
  @classmethod
- def parse_config(cls, data:Any, info: ValidationInfo)->Any:
- #Ignore whatever is in the file or defaults, these must be supplied on command line
- #if len(v) != 0:
+ def parse_config(cls, data: Any, info: ValidationInfo) -> Any:
+ # Ignore whatever is in the file or defaults, these must be supplied on command line
+ # if len(v) != 0:
  # return v
-
-
- if isinstance(data.get("server_info"),str) :
+
+ if isinstance(data.get("server_info"), str):
  server_info = data.get("server_info")
- elif isinstance(environ.get(TEST_ARGS_ENV),str):
+ elif isinstance(environ.get(TEST_ARGS_ENV), str):
  server_info = environ.get(TEST_ARGS_ENV)
  else:
- raise ValueError(f"server_info not passed on command line or in environment variable {TEST_ARGS_ENV}")
+ raise ValueError(
+ f"server_info not passed on command line or in environment variable {TEST_ARGS_ENV}"
+ )
+
+ infrastructures: List[Infrastructure] = []

- infrastructures:List[Infrastructure] = []
-
-
  index = 0
- for server in server_info.split(';'):
- address, username, password, web_ui_port, hec_port, api_port = server.split(",")
- infrastructures.append(Infrastructure(splunk_app_username = username, splunk_app_password=password,
- instance_address=address, hec_port = int(hec_port),
- web_ui_port= int(web_ui_port),api_port=int(api_port), instance_name=f"test_server_{index}")
+ for server in server_info.split(";"):
+ address, username, password, web_ui_port, hec_port, api_port = server.split(
+ ","
+ )
+ infrastructures.append(
+ Infrastructure(
+ splunk_app_username=username,
+ splunk_app_password=password,
+ instance_address=address,
+ hec_port=int(hec_port),
+ web_ui_port=int(web_ui_port),
+ api_port=int(api_port),
+ instance_name=f"test_server_{index}",
  )
- index+=1
- data['test_instances'] = infrastructures
+ )
+ index += 1
+ data["test_instances"] = infrastructures
  return data
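parse_config above turns the server_info string into Infrastructure objects. A standalone sketch of that parsing, with a simplified Infrastructure stand-in; note that the parser expects six comma-separated values per server (address, username, password, web_ui_port, hec_port, api_port), one more than the five named in the field description.

from dataclasses import dataclass

@dataclass
class Infrastructure:  # simplified stand-in for contentctl's Infrastructure model
    instance_name: str
    instance_address: str
    splunk_app_username: str
    splunk_app_password: str
    web_ui_port: int
    hec_port: int
    api_port: int

def parse_server_info(server_info: str) -> list[Infrastructure]:
    infrastructures = []
    for index, server in enumerate(server_info.split(";")):
        address, username, password, web_ui_port, hec_port, api_port = server.split(",")
        infrastructures.append(
            Infrastructure(
                instance_name=f"test_server_{index}",
                instance_address=address,
                splunk_app_username=username,
                splunk_app_password=password,
                web_ui_port=int(web_ui_port),
                hec_port=int(hec_port),
                api_port=int(api_port),
            )
        )
    return infrastructures

print(parse_server_info("127.0.0.1,firstUser,firstUserPassword,8000,8088,8089"))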
 
- @field_validator('test_instances',mode='before')
+ @field_validator("test_instances", mode="before")
  @classmethod
- def check_environment_variable_for_config(cls, v:List[Infrastructure]):
+ def check_environment_variable_for_config(cls, v: List[Infrastructure]):
  return v
- #Ignore whatever is in the file or defaults, these must be supplied on command line
- #if len(v) != 0:
+ # Ignore whatever is in the file or defaults, these must be supplied on command line
+ # if len(v) != 0:
  # return v
  TEST_ARGS_ENV = "CONTENTCTL_TEST_INFRASTRUCTURES"
-
-
- #environment variable is present. try to parse it
- infrastructures:List[Infrastructure] = []
- server_info:str|None = environ.get(TEST_ARGS_ENV)
+
+ # environment variable is present. try to parse it
+ infrastructures: List[Infrastructure] = []
+ server_info: str | None = environ.get(TEST_ARGS_ENV)
  if server_info is None:
- raise ValueError(f"test_instances not passed on command line or in environment variable {TEST_ARGS_ENV}")
-
-
+ raise ValueError(
+ f"test_instances not passed on command line or in environment variable {TEST_ARGS_ENV}"
+ )
+
  index = 0
- for server in server_info.split(';'):
- address, username, password, web_ui_port, hec_port, api_port = server.split(",")
- infrastructures.append(Infrastructure(splunk_app_username = username, splunk_app_password=password,
- instance_address=address, hec_port = int(hec_port),
- web_ui_port= int(web_ui_port),api_port=int(api_port), instance_name=f"test_server_{index}")
+ for server in server_info.split(";"):
+ address, username, password, web_ui_port, hec_port, api_port = server.split(
+ ","
+ )
+ infrastructures.append(
+ Infrastructure(
+ splunk_app_username=username,
+ splunk_app_password=password,
+ instance_address=address,
+ hec_port=int(hec_port),
+ web_ui_port=int(web_ui_port),
+ api_port=int(api_port),
+ instance_name=f"test_server_{index}",
  )
- index+=1
+ )
+ index += 1


  class release_notes(Config_Base):
- old_tag:Optional[str] = Field(None, description="Name of the tag to diff against to find new content. "
- "If it is not supplied, then it will be inferred as the "
- "second newest tag at runtime.")
- new_tag:Optional[str] = Field(None, description="Name of the tag containing new content. If it is not supplied,"
- " then it will be inferred as the newest tag at runtime.")
- latest_branch:Optional[str] = Field(None, description="Branch name for which we are generating release notes for")
- compare_against:Optional[str] = Field(default="develop", description="Branch name for which we are comparing the files changes against")
-
- def releaseNotesFilename(self, filename:str)->pathlib.Path:
- #Assume that notes are written to dist/. This does not respect build_dir since that is
- #only a member of build
- p = self.path / "dist"
+ old_tag: Optional[str] = Field(
+ None,
+ description="Name of the tag to diff against to find new content. "
+ "If it is not supplied, then it will be inferred as the "
+ "second newest tag at runtime.",
+ )
+ new_tag: Optional[str] = Field(
+ None,
+ description="Name of the tag containing new content. If it is not supplied,"
+ " then it will be inferred as the newest tag at runtime.",
+ )
+ latest_branch: Optional[str] = Field(
+ None, description="Branch name for which we are generating release notes for"
+ )
+ compare_against: Optional[str] = Field(
+ default="develop",
+ description="Branch name for which we are comparing the files changes against",
+ )
+
+ def releaseNotesFilename(self, filename: str) -> pathlib.Path:
+ # Assume that notes are written to dist/. This does not respect build_dir since that is
+ # only a member of build
+ p = self.path / "dist"
  try:
- p.mkdir(exist_ok=True,parents=True)
- except Exception:
- raise Exception(f"Error making the directory '{p}' to hold release_notes: {str(e)}")
- return p/filename
+ p.mkdir(exist_ok=True, parents=True)
+ except Exception as e:
+ raise Exception(
+ f"Error making the directory '{p}' to hold release_notes: {str(e)}"
+ )
+ return p / filename

- @model_validator(mode='after')
+ @model_validator(mode="after")
  def ensureNewTagOrLatestBranch(self):
- '''
+ """
  Exactly one of latest_branch or new_tag must be defined. otherwise, throw an error
- '''
+ """
  if self.new_tag is not None and self.latest_branch is not None:
- raise ValueError("Both new_tag and latest_branch are defined. EXACTLY one of these MUST be defiend.")
+ raise ValueError(
+ "Both new_tag and latest_branch are defined. EXACTLY one of these MUST be defiend."
+ )
  elif self.new_tag is None and self.latest_branch is None:
- raise ValueError("Neither new_tag nor latest_branch are defined. EXACTLY one of these MUST be defined.")
+ raise ValueError(
+ "Neither new_tag nor latest_branch are defined. EXACTLY one of these MUST be defined."
+ )
  return self
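ensureNewTagOrLatestBranch enforces that exactly one of new_tag or latest_branch is set. The same rule, reduced to a standalone check; the function name and sample values below are illustrative only.

from typing import Optional

def ensure_exactly_one(new_tag: Optional[str], latest_branch: Optional[str]) -> None:
    # True when both are None or both are set - either way the configuration is ambiguous
    if (new_tag is None) == (latest_branch is None):
        raise ValueError("EXACTLY one of new_tag or latest_branch must be defined.")

ensure_exactly_one(new_tag="v5.0.0", latest_branch=None)  # ok
# ensure_exactly_one(None, None) or ensure_exactly_one("v5.0.0", "develop") would raise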
 
  # @model_validator(mode='after')
@@ -997,10 +1190,9 @@ class release_notes(Config_Base):
  # from pygit2 import Commit
  # repo = pygit2.Repository(path=str(self.path))
  # tags = list(repo.references.iterator(references_return_type=pygit2.enums.ReferenceFilter.TAGS))
-
+
  # #Sort all tags by commit time from newest to oldest
  # sorted_tags = sorted(tags, key=lambda tag: repo.lookup_reference(tag.name).peel(Commit).commit_time, reverse=True)
-

  # tags_names:List[str] = [t.shorthand for t in sorted_tags]
  # print(tags_names)
@@ -1015,9 +1207,7 @@ class release_notes(Config_Base):
  # pass
  # else:
  # raise ValueError(f"Unknown error getting new_tag {self.new_tag}")
-
-
-
+
  # if self.old_tag is not None and self.old_tag not in tags_names:
  # raise ValueError(f"The old_tag '{self.new_tag}' was not found in the set name tags for this repo: {tags_names}")
  # elif self.new_tag == self.old_tag:
@@ -1031,15 +1221,18 @@ class release_notes(Config_Base):
  # pass
  # else:
  # raise ValueError(f"Unknown error getting old_tag {self.old_tag}")
-
-
-
+
  # if not tags_names.index(self.new_tag) < tags_names.index(self.old_tag):
  # raise ValueError(f"The new_tag '{self.new_tag}' is not newer than the old_tag '{self.old_tag}'")
-
+
  # if self.latest_branch is not None:
  # if repo.lookup_branch(self.latest_branch) is None:
  # raise ValueError("The latest_branch '{self.latest_branch}' was not found in the repository")
-
-
- # return self
+
+ # return self # raise ValueError(f"The new_tag '{self.new_tag}' is not newer than the old_tag '{self.old_tag}'")
+
+ # if self.latest_branch is not None:
+ # if repo.lookup_branch(self.latest_branch) is None:
+ # raise ValueError("The latest_branch '{self.latest_branch}' was not found in the repository")
+
+ # return self