contentctl 5.0.0a0__py3-none-any.whl → 5.0.0a2__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported public registry. It is provided for informational purposes only.
@@ -1,31 +1,39 @@
 from __future__ import annotations
 
-from os import environ
-from datetime import datetime, UTC
-from typing import Optional, Any, List, Union, Self
-import random
-from enum import StrEnum, auto
 import pathlib
-from urllib.parse import urlparse
+import random
 from abc import ABC, abstractmethod
+from datetime import UTC, datetime
+from enum import StrEnum, auto
 from functools import partialmethod
+from os import environ
+from typing import Any, List, Optional, Self, Union
+from urllib.parse import urlparse
 
-import tqdm
 import semantic_version
+import tqdm
 from pydantic import (
-    BaseModel, Field, field_validator,
-    field_serializer, ConfigDict, DirectoryPath,
-    PositiveInt, FilePath, HttpUrl, AnyUrl, model_validator,
-    ValidationInfo
+    AnyUrl,
+    BaseModel,
+    ConfigDict,
+    DirectoryPath,
+    Field,
+    FilePath,
+    HttpUrl,
+    PositiveInt,
+    ValidationInfo,
+    field_serializer,
+    field_validator,
+    model_validator,
 )
 
-from contentctl.objects.constants import DOWNLOADS_DIRECTORY
-from contentctl.output.yml_writer import YmlWriter
+from contentctl.helper.splunk_app import SplunkApp
 from contentctl.helper.utils import Utils
-from contentctl.objects.enums import PostTestBehavior, DetectionTestingMode
-from contentctl.objects.detection import Detection
 from contentctl.objects.annotated_types import APPID_TYPE
-from contentctl.helper.splunk_app import SplunkApp
+from contentctl.objects.constants import DOWNLOADS_DIRECTORY
+from contentctl.objects.detection import Detection
+from contentctl.objects.enums import PostTestBehavior
+from contentctl.output.yml_writer import YmlWriter
 
 ENTERPRISE_SECURITY_UID = 263
 COMMON_INFORMATION_MODEL_UID = 1621
@@ -33,27 +41,34 @@ COMMON_INFORMATION_MODEL_UID = 1621
 SPLUNKBASE_URL = "https://splunkbase.splunk.com/app/{uid}/release/{version}/download"
 
 
-class App_Base(BaseModel,ABC):
-
-    model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True, extra='forbid')
+class App_Base(BaseModel, ABC):
+    model_config = ConfigDict(
+        validate_default=True, arbitrary_types_allowed=True, extra="forbid"
+    )
     uid: Optional[int] = Field(default=None)
-    title: str = Field(description="Human-readable name used by the app. This can have special characters.")
-    appid: Optional[APPID_TYPE]= Field(default=None,description="Internal name used by your app. "
-        "It may ONLY have characters, numbers, and underscores. No other characters are allowed.")
-    version: str = Field(description="The version of your Content Pack. This must follow semantic versioning guidelines.")
-    description: Optional[str] = Field(default="description of app",description="Free text description of the Content Pack.")
-
-
-
+    title: str = Field(
+        description="Human-readable name used by the app. This can have special characters."
+    )
+    appid: Optional[APPID_TYPE] = Field(
+        default=None,
+        description="Internal name used by your app. "
+        "It may ONLY have characters, numbers, and underscores. No other characters are allowed.",
+    )
+    version: str = Field(
+        description="The version of your Content Pack. This must follow semantic versioning guidelines."
+    )
+    description: Optional[str] = Field(
+        default="description of app",
+        description="Free text description of the Content Pack.",
+    )
 
-    def getSplunkbasePath(self)->HttpUrl:
+    def getSplunkbasePath(self) -> HttpUrl:
         return HttpUrl(SPLUNKBASE_URL.format(uid=self.uid, release=self.version))
 
     @abstractmethod
-    def getApp(self, config:test, stage_file:bool=False)->str:
-        ...
+    def getApp(self, config: test, stage_file: bool = False) -> str: ...
 
-    def ensureAppPathExists(self, config:test, stage_file:bool=False):
+    def ensureAppPathExists(self, config: test, stage_file: bool = False):
         if stage_file:
             if not config.getLocalAppDir().exists():
                 config.getLocalAppDir().mkdir(parents=True)
@@ -61,30 +76,36 @@ class App_Base(BaseModel,ABC):
 
 class TestApp(App_Base):
     model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
-    hardcoded_path: Optional[Union[FilePath,HttpUrl]] = Field(default=None, description="This may be a relative or absolute link to a file OR an HTTP URL linking to your app.")
-
+    hardcoded_path: Optional[Union[FilePath, HttpUrl]] = Field(
+        default=None,
+        description="This may be a relative or absolute link to a file OR an HTTP URL linking to your app.",
+    )
 
-    @field_serializer('hardcoded_path',when_used='always')
-    def serialize_path(path: Union[AnyUrl, pathlib.Path])->str:
+    @field_serializer("hardcoded_path", when_used="always")
+    def serialize_path(path: Union[AnyUrl, pathlib.Path]) -> str:
         return str(path)
 
-    def getApp(self, config:test,stage_file:bool=False)->str:
-        #If the apps directory does not exist, then create it
-        self.ensureAppPathExists(config,stage_file)
+    def getApp(self, config: test, stage_file: bool = False) -> str:
+        # If the apps directory does not exist, then create it
+        self.ensureAppPathExists(config, stage_file)
 
-        if config.splunk_api_password is not None and config.splunk_api_username is not None:
+        if (
+            config.splunk_api_password is not None
+            and config.splunk_api_username is not None
+        ):
             if self.version is not None and self.uid is not None:
-                return str(self.getSplunkbasePath())
+                return str(self.getSplunkbasePath())
             if self.version is None or self.uid is None:
-                print(f"Not downloading {self.title} from Splunkbase since uid[{self.uid}] AND version[{self.version}] MUST be defined")
-
-
+                print(
+                    f"Not downloading {self.title} from Splunkbase since uid[{self.uid}] AND version[{self.version}] MUST be defined"
+                )
+
         elif isinstance(self.hardcoded_path, pathlib.Path):
             destination = config.getLocalAppDir() / self.hardcoded_path.name
             if stage_file:
-                Utils.copy_local_file(str(self.hardcoded_path),
-                                      str(destination),
-                                      verbose_print=True)
+                Utils.copy_local_file(
+                    str(self.hardcoded_path), str(destination), verbose_print=True
+                )
 
         elif isinstance(self.hardcoded_path, AnyUrl):
             file_url_string = str(self.hardcoded_path)
@@ -94,7 +115,7 @@ class TestApp(App_Base):
             Utils.download_file_from_http(file_url_string, str(destination))
         else:
             raise Exception(f"Unknown path for app '{self.title}'")
-
+
         return str(destination)
 
 
@@ -102,15 +123,34 @@ class CustomApp(App_Base):
     model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
     # Fields required for app.conf based on
     # https://docs.splunk.com/Documentation/Splunk/9.0.4/Admin/Appconf
-    uid: int = Field(ge=2, lt=100000, default_factory=lambda:random.randint(20000,100000))
-    title: str = Field(default="Content Pack",description="Human-readable name used by the app. This can have special characters.")
-    appid: APPID_TYPE = Field(default="ContentPack",description="Internal name used by your app. "
-        "It may ONLY have characters, numbers, and underscores. No other characters are allowed.")
-    version: str = Field(default="0.0.1",description="The version of your Content Pack. This must follow semantic versioning guidelines.", validate_default=True)
-
-    prefix: str = Field(default="ContentPack",description="A short prefix to easily identify all your content.")
-    build: int = Field(exclude=True, default=int(datetime.now(UTC).strftime("%Y%m%d%H%M%S")), validate_default=True,
-        description="Build number for your app. This will always be a number that corresponds to the time of the build in the format YYYYMMDDHHMMSS")
+    uid: int = Field(
+        ge=2, lt=100000, default_factory=lambda: random.randint(20000, 100000)
+    )
+    title: str = Field(
+        default="Content Pack",
+        description="Human-readable name used by the app. This can have special characters.",
+    )
+    appid: APPID_TYPE = Field(
+        default="ContentPack",
+        description="Internal name used by your app. "
+        "It may ONLY have characters, numbers, and underscores. No other characters are allowed.",
+    )
+    version: str = Field(
+        default="0.0.1",
+        description="The version of your Content Pack. This must follow semantic versioning guidelines.",
+        validate_default=True,
+    )
+
+    prefix: str = Field(
+        default="ContentPack",
+        description="A short prefix to easily identify all your content.",
+    )
+    build: int = Field(
+        exclude=True,
+        default=int(datetime.now(UTC).strftime("%Y%m%d%H%M%S")),
+        validate_default=True,
+        description="Build number for your app. This will always be a number that corresponds to the time of the build in the format YYYYMMDDHHMMSS",
+    )
     # id has many restrictions:
     # * Omit this setting for apps that are for internal use only and not intended
     # for upload to Splunkbase.
@@ -126,161 +166,217 @@ class CustomApp(App_Base):
126
166
  # * must not be any of the following names: CON, PRN, AUX, NUL,
127
167
  # COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9,
128
168
  # LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, LPT9
129
-
130
- label: str = Field(default="Custom Splunk Content Pack",description="This is the app name that shows in the launcher.")
131
- author_name: str = Field(default="author name",description="Name of the Content Pack Author.")
132
- author_email: str = Field(default="author@contactemailaddress.com",description="Contact email for the Content Pack Author")
133
- author_company: str = Field(default="author company",description="Name of the company who has developed the Content Pack")
134
- description: str = Field(default="description of app",description="Free text description of the Content Pack.")
135
169
 
170
+ label: str = Field(
171
+ default="Custom Splunk Content Pack",
172
+ description="This is the app name that shows in the launcher.",
173
+ )
174
+ author_name: str = Field(
175
+ default="author name", description="Name of the Content Pack Author."
176
+ )
177
+ author_email: str = Field(
178
+ default="author@contactemailaddress.com",
179
+ description="Contact email for the Content Pack Author",
180
+ )
181
+ author_company: str = Field(
182
+ default="author company",
183
+ description="Name of the company who has developed the Content Pack",
184
+ )
185
+ description: str = Field(
186
+ default="description of app",
187
+ description="Free text description of the Content Pack.",
188
+ )
136
189
 
137
- @field_validator('version')
190
+ @field_validator("version")
138
191
  def validate_version(cls, v, values):
139
192
  try:
140
193
  _ = semantic_version.Version(v)
141
194
  except Exception as e:
142
- raise(ValueError(f"The specified version does not follow the semantic versioning spec (https://semver.org/). {str(e)}"))
195
+ raise (
196
+ ValueError(
197
+ f"The specified version does not follow the semantic versioning spec (https://semver.org/). {str(e)}"
198
+ )
199
+ )
143
200
  return v
144
-
145
- #Build will ALWAYS be the current utc timestamp
146
- @field_validator('build')
201
+
202
+ # Build will ALWAYS be the current utc timestamp
203
+ @field_validator("build")
147
204
  def validate_build(cls, v, values):
148
205
  return int(datetime.utcnow().strftime("%Y%m%d%H%M%S"))
149
-
150
- def getApp(self, config:test, stage_file=True)->str:
151
- self.ensureAppPathExists(config,stage_file)
152
-
153
- destination = config.getLocalAppDir() / (config.getPackageFilePath(include_version=True).name)
206
+
207
+ def getApp(self, config: test, stage_file=True) -> str:
208
+ self.ensureAppPathExists(config, stage_file)
209
+
210
+ destination = config.getLocalAppDir() / (
211
+ config.getPackageFilePath(include_version=True).name
212
+ )
154
213
  if stage_file:
155
- Utils.copy_local_file(str(config.getPackageFilePath(include_version=True)),
156
- str(destination),
157
- verbose_print=True)
214
+ Utils.copy_local_file(
215
+ str(config.getPackageFilePath(include_version=True)),
216
+ str(destination),
217
+ verbose_print=True,
218
+ )
158
219
  return str(destination)
159
-
220
+
221
+
160
222
  class Config_Base(BaseModel):
161
223
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
162
224
 
163
- path: DirectoryPath = Field(default=DirectoryPath("."), description="The root of your app.")
164
- app:CustomApp = Field(default_factory=CustomApp)
165
- verbose:bool = Field(default=False, description="Enable verbose error logging, including a stacktrace. "
166
- "This option makes debugging contentctl errors much easier, but produces way more "
167
- "output than is useful under most uses cases. "
168
- "Please use this flag if you are submitting a bug report or issue on GitHub.")
169
-
170
- @field_serializer('path',when_used='always')
171
- def serialize_path(path: DirectoryPath)->str:
225
+ path: DirectoryPath = Field(
226
+ default=DirectoryPath("."), description="The root of your app."
227
+ )
228
+ app: CustomApp = Field(default_factory=CustomApp)
229
+ verbose: bool = Field(
230
+ default=False,
231
+ description="Enable verbose error logging, including a stacktrace. "
232
+ "This option makes debugging contentctl errors much easier, but produces way more "
233
+ "output than is useful under most uses cases. "
234
+ "Please use this flag if you are submitting a bug report or issue on GitHub.",
235
+ )
236
+
237
+ @field_serializer("path", when_used="always")
238
+ def serialize_path(path: DirectoryPath) -> str:
172
239
  return str(path)
173
240
 
241
+
174
242
  class init(Config_Base):
175
243
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
176
- bare: bool = Field(default=False, description="contentctl normally provides some some example content "
177
- "(macros, stories, data_sources, and/or analytic stories). This option disables "
178
- "initialization with that additional contnet. Note that even if --bare is used, it "
179
- "init will still create the directory structure of the app, "
180
- "include the app_template directory with default content, and content in "
181
- "the deployment/ directory (since it is not yet easily customizable).")
244
+ bare: bool = Field(
245
+ default=False,
246
+ description="contentctl normally provides some some example content "
247
+ "(macros, stories, data_sources, and/or analytic stories). This option disables "
248
+ "initialization with that additional contnet. Note that even if --bare is used, it "
249
+ "init will still create the directory structure of the app, "
250
+ "include the app_template directory with default content, and content in "
251
+ "the deployment/ directory (since it is not yet easily customizable).",
252
+ )
182
253
 
183
254
 
184
255
  class validate(Config_Base):
185
256
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
186
- enrichments: bool = Field(default=False, description="Enable MITRE, APP, and CVE Enrichments. "\
187
- "This is useful when outputting a release build "\
188
- "and validating these values, but should otherwise "\
189
- "be avoided for performance reasons.")
190
- build_app: bool = Field(default=True, description="Should an app be built and output in the build_path?")
191
- build_api: bool = Field(default=False, description="Should api objects be built and output in the build_path?")
192
- data_source_TA_validation: bool = Field(default=False, description="Validate latest TA information from Splunkbase")
257
+ enrichments: bool = Field(
258
+ default=False,
259
+ description="Enable MITRE, APP, and CVE Enrichments. "
260
+ "This is useful when outputting a release build "
261
+ "and validating these values, but should otherwise "
262
+ "be avoided for performance reasons.",
263
+ )
264
+ build_app: bool = Field(
265
+ default=True, description="Should an app be built and output in the build_path?"
266
+ )
267
+ build_api: bool = Field(
268
+ default=False,
269
+ description="Should api objects be built and output in the build_path?",
270
+ )
271
+ data_source_TA_validation: bool = Field(
272
+ default=False, description="Validate latest TA information from Splunkbase"
273
+ )
193
274
 
194
275
  @property
195
- def external_repos_path(self)->pathlib.Path:
196
- return self.path/"external_repos"
276
+ def external_repos_path(self) -> pathlib.Path:
277
+ return self.path / "external_repos"
197
278
 
198
- @property
199
- def mitre_cti_repo_path(self)->pathlib.Path:
200
- return self.external_repos_path/"cti"
279
+ @property
280
+ def mitre_cti_repo_path(self) -> pathlib.Path:
281
+ return self.external_repos_path / "cti"
201
282
 
202
283
  @property
203
284
  def atomic_red_team_repo_path(self):
204
- return self.external_repos_path/"atomic-red-team"
285
+ return self.external_repos_path / "atomic-red-team"
205
286
 
206
287
  @model_validator(mode="after")
207
- def ensureEnrichmentReposPresent(self)->Self:
208
- '''
209
- Ensures that the enrichments repos, the atomic red team repo and the
288
+ def ensureEnrichmentReposPresent(self) -> Self:
289
+ """
290
+ Ensures that the enrichments repos, the atomic red team repo and the
210
291
  mitre attack enrichment repo, are present at the inded path.
211
292
  Raises a detailed exception if either of these are not present
212
293
  when enrichments are enabled.
213
- '''
294
+ """
214
295
  if not self.enrichments:
215
296
  return self
216
297
  # If enrichments are enabled, ensure that all of the
217
298
  # enrichment directories exist
218
- missing_repos:list[str] = []
299
+ missing_repos: list[str] = []
219
300
  if not self.atomic_red_team_repo_path.is_dir():
220
- missing_repos.append(f"https://github.com/redcanaryco/atomic-red-team {self.atomic_red_team_repo_path}")
301
+ missing_repos.append(
302
+ f"https://github.com/redcanaryco/atomic-red-team {self.atomic_red_team_repo_path}"
303
+ )
221
304
 
222
305
  if not self.mitre_cti_repo_path.is_dir():
223
- missing_repos.append(f"https://github.com/mitre/cti {self.mitre_cti_repo_path}")
224
-
225
- if len(missing_repos) > 0:
226
- msg_list = ["The following repositories, which are required for enrichment, have not "
227
- f"been checked out to the {self.external_repos_path} directory. "
228
- "Please check them out using the following commands:"]
229
- msg_list.extend([f"git clone --single-branch {repo_string}" for repo_string in missing_repos])
230
- msg = '\n\t'.join(msg_list)
306
+ missing_repos.append(
307
+ f"https://github.com/mitre/cti {self.mitre_cti_repo_path}"
308
+ )
309
+
310
+ if len(missing_repos) > 0:
311
+ msg_list = [
312
+ "The following repositories, which are required for enrichment, have not "
313
+ f"been checked out to the {self.external_repos_path} directory. "
314
+ "Please check them out using the following commands:"
315
+ ]
316
+ msg_list.extend(
317
+ [
318
+ f"git clone --single-branch {repo_string}"
319
+ for repo_string in missing_repos
320
+ ]
321
+ )
322
+ msg = "\n\t".join(msg_list)
231
323
  raise FileNotFoundError(msg)
232
324
  return self
233
325
 
326
+
234
327
  class report(validate):
235
- #reporting takes no extra args, but we define it here so that it can be a mode on the command line
236
- def getReportingPath(self)->pathlib.Path:
237
- return self.path/"reporting/"
328
+ # reporting takes no extra args, but we define it here so that it can be a mode on the command line
329
+ def getReportingPath(self) -> pathlib.Path:
330
+ return self.path / "reporting/"
238
331
 
239
332
 
240
333
  class build(validate):
241
334
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
242
- build_path: DirectoryPath = Field(default=DirectoryPath("dist/"), title="Target path for all build outputs")
335
+ build_path: DirectoryPath = Field(
336
+ default=DirectoryPath("dist/"), title="Target path for all build outputs"
337
+ )
243
338
 
244
- @field_serializer('build_path',when_used='always')
245
- def serialize_build_path(path: DirectoryPath)->str:
339
+ @field_serializer("build_path", when_used="always")
340
+ def serialize_build_path(path: DirectoryPath) -> str:
246
341
  return str(path)
247
342
 
248
- @field_validator('build_path',mode='before')
343
+ @field_validator("build_path", mode="before")
249
344
  @classmethod
250
- def ensure_build_path(cls, v:Union[str,DirectoryPath]):
251
- '''
345
+ def ensure_build_path(cls, v: Union[str, DirectoryPath]):
346
+ """
252
347
  If the build path does not exist, then create it.
253
348
  If the build path is actually a file, then raise a descriptive
254
349
  exception.
255
- '''
256
- if isinstance(v,str):
350
+ """
351
+ if isinstance(v, str):
257
352
  v = pathlib.Path(v)
258
353
  if v.is_dir():
259
354
  return v
260
355
  elif v.is_file():
261
- raise ValueError(f"Build path {v} must be a directory, but instead it is a file")
356
+ raise ValueError(
357
+ f"Build path {v} must be a directory, but instead it is a file"
358
+ )
262
359
  elif not v.exists():
263
360
  v.mkdir(parents=True)
264
361
  return v
265
-
266
- def getBuildDir(self)->pathlib.Path:
362
+
363
+ def getBuildDir(self) -> pathlib.Path:
267
364
  return self.path / self.build_path
268
365
 
269
- def getPackageDirectoryPath(self)->pathlib.Path:
270
- return self.getBuildDir() / f"{self.app.appid}"
271
-
366
+ def getPackageDirectoryPath(self) -> pathlib.Path:
367
+ return self.getBuildDir() / f"{self.app.appid}"
272
368
 
273
- def getPackageFilePath(self, include_version:bool=False)->pathlib.Path:
369
+ def getPackageFilePath(self, include_version: bool = False) -> pathlib.Path:
274
370
  if include_version:
275
371
  return self.getBuildDir() / f"{self.app.appid}-{self.app.version}.tar.gz"
276
372
  else:
277
373
  return self.getBuildDir() / f"{self.app.appid}-latest.tar.gz"
278
374
 
279
- def getAPIPath(self)->pathlib.Path:
375
+ def getAPIPath(self) -> pathlib.Path:
280
376
  return self.getBuildDir() / "api"
281
377
 
282
- def getAppTemplatePath(self)->pathlib.Path:
283
- return self.path/"app_template"
378
+ def getAppTemplatePath(self) -> pathlib.Path:
379
+ return self.path / "app_template"
284
380
 
285
381
 
286
382
  class StackType(StrEnum):
@@ -289,20 +385,19 @@ class StackType(StrEnum):
 
 
 class inspect(build):
-
     splunk_api_username: str = Field(
         description="Splunk API username used for appinspect and Splunkbase downloads."
     )
     splunk_api_password: str = Field(
         exclude=True,
-        description="Splunk API password used for appinspect and Splunkbase downloads."
+        description="Splunk API password used for appinspect and Splunkbase downloads.",
     )
     enable_metadata_validation: bool = Field(
         default=False,
         description=(
             "Flag indicating whether detection metadata validation and versioning enforcement "
             "should be enabled."
-        )
+        ),
     )
     suppress_missing_content_exceptions: bool = Field(
         default=False,
@@ -312,15 +407,15 @@ class inspect(build):
             "is not accidentally removed. In order to support testing both public and private "
             "content, this warning can be suppressed. If it is suppressed, it will still be "
             "printed out as a warning."
-        )
+        ),
     )
     enrichments: bool = Field(
         default=True,
         description=(
             "[NOTE: enrichments must be ENABLED for inspect to run. Please adjust your config "
             f"or CLI invocation appropriately] {validate.model_fields['enrichments'].description}"
-        )
-    )
+        ),
+    )
     # TODO (cmcginley): wording should change here if we want to be able to download any app from
     # Splunkbase
     previous_build: str | None = Field(
@@ -328,13 +423,15 @@ class inspect(build):
         description=(
             "Local path to the previous app build for metatdata validation and versioning "
             "enforcement (defaults to the latest release of the app published on Splunkbase)."
-        )
+        ),
     )
     stack_type: StackType = Field(description="The type of your Splunk Cloud Stack")
 
     @field_validator("enrichments", mode="after")
     @classmethod
-    def validate_needed_flags_metadata_validation(cls, v: bool, info: ValidationInfo) -> bool:
+    def validate_needed_flags_metadata_validation(
+        cls, v: bool, info: ValidationInfo
+    ) -> bool:
         """
         Validates that `enrichments` is True for the inspect action
 
@@ -348,7 +445,9 @@ class inspect(build):
         """
         # Enforce that `enrichments` is True for the inspect action
         if v is False:
-            raise ValueError("Field `enrichments` must be True for the `inspect` action")
+            raise ValueError(
+                "Field `enrichments` must be True for the `inspect` action"
+            )
 
         return v
 
@@ -374,9 +473,11 @@ class inspect(build):
                 username=self.splunk_api_username,
                 password=self.splunk_api_password,
                 is_dir=True,
-                overwrite=True
+                overwrite=True,
+            )
+            print(
+                f"Latest release downloaded from Splunkbase to: {previous_build_path}"
             )
-            print(f"Latest release downloaded from Splunkbase to: {previous_build_path}")
             self.previous_build = str(previous_build_path)
         return pathlib.Path(previous_build_path)
 
@@ -387,22 +488,36 @@ class NewContentType(StrEnum):
 
 
 class new(Config_Base):
-    type: NewContentType = Field(default=NewContentType.detection, description="Specify the type of content you would like to create.")
+    type: NewContentType = Field(
+        default=NewContentType.detection,
+        description="Specify the type of content you would like to create.",
+    )
 
 
 class deploy_acs(inspect):
     model_config = ConfigDict(validate_default=False, arbitrary_types_allowed=True)
-    #ignore linter error
-    splunk_cloud_jwt_token: str = Field(exclude=True, description="Splunk JWT used for performing ACS operations on a Splunk Cloud Instance")
+    # ignore linter error
+    splunk_cloud_jwt_token: str = Field(
+        exclude=True,
+        description="Splunk JWT used for performing ACS operations on a Splunk Cloud Instance",
+    )
     splunk_cloud_stack: str = Field(description="The name of your Splunk Cloud Stack")
 
 
 class Infrastructure(BaseModel):
     model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
-    splunk_app_username:str = Field(default="admin", description="Username for logging in to your Splunk Server")
-    splunk_app_password:str = Field(exclude=True, default="password", description="Password for logging in to your Splunk Server.")
-    instance_address:str = Field(..., description="Address of your splunk server.")
-    hec_port: int = Field(default=8088, gt=1, lt=65536, title="HTTP Event Collector Port")
+    splunk_app_username: str = Field(
+        default="admin", description="Username for logging in to your Splunk Server"
+    )
+    splunk_app_password: str = Field(
+        exclude=True,
+        default="password",
+        description="Password for logging in to your Splunk Server.",
+    )
+    instance_address: str = Field(..., description="Address of your splunk server.")
+    hec_port: int = Field(
+        default=8088, gt=1, lt=65536, title="HTTP Event Collector Port"
+    )
     web_ui_port: int = Field(default=8000, gt=1, lt=65536, title="Web UI Port")
     api_port: int = Field(default=8089, gt=1, lt=65536, title="REST API Port")
     instance_name: str = Field(...)
@@ -410,431 +525,480 @@ class Infrastructure(BaseModel):
410
525
 
411
526
  class Container(Infrastructure):
412
527
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
413
- instance_address:str = Field(default="localhost", description="Address of your splunk server.")
528
+ instance_address: str = Field(
529
+ default="localhost", description="Address of your splunk server."
530
+ )
414
531
 
415
532
 
416
533
  class ContainerSettings(BaseModel):
417
534
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
418
- leave_running: bool = Field(default=True, description="Leave container running after it is first "
419
- "set up to speed up subsequent test runs.")
420
- num_containers: PositiveInt = Field(default=1, description="Number of containers to start in parallel. "
421
- "Please note that each container is quite expensive to run. It is not "
422
- "recommended to run more than 4 containers unless you have a very "
423
- "well-resourced environment.")
424
- full_image_path:str = Field(default="registry.hub.docker.com/splunk/splunk:latest",
425
- title="Full path to the container image to be used")
426
-
427
- def getContainers(self)->List[Container]:
535
+ leave_running: bool = Field(
536
+ default=True,
537
+ description="Leave container running after it is first "
538
+ "set up to speed up subsequent test runs.",
539
+ )
540
+ num_containers: PositiveInt = Field(
541
+ default=1,
542
+ description="Number of containers to start in parallel. "
543
+ "Please note that each container is quite expensive to run. It is not "
544
+ "recommended to run more than 4 containers unless you have a very "
545
+ "well-resourced environment.",
546
+ )
547
+ full_image_path: str = Field(
548
+ default="registry.hub.docker.com/splunk/splunk:9.3",
549
+ title="Full path to the container image to be used. We are currently pinned to 9.3 as we resolve an issue with waiting to run until app installation completes.",
550
+ )
551
+
552
+ def getContainers(self) -> List[Container]:
428
553
  containers = []
429
554
  for i in range(self.num_containers):
430
- containers.append(Container(instance_name="contentctl_{}".format(i),
431
- web_ui_port=8000+i, hec_port=8088+(i*2), api_port=8089+(i*2)))
555
+ containers.append(
556
+ Container(
557
+ instance_name="contentctl_{}".format(i),
558
+ web_ui_port=8000 + i,
559
+ hec_port=8088 + (i * 2),
560
+ api_port=8089 + (i * 2),
561
+ )
562
+ )
432
563
 
433
564
  return containers
434
565
 
435
566
 
436
567
  class All(BaseModel):
437
- #Doesn't need any extra logic
438
- mode_name:str = "All"
568
+ # Doesn't need any extra logic
569
+ mode_name: str = "All"
439
570
  pass
440
571
 
441
572
 
442
573
  class Changes(BaseModel):
443
574
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
444
575
  mode_name: str = "Changes"
445
- target_branch:str = Field(...,description="The target branch to diff against. Note that this includes uncommitted changes in the working directory as well.")
576
+ target_branch: str = Field(
577
+ ...,
578
+ description="The target branch to diff against. Note that this includes uncommitted changes in the working directory as well.",
579
+ )
446
580
 
447
581
 
448
582
  class Selected(BaseModel):
449
583
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
450
- mode_name:str = "Selected"
451
- files:List[FilePath] = Field(...,description="List of detection files to test, separated by spaces.")
584
+ mode_name: str = "Selected"
585
+ files: List[FilePath] = Field(
586
+ ..., description="List of detection files to test, separated by spaces."
587
+ )
452
588
 
453
- @field_serializer('files',when_used='always')
454
- def serialize_path(paths: List[FilePath])->List[str]:
589
+ @field_serializer("files", when_used="always")
590
+ def serialize_path(paths: List[FilePath]) -> List[str]:
455
591
  return [str(path) for path in paths]
456
592
 
457
- DEFAULT_APPS:List[TestApp] = [
458
- TestApp(
459
- uid=1621,
460
- appid="Splunk_SA_CIM",
461
- title="Splunk Common Information Model (CIM)",
462
- version="5.2.0",
463
- hardcoded_path=HttpUrl(
464
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-common-information-model-cim_520.tgz"
465
- ),
466
- ),
467
- TestApp(
468
- uid=6553,
469
- appid="Splunk_TA_okta_identity_cloud",
470
- title="Splunk Add-on for Okta Identity Cloud",
471
- version="2.1.0",
472
- hardcoded_path=HttpUrl(
473
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-okta-identity-cloud_210.tgz"
474
- ),
593
+
594
+ DEFAULT_APPS: List[TestApp] = [
595
+ TestApp(
596
+ uid=1621,
597
+ appid="Splunk_SA_CIM",
598
+ title="Splunk Common Information Model (CIM)",
599
+ version="5.2.0",
600
+ hardcoded_path=HttpUrl(
601
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-common-information-model-cim_520.tgz"
602
+ ),
603
+ ),
604
+ TestApp(
605
+ uid=6553,
606
+ appid="Splunk_TA_okta_identity_cloud",
607
+ title="Splunk Add-on for Okta Identity Cloud",
608
+ version="2.1.0",
609
+ hardcoded_path=HttpUrl(
610
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-okta-identity-cloud_210.tgz"
475
611
  ),
476
- TestApp(
477
- uid=6176,
478
- appid="Splunk_TA_linux_sysmon",
479
- title="Add-on for Linux Sysmon",
480
- version="1.0.4",
481
- hardcoded_path=HttpUrl(
482
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/add-on-for-linux-sysmon_104.tgz"
483
- ),
612
+ ),
613
+ TestApp(
614
+ uid=6176,
615
+ appid="Splunk_TA_linux_sysmon",
616
+ title="Add-on for Linux Sysmon",
617
+ version="1.0.4",
618
+ hardcoded_path=HttpUrl(
619
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/add-on-for-linux-sysmon_104.tgz"
484
620
  ),
485
- TestApp(
486
- appid="Splunk_FIX_XMLWINEVENTLOG_HEC_PARSING",
487
- title="Splunk Fix XmlWinEventLog HEC Parsing",
488
- version="0.1",
489
- description="This TA is required for replaying Windows Data into the Test Environment. The Default TA does not include logic for properly splitting multiple log events in a single file. In production environments, this logic is applied by the Universal Forwarder.",
490
- hardcoded_path=HttpUrl(
491
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/Splunk_TA_fix_windows.tgz"
492
- ),
621
+ ),
622
+ TestApp(
623
+ appid="Splunk_FIX_XMLWINEVENTLOG_HEC_PARSING",
624
+ title="Splunk Fix XmlWinEventLog HEC Parsing",
625
+ version="0.1",
626
+ description="This TA is required for replaying Windows Data into the Test Environment. The Default TA does not include logic for properly splitting multiple log events in a single file. In production environments, this logic is applied by the Universal Forwarder.",
627
+ hardcoded_path=HttpUrl(
628
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/Splunk_TA_fix_windows.tgz"
493
629
  ),
494
- TestApp(
495
- uid=742,
496
- appid="SPLUNK_ADD_ON_FOR_MICROSOFT_WINDOWS",
497
- title="Splunk Add-on for Microsoft Windows",
498
- version="8.8.0",
499
- hardcoded_path=HttpUrl(
500
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-windows_880.tgz"
501
- ),
630
+ ),
631
+ TestApp(
632
+ uid=742,
633
+ appid="SPLUNK_ADD_ON_FOR_MICROSOFT_WINDOWS",
634
+ title="Splunk Add-on for Microsoft Windows",
635
+ version="8.8.0",
636
+ hardcoded_path=HttpUrl(
637
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-windows_880.tgz"
502
638
  ),
503
- TestApp(
504
- uid=5709,
505
- appid="Splunk_TA_microsoft_sysmon",
506
- title="Splunk Add-on for Sysmon",
507
- version="4.0.0",
508
- hardcoded_path=HttpUrl(
509
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-sysmon_400.tgz"
510
- ),
639
+ ),
640
+ TestApp(
641
+ uid=5709,
642
+ appid="Splunk_TA_microsoft_sysmon",
643
+ title="Splunk Add-on for Sysmon",
644
+ version="4.0.0",
645
+ hardcoded_path=HttpUrl(
646
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-sysmon_400.tgz"
511
647
  ),
512
- TestApp(
513
- uid=833,
514
- appid="Splunk_TA_nix",
515
- title="Splunk Add-on for Unix and Linux",
516
- version="9.0.0",
517
- hardcoded_path=HttpUrl(
518
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-unix-and-linux_900.tgz"
519
- ),
648
+ ),
649
+ TestApp(
650
+ uid=833,
651
+ appid="Splunk_TA_nix",
652
+ title="Splunk Add-on for Unix and Linux",
653
+ version="9.0.0",
654
+ hardcoded_path=HttpUrl(
655
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-unix-and-linux_900.tgz"
520
656
  ),
521
- TestApp(
522
- uid=5579,
523
- appid="Splunk_TA_CrowdStrike_FDR",
524
- title="Splunk Add-on for CrowdStrike FDR",
525
- version="1.5.0",
526
- hardcoded_path=HttpUrl(
527
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-crowdstrike-fdr_150.tgz"
528
- ),
657
+ ),
658
+ TestApp(
659
+ uid=5579,
660
+ appid="Splunk_TA_CrowdStrike_FDR",
661
+ title="Splunk Add-on for CrowdStrike FDR",
662
+ version="1.5.0",
663
+ hardcoded_path=HttpUrl(
664
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-crowdstrike-fdr_150.tgz"
529
665
  ),
530
- TestApp(
531
- uid=3185,
532
- appid="SPLUNK_TA_FOR_IIS",
533
- title="Splunk Add-on for Microsoft IIS",
534
- version="1.3.0",
535
- hardcoded_path=HttpUrl(
536
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-iis_130.tgz"
537
- ),
666
+ ),
667
+ TestApp(
668
+ uid=3185,
669
+ appid="SPLUNK_TA_FOR_IIS",
670
+ title="Splunk Add-on for Microsoft IIS",
671
+ version="1.3.0",
672
+ hardcoded_path=HttpUrl(
673
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-iis_130.tgz"
538
674
  ),
539
- TestApp(
540
- uid=4242,
541
- appid="SPLUNK_TA_FOR_SURICATA",
542
- title="TA for Suricata",
543
- version="2.3.4",
544
- hardcoded_path=HttpUrl(
545
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-suricata_234.tgz"
546
- ),
675
+ ),
676
+ TestApp(
677
+ uid=4242,
678
+ appid="SPLUNK_TA_FOR_SURICATA",
679
+ title="TA for Suricata",
680
+ version="2.3.4",
681
+ hardcoded_path=HttpUrl(
682
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-suricata_234.tgz"
547
683
  ),
548
- TestApp(
549
- uid=5466,
550
- appid="SPLUNK_TA_FOR_ZEEK",
551
- title="TA for Zeek",
552
- version="1.0.6",
553
- hardcoded_path=HttpUrl(
554
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-zeek_106.tgz"
555
- ),
684
+ ),
685
+ TestApp(
686
+ uid=5466,
687
+ appid="SPLUNK_TA_FOR_ZEEK",
688
+ title="TA for Zeek",
689
+ version="1.0.6",
690
+ hardcoded_path=HttpUrl(
691
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/ta-for-zeek_106.tgz"
556
692
  ),
557
- TestApp(
558
- uid=3258,
559
- appid="SPLUNK_ADD_ON_FOR_NGINX",
560
- title="Splunk Add-on for NGINX",
561
- version="3.2.2",
562
- hardcoded_path=HttpUrl(
563
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-nginx_322.tgz"
564
- ),
693
+ ),
694
+ TestApp(
695
+ uid=3258,
696
+ appid="SPLUNK_ADD_ON_FOR_NGINX",
697
+ title="Splunk Add-on for NGINX",
698
+ version="3.2.2",
699
+ hardcoded_path=HttpUrl(
700
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-nginx_322.tgz"
565
701
  ),
566
- TestApp(
567
- uid=5238,
568
- appid="SPLUNK_ADD_ON_FOR_STREAM_FORWARDERS",
569
- title="Splunk Add-on for Stream Forwarders",
570
- version="8.1.1",
571
- hardcoded_path=HttpUrl(
572
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-forwarders_811.tgz"
573
- ),
702
+ ),
703
+ TestApp(
704
+ uid=5238,
705
+ appid="SPLUNK_ADD_ON_FOR_STREAM_FORWARDERS",
706
+ title="Splunk Add-on for Stream Forwarders",
707
+ version="8.1.1",
708
+ hardcoded_path=HttpUrl(
709
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-forwarders_811.tgz"
574
710
  ),
575
- TestApp(
576
- uid=5234,
577
- appid="SPLUNK_ADD_ON_FOR_STREAM_WIRE_DATA",
578
- title="Splunk Add-on for Stream Wire Data",
579
- version="8.1.1",
580
- hardcoded_path=HttpUrl(
581
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-wire-data_811.tgz"
582
- ),
711
+ ),
712
+ TestApp(
713
+ uid=5234,
714
+ appid="SPLUNK_ADD_ON_FOR_STREAM_WIRE_DATA",
715
+ title="Splunk Add-on for Stream Wire Data",
716
+ version="8.1.1",
717
+ hardcoded_path=HttpUrl(
718
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-stream-wire-data_811.tgz"
583
719
  ),
584
- TestApp(
585
- uid=2757,
586
- appid="PALO_ALTO_NETWORKS_ADD_ON_FOR_SPLUNK",
587
- title="Palo Alto Networks Add-on for Splunk",
588
- version="8.1.1",
589
- hardcoded_path=HttpUrl(
590
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/palo-alto-networks-add-on-for-splunk_811.tgz"
591
- ),
720
+ ),
721
+ TestApp(
722
+ uid=2757,
723
+ appid="PALO_ALTO_NETWORKS_ADD_ON_FOR_SPLUNK",
724
+ title="Palo Alto Networks Add-on for Splunk",
725
+ version="8.1.1",
726
+ hardcoded_path=HttpUrl(
727
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/palo-alto-networks-add-on-for-splunk_811.tgz"
592
728
  ),
593
- TestApp(
594
- uid=3865,
595
- appid="Zscaler_CIM",
596
- title="Zscaler Technical Add-On for Splunk",
597
- version="4.0.3",
598
- hardcoded_path=HttpUrl(
599
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/zscaler-technical-add-on-for-splunk_403.tgz"
600
- ),
729
+ ),
730
+ TestApp(
731
+ uid=3865,
732
+ appid="Zscaler_CIM",
733
+ title="Zscaler Technical Add-On for Splunk",
734
+ version="4.0.3",
735
+ hardcoded_path=HttpUrl(
736
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/zscaler-technical-add-on-for-splunk_403.tgz"
601
737
  ),
602
- TestApp(
603
- uid=3719,
604
- appid="SPLUNK_ADD_ON_FOR_AMAZON_KINESIS_FIREHOSE",
605
- title="Splunk Add-on for Amazon Kinesis Firehose",
606
- version="1.3.2",
607
- hardcoded_path=HttpUrl(
608
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-kinesis-firehose_132.tgz"
609
- ),
738
+ ),
739
+ TestApp(
740
+ uid=3719,
741
+ appid="SPLUNK_ADD_ON_FOR_AMAZON_KINESIS_FIREHOSE",
742
+ title="Splunk Add-on for Amazon Kinesis Firehose",
743
+ version="1.3.2",
744
+ hardcoded_path=HttpUrl(
745
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-kinesis-firehose_132.tgz"
610
746
  ),
611
- TestApp(
612
- uid=1876,
613
- appid="Splunk_TA_aws",
614
- title="Splunk Add-on for AWS",
615
- version="7.5.0",
616
- hardcoded_path=HttpUrl(
617
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-web-services-aws_750.tgz"
618
- ),
747
+ ),
748
+ TestApp(
749
+ uid=1876,
750
+ appid="Splunk_TA_aws",
751
+ title="Splunk Add-on for AWS",
752
+ version="7.5.0",
753
+ hardcoded_path=HttpUrl(
754
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-amazon-web-services-aws_750.tgz"
619
755
  ),
620
- TestApp(
621
- uid=3088,
622
- appid="SPLUNK_ADD_ON_FOR_GOOGLE_CLOUD_PLATFORM",
623
- title="Splunk Add-on for Google Cloud Platform",
624
- version="4.4.0",
625
- hardcoded_path=HttpUrl(
626
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-cloud-platform_440.tgz"
627
- ),
756
+ ),
757
+ TestApp(
758
+ uid=3088,
759
+ appid="SPLUNK_ADD_ON_FOR_GOOGLE_CLOUD_PLATFORM",
760
+ title="Splunk Add-on for Google Cloud Platform",
761
+ version="4.4.0",
762
+ hardcoded_path=HttpUrl(
763
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-cloud-platform_440.tgz"
628
764
  ),
629
- TestApp(
630
- uid=5556,
631
- appid="SPLUNK_ADD_ON_FOR_GOOGLE_WORKSPACE",
632
- title="Splunk Add-on for Google Workspace",
633
- version="2.6.3",
634
- hardcoded_path=HttpUrl(
635
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-workspace_263.tgz"
636
- ),
765
+ ),
766
+ TestApp(
767
+ uid=5556,
768
+ appid="SPLUNK_ADD_ON_FOR_GOOGLE_WORKSPACE",
769
+ title="Splunk Add-on for Google Workspace",
770
+ version="2.6.3",
771
+ hardcoded_path=HttpUrl(
772
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-google-workspace_263.tgz"
637
773
  ),
638
- TestApp(
639
- uid=3110,
640
- appid="SPLUNK_TA_MICROSOFT_CLOUD_SERVICES",
641
- title="Splunk Add-on for Microsoft Cloud Services",
642
- version="5.2.2",
643
- hardcoded_path=HttpUrl(
644
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-cloud-services_522.tgz"
645
- ),
774
+ ),
775
+ TestApp(
776
+ uid=3110,
777
+ appid="SPLUNK_TA_MICROSOFT_CLOUD_SERVICES",
778
+ title="Splunk Add-on for Microsoft Cloud Services",
779
+ version="5.2.2",
780
+ hardcoded_path=HttpUrl(
781
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-cloud-services_522.tgz"
646
782
  ),
647
- TestApp(
648
- uid=4055,
649
- appid="SPLUNK_ADD_ON_FOR_MICROSOFT_OFFICE_365",
650
- title="Splunk Add-on for Microsoft Office 365",
651
- version="4.5.1",
652
- hardcoded_path=HttpUrl(
653
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-office-365_451.tgz"
654
- ),
783
+ ),
784
+ TestApp(
785
+ uid=4055,
786
+ appid="SPLUNK_ADD_ON_FOR_MICROSOFT_OFFICE_365",
787
+ title="Splunk Add-on for Microsoft Office 365",
788
+ version="4.5.1",
789
+ hardcoded_path=HttpUrl(
790
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-add-on-for-microsoft-office-365_451.tgz"
655
791
  ),
656
- TestApp(
657
- uid=2890,
658
- appid="SPLUNK_MACHINE_LEARNING_TOOLKIT",
659
- title="Splunk Machine Learning Toolkit",
660
- version="5.4.1",
661
- hardcoded_path=HttpUrl(
662
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-machine-learning-toolkit_541.tgz"
663
- ),
792
+ ),
793
+ TestApp(
794
+ uid=2890,
795
+ appid="SPLUNK_MACHINE_LEARNING_TOOLKIT",
796
+ title="Splunk Machine Learning Toolkit",
797
+ version="5.4.1",
798
+ hardcoded_path=HttpUrl(
799
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/splunk-machine-learning-toolkit_541.tgz"
664
800
  ),
665
- TestApp(
666
- uid=2734,
667
- appid="URL_TOOLBOX",
668
- title="URL Toolbox",
669
- version="1.9.2",
670
- hardcoded_path=HttpUrl(
671
- "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/url-toolbox_192.tgz"
672
- ),
801
+ ),
802
+ TestApp(
803
+ uid=2734,
804
+ appid="URL_TOOLBOX",
805
+ title="URL Toolbox",
806
+ version="1.9.2",
807
+ hardcoded_path=HttpUrl(
808
+ "https://attack-range-appbinaries.s3.us-west-2.amazonaws.com/Latest/url-toolbox_192.tgz"
673
809
  ),
674
- ]
810
+ ),
811
+ ]
812
+
675
813
 
676
814
  class test_common(build):
677
- mode:Union[Changes, Selected, All] = Field(All(), union_mode='left_to_right')
678
- post_test_behavior: PostTestBehavior = Field(default=PostTestBehavior.pause_on_failure, description="Controls what to do when a test completes.\n\n"
679
- f"'{PostTestBehavior.always_pause}' - the state of "
680
- "the test will always pause after a test, allowing the user to log into the "
681
- "server and experiment with the search and data before it is removed.\n\n"
682
- f"'{PostTestBehavior.pause_on_failure}' - pause execution ONLY when a test fails. The user may press ENTER in the terminal "
683
- "running the test to move on to the next test.\n\n"
684
- f"'{PostTestBehavior.never_pause}' - never stop testing, even if a test fails.\n\n"
685
- "***SPECIAL NOTE FOR CI/CD*** 'never_pause' MUST be used for a test to "
686
- "run in an unattended manner or in a CI/CD system - otherwise a single failed test "
687
- "will result in the testing never finishing as the tool waits for input.")
688
- test_instances:List[Infrastructure] = Field(...)
689
- enable_integration_testing: bool = Field(default=False, description="Enable integration testing, which REQUIRES Splunk Enterprise Security "
690
- "to be installed on the server. This checks for a number of different things including generation "
691
- "of appropriate notables and messages. Please note that this will increase testing time "
692
- "considerably (by approximately 2-3 minutes per detection).")
693
- plan_only:bool = Field(default=False, exclude=True, description="WARNING - This is an advanced feature and currently intended for widespread use. "
694
- "This flag is useful for building your app and generating a test plan to run on different infrastructure. "
695
- "This flag does not actually perform the test. Instead, it builds validates all content and builds the app(s). "
696
- "It MUST be used with mode.changes and must run in the context of a git repo.")
697
- disable_tqdm:bool = Field(default=False, exclude=True, description="The tdqm library (https://github.com/tqdm/tqdm) is used to facilitate a richer,"
698
- " interactive command line workflow that can display progress bars and status information frequently. "
699
- "Unfortunately it is incompatible with, or may cause poorly formatted logs, in many CI/CD systems or other unattended environments. "
700
- "If you are running contentctl in CI/CD, then please set this argument to True. Note that if you are running in a CI/CD context, "
701
- f"you also MUST set post_test_behavior to {PostTestBehavior.never_pause}. Otherwiser, a failed detection will cause"
702
- "the CI/CD running to pause indefinitely.")
703
-
704
- apps: List[TestApp] = Field(default=DEFAULT_APPS, exclude=False, description="List of apps to install in test environment")
705
-
706
-
707
- def dumpCICDPlanAndQuit(self, githash: str, detections:List[Detection]):
815
+ mode: Union[Changes, Selected, All] = Field(All(), union_mode="left_to_right")
816
+ post_test_behavior: PostTestBehavior = Field(
817
+ default=PostTestBehavior.pause_on_failure,
818
+ description="Controls what to do when a test completes.\n\n"
819
+ f"'{PostTestBehavior.always_pause}' - the state of "
820
+ "the test will always pause after a test, allowing the user to log into the "
821
+ "server and experiment with the search and data before it is removed.\n\n"
822
+ f"'{PostTestBehavior.pause_on_failure}' - pause execution ONLY when a test fails. The user may press ENTER in the terminal "
823
+ "running the test to move on to the next test.\n\n"
824
+ f"'{PostTestBehavior.never_pause}' - never stop testing, even if a test fails.\n\n"
825
+ "***SPECIAL NOTE FOR CI/CD*** 'never_pause' MUST be used for a test to "
826
+ "run in an unattended manner or in a CI/CD system - otherwise a single failed test "
827
+ "will result in the testing never finishing as the tool waits for input.",
828
+ )
829
+ test_instances: List[Infrastructure] = Field(...)
830
+ enable_integration_testing: bool = Field(
831
+ default=False,
832
+ description="Enable integration testing, which REQUIRES Splunk Enterprise Security "
833
+ "to be installed on the server. This checks for a number of different things including generation "
834
+ "of appropriate notables and messages. Please note that this will increase testing time "
835
+ "considerably (by approximately 2-3 minutes per detection).",
836
+ )
837
+ plan_only: bool = Field(
838
+ default=False,
839
+ exclude=True,
840
+ description="WARNING - This is an advanced feature and currently intended for widespread use. "
841
+ "This flag is useful for building your app and generating a test plan to run on different infrastructure. "
842
+ "This flag does not actually perform the test. Instead, it builds validates all content and builds the app(s). "
843
+ "It MUST be used with mode.changes and must run in the context of a git repo.",
844
+ )
845
+ disable_tqdm: bool = Field(
846
+ default=False,
847
+ exclude=True,
848
+ description="The tdqm library (https://github.com/tqdm/tqdm) is used to facilitate a richer,"
849
+ " interactive command line workflow that can display progress bars and status information frequently. "
850
+ "Unfortunately it is incompatible with, or may cause poorly formatted logs, in many CI/CD systems or other unattended environments. "
851
+ "If you are running contentctl in CI/CD, then please set this argument to True. Note that if you are running in a CI/CD context, "
852
+ f"you also MUST set post_test_behavior to {PostTestBehavior.never_pause}. Otherwiser, a failed detection will cause"
853
+ "the CI/CD running to pause indefinitely.",
854
+ )
855
+
856
+ apps: List[TestApp] = Field(
857
+ default=DEFAULT_APPS,
858
+ exclude=False,
859
+ description="List of apps to install in test environment",
860
+ )
861
+
862
+ def dumpCICDPlanAndQuit(self, githash: str, detections: List[Detection]):
708
863
  output_file = self.path / "test_plan.yml"
709
- self.mode = Selected(files=sorted([detection.file_path for detection in detections], key=lambda path: str(path)))
864
+ self.mode = Selected(
865
+ files=sorted(
866
+ [detection.file_path for detection in detections],
867
+ key=lambda path: str(path),
868
+ )
869
+ )
710
870
  self.post_test_behavior = PostTestBehavior.never_pause
711
- #required so that CI/CD does not get too much output or hang
871
+ # required so that CI/CD does not get too much output or hang
712
872
  self.disable_tqdm = True
713
873
 
714
- # We will still parse the app, but no need to do enrichments or
874
+ # We will still parse the app, but no need to do enrichments or
715
875
  # output to dist. We have already built it!
716
876
  self.build_app = False
717
877
  self.build_api = False
718
878
  self.enrichments = False
719
-
879
+
720
880
  self.enable_integration_testing = True
721
881
 
722
882
  data = self.model_dump()
723
-
724
- #Add the hash of the current commit
725
- data['githash'] = str(githash)
726
-
727
- #Remove some fields that are not relevant
728
- for k in ['container_settings', 'test_instances']:
883
+
884
+ # Add the hash of the current commit
885
+ data["githash"] = str(githash)
886
+
887
+ # Remove some fields that are not relevant
888
+ for k in ["container_settings", "test_instances"]:
729
889
  if k in data:
730
- del(data[k])
731
-
732
-
890
+ del data[k]
733
891
 
734
892
  try:
735
893
  YmlWriter.writeYmlFile(str(output_file), data)
736
- print(f"Successfully wrote a test plan for [{len(self.mode.files)} detections] using [{len(self.apps)} apps] to [{output_file}]")
894
+ print(
895
+ f"Successfully wrote a test plan for [{len(self.mode.files)} detections] using [{len(self.apps)} apps] to [{output_file}]"
896
+ )
737
897
  except Exception as e:
738
898
  raise Exception(f"Error writing test plan file [{output_file}]: {str(e)}")
739
899
 
740
-
741
- def getLocalAppDir(self)->pathlib.Path:
900
+ def getLocalAppDir(self) -> pathlib.Path:
742
901
  # docker really wants absolute paths
743
902
  path = self.path / "apps"
744
903
  return path.absolute()
745
-
746
- def getContainerAppDir(self)->pathlib.Path:
904
+
905
+ def getContainerAppDir(self) -> pathlib.Path:
747
906
  # docker really wants absolute paths
748
907
  return pathlib.Path("/tmp/apps")
749
908
 
750
- def enterpriseSecurityInApps(self)->bool:
751
-
909
+ def enterpriseSecurityInApps(self) -> bool:
752
910
  for app in self.apps:
753
911
  if app.uid == ENTERPRISE_SECURITY_UID:
754
912
  return True
755
913
  return False
756
-
757
- def commonInformationModelInApps(self)->bool:
914
+
915
+ def commonInformationModelInApps(self) -> bool:
758
916
  for app in self.apps:
759
917
  if app.uid == COMMON_INFORMATION_MODEL_UID:
760
918
  return True
761
- return False
919
+ return False
762
920
 
763
- @model_validator(mode='after')
764
- def ensureCommonInformationModel(self)->Self:
921
+ @model_validator(mode="after")
922
+ def ensureCommonInformationModel(self) -> Self:
765
923
  if self.commonInformationModelInApps():
766
924
  return self
767
- print(f"INFO: Common Information Model/CIM "
768
- f"(uid: [{COMMON_INFORMATION_MODEL_UID}]) is not listed in apps.\n"
769
- f"contentctl test MUST include Common Information Model.\n"
770
- f"Please note this message is only informational.")
925
+ print(
926
+ f"INFO: Common Information Model/CIM "
927
+ f"(uid: [{COMMON_INFORMATION_MODEL_UID}]) is not listed in apps.\n"
928
+ f"contentctl test MUST include Common Information Model.\n"
929
+ f"Please note this message is only informational."
930
+ )
771
931
  return self
772
-
773
- @model_validator(mode='after')
774
- def suppressTQDM(self)->Self:
932
+
933
+ @model_validator(mode="after")
934
+ def suppressTQDM(self) -> Self:
775
935
  if self.disable_tqdm:
776
936
  tqdm.tqdm.__init__ = partialmethod(tqdm.tqdm.__init__, disable=True)
777
937
  if self.post_test_behavior != PostTestBehavior.never_pause:
778
- raise ValueError(f"You have disabled tqdm, presumably because you are "
779
- f"running in CI/CD or another unattended context.\n"
780
- f"However, post_test_behavior is set to [{self.post_test_behavior}].\n"
781
- f"If that is the case, then you MUST set post_test_behavior "
782
- f"to [{PostTestBehavior.never_pause}].\n"
783
- "Otherwise, if a detection fails in CI/CD, your CI/CD runner will hang forever.")
938
+ raise ValueError(
939
+ f"You have disabled tqdm, presumably because you are "
940
+ f"running in CI/CD or another unattended context.\n"
941
+ f"However, post_test_behavior is set to [{self.post_test_behavior}].\n"
942
+ f"If that is the case, then you MUST set post_test_behavior "
943
+ f"to [{PostTestBehavior.never_pause}].\n"
944
+ "Otherwise, if a detection fails in CI/CD, your CI/CD runner will hang forever."
945
+ )
784
946
  return self
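The suppressTQDM validator above relies on a global monkey-patch: reassigning tqdm.tqdm.__init__ to a partialmethod that forces disable=True silences every progress bar in the process. A small standalone sketch of that trick:

from functools import partialmethod

import tqdm

# After this assignment, every tqdm bar in the process is constructed with disable=True.
tqdm.tqdm.__init__ = partialmethod(tqdm.tqdm.__init__, disable=True)

for _ in tqdm.tqdm(range(3)):
    pass  # no progress output is rendered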
785
-
786
-
787
947
 
788
- @model_validator(mode='after')
789
- def ensureEnterpriseSecurityForIntegrationTesting(self)->Self:
948
+ @model_validator(mode="after")
949
+ def ensureEnterpriseSecurityForIntegrationTesting(self) -> Self:
790
950
  if not self.enable_integration_testing:
791
951
  return self
792
952
  if self.enterpriseSecurityInApps():
793
953
  return self
794
-
795
- print(f"INFO: enable_integration_testing is [{self.enable_integration_testing}], "
796
- f"but the Splunk Enterprise Security "
797
- f"App (uid: [{ENTERPRISE_SECURITY_UID}]) is not listed in apps.\n"
798
- f"Integration Testing MUST include Enterprise Security.\n"
799
- f"Please note this message is only informational.")
800
- return self
801
-
802
954
 
955
+ print(
956
+ f"INFO: enable_integration_testing is [{self.enable_integration_testing}], "
957
+ f"but the Splunk Enterprise Security "
958
+ f"App (uid: [{ENTERPRISE_SECURITY_UID}]) is not listed in apps.\n"
959
+ f"Integration Testing MUST include Enterprise Security.\n"
960
+ f"Please note this message is only informational."
961
+ )
962
+ return self
803
963
 
804
- @model_validator(mode='after')
805
- def checkPlanOnlyUse(self)->Self:
806
- #Ensure that mode is CHANGES
964
+ @model_validator(mode="after")
965
+ def checkPlanOnlyUse(self) -> Self:
966
+ # Ensure that mode is CHANGES
807
967
  if self.plan_only and not isinstance(self.mode, Changes):
808
- raise ValueError("plan_only MUST be used with --mode:changes")
968
+ raise ValueError("plan_only MUST be used with --mode:changes")
809
969
  return self
810
970
 
811
971
 
812
972
  class test(test_common):
813
973
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
814
- container_settings:ContainerSettings = ContainerSettings()
815
- test_instances: List[Container] = Field([], exclude = True, validate_default=True)
816
- splunk_api_username: Optional[str] = Field(default=None, exclude = True,description="Splunk API username used for running appinspect or installating apps from Splunkbase")
817
- splunk_api_password: Optional[str] = Field(default=None, exclude = True, description="Splunk API password used for running appinspect or installaing apps from Splunkbase")
818
-
819
-
820
- def getContainerInfrastructureObjects(self)->Self:
974
+ container_settings: ContainerSettings = ContainerSettings()
975
+ test_instances: List[Container] = Field([], exclude=True, validate_default=True)
976
+ splunk_api_username: Optional[str] = Field(
977
+ default=None,
978
+ exclude=True,
979
+ description="Splunk API username used for running appinspect or installating apps from Splunkbase",
980
+ )
981
+ splunk_api_password: Optional[str] = Field(
982
+ default=None,
983
+ exclude=True,
984
+ description="Splunk API password used for running appinspect or installaing apps from Splunkbase",
985
+ )
986
+
987
+ def getContainerInfrastructureObjects(self) -> Self:
821
988
  try:
822
989
  self.test_instances = self.container_settings.getContainers()
823
990
  return self
824
-
991
+
825
992
  except Exception as e:
826
993
  raise ValueError(f"Error constructing container test_instances: {str(e)}")
827
-
828
-
829
-
830
-
831
- @model_validator(mode='after')
832
- def ensureAppsAreGood(self)->Self:
994
+
995
+ @model_validator(mode="after")
996
+ def ensureAppsAreGood(self) -> Self:
833
997
  """
834
- This function ensures that, after the rest of the configuration
998
+ This function ensures that, after the rest of the configuration
835
999
  has been validated, all of the apps are able to be correctly resolved.
836
1000
  This includes apps that may be sourced from local files, HTTP files,
837
- and/or Splunkbase.
1001
+ and/or Splunkbase.
838
1002
 
839
1003
  This is NOT a model_post_init function because it does perform some validation,
840
1004
  even though it does not change the object
@@ -844,29 +1008,34 @@ class test(test_common):
844
1008
 
845
1009
  Returns:
846
1010
  Self: The test object. No modifications are made during this call.
847
- """
1011
+ """
848
1012
  try:
849
- _ = self.getContainerEnvironmentString(stage_file=False, include_custom_app=False)
1013
+ _ = self.getContainerEnvironmentString(
1014
+ stage_file=False, include_custom_app=False
1015
+ )
850
1016
  except Exception as e:
851
1017
  raise Exception(f"Error validating test apps: {str(e)}")
852
1018
  return self
853
1019
 
854
-
855
- def getContainerEnvironmentString(self,stage_file:bool=False, include_custom_app:bool=True)->str:
856
- apps:List[App_Base] = self.apps
1020
+ def getContainerEnvironmentString(
1021
+ self, stage_file: bool = False, include_custom_app: bool = True
1022
+ ) -> str:
1023
+ apps: List[App_Base] = self.apps
857
1024
  if include_custom_app:
858
1025
  apps.append(self.app)
859
1026
 
860
- paths = [app.getApp(self,stage_file=stage_file) for app in apps]
1027
+ paths = [app.getApp(self, stage_file=stage_file) for app in apps]
861
1028
 
862
1029
  container_paths = []
863
1030
  for path in paths:
864
1031
  if path.startswith(SPLUNKBASE_URL):
865
1032
  container_paths.append(path)
866
1033
  else:
867
- container_paths.append((self.getContainerAppDir()/pathlib.Path(path).name).as_posix())
868
-
869
- return ','.join(container_paths)
1034
+ container_paths.append(
1035
+ (self.getContainerAppDir() / pathlib.Path(path).name).as_posix()
1036
+ )
1037
+
1038
+ return ",".join(container_paths)
870
1039
 
871
1040
  def getAppFilePath(self):
872
1041
  return self.path / "apps.yml"
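getContainerEnvironmentString above produces a single comma-separated string of app locations: Splunkbase download URLs are passed through unchanged, while locally staged packages are rewritten to the in-container apps directory returned by getContainerAppDir (/tmp/apps). A hedged standalone sketch of that mapping; the prefix constant and example paths are illustrative assumptions:

import pathlib
from typing import List

SPLUNKBASE_PREFIX = "https://splunkbase.splunk.com/app/"  # assumption: prefix form of the Splunkbase download URL
CONTAINER_APP_DIR = pathlib.Path("/tmp/apps")


def to_container_paths(paths: List[str]) -> str:
    container_paths: List[str] = []
    for path in paths:
        if path.startswith(SPLUNKBASE_PREFIX):
            # Splunkbase apps are installed by URL inside the container.
            container_paths.append(path)
        else:
            # Locally staged packages are referenced from the container's apps directory.
            container_paths.append((CONTAINER_APP_DIR / pathlib.Path(path).name).as_posix())
    return ",".join(container_paths)


print(to_container_paths([
    "/builds/dist/my_custom_app.tar.gz",
    "https://splunkbase.splunk.com/app/263/release/1.0.0/download",
]))
# -> /tmp/apps/my_custom_app.tar.gz,https://splunkbase.splunk.com/app/263/release/1.0.0/download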
@@ -877,98 +1046,141 @@ TEST_ARGS_ENV = "CONTENTCTL_TEST_INFRASTRUCTURES"
877
1046
 
878
1047
  class test_servers(test_common):
879
1048
  model_config = ConfigDict(validate_default=True, arbitrary_types_allowed=True)
880
- test_instances:List[Infrastructure] = Field([],description="Test against one or more preconfigured servers.", validate_default=True)
881
- server_info:Optional[str] = Field(None, validate_default=True, description='String of pre-configured servers to use for testing. The list MUST be in the format:\n'
882
- 'address,username,web_ui_port,hec_port,api_port;address_2,username_2,web_ui_port_2,hec_port_2,api_port_2'
883
- '\nFor example, the following string will use 2 preconfigured test instances:\n'
884
- '127.0.0.1,firstUser,firstUserPassword,8000,8088,8089;1.2.3.4,secondUser,secondUserPassword,8000,8088,8089\n'
885
- 'Note that these test_instances may be hosted on the same system, such as localhost/127.0.0.1 or a docker server, or different hosts.\n'
886
- f'This value may also be passed by setting the environment variable [{TEST_ARGS_ENV}] with the value above.')
887
-
888
- @model_validator(mode='before')
1049
+ test_instances: List[Infrastructure] = Field(
1050
+ [],
1051
+ description="Test against one or more preconfigured servers.",
1052
+ validate_default=True,
1053
+ )
1054
+ server_info: Optional[str] = Field(
1055
+ None,
1056
+ validate_default=True,
1057
+ description="String of pre-configured servers to use for testing. The list MUST be in the format:\n"
1058
+ "address,username,web_ui_port,hec_port,api_port;address_2,username_2,web_ui_port_2,hec_port_2,api_port_2"
1059
+ "\nFor example, the following string will use 2 preconfigured test instances:\n"
1060
+ "127.0.0.1,firstUser,firstUserPassword,8000,8088,8089;1.2.3.4,secondUser,secondUserPassword,8000,8088,8089\n"
1061
+ "Note that these test_instances may be hosted on the same system, such as localhost/127.0.0.1 or a docker server, or different hosts.\n"
1062
+ f"This value may also be passed by setting the environment variable [{TEST_ARGS_ENV}] with the value above.",
1063
+ )
1064
+
1065
+ @model_validator(mode="before")
889
1066
  @classmethod
890
- def parse_config(cls, data:Any, info: ValidationInfo)->Any:
891
- #Ignore whatever is in the file or defaults, these must be supplied on command line
892
- #if len(v) != 0:
1067
+ def parse_config(cls, data: Any, info: ValidationInfo) -> Any:
1068
+ # Ignore whatever is in the file or defaults, these must be supplied on command line
1069
+ # if len(v) != 0:
893
1070
  # return v
894
-
895
-
896
- if isinstance(data.get("server_info"),str) :
1071
+
1072
+ if isinstance(data.get("server_info"), str):
897
1073
  server_info = data.get("server_info")
898
- elif isinstance(environ.get(TEST_ARGS_ENV),str):
1074
+ elif isinstance(environ.get(TEST_ARGS_ENV), str):
899
1075
  server_info = environ.get(TEST_ARGS_ENV)
900
1076
  else:
901
- raise ValueError(f"server_info not passed on command line or in environment variable {TEST_ARGS_ENV}")
1077
+ raise ValueError(
1078
+ f"server_info not passed on command line or in environment variable {TEST_ARGS_ENV}"
1079
+ )
1080
+
1081
+ infrastructures: List[Infrastructure] = []
902
1082
 
903
- infrastructures:List[Infrastructure] = []
904
-
905
-
906
1083
  index = 0
907
- for server in server_info.split(';'):
908
- address, username, password, web_ui_port, hec_port, api_port = server.split(",")
909
- infrastructures.append(Infrastructure(splunk_app_username = username, splunk_app_password=password,
910
- instance_address=address, hec_port = int(hec_port),
911
- web_ui_port= int(web_ui_port),api_port=int(api_port), instance_name=f"test_server_{index}")
1084
+ for server in server_info.split(";"):
1085
+ address, username, password, web_ui_port, hec_port, api_port = server.split(
1086
+ ","
1087
+ )
1088
+ infrastructures.append(
1089
+ Infrastructure(
1090
+ splunk_app_username=username,
1091
+ splunk_app_password=password,
1092
+ instance_address=address,
1093
+ hec_port=int(hec_port),
1094
+ web_ui_port=int(web_ui_port),
1095
+ api_port=int(api_port),
1096
+ instance_name=f"test_server_{index}",
912
1097
  )
913
- index+=1
914
- data['test_instances'] = infrastructures
1098
+ )
1099
+ index += 1
1100
+ data["test_instances"] = infrastructures
915
1101
  return data
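parse_config above accepts the server list either from the configuration or from the CONTENTCTL_TEST_INFRASTRUCTURES environment variable, and turns each semicolon-separated entry into an Infrastructure object. A standalone sketch of that parsing, with a NamedTuple standing in for the Infrastructure model:

from typing import List, NamedTuple


class Server(NamedTuple):  # assumption: stand-in for contentctl's Infrastructure model
    instance_address: str
    splunk_app_username: str
    splunk_app_password: str
    web_ui_port: int
    hec_port: int
    api_port: int


def parse_server_info(server_info: str) -> List[Server]:
    servers: List[Server] = []
    for entry in server_info.split(";"):
        address, username, password, web_ui_port, hec_port, api_port = entry.split(",")
        servers.append(
            Server(address, username, password, int(web_ui_port), int(hec_port), int(api_port))
        )
    return servers


# The documented two-server example value:
value = (
    "127.0.0.1,firstUser,firstUserPassword,8000,8088,8089;"
    "1.2.3.4,secondUser,secondUserPassword,8000,8088,8089"
)
print(parse_server_info(value))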
916
1102
 
917
- @field_validator('test_instances',mode='before')
1103
+ @field_validator("test_instances", mode="before")
918
1104
  @classmethod
919
- def check_environment_variable_for_config(cls, v:List[Infrastructure]):
1105
+ def check_environment_variable_for_config(cls, v: List[Infrastructure]):
920
1106
  return v
921
- #Ignore whatever is in the file or defaults, these must be supplied on command line
922
- #if len(v) != 0:
1107
+ # Ignore whatever is in the file or defaults, these must be supplied on command line
1108
+ # if len(v) != 0:
923
1109
  # return v
924
1110
  TEST_ARGS_ENV = "CONTENTCTL_TEST_INFRASTRUCTURES"
925
-
926
-
927
- #environment variable is present. try to parse it
928
- infrastructures:List[Infrastructure] = []
929
- server_info:str|None = environ.get(TEST_ARGS_ENV)
1111
+
1112
+ # environment variable is present. try to parse it
1113
+ infrastructures: List[Infrastructure] = []
1114
+ server_info: str | None = environ.get(TEST_ARGS_ENV)
930
1115
  if server_info is None:
931
- raise ValueError(f"test_instances not passed on command line or in environment variable {TEST_ARGS_ENV}")
932
-
933
-
1116
+ raise ValueError(
1117
+ f"test_instances not passed on command line or in environment variable {TEST_ARGS_ENV}"
1118
+ )
1119
+
934
1120
  index = 0
935
- for server in server_info.split(';'):
936
- address, username, password, web_ui_port, hec_port, api_port = server.split(",")
937
- infrastructures.append(Infrastructure(splunk_app_username = username, splunk_app_password=password,
938
- instance_address=address, hec_port = int(hec_port),
939
- web_ui_port= int(web_ui_port),api_port=int(api_port), instance_name=f"test_server_{index}")
1121
+ for server in server_info.split(";"):
1122
+ address, username, password, web_ui_port, hec_port, api_port = server.split(
1123
+ ","
1124
+ )
1125
+ infrastructures.append(
1126
+ Infrastructure(
1127
+ splunk_app_username=username,
1128
+ splunk_app_password=password,
1129
+ instance_address=address,
1130
+ hec_port=int(hec_port),
1131
+ web_ui_port=int(web_ui_port),
1132
+ api_port=int(api_port),
1133
+ instance_name=f"test_server_{index}",
940
1134
  )
941
- index+=1
1135
+ )
1136
+ index += 1
942
1137
 
943
1138
 
944
1139
  class release_notes(Config_Base):
945
- old_tag:Optional[str] = Field(None, description="Name of the tag to diff against to find new content. "
946
- "If it is not supplied, then it will be inferred as the "
947
- "second newest tag at runtime.")
948
- new_tag:Optional[str] = Field(None, description="Name of the tag containing new content. If it is not supplied,"
949
- " then it will be inferred as the newest tag at runtime.")
950
- latest_branch:Optional[str] = Field(None, description="Branch name for which we are generating release notes for")
951
- compare_against:Optional[str] = Field(default="develop", description="Branch name for which we are comparing the files changes against")
952
-
953
- def releaseNotesFilename(self, filename:str)->pathlib.Path:
954
- #Assume that notes are written to dist/. This does not respect build_dir since that is
955
- #only a member of build
956
- p = self.path / "dist"
1140
+ old_tag: Optional[str] = Field(
1141
+ None,
1142
+ description="Name of the tag to diff against to find new content. "
1143
+ "If it is not supplied, then it will be inferred as the "
1144
+ "second newest tag at runtime.",
1145
+ )
1146
+ new_tag: Optional[str] = Field(
1147
+ None,
1148
+ description="Name of the tag containing new content. If it is not supplied,"
1149
+ " then it will be inferred as the newest tag at runtime.",
1150
+ )
1151
+ latest_branch: Optional[str] = Field(
1152
+ None, description="Branch name for which we are generating release notes for"
1153
+ )
1154
+ compare_against: Optional[str] = Field(
1155
+ default="develop",
1156
+ description="Branch name for which we are comparing the files changes against",
1157
+ )
1158
+
1159
+ def releaseNotesFilename(self, filename: str) -> pathlib.Path:
1160
+ # Assume that notes are written to dist/. This does not respect build_dir since that is
1161
+ # only a member of build
1162
+ p = self.path / "dist"
957
1163
  try:
958
- p.mkdir(exist_ok=True,parents=True)
1164
+ p.mkdir(exist_ok=True, parents=True)
959
1165
  except Exception:
960
- raise Exception(f"Error making the directory '{p}' to hold release_notes: {str(e)}")
961
- return p/filename
1166
+ raise Exception(
1167
+ f"Error making the directory '{p}' to hold release_notes: {str(e)}"
1168
+ )
1169
+ return p / filename
962
1170
 
963
- @model_validator(mode='after')
1171
+ @model_validator(mode="after")
964
1172
  def ensureNewTagOrLatestBranch(self):
965
- '''
1173
+ """
966
1174
  Exactly one of latest_branch or new_tag must be defined. otherwise, throw an error
967
- '''
1175
+ """
968
1176
  if self.new_tag is not None and self.latest_branch is not None:
969
- raise ValueError("Both new_tag and latest_branch are defined. EXACTLY one of these MUST be defiend.")
1177
+ raise ValueError(
1178
+ "Both new_tag and latest_branch are defined. EXACTLY one of these MUST be defiend."
1179
+ )
970
1180
  elif self.new_tag is None and self.latest_branch is None:
971
- raise ValueError("Neither new_tag nor latest_branch are defined. EXACTLY one of these MUST be defined.")
1181
+ raise ValueError(
1182
+ "Neither new_tag nor latest_branch are defined. EXACTLY one of these MUST be defined."
1183
+ )
972
1184
  return self
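ensureNewTagOrLatestBranch enforces that exactly one of new_tag or latest_branch is supplied. The same rule reduced to a plain function for illustration; the tag and branch names below are placeholders:

from typing import Optional


def ensure_new_tag_or_latest_branch(new_tag: Optional[str], latest_branch: Optional[str]) -> None:
    # Exactly one of the two arguments must be provided.
    if new_tag is not None and latest_branch is not None:
        raise ValueError("Both new_tag and latest_branch are defined. EXACTLY one of these MUST be defined.")
    if new_tag is None and latest_branch is None:
        raise ValueError("Neither new_tag nor latest_branch are defined. EXACTLY one of these MUST be defined.")


ensure_new_tag_or_latest_branch(new_tag="v5.0.0a2", latest_branch=None)   # ok
ensure_new_tag_or_latest_branch(new_tag=None, latest_branch="develop")    # ok
# ensure_new_tag_or_latest_branch(None, None)                             # raises ValueError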
973
1185
 
974
1186
  # @model_validator(mode='after')
@@ -978,10 +1190,9 @@ class release_notes(Config_Base):
978
1190
  # from pygit2 import Commit
979
1191
  # repo = pygit2.Repository(path=str(self.path))
980
1192
  # tags = list(repo.references.iterator(references_return_type=pygit2.enums.ReferenceFilter.TAGS))
981
-
1193
+
982
1194
  # #Sort all tags by commit time from newest to oldest
983
1195
  # sorted_tags = sorted(tags, key=lambda tag: repo.lookup_reference(tag.name).peel(Commit).commit_time, reverse=True)
984
-
985
1196
 
986
1197
  # tags_names:List[str] = [t.shorthand for t in sorted_tags]
987
1198
  # print(tags_names)
@@ -996,9 +1207,7 @@ class release_notes(Config_Base):
996
1207
  # pass
997
1208
  # else:
998
1209
  # raise ValueError(f"Unknown error getting new_tag {self.new_tag}")
999
-
1000
-
1001
-
1210
+
1002
1211
  # if self.old_tag is not None and self.old_tag not in tags_names:
1003
1212
  # raise ValueError(f"The old_tag '{self.new_tag}' was not found in the set name tags for this repo: {tags_names}")
1004
1213
  # elif self.new_tag == self.old_tag:
@@ -1012,15 +1221,18 @@ class release_notes(Config_Base):
1012
1221
  # pass
1013
1222
  # else:
1014
1223
  # raise ValueError(f"Unknown error getting old_tag {self.old_tag}")
1015
-
1016
-
1017
-
1224
+
1018
1225
  # if not tags_names.index(self.new_tag) < tags_names.index(self.old_tag):
1019
1226
  # raise ValueError(f"The new_tag '{self.new_tag}' is not newer than the old_tag '{self.old_tag}'")
1020
-
1227
+
1021
1228
  # if self.latest_branch is not None:
1022
1229
  # if repo.lookup_branch(self.latest_branch) is None:
1023
1230
  # raise ValueError("The latest_branch '{self.latest_branch}' was not found in the repository")
1024
-
1025
-
1026
- # return self
1231
+
1232
+ # return self # raise ValueError(f"The new_tag '{self.new_tag}' is not newer than the old_tag '{self.old_tag}'")
1233
+
1234
+ # if self.latest_branch is not None:
1235
+ # if repo.lookup_branch(self.latest_branch) is None:
1236
+ # raise ValueError("The latest_branch '{self.latest_branch}' was not found in the repository")
1237
+
1238
+ # return self