contentctl 4.4.7__py3-none-any.whl → 5.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. contentctl/__init__.py +1 -1
  2. contentctl/actions/build.py +102 -57
  3. contentctl/actions/deploy_acs.py +29 -24
  4. contentctl/actions/detection_testing/DetectionTestingManager.py +66 -42
  5. contentctl/actions/detection_testing/GitService.py +134 -76
  6. contentctl/actions/detection_testing/generate_detection_coverage_badge.py +48 -30
  7. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +192 -147
  8. contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +45 -32
  9. contentctl/actions/detection_testing/progress_bar.py +9 -6
  10. contentctl/actions/detection_testing/views/DetectionTestingView.py +16 -19
  11. contentctl/actions/detection_testing/views/DetectionTestingViewCLI.py +1 -5
  12. contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +2 -2
  13. contentctl/actions/detection_testing/views/DetectionTestingViewWeb.py +1 -4
  14. contentctl/actions/doc_gen.py +9 -5
  15. contentctl/actions/initialize.py +45 -33
  16. contentctl/actions/inspect.py +118 -61
  17. contentctl/actions/new_content.py +155 -108
  18. contentctl/actions/release_notes.py +276 -146
  19. contentctl/actions/reporting.py +23 -19
  20. contentctl/actions/test.py +33 -28
  21. contentctl/actions/validate.py +55 -34
  22. contentctl/api.py +54 -45
  23. contentctl/contentctl.py +124 -90
  24. contentctl/enrichments/attack_enrichment.py +112 -72
  25. contentctl/enrichments/cve_enrichment.py +34 -28
  26. contentctl/enrichments/splunk_app_enrichment.py +38 -36
  27. contentctl/helper/link_validator.py +101 -78
  28. contentctl/helper/splunk_app.py +69 -41
  29. contentctl/helper/utils.py +58 -53
  30. contentctl/input/director.py +68 -36
  31. contentctl/input/new_content_questions.py +27 -35
  32. contentctl/input/yml_reader.py +28 -18
  33. contentctl/objects/abstract_security_content_objects/detection_abstract.py +303 -259
  34. contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +115 -52
  35. contentctl/objects/alert_action.py +10 -9
  36. contentctl/objects/annotated_types.py +1 -1
  37. contentctl/objects/atomic.py +65 -54
  38. contentctl/objects/base_test.py +5 -3
  39. contentctl/objects/base_test_result.py +19 -11
  40. contentctl/objects/baseline.py +62 -30
  41. contentctl/objects/baseline_tags.py +30 -24
  42. contentctl/objects/config.py +790 -597
  43. contentctl/objects/constants.py +33 -56
  44. contentctl/objects/correlation_search.py +150 -136
  45. contentctl/objects/dashboard.py +55 -41
  46. contentctl/objects/data_source.py +16 -17
  47. contentctl/objects/deployment.py +43 -44
  48. contentctl/objects/deployment_email.py +3 -2
  49. contentctl/objects/deployment_notable.py +4 -2
  50. contentctl/objects/deployment_phantom.py +7 -6
  51. contentctl/objects/deployment_rba.py +3 -2
  52. contentctl/objects/deployment_scheduling.py +3 -2
  53. contentctl/objects/deployment_slack.py +3 -2
  54. contentctl/objects/detection.py +5 -2
  55. contentctl/objects/detection_metadata.py +1 -0
  56. contentctl/objects/detection_stanza.py +7 -2
  57. contentctl/objects/detection_tags.py +58 -103
  58. contentctl/objects/drilldown.py +66 -34
  59. contentctl/objects/enums.py +81 -100
  60. contentctl/objects/errors.py +16 -24
  61. contentctl/objects/integration_test.py +3 -3
  62. contentctl/objects/integration_test_result.py +1 -0
  63. contentctl/objects/investigation.py +59 -36
  64. contentctl/objects/investigation_tags.py +30 -19
  65. contentctl/objects/lookup.py +304 -101
  66. contentctl/objects/macro.py +55 -39
  67. contentctl/objects/manual_test.py +3 -3
  68. contentctl/objects/manual_test_result.py +1 -0
  69. contentctl/objects/mitre_attack_enrichment.py +17 -16
  70. contentctl/objects/notable_action.py +2 -1
  71. contentctl/objects/notable_event.py +1 -3
  72. contentctl/objects/playbook.py +37 -35
  73. contentctl/objects/playbook_tags.py +23 -13
  74. contentctl/objects/rba.py +96 -0
  75. contentctl/objects/risk_analysis_action.py +15 -11
  76. contentctl/objects/risk_event.py +110 -160
  77. contentctl/objects/risk_object.py +1 -0
  78. contentctl/objects/savedsearches_conf.py +9 -7
  79. contentctl/objects/security_content_object.py +5 -2
  80. contentctl/objects/story.py +54 -49
  81. contentctl/objects/story_tags.py +56 -45
  82. contentctl/objects/test_attack_data.py +2 -1
  83. contentctl/objects/test_group.py +5 -2
  84. contentctl/objects/threat_object.py +1 -0
  85. contentctl/objects/throttling.py +27 -18
  86. contentctl/objects/unit_test.py +3 -4
  87. contentctl/objects/unit_test_baseline.py +5 -5
  88. contentctl/objects/unit_test_result.py +6 -6
  89. contentctl/output/api_json_output.py +233 -220
  90. contentctl/output/attack_nav_output.py +21 -21
  91. contentctl/output/attack_nav_writer.py +29 -37
  92. contentctl/output/conf_output.py +235 -172
  93. contentctl/output/conf_writer.py +201 -125
  94. contentctl/output/data_source_writer.py +38 -26
  95. contentctl/output/doc_md_output.py +53 -27
  96. contentctl/output/jinja_writer.py +19 -15
  97. contentctl/output/json_writer.py +21 -11
  98. contentctl/output/svg_output.py +56 -38
  99. contentctl/output/templates/analyticstories_detections.j2 +2 -2
  100. contentctl/output/templates/analyticstories_stories.j2 +1 -1
  101. contentctl/output/templates/collections.j2 +1 -1
  102. contentctl/output/templates/doc_detections.j2 +0 -5
  103. contentctl/output/templates/es_investigations_investigations.j2 +1 -1
  104. contentctl/output/templates/es_investigations_stories.j2 +1 -1
  105. contentctl/output/templates/savedsearches_baselines.j2 +2 -2
  106. contentctl/output/templates/savedsearches_detections.j2 +10 -11
  107. contentctl/output/templates/savedsearches_investigations.j2 +2 -2
  108. contentctl/output/templates/transforms.j2 +6 -8
  109. contentctl/output/yml_writer.py +29 -20
  110. contentctl/templates/detections/endpoint/anomalous_usage_of_7zip.yml +16 -34
  111. contentctl/templates/stories/cobalt_strike.yml +1 -0
  112. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/METADATA +5 -4
  113. contentctl-5.0.0.dist-info/RECORD +168 -0
  114. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/WHEEL +1 -1
  115. contentctl/actions/initialize_old.py +0 -245
  116. contentctl/objects/event_source.py +0 -11
  117. contentctl/objects/observable.py +0 -37
  118. contentctl/output/detection_writer.py +0 -28
  119. contentctl/output/new_content_yml_output.py +0 -56
  120. contentctl/output/yml_output.py +0 -66
  121. contentctl-4.4.7.dist-info/RECORD +0 -173
  122. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/LICENSE.md +0 -0
  123. {contentctl-4.4.7.dist-info → contentctl-5.0.0.dist-info}/entry_points.txt +0 -0
contentctl/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = '0.1.0'
+__version__ = "0.1.0"
contentctl/actions/build.py CHANGED
@@ -1,91 +1,136 @@
-import sys
 import shutil
-import os
 
 from dataclasses import dataclass
 
-from contentctl.objects.enums import SecurityContentProduct, SecurityContentType
-from contentctl.input.director import Director, DirectorOutputDto
+from contentctl.input.director import DirectorOutputDto
 from contentctl.output.conf_output import ConfOutput
 from contentctl.output.conf_writer import ConfWriter
 from contentctl.output.api_json_output import ApiJsonOutput
 from contentctl.output.data_source_writer import DataSourceWriter
-from contentctl.objects.lookup import Lookup
+from contentctl.objects.lookup import CSVLookup, Lookup_Type
 import pathlib
 import json
 import datetime
-from typing import Union
+import uuid
 
 from contentctl.objects.config import build
 
+
 @dataclass(frozen=True)
 class BuildInputDto:
     director_output_dto: DirectorOutputDto
-    config:build
+    config: build
 
 
 class Build:
-
-
-
     def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
         if input_dto.config.build_app:
-
-            updated_conf_files:set[pathlib.Path] = set()
+            updated_conf_files: set[pathlib.Path] = set()
             conf_output = ConfOutput(input_dto.config)
 
-            # Construct a special lookup whose CSV is created at runtime and
-            # written directly into the output folder. It is created with model_construct,
-            # not model_validate, because the CSV does not exist yet.
-            data_sources_lookup_csv_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.csv"
-            DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
-            input_dto.director_output_dto.addContentToDictMappings(Lookup.model_construct(description= "A lookup file that will contain the data source objects for detections.",
-                                                                                          filename=data_sources_lookup_csv_path,
-                                                                                          name="data_sources"))
+            # Construct a path to a YML that does not actually exist.
+            # We mock this "fake" path since the YML does not exist.
+            # This ensures the checking for the existence of the CSV is correct
+            data_sources_fake_yml_path = (
+                input_dto.config.getPackageDirectoryPath()
+                / "lookups"
+                / "data_sources.yml"
+            )
 
+            # Construct a special lookup whose CSV is created at runtime and
+            # written directly into the lookups folder. We will delete this after a build,
+            # assuming that it is successful.
+            data_sources_lookup_csv_path = (
+                input_dto.config.getPackageDirectoryPath()
+                / "lookups"
+                / "data_sources.csv"
+            )
+
+            DataSourceWriter.writeDataSourceCsv(
+                input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path
+            )
+            input_dto.director_output_dto.addContentToDictMappings(
+                CSVLookup.model_construct(
+                    name="data_sources",
+                    id=uuid.UUID("b45c1403-6e09-47b0-824f-cf6e44f15ac8"),
+                    version=1,
+                    author=input_dto.config.app.author_name,
+                    date=datetime.date.today(),
+                    description="A lookup file that will contain the data source objects for detections.",
+                    lookup_type=Lookup_Type.csv,
+                    file_path=data_sources_fake_yml_path,
+                )
+            )
             updated_conf_files.update(conf_output.writeHeaders())
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.detections, SecurityContentType.detections))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.stories, SecurityContentType.stories))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.baselines, SecurityContentType.baselines))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.investigations, SecurityContentType.investigations))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.lookups, SecurityContentType.lookups))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.macros, SecurityContentType.macros))
-            updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.dashboards, SecurityContentType.dashboards))
+            updated_conf_files.update(
+                conf_output.writeLookups(input_dto.director_output_dto.lookups)
+            )
+            updated_conf_files.update(
+                conf_output.writeDetections(input_dto.director_output_dto.detections)
+            )
+            updated_conf_files.update(
+                conf_output.writeStories(input_dto.director_output_dto.stories)
+            )
+            updated_conf_files.update(
+                conf_output.writeBaselines(input_dto.director_output_dto.baselines)
+            )
+            updated_conf_files.update(
+                conf_output.writeInvestigations(
+                    input_dto.director_output_dto.investigations
+                )
+            )
+            updated_conf_files.update(
+                conf_output.writeMacros(input_dto.director_output_dto.macros)
+            )
+            updated_conf_files.update(
+                conf_output.writeDashboards(input_dto.director_output_dto.dashboards)
+            )
             updated_conf_files.update(conf_output.writeMiscellaneousAppFiles())
-
 
-
-            #Ensure that the conf file we just generated/update is syntactically valid
+            # Ensure that the conf file we just generated/update is syntactically valid
             for conf_file in updated_conf_files:
-                ConfWriter.validateConfFile(conf_file)
-
+                ConfWriter.validateConfFile(conf_file)
+
             conf_output.packageApp()
 
-            print(f"Build of '{input_dto.config.app.title}' APP successful to {input_dto.config.getPackageFilePath()}")
-
+            print(
+                f"Build of '{input_dto.config.app.title}' APP successful to {input_dto.config.getPackageFilePath()}"
+            )
 
-        if input_dto.config.build_api:
+        if input_dto.config.build_api:
             shutil.rmtree(input_dto.config.getAPIPath(), ignore_errors=True)
             input_dto.config.getAPIPath().mkdir(parents=True)
-            api_json_output = ApiJsonOutput()
-            for output_objects, output_type in [(input_dto.director_output_dto.detections, SecurityContentType.detections),
-                                                (input_dto.director_output_dto.stories, SecurityContentType.stories),
-                                                (input_dto.director_output_dto.baselines, SecurityContentType.baselines),
-                                                (input_dto.director_output_dto.investigations, SecurityContentType.investigations),
-                                                (input_dto.director_output_dto.lookups, SecurityContentType.lookups),
-                                                (input_dto.director_output_dto.macros, SecurityContentType.macros),
-                                                (input_dto.director_output_dto.deployments, SecurityContentType.deployments)]:
-                api_json_output.writeObjects(output_objects, input_dto.config.getAPIPath(), input_dto.config.app.label, output_type )
-
-
-
-            #create version file for sse api
-            version_file = input_dto.config.getAPIPath()/"version.json"
-            utc_time = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0,tzinfo=None).isoformat()
-            version_dict = {"version":{"name":f"v{input_dto.config.app.version}","published_at": f"{utc_time}Z" }}
-            with open(version_file,"w") as version_f:
-                json.dump(version_dict,version_f)
-
-            print(f"Build of '{input_dto.config.app.title}' API successful to {input_dto.config.getAPIPath()}")
-
-        return input_dto.director_output_dto
+            api_json_output = ApiJsonOutput(
+                input_dto.config.getAPIPath(), input_dto.config.app.label
+            )
+            api_json_output.writeDetections(input_dto.director_output_dto.detections)
+            api_json_output.writeStories(input_dto.director_output_dto.stories)
+            api_json_output.writeBaselines(input_dto.director_output_dto.baselines)
+            api_json_output.writeInvestigations(
+                input_dto.director_output_dto.investigations
+            )
+            api_json_output.writeLookups(input_dto.director_output_dto.lookups)
+            api_json_output.writeMacros(input_dto.director_output_dto.macros)
+            api_json_output.writeDeployments(input_dto.director_output_dto.deployments)
+
+            # create version file for sse api
+            version_file = input_dto.config.getAPIPath() / "version.json"
+            utc_time = (
+                datetime.datetime.now(datetime.timezone.utc)
+                .replace(microsecond=0, tzinfo=None)
+                .isoformat()
+            )
+            version_dict = {
+                "version": {
+                    "name": f"v{input_dto.config.app.version}",
+                    "published_at": f"{utc_time}Z",
+                }
+            }
+            with open(version_file, "w") as version_f:
+                json.dump(version_dict, version_f)
+
+            print(
+                f"Build of '{input_dto.config.app.title}' API successful to {input_dto.config.getAPIPath()}"
+            )
+
+        return input_dto.director_output_dto
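The build step registers the runtime-generated data_sources lookup with CSVLookup.model_construct() rather than model_validate(), because (as the removed 4.4.7 comment explains) the backing CSV does not exist yet when the object is created. A minimal sketch of that pydantic v2 distinction, using a hypothetical FileBackedLookup model rather than the real contentctl class:

    from pydantic import BaseModel, FilePath

    # Hypothetical model for illustration only; not a contentctl class.
    class FileBackedLookup(BaseModel):
        name: str
        file_path: FilePath  # must point at an existing file when validated

    # model_validate() runs validators, so a not-yet-written CSV would fail here:
    # FileBackedLookup.model_validate(
    #     {"name": "data_sources", "file_path": "lookups/data_sources.csv"}
    # )

    # model_construct() skips validation entirely, which is why the build can
    # register the runtime-generated lookup before its CSV exists on disk.
    lookup = FileBackedLookup.model_construct(
        name="data_sources", file_path="lookups/data_sources.csv"
    )
    print(lookup.name)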
contentctl/actions/deploy_acs.py CHANGED
@@ -4,52 +4,57 @@ import pprint
 
 
 class Deploy:
-    def execute(self, config: deploy_acs, appinspect_token:str) -> None:
-
-        #The following common headers are used by both Clasic and Victoria
+    def execute(self, config: deploy_acs, appinspect_token: str) -> None:
+        # The following common headers are used by both Clasic and Victoria
         headers = {
-            'Authorization': f'Bearer {config.splunk_cloud_jwt_token}',
-            'ACS-Legal-Ack': 'Y'
+            "Authorization": f"Bearer {config.splunk_cloud_jwt_token}",
+            "ACS-Legal-Ack": "Y",
         }
         try:
-
-            with open(config.getPackageFilePath(include_version=False),'rb') as app_data:
-                #request_data = app_data.read()
+            with open(
+                config.getPackageFilePath(include_version=False), "rb"
+            ) as app_data:
+                # request_data = app_data.read()
                 if config.stack_type == StackType.classic:
                     # Classic instead uses a form to store token and package
                     # https://docs.splunk.com/Documentation/SplunkCloud/9.1.2308/Config/ManageApps#Manage_private_apps_using_the_ACS_API_on_Classic_Experience
                     address = f"https://admin.splunk.com/{config.splunk_cloud_stack}/adminconfig/v2/apps"
-
-                    form_data = {
-                        'token': (None, appinspect_token),
-                        'package': app_data
-                    }
-                    res = post(address, headers=headers, files = form_data)
+
+                    form_data = {"token": (None, appinspect_token), "package": app_data}
+                    res = post(address, headers=headers, files=form_data)
                 elif config.stack_type == StackType.victoria:
                     # Victoria uses the X-Splunk-Authorization Header
                     # It also uses --data-binary for the app content
                     # https://docs.splunk.com/Documentation/SplunkCloud/9.1.2308/Config/ManageApps#Manage_private_apps_using_the_ACS_API_on_Victoria_Experience
-                    headers.update({'X-Splunk-Authorization': appinspect_token})
+                    headers.update({"X-Splunk-Authorization": appinspect_token})
                     address = f"https://admin.splunk.com/{config.splunk_cloud_stack}/adminconfig/v2/apps/victoria"
                     res = post(address, headers=headers, data=app_data.read())
                 else:
                     raise Exception(f"Unsupported stack type: '{config.stack_type}'")
         except Exception as e:
-            raise Exception(f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{str(e)}")
-
+            raise Exception(
+                f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{str(e)}"
+            )
+
         try:
             # Request went through and completed, but may have returned a non-successful error code.
             # This likely includes a more verbose response describing the error
             res.raise_for_status()
             print(res.json())
-        except Exception as e:
+        except Exception:
             try:
                 error_text = res.json()
-            except Exception as e:
+            except Exception:
                 error_text = "No error text - request failed"
             formatted_error_text = pprint.pformat(error_text)
-            print("While this may not be the cause of your error, ensure that the uid and appid of your Private App does not exist in Splunkbase\n"
-                  "ACS cannot deploy and app with the same uid or appid as one that exists in Splunkbase.")
-            raise Exception(f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{formatted_error_text}")
-
-        print(f"'{config.getPackageFilePath(include_version=False)}' successfully installed to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS!")
+            print(
+                "While this may not be the cause of your error, ensure that the uid and appid of your Private App does not exist in Splunkbase\n"
+                "ACS cannot deploy and app with the same uid or appid as one that exists in Splunkbase."
+            )
+            raise Exception(
+                f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{formatted_error_text}"
+            )
+
+        print(
+            f"'{config.getPackageFilePath(include_version=False)}' successfully installed to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS!"
+        )
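The two ACS code paths above differ only in how the AppInspect token and package are attached. A minimal standalone sketch against the same endpoints shown in the diff, with placeholder stack and token values (not real credentials or contentctl code):

    import requests

    # Placeholder values for illustration; substitute a real stack name and tokens.
    stack = "example-stack"
    jwt_token = "<splunk-cloud-jwt>"
    appinspect_token = "<appinspect-token>"
    headers = {"Authorization": f"Bearer {jwt_token}", "ACS-Legal-Ack": "Y"}

    with open("dist/my_app.tar.gz", "rb") as app_data:
        # Classic Experience: multipart form carrying the AppInspect token and the package
        res = requests.post(
            f"https://admin.splunk.com/{stack}/adminconfig/v2/apps",
            headers=headers,
            files={"token": (None, appinspect_token), "package": app_data},
        )
        # Victoria Experience: token goes in a header, package is the raw request body
        # headers["X-Splunk-Authorization"] = appinspect_token
        # res = requests.post(
        #     f"https://admin.splunk.com/{stack}/adminconfig/v2/apps/victoria",
        #     headers=headers,
        #     data=app_data.read(),
        # )
    res.raise_for_status()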
contentctl/actions/detection_testing/DetectionTestingManager.py CHANGED
@@ -1,26 +1,29 @@
-from typing import List,Union
-from contentctl.objects.config import test, test_servers, Container,Infrastructure
-from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import DetectionTestingInfrastructure
-from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructureContainer import DetectionTestingInfrastructureContainer
-from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructureServer import DetectionTestingInfrastructureServer
-from urllib.parse import urlparse
-from copy import deepcopy
-from contentctl.objects.enums import DetectionTestingTargetInfrastructure
+from typing import List, Union
+from contentctl.objects.config import test, test_servers, Container, Infrastructure
+from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
+    DetectionTestingInfrastructure,
+)
+from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructureContainer import (
+    DetectionTestingInfrastructureContainer,
+)
+from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructureServer import (
+    DetectionTestingInfrastructureServer,
+)
 import signal
 import datetime
+
 # from queue import Queue
 from dataclasses import dataclass
+
 # import threading
-import ctypes
 from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
-    DetectionTestingInfrastructure,
     DetectionTestingManagerOutputDto,
 )
 from contentctl.actions.detection_testing.views.DetectionTestingView import (
     DetectionTestingView,
 )
 from contentctl.objects.enums import PostTestBehavior
-from pydantic import BaseModel, Field
+from pydantic import BaseModel
 from contentctl.objects.detection import Detection
 import concurrent.futures
 import docker
@@ -28,7 +31,7 @@ import docker
 
 @dataclass(frozen=False)
 class DetectionTestingManagerInputDto:
-    config: Union[test,test_servers]
+    config: Union[test, test_servers]
     detections: List[Detection]
     views: list[DetectionTestingView]
 
@@ -65,15 +68,18 @@ class DetectionTestingManager(BaseModel):
             print("*******************************")
 
        signal.signal(signal.SIGINT, sigint_handler)
-
-        with concurrent.futures.ThreadPoolExecutor(
-            max_workers=len(self.input_dto.config.test_instances),
-        ) as instance_pool, concurrent.futures.ThreadPoolExecutor(
-            max_workers=len(self.input_dto.views)
-        ) as view_runner, concurrent.futures.ThreadPoolExecutor(
-            max_workers=len(self.input_dto.config.test_instances),
-        ) as view_shutdowner:
 
+        with (
+            concurrent.futures.ThreadPoolExecutor(
+                max_workers=len(self.input_dto.config.test_instances),
+            ) as instance_pool,
+            concurrent.futures.ThreadPoolExecutor(
+                max_workers=len(self.input_dto.views)
+            ) as view_runner,
+            concurrent.futures.ThreadPoolExecutor(
+                max_workers=len(self.input_dto.config.test_instances),
+            ) as view_shutdowner,
+        ):
             # Start all the views
             future_views = {
                 view_runner.submit(view.setup): view for view in self.input_dto.views
@@ -87,7 +93,7 @@
             # Wait for all instances to be set up
             for future in concurrent.futures.as_completed(future_instances_setup):
                 try:
-                    result = future.result()
+                    future.result()
                 except Exception as e:
                     self.output_dto.terminate = True
                     print(f"Error setting up container: {str(e)}")
@@ -102,7 +108,7 @@
             # Wait for execution to finish
             for future in concurrent.futures.as_completed(future_instances_execute):
                 try:
-                    result = future.result()
+                    future.result()
                 except Exception as e:
                     self.output_dto.terminate = True
                     print(f"Error running in container: {str(e)}")
@@ -115,34 +121,43 @@
             }
             for future in concurrent.futures.as_completed(future_views_shutdowner):
                 try:
-                    result = future.result()
+                    future.result()
                 except Exception as e:
                     print(f"Error stopping view: {str(e)}")
 
             # Wait for original view-related threads to complete
             for future in concurrent.futures.as_completed(future_views):
                 try:
-                    result = future.result()
+                    future.result()
                 except Exception as e:
                     print(f"Error running container: {str(e)}")
 
         return self.output_dto
 
     def create_DetectionTestingInfrastructureObjects(self):
-        #Make sure that, if we need to, we pull the appropriate container
+        # Make sure that, if we need to, we pull the appropriate container
        for infrastructure in self.input_dto.config.test_instances:
-            if (isinstance(self.input_dto.config, test) and isinstance(infrastructure, Container)):
+            if isinstance(self.input_dto.config, test) and isinstance(
+                infrastructure, Container
+            ):
                 try:
                     client = docker.from_env()
-                except Exception as e:
-                    raise Exception("Unable to connect to docker. Are you sure that docker is running on this host?")
+                except Exception:
+                    raise Exception(
+                        "Unable to connect to docker. Are you sure that docker is running on this host?"
+                    )
                 try:
-
-                    parts = self.input_dto.config.container_settings.full_image_path.split(':')
+                    parts = (
+                        self.input_dto.config.container_settings.full_image_path.split(
+                            ":"
+                        )
+                    )
                     if len(parts) != 2:
-                        raise Exception(f"Expected to find a name:tag in {self.input_dto.config.container_settings.full_image_path}, "
-                                        f"but instead found {parts}. Note that this path MUST include the tag, which is separated by ':'")
-
+                        raise Exception(
+                            f"Expected to find a name:tag in {self.input_dto.config.container_settings.full_image_path}, "
+                            f"but instead found {parts}. Note that this path MUST include the tag, which is separated by ':'"
+                        )
+
                     print(
                         f"Getting the latest version of the container image [{self.input_dto.config.container_settings.full_image_path}]...",
                         end="",
@@ -152,12 +167,15 @@ class DetectionTestingManager(BaseModel):
                     print("done!")
                     break
                 except Exception as e:
-                    raise Exception(f"Failed to pull docker container image [{self.input_dto.config.container_settings.full_image_path}]: {str(e)}")
+                    raise Exception(
+                        f"Failed to pull docker container image [{self.input_dto.config.container_settings.full_image_path}]: {str(e)}"
+                    )
 
         already_staged_container_files = False
         for infrastructure in self.input_dto.config.test_instances:
-
-            if (isinstance(self.input_dto.config, test) and isinstance(infrastructure, Container)):
+            if isinstance(self.input_dto.config, test) and isinstance(
+                infrastructure, Container
+            ):
                 # Stage the files in the apps dir so that they can be passed directly to
                 # subsequent containers. Do this here, instead of inside each container, to
                 # avoid duplicate downloads/moves/copies
@@ -167,18 +185,24 @@ class DetectionTestingManager(BaseModel):
 
                 self.detectionTestingInfrastructureObjects.append(
                     DetectionTestingInfrastructureContainer(
-                        global_config=self.input_dto.config, infrastructure=infrastructure, sync_obj=self.output_dto
+                        global_config=self.input_dto.config,
+                        infrastructure=infrastructure,
+                        sync_obj=self.output_dto,
                     )
                 )
 
-            elif (isinstance(self.input_dto.config, test_servers) and isinstance(infrastructure, Infrastructure)):
+            elif isinstance(self.input_dto.config, test_servers) and isinstance(
+                infrastructure, Infrastructure
+            ):
                 self.detectionTestingInfrastructureObjects.append(
                     DetectionTestingInfrastructureServer(
-                        global_config=self.input_dto.config, infrastructure=infrastructure, sync_obj=self.output_dto
+                        global_config=self.input_dto.config,
+                        infrastructure=infrastructure,
+                        sync_obj=self.output_dto,
                    )
                 )
 
             else:
-
-                raise Exception(f"Unsupported target infrastructure '{infrastructure}' and config type {self.input_dto.config}")
-
+                raise Exception(
+                    f"Unsupported target infrastructure '{infrastructure}' and config type {self.input_dto.config}"
+                )
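The executor setup above is functionally unchanged; the comma-chained with statement is simply rewritten in the parenthesized multi-item form (supported from Python 3.10). A minimal sketch of the two spellings, using ThreadPoolExecutor as in the diff:

    from concurrent.futures import ThreadPoolExecutor

    # Pre-3.10 spelling: context managers chained with commas on one logical line.
    with ThreadPoolExecutor(max_workers=2) as pool_a, ThreadPoolExecutor(max_workers=2) as pool_b:
        pass

    # 3.10+ spelling: the same managers wrapped in parentheses, one per line,
    # which is what the manager's executor setup now uses.
    with (
        ThreadPoolExecutor(max_workers=2) as pool_a,
        ThreadPoolExecutor(max_workers=2) as pool_b,
    ):
        pass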