contentctl 3.6.0__py3-none-any.whl → 4.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- contentctl/actions/build.py +89 -0
- contentctl/actions/detection_testing/DetectionTestingManager.py +48 -49
- contentctl/actions/detection_testing/GitService.py +148 -230
- contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py +14 -24
- contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructureContainer.py +43 -17
- contentctl/actions/detection_testing/views/DetectionTestingView.py +3 -2
- contentctl/actions/detection_testing/views/DetectionTestingViewFile.py +0 -8
- contentctl/actions/doc_gen.py +1 -1
- contentctl/actions/initialize.py +28 -65
- contentctl/actions/inspect.py +260 -0
- contentctl/actions/new_content.py +106 -13
- contentctl/actions/release_notes.py +168 -144
- contentctl/actions/reporting.py +24 -13
- contentctl/actions/test.py +39 -20
- contentctl/actions/validate.py +25 -48
- contentctl/contentctl.py +196 -754
- contentctl/enrichments/attack_enrichment.py +69 -19
- contentctl/enrichments/cve_enrichment.py +28 -13
- contentctl/helper/link_validator.py +24 -26
- contentctl/helper/utils.py +7 -3
- contentctl/input/director.py +139 -201
- contentctl/input/new_content_questions.py +63 -61
- contentctl/input/sigma_converter.py +1 -2
- contentctl/input/ssa_detection_builder.py +16 -7
- contentctl/input/yml_reader.py +4 -3
- contentctl/objects/abstract_security_content_objects/detection_abstract.py +487 -154
- contentctl/objects/abstract_security_content_objects/security_content_object_abstract.py +155 -51
- contentctl/objects/alert_action.py +40 -0
- contentctl/objects/atomic.py +212 -0
- contentctl/objects/baseline.py +44 -43
- contentctl/objects/baseline_tags.py +69 -20
- contentctl/objects/config.py +857 -125
- contentctl/objects/constants.py +0 -1
- contentctl/objects/correlation_search.py +1 -1
- contentctl/objects/data_source.py +2 -4
- contentctl/objects/deployment.py +61 -21
- contentctl/objects/deployment_email.py +2 -2
- contentctl/objects/deployment_notable.py +4 -4
- contentctl/objects/deployment_phantom.py +2 -2
- contentctl/objects/deployment_rba.py +3 -4
- contentctl/objects/deployment_scheduling.py +2 -3
- contentctl/objects/deployment_slack.py +2 -2
- contentctl/objects/detection.py +1 -5
- contentctl/objects/detection_tags.py +210 -119
- contentctl/objects/enums.py +312 -24
- contentctl/objects/integration_test.py +1 -1
- contentctl/objects/integration_test_result.py +0 -2
- contentctl/objects/investigation.py +62 -53
- contentctl/objects/investigation_tags.py +30 -6
- contentctl/objects/lookup.py +80 -31
- contentctl/objects/macro.py +29 -45
- contentctl/objects/mitre_attack_enrichment.py +29 -5
- contentctl/objects/observable.py +3 -7
- contentctl/objects/playbook.py +60 -30
- contentctl/objects/playbook_tags.py +45 -8
- contentctl/objects/security_content_object.py +1 -5
- contentctl/objects/ssa_detection.py +8 -4
- contentctl/objects/ssa_detection_tags.py +19 -26
- contentctl/objects/story.py +142 -44
- contentctl/objects/story_tags.py +46 -33
- contentctl/objects/unit_test.py +7 -2
- contentctl/objects/unit_test_attack_data.py +10 -19
- contentctl/objects/unit_test_baseline.py +1 -1
- contentctl/objects/unit_test_old.py +4 -3
- contentctl/objects/unit_test_result.py +5 -3
- contentctl/objects/unit_test_ssa.py +31 -0
- contentctl/output/api_json_output.py +202 -130
- contentctl/output/attack_nav_output.py +20 -9
- contentctl/output/attack_nav_writer.py +3 -3
- contentctl/output/ba_yml_output.py +3 -3
- contentctl/output/conf_output.py +125 -391
- contentctl/output/conf_writer.py +169 -31
- contentctl/output/jinja_writer.py +2 -2
- contentctl/output/json_writer.py +17 -5
- contentctl/output/new_content_yml_output.py +8 -7
- contentctl/output/svg_output.py +17 -27
- contentctl/output/templates/analyticstories_detections.j2 +8 -4
- contentctl/output/templates/analyticstories_investigations.j2 +1 -1
- contentctl/output/templates/analyticstories_stories.j2 +6 -6
- contentctl/output/templates/app.conf.j2 +2 -2
- contentctl/output/templates/app.manifest.j2 +2 -2
- contentctl/output/templates/detection_coverage.j2 +6 -8
- contentctl/output/templates/doc_detection_page.j2 +2 -2
- contentctl/output/templates/doc_detections.j2 +2 -2
- contentctl/output/templates/doc_stories.j2 +1 -1
- contentctl/output/templates/es_investigations_investigations.j2 +1 -1
- contentctl/output/templates/es_investigations_stories.j2 +1 -1
- contentctl/output/templates/header.j2 +2 -1
- contentctl/output/templates/macros.j2 +6 -10
- contentctl/output/templates/savedsearches_baselines.j2 +5 -5
- contentctl/output/templates/savedsearches_detections.j2 +36 -33
- contentctl/output/templates/savedsearches_investigations.j2 +4 -4
- contentctl/output/templates/transforms.j2 +4 -4
- contentctl/output/yml_writer.py +2 -2
- contentctl/templates/app_template/README.md +7 -0
- contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/nav/default.xml +1 -0
- contentctl/templates/app_template/lookups/mitre_enrichment.csv +638 -0
- contentctl/templates/deployments/{00_default_anomaly.yml → escu_default_configuration_anomaly.yml} +1 -2
- contentctl/templates/deployments/{00_default_baseline.yml → escu_default_configuration_baseline.yml} +1 -2
- contentctl/templates/deployments/{00_default_correlation.yml → escu_default_configuration_correlation.yml} +2 -2
- contentctl/templates/deployments/{00_default_hunting.yml → escu_default_configuration_hunting.yml} +2 -2
- contentctl/templates/deployments/{00_default_ttp.yml → escu_default_configuration_ttp.yml} +1 -2
- contentctl/templates/detections/anomalous_usage_of_7zip.yml +0 -1
- contentctl/templates/stories/cobalt_strike.yml +0 -1
- {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/METADATA +36 -15
- contentctl-4.0.2.dist-info/RECORD +168 -0
- contentctl/actions/detection_testing/DataManipulation.py +0 -149
- contentctl/actions/generate.py +0 -91
- contentctl/helper/config_handler.py +0 -75
- contentctl/input/baseline_builder.py +0 -66
- contentctl/input/basic_builder.py +0 -58
- contentctl/input/detection_builder.py +0 -370
- contentctl/input/investigation_builder.py +0 -42
- contentctl/input/new_content_generator.py +0 -95
- contentctl/input/playbook_builder.py +0 -68
- contentctl/input/story_builder.py +0 -106
- contentctl/objects/app.py +0 -214
- contentctl/objects/repo_config.py +0 -163
- contentctl/objects/test_config.py +0 -630
- contentctl/output/templates/macros_detections.j2 +0 -7
- contentctl/output/templates/splunk_app/README.md +0 -7
- contentctl-3.6.0.dist-info/RECORD +0 -176
- /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_story_detail.txt +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_summary.txt +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/README/essoc_usage_dashboard.txt +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/analytic_stories.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/app.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/commands.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/content-version.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/views/escu_summary.xml +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/data/ui/views/feedback.xml +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/distsearch.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/usage_searches.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/default/use_case_library.conf +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/metadata/default.meta +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIcon.png +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIconAlt.png +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIconAlt_2x.png +0 -0
- /contentctl/{output/templates/splunk_app → templates/app_template}/static/appIcon_2x.png +0 -0
- {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/LICENSE.md +0 -0
- {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/WHEEL +0 -0
- {contentctl-3.6.0.dist-info → contentctl-4.0.2.dist-info}/entry_points.txt +0 -0
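For readers who want to reproduce or explore this comparison locally, the following is a minimal sketch (not part of contentctl) that downloads both published wheels from PyPI and diffs a single module such as `contentctl/output/conf_output.py`. The exact wheel filenames and the `wheels/` output directory are assumptions.

```python
# Hypothetical reproduction of this comparison: fetch both published wheels
# and diff a single member such as contentctl/output/conf_output.py.
import difflib
import subprocess
import zipfile

for version in ("3.6.0", "4.0.2"):
    # Download the wheel from PyPI without installing it or its dependencies.
    subprocess.run(
        ["pip", "download", f"contentctl=={version}", "--no-deps",
         "--only-binary", ":all:", "-d", f"wheels/{version}"],
        check=True,
    )

def read_member(wheel_path: str, member: str) -> list[str]:
    # A wheel is a zip archive; read one file out of it as text lines.
    with zipfile.ZipFile(wheel_path) as wheel:
        return wheel.read(member).decode("utf-8").splitlines(keepends=True)

member = "contentctl/output/conf_output.py"
old = read_member("wheels/3.6.0/contentctl-3.6.0-py3-none-any.whl", member)
new = read_member("wheels/4.0.2/contentctl-4.0.2-py3-none-any.whl", member)
print("".join(difflib.unified_diff(old, new, fromfile=f"3.6.0/{member}", tofile=f"4.0.2/{member}")))
```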
contentctl/output/conf_output.py
CHANGED
```diff
@@ -14,86 +14,91 @@ import shutil
 import json
 from contentctl.output.conf_writer import ConfWriter
 from contentctl.objects.enums import SecurityContentType
-from contentctl.objects.config import
+from contentctl.objects.config import build
 from requests import Session, post, get
 from requests.auth import HTTPBasicAuth
-import pprint
-class ConfOutput:

-
-    config:
-    output_path: pathlib.Path
+class ConfOutput:
+    config: build


-    def __init__(self,
-        self.input_path = input_path
+    def __init__(self, config: build):
         self.config = config
-
-
-
-
-
+
+        #Create the build directory if it does not exist
+        config.getPackageDirectoryPath().parent.mkdir(parents=True, exist_ok=True)
+
+        #Remove the app path, if it exists
+        shutil.rmtree(config.getPackageDirectoryPath(), ignore_errors=True)
+
+        #Copy all the template files into the app
+        shutil.copytree(config.getAppTemplatePath(), config.getPackageDirectoryPath())

-    def getPackagePath(self, include_version:bool=False)->pathlib.Path:
-        if include_version:
-            return self.dist / f"{self.config.build.name}-{self.config.build.version}.tar.gz"
-        else:
-            return self.dist / f"{self.config.build.name}-latest.tar.gz"

-    def writeHeaders(self) ->
-
-
-
-
-
-
-
-
-
+    def writeHeaders(self) -> set[pathlib.Path]:
+        written_files:set[pathlib.Path] = set()
+        for output_app_path in ['default/analyticstories.conf',
+                                'default/savedsearches.conf',
+                                'default/collections.conf',
+                                'default/es_investigations.conf',
+                                'default/macros.conf',
+                                'default/transforms.conf',
+                                'default/workflow_actions.conf',
+                                'default/app.conf',
+                                'default/content-version.conf']:
+            written_files.add(ConfWriter.writeConfFileHeader(pathlib.Path(output_app_path),self.config))
+
+        return written_files
+
+
         #The contents of app.manifest are not a conf file, but json.
         #DO NOT write a header for this file type, simply create the file
-        with open(self.
+        with open(self.config.getPackageDirectoryPath() / pathlib.Path('app.manifest'), 'w') as f:
             pass


-    def writeAppConf(self):
-
-
-
+    def writeAppConf(self)->set[pathlib.Path]:
+        written_files:set[pathlib.Path] = set()
+        for output_app_path, template_name in [ ("default/app.conf", "app.conf.j2"),
+                                                ("default/content-version.conf", "content-version.j2")]:
+            written_files.add(ConfWriter.writeConfFile(pathlib.Path(output_app_path),
+                                                       template_name,
+                                                       self.config,
+                                                       [self.config.app]))
+
+        written_files.add(ConfWriter.writeManifestFile(pathlib.Path("app.manifest"),
+                                                       "app.manifest.j2",
+                                                       self.config,
+                                                       [self.config.app]))
+        return written_files

-
+
+    def writeObjects(self, objects: list, type: SecurityContentType = None) -> set[pathlib.Path]:
+        written_files:set[pathlib.Path] = set()
         if type == SecurityContentType.detections:
-
-
-
-
-            ConfWriter.writeConfFile(self.output_path/'default/analyticstories.conf',
-                                     'analyticstories_detections.j2',
-                                     self.config, objects)
-
-            ConfWriter.writeConfFile(self.output_path/'default/macros.conf',
-                                     'macros_detections.j2',
-                                     self.config, objects)
+            for output_app_path, template_name in [ ('default/savedsearches.conf', 'savedsearches_detections.j2'),
+                                                    ('default/analyticstories.conf', 'analyticstories_detections.j2')]:
+                written_files.add(ConfWriter.writeConfFile(pathlib.Path(output_app_path),
+                                                           template_name, self.config, objects))

         elif type == SecurityContentType.stories:
-            ConfWriter.writeConfFile(
-
-
+            written_files.add(ConfWriter.writeConfFile(pathlib.Path('default/analyticstories.conf'),
+                                                       'analyticstories_stories.j2',
+                                                       self.config, objects))

         elif type == SecurityContentType.baselines:
-            ConfWriter.writeConfFile(
-
-
+            written_files.add(ConfWriter.writeConfFile(pathlib.Path('default/savedsearches.conf'),
+                                                       'savedsearches_baselines.j2',
+                                                       self.config, objects))

         elif type == SecurityContentType.investigations:
-
-
-
-
-
-
-
-
+            for output_app_path, template_name in [ ('default/savedsearches.conf', 'savedsearches_investigations.j2'),
+                                                    ('default/analyticstories.conf', 'analyticstories_investigations.j2')]:
+                ConfWriter.writeConfFile(pathlib.Path(output_app_path),
+                                         template_name,
+                                         self.config,
+                                         objects)
+
             workbench_panels = []
             for investigation in objects:
                 if investigation.inputs:
@@ -101,66 +106,68 @@ class ConfOutput:
                     workbench_panels.append(investigation)
                     investigation.search = investigation.search.replace(">","&gt;")
                     investigation.search = investigation.search.replace("<","&lt;")
-                    ConfWriter.writeConfFileHeaderEmpty(
-                        self.output_path/f'default/data/ui/panels/workbench_panel_{response_file_name_xml}',
-                        self.config)

-
-
-
-
-
-
-
+
+                    ConfWriter.writeXmlFileHeader(pathlib.Path(f'default/data/ui/panels/workbench_panel_{response_file_name_xml}'),
+                                                  self.config)
+
+                    ConfWriter.writeXmlFile( pathlib.Path(f'default/data/ui/panels/workbench_panel_{response_file_name_xml}'),
+                                             'panel.j2',
+                                             self.config,[investigation.search])

-
-
+            for output_app_path, template_name in [ ('default/es_investigations.conf', 'es_investigations_investigations.j2'),
+                                                    ('default/workflow_actions.conf', 'workflow_actions.j2')]:
+                written_files.add( ConfWriter.writeConfFile(pathlib.Path(output_app_path),
+                                                            template_name,
+                                                            self.config,
+                                                            workbench_panels))

         elif type == SecurityContentType.lookups:
-
-
-
-
-
-
-                self.config, objects)
-
+            for output_app_path, template_name in [ ('default/collections.conf', 'collections.j2'),
+                                                    ('default/transforms.conf', 'transforms.j2')]:
+                written_files.add(ConfWriter.writeConfFile(pathlib.Path(output_app_path),
+                                                           template_name,
+                                                           self.config,
+                                                           objects))

-
-                raise(Exception(f"input_path is required for lookups, but received [{self.input_path}]"))
-
+
             #we want to copy all *.mlmodel files as well, not just csvs
-            files = list(glob.iglob(
-            lookup_folder = self.
-            if lookup_folder.exists():
-                # Remove it since we want to remove any previous lookups that are not
-                # currently part of the app
-                if lookup_folder.is_dir():
-                    shutil.rmtree(lookup_folder)
-                else:
-                    #it's a file, but there should not be a file called lookups
-                    lookup_folder.unlink()
+            files = list(glob.iglob(str(self.config.path/ 'lookups/*.csv'))) + list(glob.iglob(str(self.config.path / 'lookups/*.mlmodel')))
+            lookup_folder = self.config.getPackageDirectoryPath()/"lookups"

             # Make the new folder for the lookups
-
+            # This folder almost certainly already exists because mitre_enrichment.csv has been writtent here from the app template.
+            lookup_folder.mkdir(exist_ok=True)

             #Copy each lookup into the folder
             for lookup_name in files:
                 lookup_path = pathlib.Path(lookup_name)
                 if lookup_path.is_file():
-
-                    shutil.copy(lookup_path, lookup_target_path)
+                    shutil.copy(lookup_path, lookup_folder/lookup_path.name)
                 else:
                     raise(Exception(f"Error copying lookup/mlmodel file. Path {lookup_path} does not exist or is not a file."))

         elif type == SecurityContentType.macros:
-            ConfWriter.writeConfFile(
-
-
+            written_files.add(ConfWriter.writeConfFile(pathlib.Path('default/macros.conf'),
+                                                       'macros.j2',
+                                                       self.config, objects))
+
+        return written_files
+
+


-
+
+    def packageAppTar(self) -> None:
+
+        with tarfile.open(self.config.getPackageFilePath(include_version=True), "w:gz") as app_archive:
+            app_archive.add(self.config.getPackageDirectoryPath(), arcname=self.config.getPackageDirectoryPath().name)
+
+        shutil.copy2(self.config.getPackageFilePath(include_version=True),
+                     self.config.getPackageFilePath(include_version=False),
+                     follow_symlinks=False)
+
+    def packageAppSlim(self) -> None:


         # input_app_path = pathlib.Path(self.config.build.path_root)/f"{self.config.build.name}"
@@ -171,305 +178,32 @@ class ConfOutput:
         # shutil.copyfile(readme_file, input_app_path/readme_file.name)


-        # try:
-        #     import slim
-        #     use_slim = True
-
-        # except Exception as e:
-        #     print("Failed to import Splunk Packaging Toolkit (slim). slim requires Python<3.10. "
-        #           "Packaging app with tar instead. This should still work, but appinspect may catch "
-        #           "errors that otherwise would have been flagged by slim.")
-        #     use_slim = False
-
-        # if use_slim:
-        #     import slim
-        #     from slim.utils import SlimLogger
-        #     import logging
-        #     #In order to avoid significant output, only emit FATAL log messages
-        #     SlimLogger.set_level(logging.ERROR)
-        #     try:
-        #         slim.package(source=input_app_path, output_dir=pathlib.Path(self.config.build.path_root))
-        #     except SystemExit as e:
-        #         raise Exception(f"Error building package with slim: {str(e)}")
-        # else:
-        with tarfile.open(self.getPackagePath(include_version=True), "w:gz") as app_archive:
-            app_archive.add(self.output_path, arcname=os.path.basename(self.output_path))
-
-
-        if not self.output_path.exists():
-            raise (Exception(f"The expected output app path '{self.getPackagePath(include_version=True)}' does not exist"))
-
-        shutil.copy2(self.getPackagePath(include_version=True),
-                     self.getPackagePath(include_version=False),
-                     follow_symlinks=False)
-
-
-    def getElapsedTime(self, startTime:float)->datetime.timedelta:
-        return datetime.timedelta(seconds=round(timeit.default_timer() - startTime))
-
-    def deploy_via_acs(self, splunk_cloud_jwt_token:str, splunk_cloud_stack:str, appinspect_token:str, stack_type:str):
-        if stack_type not in ['victoria', 'classic']:
-            raise Exception(f"stack_type MUST be either 'classic' or 'victoria', NOT '{stack_type}'")
-
-
-        #The following common headers are used by both Clasic and Victoria
-        headers = {
-            'Authorization': f'Bearer {splunk_cloud_jwt_token}',
-            'ACS-Legal-Ack': 'Y'
-        }
         try:
-
-
-
-
-
-                address = f"https://admin.splunk.com/{splunk_cloud_stack}/adminconfig/v2/apps"
-
-                form_data = {
-                    'token': (None, appinspect_token),
-                    'package': app_data
-                }
-                res = post(address, headers=headers, files = form_data)
-            else:
-                # Victoria uses the X-Splunk-Authorization Header
-                # It also uses --data-binary for the app content
-                # https://docs.splunk.com/Documentation/SplunkCloud/9.1.2308/Config/ManageApps#Manage_private_apps_using_the_ACS_API_on_Victoria_Experience
-                headers.update({'X-Splunk-Authorization': appinspect_token})
-                address = f"https://admin.splunk.com/{splunk_cloud_stack}/adminconfig/v2/apps/victoria"
-                res = post(address, headers=headers, data=app_data.read())
-        except Exception as e:
-            raise Exception(f"Error installing to stack '{splunk_cloud_stack}' (stack_type='{stack_type}') via ACS:\n{str(e)}")
-
-        try:
-            # Request went through and completed, but may have returned a non-successful error code.
-            # This likely includes a more verbose response describing the error
-            res.raise_for_status()
-        except Exception as e:
+            import slim
+            from slim.utils import SlimLogger
+            import logging
+            #In order to avoid significant output, only emit FATAL log messages
+            SlimLogger.set_level(logging.ERROR)
             try:
-
-            except
-
-            formatted_error_text = pprint.pformat(error_text)
-            raise Exception(f"Error installing to stack '{splunk_cloud_stack}' (stack_type='{stack_type}') via ACS:\n{formatted_error_text}")
-
-        print(f"'{self.getPackagePath(include_version=False)}' successfully installed to stack '{splunk_cloud_stack}' (stack_type='{stack_type}') via ACS!")
-
-        return
-
-    def inspectAppAPI(self, username:str, password:str, stack_type:str)->str:
-        session = Session()
-        session.auth = HTTPBasicAuth(username, password)
-        if stack_type not in ['victoria', 'classic']:
-            raise Exception(f"stack_type MUST be either 'classic' or 'victoria', NOT '{stack_type}'")
+                slim.package(source=self.config.getPackageDirectoryPath(), output_dir=pathlib.Path(self.config.getBuildDir()))
+            except SystemExit as e:
+                raise Exception(f"Error building package with slim: {str(e)}")

-        APPINSPECT_API_LOGIN = "https://api.splunk.com/2.0/rest/login/splunk"

+        except Exception as e:
+            print("Failed to import Splunk Packaging Toolkit (slim). slim requires Python<3.10. "
+                  "Packaging app with tar instead. This should still work, but appinspect may catch "
+                  "errors that otherwise would have been flagged by slim.")
+            raise Exception(f"slim (splunk packaging toolkit) not installed: {str(e)}")


-        res = session.get(APPINSPECT_API_LOGIN)
-        #If login failed or other failure, raise an exception
-        res.raise_for_status()
-
-        authorization_bearer = res.json().get("data",{}).get("token",None)
-        APPINSPECT_API_VALIDATION_REQUEST = "https://appinspect.splunk.com/v1/app/validate"
-        headers = {
-            "Authorization": f"bearer {authorization_bearer}",
-            "Cache-Control": "no-cache"
-        }
-
-        package_path = self.getPackagePath(include_version=False)
-        if not package_path.is_file():
-            raise Exception(f"Cannot run Appinspect API on App '{self.config.build.title}' - "
-                            f"no package exists as expected path '{package_path}'.\nAre you "
-                            "trying to 'contentctl acs_deploy' the package BEFORE running 'contentctl build'?")
-
-        files = {
-            "app_package": open(package_path,"rb"),
-            "included_tags":(None,"cloud")
-        }
-
-        res = post(APPINSPECT_API_VALIDATION_REQUEST, headers=headers, files=files)
-
-        res.raise_for_status()
-
-        request_id = res.json().get("request_id",None)
-        APPINSPECT_API_VALIDATION_STATUS = f"https://appinspect.splunk.com/v1/app/validate/status/{request_id}?included_tags=private_{stack_type}"
-        headers = headers = {
-            "Authorization": f"bearer {authorization_bearer}"
-        }
-        startTime = timeit.default_timer()
-        # the first time, wait for 40 seconds. subsequent times, wait for less.
-        # this is because appinspect takes some time to return, so there is no sense
-        # checking many times when we know it will take at least 40 seconds to run.
-        iteration_wait_time = 40
-        while True:
-
-            res = get(APPINSPECT_API_VALIDATION_STATUS, headers=headers)
-            res.raise_for_status()
-            status = res.json().get("status",None)
-            if status in ["PROCESSING", "PREPARING"]:
-                print(f"[{self.getElapsedTime(startTime)}] Appinspect API is {status}...")
-                time.sleep(iteration_wait_time)
-                iteration_wait_time = 1
-                continue
-            elif status == "SUCCESS":
-                print(f"[{self.getElapsedTime(startTime)}] Appinspect API has finished!")
-                break
-            else:
-                raise Exception(f"Error - Unknown Appinspect API status '{status}'")
-

+    def packageApp(self, method=packageAppTar)->None:
+        return method(self)

-        #We have finished running appinspect, so get the report
-        APPINSPECT_API_REPORT = f"https://appinspect.splunk.com/v1/app/report/{request_id}?included_tags=private_{stack_type}"
-        #Get human-readable HTML report
-        headers = headers = {
-            "Authorization": f"bearer {authorization_bearer}",
-            "Content-Type": "text/html"
-        }
-        res = get(APPINSPECT_API_REPORT, headers=headers)
-        res.raise_for_status()
-        report_html = res.content
-
-        #Get JSON report for processing
-        headers = headers = {
-            "Authorization": f"bearer {authorization_bearer}",
-            "Content-Type": "application/json"
-        }
-        res = get(APPINSPECT_API_REPORT, headers=headers)
-        res.raise_for_status()
-        report_json = res.json()


-
-
-        with open(self.dist/f"{self.config.build.name}-{self.config.build.version}.appinspect_api_results.json", "w") as report:
-            json.dump(report_json, report)
-
-
-        self.parseAppinspectJsonLogFile(self.dist/f"{self.config.build.name}-{self.config.build.version}.appinspect_api_results.json")
-
-        return authorization_bearer
-
-    def parseAppinspectJsonLogFile(self, logfile_path:pathlib.Path,
-                                   status_types:list[str] = ["error", "failure", "manual_check", "warning"],
-                                   exception_types = ["error","failure","manual_check"] )->None:
-        if not set(exception_types).issubset(set(status_types)):
-            raise Exception(f"Error - exception_types {exception_types} MUST be a subset of status_types {status_types}, but it is not")
-        with open(logfile_path, "r+") as logfile:
-            j = json.load(logfile)
-            #Seek back to the beginning of the file. We don't need to clear
-            #it sice we will always write AT LEAST the same number of characters
-            #back as we read (due to the addition of whitespace)
-            logfile.seek(0)
-            json.dump(j, logfile, indent=3, )
-
-        reports = j.get("reports", [])
-        if len(reports) != 1:
-            raise Exception("Expected to find one appinspect report but found 0")
-        verbose_errors = []
-
-        for group in reports[0].get("groups", []):
-            for check in group.get("checks",[]):
-                if check.get("result","") in status_types:
-                    verbose_errors.append(f" - {check.get('result','')} [{group.get('name','NONAME')}: {check.get('name', 'NONAME')}]")
-        verbose_errors.sort()
-
-        summary = j.get("summary", None)
-        if summary is None:
-            raise Exception("Missing summary from appinspect report")
-        msgs = []
-        generated_exception = False
-        for key in status_types:
-            if summary.get(key,0)>0:
-                msgs.append(f" - {summary.get(key,0)} {key}s")
-                if key in exception_types:
-                    generated_exception = True
-        if len(msgs)>0 or len(verbose_errors):
-            summary = '\n'.join(msgs)
-            details = '\n'.join(verbose_errors)
-            summary = f"{summary}\nDetails:\n{details}"
-            if generated_exception:
-                raise Exception(f"AppInspect found [{','.join(exception_types)}] that MUST be addressed to pass AppInspect API:\n{summary}")
-            else:
-                print(f"AppInspect found [{','.join(status_types)}] that MAY cause a failure during AppInspect API:\n{summary}")
-        else:
-            print("AppInspect was successful!")
-
-        return
-
-    def inspectAppCLI(self)-> None:
+    def getElapsedTime(self, startTime:float)->datetime.timedelta:
+        return datetime.timedelta(seconds=round(timeit.default_timer() - startTime))

-
-        raise Exception("Local spunk-appinspect Not Supported at this time (you may use the appinspect api). If you would like to locally inspect your app with"
-                        "Python 3.7, 3.8, or 3.9 (with limited support), please refer to:\n"
-                        "\t - https://dev.splunk.com/enterprise/docs/developapps/testvalidate/appinspect/useappinspectclitool/")
-        from splunk_appinspect.main import (
-            validate, MODE_OPTION, APP_PACKAGE_ARGUMENT, OUTPUT_FILE_OPTION,
-            LOG_FILE_OPTION, INCLUDED_TAGS_OPTION, EXCLUDED_TAGS_OPTION,
-            PRECERT_MODE, TEST_MODE)
-        except Exception as e:
-            print(e)
-            # print("******WARNING******")
-            # if sys.version_info.major == 3 and sys.version_info.minor > 9:
-            #     print("The package splunk-appinspect was not installed due to a current issue with the library on Python3.10+. "
-            #           "Please use the following commands to set up a virtualenvironment in a different folder so you may run appinspect manually (if desired):"
-            #           "\n\tpython3.9 -m venv .venv"
-            #           "\n\tsource .venv/bin/activate"
-            #           "\n\tpython3 -m pip install splunk-appinspect"
-            #           f"\n\tsplunk-appinspect inspect {self.getPackagePath(include_version=False).relative_to(pathlib.Path('.').absolute())} --mode precert")
-
-            # else:
-            #     print("splunk-appinspect is only compatable with Python3.9 at this time. Please see the following open issue here: https://github.com/splunk/contentctl/issues/28")
-            # print("******WARNING******")
-            return
-
-        # Note that all tags are available and described here:
-        # https://dev.splunk.com/enterprise/reference/appinspect/appinspecttagreference/
-        # By default, precert mode will run ALL checks. Explicitly included or excluding tags will
-        # change this behavior. To give the most thorough inspection, we leave these empty so that
-        # ALL checks are run
-        included_tags = []
-        excluded_tags = []
-
-        appinspect_output = self.dist/f"{self.config.build.name}-{self.config.build.version}.appinspect_cli_results.json"
-        appinspect_logging = self.dist/f"{self.config.build.name}-{self.config.build.version}.appinspect_cli_logging.log"
-        try:
-            arguments_list = [(APP_PACKAGE_ARGUMENT, str(self.getPackagePath(include_version=False)))]
-            options_list = []
-            options_list += [MODE_OPTION, TEST_MODE]
-            options_list += [OUTPUT_FILE_OPTION, str(appinspect_output)]
-            options_list += [LOG_FILE_OPTION, str(appinspect_logging)]
-
-            #If there are any tags defined, then include them here
-            for opt in included_tags:
-                options_list += [INCLUDED_TAGS_OPTION, opt]
-            for opt in excluded_tags:
-                options_list += [EXCLUDED_TAGS_OPTION, opt]
-
-            cmdline = options_list + [arg[1] for arg in arguments_list]
-            validate(cmdline)
-
-        except SystemExit as e:
-            if e.code == 0:
-                # The sys.exit called inside of appinspect validate closes stdin. We need to
-                # reopen it.
-                sys.stdin = open("/dev/stdin","r")
-                print(f"AppInspect passed! Please check [ {appinspect_output} , {appinspect_logging} ] for verbose information.")
-            else:
-                if sys.version.startswith('3.11') or sys.version.startswith('3.12'):
-                    raise Exception("At this time, AppInspect may fail on valid apps under Python>=3.11 with "
-                                    "the error 'global flags not at the start of the expression at position 1'. "
-                                    "If you encounter this error, please run AppInspect on a version of Python "
-                                    "<3.11. This issue is currently tracked. Please review the appinspect "
-                                    "report output above for errors.")
-                else:
-                    raise Exception("AppInspect Failure - Please review the appinspect report output above for errors.")
-        finally:
-            # appinspect outputs the log in json format, but does not format it to be easier
-            # to read (it is all in one line). Read back that file and write it so it
-            # is easier to understand
-
-            #Note that this may raise an exception itself!
-            self.parseAppinspectJsonLogFile(appinspect_output)
-
+
```
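Taken together, the 4.0.2 side of this diff leaves `ConfOutput` responsible only for assembling and packaging the app, while AppInspect and ACS deployment move elsewhere (see `contentctl/actions/inspect.py` in the listing above). The following is a minimal sketch, assuming a populated `build` config and lists of already-validated content objects, of how the methods visible here fit together; the `write_app` helper and its arguments are hypothetical, not contentctl's actual driver code.

```python
# Hypothetical driver for the 4.0.2 ConfOutput API shown in this diff.
import pathlib

from contentctl.objects.config import build
from contentctl.objects.enums import SecurityContentType
from contentctl.output.conf_output import ConfOutput


def write_app(config: build, detections: list, stories: list, macros: list) -> set[pathlib.Path]:
    # Constructing ConfOutput now also recreates the package directory from the app template.
    conf_output = ConfOutput(config)

    written: set[pathlib.Path] = set()
    written |= conf_output.writeHeaders()   # headers for every generated .conf file
    written |= conf_output.writeAppConf()   # app.conf, content-version.conf, app.manifest
    written |= conf_output.writeObjects(detections, SecurityContentType.detections)
    written |= conf_output.writeObjects(stories, SecurityContentType.stories)
    written |= conf_output.writeObjects(macros, SecurityContentType.macros)

    # packageApp defaults to packageAppTar; packageAppSlim is available when slim is installed.
    conf_output.packageApp()
    return written
```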