secator 0.3.6__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of secator might be problematic.
- secator/celery.py +14 -19
- secator/cli.py +181 -99
- secator/config.py +558 -122
- secator/decorators.py +5 -5
- secator/definitions.py +25 -126
- secator/exporters/gdrive.py +10 -10
- secator/hooks/mongodb.py +3 -4
- secator/installer.py +10 -6
- secator/output_types/vulnerability.py +3 -1
- secator/runners/_base.py +11 -9
- secator/runners/_helpers.py +52 -34
- secator/runners/command.py +26 -30
- secator/runners/scan.py +4 -7
- secator/runners/task.py +2 -1
- secator/runners/workflow.py +3 -6
- secator/tasks/_categories.py +95 -44
- secator/tasks/dnsxbrute.py +3 -2
- secator/tasks/ffuf.py +2 -2
- secator/tasks/httpx.py +4 -4
- secator/tasks/katana.py +5 -4
- secator/tasks/msfconsole.py +3 -4
- secator/tasks/nmap.py +95 -48
- secator/tasks/nuclei.py +4 -0
- secator/template.py +137 -0
- secator/utils.py +3 -7
- {secator-0.3.6.dist-info → secator-0.4.1.dist-info}/METADATA +12 -6
- {secator-0.3.6.dist-info → secator-0.4.1.dist-info}/RECORD +30 -29
- {secator-0.3.6.dist-info → secator-0.4.1.dist-info}/WHEEL +1 -1
- {secator-0.3.6.dist-info → secator-0.4.1.dist-info}/entry_points.txt +0 -0
- {secator-0.3.6.dist-info → secator-0.4.1.dist-info}/licenses/LICENSE +0 -0
secator/decorators.py
CHANGED
@@ -6,12 +6,13 @@ from rich_click.rich_click import _get_rich_console
 from rich_click.rich_group import RichGroup

 from secator.definitions import ADDONS_ENABLED, OPT_NOT_SUPPORTED
+from secator.config import CONFIG
 from secator.runners import Scan, Task, Workflow
 from secator.utils import (deduplicate, expand_input, get_command_category,
 	get_command_cls)

 RUNNER_OPTS = {
-	'output': {'type': str, 'default':
+	'output': {'type': str, 'default': None, 'help': 'Output options (-o table,json,csv,gdrive)', 'short': 'o'},
 	'workspace': {'type': str, 'default': 'default', 'help': 'Workspace', 'short': 'ws'},
 	'json': {'is_flag': True, 'default': False, 'help': 'Enable JSON mode'},
 	'orig': {'is_flag': True, 'default': False, 'help': 'Enable original output (no schema conversion)'},
@@ -282,10 +283,9 @@ def register_runner(cli_endpoint, config):
 		sync = True
 	else:
 		sync = False
-
-
-
-	if CELERY_BROKER_URL:
+		broker_protocol = CONFIG.celery.broker_url.split('://')[0]
+		backend_protocol = CONFIG.celery.result_backend.split('://')[0]
+	if CONFIG.celery.broker_url:
 		if (broker_protocol == 'redis' or backend_protocol == 'redis') and not ADDONS_ENABLED['redis']:
 			_get_rich_console().print('[bold red]Missing `redis` addon: please run `secator install addons redis`[/].')
 			sys.exit(1)
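The CLI no longer reads CELERY_BROKER_URL from the environment: the broker and result backend URLs come from CONFIG.celery.*, and a redis:// scheme on either one now requires the redis addon. A minimal standalone sketch of that check (the CONFIG_* names below are placeholders standing in for the real config object, not secator API):

    import sys

    ADDONS_ENABLED = {'redis': False}                   # assumed addon state for the example
    CONFIG_BROKER_URL = 'redis://localhost:6379/0'      # stands in for CONFIG.celery.broker_url
    CONFIG_RESULT_BACKEND = 'redis://localhost:6379/1'  # stands in for CONFIG.celery.result_backend

    broker_protocol = CONFIG_BROKER_URL.split('://')[0]
    backend_protocol = CONFIG_RESULT_BACKEND.split('://')[0]
    if CONFIG_BROKER_URL:
        if (broker_protocol == 'redis' or backend_protocol == 'redis') and not ADDONS_ENABLED['redis']:
            print('Missing `redis` addon: please run `secator install addons redis`')
            sys.exit(1)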
secator/definitions.py
CHANGED
@@ -1,14 +1,11 @@
 #!/usr/bin/python

 import os
-import requests

-from dotenv import find_dotenv, load_dotenv
 from importlib.metadata import version

-from secator.
+from secator.config import CONFIG, ROOT_FOLDER

-load_dotenv(find_dotenv(usecwd=True), override=False)

 # Globals
 VERSION = version('secator')
@@ -22,63 +19,15 @@ ASCII = f"""
 freelabz.com
 """ # noqa: W605,W291

-#
-
-
-CONFIGS_FOLDER = LIB_FOLDER + '/configs'
-EXTRA_CONFIGS_FOLDER = os.environ.get('SECATOR_EXTRA_CONFIGS_FOLDER')
-BIN_FOLDER = os.environ.get('SECATOR_BIN_FOLDER', f'{os.path.expanduser("~")}/.local/bin')
-DATA_FOLDER = os.environ.get('SECATOR_DATA_FOLDER', f'{os.path.expanduser("~")}/.secator')
-REPORTS_FOLDER = os.environ.get('SECATOR_REPORTS_FOLDER', f'{DATA_FOLDER}/reports')
-WORDLISTS_FOLDER = os.environ.get('SECATOR_WORDLISTS_FOLDER', f'{DATA_FOLDER}/wordlists')
-SCRIPTS_FOLDER = f'{ROOT_FOLDER}/scripts'
-CVES_FOLDER = f'{DATA_FOLDER}/cves'
-PAYLOADS_FOLDER = f'{DATA_FOLDER}/payloads'
-REVSHELLS_FOLDER = f'{DATA_FOLDER}/revshells'
-TESTS_FOLDER = f'{ROOT_FOLDER}/tests'
-
-# Celery local fs folders
-CELERY_DATA_FOLDER = f'{DATA_FOLDER}/celery/data'
-CELERY_RESULTS_FOLDER = f'{DATA_FOLDER}/celery/results'
-
-# Environment variables
-DEBUG = int(os.environ.get('DEBUG', '0'))
-DEBUG_COMPONENT = os.environ.get('DEBUG_COMPONENT', '').split(',')
-RECORD = bool(int(os.environ.get('RECORD', 0)))
-CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'filesystem://')
-CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', f'file://{CELERY_RESULTS_FOLDER}')
-CELERY_BROKER_POOL_LIMIT = int(os.environ.get('CELERY_BROKER_POOL_LIMIT', 10))
-CELERY_BROKER_CONNECTION_TIMEOUT = float(os.environ.get('CELERY_BROKER_CONNECTION_TIMEOUT', 4.0))
-CELERY_BROKER_VISIBILITY_TIMEOUT = int(os.environ.get('CELERY_BROKER_VISIBILITY_TIMEOUT', 3600))
-CELERY_OVERRIDE_DEFAULT_LOGGING = bool(int(os.environ.get('CELERY_OVERRIDE_DEFAULT_LOGGING', 1)))
-GOOGLE_DRIVE_PARENT_FOLDER_ID = os.environ.get('GOOGLE_DRIVE_PARENT_FOLDER_ID')
-GOOGLE_CREDENTIALS_PATH = os.environ.get('GOOGLE_CREDENTIALS_PATH')
-GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
-
-# Defaults HTTP and Proxy settings
-DEFAULT_SOCKS5_PROXY = os.environ.get('SOCKS5_PROXY', "socks5://127.0.0.1:9050")
-DEFAULT_HTTP_PROXY = os.environ.get('HTTP_PROXY', "https://127.0.0.1:9080")
-DEFAULT_STORE_HTTP_RESPONSES = bool(int(os.environ.get('DEFAULT_STORE_HTTP_RESPONSES', 1)))
-DEFAULT_PROXYCHAINS_COMMAND = "proxychains"
-DEFAULT_FREEPROXY_TIMEOUT = 1 # seconds
-
-# Default worker settings
-DEFAULT_INPUT_CHUNK_SIZE = int(os.environ.get('DEFAULT_INPUT_CHUNK_SIZE', 1000))
-DEFAULT_STDIN_TIMEOUT = 1000 # seconds
+# Debug
+DEBUG = CONFIG.debug.level
+DEBUG_COMPONENT = CONFIG.debug.component.split(',')

 # Default tasks settings
 DEFAULT_HTTPX_FLAGS = os.environ.get('DEFAULT_HTTPX_FLAGS', '-td')
 DEFAULT_KATANA_FLAGS = os.environ.get('DEFAULT_KATANA_FLAGS', '-jc -js-crawl -known-files all -or -ob')
 DEFAULT_NUCLEI_FLAGS = os.environ.get('DEFAULT_NUCLEI_FLAGS', '-stats -sj -si 20 -hm -or')
 DEFAULT_FEROXBUSTER_FLAGS = os.environ.get('DEFAULT_FEROXBUSTER_FLAGS', '--auto-bail --no-state')
-DEFAULT_PROGRESS_UPDATE_FREQUENCY = int(os.environ.get('DEFAULT_PROGRESS_UPDATE_FREQUENCY', 60))
-DEFAULT_SKIP_CVE_SEARCH = bool(int(os.environ.get('DEFAULT_SKIP_CVE_SEARCH', 0)))
-
-# Default wordlists
-DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/fuzz-Bo0oM.txt')
-DEFAULT_HTTP_WORDLIST_URL = 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt'
-DEFAULT_DNS_WORDLIST = os.environ.get('DEFAULT_DNS_WORDLIST', f'{WORDLISTS_FOLDER}/combined_subdomains.txt')
-DEFAULT_DNS_WORDLIST_URL = 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt' # noqa: E501

 # Constants
 OPT_NOT_SUPPORTED = -1
@@ -154,80 +103,30 @@ WORDLIST = 'wordlist'
 WORDS = 'words'


-
-
-
-
-
-
-
-
-
-
-
-		wordlist_path = globals()[f'DEFAULT_{wordlist}_WORDLIST']
-		wordlist_url = globals()[f'DEFAULT_{wordlist}_WORDLIST_URL']
-		if not os.path.exists(wordlist_path):
-			try:
-				console.print(f'[bold turquoise4]Downloading default {wordlist} wordlist {wordlist_path} ...[/] ', end='')
-				resp = requests.get(wordlist_url)
-				with open(wordlist_path, 'w') as f:
-					f.write(resp.text)
-				console.print('[bold green]ok.[/]')
-			except requests.exceptions.RequestException as e:
-				console.print(f'[bold green]failed ({type(e).__name__}).[/]')
-				pass
+def is_importable(module_to_import):
+	import importlib
+	try:
+		importlib.import_module(module_to_import)
+		return True
+	except ModuleNotFoundError:
+		return False
+	except Exception as e:
+		print(f'Failed trying to import {module_to_import}: {str(e)}')
+		return False
+

 ADDONS_ENABLED = {}

-
-
-
-
-
-
-
-
-
-
-	ADDONS_ENABLED['google'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['google'] = False
-
-# Check mongodb addon
-try:
-	import pymongo # noqa: F401
-	ADDONS_ENABLED['mongodb'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['mongodb'] = False
-
-# Check redis addon
-try:
-	import redis # noqa: F401
-	ADDONS_ENABLED['redis'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['redis'] = False
-
-# Check dev addon
-try:
-	import flake8 # noqa: F401
-	ADDONS_ENABLED['dev'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['dev'] = False
-
-# Check build addon
-try:
-	import hatch # noqa: F401
-	ADDONS_ENABLED['build'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['build'] = False
-
-# Check trace addon
-try:
-	import memray # noqa: F401
-	ADDONS_ENABLED['trace'] = True
-except ModuleNotFoundError:
-	ADDONS_ENABLED['trace'] = False
+for addon, module in [
+	('worker', 'eventlet'),
+	('google', 'gspread'),
+	('mongodb', 'pymongo'),
+	('redis', 'redis'),
+	('dev', 'flake8'),
+	('trace', 'memray'),
+	('build', 'hatch')
+]:
+	ADDONS_ENABLED[addon] = is_importable(module)

 # Check dev package
 if os.path.exists(f'{ROOT_FOLDER}/pyproject.toml'):
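The per-addon try/except blocks collapse into a single is_importable() helper driven by an (addon, module) table, with a new worker/eventlet entry. A short usage sketch, assuming the module is importable as shipped in the wheel:

    from secator.definitions import ADDONS_ENABLED

    # List addons whose optional dependency is not importable (the reporting here is illustrative;
    # secator itself errors out at the point where the addon is actually needed).
    missing = [name for name, enabled in ADDONS_ENABLED.items() if not enabled]
    if missing:
        print(f"Addons not installed: {', '.join(missing)}")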
secator/exporters/gdrive.py
CHANGED
@@ -2,7 +2,7 @@ import os
 import csv
 import yaml

-from secator.
+from secator.config import CONFIG
 from secator.exporters._base import Exporter
 from secator.rich import console
 from secator.utils import pluralize
@@ -16,20 +16,20 @@ class GdriveExporter(Exporter):
 		title = self.report.data['info']['title']
 		sheet_title = f'{self.report.data["info"]["title"]}_{self.report.timestamp}'
 		results = self.report.data['results']
-		if not
-			console.print(':file_cabinet: Missing
+		if not CONFIG.addons.google.credentials_path:
+			console.print(':file_cabinet: Missing CONFIG.addons.google.credentials_path to save to Google Sheets', style='red')
 			return
-		if not
-			console.print(':file_cabinet: Missing
+		if not CONFIG.addons.google.drive_parent_folder_id:
+			console.print(':file_cabinet: Missing CONFIG.addons.google.drive_parent_folder_id to save to Google Sheets.', style='red') # noqa: E501
 			return
-		client = gspread.service_account(
+		client = gspread.service_account(CONFIG.addons.google.credentials_path)

 		# Create workspace folder if it doesn't exist
-		folder_id = self.get_folder_by_name(ws, parent_id=
+		folder_id = self.get_folder_by_name(ws, parent_id=CONFIG.addons.google.drive_parent_folder_id)
 		if ws and not folder_id:
 			folder_id = self.create_folder(
 				folder_name=ws,
-				parent_id=
+				parent_id=CONFIG.addons.google.drive_parent_folder_id)

 		# Create worksheet
 		sheet = client.create(title, folder_id=folder_id)
@@ -84,7 +84,7 @@ class GdriveExporter(Exporter):
 	def create_folder(self, folder_name, parent_id=None):
 		from googleapiclient.discovery import build
 		from google.oauth2 import service_account
-		creds = service_account.Credentials.from_service_account_file(
+		creds = service_account.Credentials.from_service_account_file(CONFIG.addons.google.credentials_path)
 		service = build('drive', 'v3', credentials=creds)
 		body = {
 			'name': folder_name,
@@ -98,7 +98,7 @@ class GdriveExporter(Exporter):
 	def list_folders(self, parent_id):
 		from googleapiclient.discovery import build
 		from google.oauth2 import service_account
-		creds = service_account.Credentials.from_service_account_file(
+		creds = service_account.Credentials.from_service_account_file(CONFIG.addons.google.credentials_path)
 		service = build('drive', 'v3', credentials=creds)
 		driveid = service.files().get(fileId='root').execute()['id']
 		response = service.files().list(
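The exporter now reads its Google settings from CONFIG.addons.google.* instead of GOOGLE_* environment variables. A sketch of the guarded client setup, assuming the google addon (gspread) is installed; the helper name is hypothetical:

    import gspread
    from secator.config import CONFIG

    def get_gspread_client():
        # Both settings must be set before any Google API call is attempted
        if not CONFIG.addons.google.credentials_path:
            raise RuntimeError('Missing addons.google.credentials_path')
        if not CONFIG.addons.google.drive_parent_folder_id:
            raise RuntimeError('Missing addons.google.drive_parent_folder_id')
        # gspread accepts the path to a service-account JSON file
        return gspread.service_account(CONFIG.addons.google.credentials_path)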
secator/hooks/mongodb.py
CHANGED
@@ -1,12 +1,11 @@
 import logging
-import os
 import time

 import pymongo
 from bson.objectid import ObjectId
 from celery import shared_task

-from secator.
+from secator.config import CONFIG
 from secator.output_types import OUTPUT_TYPES
 from secator.runners import Scan, Task, Workflow
 from secator.utils import debug, escape_mongodb_url
@@ -14,8 +13,8 @@ from secator.utils import debug, escape_mongodb_url
 # import gevent.monkey
 # gevent.monkey.patch_all()

-MONGODB_URL =
-MONGODB_UPDATE_FREQUENCY =
+MONGODB_URL = CONFIG.addons.mongodb.url
+MONGODB_UPDATE_FREQUENCY = CONFIG.addons.mongodb.update_frequency
 MAX_POOL_SIZE = 100

 logger = logging.getLogger(__name__)
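The hook's connection settings now come from CONFIG.addons.mongodb.* rather than environment variables. An illustrative client construction under those settings (not copied from the hook itself):

    import pymongo
    from secator.config import CONFIG

    MAX_POOL_SIZE = 100  # unchanged module constant

    client = pymongo.MongoClient(CONFIG.addons.mongodb.url, maxPoolSize=MAX_POOL_SIZE)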
secator/installer.py
CHANGED
@@ -11,7 +11,7 @@ from rich.table import Table

 from secator.rich import console
 from secator.runners import Command
-from secator.
+from secator.config import CONFIG


 class ToolInstaller:
@@ -95,7 +95,7 @@ class GithubInstaller:

 		# Download and unpack asset
 		console.print(f'Found release URL: {download_url}')
-		cls._download_and_unpack(download_url,
+		cls._download_and_unpack(download_url, CONFIG.dirs.bin, repo)
 		return True

 	@classmethod
@@ -113,8 +113,8 @@ class GithubInstaller:
 		owner, repo = tuple(github_handle.split('/'))
 		url = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
 		headers = {}
-		if
-			headers['Authorization'] = f'Bearer {
+		if CONFIG.cli.github_token:
+			headers['Authorization'] = f'Bearer {CONFIG.cli.github_token}'
 		try:
 			response = requests.get(url, headers=headers, timeout=5)
 			response.raise_for_status()
@@ -281,8 +281,10 @@ def get_version_info(name, version_flag=None, github_handle=None, version=None):
 		info['version'] = version

 	# Get latest version
-	latest_version =
-
+	latest_version = None
+	if not CONFIG.offline_mode:
+		latest_version = GithubInstaller.get_latest_version(github_handle)
+	info['latest_version'] = latest_version

 	if location:
 		info['installed'] = True
@@ -295,6 +297,8 @@ def get_version_info(name, version_flag=None, github_handle=None, version=None):
 		info['status'] = 'current unknown'
 	elif not latest_version:
 		info['status'] = 'latest unknown'
+		if CONFIG.offline_mode:
+			info['status'] += ' [dim orange1]\[offline][/]'
 	else:
 		info['status'] = 'missing'

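The installer gains an offline mode: when CONFIG.offline_mode is set, the GitHub "latest release" lookup is skipped and the status string is annotated. A sketch of that behavior, assuming GithubInstaller.get_latest_version() keeps the signature used above:

    from secator.config import CONFIG
    from secator.installer import GithubInstaller

    def latest_version_or_none(github_handle):
        # No network calls at all in offline mode
        if CONFIG.offline_mode:
            return None
        return GithubInstaller.get_latest_version(github_handle)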
secator/output_types/vulnerability.py
CHANGED
@@ -15,7 +15,7 @@ class Vulnerability(OutputType):
 	id: str = ''
 	matched_at: str = ''
 	ip: str = field(default='', compare=False)
-	confidence:
+	confidence: str = 'low'
 	severity: str = 'unknown'
 	cvss_score: float = 0
 	tags: List[str] = field(default_factory=list)
@@ -85,6 +85,8 @@ class Vulnerability(OutputType):
 			s += f' \[[cyan]{tags_str}[/]]'
 		if data:
 			s += f' \[[yellow]{str(data)}[/]]'
+		if self.confidence == 'low':
+			s = f'[dim]{s}[/]'
 		return rich_to_ansi(s)

 	# def __gt__(self, other):
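Vulnerability gains a confidence field (default 'low'), and low-confidence findings are dimmed when rendered. A standalone sketch of the dimming, using rich directly instead of secator's rich_to_ansi helper; the sample finding text is made up:

    from rich.console import Console

    console = Console(force_terminal=True)

    def render(line, confidence):
        # Mirror the __repr__ change: wrap low-confidence findings in [dim] markup
        if confidence == 'low':
            line = f'[dim]{line}[/]'
        with console.capture() as capture:
            console.print(line, end='')
        return capture.get()

    print(render('[magenta]CVE-2023-1234[/] http://example.com', 'low'))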
secator/runners/_base.py
CHANGED
@@ -14,7 +14,8 @@ from rich.panel import Panel
 from rich.progress import Progress as RichProgress
 from rich.progress import SpinnerColumn, TextColumn, TimeElapsedColumn

-from secator.definitions import DEBUG
+from secator.definitions import DEBUG
+from secator.config import CONFIG
 from secator.output_types import OUTPUT_TYPES, OutputType, Progress
 from secator.report import Report
 from secator.rich import console, console_stdout
@@ -48,7 +49,7 @@ class Runner:
 	"""Runner class.

 	Args:
-		config (secator.config.
+		config (secator.config.TemplateLoader): Loaded config.
 		targets (list): List of targets to run task on.
 		results (list): List of existing results to re-use.
 		workspace_name (str): Workspace name.
@@ -109,7 +110,7 @@
 		self.celery_result = None

 		# Determine report folder
-		default_reports_folder_base = f'{
+		default_reports_folder_base = f'{CONFIG.dirs.reports}/{self.workspace_name}/{self.config.type}s'
 		_id = get_task_folder_id(default_reports_folder_base)
 		self.reports_folder = f'{default_reports_folder_base}/{_id}'

@@ -391,14 +392,14 @@

 	def resolve_exporters(self):
 		"""Resolve exporters from output options."""
-		output = self.run_opts.get('output'
-		if output
-			return self.default_exporters
-		elif output is False:
+		output = self.run_opts.get('output') or self.default_exporters
+		if not output or output in ['false', 'False']:
 			return []
+		if isinstance(output, str):
+			output = output.split(',')
 		exporters = [
 			import_dynamic(f'secator.exporters.{o.capitalize()}Exporter', 'Exporter')
-			for o in output
+			for o in output
 			if o
 		]
 		return [e for e in exporters if e]
@@ -850,7 +851,8 @@

 		if item._type == 'progress' and item._source == self.config.name:
 			self.progress = item.percent
-
+			update_frequency = CONFIG.runners.progress_update_frequency
+			if self.last_updated_progress and (item._timestamp - self.last_updated_progress) < update_frequency:
 				return None
 			elif int(item.percent) in [0, 100]:
 				return None
secator/runners/_helpers.py
CHANGED
@@ -1,6 +1,10 @@
 import os

+import kombu
+import kombu.exceptions
+
 from secator.utils import deduplicate
+from secator.rich import console


 def run_extractors(results, opts, targets=[]):
@@ -80,20 +84,24 @@ def get_task_ids(result, ids=[]):
 	if result is None:
 		return

-
-
+	try:
+		if isinstance(result, GroupResult):
+			get_task_ids(result.parent, ids=ids)

-
-
-
+		elif isinstance(result, AsyncResult):
+			if result.id not in ids:
+				ids.append(result.id)

-
-
-
+		if hasattr(result, 'children') and result.children:
+			for child in result.children:
+				get_task_ids(child, ids=ids)

-
-
-
+		# Browse parent
+		if hasattr(result, 'parent') and result.parent:
+			get_task_ids(result.parent, ids=ids)
+	except kombu.exceptions.DecodeError as e:
+		console.print(f'[bold red]{str(e)}. Aborting get_task_ids.[/]')
+		return


 def get_task_data(task_id):
@@ -107,33 +115,43 @@ def get_task_data(task_id):
 	"""
 	from celery.result import AsyncResult
 	res = AsyncResult(task_id)
-	if not
+	if not res:
+		return
+	try:
+		args = res.args
+		info = res.info
+		state = res.state
+	except kombu.exceptions.DecodeError as e:
+		console.print(f'[bold red]{str(e)}. Aborting get_task_data.[/]')
 		return
-
-
-
-	data
-
-
-
-
-
-
-
-
-
+	if not (args and len(args) > 1):
+		return
+	task_name = args[1]
+	data = {
+		'id': task_id,
+		'name': task_name,
+		'state': state,
+		'chunk_info': '',
+		'count': 0,
+		'error': None,
+		'ready': False,
+		'descr': '',
+		'progress': 0,
+		'results': []
+	}
+
+	# Set ready flag
+	if state in ['FAILURE', 'SUCCESS', 'REVOKED']:
 		data['ready'] = True
-
-
-
-	data
-	data
-
+
+	# Set task data
+	if info and not isinstance(info, list):
+		data.update(info)
+	chunk = data.get('chunk')
+	chunk_count = data.get('chunk_count')
+	if chunk and chunk_count:
 		data['chunk_info'] = f'{chunk}/{chunk_count}'
-	data.update(res.info)
 	data['descr'] = data.pop('description', '')
-	# del data['results']
-	# del data['task_results']
 	return data

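get_task_ids() and get_task_data() are hardened against kombu DecodeError when a Celery result cannot be deserialized, and get_task_data() now builds its result dict defensively. A sketch of how a caller might poll it (the loop is illustrative; secator's runners have their own polling logic):

    import time

    from secator.runners._helpers import get_task_data

    def wait_for_task(task_id, interval=2):
        # Poll until the task reaches FAILURE, SUCCESS or REVOKED
        while True:
            data = get_task_data(task_id)
            if data and data['ready']:
                return data
            time.sleep(interval)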