secator 0.3.5__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Potentially problematic release.
This version of secator might be problematic.
- secator/celery.py +16 -21
- secator/cli.py +163 -81
- secator/config.py +555 -122
- secator/decorators.py +17 -10
- secator/definitions.py +4 -77
- secator/exporters/gdrive.py +10 -10
- secator/hooks/mongodb.py +3 -4
- secator/installer.py +10 -6
- secator/output_types/vulnerability.py +3 -1
- secator/runners/_base.py +12 -11
- secator/runners/_helpers.py +52 -34
- secator/runners/command.py +26 -30
- secator/runners/scan.py +4 -8
- secator/runners/task.py +2 -2
- secator/runners/workflow.py +3 -7
- secator/tasks/_categories.py +95 -44
- secator/tasks/dnsxbrute.py +3 -2
- secator/tasks/ffuf.py +2 -2
- secator/tasks/httpx.py +4 -4
- secator/tasks/katana.py +5 -4
- secator/tasks/msfconsole.py +3 -4
- secator/tasks/nmap.py +95 -48
- secator/tasks/nuclei.py +4 -0
- secator/tasks/searchsploit.py +0 -1
- secator/template.py +137 -0
- secator/utils.py +3 -7
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/METADATA +12 -6
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/RECORD +31 -30
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/WHEEL +1 -1
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/entry_points.txt +0 -0
- {secator-0.3.5.dist-info → secator-0.4.0.dist-info}/licenses/LICENSE +0 -0
secator/decorators.py
CHANGED
@@ -6,12 +6,13 @@ from rich_click.rich_click import _get_rich_console
 from rich_click.rich_group import RichGroup
 
 from secator.definitions import ADDONS_ENABLED, OPT_NOT_SUPPORTED
+from secator.config import CONFIG
 from secator.runners import Scan, Task, Workflow
 from secator.utils import (deduplicate, expand_input, get_command_category,
 	get_command_cls)
 
 RUNNER_OPTS = {
-	'output': {'type': str, 'default':…
+	'output': {'type': str, 'default': None, 'help': 'Output options (-o table,json,csv,gdrive)', 'short': 'o'},
 	'workspace': {'type': str, 'default': 'default', 'help': 'Workspace', 'short': 'ws'},
 	'json': {'is_flag': True, 'default': False, 'help': 'Enable JSON mode'},
 	'orig': {'is_flag': True, 'default': False, 'help': 'Enable original output (no schema conversion)'},
@@ -24,7 +25,6 @@ RUNNER_OPTS = {
 
 RUNNER_GLOBAL_OPTS = {
 	'sync': {'is_flag': True, 'help': 'Run tasks synchronously (automatic if no worker is alive)'},
-	'worker': {'is_flag': True, 'help': 'Run tasks in worker (automatic if worker is alive)'},
 	'proxy': {'type': str, 'help': 'HTTP proxy'},
 	'driver': {'type': str, 'help': 'Export real-time results. E.g: "mongodb"'}
 	# 'debug': {'type': int, 'default': 0, 'help': 'Debug mode'},
@@ -264,7 +264,6 @@ def register_runner(cli_endpoint, config):
 	def func(ctx, **opts):
 		opts.update(fmt_opts)
 		sync = opts['sync']
-		worker = opts['worker']
 		# debug = opts['debug']
 		ws = opts.pop('workspace')
 		driver = opts.pop('driver', '')
@@ -275,13 +274,21 @@ def register_runner(cli_endpoint, config):
 		# opts.update(unknown_opts)
 		targets = opts.pop(input_type)
 		targets = expand_input(targets)
-		if sync or show…
+		if sync or show:
 			sync = True
-…
-			sync = False
-		else:  # automatically run in worker if it's alive
+		else:
 			from secator.celery import is_celery_worker_alive
-…
+			worker_alive = is_celery_worker_alive()
+			if not worker_alive:
+				sync = True
+			else:
+				sync = False
+				broker_protocol = CONFIG.celery.broker_url.split('://')[0]
+				backend_protocol = CONFIG.celery.result_backend.split('://')[0]
+				if CONFIG.celery.broker_url:
+					if (broker_protocol == 'redis' or backend_protocol == 'redis') and not ADDONS_ENABLED['redis']:
+						_get_rich_console().print('[bold red]Missing `redis` addon: please run `secator install addons redis`[/].')
+						sys.exit(1)
 		opts['sync'] = sync
 		opts.update({
 			'print_item': not sync,
@@ -293,8 +300,8 @@ def register_runner(cli_endpoint, config):
 		# Build hooks from driver name
 		hooks = {}
 		if driver == 'mongodb':
-			if not ADDONS_ENABLED['…
-			_get_rich_console().print('[bold red]Missing…
+			if not ADDONS_ENABLED['mongodb']:
+				_get_rich_console().print('[bold red]Missing `mongodb` addon: please run `secator install addons mongodb`[/].')
 				sys.exit(1)
 			from secator.hooks.mongodb import MONGODB_HOOKS
 			hooks = MONGODB_HOOKS

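The block above replaces the removed --worker flag with auto-detection: unless sync or show forces synchronous mode, the CLI asks Celery whether a worker is alive, and requires the redis addon whenever the broker or result backend URL uses the redis scheme. A minimal worked example of that scheme check (the URLs are placeholders, not secator defaults):

# Illustration only: how the scheme of the Celery URLs decides whether the
# `redis` addon is needed (mirrors the check added in register_runner()).
broker_url = 'redis://localhost:6379/0'    # placeholder for CONFIG.celery.broker_url
result_backend = 'filesystem://'           # placeholder for CONFIG.celery.result_backend
broker_protocol = broker_url.split('://')[0]        # -> 'redis'
backend_protocol = result_backend.split('://')[0]   # -> 'filesystem'
needs_redis_addon = 'redis' in (broker_protocol, backend_protocol)  # -> True
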
secator/definitions.py
CHANGED
@@ -1,12 +1,11 @@
 #!/usr/bin/python
 
 import os
-import requests
 
 from dotenv import find_dotenv, load_dotenv
 from importlib.metadata import version
 
-from secator.…
+from secator.config import CONFIG, ROOT_FOLDER
 
 load_dotenv(find_dotenv(usecwd=True), override=False)
 
@@ -22,63 +21,15 @@ ASCII = f"""
 freelabz.com
 """ # noqa: W605,W291
 
-#…
-…
-…
-CONFIGS_FOLDER = LIB_FOLDER + '/configs'
-EXTRA_CONFIGS_FOLDER = os.environ.get('SECATOR_EXTRA_CONFIGS_FOLDER')
-BIN_FOLDER = os.environ.get('SECATOR_BIN_FOLDER', f'{os.path.expanduser("~")}/.local/bin')
-DATA_FOLDER = os.environ.get('SECATOR_DATA_FOLDER', f'{os.path.expanduser("~")}/.secator')
-REPORTS_FOLDER = os.environ.get('SECATOR_REPORTS_FOLDER', f'{DATA_FOLDER}/reports')
-WORDLISTS_FOLDER = os.environ.get('SECATOR_WORDLISTS_FOLDER', f'{DATA_FOLDER}/wordlists')
-SCRIPTS_FOLDER = f'{ROOT_FOLDER}/scripts'
-CVES_FOLDER = f'{DATA_FOLDER}/cves'
-PAYLOADS_FOLDER = f'{DATA_FOLDER}/payloads'
-REVSHELLS_FOLDER = f'{DATA_FOLDER}/revshells'
-TESTS_FOLDER = f'{ROOT_FOLDER}/tests'
-
-# Celery local fs folders
-CELERY_DATA_FOLDER = f'{DATA_FOLDER}/celery/data'
-CELERY_RESULTS_FOLDER = f'{DATA_FOLDER}/celery/results'
-
-# Environment variables
-DEBUG = int(os.environ.get('DEBUG', '0'))
-DEBUG_COMPONENT = os.environ.get('DEBUG_COMPONENT', '').split(',')
-RECORD = bool(int(os.environ.get('RECORD', 0)))
-CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'filesystem://')
-CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', f'file://{CELERY_RESULTS_FOLDER}')
-CELERY_BROKER_POOL_LIMIT = int(os.environ.get('CELERY_BROKER_POOL_LIMIT', 10))
-CELERY_BROKER_CONNECTION_TIMEOUT = float(os.environ.get('CELERY_BROKER_CONNECTION_TIMEOUT', 4.0))
-CELERY_BROKER_VISIBILITY_TIMEOUT = int(os.environ.get('CELERY_BROKER_VISIBILITY_TIMEOUT', 3600))
-CELERY_OVERRIDE_DEFAULT_LOGGING = bool(int(os.environ.get('CELERY_OVERRIDE_DEFAULT_LOGGING', 1)))
-GOOGLE_DRIVE_PARENT_FOLDER_ID = os.environ.get('GOOGLE_DRIVE_PARENT_FOLDER_ID')
-GOOGLE_CREDENTIALS_PATH = os.environ.get('GOOGLE_CREDENTIALS_PATH')
-GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
-
-# Defaults HTTP and Proxy settings
-DEFAULT_SOCKS5_PROXY = os.environ.get('SOCKS5_PROXY', "socks5://127.0.0.1:9050")
-DEFAULT_HTTP_PROXY = os.environ.get('HTTP_PROXY', "https://127.0.0.1:9080")
-DEFAULT_STORE_HTTP_RESPONSES = bool(int(os.environ.get('DEFAULT_STORE_HTTP_RESPONSES', 1)))
-DEFAULT_PROXYCHAINS_COMMAND = "proxychains"
-DEFAULT_FREEPROXY_TIMEOUT = 1 # seconds
-
-# Default worker settings
-DEFAULT_INPUT_CHUNK_SIZE = int(os.environ.get('DEFAULT_INPUT_CHUNK_SIZE', 1000))
-DEFAULT_STDIN_TIMEOUT = 1000 # seconds
+# Debug
+DEBUG = CONFIG.debug.level
+DEBUG_COMPONENT = CONFIG.debug.component.split(',')
 
 # Default tasks settings
 DEFAULT_HTTPX_FLAGS = os.environ.get('DEFAULT_HTTPX_FLAGS', '-td')
 DEFAULT_KATANA_FLAGS = os.environ.get('DEFAULT_KATANA_FLAGS', '-jc -js-crawl -known-files all -or -ob')
 DEFAULT_NUCLEI_FLAGS = os.environ.get('DEFAULT_NUCLEI_FLAGS', '-stats -sj -si 20 -hm -or')
 DEFAULT_FEROXBUSTER_FLAGS = os.environ.get('DEFAULT_FEROXBUSTER_FLAGS', '--auto-bail --no-state')
-DEFAULT_PROGRESS_UPDATE_FREQUENCY = int(os.environ.get('DEFAULT_PROGRESS_UPDATE_FREQUENCY', 60))
-DEFAULT_SKIP_CVE_SEARCH = bool(int(os.environ.get('DEFAULT_SKIP_CVE_SEARCH', 0)))
-
-# Default wordlists
-DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/fuzz-Bo0oM.txt')
-DEFAULT_HTTP_WORDLIST_URL = 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt'
-DEFAULT_DNS_WORDLIST = os.environ.get('DEFAULT_DNS_WORDLIST', f'{WORDLISTS_FOLDER}/combined_subdomains.txt')
-DEFAULT_DNS_WORDLIST_URL = 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt' # noqa: E501
 
 # Constants
 OPT_NOT_SUPPORTED = -1
@@ -154,30 +105,6 @@ WORDLIST = 'wordlist'
 WORDS = 'words'
 
 
-# Create all folders
-for folder in [BIN_FOLDER, DATA_FOLDER, REPORTS_FOLDER, WORDLISTS_FOLDER, CVES_FOLDER, PAYLOADS_FOLDER,
-	REVSHELLS_FOLDER, CELERY_DATA_FOLDER, CELERY_RESULTS_FOLDER]:
-	if not os.path.exists(folder):
-		console.print(f'[bold turquoise4]Creating folder {folder} ...[/] ', end='')
-		os.makedirs(folder)
-		console.print('[bold green]ok.[/]')
-
-
-# Download default wordlists
-for wordlist in ['HTTP', 'DNS']:
-	wordlist_path = globals()[f'DEFAULT_{wordlist}_WORDLIST']
-	wordlist_url = globals()[f'DEFAULT_{wordlist}_WORDLIST_URL']
-	if not os.path.exists(wordlist_path):
-		try:
-			console.print(f'[bold turquoise4]Downloading default {wordlist} wordlist {wordlist_path} ...[/] ', end='')
-			resp = requests.get(wordlist_url)
-			with open(wordlist_path, 'w') as f:
-				f.write(resp.text)
-			console.print('[bold green]ok.[/]')
-		except requests.exceptions.RequestException as e:
-			console.print(f'[bold green]failed ({type(e).__name__}).[/]')
-			pass
-
 ADDONS_ENABLED = {}
 
 # Check worker addon

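Nearly everything removed here moves behind the new CONFIG object from secator/config.py (the +555/-122 file in this release, not shown in this diff). Below is a minimal sketch of the dot-access pattern the other files in this diff rely on; the section and key names come from the hunks, and the values simply reuse the old defaults as placeholders:

# Sketch only, NOT secator's real config implementation: it just illustrates
# the CONFIG.<section>.<key> access style used throughout this release.
from types import SimpleNamespace

CONFIG = SimpleNamespace(
	debug=SimpleNamespace(level=0, component=''),
	dirs=SimpleNamespace(bin='~/.local/bin', reports='~/.secator/reports'),
	http=SimpleNamespace(
		socks5_proxy='socks5://127.0.0.1:9050',
		http_proxy='https://127.0.0.1:9080',
		proxychains_command='proxychains',
		freeproxy_timeout=1,
	),
	celery=SimpleNamespace(broker_url='filesystem://', result_backend=''),
	runners=SimpleNamespace(input_chunk_size=1000, progress_update_frequency=60),
	offline_mode=False,
)

DEBUG = CONFIG.debug.level        # replaces int(os.environ.get('DEBUG', '0'))
proxy = CONFIG.http.socks5_proxy  # replaces DEFAULT_SOCKS5_PROXY
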
secator/exporters/gdrive.py
CHANGED
@@ -2,7 +2,7 @@ import os
 import csv
 import yaml
 
-from secator.…
+from secator.config import CONFIG
 from secator.exporters._base import Exporter
 from secator.rich import console
 from secator.utils import pluralize
@@ -16,20 +16,20 @@ class GdriveExporter(Exporter):
 		title = self.report.data['info']['title']
 		sheet_title = f'{self.report.data["info"]["title"]}_{self.report.timestamp}'
 		results = self.report.data['results']
-		if not…
-			console.print(':file_cabinet: Missing…
+		if not CONFIG.addons.google.credentials_path:
+			console.print(':file_cabinet: Missing CONFIG.addons.google.credentials_path to save to Google Sheets', style='red')
 			return
-		if not…
-			console.print(':file_cabinet: Missing…
+		if not CONFIG.addons.google.drive_parent_folder_id:
+			console.print(':file_cabinet: Missing CONFIG.addons.google.drive_parent_folder_id to save to Google Sheets.', style='red') # noqa: E501
 			return
-		client = gspread.service_account(…
+		client = gspread.service_account(CONFIG.addons.google.credentials_path)
 
 		# Create workspace folder if it doesn't exist
-		folder_id = self.get_folder_by_name(ws, parent_id=…
+		folder_id = self.get_folder_by_name(ws, parent_id=CONFIG.addons.google.drive_parent_folder_id)
 		if ws and not folder_id:
 			folder_id = self.create_folder(
 				folder_name=ws,
-				parent_id=…
+				parent_id=CONFIG.addons.google.drive_parent_folder_id)
 
 		# Create worksheet
 		sheet = client.create(title, folder_id=folder_id)
@@ -84,7 +84,7 @@ class GdriveExporter(Exporter):
 	def create_folder(self, folder_name, parent_id=None):
 		from googleapiclient.discovery import build
 		from google.oauth2 import service_account
-		creds = service_account.Credentials.from_service_account_file(…
+		creds = service_account.Credentials.from_service_account_file(CONFIG.addons.google.credentials_path)
 		service = build('drive', 'v3', credentials=creds)
 		body = {
 			'name': folder_name,
@@ -98,7 +98,7 @@ class GdriveExporter(Exporter):
 	def list_folders(self, parent_id):
 		from googleapiclient.discovery import build
 		from google.oauth2 import service_account
-		creds = service_account.Credentials.from_service_account_file(…
+		creds = service_account.Credentials.from_service_account_file(CONFIG.addons.google.credentials_path)
 		service = build('drive', 'v3', credentials=creds)
 		driveid = service.files().get(fileId='root').execute()['id']
 		response = service.files().list(

secator/hooks/mongodb.py
CHANGED
@@ -1,12 +1,11 @@
 import logging
-import os
 import time
 
 import pymongo
 from bson.objectid import ObjectId
 from celery import shared_task
 
-from secator.…
+from secator.config import CONFIG
 from secator.output_types import OUTPUT_TYPES
 from secator.runners import Scan, Task, Workflow
 from secator.utils import debug, escape_mongodb_url
@@ -14,8 +13,8 @@ from secator.utils import debug, escape_mongodb_url
 # import gevent.monkey
 # gevent.monkey.patch_all()
 
-MONGODB_URL =…
-MONGODB_UPDATE_FREQUENCY =…
+MONGODB_URL = CONFIG.addons.mongodb.url
+MONGODB_UPDATE_FREQUENCY = CONFIG.addons.mongodb.update_frequency
 MAX_POOL_SIZE = 100
 
 logger = logging.getLogger(__name__)

secator/installer.py
CHANGED
@@ -11,7 +11,7 @@ from rich.table import Table
 
 from secator.rich import console
 from secator.runners import Command
-from secator.…
+from secator.config import CONFIG
 
 
 class ToolInstaller:
@@ -95,7 +95,7 @@ class GithubInstaller:
 
 		# Download and unpack asset
 		console.print(f'Found release URL: {download_url}')
-		cls._download_and_unpack(download_url,…
+		cls._download_and_unpack(download_url, CONFIG.dirs.bin, repo)
 		return True
 
 	@classmethod
@@ -113,8 +113,8 @@ class GithubInstaller:
 		owner, repo = tuple(github_handle.split('/'))
 		url = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
 		headers = {}
-		if…
-			headers['Authorization'] = f'Bearer {…
+		if CONFIG.cli.github_token:
+			headers['Authorization'] = f'Bearer {CONFIG.cli.github_token}'
 		try:
 			response = requests.get(url, headers=headers, timeout=5)
 			response.raise_for_status()
@@ -281,8 +281,10 @@ def get_version_info(name, version_flag=None, github_handle=None, version=None):
 		info['version'] = version
 
 	# Get latest version
-	latest_version =…
-…
+	latest_version = None
+	if not CONFIG.offline_mode:
+		latest_version = GithubInstaller.get_latest_version(github_handle)
+	info['latest_version'] = latest_version
 
 	if location:
 		info['installed'] = True
@@ -295,6 +297,8 @@ def get_version_info(name, version_flag=None, github_handle=None, version=None):
 			info['status'] = 'current unknown'
 		elif not latest_version:
 			info['status'] = 'latest unknown'
+			if CONFIG.offline_mode:
+				info['status'] += ' [dim orange1]\[offline][/]'
 	else:
 		info['status'] = 'missing'
 

secator/output_types/vulnerability.py
CHANGED
@@ -15,7 +15,7 @@ class Vulnerability(OutputType):
 	id: str = ''
 	matched_at: str = ''
 	ip: str = field(default='', compare=False)
-	confidence:…
+	confidence: str = 'low'
 	severity: str = 'unknown'
 	cvss_score: float = 0
 	tags: List[str] = field(default_factory=list)
@@ -85,6 +85,8 @@ class Vulnerability(OutputType):
 			s += f' \[[cyan]{tags_str}[/]]'
 		if data:
 			s += f' \[[yellow]{str(data)}[/]]'
+		if self.confidence == 'low':
+			s = f'[dim]{s}[/]'
 		return rich_to_ansi(s)
 
 	# def __gt__(self, other):

secator/runners/_base.py
CHANGED
@@ -14,7 +14,8 @@ from rich.panel import Panel
 from rich.progress import Progress as RichProgress
 from rich.progress import SpinnerColumn, TextColumn, TimeElapsedColumn
 
-from secator.definitions import DEBUG
+from secator.definitions import DEBUG
+from secator.config import CONFIG
 from secator.output_types import OUTPUT_TYPES, OutputType, Progress
 from secator.report import Report
 from secator.rich import console, console_stdout
@@ -48,7 +49,7 @@ class Runner:
 	"""Runner class.
 
 	Args:
-		config (secator.config.…
+		config (secator.config.TemplateLoader): Loaded config.
 		targets (list): List of targets to run task on.
 		results (list): List of existing results to re-use.
 		workspace_name (str): Workspace name.
@@ -109,10 +110,9 @@
 		self.celery_result = None
 
 		# Determine report folder
-		default_reports_folder_base = f'{…
+		default_reports_folder_base = f'{CONFIG.dirs.reports}/{self.workspace_name}/{self.config.type}s'
 		_id = get_task_folder_id(default_reports_folder_base)
-…
-		self.reports_folder = run_opts.get('reports_folder') or default_report_folder
+		self.reports_folder = f'{default_reports_folder_base}/{_id}'
 
 		# Make reports folders
 		os.makedirs(self.reports_folder, exist_ok=True)
@@ -392,14 +392,14 @@
 
 	def resolve_exporters(self):
 		"""Resolve exporters from output options."""
-		output = self.run_opts.get('output'…
-		if output…
-			return self.default_exporters
-		elif output is False:
+		output = self.run_opts.get('output') or self.default_exporters
+		if not output or output in ['false', 'False']:
 			return []
+		if isinstance(output, str):
+			output = output.split(',')
 		exporters = [
 			import_dynamic(f'secator.exporters.{o.capitalize()}Exporter', 'Exporter')
-			for o in output
+			for o in output
 			if o
 		]
 		return [e for e in exporters if e]
@@ -851,7 +851,8 @@
 
 		if item._type == 'progress' and item._source == self.config.name:
 			self.progress = item.percent
-…
+			update_frequency = CONFIG.runners.progress_update_frequency
+			if self.last_updated_progress and (item._timestamp - self.last_updated_progress) < update_frequency:
 				return None
 			elif int(item.percent) in [0, 100]:
 				return None

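resolve_exporters() now accepts the -o value either as a list or as a comma-separated string, falls back to the runner's default exporters when nothing is passed, and treats 'false' as "no export". Roughly, for a hypothetical CLI value:

# Hypothetical -o value coming from the new 'output' option.
output = 'table,json,csv'
if isinstance(output, str):
	output = output.split(',')
# -> ['table', 'json', 'csv'], each then resolved dynamically to
# secator.exporters.TableExporter, JsonExporter, CsvExporter.
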
secator/runners/_helpers.py
CHANGED
@@ -1,6 +1,10 @@
 import os
 
+import kombu
+import kombu.exceptions
+
 from secator.utils import deduplicate
+from secator.rich import console
 
 
 def run_extractors(results, opts, targets=[]):
@@ -80,20 +84,24 @@ def get_task_ids(result, ids=[]):
 	if result is None:
 		return
 
-…
-…
+	try:
+		if isinstance(result, GroupResult):
+			get_task_ids(result.parent, ids=ids)
 
-…
-…
-…
+		elif isinstance(result, AsyncResult):
+			if result.id not in ids:
+				ids.append(result.id)
 
-…
-…
-…
+		if hasattr(result, 'children') and result.children:
+			for child in result.children:
+				get_task_ids(child, ids=ids)
 
-…
-…
-…
+		# Browse parent
+		if hasattr(result, 'parent') and result.parent:
+			get_task_ids(result.parent, ids=ids)
+	except kombu.exceptions.DecodeError as e:
+		console.print(f'[bold red]{str(e)}. Aborting get_task_ids.[/]')
+		return
 
 
 def get_task_data(task_id):
@@ -107,33 +115,43 @@ def get_task_data(task_id):
 	"""
 	from celery.result import AsyncResult
 	res = AsyncResult(task_id)
-	if not…
+	if not res:
+		return
+	try:
+		args = res.args
+		info = res.info
+		state = res.state
+	except kombu.exceptions.DecodeError as e:
+		console.print(f'[bold red]{str(e)}. Aborting get_task_data.[/]')
 		return
-…
-…
-…
-	data…
-…
-…
-…
-…
-…
-…
-…
-…
-…
+	if not (args and len(args) > 1):
+		return
+	task_name = args[1]
+	data = {
+		'id': task_id,
+		'name': task_name,
+		'state': state,
+		'chunk_info': '',
+		'count': 0,
+		'error': None,
+		'ready': False,
+		'descr': '',
+		'progress': 0,
+		'results': []
+	}
+
+	# Set ready flag
+	if state in ['FAILURE', 'SUCCESS', 'REVOKED']:
 		data['ready'] = True
-…
-…
-…
-	data…
-	data…
-…
+
+	# Set task data
+	if info and not isinstance(info, list):
+		data.update(info)
+	chunk = data.get('chunk')
+	chunk_count = data.get('chunk_count')
+	if chunk and chunk_count:
 		data['chunk_info'] = f'{chunk}/{chunk_count}'
-	data.update(res.info)
 	data['descr'] = data.pop('description', '')
-	# del data['results']
-	# del data['task_results']
 	return data
 
 

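Both helpers now guard reads from the Celery result backend: accessing args, info or state on an AsyncResult can raise kombu.exceptions.DecodeError when the stored payload cannot be decoded, a case the 0.3.5 code did not handle. A stripped-down sketch of the same pattern outside secator:

# Sketch of the defensive read pattern added to get_task_ids()/get_task_data().
import kombu.exceptions
from celery.result import AsyncResult

def safe_state(task_id):
	res = AsyncResult(task_id)
	try:
		return res.state
	except kombu.exceptions.DecodeError:
		return None  # undecodable backend payload; treat state as unknown
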
secator/runners/command.py
CHANGED
@@ -10,19 +10,13 @@ from time import sleep
 
 from fp.fp import FreeProxy
 
-from secator.…
-from secator.definitions import…
-…
-	DEFAULT_PROXYCHAINS_COMMAND,
-	DEFAULT_SOCKS5_PROXY, OPT_NOT_SUPPORTED,
-	OPT_PIPE_INPUT, DEFAULT_INPUT_CHUNK_SIZE)
+from secator.template import TemplateLoader
+from secator.definitions import OPT_NOT_SUPPORTED, OPT_PIPE_INPUT
+from secator.config import CONFIG
 from secator.runners import Runner
 from secator.serializers import JSONSerializer
 from secator.utils import debug
 
-# from rich.markup import escape
-# from rich.text import Text
-…
 
 logger = logging.getLogger(__name__)
 
@@ -69,7 +63,7 @@ class Command(Runner):
 	input_path = None
 
 	# Input chunk size (default None)
-	input_chunk_size =…
+	input_chunk_size = CONFIG.runners.input_chunk_size
 
 	# Flag to take a file as input
 	file_flag = None
@@ -110,7 +104,7 @@ class Command(Runner):
 
 	def __init__(self, input=None, **run_opts):
 		# Build runnerconfig on-the-fly
-		config =…
+		config = TemplateLoader(input={
 			'name': self.__class__.__name__,
 			'type': 'task',
 			'description': run_opts.get('description', None)
@@ -270,14 +264,16 @@
 			secator.runners.Command: instance of the Command.
 		"""
 		name = name or cmd.split(' ')[0]
-		kwargs['no_process'] = True
+		kwargs['no_process'] = kwargs.get('no_process', True)
 		kwargs['print_cmd'] = not kwargs.get('quiet', False)
 		kwargs['print_item'] = not kwargs.get('quiet', False)
 		kwargs['print_line'] = not kwargs.get('quiet', False)
+		delay_run = kwargs.pop('delay_run', False)
 		cmd_instance = type(name, (Command,), {'cmd': cmd})(**kwargs)
 		for k, v in cls_attributes.items():
 			setattr(cmd_instance, k, v)
-…
+		if not delay_run:
+			cmd_instance.run()
 		return cmd_instance
 
 	def configure_proxy(self):
@@ -290,7 +286,7 @@ class Command(Runner):
 		opt_key_map = self.opt_key_map
 		proxy_opt = opt_key_map.get('proxy', False)
 		support_proxy_opt = proxy_opt and proxy_opt != OPT_NOT_SUPPORTED
-		proxychains_flavor = getattr(self, 'proxychains_flavor',…
+		proxychains_flavor = getattr(self, 'proxychains_flavor', CONFIG.http.proxychains_command)
 		proxy = False
 
 		if self.proxy in ['auto', 'proxychains'] and self.proxychains:
@@ -298,12 +294,12 @@
 			proxy = 'proxychains'
 
 		elif self.proxy and support_proxy_opt:
-			if self.proxy in ['auto', 'socks5'] and self.proxy_socks5 and…
-				proxy =…
-			elif self.proxy in ['auto', 'http'] and self.proxy_http and…
-				proxy =…
+			if self.proxy in ['auto', 'socks5'] and self.proxy_socks5 and CONFIG.http.socks5_proxy:
+				proxy = CONFIG.http.socks5_proxy
+			elif self.proxy in ['auto', 'http'] and self.proxy_http and CONFIG.http.http_proxy:
+				proxy = CONFIG.http.http_proxy
 		elif self.proxy == 'random':
-			proxy = FreeProxy(timeout=…
+			proxy = FreeProxy(timeout=CONFIG.http.freeproxy_timeout, rand=True, anonym=True).get()
 		elif self.proxy.startswith(('http://', 'socks5://')):
 			proxy = self.proxy
 
@@ -354,7 +350,7 @@ class Command(Runner):
 		try:
 			env = os.environ
 			env.update(self.env)
-			process = subprocess.Popen(
+			self.process = subprocess.Popen(
 				command,
 				stdin=subprocess.PIPE if sudo_password else None,
 				stdout=sys.stdout if self.no_capture else subprocess.PIPE,
@@ -366,8 +362,8 @@
 
 			# If sudo password is provided, send it to stdin
 			if sudo_password:
-				process.stdin.write(f"{sudo_password}\n")
-				process.stdin.flush()
+				self.process.stdin.write(f"{sudo_password}\n")
+				self.process.stdin.flush()
 
 		except FileNotFoundError as e:
 			if self.config.name in str(e):
@@ -386,11 +382,11 @@
 		try:
 			# No capture mode, wait for command to finish and return
 			if self.no_capture:
-				self._wait_for_end(…
+				self._wait_for_end()
 				return
 
 			# Process the output in real-time
-			for line in iter(lambda: process.stdout.readline(), b''):
+			for line in iter(lambda: self.process.stdout.readline(), b''):
 				sleep(0) # for async to give up control
 				if not line:
 					break
@@ -430,11 +426,11 @@
 				yield from items
 
 		except KeyboardInterrupt:
-			process.kill()
+			self.process.kill()
 			self.killed = True
 
 		# Retrieve the return code and output
-		self._wait_for_end(…
+		self._wait_for_end()
 
 	def run_item_loaders(self, line):
 		"""Run item loaders on a string."""
@@ -493,16 +489,16 @@
 				self._print("Sudo password verification failed after 3 attempts.")
 				return None
 
-	def _wait_for_end(self…
+	def _wait_for_end(self):
 		"""Wait for process to finish and process output and return code."""
-		process.wait()
-		self.return_code = process.returncode
+		self.process.wait()
+		self.return_code = self.process.returncode
 
 		if self.no_capture:
 			self.output = ''
 		else:
 			self.output = self.output.strip()
-			process.stdout.close()
+			self.process.stdout.close()
 
 		if self.ignore_return_code:
 			self.return_code = 0

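Command now keeps its subprocess on self.process instead of a local variable, and the classmethod that builds ad-hoc commands accepts a new delay_run keyword so an instance can be built without being started. Hypothetical usage, assuming that classmethod is Command.execute (its name is not shown in the -270 hunk) and using an arbitrary command string:

# Hypothetical usage of the new delay_run flag; 'ls -la' is only an example.
from secator.runners import Command

cmd = Command.execute('ls -la', delay_run=True)  # instance built, not yet run
cmd.run()                                        # run explicitly later
print(cmd.return_code, cmd.output)               # populated by _wait_for_end()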