secator 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/.gitignore +162 -0
- secator/__init__.py +0 -0
- secator/celery.py +453 -0
- secator/celery_signals.py +138 -0
- secator/celery_utils.py +320 -0
- secator/cli.py +2035 -0
- secator/cli_helper.py +395 -0
- secator/click.py +87 -0
- secator/config.py +670 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +8 -0
- secator/configs/profiles/all_ports.yaml +7 -0
- secator/configs/profiles/full.yaml +31 -0
- secator/configs/profiles/http_headless.yaml +7 -0
- secator/configs/profiles/http_record.yaml +8 -0
- secator/configs/profiles/insane.yaml +8 -0
- secator/configs/profiles/paranoid.yaml +8 -0
- secator/configs/profiles/passive.yaml +11 -0
- secator/configs/profiles/polite.yaml +8 -0
- secator/configs/profiles/sneaky.yaml +8 -0
- secator/configs/profiles/tor.yaml +5 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +31 -0
- secator/configs/scans/host.yaml +23 -0
- secator/configs/scans/network.yaml +30 -0
- secator/configs/scans/subdomain.yaml +27 -0
- secator/configs/scans/url.yaml +19 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +48 -0
- secator/configs/workflows/code_scan.yaml +29 -0
- secator/configs/workflows/domain_recon.yaml +46 -0
- secator/configs/workflows/host_recon.yaml +95 -0
- secator/configs/workflows/subdomain_recon.yaml +120 -0
- secator/configs/workflows/url_bypass.yaml +15 -0
- secator/configs/workflows/url_crawl.yaml +98 -0
- secator/configs/workflows/url_dirsearch.yaml +62 -0
- secator/configs/workflows/url_fuzz.yaml +68 -0
- secator/configs/workflows/url_params_fuzz.yaml +66 -0
- secator/configs/workflows/url_secrets_hunt.yaml +23 -0
- secator/configs/workflows/url_vuln.yaml +91 -0
- secator/configs/workflows/user_hunt.yaml +29 -0
- secator/configs/workflows/wordpress.yaml +38 -0
- secator/cve.py +718 -0
- secator/decorators.py +7 -0
- secator/definitions.py +168 -0
- secator/exporters/__init__.py +14 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +37 -0
- secator/exporters/gdrive.py +123 -0
- secator/exporters/json.py +16 -0
- secator/exporters/table.py +36 -0
- secator/exporters/txt.py +28 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/gcs.py +80 -0
- secator/hooks/mongodb.py +281 -0
- secator/installer.py +694 -0
- secator/loader.py +128 -0
- secator/output_types/__init__.py +49 -0
- secator/output_types/_base.py +108 -0
- secator/output_types/certificate.py +78 -0
- secator/output_types/domain.py +50 -0
- secator/output_types/error.py +42 -0
- secator/output_types/exploit.py +58 -0
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +47 -0
- secator/output_types/port.py +55 -0
- secator/output_types/progress.py +36 -0
- secator/output_types/record.py +36 -0
- secator/output_types/stat.py +41 -0
- secator/output_types/state.py +29 -0
- secator/output_types/subdomain.py +45 -0
- secator/output_types/tag.py +69 -0
- secator/output_types/target.py +38 -0
- secator/output_types/url.py +112 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +101 -0
- secator/output_types/warning.py +30 -0
- secator/report.py +140 -0
- secator/rich.py +130 -0
- secator/runners/__init__.py +14 -0
- secator/runners/_base.py +1240 -0
- secator/runners/_helpers.py +218 -0
- secator/runners/celery.py +18 -0
- secator/runners/command.py +1178 -0
- secator/runners/python.py +126 -0
- secator/runners/scan.py +87 -0
- secator/runners/task.py +81 -0
- secator/runners/workflow.py +168 -0
- secator/scans/__init__.py +29 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +39 -0
- secator/serializers/json.py +45 -0
- secator/serializers/regex.py +25 -0
- secator/tasks/__init__.py +8 -0
- secator/tasks/_categories.py +487 -0
- secator/tasks/arjun.py +113 -0
- secator/tasks/arp.py +53 -0
- secator/tasks/arpscan.py +70 -0
- secator/tasks/bbot.py +372 -0
- secator/tasks/bup.py +118 -0
- secator/tasks/cariddi.py +193 -0
- secator/tasks/dalfox.py +87 -0
- secator/tasks/dirsearch.py +84 -0
- secator/tasks/dnsx.py +186 -0
- secator/tasks/feroxbuster.py +93 -0
- secator/tasks/ffuf.py +135 -0
- secator/tasks/fping.py +85 -0
- secator/tasks/gau.py +102 -0
- secator/tasks/getasn.py +60 -0
- secator/tasks/gf.py +36 -0
- secator/tasks/gitleaks.py +96 -0
- secator/tasks/gospider.py +84 -0
- secator/tasks/grype.py +109 -0
- secator/tasks/h8mail.py +75 -0
- secator/tasks/httpx.py +167 -0
- secator/tasks/jswhois.py +36 -0
- secator/tasks/katana.py +203 -0
- secator/tasks/maigret.py +87 -0
- secator/tasks/mapcidr.py +42 -0
- secator/tasks/msfconsole.py +179 -0
- secator/tasks/naabu.py +85 -0
- secator/tasks/nmap.py +487 -0
- secator/tasks/nuclei.py +151 -0
- secator/tasks/search_vulns.py +225 -0
- secator/tasks/searchsploit.py +109 -0
- secator/tasks/sshaudit.py +299 -0
- secator/tasks/subfinder.py +48 -0
- secator/tasks/testssl.py +283 -0
- secator/tasks/trivy.py +130 -0
- secator/tasks/trufflehog.py +240 -0
- secator/tasks/urlfinder.py +100 -0
- secator/tasks/wafw00f.py +106 -0
- secator/tasks/whois.py +34 -0
- secator/tasks/wpprobe.py +116 -0
- secator/tasks/wpscan.py +202 -0
- secator/tasks/x8.py +94 -0
- secator/tasks/xurlfind3r.py +83 -0
- secator/template.py +294 -0
- secator/thread.py +24 -0
- secator/tree.py +196 -0
- secator/utils.py +922 -0
- secator/utils_test.py +297 -0
- secator/workflows/__init__.py +29 -0
- secator-0.22.0.dist-info/METADATA +447 -0
- secator-0.22.0.dist-info/RECORD +150 -0
- secator-0.22.0.dist-info/WHEEL +4 -0
- secator-0.22.0.dist-info/entry_points.txt +2 -0
- secator-0.22.0.dist-info/licenses/LICENSE +60 -0
secator/decorators.py
ADDED
secator/definitions.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
#!/usr/bin/python

import os

from importlib.metadata import version

from secator.config import CONFIG, ROOT_FOLDER


# Globals
# Version string read from the installed distribution metadata.
VERSION = version('secator')
# Startup banner; raw f-string so the backslashes in the art survive.
ASCII = rf"""
			 __
   ________  _________ _/ /_____  _____
  / ___/ _ \/ ___/ __ `/ __/ __ \/ ___/
 (__  ) __/ /__/ /_/ / /_/ /_/ / /
/____/\___/\___/\__,_/\__/\____/_/     v{VERSION}

			freelabz.com
"""  # noqa: W605,W291

# Debug
# Comma-separated list of debug channels enabled through the config.
DEBUG = CONFIG.debug.split(',')
FORCE_TTY = CONFIG.runners.force_tty

# Constants
# Sentinel values used by task option mappings.
OPT_NOT_SUPPORTED = -1
OPT_PIPE_INPUT = -2
OPT_SPACE_SEPARATED = -3
# Rich console styles keyed by Celery-style task state names.
STATE_COLORS = {
    'PENDING': 'dim yellow3',
    'RUNNING': 'bold yellow3',
    'SUCCESS': 'bold green',
    'FAILURE': 'bold red',
    'REVOKED': 'bold magenta'
}

# Vocab
# Canonical field / option names shared by tasks and output types, so that
# every module references the same string constants instead of bare literals.
ALIVE = 'alive'
AUTO_CALIBRATION = 'auto_calibration'
CONTENT_TYPE = 'content_type'
CONTENT_LENGTH = 'content_length'
CERTIFICATE_STATUS_UNKNOWN = 'Unknown'
CERTIFICATE_STATUS_REVOKED = 'Revoked'
CERTIFICATE_STATUS_TRUSTED = 'Trusted'
CIDR_RANGE = 'cidr_range'
CONFIDENCE = 'confidence'
CPES = 'cpes'
CVES = 'cves'
CVSS_SCORE = 'cvss_score'
DATA = 'data'
DELAY = 'delay'
DESCRIPTION = 'description'
DOCKER_IMAGE = 'docker_image'
DOMAIN = 'domain'
DEPTH = 'depth'
EXTRA_DATA = 'extra_data'
EMAIL = 'email'
FILENAME = 'filename'
FILTER_CODES = 'filter_codes'
FILTER_WORDS = 'filter_words'
FOLLOW_REDIRECT = 'follow_redirect'
FILTER_REGEX = 'filter_regex'
FILTER_SIZE = 'filter_size'
GCS_URL = 'gcs_url'
GIT_REPOSITORY = 'git_repository'
HEADER = 'header'
HOST = 'host'
HOST_PORT = 'host:port'
IBAN = 'iban'
ID = 'id'
IP = 'ip'
PROTOCOL = 'protocol'
LINES = 'lines'
METHOD = 'method'
MAC_ADDRESS = 'mac'
MATCHED_AT = 'matched_at'
MATCH_CODES = 'match_codes'
MATCH_REGEX = 'match_regex'
MATCH_SIZE = 'match_size'
MATCH_WORDS = 'match_words'
NAME = 'name'
ORG_NAME = 'org_name'
OUTPUT_PATH = 'output_path'
PATH = 'path'
PERCENT = 'percent'
PORTS = 'ports'
PORT = 'port'
PROVIDER = 'provider'
PROXY = 'proxy'
RATE_LIMIT = 'rate_limit'
REFERENCE = 'reference'
REFERENCES = 'references'
RETRIES = 'retries'
SCRIPT = 'script'
SERVICE_NAME = 'service_name'
SEVERITY = 'severity'
SITE_NAME = 'site_name'
SLUG = 'slug'
SOURCES = 'sources'
STORED_RESPONSE_PATH = 'stored_response_path'
STATE = 'state'
STATUS_CODE = 'status_code'
STRING = 'str'
TAGS = 'tags'
TECH = 'tech'
TECHNOLOGY = 'technology'
THREADS = 'threads'
TIME = 'time'
TIMEOUT = 'timeout'
TITLE = 'title'
TOP_PORTS = 'top_ports'
TYPE = 'type'
URL = 'url'
USER_AGENT = 'user_agent'
USERNAME = 'username'
UUID = 'uuid'
WEBSERVER = 'webserver'
WORDLIST = 'wordlist'
WORDS = 'words'

# Allowed input types
# The set of target kinds that runners accept on their command line.
INPUT_TYPES = [
    URL,
    IP,
    CIDR_RANGE,
    HOST,
    MAC_ADDRESS,
    EMAIL,
    IBAN,
    UUID,
    PATH,
    SLUG,
    STRING,
]
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def is_importable(module_to_import):
    """Return True if *module_to_import* can be imported, False otherwise.

    A missing module fails silently; any other import-time error is printed
    before returning False.

    Args:
        module_to_import (str): Dotted module path to try importing.

    Returns:
        bool: Whether the import succeeded.
    """
    from importlib import import_module
    try:
        import_module(module_to_import)
    except ModuleNotFoundError:
        return False
    except Exception as e:
        # Module exists but blew up at import time: surface the reason.
        print(f'Failed trying to import {module_to_import}: {str(e)}')
        return False
    return True
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
# Optional addon name -> backing Python package that must be importable
# for the addon to be considered enabled.
_ADDON_MODULES = [
    ('worker', 'eventlet'),
    ('gdrive', 'gspread'),
    ('gcs', 'google.cloud.storage'),
    ('mongodb', 'pymongo'),
    ('redis', 'redis'),
    ('dev', 'flake8'),
    ('trace', 'memray'),
    ('build', 'hatch'),
]
ADDONS_ENABLED = {addon: is_importable(module) for addon, module in _ADDON_MODULES}
|
|
163
|
+
|
|
164
|
+
# Check dev package
|
|
165
|
+
# Check dev package
# True when running from a source checkout (pyproject.toml sits at the root),
# False when running from an installed wheel.
DEV_PACKAGE = os.path.exists(f'{ROOT_FOLDER}/pyproject.toml')
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# Public exporter API: these classes are re-exported at the package level so
# callers can do `from secator.exporters import <X>Exporter`.
__all__ = [
    'ConsoleExporter',
    'CsvExporter',
    'GdriveExporter',
    'JsonExporter',
    'TableExporter',
    'TxtExporter'
]
from secator.exporters.console import ConsoleExporter
from secator.exporters.csv import CsvExporter
from secator.exporters.gdrive import GdriveExporter
from secator.exporters.json import JsonExporter
from secator.exporters.table import TableExporter
from secator.exporters.txt import TxtExporter
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from secator.exporters._base import Exporter
|
|
2
|
+
from secator.rich import console_stdout
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class ConsoleExporter(Exporter):
    """Print every result item of the report to the stdout console."""

    def send(self):
        """Iterate all result groups and print each finding as-is."""
        result_groups = self.report.data['results']
        for group in result_groups.values():
            for finding in group:
                console_stdout.print(finding)
|
secator/exporters/csv.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import csv as _csv
|
|
2
|
+
|
|
3
|
+
from dataclasses import fields
|
|
4
|
+
|
|
5
|
+
from secator.exporters._base import Exporter
|
|
6
|
+
from secator.rich import console
|
|
7
|
+
from secator.output_types import FINDING_TYPES
|
|
8
|
+
from secator.output_types import Info
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class CsvExporter(Exporter):
    """Export report results to disk as one CSV file per output type.

    Columns follow the dataclass field order of the matching output type, so
    every CSV of a given type has an identical, predictable header.
    """

    def send(self):
        """Write the CSV files and print an info message with their paths."""
        results = self.report.data['results']
        if not results:
            return
        csv_paths = []

        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            # Resolve the output class for this type; its dataclass fields
            # define the canonical column order.
            output_cls = next((o for o in FINDING_TYPES if o._type == output_type), None)
            if output_cls is None:
                # Unknown output type (e.g. a non-finding type): no schema to
                # map columns against, so skip it instead of crashing.
                continue
            keys = [o.name for o in fields(output_cls)]
            csv_path = f'{self.report.output_folder}/report_{output_type}.csv'
            csv_paths.append(csv_path)
            with open(csv_path, 'w', newline='') as output_file:
                dict_writer = _csv.DictWriter(output_file, keys)
                dict_writer.writeheader()
                dict_writer.writerows(items)

        # Nothing was written: do not print a misleading success message.
        if not csv_paths:
            return
        if len(csv_paths) == 1:
            csv_paths_str = csv_paths[0]
        else:
            csv_paths_str = '\n • ' + '\n • '.join(csv_paths)

        info = Info(message=f'Saved CSV reports to {csv_paths_str}')
        console.print(info)
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import csv
|
|
3
|
+
import yaml
|
|
4
|
+
|
|
5
|
+
from secator.config import CONFIG
|
|
6
|
+
from secator.exporters._base import Exporter
|
|
7
|
+
from secator.output_types import Info, Error
|
|
8
|
+
from secator.rich import console
|
|
9
|
+
from secator.utils import pluralize
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class GdriveExporter(Exporter):
    """Export a report to Google Sheets, one spreadsheet per report.

    Requires the gdrive addon configuration:
      - CONFIG.addons.gdrive.credentials_path: service-account JSON key file.
      - CONFIG.addons.gdrive.drive_parent_folder_id: Drive folder that holds
        the per-workspace report folders.

    Worksheet contents are read back from the CSV reports on disk, so the CSV
    exporter must have run before this one.
    """

    def send(self):
        """Create the spreadsheet: an OPTIONS worksheet plus one per output type."""
        import gspread
        ws = self.report.workspace_name
        info = self.report.data['info']
        title = self.report.data['info']['title']
        # NOTE(review): this value is overwritten inside the loop below and
        # never used before that — presumably leftover; confirm before removing.
        sheet_title = f'{self.report.data["info"]["title"]}_{self.report.timestamp}'
        results = self.report.data['results']
        if not CONFIG.addons.gdrive.credentials_path:
            error = Error('Missing CONFIG.addons.gdrive.credentials_path to save to Google Sheets')
            console.print(error)
            return
        if not CONFIG.addons.gdrive.drive_parent_folder_id:
            error = Error('Missing CONFIG.addons.gdrive.drive_parent_folder_id to save to Google Sheets.')
            console.print(error)
            return
        client = gspread.service_account(CONFIG.addons.gdrive.credentials_path)

        # Create workspace folder if it doesn't exist
        folder_id = self.get_folder_by_name(ws, parent_id=CONFIG.addons.gdrive.drive_parent_folder_id)
        if ws and not folder_id:
            folder_id = self.create_folder(
                folder_name=ws,
                parent_id=CONFIG.addons.gdrive.drive_parent_folder_id)

        # Create worksheet
        sheet = client.create(title, folder_id=folder_id)

        # Add options worksheet for input data
        info = self.report.data['info']
        # Flatten list / dict values so each fits into a single cell.
        info['targets'] = '\n'.join(info['targets'])
        info['opts'] = yaml.dump(info['opts'])
        keys = [k.replace('_', ' ').upper() for k in list(info.keys())]
        ws = sheet.add_worksheet('OPTIONS', rows=2, cols=len(keys))
        sheet.values_update(
            ws.title,
            params={'valueInputOption': 'USER_ENTERED'},
            body={'values': [keys, list(info.values())]}
        )

        # Add one worksheet per output type
        for output_type, items in results.items():
            items = [i.toDict() for i in items]
            if not items:
                continue
            keys = [
                k.replace('_', ' ').upper()
                for k in list(items[0].keys())
            ]
            # Worksheet rows come from the CSV report previously written to disk.
            csv_path = f'{self.report.output_folder}/report_{output_type}.csv'
            if not os.path.exists(csv_path):
                error = Error(
                    f'Unable to find CSV at {csv_path}. For Google sheets reports, please enable CSV reports as well.')
                console.print(error)
                return
            sheet_title = pluralize(output_type).upper()
            ws = sheet.add_worksheet(sheet_title, rows=len(items), cols=len(keys))
            with open(csv_path, 'r') as f:
                data = csv.reader(f)
                data = list(data)
                # Prettify the CSV header row to match the OPTIONS sheet style.
                data[0] = [
                    k.replace('_', ' ').upper()
                    for k in data[0]
                ]
                sheet.values_update(
                    ws.title,
                    params={'valueInputOption': 'USER_ENTERED'},
                    body={'values': data}
                )

        # Delete 'default' worksheet
        ws = sheet.get_worksheet(0)
        sheet.del_worksheet(ws)

        info = Info(message=f'Saved Google Sheets reports to [u magenta]{sheet.url}')
        console.print(info)

    def create_folder(self, folder_name, parent_id=None):
        """Create a Drive folder and return its id.

        Args:
            folder_name (str): Name of the folder to create.
            parent_id (str, optional): Parent Drive folder id.

        Returns:
            str: Id of the newly created folder.
        """
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(CONFIG.addons.gdrive.credentials_path)
        service = build('drive', 'v3', credentials=creds)
        body = {
            'name': folder_name,
            'mimeType': "application/vnd.google-apps.folder"
        }
        if parent_id:
            body['parents'] = [parent_id]
        folder = service.files().create(body=body, fields='id').execute()
        return folder['id']

    def list_folders(self, parent_id):
        """Return the raw Drive API response listing folders under *parent_id*."""
        from googleapiclient.discovery import build
        from google.oauth2 import service_account
        creds = service_account.Credentials.from_service_account_file(CONFIG.addons.gdrive.credentials_path)
        service = build('drive', 'v3', credentials=creds)
        driveid = service.files().get(fileId='root').execute()['id']
        response = service.files().list(
            q=f"'{parent_id}' in parents and mimeType='application/vnd.google-apps.folder'",
            driveId=driveid,
            corpora='drive',
            includeItemsFromAllDrives=True,
            supportsAllDrives=True
        ).execute()
        return response

    def get_folder_by_name(self, name, parent_id=None):
        """Return the id of the first folder named *name* under *parent_id*, or None."""
        response = self.list_folders(parent_id=parent_id)
        existing = [i for i in response['files'] if i['name'] == name]
        if existing:
            return existing[0]['id']
        return None
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from secator.exporters._base import Exporter
|
|
2
|
+
from secator.output_types import Info
|
|
3
|
+
from secator.rich import console
|
|
4
|
+
from secator.serializers.dataclass import dumps_dataclass
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class JsonExporter(Exporter):
    """Write the full report payload to a single JSON file."""

    def send(self):
        """Serialize the report data and save it under the report folder."""
        output_path = f'{self.report.output_folder}/report.json'
        serialized = dumps_dataclass(self.report.data, indent=2)
        with open(output_path, 'w') as report_file:
            report_file.write(serialized)
        console.print(Info(f'Saved JSON report to {output_path}'))
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from rich.markdown import Markdown
|
|
2
|
+
|
|
3
|
+
from secator.exporters._base import Exporter
|
|
4
|
+
from secator.output_types import OutputType
|
|
5
|
+
from secator.rich import build_table, console
|
|
6
|
+
from secator.utils import pluralize
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class TableExporter(Exporter):
    """Render report results on the console as one rich table per output type."""

    def send(self):
        """Print an optional heading then a table for each non-empty result group."""
        results = self.report.data['results']
        if not results:
            return
        console.print()
        report_title = self.report.title
        if report_title:
            heading = ' '.join(report_title.capitalize().split('_')) + ' results'
            console.print(Markdown(f'# {heading}'), style='bold magenta', width=50)
            console.print()
        for output_type, items in results.items():
            # Progress events are transient and not worth tabulating.
            if output_type == 'progress' or not items:
                continue
            first = items[0]
            typed = isinstance(first, OutputType)
            table = build_table(
                items,
                output_fields=first._table_fields if typed else None,
                sort_by=first._sort_by if typed else [])
            section = pluralize(first._type).upper() if typed else 'Results'
            console.print(f':wrench: {section}', style='bold gold3', justify='left')
            console.print(table)
            console.print()
|
secator/exporters/txt.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from secator.exporters._base import Exporter
|
|
2
|
+
from secator.output_types import Info
|
|
3
|
+
from secator.rich import console
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class TxtExporter(Exporter):
    """Export report results as one plain-text file per output type.

    Each file contains the deduplicated string form of every item, one per
    line, sorted so that repeated runs over the same results produce
    byte-identical reports.
    """

    def send(self):
        """Write the TXT files and print an info message with their paths."""
        results = self.report.data['results']
        if not results:
            return
        txt_paths = []

        for output_type, items in results.items():
            # Deduplicate, then sort: iterating a bare set makes line order
            # nondeterministic across runs, which breaks report diffing.
            lines = sorted(set(str(i) for i in items))
            if not lines:
                continue
            txt_path = f'{self.report.output_folder}/report_{output_type}.txt'
            with open(txt_path, 'w') as f:
                f.write('\n'.join(lines))
            txt_paths.append(txt_path)

        # Nothing was written: do not print a misleading success message.
        if not txt_paths:
            return
        if len(txt_paths) == 1:
            txt_paths_str = txt_paths[0]
        else:
            txt_paths_str = '\n • ' + '\n • '.join(txt_paths)

        info = Info(f'Saved TXT reports to {txt_paths_str}')
        console.print(info)
|
|
File without changes
|
secator/hooks/gcs.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import warnings
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from time import time
|
|
5
|
+
|
|
6
|
+
from google.cloud import storage
|
|
7
|
+
|
|
8
|
+
from secator.config import CONFIG
|
|
9
|
+
from secator.runners import Task
|
|
10
|
+
from secator.thread import Thread
|
|
11
|
+
from secator.utils import debug
|
|
12
|
+
|
|
13
|
+
# The google-cloud libraries emit this warning when authenticating with
# end-user (gcloud) credentials instead of a service account; silence it.
warnings.filterwarnings("ignore", "Your application has authenticated using end user credentials")

# Target bucket for uploads; an empty value disables the hook at runtime.
GCS_BUCKET_NAME = CONFIG.addons.gcs.bucket_name
# For each output type, the item fields whose local file paths get uploaded.
ITEMS_TO_SEND = {
    'url': ['screenshot_path', 'stored_response_path']
}

# Module-level cached GCS client, created lazily by get_gcs_client().
_gcs_client = None
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_gcs_client():
    """Get or create GCS client.

    Returns:
        storage.Client: the process-wide cached client, built on first call.
    """
    global _gcs_client
    if _gcs_client is not None:
        return _gcs_client
    _gcs_client = storage.Client()
    return _gcs_client
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def process_item(self, item):
    """Upload a result item's file attachments to GCS and rewrite their paths.

    For item types listed in ITEMS_TO_SEND, each configured field holding an
    existing local file path is uploaded in a background thread and the field
    is rewritten in place to the resulting ``gs://`` URL.

    Args:
        self: The runner invoking the hook; its ``threads`` list collects the
            upload threads (presumably joined later by the runner — confirm).
        item: The output item to process.

    Returns:
        The (possibly mutated) item.
    """
    if item._type not in ITEMS_TO_SEND.keys():
        return item
    if not GCS_BUCKET_NAME:
        # Hook configured but no bucket set: best-effort no-op.
        debug('skipped since addons.gcs.bucket_name is empty.', sub='hooks.gcs')
        return item
    to_send = ITEMS_TO_SEND[item._type]
    for k, v in item.toDict().items():
        if k in to_send and v:
            path = Path(v)
            if not path.exists():
                continue
            ext = path.suffix
            # Blob name keyed on item uuid + field so uploads never collide.
            blob_name = f'{item._uuid}_{k}{ext}'
            # Upload asynchronously; the thread is tracked on the runner.
            t = Thread(target=upload_blob, args=(GCS_BUCKET_NAME, v, blob_name))
            t.start()
            self.threads.append(t)
            # NOTE(review): the path is rewritten before the upload finishes,
            # so the gs:// URL may briefly point at a not-yet-existing blob.
            setattr(item, k, f'gs://{GCS_BUCKET_NAME}/{blob_name}')
    return item
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def upload_blob(bucket_name, source_file_name, destination_blob_name):
    """Uploads a file to the bucket.

    Args:
        bucket_name (str): Name of the target GCS bucket.
        source_file_name (str): Local path of the file to upload.
        destination_blob_name (str): Name of the blob to create in the bucket.
    """
    start_time = time()
    storage_client = get_gcs_client()
    bucket = storage_client.bucket(bucket_name)
    blob = bucket.blob(destination_blob_name)
    # upload_from_filename opens and closes the file itself; the previous
    # open() + seek(0) + upload_from_file sequence was redundant (seek(0)
    # immediately after open is a no-op).
    blob.upload_from_filename(source_file_name)
    end_time = time()
    elapsed = end_time - start_time
    debug(f'in {elapsed:.4f}s', obj={'blob': 'UPLOADED', 'blob_name': destination_blob_name, 'bucket': bucket_name}, obj_after=False, sub='hooks.gcs', verbose=True)  # noqa: E501
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def download_blob(bucket_name, source_blob_name, destination_file_name):
    """Downloads a file from the bucket.

    Args:
        bucket_name (str): Name of the source GCS bucket.
        source_blob_name (str): Name of the blob to fetch.
        destination_file_name (str): Local path to write the blob to.
    """
    started = time()
    client = get_gcs_client()
    blob = client.bucket(bucket_name).blob(source_blob_name)
    blob.download_to_filename(destination_file_name)
    elapsed = time() - started
    debug(f'in {elapsed:.4f}s', obj={'blob': 'DOWNLOADED', 'blob_name': source_blob_name, 'bucket': bucket_name}, obj_after=False, sub='hooks.gcs', verbose=True)  # noqa: E501
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
# Hook registration: run process_item on every item a Task emits, uploading
# eligible file attachments to GCS.
HOOKS = {
    Task: {'on_item': [process_item]}
}
|