secator 0.2.0__py2.py3-none-any.whl → 0.3.1__py2.py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of secator might be problematic.
- secator/celery.py +1 -1
- secator/cli.py +434 -454
- secator/decorators.py +5 -6
- secator/definitions.py +53 -28
- secator/exporters/txt.py +1 -1
- secator/installer.py +335 -0
- secator/rich.py +2 -8
- secator/runners/_base.py +47 -15
- secator/runners/command.py +2 -16
- secator/runners/task.py +4 -3
- secator/runners/workflow.py +1 -1
- secator/tasks/_categories.py +6 -11
- secator/tasks/dalfox.py +1 -0
- secator/tasks/dnsx.py +1 -0
- secator/tasks/dnsxbrute.py +1 -0
- secator/tasks/feroxbuster.py +1 -0
- secator/tasks/ffuf.py +1 -0
- secator/tasks/gau.py +1 -0
- secator/tasks/gospider.py +1 -0
- secator/tasks/grype.py +1 -0
- secator/tasks/httpx.py +1 -0
- secator/tasks/katana.py +1 -0
- secator/tasks/mapcidr.py +1 -0
- secator/tasks/naabu.py +1 -0
- secator/tasks/nuclei.py +1 -0
- secator/tasks/searchsploit.py +1 -0
- secator/tasks/subfinder.py +1 -0
- secator/utils.py +23 -1
- secator/utils_test.py +1 -0
- {secator-0.2.0.dist-info → secator-0.3.1.dist-info}/METADATA +1 -1
- {secator-0.2.0.dist-info → secator-0.3.1.dist-info}/RECORD +34 -33
- {secator-0.2.0.dist-info → secator-0.3.1.dist-info}/WHEEL +0 -0
- {secator-0.2.0.dist-info → secator-0.3.1.dist-info}/entry_points.txt +0 -0
- {secator-0.2.0.dist-info → secator-0.3.1.dist-info}/licenses/LICENSE +0 -0
secator/decorators.py
CHANGED
@@ -5,8 +5,7 @@ import rich_click as click
 from rich_click.rich_click import _get_rich_console
 from rich_click.rich_group import RichGroup
 
-from secator.definitions import
-	WORKER_ADDON_ENABLED)
+from secator.definitions import ADDONS_ENABLED, OPT_NOT_SUPPORTED
 from secator.runners import Scan, Task, Workflow
 from secator.utils import (deduplicate, expand_input, get_command_category,
 	get_command_cls)
@@ -53,7 +52,7 @@ class OrderedGroup(RichGroup):
 		if not name:
 			raise click.UsageError("`name` command argument is required when using aliases.")
 
-		f.__doc__ = f.__doc__ or '
+		f.__doc__ = f.__doc__ or '\0'.ljust(padding+1)
 		f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}'
 		base_command = super(OrderedGroup, self).command(
 			name, *args, **kwargs
@@ -79,7 +78,7 @@ class OrderedGroup(RichGroup):
 		max_width = _get_rich_console().width
 		aliases_str = ', '.join(f'[bold cyan]{alias}[/]' for alias in aliases)
 		padding = max_width // 4
-		f.__doc__ = f.__doc__ or '
+		f.__doc__ = f.__doc__ or '\0'.ljust(padding+1)
 		f.__doc__ = f'{f.__doc__:<{padding}}[dim](aliases)[/] {aliases_str}'
 		for alias in aliases:
 			grp = super(OrderedGroup, self).group(
@@ -276,7 +275,7 @@ def register_runner(cli_endpoint, config):
 	# opts.update(unknown_opts)
 	targets = opts.pop(input_type)
 	targets = expand_input(targets)
-	if sync or show or not
+	if sync or show or not ADDONS_ENABLED['worker']:
 		sync = True
 	elif worker:
 		sync = False
@@ -294,7 +293,7 @@ def register_runner(cli_endpoint, config):
 	# Build hooks from driver name
 	hooks = {}
 	if driver == 'mongodb':
-		if not
+		if not ADDONS_ENABLED['mongo']:
 			_get_rich_console().print('[bold red]Missing MongoDB dependencies: please run `secator install addons mongodb`[/].')
 			sys.exit(1)
 		from secator.hooks.mongodb import MONGODB_HOOKS
secator/definitions.py
CHANGED
@@ -1,10 +1,13 @@
 #!/usr/bin/python
 
 import os
+import requests
 
 from dotenv import find_dotenv, load_dotenv
 from pkg_resources import get_distribution
 
+from secator.rich import console
+
 load_dotenv(find_dotenv(usecwd=True), override=False)
 
 # Globals
@@ -24,6 +27,7 @@ ROOT_FOLDER = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 LIB_FOLDER = ROOT_FOLDER + '/secator'
 CONFIGS_FOLDER = LIB_FOLDER + '/configs'
 EXTRA_CONFIGS_FOLDER = os.environ.get('SECATOR_EXTRA_CONFIGS_FOLDER')
+BIN_FOLDER = os.environ.get('SECATOR_BIN_FOLDER', f'{os.path.expanduser("~")}/.local/bin')
 DATA_FOLDER = os.environ.get('SECATOR_DATA_FOLDER', f'{os.path.expanduser("~")}/.secator')
 REPORTS_FOLDER = os.environ.get('SECATOR_REPORTS_FOLDER', f'{DATA_FOLDER}/reports')
 WORDLISTS_FOLDER = os.environ.get('SECATOR_WORDLISTS_FOLDER', f'{DATA_FOLDER}/wordlists')
@@ -32,19 +36,10 @@ CVES_FOLDER = f'{DATA_FOLDER}/cves'
 PAYLOADS_FOLDER = f'{DATA_FOLDER}/payloads'
 REVSHELLS_FOLDER = f'{DATA_FOLDER}/revshells'
 TESTS_FOLDER = f'{ROOT_FOLDER}/tests'
-os.makedirs(DATA_FOLDER, exist_ok=True)
-os.makedirs(REPORTS_FOLDER, exist_ok=True)
-os.makedirs(WORDLISTS_FOLDER, exist_ok=True)
-os.makedirs(SCRIPTS_FOLDER, exist_ok=True)
-os.makedirs(CVES_FOLDER, exist_ok=True)
-os.makedirs(PAYLOADS_FOLDER, exist_ok=True)
-os.makedirs(REVSHELLS_FOLDER, exist_ok=True)
 
 # Celery local fs folders
 CELERY_DATA_FOLDER = f'{DATA_FOLDER}/celery/data'
 CELERY_RESULTS_FOLDER = f'{DATA_FOLDER}/celery/results'
-os.makedirs(CELERY_DATA_FOLDER, exist_ok=True)
-os.makedirs(CELERY_RESULTS_FOLDER, exist_ok=True)
 
 # Environment variables
 DEBUG = int(os.environ.get('DEBUG', '0'))
@@ -58,6 +53,7 @@ CELERY_BROKER_VISIBILITY_TIMEOUT = int(os.environ.get('CELERY_BROKER_VISIBILITY_
 CELERY_OVERRIDE_DEFAULT_LOGGING = bool(int(os.environ.get('CELERY_OVERRIDE_DEFAULT_LOGGING', 1)))
 GOOGLE_DRIVE_PARENT_FOLDER_ID = os.environ.get('GOOGLE_DRIVE_PARENT_FOLDER_ID')
 GOOGLE_CREDENTIALS_PATH = os.environ.get('GOOGLE_CREDENTIALS_PATH')
+GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
 
 # Defaults HTTP and Proxy settings
 DEFAULT_SOCKS5_PROXY = os.environ.get('SOCKS5_PROXY', "socks5://127.0.0.1:9050")
@@ -79,8 +75,10 @@ DEFAULT_PROGRESS_UPDATE_FREQUENCY = int(os.environ.get('DEFAULT_PROGRESS_UPDATE_
 DEFAULT_SKIP_CVE_SEARCH = bool(int(os.environ.get('DEFAULT_SKIP_CVE_SEARCH', 0)))
 
 # Default wordlists
-DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/
-
+DEFAULT_HTTP_WORDLIST = os.environ.get('DEFAULT_HTTP_WORDLIST', f'{WORDLISTS_FOLDER}/fuzz-Bo0oM.txt')
+DEFAULT_HTTP_WORDLIST_URL = 'https://raw.githubusercontent.com/Bo0oM/fuzz.txt/master/fuzz.txt'
+DEFAULT_DNS_WORDLIST = os.environ.get('DEFAULT_DNS_WORDLIST', f'{WORDLISTS_FOLDER}/combined_subdomains.txt')
+DEFAULT_DNS_WORDLIST_URL = 'https://raw.githubusercontent.com/danielmiessler/SecLists/master/Discovery/DNS/combined_subdomains.txt'  # noqa: E501
 
 # Constants
 OPT_NOT_SUPPORTED = -1
@@ -155,57 +153,84 @@ WEBSERVER = 'webserver'
 WORDLIST = 'wordlist'
 WORDS = 'words'
 
+
+# Create all folders
+for folder in [BIN_FOLDER, DATA_FOLDER, REPORTS_FOLDER, WORDLISTS_FOLDER, SCRIPTS_FOLDER, CVES_FOLDER, PAYLOADS_FOLDER,
+               REVSHELLS_FOLDER, CELERY_DATA_FOLDER, CELERY_RESULTS_FOLDER]:
+	if not os.path.exists(folder):
+		console.print(f'[bold turquoise4]Creating folder {folder} ...[/] ', end='')
+		os.makedirs(folder)
+		console.print('[bold green]ok.[/]')
+
+
+# Download default wordlists
+for wordlist in ['HTTP', 'DNS']:
+	wordlist_path = globals()[f'DEFAULT_{wordlist}_WORDLIST']
+	wordlist_url = globals()[f'DEFAULT_{wordlist}_WORDLIST_URL']
+	if not os.path.exists(wordlist_path):
+		try:
+			console.print(f'[bold turquoise4]Downloading default {wordlist} wordlist {wordlist_path} ...[/] ', end='')
+			resp = requests.get(wordlist_url)
+			with open(wordlist_path, 'w') as f:
+				f.write(resp.text)
+			console.print('[bold green]ok.[/]')
+		except requests.exceptions.RequestException as e:
+			console.print(f'[bold green]failed ({type(e).__name__}).[/]')
+			pass
+
+ADDONS_ENABLED = {}
+
 # Check worker addon
 try:
 	import eventlet  # noqa: F401
-
+	ADDONS_ENABLED['worker'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['worker'] = False
 
 # Check google addon
 try:
 	import gspread  # noqa: F401
-
+	ADDONS_ENABLED['google'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['google'] = False
 
 # Check mongodb addon
 try:
 	import pymongo  # noqa: F401
-
+	ADDONS_ENABLED['mongodb'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['mongodb'] = False
 
 # Check redis addon
 try:
 	import redis  # noqa: F401
-
+	ADDONS_ENABLED['redis'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['redis'] = False
 
 # Check dev addon
 try:
 	import flake8  # noqa: F401
-
+	ADDONS_ENABLED['dev'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['dev'] = False
 
 # Check build addon
 try:
 	import hatch  # noqa: F401
-
+	ADDONS_ENABLED['build'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['build'] = False
 
 # Check trace addon
 try:
 	import memray  # noqa: F401
-
+	ADDONS_ENABLED['trace'] = True
 except ModuleNotFoundError:
-
+	ADDONS_ENABLED['trace'] = False
 
 # Check dev package
-if
-DEV_PACKAGE =
+if os.path.exists(f'{ROOT_FOLDER}/pyproject.toml'):
+	DEV_PACKAGE = True
 else:
-DEV_PACKAGE =
+	DEV_PACKAGE = False
secator/exporters/txt.py
CHANGED
@@ -11,7 +11,7 @@ class TxtExporter(Exporter):
 			items = [str(i) for i in items]
 			if not items:
 				continue
-			txt_path = f'{self.report.output_folder}/
+			txt_path = f'{self.report.output_folder}/report_{output_type}.txt'
 			with open(txt_path, 'w') as f:
 				f.write('\n'.join(items))
 			txt_paths.append(txt_path)
secator/installer.py
ADDED
@@ -0,0 +1,335 @@
+
+import requests
+import os
+import platform
+import shutil
+import tarfile
+import zipfile
+import io
+
+from rich.table import Table
+
+from secator.rich import console
+from secator.runners import Command
+from secator.definitions import BIN_FOLDER, GITHUB_TOKEN
+
+
+class ToolInstaller:
+
+	@classmethod
+	def install(cls, tool_cls):
+		"""Install a tool.
+
+		Args:
+			cls: ToolInstaller class.
+			tool_cls: Tool class (derived from secator.runners.Command).
+
+		Returns:
+			bool: True if install is successful, False otherwise.
+		"""
+		console.print(f'[bold gold3]:wrench: Installing {tool_cls.__name__}')
+		success = False
+
+		if not tool_cls.install_github_handle and not tool_cls.install_cmd:
+			console.print(
+				f'[bold red]{tool_cls.__name__} install is not supported yet. Please install it manually.[/]')
+			return False
+
+		if tool_cls.install_github_handle:
+			success = GithubInstaller.install(tool_cls.install_github_handle)
+
+		if tool_cls.install_cmd and not success:
+			success = SourceInstaller.install(tool_cls.install_cmd)
+
+		if success:
+			console.print(
+				f'[bold green]:tada: {tool_cls.__name__} installed successfully[/] !')
+		else:
+			console.print(
+				f'[bold red]:exclamation_mark: Failed to install {tool_cls.__name__}.[/]')
+		return success
+
+
+class SourceInstaller:
+	"""Install a tool from source."""
+
+	@classmethod
+	def install(cls, install_cmd):
+		"""Install from source.
+
+		Args:
+			cls: ToolInstaller class.
+			install_cmd (str): Install command.
+
+		Returns:
+			bool: True if install is successful, False otherwise.
+		"""
+		ret = Command.execute(install_cmd, cls_attributes={'shell': True})
+		return ret.return_code == 0
+
+
+class GithubInstaller:
+	"""Install a tool from GitHub releases."""
+
+	@classmethod
+	def install(cls, github_handle):
+		"""Find and install a release from a GitHub handle {user}/{repo}.
+
+		Args:
+			github_handle (str): A GitHub handle {user}/{repo}
+
+		Returns:
+			bool: True if install is successful, False otherwise.
+		"""
+		_, repo = tuple(github_handle.split('/'))
+		latest_release = cls.get_latest_release(github_handle)
+		if not latest_release:
+			return False
+
+		# Find the right asset to download
+		os_identifiers, arch_identifiers = cls._get_platform_identifier()
+		download_url = cls._find_matching_asset(latest_release['assets'], os_identifiers, arch_identifiers)
+		if not download_url:
+			console.print('[dim red]Could not find a GitHub release matching distribution.[/]')
+			return False
+
+		# Download and unpack asset
+		console.print(f'Found release URL: {download_url}')
+		cls._download_and_unpack(download_url, BIN_FOLDER, repo)
+		return True
+
+	@classmethod
+	def get_latest_release(cls, github_handle):
+		"""Get latest release from GitHub.
+
+		Args:
+			github_handle (str): A GitHub handle {user}/{repo}.
+
+		Returns:
+			dict: Latest release JSON from GitHub releases.
+		"""
+		if not github_handle:
+			return False
+		owner, repo = tuple(github_handle.split('/'))
+		url = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
+		headers = {}
+		if GITHUB_TOKEN:
+			headers['Authorization'] = f'Bearer {GITHUB_TOKEN}'
+		try:
+			response = requests.get(url, headers=headers, timeout=5)
+			response.raise_for_status()
+			latest_release = response.json()
+			return latest_release
+		except requests.RequestException as e:
+			console.print(f'Failed to fetch latest release for {github_handle}: {str(e)}')
+			return None
+
+	@classmethod
+	def get_latest_version(cls, github_handle):
+		latest_release = cls.get_latest_release(github_handle)
+		if not latest_release:
+			return None
+		return latest_release['tag_name'].lstrip('v')
+
+	@classmethod
+	def _get_platform_identifier(cls):
+		"""Generate lists of possible identifiers for the current platform."""
+		system = platform.system().lower()
+		arch = platform.machine().lower()
+
+		# Mapping common platform.system() values to those found in release names
+		os_mapping = {
+			'linux': ['linux'],
+			'windows': ['windows', 'win'],
+			'darwin': ['darwin', 'macos', 'osx', 'mac']
+		}
+
+		# Enhanced architecture mapping to avoid conflicts
+		arch_mapping = {
+			'x86_64': ['amd64', 'x86_64'],
+			'amd64': ['amd64', 'x86_64'],
+			'aarch64': ['arm64', 'aarch64'],
+			'armv7l': ['armv7', 'arm'],
+			'386': ['386', 'x86', 'i386'],
+		}
+
+		os_identifiers = os_mapping.get(system, [])
+		arch_identifiers = arch_mapping.get(arch, [])
+		return os_identifiers, arch_identifiers
+
+	@classmethod
+	def _find_matching_asset(cls, assets, os_identifiers, arch_identifiers):
+		"""Find a release asset matching the current platform more precisely."""
+		potential_matches = []
+
+		for asset in assets:
+			asset_name = asset['name'].lower()
+			if any(os_id in asset_name for os_id in os_identifiers) and \
+				any(arch_id in asset_name for arch_id in arch_identifiers):
+				potential_matches.append(asset['browser_download_url'])
+
+		# Preference ordering for file formats, if needed
+		preferred_formats = ['.tar.gz', '.zip']
+
+		for format in preferred_formats:
+			for match in potential_matches:
+				if match.endswith(format):
+					return match
+
+		if potential_matches:
+			return potential_matches[0]
+
+	@classmethod
+	def _download_and_unpack(cls, url, destination, repo_name):
+		"""Download and unpack a release asset."""
+		console.print(f'Downloading and unpacking to {destination}...')
+		response = requests.get(url, timeout=5)
+		response.raise_for_status()
+
+		# Create a temporary directory to extract the archive
+		temp_dir = os.path.join("/tmp", repo_name)
+		os.makedirs(temp_dir, exist_ok=True)
+
+		if url.endswith('.zip'):
+			with zipfile.ZipFile(io.BytesIO(response.content)) as zip_ref:
+				zip_ref.extractall(temp_dir)
+		elif url.endswith('.tar.gz'):
+			with tarfile.open(fileobj=io.BytesIO(response.content), mode='r:gz') as tar:
+				tar.extractall(path=temp_dir)
+
+		# For archives, find and move the binary that matches the repo name
+		binary_path = cls._find_binary_in_directory(temp_dir, repo_name)
+		if binary_path:
+			os.chmod(binary_path, 0o755)  # Make it executable
+			shutil.move(binary_path, os.path.join(destination, repo_name))  # Move the binary
+		else:
+			console.print('[bold red]Binary matching the repository name was not found in the archive.[/]')
+
+	@classmethod
+	def _find_binary_in_directory(cls, directory, binary_name):
+		"""Search for the binary in the given directory that matches the repository name."""
+		for root, _, files in os.walk(directory):
+			for file in files:
+				# Match the file name exactly with the repository name
+				if file == binary_name:
+					return os.path.join(root, file)
+		return None
+
+
+def which(command):
+	"""Run which on a command.
+
+	Args:
+		command (str): Command to check.
+
+	Returns:
+		secator.Command: Command instance.
+	"""
+	return Command.execute(f'which {command}', quiet=True, print_errors=False)
+
+
+def get_version(version_cmd):
+	"""Run version command and match first version number found.
+
+	Args:
+		version_cmd (str): Command to get the version.
+
+	Returns:
+		str: Version string.
+	"""
+	from secator.runners import Command
+	import re
+	regex = r'[0-9]+\.[0-9]+\.?[0-9]*\.?[a-zA-Z]*'
+	ret = Command.execute(version_cmd, quiet=True, print_errors=False)
+	match = re.findall(regex, ret.output)
+	if not match:
+		return ''
+	return match[0]
+
+
+def get_version_info(name, version_flag=None, github_handle=None, version=None):
+	"""Get version info for a command.
+
+	Args:
+		name (str): Command name.
+		version_flag (str): Version flag.
+		github_handle (str): Github handle.
+		version (str): Existing version.
+
+	Return:
+		dict: Version info.
+	"""
+	from pkg_resources import parse_version
+	from secator.installer import GithubInstaller
+	info = {
+		'name': name,
+		'installed': False,
+		'version': version,
+		'latest_version': None,
+		'location': None,
+		'status': ''
+	}
+
+	# Get binary path
+	location = which(name).output
+	info['location'] = location
+
+	# Get current version
+	if version_flag:
+		version_cmd = f'{name} {version_flag}'
+		version = get_version(version_cmd)
+		info['version'] = version
+
+	# Get latest version
+	latest_version = GithubInstaller.get_latest_version(github_handle)
+	info['latest_version'] = latest_version
+
+	if location:
+		info['installed'] = True
+		if version and latest_version:
+			if parse_version(version) < parse_version(latest_version):
+				info['status'] = 'outdated'
+			else:
+				info['status'] = 'latest'
+		elif not version:
+			info['status'] = 'current unknown'
+		elif not latest_version:
+			info['status'] = 'latest unknown'
+	else:
+		info['status'] = 'missing'
+
+	return info
+
+
+def fmt_health_table_row(version_info, category=None):
+	name = version_info['name']
+	version = version_info['version']
+	status = version_info['status']
+	installed = version_info['installed']
+	name_str = f'[magenta]{name}[/]'
+
+	# Format version row
+	_version = version or ''
+	_version = f'[bold green]{_version:<10}[/]'
+	if status == 'latest':
+		_version += ' [bold green](latest)[/]'
+	elif status == 'outdated':
+		_version += ' [bold red](outdated)[/]'
+	elif status == 'missing':
+		_version = '[bold red]missing[/]'
+	elif status == 'ok':
+		_version = '[bold green]ok [/]'
+	elif status:
+		if not version and installed:
+			_version = '[bold green]ok [/]'
+		_version += f' [dim]({status}[/])'
+
+	row = (name_str, _version)
+	return row
+
+
+def get_health_table():
+	table = Table(box=None, show_header=False)
+	for col in ['name', 'version']:
+		table.add_column(col)
+	return table
secator/rich.py
CHANGED
@@ -1,19 +1,13 @@
 import operator
 
-import click
-import rich_click
 import yaml
 from rich import box
 from rich.console import Console
 from rich.table import Table
-from rich.traceback import install
 
-
-
-console = Console(stderr=True, record=RECORD, color_system='truecolor')
+console = Console(stderr=True, color_system='truecolor')
 console_stdout = Console(record=True)
 # handler = RichHandler(rich_tracebacks=True)  # TODO: add logging handler
-install(show_locals=DEBUG > 2, suppress=[click, rich_click])
 
 
 def criticity_to_color(value):
@@ -73,7 +67,7 @@ def build_table(items, output_fields=[], exclude_fields=[], sort_by=None):
 		items = sorted(items, key=operator.attrgetter(*sort_by))
 
 	# Create rich table
-	box_style = box.
+	box_style = box.ROUNDED
 	table = Table(show_lines=True, box=box_style)
 
 	# Get table schema if any, default to first item keys