secator 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of secator might be problematic. Click here for more details.
- secator/__init__.py +0 -0
- secator/celery.py +482 -0
- secator/cli.py +617 -0
- secator/config.py +137 -0
- secator/configs/__init__.py +0 -0
- secator/configs/profiles/__init__.py +0 -0
- secator/configs/profiles/aggressive.yaml +7 -0
- secator/configs/profiles/default.yaml +9 -0
- secator/configs/profiles/stealth.yaml +7 -0
- secator/configs/scans/__init__.py +0 -0
- secator/configs/scans/domain.yaml +18 -0
- secator/configs/scans/host.yaml +14 -0
- secator/configs/scans/network.yaml +17 -0
- secator/configs/scans/subdomain.yaml +8 -0
- secator/configs/scans/url.yaml +12 -0
- secator/configs/workflows/__init__.py +0 -0
- secator/configs/workflows/cidr_recon.yaml +28 -0
- secator/configs/workflows/code_scan.yaml +11 -0
- secator/configs/workflows/host_recon.yaml +41 -0
- secator/configs/workflows/port_scan.yaml +34 -0
- secator/configs/workflows/subdomain_recon.yaml +33 -0
- secator/configs/workflows/url_crawl.yaml +29 -0
- secator/configs/workflows/url_dirsearch.yaml +29 -0
- secator/configs/workflows/url_fuzz.yaml +35 -0
- secator/configs/workflows/url_nuclei.yaml +11 -0
- secator/configs/workflows/url_vuln.yaml +55 -0
- secator/configs/workflows/user_hunt.yaml +10 -0
- secator/configs/workflows/wordpress.yaml +14 -0
- secator/decorators.py +309 -0
- secator/definitions.py +165 -0
- secator/exporters/__init__.py +12 -0
- secator/exporters/_base.py +3 -0
- secator/exporters/csv.py +30 -0
- secator/exporters/gdrive.py +118 -0
- secator/exporters/json.py +15 -0
- secator/exporters/table.py +7 -0
- secator/exporters/txt.py +25 -0
- secator/hooks/__init__.py +0 -0
- secator/hooks/mongodb.py +212 -0
- secator/output_types/__init__.py +24 -0
- secator/output_types/_base.py +95 -0
- secator/output_types/exploit.py +50 -0
- secator/output_types/ip.py +33 -0
- secator/output_types/port.py +45 -0
- secator/output_types/progress.py +35 -0
- secator/output_types/record.py +34 -0
- secator/output_types/subdomain.py +42 -0
- secator/output_types/tag.py +46 -0
- secator/output_types/target.py +30 -0
- secator/output_types/url.py +76 -0
- secator/output_types/user_account.py +41 -0
- secator/output_types/vulnerability.py +97 -0
- secator/report.py +107 -0
- secator/rich.py +124 -0
- secator/runners/__init__.py +12 -0
- secator/runners/_base.py +833 -0
- secator/runners/_helpers.py +153 -0
- secator/runners/command.py +638 -0
- secator/runners/scan.py +65 -0
- secator/runners/task.py +106 -0
- secator/runners/workflow.py +135 -0
- secator/serializers/__init__.py +8 -0
- secator/serializers/dataclass.py +33 -0
- secator/serializers/json.py +15 -0
- secator/serializers/regex.py +17 -0
- secator/tasks/__init__.py +10 -0
- secator/tasks/_categories.py +304 -0
- secator/tasks/cariddi.py +102 -0
- secator/tasks/dalfox.py +65 -0
- secator/tasks/dirsearch.py +90 -0
- secator/tasks/dnsx.py +56 -0
- secator/tasks/dnsxbrute.py +34 -0
- secator/tasks/feroxbuster.py +91 -0
- secator/tasks/ffuf.py +86 -0
- secator/tasks/fping.py +44 -0
- secator/tasks/gau.py +47 -0
- secator/tasks/gf.py +33 -0
- secator/tasks/gospider.py +71 -0
- secator/tasks/grype.py +79 -0
- secator/tasks/h8mail.py +81 -0
- secator/tasks/httpx.py +99 -0
- secator/tasks/katana.py +133 -0
- secator/tasks/maigret.py +78 -0
- secator/tasks/mapcidr.py +32 -0
- secator/tasks/msfconsole.py +174 -0
- secator/tasks/naabu.py +52 -0
- secator/tasks/nmap.py +344 -0
- secator/tasks/nuclei.py +97 -0
- secator/tasks/searchsploit.py +52 -0
- secator/tasks/subfinder.py +40 -0
- secator/tasks/wpscan.py +179 -0
- secator/utils.py +445 -0
- secator/utils_test.py +183 -0
- secator-0.0.1.dist-info/LICENSE +60 -0
- secator-0.0.1.dist-info/METADATA +199 -0
- secator-0.0.1.dist-info/RECORD +114 -0
- secator-0.0.1.dist-info/WHEEL +5 -0
- secator-0.0.1.dist-info/entry_points.txt +2 -0
- secator-0.0.1.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/inputs.py +42 -0
- tests/integration/outputs.py +392 -0
- tests/integration/test_scans.py +82 -0
- tests/integration/test_tasks.py +103 -0
- tests/integration/test_workflows.py +163 -0
- tests/performance/__init__.py +0 -0
- tests/performance/loadtester.py +56 -0
- tests/unit/__init__.py +0 -0
- tests/unit/test_celery.py +39 -0
- tests/unit/test_scans.py +0 -0
- tests/unit/test_serializers.py +51 -0
- tests/unit/test_tasks.py +348 -0
- tests/unit/test_workflows.py +96 -0
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import unittest
|
|
4
|
+
import warnings
|
|
5
|
+
from time import sleep
|
|
6
|
+
|
|
7
|
+
from secator.definitions import DEBUG
|
|
8
|
+
from secator.rich import console
|
|
9
|
+
from secator.runners import Command
|
|
10
|
+
from secator.utils import setup_logging, merge_opts
|
|
11
|
+
from secator.utils_test import (META_OPTS, TEST_TASKS, CommandOutputTester,
|
|
12
|
+
load_fixture)
|
|
13
|
+
from tests.integration.inputs import INPUTS_TASKS
|
|
14
|
+
from tests.integration.outputs import OUTPUTS_TASKS
|
|
15
|
+
|
|
16
|
+
# Directory holding this file; setup/teardown scripts and fixtures live beside it.
INTEGRATION_DIR = os.path.dirname(os.path.abspath(__file__))

# Verbose logging when secator's DEBUG level is enabled, plain INFO otherwise.
level = logging.DEBUG if DEBUG > 0 else logging.INFO
setup_logging(level)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class TestTasks(unittest.TestCase, CommandOutputTester):
    """Integration tests running every registered secator task against live targets.

    setUp boots the target services with setup.sh and tearDown destroys them
    with teardown.sh, so each test method runs against a fresh environment.
    """

    def setUp(self):
        warnings.simplefilter('ignore', category=ResourceWarning)
        warnings.simplefilter('ignore', category=DeprecationWarning)
        Command.run_command(
            f'sh {INTEGRATION_DIR}/setup.sh',
            cwd=INTEGRATION_DIR
        )
        # Give the freshly started targets time to become reachable.
        sleep(15)

    def tearDown(self):
        Command.run_command(
            f'sh {INTEGRATION_DIR}/teardown.sh',
            cwd=INTEGRATION_DIR
        )

    def test_tasks(self):
        """Run each task in TEST_TASKS and validate return code and output types."""
        opts = META_OPTS.copy()
        fmt_opts = {
            'print_cmd': DEBUG > 0,
            'print_item': DEBUG > 1,
            'print_item_count': DEBUG > 0,
            'json': DEBUG > 2
        }
        extra_opts = {
            'dirsearch.filter_size': 1987,
            'dnsxbrute.wordlist': load_fixture('wordlist_dns', INTEGRATION_DIR, only_path=True),
            'ffuf.filter_size': 1987,
            'feroxbuster.filter_size': 1987,
            'h8mail.local_breach': load_fixture('h8mail_breach', INTEGRATION_DIR, only_path=True),
            'nmap.port': '3000,8080',
            'match_codes': '200',
            'maigret.site': 'github',
            'wordlist': load_fixture('wordlist', INTEGRATION_DIR, only_path=True),
        }

        # Merge opts
        opts = merge_opts(opts, fmt_opts, extra_opts)

        # Remove unit tests options
        del opts['nmap.output_path']
        del opts['maigret.output_path']
        del opts['dirsearch.output_path']
        del opts['wpscan.output_path']
        del opts['timeout']

        for cls in TEST_TASKS:
            if cls.__name__ == 'msfconsole':  # skip msfconsole test as it's stuck
                continue
            with self.subTest(name=cls.__name__):
                console.print(f'Testing {cls.__name__} ...')

                # Get task input (renamed from `input` to avoid shadowing the builtin)
                task_input = INPUTS_TASKS.get(cls.__name__) or INPUTS_TASKS.get(cls.input_type)
                if not task_input:
                    console.print(f'No input for {cls.__name__} ! Skipping')
                    continue

                # Get task output
                outputs = OUTPUTS_TASKS.get(cls.__name__, [])

                # Init task
                task = cls(task_input, **opts)

                # Run task
                results = task.run()
                if DEBUG > 2:
                    # Print one flat list of serialized results (the original
                    # wrapped the list in another list by mistake).
                    console.print([r.toDict() for r in results])

                # Check return code
                if not task.ignore_return_code:
                    self.assertEqual(task.return_code, 0)

                if not results:
                    console.print(f'No results from {cls.__name__} ! Skipping item check.')
                    continue

                # Test result types
                self._test_task_output(
                    results,
                    expected_output_types=cls.output_types,
                    expected_results=outputs,
                    empty_results_allowed=True)
|
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import unittest
|
|
4
|
+
import warnings
|
|
5
|
+
from time import sleep
|
|
6
|
+
|
|
7
|
+
from secator.config import ConfigLoader
|
|
8
|
+
from secator.runners import Task
|
|
9
|
+
from secator.output_types import Target, Port, Url
|
|
10
|
+
from secator.definitions import DEBUG
|
|
11
|
+
from secator.rich import console
|
|
12
|
+
from secator.runners import Command, Workflow
|
|
13
|
+
from secator.utils import setup_logging, merge_opts
|
|
14
|
+
from secator.utils_test import TEST_WORKFLOWS, CommandOutputTester, load_fixture
|
|
15
|
+
from tests.integration.inputs import INPUTS_WORKFLOWS
|
|
16
|
+
from tests.integration.outputs import OUTPUTS_WORKFLOWS
|
|
17
|
+
|
|
18
|
+
# Directory holding this file; setup/teardown scripts and fixtures live beside it.
INTEGRATION_DIR = os.path.dirname(os.path.abspath(__file__))

# Verbose logging when secator's DEBUG level is enabled, plain INFO otherwise.
level = logging.DEBUG if DEBUG > 0 else logging.INFO
setup_logging(level)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def hook_workflow_init(self):
    """Workflow `on_init` hook: tag the runner context with a fixed workflow id."""
    self.context['workflow_id'] = 1
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def hook_task_init(self):
    """Task `on_init` hook: tag the runner context with a fixed task id."""
    self.context['task_id'] = 1
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def hook_item(self, item):
    """Task `on_item` hook: dump the item for debugging and pass it through unchanged."""
    print(item.toDict())
    return item
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class TestWorkflows(unittest.TestCase, CommandOutputTester):
    """Integration tests for secator workflows: the shipped YAML workflows and an
    ad-hoc workflow built from an inline config dict.

    setUp boots the target services with setup.sh and tearDown destroys them
    with teardown.sh, so each test method runs against a fresh environment.
    """

    def setUp(self):
        warnings.simplefilter('ignore', category=ResourceWarning)
        warnings.simplefilter('ignore', category=DeprecationWarning)
        Command.run_command(
            f'sh {INTEGRATION_DIR}/setup.sh',
            cwd=INTEGRATION_DIR
        )
        # Give the freshly started targets time to become reachable.
        sleep(15)

    def tearDown(self):
        Command.run_command(
            f'sh {INTEGRATION_DIR}/teardown.sh',
            cwd=INTEGRATION_DIR
        )

    def test_default_workflows(self):
        """Run every workflow in TEST_WORKFLOWS and validate its results."""
        fmt_opts = {
            'print_cmd': DEBUG > 0,
            'print_item': DEBUG > 1,
            'print_line': DEBUG > 2,
            'table': DEBUG > 1,
            'output': 'table' if DEBUG > 1 else ''
        }
        opts = {
            'ffuf.filter_size': 1987,
            'feroxbuster.filter_size': 1987,
            'dirsearch.filter_size': 1987,
            'follow_redirect': True,
            'match_codes': '200',
            'httpx.match_codes': False,
            'httpx.filter_size': False,
            'nuclei.retries': 5,
            'nuclei.timeout': 15,
            'rate_limit': 1000,
            'wordlist': load_fixture('wordlist', INTEGRATION_DIR, only_path=True),
            'timeout': 7,
            'depth': 2
        }
        opts = merge_opts(opts, fmt_opts)

        for conf in TEST_WORKFLOWS:
            with self.subTest(name=conf.name):
                console.print(f'Testing workflow {conf.name} ...')
                inputs = INPUTS_WORKFLOWS.get(conf.name, [])
                if not inputs:
                    console.print(
                        f'No inputs for workflow {conf.name} ! Skipping.', style='dim red'
                    )
                    continue
                workflow = Workflow(conf, targets=inputs, run_opts=opts)
                results = workflow.run()
                if DEBUG > 0:
                    for result in results:
                        print(repr(result))
                outputs = OUTPUTS_WORKFLOWS.get(conf.name, [])
                if not outputs:
                    console.print(
                        f'No outputs for workflow {conf.name} ! Skipping.', style='dim red'
                    )
                    continue
                self._test_task_output(
                    results,
                    expected_results=outputs)

    def test_adhoc_workflow(self):
        """Build a workflow from an inline config dict, run it with hooks, and
        verify deduplication, hook-injected context, and expected results."""
        # Ignore if TEST_WORKFLOWS are defined
        if TEST_WORKFLOWS:
            return

        # Expected results / context
        expected_results = [
            Port(port=9999, host='localhost', service_name='fake', _source='unknown'),
            Port(port=3000, host='localhost', ip='127.0.0.1', _source='naabu'),
            Port(port=8080, host='localhost', ip='127.0.0.1', _source='naabu'),
            Url(url='http://localhost:3000', host='127.0.0.1', status_code=200, title='OWASP Juice Shop', content_type='text/html', _source='httpx'),
            Url(url='http://localhost:8080', host='127.0.0.1', status_code=400, title='', content_type='application/json', _source='httpx'),
        ]
        expected_context = {
            'task_id': 1,
            'workflow_id': 1
        }

        # Create ad-hoc workflow: naabu feeds ports to httpx as `{host}:{port}` targets.
        conf = {
            'name': 'my_workflow',
            'description': 'Test workflow',
            'tasks': {
                'naabu': {},
                'httpx': {
                    'targets_': {'type': 'port', 'field': '{host}:{port}'}
                }
            }
        }
        config = ConfigLoader(conf)
        workflow = Workflow(
            config,
            targets=['localhost'],
            results=[
                Port(port=9999, host='localhost', service_name='fake', _source='unknown', _context=expected_context)
            ],
            hooks={
                Workflow: {
                    'on_init': [hook_workflow_init],
                },
                Task: {
                    'on_init': [hook_task_init],
                    'on_item': [hook_item],
                }
            }
        )
        seen_uuids = set()
        results = []

        # Verify no duplicates and context added from hook is present in output
        for result in workflow:
            self.assertEqual(result._context, expected_context)
            self.assertNotIn(result._uuid, seen_uuids)
            seen_uuids.add(result._uuid)
            results.append(result)

        # Verify results yielded from workflow and workflow.results are equal
        self.assertEqual(results, workflow.results)

        # Verify expected results are there
        for res in expected_results:
            self.assertIn(res, workflow.results)
|
|
File without changes
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import eventlet
|
|
2
|
+
eventlet.monkey_patch()
|
|
3
|
+
from secator.runners import Workflow
|
|
4
|
+
from secator.config import ConfigLoader
|
|
5
|
+
from secator.rich import console
|
|
6
|
+
from secator.celery import *
|
|
7
|
+
import _thread
|
|
8
|
+
import os
|
|
9
|
+
import sys
|
|
10
|
+
import json
|
|
11
|
+
from time import sleep, time
|
|
12
|
+
|
|
13
|
+
pool = eventlet.GreenPool(100)
|
|
14
|
+
|
|
15
|
+
from kombu.serialization import register
|
|
16
|
+
|
|
17
|
+
# Read load-test parameters from the environment.
nworkflows = int(os.environ.get('SECATOR_WORKFLOWS_COUNT', '1'))
targets = [t.strip() for t in os.environ.get('SECATOR_TARGETS', '').split(',') if t]

# Fail fast when no targets were supplied.
if not targets:
    console.log('Please specify test targets with SECATOR_TARGETS keys (hosts are comma-separated)', style='bold red')
    sys.exit(1)

# A live Celery worker is required to consume the workflow tasks.
if not is_celery_worker_alive():
    console.log('A Celery worker must be running. Start one using "secator worker"', style='bold red')
    sys.exit(1)

console.log(f'Load tester initialized with {len(targets)} targets.')
console.log(f'Targets: {targets}')
|
|
30
|
+
|
|
31
|
+
def create_runner(index):
    """Launch one async `subdomain_recon` workflow and block until it completes.

    Args:
        index (int): zero-based index of this runner, used only for progress output.
    """
    # NOTE(review): re-registers the kombu JSON serializer on every call;
    # presumably needed per green thread — confirm whether this could be
    # hoisted to module level.
    register('json', json.dumps, json.loads, content_type='application/json', content_encoding='utf-8')
    wf_config = ConfigLoader(name='workflows/subdomain_recon')
    run_opts = {
        'sync': False,
        # 'print_start': True,
        'print_item': True,
        # 'print_remote_status': True,
        # 'print_run_summary': True,
        'json': False
    }
    async_result = Workflow.delay(wf_config, targets, run_opts=run_opts)
    # Poll until the Celery result is ready, printing progress every 2s.
    while not async_result.ready():
        print(f'Running workflow {index + 1}/{nworkflows} ..')
        sleep(2)
    async_result.get()
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
if __name__ == '__main__':
    # Fan out all workflows onto the green pool and time the full run.
    print(f'Starting {nworkflows} workflows ...')
    start_time = time()
    for runner_index in range(nworkflows):
        pool.spawn(create_runner, runner_index)
    pool.waitall()
    elapsed_time = time() - start_time
    print(f'All workflows completed in {elapsed_time:.2f}s.')
|
tests/unit/__init__.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from secator.celery import *
|
|
2
|
+
import unittest
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
# class TestCeleryCommand(unittest.TestCase):
|
|
6
|
+
|
|
7
|
+
# def test_parent_run_command(self):
|
|
8
|
+
# result = run_command.apply(
|
|
9
|
+
# args=[
|
|
10
|
+
# [],
|
|
11
|
+
# 'ffuf',
|
|
12
|
+
# ['https://mydomain.com', 'https://media.mydomain.com']
|
|
13
|
+
# ]
|
|
14
|
+
# )
|
|
15
|
+
# results = result.get()
|
|
16
|
+
# self.assertTrue(isinstance(results, list))
|
|
17
|
+
|
|
18
|
+
# def test_parent_run_command_delay(self):
|
|
19
|
+
# result = run_command.delay(
|
|
20
|
+
# [],
|
|
21
|
+
# 'ffuf',
|
|
22
|
+
# ['https://mydomain.com', 'https://media.mydomain.com'],
|
|
23
|
+
# opts={
|
|
24
|
+
# 'sync': False
|
|
25
|
+
# }
|
|
26
|
+
# )
|
|
27
|
+
# results = result.get()
|
|
28
|
+
# self.assertTrue(isinstance(results, dict))
|
|
29
|
+
# self.assertTrue('results' in results)
|
|
30
|
+
|
|
31
|
+
# def test_parent_command_delay(self):
|
|
32
|
+
# result = ffuf.delay(
|
|
33
|
+
# ['https://mydomain.com', 'https://media.mydomain.com'],
|
|
34
|
+
# print_cmd=True,
|
|
35
|
+
# print_item=True,
|
|
36
|
+
# )
|
|
37
|
+
# ffuf.poll(result)
|
|
38
|
+
# results = result.get()
|
|
39
|
+
# print(results)
|
tests/unit/test_scans.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
from secator.celery import *
|
|
2
|
+
import unittest
|
|
3
|
+
from secator.serializers.dataclass import dumps_dataclass, loads_dataclass
|
|
4
|
+
from secator.output_types import Port, Vulnerability
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class TestSerializers(unittest.TestCase):
    """Unit tests for dataclass JSON round-tripping via dumps_dataclass / loads_dataclass."""

    def test_dumps_loads(self):
        """A flat list of output objects survives a dumps/loads round-trip."""
        results = [
            Port(port=53, ip='127.0.0.1', host='localhost'),
            Vulnerability(matched_at='localhost', name='CVE-123123123', provider='nmap')
        ]
        results = loads_dataclass(dumps_dataclass(results))
        self.assertIsInstance(results, list)
        self.assertEqual(len(results), 2)
        # Deserialization must restore concrete output types, not plain dicts.
        # (isinstance replaces the original `type(x) in [...]` check; a
        # duplicated list-type assertion was also removed.)
        self.assertTrue(all(isinstance(result, (Port, Vulnerability)) for result in results))

    def test_dumps_loads_nested(self):
        """Dicts tagged with `_type` nested inside containers deserialize to objects."""
        results = {
            'info': {'name': 'test'},
            'results': {
                'ports': [
                    {'port': 53, 'ip': '127.0.0.1', 'host': 'localhost', '_type': 'port'},
                ],
                'vulnerabilities': [
                    {'matched_at': 'localhost', 'name': 'CVE-123123123', 'provider': 'nmap', '_type': 'vulnerability'}
                ]
            }
        }
        results = loads_dataclass(dumps_dataclass(results))
        self.assertIsInstance(results['results']['ports'][0], Port)
        self.assertIsInstance(results['results']['vulnerabilities'][0], Vulnerability)

    def test_dumps_loads_nested_obj(self):
        """Output objects nested inside containers survive a round-trip."""
        results = {
            'info': {'name': 'test'},
            'results': {
                'ports': [
                    Port(port=53, ip='127.0.0.1', host='localhost'),
                ],
                'vulnerabilities': [
                    Vulnerability(matched_at='localhost', name='CVE-123123123', provider='nmap')
                ]
            }
        }
        results = loads_dataclass(dumps_dataclass(results))
        self.assertIsInstance(results['results']['ports'][0], Port)
        self.assertIsInstance(results['results']['vulnerabilities'][0], Vulnerability)
|