secator 0.16.5__py3-none-any.whl → 0.17.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of secator has been flagged as a potentially problematic release.
- secator/celery.py +75 -9
- secator/celery_signals.py +2 -1
- secator/cli.py +7 -1
- secator/config.py +1 -0
- secator/hooks/gcs.py +11 -1
- secator/hooks/mongodb.py +71 -66
- secator/installer.py +1 -1
- secator/output_types/certificate.py +1 -1
- secator/output_types/exploit.py +1 -1
- secator/output_types/ip.py +1 -1
- secator/output_types/progress.py +1 -1
- secator/output_types/record.py +1 -1
- secator/output_types/stat.py +1 -1
- secator/output_types/state.py +1 -1
- secator/output_types/subdomain.py +1 -1
- secator/output_types/tag.py +1 -1
- secator/output_types/target.py +1 -1
- secator/output_types/user_account.py +1 -1
- secator/output_types/vulnerability.py +1 -1
- secator/runners/command.py +21 -6
- secator/tasks/cariddi.py +37 -1
- secator/tasks/dalfox.py +2 -2
- secator/tasks/dirsearch.py +0 -1
- secator/tasks/feroxbuster.py +0 -1
- secator/tasks/ffuf.py +0 -1
- secator/tasks/katana.py +3 -0
- secator/tasks/naabu.py +1 -2
- secator/tasks/nuclei.py +4 -1
- secator/utils.py +9 -0
- {secator-0.16.5.dist-info → secator-0.17.0.dist-info}/METADATA +1 -1
- {secator-0.16.5.dist-info → secator-0.17.0.dist-info}/RECORD +34 -34
- {secator-0.16.5.dist-info → secator-0.17.0.dist-info}/WHEEL +0 -0
- {secator-0.16.5.dist-info → secator-0.17.0.dist-info}/entry_points.txt +0 -0
- {secator-0.16.5.dist-info → secator-0.17.0.dist-info}/licenses/LICENSE +0 -0
secator/celery.py
CHANGED
@@ -1,3 +1,4 @@
+import gc
 import json
 import logging
 import os
@@ -5,6 +6,7 @@ import os
 from time import time

 from celery import Celery, chord
+from celery.canvas import signature
 from celery.app import trace

 from rich.logging import RichHandler
@@ -61,9 +63,10 @@ app.conf.update({
     'result_backend': CONFIG.celery.result_backend,
     'result_expires': CONFIG.celery.result_expires,
     'result_backend_transport_options': json.loads(CONFIG.celery.result_backend_transport_options) if CONFIG.celery.result_backend_transport_options else {},  # noqa: E501
-    'result_extended':
+    'result_extended': not CONFIG.addons.mongodb.enabled,
     'result_backend_thread_safe': True,
     'result_serializer': 'pickle',
+    'result_accept_content': ['application/x-python-serialize'],

     # Task config
     'task_acks_late': CONFIG.celery.task_acks_late,
@@ -81,6 +84,11 @@ app.conf.update({
     'task_store_eager_result': True,
     'task_send_sent_event': CONFIG.celery.task_send_sent_event,
     'task_serializer': 'pickle',
+    'task_accept_content': ['application/x-python-serialize'],
+
+    # Event config
+    'event_serializer': 'pickle',
+    'event_accept_content': ['application/x-python-serialize'],

     # Worker config
     # 'worker_direct': True, # TODO: consider enabling this to allow routing to specific workers
@@ -168,6 +176,12 @@ def run_scan(self, args=[], kwargs={}):

 @app.task(bind=True)
 def run_command(self, results, name, targets, opts={}):
+    # Set Celery request id in context
+    context = opts.get('context', {})
+    context['celery_id'] = self.request.id
+    context['worker_name'] = os.environ.get('WORKER_NAME', 'unknown')
+
+    # Set routing key in context
     if IN_CELERY_WORKER_PROCESS:
         quiet = not CONFIG.cli.worker_command_verbose
         opts.update({
@@ -179,15 +193,13 @@ def run_command(self, results, name, targets, opts={}):
             'quiet': quiet
         })
         routing_key = self.request.delivery_info['routing_key']
+        context['routing_key'] = routing_key
         debug(f'Task "{name}" running with routing key "{routing_key}"', sub='celery.state')

     # Flatten + dedupe + filter results
     results = forward_results(results)

-    # Set
-    context = opts.get('context', {})
-    context['celery_id'] = self.request.id
-    context['worker_name'] = os.environ.get('WORKER_NAME', 'unknown')
+    # Set task opts
     opts['context'] = context
     opts['results'] = results
     opts['sync'] = True
@@ -204,10 +216,13 @@ def run_command(self, results, name, targets, opts={}):
     # Chunk task if needed
     if chunk_it:
         if IN_CELERY_WORKER_PROCESS:
-            console.print(Info(message=f'Task {name} requires chunking
-
+            console.print(Info(message=f'Task {name} requires chunking'))
+        workflow = break_task(task, opts, results=results)
+        if IN_CELERY_WORKER_PROCESS:
+            console.print(Info(message=f'Task {name} successfully broken into {len(workflow)} chunks'))
         update_state(self, task, force=True)
-
+        console.print(Info(message=f'Task {name} updated state, replacing task with Celery chord workflow'))
+        return replace(self, workflow)

     # Update state live
     for _ in task:
@@ -327,6 +342,52 @@ def is_celery_worker_alive():
     return result


+def replace(task_instance, sig):
+    """Replace this task, with a new task inheriting the task id.
+
+    Execution of the host task ends immediately and no subsequent statements
+    will be run.
+
+    .. versionadded:: 4.0
+
+    Arguments:
+        sig (Signature): signature to replace with.
+        visitor (StampingVisitor): Visitor API object.
+
+    Raises:
+        ~@Ignore: This is always raised when called in asynchronous context.
+        It is best to always use ``return self.replace(...)`` to convey
+        to the reader that the task won't continue after being replaced.
+    """
+    console.print('Replacing task')
+    chord = task_instance.request.chord
+    sig.freeze(task_instance.request.id)
+    replaced_task_nesting = task_instance.request.get('replaced_task_nesting', 0) + 1
+    sig.set(
+        chord=chord,
+        group_id=task_instance.request.group,
+        group_index=task_instance.request.group_index,
+        root_id=task_instance.request.root_id,
+        replaced_task_nesting=replaced_task_nesting
+    )
+    import psutil
+    import os
+    process = psutil.Process(os.getpid())
+    length = len(task_instance.request.chain) if task_instance.request.chain else 0
+    console.print(f'Adding {length} chain tasks from request chain')
+    for ix, t in enumerate(reversed(task_instance.request.chain or [])):
+        console.print(f'Adding chain task {t.name} from request chain ({ix + 1}/{length})')
+        chain_task = signature(t, app=task_instance.app)
+        chain_task.set(replaced_task_nesting=replaced_task_nesting)
+        sig |= chain_task
+        del chain_task
+        del t
+        memory_bytes = process.memory_info().rss
+        console.print(f'Memory usage: {memory_bytes / 1024 / 1024:.2f} MB (chain task {ix + 1}/{length})')
+        gc.collect()
+    return task_instance.on_replace(sig)
+
+
 def break_task(task, task_opts, results=[]):
     """Break a task into multiple of the same type."""
     chunks = task.inputs
@@ -370,16 +431,21 @@ def break_task(task, task_opts, results=[]):
         task_id = sig.freeze().task_id
         full_name = f'{task.name}_{ix + 1}'
         task.add_subtask(task_id, task.name, full_name)
-        info = Info(message=f'Celery chunked task created: {task_id}')
+        info = Info(message=f'Celery chunked task created ({ix + 1} / {len(chunks)}): {task_id}')
         task.add_result(info)
         sigs.append(sig)

     # Mark main task as async since it's being chunked
     task.sync = False
+    task.results = []
+    task.uuids = set()
+    console.print(Info(message=f'Task {task.unique_name} is now async, building chord with{len(sigs)} chunks'))
+    console.print(Info(message=f'Results: {results}'))

     # Build Celery workflow
     workflow = chord(
         tuple(sigs),
         mark_runner_completed.s(runner=task).set(queue='results')
     )
+    console.print(Info(message=f'Task {task.unique_name} chord built with {len(sigs)} chunks, returning workflow'))
     return workflow
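Two changes here matter beyond the logging noise: every Celery channel (results, tasks, events) now serializes with pickle so secator's rich output objects can cross worker boundaries intact, and a local replace() helper reimplements Celery's Task.replace() with incremental chain rebuilding, per-iteration memory logging and gc.collect() calls to bound worker memory while swapping a chunked task for a chord. A minimal standalone sketch of the serializer setup, mirroring the keys this diff sets (the broker/backend URLs are illustrative assumptions, not secator's defaults):

    from celery import Celery

    # Hypothetical broker/backend URLs, for illustration only.
    app = Celery('demo', broker='redis://localhost:6379/0', backend='redis://localhost:6379/1')
    app.conf.update({
        # pickle lets arbitrary Python objects (not just JSON types) travel
        # across results, tasks and events alike.
        'result_serializer': 'pickle',
        'result_accept_content': ['application/x-python-serialize'],
        'task_serializer': 'pickle',
        'task_accept_content': ['application/x-python-serialize'],
        'event_serializer': 'pickle',
        'event_accept_content': ['application/x-python-serialize'],
    })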
secator/celery_signals.py
CHANGED
@@ -92,8 +92,9 @@ def task_postrun_handler(**kwargs):

     # Get sender name from kwargs
     sender_name = kwargs['sender'].name
+    # console.print(Info(message=f'Task postrun handler --> Sender name: {sender_name}'))

-    if CONFIG.celery.worker_kill_after_task and sender_name.startswith('secator.'):
+    if CONFIG.celery.worker_kill_after_task and (sender_name.startswith('secator.') or sender_name.startswith('api.')):
         worker_name = os.environ.get('WORKER_NAME', 'unknown')
         console.print(Info(message=f'Shutdown worker {worker_name} since config celery.worker_kill_after_task is set.'))
         kill_worker(parent=True)
secator/cli.py
CHANGED
@@ -134,7 +134,10 @@ for config in SCANS:
 @click.option('--stop', is_flag=True, help='Stop a worker in dev mode (celery multi).')
 @click.option('--show', is_flag=True, help='Show command (celery multi).')
 @click.option('--use-command-runner', is_flag=True, default=False, help='Use command runner to run the command.')
-def worker(hostname, concurrency, reload, queue, pool, quiet, loglevel, check, dev, stop, show, use_command_runner):  # noqa: E501
+@click.option('--without-gossip', is_flag=True)
+@click.option('--without-mingle', is_flag=True)
+@click.option('--without-heartbeat', is_flag=True)
+def worker(hostname, concurrency, reload, queue, pool, quiet, loglevel, check, dev, stop, show, use_command_runner, without_gossip, without_mingle, without_heartbeat):  # noqa: E501
     """Run a worker."""

     # Check Celery addon is installed
@@ -182,6 +185,9 @@ def worker(hostname, concurrency, reload, queue, pool, quiet, loglevel, check, d
     cmd += f' -P {pool}' if pool else ''
     cmd += f' -c {concurrency}' if concurrency else ''
     cmd += f' -l {loglevel}' if loglevel else ''
+    cmd += ' --without-mingle' if without_mingle else ''
+    cmd += ' --without-gossip' if without_gossip else ''
+    cmd += ' --without-heartbeat' if without_heartbeat else ''

     if reload:
         patterns = "celery.py;tasks/*.py;runners/*.py;serializers/*.py;output_types/*.py;hooks/*.py;exporters/*.py"
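The three new flags pass straight through to the underlying celery worker invocation; --without-gossip, --without-mingle and --without-heartbeat are stock Celery worker switches that disable inter-worker gossip, startup state synchronization and event heartbeats respectively, which cuts broker chatter when running many workers.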
secator/config.py
CHANGED
secator/hooks/gcs.py
CHANGED
@@ -14,6 +14,16 @@ ITEMS_TO_SEND = {
     'url': ['screenshot_path', 'stored_response_path']
 }

+_gcs_client = None
+
+
+def get_gcs_client():
+    """Get or create GCS client"""
+    global _gcs_client
+    if _gcs_client is None:
+        _gcs_client = storage.Client()
+    return _gcs_client
+

 def process_item(self, item):
     if item._type not in ITEMS_TO_SEND.keys():
@@ -39,7 +49,7 @@ def process_item(self, item):
 def upload_blob(bucket_name, source_file_name, destination_blob_name):
     """Uploads a file to the bucket."""
     start_time = time()
-    storage_client = storage.Client()
+    storage_client = get_gcs_client()
     bucket = storage_client.bucket(bucket_name)
     blob = bucket.blob(destination_blob_name)
     with open(source_file_name, 'rb') as f:
secator/hooks/mongodb.py
CHANGED
@@ -166,85 +166,90 @@ def tag_duplicates(ws_id: str = None, full_scan: bool = False):
         full_scan (bool): If True, scan all findings, otherwise only untagged findings.
     """
     debug(f'running duplicate check on workspace {ws_id}', sub='hooks.mongodb')
+    init_time = time.time()
     client = get_mongodb_client()
     db = client.main
-
-
-    untagged_query = {'_context.workspace_id': str(ws_id)}
-    if
-    untagged_query['_tagged']
-
-
-
-
-
+    start_time = time.time()
+    workspace_query = {'_context.workspace_id': str(ws_id), '_context.workspace_duplicate': False, '_tagged': True}
+    untagged_query = {'_context.workspace_id': str(ws_id), '_tagged': {'$ne': True}}
+    if full_scan:
+        del untagged_query['_tagged']
+    workspace_findings = load_findings(list(db.findings.find(workspace_query).sort('_timestamp', -1)))
+    untagged_findings = load_findings(list(db.findings.find(untagged_query).sort('_timestamp', -1)))
+    debug(
+        f'Workspace non-duplicates findings: {len(workspace_findings)}, '
+        f'Untagged findings: {len(untagged_findings)}. '
+        f'Query time: {time.time() - start_time}s',
+        sub='hooks.mongodb'
+    )
+    start_time = time.time()
+    seen = []
+    db_updates = {}

-    untagged_findings = load_findings(untagged_query)
-    workspace_findings = load_findings(workspace_query)
-    non_duplicates = []
-    duplicates = []
     for item in untagged_findings:
-
-        seen = [f for f in duplicates if f._uuid == item._uuid]
-        if seen:
+        if item._uuid in seen:
             continue

-
-
+        debug(
+            f'Processing: {repr(item)} ({item._timestamp}) [{item._uuid}]',
+            sub='hooks.mongodb',
+            verbose=True
+        )
+
+        duplicate_ids = [
+            _._uuid
+            for _ in untagged_findings
+            if _ == item and _._uuid != item._uuid
+        ]
+        seen.extend(duplicate_ids)

-        # Check if already present in list of workspace_findings findings, list of duplicates, or untagged_findings
-        workspace_dupes = [f for f in workspace_findings if f == item and f._uuid != item._uuid]
-        untagged_dupes = [f for f in untagged_findings if f == item and f._uuid != item._uuid]
-        seen_dupes = [f for f in duplicates if f == item and f._uuid != item._uuid]
-        tmp_duplicates.extend(workspace_dupes)
-        tmp_duplicates.extend(untagged_dupes)
-        tmp_duplicates.extend(seen_dupes)
         debug(
-            f'for item
-            obj={
-                'workspace dupes': len(workspace_dupes),
-                'untagged dupes': len(untagged_dupes),
-                'seen dupes': len(seen_dupes)
-            },
-            id=ws_id,
+            f'Found {len(duplicate_ids)} duplicates for item',
             sub='hooks.mongodb',
-            verbose=True
-
-        debug(f'duplicate ids: {tmp_duplicates_ids}', id=ws_id, sub='hooks.mongodb', verbose=True)
-
-        # Update latest object as non-duplicate
-        if tmp_duplicates:
-            duplicates.extend([f for f in tmp_duplicates])
-            db.findings.update_one({'_id': ObjectId(item._uuid)}, {'$set': {'_related': tmp_duplicates_ids}})
-            debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb', verbose=True)
-            non_duplicates.append(item)
-        else:
-            debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb', verbose=True)
-            non_duplicates.append(item)
+            verbose=True
+        )

-
+        duplicate_ws = [
+            _ for _ in workspace_findings
+            if _ == item and _._uuid != item._uuid
+        ]
+        debug(f' --> Found {len(duplicate_ws)} workspace duplicates for item', sub='hooks.mongodb', verbose=True)
+
+        related_ids = []
+        if duplicate_ws:
+            duplicate_ws_ids = [_._uuid for _ in duplicate_ws]
+            duplicate_ids.extend(duplicate_ws_ids)
+            for related in duplicate_ws:
+                related_ids.extend(related._related)
+
+        debug(f' --> Found {len(duplicate_ids)} total duplicates for item', sub='hooks.mongodb', verbose=True)
+
+        db_updates[item._uuid] = {
+            '_related': duplicate_ids + related_ids,
+            '_context.workspace_duplicate': False,
+            '_tagged': True
+        }
+        for uuid in duplicate_ids:
+            db_updates[uuid] = {
+                '_context.workspace_duplicate': True,
+                '_tagged': True
+            }
+    debug(f'Finished processing untagged findings in {time.time() - start_time}s', sub='hooks.mongodb')
+    start_time = time.time()

-
-    duplicates_ids = list(dict.fromkeys([n._uuid for n in duplicates]))
-    non_duplicates_ids = list(dict.fromkeys([n._uuid for n in non_duplicates]))
+    debug(f'Executing {len(db_updates)} database updates', sub='hooks.mongodb')

-
-
-
+    from pymongo import UpdateOne
+    if not db_updates:
+        debug('no db updates to execute', sub='hooks.mongodb')
+        return

-
-
-
-    debug(
-
-
-        obj={
-            'processed': len(untagged_findings),
-            'duplicates': len(duplicates_ids),
-            'non-duplicates': len(non_duplicates_ids)
-        },
-        sub='hooks.mongodb')
+    result = db.findings.bulk_write(
+        [UpdateOne({'_id': ObjectId(uuid)}, {'$set': update}) for uuid, update in db_updates.items()]
+    )
+    debug(result, sub='hooks.mongodb')
+    debug(f'Finished running db update in {time.time() - start_time}s', sub='hooks.mongodb')
+    debug(f'Finished running tag duplicates in {time.time() - init_time}s', sub='hooks.mongodb')


 HOOKS = {
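The rewrite replaces one update_one() per finding with a single bulk_write() that batches every tag decision for the workspace. A minimal sketch of that pymongo pattern in isolation (connection URL, collection name and document ids are illustrative assumptions):

    from bson import ObjectId
    from pymongo import MongoClient, UpdateOne

    db = MongoClient('mongodb://localhost:27017').main  # hypothetical deployment

    # Accumulate per-document updates keyed by id, then send them in one round trip.
    db_updates = {
        '665f1c2e9b1e8a0012345678': {'_tagged': True},
        '665f1c2e9b1e8a0012345679': {'_tagged': True, '_context.workspace_duplicate': True},
    }
    ops = [UpdateOne({'_id': ObjectId(uuid)}, {'$set': update}) for uuid, update in db_updates.items()]
    if ops:
        result = db.findings.bulk_write(ops)
        print(result.modified_count)  # one network round trip for all updates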
secator/installer.py
CHANGED
@@ -395,7 +395,7 @@ class GithubInstaller:
         for root, _, files in os.walk(directory):
             for file in files:
                 # Match the file name exactly with the repository name
-                if file
+                if file.startswith(binary_name):
                     return os.path.join(root, file)
         return None

secator/output_types/certificate.py
CHANGED
@@ -26,7 +26,7 @@ class Certificate(OutputType):
     serial_number: str = field(default='', compare=False)
     ciphers: list[str] = field(default_factory=list, compare=False)
     # parent_certificate: 'Certificate' = None # noqa: F821
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='certificate', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/exploit.py
CHANGED
@@ -18,7 +18,7 @@ class Exploit(OutputType):
     cves: list = field(default_factory=list, compare=False)
     tags: list = field(default_factory=list, compare=False)
     extra_data: dict = field(default_factory=dict, compare=False)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='exploit', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/ip.py
CHANGED
@@ -18,7 +18,7 @@ class Ip(OutputType):
     host: str = field(default='', repr=True, compare=False)
     alive: bool = False
     protocol: str = field(default=IpProtocol.IPv4)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='ip', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/progress.py
CHANGED
@@ -9,7 +9,7 @@ from secator.utils import rich_to_ansi, format_object
 class Progress(OutputType):
     percent: int = 0
     extra_data: dict = field(default_factory=dict)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='progress', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/record.py
CHANGED
@@ -12,7 +12,7 @@ class Record(OutputType):
     type: str
     host: str = ''
     extra_data: dict = field(default_factory=dict, compare=False)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='record', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/stat.py
CHANGED
@@ -13,7 +13,7 @@ class Stat(OutputType):
     memory: int
     net_conns: int = field(default=None, repr=True)
     extra_data: dict = field(default_factory=dict)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='stat', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/state.py
CHANGED
@@ -12,7 +12,7 @@ class State(OutputType):
     task_id: str
     state: str
     _type: str = field(default='state', repr=True)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
     _context: dict = field(default_factory=dict, repr=True, compare=False)

secator/output_types/subdomain.py
CHANGED
@@ -13,7 +13,7 @@ class Subdomain(OutputType):
     domain: str
     sources: List[str] = field(default_factory=list, compare=False)
     extra_data: dict = field(default_factory=dict, compare=False)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='subdomain', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/tag.py
CHANGED
@@ -11,7 +11,7 @@ class Tag(OutputType):
     match: str
     extra_data: dict = field(default_factory=dict, repr=True, compare=False)
     stored_response_path: str = field(default='', compare=False)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='tag', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
secator/output_types/target.py
CHANGED
@@ -9,7 +9,7 @@ from secator.utils import autodetect_type, rich_to_ansi, rich_escape as _s
 class Target(OutputType):
     name: str
     type: str = ''
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='target', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)

secator/output_types/user_account.py
CHANGED
@@ -13,7 +13,7 @@ class UserAccount(OutputType):
     email: str = ''
     site_name: str = ''
     extra_data: dict = field(default_factory=dict, compare=False)
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='user_account', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)

secator/output_types/vulnerability.py
CHANGED
@@ -25,7 +25,7 @@ class Vulnerability(OutputType):
     reference: str = field(default='', compare=False)
     confidence_nb: int = 0
     severity_nb: int = 0
-    _source: str = field(default='', repr=True)
+    _source: str = field(default='', repr=True, compare=False)
     _type: str = field(default='vulnerability', repr=True)
     _timestamp: int = field(default_factory=lambda: time.time(), compare=False)
     _uuid: str = field(default='', repr=True, compare=False)
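The same one-line change repeats across every output type: _source is now excluded from dataclass equality, so identical findings reported by different tools compare equal — which is exactly what the duplicate tagging in secator/hooks/mongodb.py above relies on when it tests findings with ==. The dataclass mechanics in isolation (Finding is a hypothetical stand-in for secator's output types):

    from dataclasses import dataclass, field

    @dataclass
    class Finding:
        host: str
        _source: str = field(default='', compare=False)  # excluded from the generated __eq__

    # The same finding from two different tools now compares equal.
    assert Finding('example.com', _source='nuclei') == Finding('example.com', _source='httpx')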
secator/runners/command.py
CHANGED
@@ -18,7 +18,7 @@ from secator.config import CONFIG
 from secator.output_types import Info, Warning, Error, Stat
 from secator.runners import Runner
 from secator.template import TemplateLoader
-from secator.utils import debug, rich_escape as _s
+from secator.utils import debug, rich_escape as _s, signal_to_name


 logger = logging.getLogger(__name__)
@@ -440,6 +440,7 @@ class Command(Runner):
         # Output and results
         self.return_code = 0
         self.killed = False
+        self.memory_limit_mb = CONFIG.security.memory_limit_mb

         # Run the command using subprocess
         env = os.environ
@@ -449,6 +450,7 @@ class Command(Runner):
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             universal_newlines=True,
+            preexec_fn=os.setsid,
             shell=self.shell,
             env=env,
             cwd=self.cwd)
@@ -473,6 +475,11 @@ class Command(Runner):
         except FileNotFoundError as e:
             yield from self.handle_file_not_found(e)

+        except MemoryError as e:
+            self.debug(f'{self.unique_name}: {type(e).__name__}.', sub='end')
+            self.stop_process(exit_ok=True, sig=signal.SIGTERM)
+            yield Warning(message=f'Memory limit {self.memory_limit_mb}MB reached for {self.unique_name}')
+
         except BaseException as e:
             self.debug(f'{self.unique_name}: {type(e).__name__}.', sub='end')
             self.stop_process()
@@ -527,7 +534,7 @@ class Command(Runner):
         if self.last_updated_stat and (time() - self.last_updated_stat) < CONFIG.runners.stat_update_frequency:
             return

-        yield from self.stats()
+        yield from self.stats(self.memory_limit_mb)
         self.last_updated_stat = time()

     def print_description(self):
@@ -565,26 +572,31 @@ class Command(Runner):
         error = Error.from_exception(exc)
         yield error

-    def stop_process(self, exit_ok=False):
+    def stop_process(self, exit_ok=False, sig=signal.SIGINT):
         """Sends SIGINT to running process, if any."""
         if not self.process:
             return
-        self.debug(f'Sending
-        self.process.
+        self.debug(f'Sending signal {signal_to_name(sig)} to process {self.process.pid}.', sub='error')
+        if self.process and self.process.pid:
+            os.killpg(os.getpgid(self.process.pid), sig)
         if exit_ok:
             self.exit_ok = True

-    def stats(self):
+    def stats(self, memory_limit_mb=None):
         """Gather stats about the current running process, if any."""
         if not self.process or not self.process.pid:
             return
         proc = psutil.Process(self.process.pid)
         stats = Command.get_process_info(proc, children=True)
+        total_mem = 0
         for info in stats:
             name = info['name']
             pid = info['pid']
             cpu_percent = info['cpu_percent']
             mem_percent = info['memory_percent']
+            mem_rss = round(info['memory_info']['rss'] / 1024 / 1024, 2)
+            total_mem += mem_rss
+            self.debug(f'{name} {pid} {mem_rss}MB', sub='stats')
             net_conns = info.get('net_connections') or []
             extra_data = {k: v for k, v in info.items() if k not in ['cpu_percent', 'memory_percent', 'net_connections']}
             yield Stat(
@@ -595,6 +607,9 @@ class Command(Runner):
                 net_conns=len(net_conns),
                 extra_data=extra_data
             )
+        self.debug(f'Total mem: {total_mem}MB, memory limit: {memory_limit_mb}', sub='stats')
+        if memory_limit_mb and memory_limit_mb != -1 and total_mem > memory_limit_mb:
+            raise MemoryError(f'Memory limit {memory_limit_mb}MB reached for {self.unique_name}')

     @staticmethod
     def get_process_info(process, children=False):
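The preexec_fn=os.setsid / os.killpg pair is what lets the new stop_process() take down a command's entire process tree instead of only the direct child; note both calls are POSIX-only. The technique in isolation (the shell command line is illustrative):

    import os
    import signal
    import subprocess

    # Start the child in its own session, hence its own process group, so a
    # group-wide signal reaches the command and everything it spawned.
    proc = subprocess.Popen(['sh', '-c', 'sleep 60 & sleep 60'], preexec_fn=os.setsid)

    # Later: signal the whole group rather than just proc.pid.
    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
    proc.wait()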
secator/tasks/cariddi.py
CHANGED
@@ -31,6 +31,7 @@ class cariddi(HttpCrawler):
     input_types = [URL]
     output_types = [Url, Tag]
     tags = ['url', 'crawl']
+    input_chunk_size = 1
     input_flag = OPT_PIPE_INPUT
     file_flag = OPT_PIPE_INPUT
     json_flag = '-json'
@@ -77,7 +78,42 @@ class cariddi(HttpCrawler):
     proxychains = False
     proxy_socks5 = True  # with leaks... https://github.com/edoardottt/cariddi/issues/122
     proxy_http = True  # with leaks... https://github.com/edoardottt/cariddi/issues/122
-    profile =
+    profile = lambda opts: cariddi.dynamic_profile(opts)  # noqa: E731
+
+    @staticmethod
+    def dynamic_profile(opts):
+        juicy_endpoints = cariddi._get_opt_value(
+            opts,
+            'juicy_endpoints',
+            opts_conf=dict(cariddi.opts, **cariddi.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        juicy_extensions = cariddi._get_opt_value(
+            opts,
+            'juicy_extensions',
+            opts_conf=dict(cariddi.opts, **cariddi.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        info = cariddi._get_opt_value(
+            opts,
+            'info',
+            opts_conf=dict(cariddi.opts, **cariddi.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        secrets = cariddi._get_opt_value(
+            opts,
+            'secrets',
+            opts_conf=dict(cariddi.opts, **cariddi.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        errors = cariddi._get_opt_value(
+            opts,
+            'errors',
+            opts_conf=dict(cariddi.opts, **cariddi.meta_opts),
+            opt_aliases=opts.get('aliases', [])
+        )
+        hunt = juicy_endpoints or (juicy_extensions is not None) or info or secrets or errors
+        return 'cpu' if hunt is True else 'io'

     @staticmethod
     def on_json_loaded(self, item):
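The profile attribute is now computed per invocation: dynamic_profile() resolves the hunt-mode options (juicy_endpoints, juicy_extensions, info, secrets, errors) from the task's option set and schedules cariddi on the 'cpu' worker profile when any of them is active, falling back to 'io' otherwise.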
secator/tasks/dalfox.py
CHANGED
@@ -26,8 +26,8 @@ class dalfox(VulnHttp):
     tags = ['url', 'fuzz']
     input_flag = 'url'
     input_chunk_size = 20
+    ignore_return_code = True
     file_flag = 'file'
-    # input_chunk_size = 1
     json_flag = '--format jsonl'
     version_flag = 'version'
     opt_prefix = '--'
@@ -57,7 +57,7 @@ class dalfox(VulnHttp):
         }
     }
     install_version = 'v2.11.0'
-    install_cmd = 'go install -v github.com/hahwul/dalfox/v2@
+    install_cmd = 'go install -v github.com/hahwul/dalfox/v2@[install_version]'
     install_github_handle = 'hahwul/dalfox'
     encoding = 'ansi'
     proxychains = False
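Two behavioral notes: ignore_return_code = True stops a non-zero dalfox exit status from failing the task, and the rebuilt install_cmd uses the [install_version] placeholder — presumably expanded to install_version (v2.11.0) — so go install is pinned to a known release rather than a floating ref.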
secator/tasks/dirsearch.py
CHANGED
secator/tasks/feroxbuster.py
CHANGED
secator/tasks/ffuf.py
CHANGED
secator/tasks/katana.py
CHANGED
@@ -17,6 +17,7 @@ from secator.tasks._categories import HttpCrawler
 class katana(HttpCrawler):
     """Next-generation crawling and spidering framework."""
     cmd = 'katana'
+    input_chunk_size = 1
     input_types = [URL]
     output_types = [Url]
     tags = ['url', 'crawl']
@@ -144,6 +145,8 @@ class katana(HttpCrawler):
         if store_responses and os.path.exists(item.stored_response_path):
             with open(item.stored_response_path, 'r', encoding='latin-1') as fin:
                 data = fin.read().splitlines(True)
+            if not data:
+                return item
             first_line = data[0]
             with open(item.stored_response_path, 'w', encoding='latin-1') as fout:
                 fout.writelines(data[1:])
secator/tasks/naabu.py
CHANGED
@@ -38,8 +38,7 @@ class naabu(ReconPort):
         # 'health_check': 'hc'
     }
     opt_value_map = {
-        TIMEOUT: lambda x: int(x*1000
-        RETRIES: lambda x: 1 if x == 0 else x,
+        TIMEOUT: lambda x: int(x)*1000 if x and int(x) > 0 else None,  # convert to milliseconds
         PROXY: lambda x: x.replace('socks5://', '')
     }
     item_loaders = [JSONSerializer()]
CHANGED
|
@@ -20,6 +20,7 @@ class nuclei(VulnMulti):
|
|
|
20
20
|
file_flag = '-l'
|
|
21
21
|
input_flag = '-u'
|
|
22
22
|
json_flag = '-jsonl'
|
|
23
|
+
input_chunk_size = 20
|
|
23
24
|
opts = {
|
|
24
25
|
'bulk_size': {'type': int, 'short': 'bs', 'help': 'Maximum number of hosts to be analyzed in parallel per template'}, # noqa: E501
|
|
25
26
|
'debug': {'type': str, 'help': 'Debug mode'},
|
|
@@ -31,6 +32,7 @@ class nuclei(VulnMulti):
|
|
|
31
32
|
'new_templates': {'type': str, 'short': 'nt', 'help': 'Run only new templates added in latest nuclei-templates release'}, # noqa: E501
|
|
32
33
|
'automatic_scan': {'is_flag': True, 'short': 'as', 'help': 'Automatic web scan using wappalyzer technology detection to tags mapping'}, # noqa: E501
|
|
33
34
|
'omit_raw': {'is_flag': True, 'short': 'or', 'default': True, 'help': 'Omit requests/response pairs in the JSON, JSONL, and Markdown outputs (for findings only)'}, # noqa: E501
|
|
35
|
+
'response_size_read': {'type': int, 'help': 'Max body size to read (bytes)'},
|
|
34
36
|
'stats': {'is_flag': True, 'short': 'stats', 'default': True, 'help': 'Display statistics about the running scan'},
|
|
35
37
|
'stats_json': {'is_flag': True, 'short': 'sj', 'default': True, 'help': 'Display statistics in JSONL(ines) format'},
|
|
36
38
|
'stats_interval': {'type': str, 'short': 'si', 'help': 'Number of seconds to wait between showing a statistics update'}, # noqa: E501
|
|
@@ -52,7 +54,8 @@ class nuclei(VulnMulti):
|
|
|
52
54
|
# nuclei opts
|
|
53
55
|
'exclude_tags': 'exclude-tags',
|
|
54
56
|
'exclude_severity': 'exclude-severity',
|
|
55
|
-
'templates': 't'
|
|
57
|
+
'templates': 't',
|
|
58
|
+
'response_size_read': 'rsr'
|
|
56
59
|
}
|
|
57
60
|
opt_value_map = {
|
|
58
61
|
'tags': lambda x: ','.join(x) if isinstance(x, list) else x,
|
secator/utils.py
CHANGED
@@ -6,6 +6,7 @@ import json
 import logging
 import operator
 import os
+import signal
 import tldextract
 import re
 import select
@@ -823,3 +824,11 @@ def get_versions_from_string(string):
     if not matches:
         return []
     return matches
+
+
+def signal_to_name(signum):
+    """Convert a signal number to its name"""
+    for name, value in vars(signal).items():
+        if name.startswith('SIG') and not name.startswith('SIG_') and value == signum:
+            return name
+    return str(signum)
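signal_to_name() is a reverse lookup over the signal module's namespace: known signal numbers come back as their symbolic names, anything else falls back to str(). For example:

    import signal

    # vars(signal) maps names like 'SIGTERM' to numeric values, so scanning it
    # recovers the name from the number; SIG_DFL/SIG_IGN are filtered out.
    assert signal_to_name(signal.SIGTERM) == 'SIGTERM'
    assert signal_to_name(signal.SIGINT) == 'SIGINT'
    assert signal_to_name(999999) == '999999'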
{secator-0.16.5.dist-info → secator-0.17.0.dist-info}/RECORD
CHANGED
@@ -1,23 +1,23 @@
 secator/.gitignore,sha256=da8MUc3hdb6Mo0WjZu2upn5uZMbXcBGvhdhTQ1L89HI,3093
 secator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-secator/celery.py,sha256=
-secator/celery_signals.py,sha256=
+secator/celery.py,sha256=UeF1MGz4UCZ0xH6S-8GsXF0GnYWkMColWkwh8_48olI,14214
+secator/celery_signals.py,sha256=chWicrTmvh2ioMXRm43r1zN09wrhWnw3Kc4mWDUY55s,4314
 secator/celery_utils.py,sha256=vhL5ZxXDn3ODvyVxMijKyUTJ1dOisMDjF_PhFUyOVSA,9451
-secator/cli.py,sha256=
+secator/cli.py,sha256=zmaMa-RhN3ENAPcfltTwUEE3Dobg9kKeVGTxsMN1v1g,61267
 secator/cli_helper.py,sha256=EJFl80fd1HcgMYbmiddMZssCD32YDiFLnr-UbLp61aQ,13720
 secator/click.py,sha256=pg7XPI7-wAhhEhd4aeAC8vHSqKi-H0zeFRlh0T-ayYg,2662
-secator/config.py,sha256=
+secator/config.py,sha256=6CjGLMNNFWg1PAvW16v8SGceyngFs2SCEKjJhV2NtOU,20805
 secator/cve.py,sha256=j47VOGyZjOvCY_xwVYS9fiXQPKHL5bPRtCnVAmbQthE,21356
 secator/decorators.py,sha256=uygU8MguxEO0BKXRvF4Nn2QEDnjqdIer8ReBj_j9ALg,88
 secator/definitions.py,sha256=sJaR9e_4aEgAo7cVzYQcD2lotXQPN_3lze_qWhKvo1M,3275
-secator/installer.py,sha256
+secator/installer.py,sha256=-gw6jSCCezuRgKdrlKYYK7UIORP4OWyx69bohM-8tfc,21129
 secator/loader.py,sha256=fR0oAdBgZlII8guOmSs_htQq917mUZZIiAzf0fvUq0Y,4139
 secator/report.py,sha256=4lEjW_GzDgsPBe1eQHX4ntcHWs0nsAMIbrNMw0UfWHc,4025
 secator/rich.py,sha256=jITAXV_Wgj32Q7FfkssDN-DMD8TxK1wwlrIlkaCNc70,3960
 secator/template.py,sha256=vLp-4cmg05YDKyvqmPtKoclH-b_NamRKvr_qprIPSGA,9905
 secator/thread.py,sha256=EqilUiqunUmVLHvZQiPl7GUYXHXVneDpI8crhqKKT_4,562
 secator/tree.py,sha256=zxZ1rXE5jzipyNNUVuTDoeq35qA-7h5yAZ4mE230ZUQ,7000
-secator/utils.py,sha256=
+secator/utils.py,sha256=NIfmFO_V2_wn3tjFhC-lFIldq1HDXDNgwWKB9B0x-40,22174
 secator/utils_test.py,sha256=cI8JRhKhgq9X5c8Lvvhs-T_C2UxxHY1wexVo4qBStS4,10131
 secator/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 secator/configs/profiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -57,32 +57,32 @@ secator/exporters/json.py,sha256=1ZtDf8RksPO_V0zIvnwDUxMUb630DCElAMM8_RQvyAo,474
 secator/exporters/table.py,sha256=zYNmwNGEyB6dTJ1ATVkrv-AOuPjrW6tvk1_4naLQo8Q,1114
 secator/exporters/txt.py,sha256=t_FykaJOxs4UUlqiH4k6HCccEqYqc8e3iNZndL_CKPg,739
 secator/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-secator/hooks/gcs.py,sha256=
-secator/hooks/mongodb.py,sha256=
+secator/hooks/gcs.py,sha256=sQ_1O16aPMrjwa_pVMOTw7g_4oQ2cSJpfc7MQJUOpb8,1724
+secator/hooks/mongodb.py,sha256=tHBuHwO8wejShV7FLHK5jVNRyw_fZ52vfvqqRrkkjlU,7955
 secator/output_types/__init__.py,sha256=CJcYy2_Ek-opKiBz4wFlDHQBTm3t0JVwZ4w_2Jxoeuw,1291
 secator/output_types/_base.py,sha256=9iBqPdtlfJBldBiuC729KamHHGbKhwo69P-2UNwz-3Q,2874
-secator/output_types/certificate.py,sha256=
+secator/output_types/certificate.py,sha256=Y6Lv1nI73zEIZt13n6yQwArgol3Z-p8sUInbj73Gs3c,3055
 secator/output_types/error.py,sha256=lA7DDNUb8iuw3nbanzMD0BjQjOEwUEQPAMZy_9sRs9o,1540
-secator/output_types/exploit.py,sha256
+secator/output_types/exploit.py,sha256=TwRqaIvoTSSsXahz3ZSZC1XLGMiml0jiTIWdSTXFicA,1765
 secator/output_types/info.py,sha256=HcOwdF4OPw3Qni_X0c8bDSLqq2LZLMjIwwLgtEwBwHE,820
-secator/output_types/ip.py,sha256=
+secator/output_types/ip.py,sha256=rKSv6yAu9AKZ3nKBh7pzRg8eai8d1fE36vKUAMjyMJ8,1232
 secator/output_types/port.py,sha256=JdqXnEF8XuwaWFMT8Vghj7fKLwtsImuUdRfMmITgmWM,1879
-secator/output_types/progress.py,sha256=
-secator/output_types/record.py,sha256=
-secator/output_types/stat.py,sha256=
-secator/output_types/state.py,sha256=
-secator/output_types/subdomain.py,sha256=
-secator/output_types/tag.py,sha256=
-secator/output_types/target.py,sha256=
+secator/output_types/progress.py,sha256=D598UDh4VBcSLetOjcd-DY1H_EVgymQXVgbMK6FE-ZY,1228
+secator/output_types/record.py,sha256=ehbJ-4rcVuFUxof5RXkYcuoh34DdnJEeFZazPQN4QKo,1265
+secator/output_types/stat.py,sha256=YpKZRP5fge42oTmlWSYEfixDPx764g-5aVBeilQM0io,1263
+secator/output_types/state.py,sha256=kY5ArRYpRVfIULj7Qt93Lx8YeeIEMa1Ke7q8vnK0Yzk,996
+secator/output_types/subdomain.py,sha256=1pWVXGKA6r7IWaBSt0TRe4tC3tEVbBsRQBettr0FEH8,1359
+secator/output_types/tag.py,sha256=4-khdI4W9tahW3G4YPh1WEWKHoOYW69M9UDtzPlrqnU,1656
+secator/output_types/target.py,sha256=gIrx-IXermWBaPfIS4CBs0PfsrfdtMsYlnduwmZe8BE,1067
 secator/output_types/url.py,sha256=rPbDek2zmvGK2mcjQfQoi6Ij7gKTyItIaDxMT04f2TE,3628
-secator/output_types/user_account.py,sha256=
-secator/output_types/vulnerability.py,sha256=
+secator/output_types/user_account.py,sha256=IqPg0nfKzSsxA5DerLA3PEWIN9HscV_D7PRKoyqllU8,1432
+secator/output_types/vulnerability.py,sha256=cuS5r_BKFuO-DQlrSEiN7elmunwlu2sdC4Rt9WDa10g,2864
 secator/output_types/warning.py,sha256=iy949Aj5OXJLWif7HFB5EvjcYrgKHAzIP9ffyLTV7LA,830
 secator/runners/__init__.py,sha256=EBbOk37vkBy9p8Hhrbi-2VtM_rTwQ3b-0ggTyiD22cE,290
 secator/runners/_base.py,sha256=IkAQfPzz_kou5Pa82y-2Wmtp_lIudKMc9ix8_NP4370,40663
 secator/runners/_helpers.py,sha256=TeebZnpo4cp-9tpgPlDoFm_gmr00_CERAC1aOYhTzA4,6281
 secator/runners/celery.py,sha256=bqvDTTdoHiGRCt0FRvlgFHQ_nsjKMP5P0PzGbwfCj_0,425
-secator/runners/command.py,sha256=
+secator/runners/command.py,sha256=A--1cFpzHAxvzO0xvpMSyY0Tb0OZxkJc8HsJaTNsfB0,31096
 secator/runners/scan.py,sha256=axT_OmGhixogCPMUS1OUeMLnFtk8PxY7zL9NYCugFVU,2578
 secator/runners/task.py,sha256=PrkVns8UAGht2JbCmCUWycA6B39Z5oeMmAMq69KtXKI,2199
 secator/runners/workflow.py,sha256=YnpTSdmp54d55vORe4khWLSx2J7gtDFNryKfZXYAWnY,6076
@@ -96,12 +96,12 @@ secator/tasks/_categories.py,sha256=ZmUNzeFIZ9-_er9sLJw66PTYIL5nO799JQU3EoW-6nE,
 secator/tasks/arjun.py,sha256=WdRZtTCd2Ejbv5HlLS_FoWVKgGpMsR6RCDekV2kR788,3061
 secator/tasks/bbot.py,sha256=moIkwd52jCKaeg1v6Nv4Gfmd4GPObo9c9nwOzQvf-2M,9236
 secator/tasks/bup.py,sha256=9IXsCqMdhOeZcCsQB2L4IJ3Kzm2oQKDE7mflGljm0lM,3867
-secator/tasks/cariddi.py,sha256=
-secator/tasks/dalfox.py,sha256=
-secator/tasks/dirsearch.py,sha256
+secator/tasks/cariddi.py,sha256=pc1z6FWFV4dauSJxWL9BKD-MXjCo14sgcNtAkGuKy5I,5194
+secator/tasks/dalfox.py,sha256=dllEP9A8-7YaX12fGRmLMltfNjm_9Us6wYoS86C_VO0,2507
+secator/tasks/dirsearch.py,sha256=-oa2P2Pq8LjF61PguUEtjgr9rgvVpGLzRZRDXIJMswY,2453
 secator/tasks/dnsx.py,sha256=2qNC-wSjS33geuHMOwuBapLwKEvWTlDgnmvM67ZSJVA,4220
-secator/tasks/feroxbuster.py,sha256=
-secator/tasks/ffuf.py,sha256=
+secator/tasks/feroxbuster.py,sha256=H7_WT8B0cPIBeq7FOownpQlrZ468R07zRLqrDLNCkg8,3006
+secator/tasks/ffuf.py,sha256=L2Rb34YIH30CFJacvaY8QVF1gDah9E0nNCdgAHWL9jo,4103
 secator/tasks/fping.py,sha256=uTOq24DcNQpNgpXQlFV4xxBdn8P9gJWM5mmhkobqW-Y,1575
 secator/tasks/gau.py,sha256=SJaza2yQoMeJeE6TOCRrRv0udbwRIoiXX4gRE64GXoU,1804
 secator/tasks/gf.py,sha256=svNRzaBr_DYW3QGFoPmUBWZh0Xm07XDS2bbNH-tfcA4,1028
@@ -110,13 +110,13 @@ secator/tasks/gospider.py,sha256=5cEgBCCGWIGE05XfAkjMhTap9V-MwLK2lm1iqxcbj-M,251
 secator/tasks/grype.py,sha256=OasQs5WQwgt--o6M2_uh3RYZZaA3-difweCS46Uc5-w,2573
 secator/tasks/h8mail.py,sha256=XsDnL8LPk_jIHfJhqeYMj2423epk0NADorjd_JhBa9o,2033
 secator/tasks/httpx.py,sha256=0Umt2ouL36TELxmoaZ4dKSGXgipN3ve__IQFgUKrWZQ,6498
-secator/tasks/katana.py,sha256=
+secator/tasks/katana.py,sha256=10Sml1d0bO_UDwT1y_4TDQ_a0ihWKAW6L6-n8M9ArUw,6220
 secator/tasks/maigret.py,sha256=jjuyR8lAZYUybmN8SwEj3hrRB25p9xm4X_361auZK_Q,2173
 secator/tasks/mapcidr.py,sha256=tMTHQspHSs92F4R-9HVYjFBpiu9ZhxoJSNvpd8KwKKc,1057
 secator/tasks/msfconsole.py,sha256=3VjAEpwEAFDcGxyYMhKyDLHRObXELYFx_H306fzmtMw,6566
-secator/tasks/naabu.py,sha256=
+secator/tasks/naabu.py,sha256=Z8kYvAMeeSLrhnojLRx8GzxvwhFhDCfDj9a7r9Wbr1A,2407
 secator/tasks/nmap.py,sha256=bGfPrB_JD_XaVccUJTvMAN81cNfmWo2gI4Hd6P_ZRLI,16986
-secator/tasks/nuclei.py,sha256=
+secator/tasks/nuclei.py,sha256=ApY9oQaQZAvrFKObVQt0JxZS9ZcoLabL_lrc5Uwxl9s,5144
 secator/tasks/searchsploit.py,sha256=gwP05nLdu3yBnpMrAVu7S2CIjgCtcS3w9_1K0Tp9WBM,3503
 secator/tasks/subfinder.py,sha256=Q7YIBSyFxHWXSskmn2dEWPxU_HZ9rZCMU3Kl4sdvPwc,1297
 secator/tasks/testssl.py,sha256=rrpKerOYcNA4NJr9RQ_uAtAbl3W50FRp3bPo3yD8EEg,8787
@@ -125,8 +125,8 @@ secator/tasks/wafw00f.py,sha256=9CnV9F7ZrykO27F3PAb5HtwULDMYEKGSTbz-jh0kc2g,3189
 secator/tasks/wpprobe.py,sha256=1QPJ-7JvhL7LFvjUTAmqpH2Krp-Qmi079lonso16YPQ,3229
 secator/tasks/wpscan.py,sha256=dBkbG9EODHDUBAA8uNVULX4SdVgTCAi_F1T1oCfRbsI,5852
 secator/workflows/__init__.py,sha256=XOviyjSylZ4cuVmmQ76yuqZRdmvOEghqAnuw_4cLmfk,702
-secator-0.
-secator-0.
-secator-0.
-secator-0.
-secator-0.
+secator-0.17.0.dist-info/METADATA,sha256=FPBlaaLaBXMP4i2-bz5q39-Z2i_LVn3ezeu-xNuC0Ro,17253
+secator-0.17.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+secator-0.17.0.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
+secator-0.17.0.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
+secator-0.17.0.dist-info/RECORD,,

{secator-0.16.5.dist-info → secator-0.17.0.dist-info}/WHEEL
File without changes

{secator-0.16.5.dist-info → secator-0.17.0.dist-info}/entry_points.txt
File without changes

{secator-0.16.5.dist-info → secator-0.17.0.dist-info}/licenses/LICENSE
File without changes