secator 0.10.1a3__py3-none-any.whl → 0.10.1a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of secator might be problematic.
- secator/celery.py +95 -56
- secator/celery_utils.py +16 -1
- secator/output_types/__init__.py +3 -1
- secator/output_types/state.py +29 -0
- secator/report.py +1 -1
- secator/runners/_base.py +61 -18
- secator/runners/scan.py +25 -13
- secator/runners/task.py +28 -34
- secator/runners/workflow.py +23 -52
- secator/scans/__init__.py +18 -17
- secator/workflows/__init__.py +18 -17
- {secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/METADATA +1 -1
- {secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/RECORD +16 -15
- {secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/WHEEL +0 -0
- {secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/entry_points.txt +0 -0
- {secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/licenses/LICENSE +0 -0
secator/celery.py
CHANGED

@@ -136,63 +136,8 @@ def chunker(seq, size):
 	return (seq[pos:pos + size] for pos in range(0, len(seq), size))
 
 
-@app.task(bind=True)
-def handle_runner_error(self, results, runner):
-	"""Handle errors in Celery workflows (chunked tasks or runners)."""
-	results = forward_results(results)
-	runner.results = results
-	runner.log_results()
-	runner.run_hooks('on_end')
-	return runner.results
-
-
-def break_task(task, task_opts, results=[]):
-	"""Break a task into multiple of the same type."""
-	chunks = task.inputs
-	if task.input_chunk_size > 1:
-		chunks = list(chunker(task.inputs, task.input_chunk_size))
-		debug(
-			'',
-			obj={task.unique_name: 'CHUNKED', 'chunk_size': task.input_chunk_size, 'chunks': len(chunks), 'target_count': len(task.inputs)},  # noqa: E501
-			obj_after=False,
-			sub='celery.state',
-			verbose=True
-		)
-
-	# Clone opts
-	opts = task_opts.copy()
-
-	# Build signatures
-	sigs = []
-	task.ids_map = {}
-	for ix, chunk in enumerate(chunks):
-		if not isinstance(chunk, list):
-			chunk = [chunk]
-		if len(chunks) > 0:  # add chunk to task opts for tracking chunks exec
-			opts['chunk'] = ix + 1
-			opts['chunk_count'] = len(chunks)
-		task_id = str(uuid.uuid4())
-		opts['has_parent'] = True
-		opts['enable_duplicate_check'] = False
-		opts['results'] = results
-		sig = type(task).si(chunk, **opts).set(queue=type(task).profile, task_id=task_id)
-		full_name = f'{task.name}_{ix + 1}'
-		task.add_subtask(task_id, task.name, f'{task.name}_{ix + 1}')
-		info = Info(message=f'Celery chunked task created: {task_id}', _source=full_name, _uuid=str(uuid.uuid4()))
-		task.add_result(info)
-		sigs.append(sig)
-
-	# Build Celery workflow
-	workflow = chord(
-		tuple(sigs),
-		handle_runner_error.s(runner=task).set(queue='results')
-	)
-	return workflow
-
-
 @app.task(bind=True)
 def run_task(self, args=[], kwargs={}):
-	print('run task')
 	console.print(Info(message=f'Running task {self.request.id}'))
 	if 'context' not in kwargs:
 		kwargs['context'] = {}

@@ -243,11 +188,14 @@ def run_command(self, results, name, targets, opts={}):
 	sync = not IN_CELERY_WORKER_PROCESS
 	task_cls = Task.get_task_class(name)
 	task = task_cls(targets, **opts)
+	task.started = True
+	task.run_hooks('on_start')
 	update_state(self, task, force=True)
 
 	# Chunk task if needed
 	if task.needs_chunking(sync):
-
+		if IN_CELERY_WORKER_PROCESS:
+			console.print(Info(message=f'Task {name} requires chunking, breaking into {len(targets)} tasks'))
 		tasks = break_task(task, opts, results=results)
 		update_state(self, task, force=True)
 		return self.replace(tasks)

@@ -276,6 +224,50 @@ def forward_results(results):
 	console.print(Info(message=f'Forwarding {len(results)} results ...'))
 	return results
 
+
+@app.task
+def mark_runner_started(runner):
+	"""Mark a runner as started and run on_start hooks.
+
+	Args:
+		runner (Runner): Secator runner instance
+
+	Returns:
+		list: Runner results
+	"""
+	runner.started = True
+	# runner.start_time = time()
+	runner.run_hooks('on_start')
+	return runner.results
+
+
+@app.task
+def mark_runner_complete(results, runner):
+	"""Mark a runner as completed and run on_end hooks.
+
+	Args:
+		results (list): Task results
+		runner (Runner): Secator runner instance
+
+	Returns:
+		list: Final results
+	"""
+	results = forward_results(results)
+
+	# If sync mode, don't update the runner as it's already done
+	if runner.sync:
+		return results
+
+	# Run final processing
+	runner.results = results
+	if not runner.no_process:
+		runner.mark_duplicates()
+		runner.results = runner.filter_results()
+	runner.log_results()
+	runner.run_hooks('on_end')
+	return runner.results
+
+
 #--------------#
 # Celery utils #
 #--------------#

@@ -290,3 +282,50 @@ def is_celery_worker_alive():
 	else:
 		console.print(Info(message='No Celery worker available, running locally'))
 	return result
+
+
+def break_task(task, task_opts, results=[]):
+	"""Break a task into multiple of the same type."""
+	chunks = task.inputs
+	if task.input_chunk_size > 1:
+		chunks = list(chunker(task.inputs, task.input_chunk_size))
+		debug(
+			'',
+			obj={task.unique_name: 'CHUNKED', 'chunk_size': task.input_chunk_size, 'chunks': len(chunks), 'target_count': len(task.inputs)},  # noqa: E501
+			obj_after=False,
+			sub='celery.state',
+			verbose=True
+		)
+
+	# Clone opts
+	opts = task_opts.copy()
+
+	# Build signatures
+	sigs = []
+	task.ids_map = {}
+	for ix, chunk in enumerate(chunks):
+		if not isinstance(chunk, list):
+			chunk = [chunk]
+		if len(chunks) > 0:  # add chunk to task opts for tracking chunks exec
+			opts['chunk'] = ix + 1
+			opts['chunk_count'] = len(chunks)
+		task_id = str(uuid.uuid4())
+		opts['has_parent'] = True
+		opts['enable_duplicate_check'] = False
+		opts['results'] = results
+		sig = type(task).si(chunk, **opts).set(queue=type(task).profile, task_id=task_id)
+		full_name = f'{task.name}_{ix + 1}'
+		task.add_subtask(task_id, task.name, f'{task.name}_{ix + 1}')
+		info = Info(message=f'Celery chunked task created: {task_id}', _source=full_name, _uuid=str(uuid.uuid4()))
+		task.add_result(info)
+		sigs.append(sig)
+
+	# Mark main task as async since it's being chunked
+	task.sync = False
+
+	# Build Celery workflow
+	workflow = chord(
+		tuple(sigs),
+		mark_runner_complete.s(runner=task).set(queue='results')
+	)
+	return workflow
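For orientation, a minimal composition sketch (not part of the diff) of the lifecycle pattern introduced above: a runner's Celery work is bracketed by mark_runner_started and mark_runner_complete, both routed to the 'results' queue, the same way break_task() uses mark_runner_complete as its chord callback. The runner instance and the inner work signature are placeholders.

from celery import chain

from secator.celery import mark_runner_started, mark_runner_complete


def wrap_runner(runner, work_sig):
    """Sketch: bracket a runner's Celery execution with the new lifecycle tasks."""
    return chain(
        mark_runner_started.si(runner).set(queue='results'),   # sets runner.started, runs on_start hooks
        work_sig,                                               # the actual work (placeholder signature)
        mark_runner_complete.s(runner).set(queue='results'),    # forwards results, runs on_end hooks
    )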
secator/celery_utils.py
CHANGED

@@ -12,7 +12,7 @@ from rich.padding import Padding
 from rich.progress import Progress as RichProgress, SpinnerColumn, TextColumn, TimeElapsedColumn
 from secator.config import CONFIG
 from secator.definitions import STATE_COLORS
-from secator.output_types import Error, Info
+from secator.output_types import Error, Info, State
 from secator.rich import console
 from secator.utils import debug, traceback_as_string
 

@@ -138,9 +138,24 @@ class CeleryData(object):
 		"""
 		while True:
 			try:
+				main_task = State(
+					task_id=result.id,
+					state=result.state,
+					_source='celery'
+				)
+				debug(f"Main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
+				yield {'id': result.id, 'results': [main_task]}
 				yield from CeleryData.get_all_data(result, ids_map)
+
 				if result.ready():
 					debug('result is ready', sub='celery.poll', id=result.id)
+					main_task = State(
+						task_id=result.id,
+						state=result.state,
+						_source='celery'
+					)
+					debug(f"Final main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
+					yield {'id': result.id, 'results': [main_task]}
 					yield from CeleryData.get_all_data(result, ids_map)
 					break
 			except (KeyboardInterrupt, GreenletExit):
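A hedged illustration of the payload shape the poller now yields for the main Celery result (ids and states below are placeholders): each iteration emits a State item keyed by the result id, which the runner uses to flip its started/done flags.

from secator.output_types import State

# Placeholder payload mirroring one polling iteration for the main task.
payload = {
    'id': 'celery-task-id',
    'results': [State(task_id='celery-task-id', state='SUCCESS', _source='celery')],
}

for item in payload['results']:
    if isinstance(item, State) and item.state in ['FAILURE', 'SUCCESS', 'REVOKED']:
        print(f'Main task finished with state {item.state}')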
secator/output_types/__init__.py
CHANGED

@@ -7,6 +7,7 @@ __all__ = [
 	'Progress',
 	'Record',
 	'Stat',
+	'State',
 	'Subdomain',
 	'Url',
 	'UserAccount',

@@ -29,9 +30,10 @@ from secator.output_types.info import Info
 from secator.output_types.warning import Warning
 from secator.output_types.error import Error
 from secator.output_types.stat import Stat
+from secator.output_types.state import State
 
 EXECUTION_TYPES = [
-	Target, Progress, Info, Warning, Error
+	Target, Progress, Info, Warning, Error, State
 ]
 STAT_TYPES = [
 	Stat

secator/output_types/state.py
ADDED

@@ -0,0 +1,29 @@
+import time
+from dataclasses import dataclass, field
+
+from secator.output_types._base import OutputType
+from secator.utils import rich_to_ansi
+
+
+@dataclass
+class State(OutputType):
+	"""Represents the state of a Celery task."""
+
+	task_id: str
+	state: str
+	_type: str = field(default='state', repr=True)
+	_source: str = field(default='', repr=True)
+	_timestamp: int = field(default_factory=lambda: time.time(), compare=False)
+	_uuid: str = field(default='', repr=True, compare=False)
+	_context: dict = field(default_factory=dict, repr=True, compare=False)
+	_tagged: bool = field(default=False, repr=True, compare=False)
+	_duplicate: bool = field(default=False, repr=True, compare=False)
+	_related: list = field(default_factory=list, compare=False)
+	_icon = '📊'
+	_color = 'bright_blue'
+
+	def __str__(self) -> str:
+		return f"Task {self.task_id} is {self.state}"
+
+	def __repr__(self) -> str:
+		return rich_to_ansi(f"{self._icon} [bold {self._color}]{self.state}[/] {self.task_id}")
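A quick usage illustration of the new State output type (the task id below is a placeholder, not taken from the diff):

from secator.output_types import State

s = State(task_id='0f1e2d3c', state='RUNNING', _source='celery')
print(str(s))   # Task 0f1e2d3c is RUNNING
# repr(s) renders through rich_to_ansi, e.g. "📊 RUNNING 0f1e2d3c"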
secator/report.py
CHANGED

@@ -38,7 +38,7 @@ class Report:
 		exporters (list): List of exporter classes.
 	"""
 	def __init__(self, runner, title=None, exporters=[]):
-		self.title = title or f'{runner.
+		self.title = title or f'{runner.config.type}_{runner.config.name}'
 		self.runner = runner
 		self.timestamp = get_file_timestamp()
 		self.exporters = exporters
secator/runners/_base.py
CHANGED

@@ -12,7 +12,7 @@ import humanize
 from secator.definitions import ADDONS_ENABLED
 from secator.celery_utils import CeleryData
 from secator.config import CONFIG
-from secator.output_types import FINDING_TYPES, OutputType, Progress, Info, Warning, Error, Target
+from secator.output_types import FINDING_TYPES, OutputType, Progress, Info, Warning, Error, Target, State
 from secator.report import Report
 from secator.rich import console, console_stdout
 from secator.runners._helpers import (get_task_folder_id, process_extractor, run_extractors)

@@ -285,11 +285,8 @@
 			self.run_hooks('on_end')
 			return
 
-		# Choose yielder
-		yielder = self.yielder_celery if self.celery_result else self.yielder
-
 		# Loop and process items
-		for item in yielder():
+		for item in self.yielder():
 			yield from self._process_item(item)
 			self.run_hooks('on_interval')

@@ -485,16 +482,52 @@
 			dupe = self.run_hooks('on_duplicate', dupe)
 
 	def yielder(self):
-		"""
-
+		"""Base yielder implementation.
+
+		This should be overridden by derived classes if they need custom behavior.
+		Otherwise, they can implement build_celery_workflow() and get standard behavior.
+
+		Yields:
+			secator.output_types.OutputType: Secator output type.
+		"""
+		# Build Celery workflow
+		workflow = self.build_celery_workflow()
+
+		# Run workflow and get results
+		if self.sync:
+			self.print_item = False
+			self.started = True
+			results = workflow.apply().get()
+			yield from results
+		else:
+			self.celery_result = workflow()
+			self.celery_ids.append(str(self.celery_result.id))
+			yield Info(
+				message=f'Celery task created: {self.celery_result.id}',
+				task_id=self.celery_result.id
+			)
+			if self.no_poll:
+				return
+			results = CeleryData.iter_results(
+				self.celery_result,
+				ids_map=self.celery_ids_map,
+				description=True,
+				print_remote_info=self.print_remote_info,
+				print_remote_title=f'[bold gold3]{self.__class__.__name__.capitalize()}[/] [bold magenta]{self.name}[/] results'
+			)
+
+			# Yield results
+			yield from results
+
+	def build_celery_workflow(self):
+		"""Build Celery workflow.
 
-
-
-
-
-
-		)
+		This should be implemented by derived classes.
+
+		Returns:
+			celery.Signature: Celery task signature.
+		"""
+		raise NotImplementedError("Derived classes must implement build_celery_workflow()")
 
 	def toDict(self):
 		"""Dict representation of the runner."""

@@ -644,7 +677,6 @@
 
 	def log_start(self):
 		"""Log runner start."""
-		self.started = True
 		if not self.print_remote_info:
 			return
 		remote_str = 'starting' if self.sync else 'sent to Celery worker'

@@ -654,8 +686,7 @@
 
 	def log_results(self):
 		"""Log runner results."""
-
-		return
+		self.started = True
 		self.done = True
 		self.progress = 100
 		self.end_time = datetime.fromtimestamp(time())

@@ -848,8 +879,20 @@
 		if not item._source:
 			item._source = self.unique_name
 
+		# Check for state updates
+		if isinstance(item, State) and self.celery_result and item.task_id == self.celery_result.id:
+			self.debug(f'Updating runner state from Celery: {item.state}', sub='state')
+			if item.state in ['FAILURE', 'SUCCESS', 'REVOKED']:
+				self.started = True
+				self.done = True
+			elif item.state in ['RUNNING']:
+				self.started = True
+			self.debug(f'Runner {self.unique_name} is {self.status} (started: {self.started}, done: {self.done})', sub='state')
+			self.last_updated_celery = item._timestamp
+			return
+
 		# If progress item, update runner progress
-
+		elif isinstance(item, Progress) and item._source == self.unique_name:
 			self.progress = item.percent
 			if not should_update(CONFIG.runners.progress_update_frequency, self.last_updated_progress, item._timestamp):
 				return
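A hedged subclass sketch of the new contract: derived runners only supply build_celery_workflow(), while Runner.yielder() handles sync execution, async dispatch, the 'Celery task created' Info item, and polling through CeleryData.iter_results(). The empty work_sigs list is a placeholder.

from celery import chain

from secator.runners._base import Runner


class MyRunner(Runner):
    def build_celery_workflow(self):
        from secator.celery import mark_runner_started, mark_runner_complete
        work_sigs = []  # placeholder: signatures for the actual work
        return chain(
            mark_runner_started.si(self).set(queue='results'),
            *work_sigs,
            mark_runner_complete.s(self).set(queue='results'),
        )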
secator/runners/scan.py
CHANGED

@@ -1,10 +1,8 @@
 import logging
 
-from secator.template import TemplateLoader
 from secator.config import CONFIG
 from secator.runners._base import Runner
 from secator.runners.workflow import Workflow
-from secator.output_types import Error
 from secator.utils import merge_opts
 
 logger = logging.getLogger(__name__)

@@ -19,25 +17,39 @@ class Scan(Runner):
 		from secator.celery import run_scan
 		return run_scan.delay(args=args, kwargs=kwargs)
 
-	def
-	"""
+	def build_celery_workflow(self):
+		"""Build Celery workflow for scan execution.
 
-
-
+		Returns:
+			celery.Signature: Celery task signature.
 		"""
+		from celery import chain
+		from secator.celery import mark_runner_started, mark_runner_complete
+		from secator.template import TemplateLoader
+
 		scan_opts = self.config.options
-
+
+		# Build chain of workflows
+		sigs = []
 		for name, workflow_opts in self.config.workflows.items():
 			run_opts = self.run_opts.copy()
 			opts = merge_opts(scan_opts, workflow_opts, run_opts)
+			config = TemplateLoader(name=f'workflows/{name}')
 			workflow = Workflow(
-
+				config,
 				self.inputs,
 				results=self.results,
 				run_opts=opts,
 				hooks=self._hooks,
-				context=self.context.copy()
-
-
-
-
+				context=self.context.copy()
+			)
+			celery_workflow = workflow.build_celery_workflow()
+			for task_id, task_info in workflow.celery_ids_map.items():
+				self.add_subtask(task_id, task_info['name'], task_info['descr'])
+			sigs.append(celery_workflow)
+
+		return chain(
+			mark_runner_started.si(self).set(queue='results'),
+			*sigs,
+			mark_runner_complete.s(self).set(queue='results'),
+		)
secator/runners/task.py
CHANGED

@@ -1,8 +1,8 @@
+import uuid
 from secator.config import CONFIG
 from secator.runners import Runner
 from secator.utils import discover_tasks
-from
-from secator.output_types import Info
+from celery import chain
 
 
 class Task(Runner):

@@ -14,52 +14,46 @@ class Task(Runner):
 		from secator.celery import run_task
 		return run_task.apply_async(kwargs={'args': args, 'kwargs': kwargs}, queue='celery')
 
-	def
-	"""
+	def build_celery_workflow(self):
+		"""Build Celery workflow for task execution.
 
-
-
+		Args:
+			run_opts (dict): Run options.
+			results (list): Prior results.
+
+		Returns:
+			celery.Signature: Celery task signature.
 		"""
+		from secator.celery import run_command, mark_runner_started, mark_runner_complete
+
 		# Get task class
 		task_cls = Task.get_task_class(self.config.name)
 
 		# Run opts
-
-
-
+		opts = self.run_opts.copy()
+		opts.pop('output', None)
+		opts.pop('no_poll', False)
 
 		# Set task output types
 		self.output_types = task_cls.output_types
 		self.enable_duplicate_check = False
 
 		# Get hooks
-		hooks =
-
-
+		hooks = self._hooks.get(Task, {})
+		opts['hooks'] = hooks
+		opts['context'] = self.context
 
-		#
-
-
-
-		results = result.apply().get()
-		else:
-			self.celery_result = task_cls.delay(self.inputs, **run_opts)
-			self.add_subtask(self.celery_result.id, self.config.name, self.config.description or '')
-			yield Info(
-				message=f'Celery task created: {self.celery_result.id}',
-				task_id=self.celery_result.id
-			)
-			if self.no_poll:
-				return
-			results = CeleryData.iter_results(
-				self.celery_result,
-				ids_map=self.celery_ids_map,
-				description=True,
-				print_remote_info=False,
-				print_remote_title=f'[bold gold3]{self.__class__.__name__.capitalize()}[/] [bold magenta]{self.name}[/] results')
+		# Create task signature
+		task_id = str(uuid.uuid4())
+		sig = run_command.s(self.config.name, self.inputs, opts).set(queue=task_cls.profile, task_id=task_id)
+		self.add_subtask(task_id, self.config.name, self.config.description or '')
 
-		#
-
+		# Build signature chain with lifecycle management
+		return chain(
+			mark_runner_started.si(self).set(queue='results'),
+			sig,
+			mark_runner_complete.s(self).set(queue='results'),
+		)
 
 	@staticmethod
 	def get_task_class(name):
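A hedged sketch of how the chain returned by Task.build_celery_workflow() is consumed, mirroring the two branches of Runner.yielder() (the task_runner argument is a placeholder):

def run_task_workflow(task_runner, sync=True):
    """Sketch: execute the chain produced by Task.build_celery_workflow()."""
    workflow = task_runner.build_celery_workflow()
    if sync:
        return workflow.apply().get()  # eager, in-process (sync branch of Runner.yielder)
    return workflow()                  # AsyncResult, polled via CeleryData.iter_results()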
secator/runners/workflow.py
CHANGED

@@ -4,8 +4,6 @@ from secator.config import CONFIG
 from secator.runners._base import Runner
 from secator.runners.task import Task
 from secator.utils import merge_opts
-from secator.celery_utils import CeleryData
-from secator.output_types import Info
 
 
 class Workflow(Runner):

@@ -17,65 +15,38 @@ class Workflow(Runner):
 		from secator.celery import run_workflow
 		return run_workflow.delay(args=args, kwargs=kwargs)
 
-
-
+	@classmethod
+	def s(cls, *args, **kwargs):
+		from secator.celery import run_workflow
+		return run_workflow.s(args=args, kwargs=kwargs)
 
-
-
-		"""
-		# Task opts
-		run_opts = self.run_opts.copy()
-		run_opts['hooks'] = self._hooks.get(Task, {})
-		run_opts.pop('no_poll', False)
-
-		# Build Celery workflow
-		workflow = self.build_celery_workflow(
-			run_opts=run_opts,
-			results=self.results
-		)
-		self.celery_ids = list(self.celery_ids_map.keys())
-
-		# Run Celery workflow and get results
-		if self.sync:
-			self.print_item = False
-			results = workflow.apply().get()
-		else:
-			result = workflow()
-			self.celery_ids.append(str(result.id))
-			self.celery_result = result
-			yield Info(
-				message=f'Celery task created: {self.celery_result.id}',
-				task_id=self.celery_result.id
-			)
-			if self.no_poll:
-				return
-			results = CeleryData.iter_results(
-				self.celery_result,
-				ids_map=self.celery_ids_map,
-				description=True,
-				print_remote_info=self.print_remote_info,
-				print_remote_title=f'[bold gold3]{self.__class__.__name__.capitalize()}[/] [bold magenta]{self.name}[/] results'
-			)
-
-		# Get workflow results
-		yield from results
-
-	def build_celery_workflow(self, run_opts={}, results=[]):
-		""""Build Celery workflow.
+	def build_celery_workflow(self):
+		"""Build Celery workflow for workflow execution.
 
 		Returns:
-
+			celery.Signature: Celery task signature.
 		"""
 		from celery import chain
-		from secator.celery import
+		from secator.celery import mark_runner_started, mark_runner_complete
+
+		# Prepare run options
+		opts = self.run_opts.copy()
+		opts['hooks'] = self._hooks.get(Task, {})
+		opts.pop('no_poll', False)
+
+		# Build task signatures
 		sigs = self.get_tasks(
 			self.config.tasks.toDict(),
 			self.inputs,
 			self.config.options,
-
-
-		workflow
-		return
+			opts)
+
+		# Build workflow chain with lifecycle management
+		return chain(
+			mark_runner_started.si(self).set(queue='results'),
+			*sigs,
+			mark_runner_complete.s(self).set(queue='results'),
+		)
 
 	def get_tasks(self, config, inputs, workflow_opts, run_opts):
 		"""Get tasks recursively as Celery chains / chords.
secator/scans/__init__.py
CHANGED

@@ -1,28 +1,29 @@
 from secator.cli import ALL_SCANS
+from secator.runners import Scan
 
 
-
-
+class DynamicScan(Scan):
+	def __init__(self, config):
+		self.config = config
 
-
-
-
-
-
-
-
-
-
-
-
-	return scan, config.name
+	def __call__(self, targets, **kwargs):
+		hooks = kwargs.pop('hooks', {})
+		results = kwargs.pop('results', [])
+		context = kwargs.pop('context', {})
+		super().__init__(
+			config=self.config,
+			inputs=targets,
+			results=results,
+			hooks=hooks,
+			context=context,
+			run_opts=kwargs)
+		return self
 
 
 DYNAMIC_SCANS = {}
 for scan in ALL_SCANS:
-
-	DYNAMIC_SCANS[name] =
+	instance = DynamicScan(scan)
+	DYNAMIC_SCANS[scan.name] = instance
 
 globals().update(DYNAMIC_SCANS)
 __all__ = list(DYNAMIC_SCANS)
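A hypothetical usage sketch for the dynamic scan registry ('host' is a placeholder scan name; real keys come from the ALL_SCANS configs). Calling a DynamicScan initializes the underlying Scan runner with the given targets, and the remaining keyword arguments become run_opts. The DynamicWorkflow registry in secator/workflows/__init__.py below works the same way.

from secator.scans import DYNAMIC_SCANS

scan = DYNAMIC_SCANS['host']    # a callable DynamicScan instance
runner = scan(['example.com'])  # __call__ runs Scan.__init__ with these targets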
secator/workflows/__init__.py
CHANGED

@@ -1,28 +1,29 @@
 from secator.cli import ALL_WORKFLOWS
+from secator.runners import Workflow
 
 
-
-
+class DynamicWorkflow(Workflow):
+	def __init__(self, config):
+		self.config = config
 
-
-
-
-
-
-
-
-
-
-
-
-	return workflow, config.name
+	def __call__(self, targets, **kwargs):
+		hooks = kwargs.pop('hooks', {})
+		results = kwargs.pop('results', [])
+		context = kwargs.pop('context', {})
+		super().__init__(
+			config=self.config,
+			inputs=targets,
+			results=results,
+			hooks=hooks,
+			context=context,
+			run_opts=kwargs)
+		return self
 
 
 DYNAMIC_WORKFLOWS = {}
 for workflow in ALL_WORKFLOWS:
-
-	DYNAMIC_WORKFLOWS[name] =
+	instance = DynamicWorkflow(workflow)
+	DYNAMIC_WORKFLOWS[workflow.name] = instance
 
 globals().update(DYNAMIC_WORKFLOWS)
 __all__ = list(DYNAMIC_WORKFLOWS)
{secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/RECORD
CHANGED

@@ -1,14 +1,14 @@
 secator/.gitignore,sha256=da8MUc3hdb6Mo0WjZu2upn5uZMbXcBGvhdhTQ1L89HI,3093
 secator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-secator/celery.py,sha256=
+secator/celery.py,sha256=NNaLjSENWnNTUgSSW8XXni6wlTE_-2ju9dd3Exwk7bE,9663
 secator/celery_signals.py,sha256=iumfx7tTeoavAbHijBtij0JzeIqElxQldNZtuZmFY_U,4456
-secator/celery_utils.py,sha256=
+secator/celery_utils.py,sha256=bW1yzMCjfIiesU4SOVNVuy0I8HukJyh8KmNB4w0woJM,8857
 secator/cli.py,sha256=3_tTTusW12MCejFgtOeYjiedjrJpyQj_gsCK8FkTMJA,43922
 secator/config.py,sha256=CdVBh6d4k13SpkQKyHQfMFHgkLypUH07kAKLmCJJO1w,19688
 secator/decorators.py,sha256=3kYadCz6haIZtnjkFHSRfenTdc6Yu7bHd-0IVjhD72w,13902
 secator/definitions.py,sha256=gFtLT9fjNtX_1qkiCjNfQyCvYq07IhScsQzX4o20_SE,3084
 secator/installer.py,sha256=Q5qmGbxGmuhysEA9YovTpy-YY2TxxFskhrzSX44c42E,17971
-secator/report.py,sha256=
+secator/report.py,sha256=rocJgsAMT2BgcwCqm3pwjAyymENlZZpHBqsBsXtXde4,3613
 secator/rich.py,sha256=owmuLcTTUt8xYBTE3_SqWTkPeAomcU_8bPdW_V-U8VM,3264
 secator/template.py,sha256=Sb6PjCTGIkZ7I0OGWFp5CaXmjt-6VPe_xpcRhWhjGpU,4409
 secator/thread.py,sha256=rgRgEtcMgs2wyfLWVlCTUCLWeg6jsMo5iKpyyrON5rY,655

@@ -50,7 +50,7 @@ secator/exporters/txt.py,sha256=oMtr22di6cqyE_5yJoiWP-KElrI5QgvK1cOUrj7H7js,730
 secator/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 secator/hooks/gcs.py,sha256=MIhntyWYz9BZdTXhWl5JznaczSq1_7fl3TVqPufuTSo,1490
 secator/hooks/mongodb.py,sha256=XKbm_SrcSbQ2koILWvhzSg4tqdvHXgX5aU5x46Edu1s,7716
-secator/output_types/__init__.py,sha256=
+secator/output_types/__init__.py,sha256=L3q9NXPaW0TGeidx5YH-6dWhOXD1GizztAcL2lqIA8Q,1221
 secator/output_types/_base.py,sha256=OgS6ICt66TzPsqo1JZwRIIwbng2HRX1i_u5qbUECgNk,2820
 secator/output_types/error.py,sha256=39gpEJfKM2EuyOhD9lSkjjna2QicMvnLdFav6kHmhlg,1529
 secator/output_types/exploit.py,sha256=-BKTqPBg94rVgjw8YSmcYuBCI2x-73WwMd9ITP9qr3Y,1750

@@ -60,6 +60,7 @@ secator/output_types/port.py,sha256=JdqXnEF8XuwaWFMT8Vghj7fKLwtsImuUdRfMmITgmWM,
 secator/output_types/progress.py,sha256=MIbmnrLHNodLL42UgiaqLHL0OG5-w6mtUrhn0ZhksjA,1343
 secator/output_types/record.py,sha256=HnsKxlIhkgswA_Yjz7BZ1vDjP53l6OJ0BCOtCSDwCSY,1250
 secator/output_types/stat.py,sha256=90oN2Ghc4k0B0FOdp6MOWiNgmXMmLHYknjunDeEKKRE,1129
+secator/output_types/state.py,sha256=-kQs_P-v_d_J8mgMRJA9Pa0SaOVHN__Fq_ateDc0tiA,1038
 secator/output_types/subdomain.py,sha256=ivJ_2kmrJ8hdB8wmvRJYlKV1BcE3Cds_vAI_5wL7ES4,1344
 secator/output_types/tag.py,sha256=_XEqWAvAvmi7nd2ldfEE71zQx97jTSph2iDHkeqGTyk,1470
 secator/output_types/target.py,sha256=lmPw2aFOGIOFG4XXo6vNVZBBAZlnApJjyDVepDY54TU,871

@@ -68,14 +69,14 @@ secator/output_types/user_account.py,sha256=rm10somxyu30JHjj629IkR15Nhahylud_fVO
 secator/output_types/vulnerability.py,sha256=nF7OT9zGez8sZvLrkhjBOORjVi8hCqfCYUFq3eZ_ywo,2870
 secator/output_types/warning.py,sha256=47GtmG083GqGPb_R5JDFmARJ9Mqrme58UxwJhgdGPuI,853
 secator/runners/__init__.py,sha256=EBbOk37vkBy9p8Hhrbi-2VtM_rTwQ3b-0ggTyiD22cE,290
-secator/runners/_base.py,sha256=
+secator/runners/_base.py,sha256=MhjvSUTY7tkNWyxwjMzmt6qA5qCFAlREujkqG_z2xIw,31264
 secator/runners/_helpers.py,sha256=QhJmdmFdu5XSx3LBFf4Q4Hy2EXS6bLGnJUq8G7C6f68,2410
 secator/runners/celery.py,sha256=bqvDTTdoHiGRCt0FRvlgFHQ_nsjKMP5P0PzGbwfCj_0,425
 secator/runners/command.py,sha256=9AvjZgSXctP8D-ffPCtlnXEiGqTeaD2wVGhiGNuROb0,25469
-secator/runners/scan.py,sha256=
-secator/runners/task.py,sha256=
-secator/runners/workflow.py,sha256=
-secator/scans/__init__.py,sha256=
+secator/runners/scan.py,sha256=9FjDsFmQrAWfA6crWkCJaVqG3-t2HBVjcsv4UQp_9b8,1500
+secator/runners/task.py,sha256=59jPXKSxFtSNXsm6VTAz8li2jxpM0Bkcgcn77HIDCrY,1869
+secator/runners/workflow.py,sha256=qldnRm7r_SCvRHJFkZ7eaml62RZkOeCdT18PU357grY,2982
+secator/scans/__init__.py,sha256=s4Ojsk5CWwyWqHu_A4zaXUL5Hm5L5nCmCHZn7wdD3Io,623
 secator/serializers/__init__.py,sha256=OP5cmFl77ovgSCW_IDcZ21St2mUt5UK4QHfrsK2KvH8,248
 secator/serializers/dataclass.py,sha256=RqICpfsYWGjHAACAA2h2jZ_69CFHim4VZwcBqowGMcQ,1010
 secator/serializers/json.py,sha256=UJwAymRzjF-yBKOgz1MTOyBhQcdQg7fOKRXgmHIu8fo,411

@@ -108,9 +109,9 @@ secator/tasks/nuclei.py,sha256=bMXCRU5VWyrwI7Cv6BCj84NTpfjuALFumPqUSZ4Y6Ug,4243
 secator/tasks/searchsploit.py,sha256=gvtLZbL2hzAZ07Cf0cSj2Qs0GvWK94XyHvoPFsetXu8,3321
 secator/tasks/subfinder.py,sha256=C6W5NnXT92OUB1aSS9IYseqdI3wDMAz70TOEl8X-o3U,1213
 secator/tasks/wpscan.py,sha256=036ywiEqZfX_Bt071U7qIm7bi6pNk7vodflmuslJurA,5550
-secator/workflows/__init__.py,sha256=
-secator-0.10.
-secator-0.10.
-secator-0.10.
-secator-0.10.
-secator-0.10.
+secator/workflows/__init__.py,sha256=R_TTyjg9f2Ph2_LYiF0lL07IjTrfRE_zqJzy-N7_WCk,675
+secator-0.10.1a5.dist-info/METADATA,sha256=oZnZtXatOa_oqR8f816Hc-0-Re2uouxu0mgHa-vLNyk,14726
+secator-0.10.1a5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+secator-0.10.1a5.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
+secator-0.10.1a5.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
+secator-0.10.1a5.dist-info/RECORD,,

{secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/WHEEL
File without changes

{secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/entry_points.txt
File without changes

{secator-0.10.1a3.dist-info → secator-0.10.1a5.dist-info}/licenses/LICENSE
File without changes