secator 0.10.1a2__py3-none-any.whl → 0.10.1a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of secator might be problematic.

secator/celery.py CHANGED
@@ -2,7 +2,6 @@ import gc
 import json
 import logging
 import os
-import sys
 import uuid
 
 from time import time
@@ -13,14 +12,13 @@ from celery.app import trace
 from rich.logging import RichHandler
 from retry import retry
 
-from secator.celery_signals import setup_handlers
+from secator.celery_signals import IN_CELERY_WORKER_PROCESS, setup_handlers
 from secator.config import CONFIG
 from secator.output_types import Info
 from secator.rich import console
 from secator.runners import Scan, Task, Workflow
 from secator.utils import (debug, deduplicate, flatten, should_update)
 
-IN_CELERY_WORKER_PROCESS = sys.argv and ('secator.celery.app' in sys.argv or 'worker' in sys.argv)
 
 #---------#
 # Logging #
@@ -148,14 +146,14 @@ def handle_runner_error(self, results, runner):
 	return runner.results
 
 
-def break_task(task, task_opts, targets, results=[], chunk_size=1):
+def break_task(task, task_opts, results=[]):
 	"""Break a task into multiple of the same type."""
-	chunks = targets
-	if chunk_size > 1:
-		chunks = list(chunker(targets, chunk_size))
+	chunks = task.inputs
+	if task.input_chunk_size > 1:
+		chunks = list(chunker(task.inputs, task.input_chunk_size))
 	debug(
 		'',
-		obj={task.unique_name: 'CHUNKED', 'chunk_size': chunk_size, 'chunks': len(chunks), 'target_count': len(targets)},
+		obj={task.unique_name: 'CHUNKED', 'chunk_size': task.input_chunk_size, 'chunks': len(chunks), 'target_count': len(task.inputs)}, # noqa: E501
 		obj_after=False,
 		sub='celery.state',
 		verbose=True
@@ -196,6 +194,8 @@ def break_task(task, task_opts, targets, results=[], chunk_size=1):
 def run_task(self, args=[], kwargs={}):
 	print('run task')
 	console.print(Info(message=f'Running task {self.request.id}'))
+	if 'context' not in kwargs:
+		kwargs['context'] = {}
 	kwargs['context']['celery_id'] = self.request.id
 	task = Task(*args, **kwargs)
 	task.run()
@@ -204,6 +204,8 @@ def run_task(self, args=[], kwargs={}):
 @app.task(bind=True)
 def run_workflow(self, args=[], kwargs={}):
 	console.print(Info(message=f'Running workflow {self.request.id}'))
+	if 'context' not in kwargs:
+		kwargs['context'] = {}
 	kwargs['context']['celery_id'] = self.request.id
 	workflow = Workflow(*args, **kwargs)
 	workflow.run()
@@ -244,9 +246,11 @@ def run_command(self, results, name, targets, opts={}):
 	update_state(self, task, force=True)
 
 	# Chunk task if needed
-	if task_cls.needs_chunking(targets, sync):
+	if task.needs_chunking(sync):
 		console.print(Info(message=f'Task {name} requires chunking, breaking into {len(targets)} tasks'))
-		return self.replace(break_task(task, opts, targets, results=results))
+		tasks = break_task(task, opts, results=results)
+		update_state(self, task, force=True)
+		return self.replace(tasks)
 
 	# Update state live
 	[update_state(self, task) for _ in task]
@@ -268,7 +272,8 @@ def forward_results(results):
 		results = results['results']
 	results = flatten(results)
 	results = deduplicate(results, attr='_uuid')
-	console.print(Info(message=f'Forwarding {len(results)} results ...'))
+	if IN_CELERY_WORKER_PROCESS:
+		console.print(Info(message=f'Forwarding {len(results)} results ...'))
 	return results
 
 #--------------#
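
The reworked break_task now reads its inputs and chunk size straight off the task object (task.inputs, task.input_chunk_size) instead of taking them as extra arguments. Below is a minimal sketch of the chunking step, assuming chunker is a plain fixed-size splitter; the helper is referenced in the diff but its implementation is not shown here.

from itertools import islice


def chunker(seq, size):
	# Fixed-size splitter: yields consecutive slices of at most `size` items.
	it = iter(seq)
	while chunk := list(islice(it, size)):
		yield chunk


inputs = ['a.com', 'b.com', 'c.com', 'd.com', 'e.com']
input_chunk_size = 2

# Mirrors the break_task logic above: only chunk when input_chunk_size > 1.
chunks = list(chunker(inputs, input_chunk_size)) if input_chunk_size > 1 else inputs
print(chunks)  # [['a.com', 'b.com'], ['c.com', 'd.com'], ['e.com']]
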
secator/celery_signals.py CHANGED
@@ -1,5 +1,6 @@
 import os
 import signal
+import sys
 import threading
 from pathlib import Path
 
@@ -10,6 +11,7 @@ from secator.output_types import Info
 from secator.rich import console
 
 IDLE_TIMEOUT = CONFIG.celery.worker_kill_after_idle_seconds
+IN_CELERY_WORKER_PROCESS = sys.argv and ('secator.celery.app' in sys.argv or 'worker' in sys.argv)
 
 # File-based state management system
 STATE_DIR = Path("/tmp/celery_state")
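
The worker-detection flag that used to live in celery.py is now defined here. For reference, a short illustration of when it evaluates truthy; the example argv values below are hypothetical.

import sys

# Same expression the module now defines: truthy only when the current process
# looks like a Celery worker rather than a plain CLI invocation.
IN_CELERY_WORKER_PROCESS = sys.argv and ('secator.celery.app' in sys.argv or 'worker' in sys.argv)

# Hypothetical argv values:
#   ['celery', '-A', 'secator.celery.app', 'worker']  -> truthy (worker process)
#   ['secator', 'x', 'httpx', 'example.com']          -> falsy  (regular CLI run)
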
secator/celery_utils.py CHANGED
@@ -12,7 +12,7 @@ from rich.padding import Padding
 from rich.progress import Progress as RichProgress, SpinnerColumn, TextColumn, TimeElapsedColumn
 from secator.config import CONFIG
 from secator.definitions import STATE_COLORS
-from secator.output_types import Error
+from secator.output_types import Error, Info
 from secator.rich import console
 from secator.utils import debug, traceback_as_string
 
@@ -76,10 +76,31 @@ class CeleryData(object):
 
 		# Get live results and print progress
 		for data in CeleryData.poll(result, ids_map, refresh_interval):
-			yield from data['results']
+			for result in data['results']:
+
+				# Add dynamic subtask to ids_map
+				if isinstance(result, Info):
+					message = result.message
+					if message.startswith('Celery chunked task created: '):
+						task_id = message.split(' ')[-1]
+						ids_map[task_id] = {
+							'id': task_id,
+							'name': result._source,
+							'full_name': result._source,
+							'descr': '',
+							'state': 'PENDING',
+							'count': 0,
+							'progress': 0
+						}
+				yield result
 
 			if print_remote_info:
 				task_id = data['id']
+				if task_id not in progress_cache:
+					if CONFIG.runners.show_subtasks:
+						progress_cache[task_id] = progress.add_task('', advance=0, **data)
+					else:
+						continue
 				progress_id = progress_cache[task_id]
 				CeleryData.update_progress(progress, progress_id, data)
 
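
When a dynamically created chunk is detected, its id is seeded into ids_map and a new row is later added to the Rich progress display via progress.add_task. A small standalone sketch of that Rich usage, with invented task names, in case the pattern is unfamiliar:

from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn

# Column layout loosely mirrors the imports used by celery_utils; the task
# descriptions below are invented for illustration.
with Progress(SpinnerColumn(), TextColumn('{task.description}'), TimeElapsedColumn()) as progress:
	row = progress.add_task('httpx_1 [PENDING]', total=None)
	progress.update(row, description='httpx_1 [RUNNING]')
	progress.update(row, description='httpx_1 [SUCCESS]')
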
secator/config.py CHANGED
@@ -93,6 +93,7 @@ class Runners(StrictModel):
 	skip_exploit_search: bool = False
 	skip_cve_low_confidence: bool = False
 	remove_duplicates: bool = False
+	show_chunk_progress: bool = False
 
 
 class Security(StrictModel):
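
The new show_chunk_progress flag is a plain boolean on the Runners config model and defaults to False. A sketch of reading it through the global CONFIG object, following the same access pattern used elsewhere in this diff (e.g. CONFIG.runners.show_subtasks):

from secator.config import CONFIG

# Defaults to False per the StrictModel field definition added above.
if CONFIG.runners.show_chunk_progress:
	print('chunk progress display enabled')
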
secator/runners/_base.py CHANGED
@@ -97,6 +97,7 @@ class Runner:
 		self.threads = []
 		self.no_poll = self.run_opts.get('no_poll', False)
 		self.quiet = self.run_opts.get('quiet', False)
+		self.started = False
 
 		# Runner process options
 		self.no_process = self.run_opts.get('no_process', False)
@@ -117,10 +118,21 @@ class Runner:
 		self.raise_on_error = self.run_opts.get('raise_on_error', False)
 		self.print_opts = {k: v for k, v in self.__dict__.items() if k.startswith('print_') if v}
 
+		# Chunks
+		self.has_parent = self.run_opts.get('has_parent', False)
+		self.has_children = self.run_opts.get('has_children', False)
+		self.chunk = self.run_opts.get('chunk', None)
+		self.chunk_count = self.run_opts.get('chunk_count', None)
+		self.unique_name = self.name.replace('/', '_')
+		self.unique_name = f'{self.unique_name}_{self.chunk}' if self.chunk else self.unique_name
+
+		# Add prior results to runner results
+		[self.add_result(result, print=False, output=False) for result in results]
+
 		# Determine inputs
 		inputs = [inputs] if not isinstance(inputs, list) else inputs
-		if results:
-			inputs, run_opts, errors = run_extractors(results, run_opts, inputs)
+		if not self.chunk and self.results:
+			inputs, run_opts, errors = run_extractors(self.results, run_opts, inputs)
 			for error in errors:
 				self.add_result(error, print=True)
 		self.inputs = inputs
@@ -163,18 +175,6 @@ class Runner:
 		self.validators = {name: [] for name in VALIDATORS + getattr(self, 'validators', [])}
 		self.register_validators(validators)
 
-		# Chunks
-		self.has_parent = self.run_opts.get('has_parent', False)
-		self.has_children = self.run_opts.get('has_children', False)
-		self.chunk = self.run_opts.get('chunk', None)
-		self.chunk_count = self.run_opts.get('chunk_count', None)
-		self.unique_name = self.name.replace('/', '_')
-		self.unique_name = f'{self.unique_name}_{self.chunk}' if self.chunk else self.unique_name
-
-		# Process prior results
-		for result in results:
-			list(self._process_item(result, print=False, output=False))
-
 		# Input post-process
 		self.run_hooks('before_init')
 
@@ -238,6 +238,8 @@ class Runner:
 
 	@property
 	def status(self):
+		if not self.started:
+			return 'PENDING'
 		if not self.done:
 			return 'RUNNING'
 		return 'FAILURE' if len(self.self_errors) > 0 else 'SUCCESS'
@@ -326,16 +328,18 @@ class Runner:
 			self.add_result(error, print=True)
 			yield error
 
-	def add_result(self, item, print=False):
+	def add_result(self, item, print=False, output=True):
 		"""Add item to runner results.
 
 		Args:
 			item (OutputType): Item.
 			print (bool): Whether to print it or not.
+			output (bool): Whether to add it to the output or not.
 		"""
 		self.uuids.append(item._uuid)
 		self.results.append(item)
-		self.output += repr(item) + '\n'
+		if output:
+			self.output += repr(item) + '\n'
 		if print:
 			self._print_item(item)
 
@@ -640,6 +644,7 @@ class Runner:
 
 	def log_start(self):
 		"""Log runner start."""
+		self.started = True
 		if not self.print_remote_info:
 			return
 		remote_str = 'starting' if self.sync else 'sent to Celery worker'
@@ -831,14 +836,14 @@ class Runner:
 		# Update item context
 		item._context.update(self.context)
 
-		# Return if already seen
-		if item._uuid in self.uuids:
-			return
-
 		# Add uuid to item
 		if not item._uuid:
			item._uuid = str(uuid.uuid4())
 
+		# Return if already seen
+		if item._uuid in self.uuids:
+			return
+
 		# Add source to item
 		if not item._source:
 			item._source = self.unique_name
secator/runners/command.py CHANGED
@@ -193,11 +193,10 @@ class Command(Runner):
 		})
 		return res
 
-	@classmethod
-	def needs_chunking(cls, targets, sync):
-		many_targets = len(targets) > 1
-		targets_over_chunk_size = cls.input_chunk_size and len(targets) > cls.input_chunk_size
-		has_file_flag = cls.file_flag is not None
+	def needs_chunking(self, sync):
+		many_targets = len(self.inputs) > 1
+		targets_over_chunk_size = self.input_chunk_size and len(self.inputs) > self.input_chunk_size
+		has_file_flag = self.file_flag is not None
 		chunk_it = (sync and many_targets and not has_file_flag) or (not sync and many_targets and targets_over_chunk_size)
 		return chunk_it
 
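
needs_chunking is now an instance method driven by the runner's own inputs, input_chunk_size and file_flag. The standalone restatement below illustrates the decision rule with invented values; it is not the Command class itself.

def needs_chunking(inputs, input_chunk_size, file_flag, sync):
	# Re-statement of the rule above, decoupled from the Command class.
	many_targets = len(inputs) > 1
	targets_over_chunk_size = bool(input_chunk_size) and len(inputs) > input_chunk_size
	has_file_flag = file_flag is not None
	return (sync and many_targets and not has_file_flag) or (not sync and many_targets and targets_over_chunk_size)


# Sync run, several targets, tool has no file flag -> chunk into one task per target.
print(needs_chunking(['a.com', 'b.com', 'c.com'], 0, None, sync=True))   # True
# Async run, 3 targets but chunk size 5 and a file flag -> no chunking needed.
print(needs_chunking(['a.com', 'b.com', 'c.com'], 5, '-l', sync=False))  # False
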
secator/runners/scan.py CHANGED
@@ -3,8 +3,8 @@ import logging
 from secator.template import TemplateLoader
 from secator.config import CONFIG
 from secator.runners._base import Runner
-from secator.runners._helpers import run_extractors
 from secator.runners.workflow import Workflow
+from secator.output_types import Error
 from secator.utils import merge_opts
 
 logger = logging.getLogger(__name__)
@@ -28,16 +28,16 @@ class Scan(Runner):
 		scan_opts = self.config.options
 		self.print_item = False
 		for name, workflow_opts in self.config.workflows.items():
-			# Run workflow
 			run_opts = self.run_opts.copy()
 			opts = merge_opts(scan_opts, workflow_opts, run_opts)
 			workflow = Workflow(
 				TemplateLoader(name=f'workflows/{name}'),
 				self.inputs,
-				results=[],
+				results=self.results,
 				run_opts=opts,
 				hooks=self._hooks,
 				context=self.context.copy())
-
-			# Get results
 			yield from workflow
+			if len(self.errors) > 0:
+				self.add_result(Error(message=f'Stopping scan since workflow {name} has errors'))
+				return
secator/scans/__init__.py CHANGED
@@ -2,9 +2,9 @@ from secator.cli import ALL_SCANS
 
 
 def generate_class(config):
-	from secator.runners import Workflow
+	from secator.runners import Scan
 
-	class workflow(Workflow):
+	class scan(Scan):
 		def __init__(self, inputs=[], **run_opts):
 			hooks = run_opts.pop('hooks', {})
 			results = run_opts.pop('results', [])
@@ -16,12 +16,12 @@ def generate_class(config):
 				run_opts=run_opts,
 				hooks=hooks,
 				context=context)
-	return workflow, config.name
+	return scan, config.name
 
 
 DYNAMIC_SCANS = {}
-for workflow in ALL_SCANS:
-	cls, name = generate_class(workflow)
+for scan in ALL_SCANS:
+	cls, name = generate_class(scan)
 	DYNAMIC_SCANS[name] = cls
 
 globals().update(DYNAMIC_SCANS)
secator-0.10.1a2.dist-info/METADATA → secator-0.10.1a3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: secator
-Version: 0.10.1a2
+Version: 0.10.1a3
 Summary: The pentester's swiss knife.
 Project-URL: Homepage, https://github.com/freelabz/secator
 Project-URL: Issues, https://github.com/freelabz/secator/issues
secator-0.10.1a2.dist-info/RECORD → secator-0.10.1a3.dist-info/RECORD CHANGED
@@ -1,10 +1,10 @@
 secator/.gitignore,sha256=da8MUc3hdb6Mo0WjZu2upn5uZMbXcBGvhdhTQ1L89HI,3093
 secator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-secator/celery.py,sha256=9KXKv4EamJYJrHt_Ppn7aIp1AiFaTn2V0J_tZBwtWK0,8802
-secator/celery_signals.py,sha256=HobT7hCbVKPEHvCNwxCvQxFVUyocU1kkrTXi67b1DDw,4346
-secator/celery_utils.py,sha256=UWqLZpUaOXcztC_GD6uEDLiP8bGmD3WiTQN-u3lialg,7712
+secator/celery.py,sha256=wEvhoBrnpkHuj_sFjW-NnWyPdvraBhgJ9ecF8gwlZ20,8926
+secator/celery_signals.py,sha256=iumfx7tTeoavAbHijBtij0JzeIqElxQldNZtuZmFY_U,4456
+secator/celery_utils.py,sha256=2Mw08To1DzWIlnW7TBeoX5suPMjm3XYbKT7NWPOV5nI,8346
 secator/cli.py,sha256=3_tTTusW12MCejFgtOeYjiedjrJpyQj_gsCK8FkTMJA,43922
-secator/config.py,sha256=xItKM29yvMqzNZZygSNZXZ2V9vJbTdRuLTfIoRfP3XE,19653
+secator/config.py,sha256=CdVBh6d4k13SpkQKyHQfMFHgkLypUH07kAKLmCJJO1w,19688
 secator/decorators.py,sha256=3kYadCz6haIZtnjkFHSRfenTdc6Yu7bHd-0IVjhD72w,13902
 secator/definitions.py,sha256=gFtLT9fjNtX_1qkiCjNfQyCvYq07IhScsQzX4o20_SE,3084
 secator/installer.py,sha256=Q5qmGbxGmuhysEA9YovTpy-YY2TxxFskhrzSX44c42E,17971
@@ -68,14 +68,14 @@ secator/output_types/user_account.py,sha256=rm10somxyu30JHjj629IkR15Nhahylud_fVO
 secator/output_types/vulnerability.py,sha256=nF7OT9zGez8sZvLrkhjBOORjVi8hCqfCYUFq3eZ_ywo,2870
 secator/output_types/warning.py,sha256=47GtmG083GqGPb_R5JDFmARJ9Mqrme58UxwJhgdGPuI,853
 secator/runners/__init__.py,sha256=EBbOk37vkBy9p8Hhrbi-2VtM_rTwQ3b-0ggTyiD22cE,290
-secator/runners/_base.py,sha256=T9gjOqe-UPDHe5ZdVRBtUtxTefRgDcq9JV08F6UV5ZU,29596
+secator/runners/_base.py,sha256=Luxr6-Nd0CtLjfVBNGNXsUeQdELETKdgdaAac8oqE5U,29801
 secator/runners/_helpers.py,sha256=QhJmdmFdu5XSx3LBFf4Q4Hy2EXS6bLGnJUq8G7C6f68,2410
 secator/runners/celery.py,sha256=bqvDTTdoHiGRCt0FRvlgFHQ_nsjKMP5P0PzGbwfCj_0,425
-secator/runners/command.py,sha256=PqCOHDKJXvG4weB8mXDTElGxc8i8pK2RoyTKUBpHASU,25480
-secator/runners/scan.py,sha256=Pab_o_liI5fhlv2OOwYNmonz5JFYYVqtQFf9eyAQpiE,1071
+secator/runners/command.py,sha256=9AvjZgSXctP8D-ffPCtlnXEiGqTeaD2wVGhiGNuROb0,25469
+secator/runners/scan.py,sha256=AJ7ucBIXXcdUz6pzpsLIdEj_tRRa2IlANhYRKOc2IZQ,1157
 secator/runners/task.py,sha256=f2AduWpIy8JHK-Qitl_2Kh0fia573_YHAyAlV6MsJ50,2068
 secator/runners/workflow.py,sha256=XEhBfL-f3vGH0HgEPnj62d8ITxjH_tPXiNSVkaonuwQ,3862
-secator/scans/__init__.py,sha256=nlNLiRl7Vu--c_iXClFFcagMd_b_OWKitq8tX1-1krQ,641
+secator/scans/__init__.py,sha256=1EEbngbDbvWxmeDYC6uux00WWy1v5qHtSpk6NVz27rM,617
 secator/serializers/__init__.py,sha256=OP5cmFl77ovgSCW_IDcZ21St2mUt5UK4QHfrsK2KvH8,248
 secator/serializers/dataclass.py,sha256=RqICpfsYWGjHAACAA2h2jZ_69CFHim4VZwcBqowGMcQ,1010
 secator/serializers/json.py,sha256=UJwAymRzjF-yBKOgz1MTOyBhQcdQg7fOKRXgmHIu8fo,411
@@ -109,8 +109,8 @@ secator/tasks/searchsploit.py,sha256=gvtLZbL2hzAZ07Cf0cSj2Qs0GvWK94XyHvoPFsetXu8
 secator/tasks/subfinder.py,sha256=C6W5NnXT92OUB1aSS9IYseqdI3wDMAz70TOEl8X-o3U,1213
 secator/tasks/wpscan.py,sha256=036ywiEqZfX_Bt071U7qIm7bi6pNk7vodflmuslJurA,5550
 secator/workflows/__init__.py,sha256=ivpZHiYYlj4JqlXLRmB9cmAPUGdk8QcUrCRL34hIqEA,665
-secator-0.10.1a2.dist-info/METADATA,sha256=YgZWsub4cdLtNiz-2UKkPKhLAi8mx0wFIPwGjnYU794,14726
-secator-0.10.1a2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-secator-0.10.1a2.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
-secator-0.10.1a2.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
-secator-0.10.1a2.dist-info/RECORD,,
+secator-0.10.1a3.dist-info/METADATA,sha256=G7F0k81GVKn7Y9a0mGQ3u96ZOHxxoshrlUxNj12dLao,14726
+secator-0.10.1a3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+secator-0.10.1a3.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
+secator-0.10.1a3.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
+secator-0.10.1a3.dist-info/RECORD,,