secator 0.9.3__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic; see the release details page for more information.

secator/celery.py CHANGED
@@ -1,17 +1,19 @@
1
1
  import gc
2
2
  import json
3
3
  import logging
4
+ import os
4
5
  import sys
5
6
  import uuid
6
7
 
7
8
  from time import time
8
9
 
9
- from celery import Celery, chain, chord, signals
10
+ from celery import Celery, chain, chord
10
11
  from celery.app import trace
11
12
 
12
13
  from rich.logging import RichHandler
13
14
  from retry import retry
14
15
 
16
+ from secator.celery_signals import setup_handlers
15
17
  from secator.config import CONFIG
16
18
  from secator.output_types import Info, Error
17
19
  from secator.rich import console
@@ -78,7 +80,8 @@ app.conf.update({
78
80
  'secator.celery.run_workflow': {'queue': 'celery'},
79
81
  'secator.celery.run_scan': {'queue': 'celery'},
80
82
  'secator.celery.run_task': {'queue': 'celery'},
81
- 'secator.hooks.mongodb.tag_duplicates': {'queue': 'mongodb'}
83
+ 'secator.celery.forward_results': {'queue': 'results'},
84
+ 'secator.hooks.mongodb.*': {'queue': 'mongodb'}
82
85
  },
83
86
  'task_store_eager_result': True,
84
87
  'task_send_sent_event': CONFIG.celery.task_send_sent_event,
@@ -93,23 +96,8 @@ app.conf.update({
93
96
  'worker_send_task_events': CONFIG.celery.worker_send_task_events
94
97
  })
95
98
  app.autodiscover_tasks(['secator.hooks.mongodb'], related_name=None)
96
-
97
-
98
- def maybe_override_logging():
99
- def decorator(func):
100
- if CONFIG.celery.override_default_logging:
101
- return signals.setup_logging.connect(func)
102
- else:
103
- return func
104
- return decorator
105
-
106
-
107
- @maybe_override_logging()
108
- def void(*args, **kwargs):
109
- """Override celery's logging setup to prevent it from altering our settings.
110
- github.com/celery/celery/issues/1867
111
- """
112
- pass
99
+ if IN_CELERY_WORKER_PROCESS:
100
+ setup_handlers()
113
101
 
114
102
 
115
103
  @retry(Exception, tries=3, delay=2)
@@ -188,10 +176,10 @@ def break_task(task, task_opts, targets, results=[], chunk_size=1):
188
176
 
189
177
  # Build Celery workflow
190
178
  workflow = chain(
191
- forward_results.s(results).set(queue='io'),
179
+ forward_results.s(results).set(queue='results'),
192
180
  chord(
193
181
  tuple(sigs),
194
- forward_results.s().set(queue='io'),
182
+ forward_results.s().set(queue='results'),
195
183
  )
196
184
  )
197
185
  if task.sync:
@@ -204,6 +192,7 @@ def break_task(task, task_opts, targets, results=[], chunk_size=1):
204
192
 
205
193
  @app.task(bind=True)
206
194
  def run_task(self, args=[], kwargs={}):
195
+ console.print(Info(message=f'Running task {self.request.id}'))
207
196
  kwargs['context']['celery_id'] = self.request.id
208
197
  task = Task(*args, **kwargs)
209
198
  task.run()
@@ -211,6 +200,7 @@ def run_task(self, args=[], kwargs={}):
211
200
 
212
201
  @app.task(bind=True)
213
202
  def run_workflow(self, args=[], kwargs={}):
203
+ console.print(Info(message=f'Running workflow {self.request.id}'))
214
204
  kwargs['context']['celery_id'] = self.request.id
215
205
  workflow = Workflow(*args, **kwargs)
216
206
  workflow.run()
@@ -218,6 +208,7 @@ def run_workflow(self, args=[], kwargs={}):
218
208
 
219
209
  @app.task(bind=True)
220
210
  def run_scan(self, args=[], kwargs={}):
211
+ console.print(Info(message=f'Running scan {self.request.id}'))
221
212
  if 'context' not in kwargs:
222
213
  kwargs['context'] = {}
223
214
  kwargs['context']['celery_id'] = self.request.id
@@ -233,6 +224,7 @@ def run_command(self, results, name, targets, opts={}):
233
224
  # Set Celery request id in context
234
225
  context = opts.get('context', {})
235
226
  context['celery_id'] = self.request.id
227
+ context['worker_name'] = os.environ.get('WORKER_NAME', 'unknown')
236
228
  opts['context'] = context
237
229
  opts['print_remote_info'] = False
238
230
  opts['results'] = results
@@ -244,6 +236,8 @@ def run_command(self, results, name, targets, opts={}):
244
236
  'print_line': True,
245
237
  'print_cmd': True
246
238
  })
239
+ routing_key = self.request.delivery_info['routing_key']
240
+ console.print(Info(message=f'Task "{name}" running with routing key "{routing_key}"'))
247
241
 
248
242
  # Flatten + dedupe results
249
243
  results = flatten(results)
@@ -254,7 +248,10 @@ def run_command(self, results, name, targets, opts={}):
254
248
  targets, opts = run_extractors(results, opts, targets)
255
249
  debug('after extractors', obj={'targets': targets, 'opts': opts}, sub='celery.state')
256
250
 
251
+ task = None
252
+
257
253
  try:
254
+
258
255
  # Get task class
259
256
  task_cls = Task.get_task_class(name)
260
257
 
@@ -268,8 +265,14 @@ def run_command(self, results, name, targets, opts={}):
268
265
  'print_remote_info': False,
269
266
  'has_children': chunk_it,
270
267
  })
268
+
269
+ if IN_CELERY_WORKER_PROCESS and chunk_it and routing_key != 'poll':
270
+ console.print(Info(message=f'Task {name} is chunkable but not running on "poll" queue, re-routing to "poll" queue'))
271
+ raise self.replace(run_command.si(results, name, targets, opts=opts).set(queue='poll', task_id=self.request.id))
272
+
271
273
  if chunk_it:
272
274
  task_opts['print_cmd'] = False
275
+
273
276
  task = task_cls(targets, **task_opts)
274
277
  debug(
275
278
  '',
@@ -295,6 +298,7 @@ def run_command(self, results, name, targets, opts={}):
295
298
  targets,
296
299
  results=results,
297
300
  chunk_size=chunk_size)
301
+ console.print(Info(message=f'Task "{name}" starts polling for chunked results'))
298
302
 
299
303
  # Update state before starting
300
304
  update_state(self, task)
@@ -304,6 +308,8 @@ def run_command(self, results, name, targets, opts={}):
304
308
  update_state(self, task)
305
309
 
306
310
  except BaseException as e:
311
+ if not task:
312
+ raise e
307
313
  error = Error.from_exception(e)
308
314
  error._source = task.unique_name
309
315
  error._uuid = str(uuid.uuid4())
@@ -311,6 +317,8 @@ def run_command(self, results, name, targets, opts={}):
311
317
  task.stop_celery_tasks()
312
318
 
313
319
  finally:
320
+ if not task:
321
+ raise
314
322
  update_state(self, task, force=True)
315
323
  gc.collect()
316
324
  debug('', obj={task.unique_name: task.status, 'results': task.results}, sub='celery.results', verbose=True)
@@ -0,0 +1,103 @@
1
+ import os
2
+ import signal
3
+ import threading
4
+
5
+ from celery import signals
6
+
7
+ from secator.config import CONFIG
8
+ from secator.output_types import Info
9
+ from secator.rich import console
10
+
11
+
12
+ IDLE_TIMEOUT = CONFIG.celery.worker_kill_after_idle_seconds
13
+ TASK_IN_PROGRESS = False
14
+
15
+
16
+ def kill_worker():
17
+ """"Kill current worker using it's pid by sending a SIGTERM to Celery master process."""
18
+ worker_name = os.environ['WORKER_NAME']
19
+ if not TASK_IN_PROGRESS:
20
+ pid = os.getpid()
21
+ console.print(Info(message=f'Sending SIGTERM to worker {worker_name} with pid {pid}'))
22
+ os.kill(pid, signal.SIGTERM)
23
+ else:
24
+ console.print(Info(message=f'Cancelling worker shutdown of {worker_name} since a task is currently in progress'))
25
+
26
+
27
+ class IdleTimer:
28
+ def __init__(self, timeout):
29
+ self.thread = None
30
+ self.is_started = False
31
+ self.thread = threading.Timer(timeout, kill_worker)
32
+
33
+ def start(self):
34
+ if self.is_started:
35
+ self.cancel()
36
+ self.thread.start()
37
+ self.is_started = True
38
+
39
+ def cancel(self):
40
+ self.thread.cancel()
41
+ self.s_started = False
42
+
43
+
44
+ IDLE_TIMER = IdleTimer(IDLE_TIMEOUT)
45
+
46
+
47
+ def maybe_override_logging():
48
+ def decorator(func):
49
+ if CONFIG.celery.override_default_logging:
50
+ return signals.setup_logging.connect(func)
51
+ else:
52
+ return func
53
+ return decorator
54
+
55
+
56
+ @maybe_override_logging()
57
+ def setup_logging(*args, **kwargs):
58
+ """Override celery's logging setup to prevent it from altering our settings.
59
+ github.com/celery/celery/issues/1867
60
+ """
61
+ pass
62
+
63
+
64
+ def capture_worker_name(sender, instance, **kwargs):
65
+ os.environ["WORKER_NAME"] = '{0}'.format(sender)
66
+
67
+
68
+ def worker_init_handler(**kwargs):
69
+ if IDLE_TIMEOUT != -1:
70
+ console.print(Info(message=f'Starting inactivity timer for {IDLE_TIMEOUT} seconds ...'))
71
+ IDLE_TIMER.start()
72
+
73
+
74
+ def task_prerun_handler(**kwargs):
75
+ global TASK_IN_PROGRESS, IDLE_TIMER
76
+ TASK_IN_PROGRESS = True
77
+ if IDLE_TIMEOUT != -1:
78
+ IDLE_TIMER.cancel()
79
+
80
+
81
+ def task_postrun_handler(**kwargs):
82
+ global TASK_IN_PROGRESS, IDLE_TIMER
83
+ TASK_IN_PROGRESS = False
84
+ sender_name = kwargs['sender'].name
85
+
86
+ if CONFIG.celery.worker_kill_after_task and sender_name.startswith('secator.'):
87
+ worker_name = os.environ['WORKER_NAME']
88
+ console.print(Info(message=f'Shutdown worker {worker_name} since config celery.worker_kill_after_task is set.'))
89
+ IDLE_TIMER.cancel()
90
+ kill_worker()
91
+ return
92
+
93
+ if IDLE_TIMEOUT != -1: # restart timer
94
+ console.print(Info(message=f'Reset inactivity timer to {IDLE_TIMEOUT} seconds'))
95
+ IDLE_TIMER.start()
96
+
97
+
98
+ def setup_handlers():
99
+ signals.celeryd_after_setup.connect(capture_worker_name)
100
+ signals.setup_logging.connect(setup_logging)
101
+ signals.task_prerun.connect(task_prerun_handler)
102
+ signals.task_postrun.connect(task_postrun_handler)
103
+ signals.worker_ready.connect(worker_init_handler)
secator/cli.py CHANGED
@@ -148,7 +148,7 @@ def worker(hostname, concurrency, reload, queue, pool, check, dev, stop, show):
148
148
  return
149
149
 
150
150
  if not queue:
151
- queue = 'io,cpu,' + ','.join([r['queue'] for r in app.conf.task_routes.values()])
151
+ queue = 'io,cpu,poll,' + ','.join(set([r['queue'] for r in app.conf.task_routes.values()]))
152
152
 
153
153
  app_str = 'secator.celery.app'
154
154
  celery = f'{sys.executable} -m celery'
secator/config.py CHANGED
@@ -73,6 +73,8 @@ class Celery(StrictModel):
73
73
  worker_max_tasks_per_child: int = 20
74
74
  worker_prefetch_multiplier: int = 1
75
75
  worker_send_task_events: bool = False
76
+ worker_kill_after_task: bool = False
77
+ worker_kill_after_idle_seconds: int = -1
76
78
 
77
79
 
78
80
  class Cli(StrictModel):
@@ -499,8 +501,8 @@ class Config(DotMap):
499
501
  self.set(path, value, set_partial=False)
500
502
  if not self.validate(print_errors=False) and print_errors:
501
503
  console.print(f'[bold red]{var} (override failed)[/]')
502
- elif print_errors:
503
- console.print(f'[bold red]{var} (override failed: key not found)[/]')
504
+ # elif print_errors:
505
+ # console.print(f'[bold red]{var} (override failed: key not found)[/]')
504
506
 
505
507
 
506
508
  def download_files(data: dict, target_folder: Path, offline_mode: bool, type: str):
secator/decorators.py CHANGED
@@ -28,6 +28,7 @@ RUNNER_OPTS = {
28
28
  RUNNER_GLOBAL_OPTS = {
29
29
  'sync': {'is_flag': True, 'help': 'Run tasks synchronously (automatic if no worker is alive)'},
30
30
  'worker': {'is_flag': True, 'default': False, 'help': 'Run tasks in worker'},
31
+ 'no_poll': {'is_flag': True, 'default': False, 'help': 'Do not live poll for tasks results when running in worker'},
31
32
  'proxy': {'type': str, 'help': 'HTTP proxy'},
32
33
  'driver': {'type': str, 'help': 'Export real-time results. E.g: "mongodb"'}
33
34
  # 'debug': {'type': int, 'default': 0, 'help': 'Debug mode'},
secator/runners/_base.py CHANGED
@@ -96,6 +96,7 @@ class Runner:
96
96
  self.celery_ids_map = {}
97
97
  self.caller = self.run_opts.get('caller', None)
98
98
  self.threads = []
99
+ self.no_poll = self.run_opts.get('no_poll', False)
99
100
 
100
101
  # Determine exporters
101
102
  exporters_str = self.run_opts.get('output') or self.default_exporters
@@ -139,7 +140,7 @@ class Runner:
139
140
  self.print_progress = self.run_opts.get('print_progress', False) and not self.quiet and not self.print_raw
140
141
  self.print_target = self.run_opts.get('print_target', False) and not self.quiet and not self.print_raw
141
142
  self.print_stat = self.run_opts.get('print_stat', False) and not self.quiet and not self.print_raw
142
- self.raise_on_error = self.run_opts.get('raise_on_error', not self.sync)
143
+ self.raise_on_error = self.run_opts.get('raise_on_error', False)
143
144
  self.print_opts = {k: v for k, v in self.__dict__.items() if k.startswith('print_') if v}
144
145
 
145
146
  # Debug
@@ -635,6 +636,8 @@ class Runner:
635
636
 
636
637
  def log_results(self):
637
638
  """Log runner results."""
639
+ if self.no_poll:
640
+ return
638
641
  self.done = True
639
642
  self.progress = 100
640
643
  self.end_time = datetime.fromtimestamp(time())
@@ -110,7 +110,7 @@ class Command(Runner):
110
110
  proxy_http = False
111
111
 
112
112
  # Profile
113
- profile = 'cpu'
113
+ profile = 'io'
114
114
 
115
115
  def __init__(self, inputs=[], **run_opts):
116
116
 
secator/runners/task.py CHANGED
@@ -26,6 +26,7 @@ class Task(Runner):
26
26
  # Run opts
27
27
  run_opts = self.run_opts.copy()
28
28
  run_opts.pop('output', None)
29
+ run_opts.pop('no_poll', False)
29
30
 
30
31
  # Set task output types
31
32
  self.output_types = task_cls.output_types
@@ -48,6 +49,8 @@ class Task(Runner):
48
49
  message=f'Celery task created: {self.celery_result.id}',
49
50
  task_id=self.celery_result.id
50
51
  )
52
+ if self.no_poll:
53
+ return
51
54
  results = CeleryData.iter_results(
52
55
  self.celery_result,
53
56
  ids_map=self.celery_ids_map,
@@ -26,6 +26,7 @@ class Workflow(Runner):
26
26
  # Task opts
27
27
  run_opts = self.run_opts.copy()
28
28
  run_opts['hooks'] = self._hooks.get(Task, {})
29
+ run_opts.pop('no_poll', False)
29
30
 
30
31
  # Build Celery workflow
31
32
  workflow = self.build_celery_workflow(
@@ -46,6 +47,8 @@ class Workflow(Runner):
46
47
  message=f'Celery task created: {self.celery_result.id}',
47
48
  task_id=self.celery_result.id
48
49
  )
50
+ if self.no_poll:
51
+ return
49
52
  results = CeleryData.iter_results(
50
53
  self.celery_result,
51
54
  ids_map=self.celery_ids_map,
@@ -70,7 +73,7 @@ class Workflow(Runner):
70
73
  self.inputs,
71
74
  self.config.options,
72
75
  run_opts)
73
- sigs = [forward_results.si(results).set(queue='io')] + sigs + [forward_results.s().set(queue='io')]
76
+ sigs = [forward_results.si(results).set(queue='results')] + sigs + [forward_results.s().set(queue='results')]
74
77
  workflow = chain(*sigs)
75
78
  return workflow
76
79
 
@@ -102,7 +105,7 @@ class Workflow(Runner):
102
105
  workflow_opts,
103
106
  run_opts
104
107
  )
105
- sig = chord((tasks), forward_results.s().set(queue='io'))
108
+ sig = chord((tasks), forward_results.s().set(queue='results'))
106
109
  elif task_name == '_chain':
107
110
  tasks = self.get_tasks(
108
111
  task_opts,
secator/tasks/katana.py CHANGED
@@ -30,7 +30,8 @@ class katana(HttpCrawler):
30
30
  'jsluice': {'is_flag': True, 'short': 'jsl', 'default': True, 'help': 'Enable jsluice parsing in javascript file (memory intensive)'}, # noqa: E501
31
31
  'known_files': {'type': str, 'short': 'kf', 'default': 'all', 'help': 'Enable crawling of known files (all, robotstxt, sitemapxml)'}, # noqa: E501
32
32
  'omit_raw': {'is_flag': True, 'short': 'or', 'default': True, 'help': 'Omit raw requests/responses from jsonl output'}, # noqa: E501
33
- 'omit_body': {'is_flag': True, 'short': 'ob', 'default': True, 'help': 'Omit response body from jsonl output'}
33
+ 'omit_body': {'is_flag': True, 'short': 'ob', 'default': True, 'help': 'Omit response body from jsonl output'},
34
+ 'no_sandbox': {'is_flag': True, 'short': 'ns', 'default': False, 'help': 'Disable sandboxing'},
34
35
  }
35
36
  opt_key_map = {
36
37
  HEADER: 'headers',
secator/template.py CHANGED
@@ -6,8 +6,10 @@ from pathlib import Path
6
6
  import yaml
7
7
  from dotmap import DotMap
8
8
 
9
- from secator.rich import console
10
9
  from secator.config import CONFIG, CONFIGS_FOLDER
10
+ from secator.rich import console
11
+ from secator.utils import convert_functions_to_strings
12
+
11
13
 
12
14
  TEMPLATES_DIR_KEYS = ['workflow', 'scan', 'profile']
13
15
 
@@ -106,7 +108,7 @@ class TemplateLoader(DotMap):
106
108
  task_opts = task_class.get_supported_opts()
107
109
  for name, conf in task_opts.items():
108
110
  if name not in opts or not opts[name].get('supported', False):
109
- opts[name] = conf
111
+ opts[name] = convert_functions_to_strings(conf)
110
112
  return opts
111
113
 
112
114
  def _extract_tasks(self):
secator/utils.py CHANGED
@@ -2,6 +2,7 @@ import fnmatch
2
2
  import inspect
3
3
  import importlib
4
4
  import itertools
5
+ import json
5
6
  import logging
6
7
  import operator
7
8
  import os
@@ -778,3 +779,22 @@ def process_wordlist(val):
778
779
  offline_mode=CONFIG.offline_mode,
779
780
  type='wordlist'
780
781
  )
782
+
783
+
784
+ def convert_functions_to_strings(data):
785
+ """Recursively convert functions to strings in a dict.
786
+
787
+ Args:
788
+ data (dict): Dictionary to convert.
789
+
790
+ Returns:
791
+ dict: Converted dictionary.
792
+ """
793
+ if isinstance(data, dict):
794
+ return {k: convert_functions_to_strings(v) for k, v in data.items()}
795
+ elif isinstance(data, list):
796
+ return [convert_functions_to_strings(v) for v in data]
797
+ elif callable(data):
798
+ return json.dumps(data.__name__) # or use inspect.getsource(data) if you want the actual function code
799
+ else:
800
+ return data
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: secator
3
- Version: 0.9.3
3
+ Version: 0.10.0
4
4
  Summary: The pentester's swiss knife.
5
5
  Project-URL: Homepage, https://github.com/freelabz/secator
6
6
  Project-URL: Issues, https://github.com/freelabz/secator/issues
@@ -1,17 +1,18 @@
1
1
  secator/.gitignore,sha256=da8MUc3hdb6Mo0WjZu2upn5uZMbXcBGvhdhTQ1L89HI,3093
2
2
  secator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- secator/celery.py,sha256=dvXUuvkPaoSd0Blq5Abz7ncinVXzjnES8aOLxXxvmvo,9865
3
+ secator/celery.py,sha256=cc7swFNPAuYMGRdL55Drwyo5RO4sK1l9UxDO-ojLM4Y,10518
4
+ secator/celery_signals.py,sha256=WG9d41CoRIPjHpauIiAE41ekI7j7DPQ01uWU6RHtcL0,2719
4
5
  secator/celery_utils.py,sha256=iIuCn_3YkPXCtpnbaYqpppU2TARzSDyTIYHkrRyt54s,7725
5
- secator/cli.py,sha256=SX_SNUA6LLdG7ICpUs5iSiNYOp_DkQLGE0uuB_KSrXE,43879
6
- secator/config.py,sha256=b5I4F2DO1WxxmHvnxii_lrelEefAB3ZMKBkSL-C0y4c,19569
7
- secator/decorators.py,sha256=tjH7WodxJEBIf2CCbegmvOe8H9DKSFh4iPLEhDNGPCA,13784
6
+ secator/cli.py,sha256=b-Oo_fACToy3pGfIo2Bzci_6rcWR4fONfOP01vnfVt4,43889
7
+ secator/config.py,sha256=xItKM29yvMqzNZZygSNZXZ2V9vJbTdRuLTfIoRfP3XE,19653
8
+ secator/decorators.py,sha256=3kYadCz6haIZtnjkFHSRfenTdc6Yu7bHd-0IVjhD72w,13902
8
9
  secator/definitions.py,sha256=gFtLT9fjNtX_1qkiCjNfQyCvYq07IhScsQzX4o20_SE,3084
9
10
  secator/installer.py,sha256=Q5qmGbxGmuhysEA9YovTpy-YY2TxxFskhrzSX44c42E,17971
10
11
  secator/report.py,sha256=qJkEdCFttDBXIwUNUzZqFU_sG8l0PvyTSTogZVBv1Rs,3628
11
12
  secator/rich.py,sha256=owmuLcTTUt8xYBTE3_SqWTkPeAomcU_8bPdW_V-U8VM,3264
12
- secator/template.py,sha256=Qy4RjcmlifeSA8CleWUBb9fluxuYHzxgEH0H-8qs8R4,4323
13
+ secator/template.py,sha256=Sb6PjCTGIkZ7I0OGWFp5CaXmjt-6VPe_xpcRhWhjGpU,4409
13
14
  secator/thread.py,sha256=rgRgEtcMgs2wyfLWVlCTUCLWeg6jsMo5iKpyyrON5rY,655
14
- secator/utils.py,sha256=HMw0Q4omL-a5VcbvUhATC30oOSEKxTVLANgVRfWKnkc,21211
15
+ secator/utils.py,sha256=FBDa0BWPFLDLXKD_3FwFd8Bmz-fP0inKX8kG8LoivjU,21748
15
16
  secator/utils_test.py,sha256=ArHwkWW89t0IDqxO4HjJWd_tm7tp1illP4pu3nLq5yo,6559
16
17
  secator/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
18
  secator/configs/profiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -67,13 +68,13 @@ secator/output_types/user_account.py,sha256=rm10somxyu30JHjj629IkR15Nhahylud_fVO
67
68
  secator/output_types/vulnerability.py,sha256=nF7OT9zGez8sZvLrkhjBOORjVi8hCqfCYUFq3eZ_ywo,2870
68
69
  secator/output_types/warning.py,sha256=47GtmG083GqGPb_R5JDFmARJ9Mqrme58UxwJhgdGPuI,853
69
70
  secator/runners/__init__.py,sha256=EBbOk37vkBy9p8Hhrbi-2VtM_rTwQ3b-0ggTyiD22cE,290
70
- secator/runners/_base.py,sha256=tcTsL35dAHsIMfgcclTtvDk2kQM4Hhu-8IZTyHJgqTs,28973
71
+ secator/runners/_base.py,sha256=QBYyrYCPMJz0YPP6lE8vkgIHmDLplO6byrsisqVjV5g,29047
71
72
  secator/runners/_helpers.py,sha256=FGogmmdHfCWmIyq7wRprwU1oOSxesOu3Y0N4GyAgiGw,2000
72
73
  secator/runners/celery.py,sha256=bqvDTTdoHiGRCt0FRvlgFHQ_nsjKMP5P0PzGbwfCj_0,425
73
- secator/runners/command.py,sha256=xjNTecsdtu94-3Gb7SoXDZLvN91wGPhYakMAw7d3R4o,25090
74
+ secator/runners/command.py,sha256=x7ktQLwIy7CWV-AEL6n5xY2sRzAijGCURuB17hJWRpY,25089
74
75
  secator/runners/scan.py,sha256=tuPuqwL6fIS4UbCoy5WPKthYWm_LL-vCPRD2qK58HZE,1232
75
- secator/runners/task.py,sha256=JXlwo3DyQnu69RbQ8xvJnXu6y0rDYN-3iT4q4gy39tI,2004
76
- secator/runners/workflow.py,sha256=vry_MZFx6dRrorTrdsUqvhMZGOLPCdzpxkvN6fnt62w,3783
76
+ secator/runners/task.py,sha256=f2AduWpIy8JHK-Qitl_2Kh0fia573_YHAyAlV6MsJ50,2068
77
+ secator/runners/workflow.py,sha256=XEhBfL-f3vGH0HgEPnj62d8ITxjH_tPXiNSVkaonuwQ,3862
77
78
  secator/scans/__init__.py,sha256=nlNLiRl7Vu--c_iXClFFcagMd_b_OWKitq8tX1-1krQ,641
78
79
  secator/serializers/__init__.py,sha256=OP5cmFl77ovgSCW_IDcZ21St2mUt5UK4QHfrsK2KvH8,248
79
80
  secator/serializers/dataclass.py,sha256=RqICpfsYWGjHAACAA2h2jZ_69CFHim4VZwcBqowGMcQ,1010
@@ -97,7 +98,7 @@ secator/tasks/gospider.py,sha256=XKLus6GnwN9MYU_ZFmNED-JeRn6n1Eg0CPgul8g1zLs,230
97
98
  secator/tasks/grype.py,sha256=xoOuldnHCrS0O1Y4IzjbSVvoX5eX-fLSZ74THdRC2so,2447
98
99
  secator/tasks/h8mail.py,sha256=wNukV-aB-bXPZNq7WL8n1nFgH5b5tGh6vOF80Yna33I,1934
99
100
  secator/tasks/httpx.py,sha256=ONfCdAOV7ARCM9tSnlucIAM3UQeWcMUm8QZX8F7u9Pg,5895
100
- secator/tasks/katana.py,sha256=A0nnjKKT-A34LBtEuG25lWh5Ria4nwgo4Ti31403E-Q,5256
101
+ secator/tasks/katana.py,sha256=J0HKPT4QIrDj4uW2gZe7ByW6iEwPortSszqaHDvziwY,5355
101
102
  secator/tasks/maigret.py,sha256=6anhBzB4lEM90Lk23cAD_ku7I_ghTpj0W0i3h6HARD8,2088
102
103
  secator/tasks/mapcidr.py,sha256=56ocbaDmB5_C_ns-773CgZXGOKOtkI9q9xJs2Rlfqio,990
103
104
  secator/tasks/msfconsole.py,sha256=TXVrvzSWw9Ncv2h9QJtaEinTMbps_z0zX1PFirERVho,6430
@@ -108,8 +109,8 @@ secator/tasks/searchsploit.py,sha256=gvtLZbL2hzAZ07Cf0cSj2Qs0GvWK94XyHvoPFsetXu8
108
109
  secator/tasks/subfinder.py,sha256=C6W5NnXT92OUB1aSS9IYseqdI3wDMAz70TOEl8X-o3U,1213
109
110
  secator/tasks/wpscan.py,sha256=C8eW3vWfbSFrxm5iPzs3MgcagIfSs7u51QZiecYbT2Q,5577
110
111
  secator/workflows/__init__.py,sha256=ivpZHiYYlj4JqlXLRmB9cmAPUGdk8QcUrCRL34hIqEA,665
111
- secator-0.9.3.dist-info/METADATA,sha256=vgiXAAGJ3eKoc3Nr6fVgpSZWjYBMpJ_umK-gMK_RF4o,14723
112
- secator-0.9.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
113
- secator-0.9.3.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
114
- secator-0.9.3.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
115
- secator-0.9.3.dist-info/RECORD,,
112
+ secator-0.10.0.dist-info/METADATA,sha256=TzYxMIn1BO_mo52EkeS5pyg7Ei13hf3Y787EsSa45GE,14724
113
+ secator-0.10.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
114
+ secator-0.10.0.dist-info/entry_points.txt,sha256=lPgsqqUXWgiuGSfKy-se5gHdQlAXIwS_A46NYq7Acic,44
115
+ secator-0.10.0.dist-info/licenses/LICENSE,sha256=19W5Jsy4WTctNkqmZIqLRV1gTDOp01S3LDj9iSgWaJ0,2867
116
+ secator-0.10.0.dist-info/RECORD,,