secator 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of secator might be problematic.

Files changed (90)
  1. secator/celery.py +160 -185
  2. secator/celery_utils.py +268 -0
  3. secator/cli.py +427 -176
  4. secator/config.py +114 -68
  5. secator/configs/workflows/host_recon.yaml +5 -3
  6. secator/configs/workflows/port_scan.yaml +7 -3
  7. secator/configs/workflows/subdomain_recon.yaml +2 -2
  8. secator/configs/workflows/url_bypass.yaml +10 -0
  9. secator/configs/workflows/url_dirsearch.yaml +1 -1
  10. secator/configs/workflows/url_vuln.yaml +1 -1
  11. secator/decorators.py +170 -92
  12. secator/definitions.py +11 -4
  13. secator/exporters/__init__.py +7 -5
  14. secator/exporters/console.py +10 -0
  15. secator/exporters/csv.py +27 -19
  16. secator/exporters/gdrive.py +16 -11
  17. secator/exporters/json.py +3 -1
  18. secator/exporters/table.py +30 -2
  19. secator/exporters/txt.py +20 -16
  20. secator/hooks/gcs.py +53 -0
  21. secator/hooks/mongodb.py +53 -27
  22. secator/installer.py +277 -60
  23. secator/output_types/__init__.py +29 -11
  24. secator/output_types/_base.py +11 -1
  25. secator/output_types/error.py +36 -0
  26. secator/output_types/exploit.py +12 -8
  27. secator/output_types/info.py +24 -0
  28. secator/output_types/ip.py +8 -1
  29. secator/output_types/port.py +9 -2
  30. secator/output_types/progress.py +5 -0
  31. secator/output_types/record.py +5 -3
  32. secator/output_types/stat.py +33 -0
  33. secator/output_types/subdomain.py +1 -1
  34. secator/output_types/tag.py +8 -6
  35. secator/output_types/target.py +2 -2
  36. secator/output_types/url.py +14 -11
  37. secator/output_types/user_account.py +6 -6
  38. secator/output_types/vulnerability.py +8 -6
  39. secator/output_types/warning.py +24 -0
  40. secator/report.py +56 -23
  41. secator/rich.py +44 -39
  42. secator/runners/_base.py +629 -638
  43. secator/runners/_helpers.py +5 -91
  44. secator/runners/celery.py +18 -0
  45. secator/runners/command.py +404 -214
  46. secator/runners/scan.py +8 -24
  47. secator/runners/task.py +21 -55
  48. secator/runners/workflow.py +41 -40
  49. secator/scans/__init__.py +28 -0
  50. secator/serializers/dataclass.py +6 -0
  51. secator/serializers/json.py +10 -5
  52. secator/serializers/regex.py +12 -4
  53. secator/tasks/_categories.py +147 -42
  54. secator/tasks/bbot.py +295 -0
  55. secator/tasks/bup.py +99 -0
  56. secator/tasks/cariddi.py +38 -49
  57. secator/tasks/dalfox.py +3 -0
  58. secator/tasks/dirsearch.py +14 -25
  59. secator/tasks/dnsx.py +49 -30
  60. secator/tasks/dnsxbrute.py +4 -1
  61. secator/tasks/feroxbuster.py +10 -20
  62. secator/tasks/ffuf.py +3 -2
  63. secator/tasks/fping.py +4 -4
  64. secator/tasks/gau.py +5 -0
  65. secator/tasks/gf.py +2 -2
  66. secator/tasks/gospider.py +4 -0
  67. secator/tasks/grype.py +11 -13
  68. secator/tasks/h8mail.py +32 -42
  69. secator/tasks/httpx.py +58 -21
  70. secator/tasks/katana.py +19 -23
  71. secator/tasks/maigret.py +27 -25
  72. secator/tasks/mapcidr.py +2 -3
  73. secator/tasks/msfconsole.py +22 -19
  74. secator/tasks/naabu.py +18 -2
  75. secator/tasks/nmap.py +82 -55
  76. secator/tasks/nuclei.py +13 -3
  77. secator/tasks/searchsploit.py +26 -11
  78. secator/tasks/subfinder.py +5 -1
  79. secator/tasks/wpscan.py +91 -94
  80. secator/template.py +61 -45
  81. secator/thread.py +24 -0
  82. secator/utils.py +417 -78
  83. secator/utils_test.py +48 -23
  84. secator/workflows/__init__.py +28 -0
  85. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/METADATA +59 -48
  86. secator-0.8.0.dist-info/RECORD +115 -0
  87. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/WHEEL +1 -1
  88. secator-0.6.0.dist-info/RECORD +0 -101
  89. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/entry_points.txt +0 -0
  90. {secator-0.6.0.dist-info → secator-0.8.0.dist-info}/licenses/LICENSE +0 -0
secator/exporters/txt.py CHANGED
@@ -1,24 +1,28 @@
  from secator.exporters._base import Exporter
+ from secator.output_types import Info
  from secator.rich import console


  class TxtExporter(Exporter):
-     def send(self):
-         results = self.report.data['results']
-         txt_paths = []
+     def send(self):
+         results = self.report.data['results']
+         if not results:
+             return
+         txt_paths = []

-         for output_type, items in results.items():
-             items = [str(i) for i in items]
-             if not items:
-                 continue
-             txt_path = f'{self.report.output_folder}/report_{output_type}.txt'
-             with open(txt_path, 'w') as f:
-                 f.write('\n'.join(items))
-             txt_paths.append(txt_path)
+         for output_type, items in results.items():
+             items = [str(i) for i in items]
+             if not items:
+                 continue
+             txt_path = f'{self.report.output_folder}/report_{output_type}.txt'
+             with open(txt_path, 'w') as f:
+                 f.write('\n'.join(items))
+             txt_paths.append(txt_path)

-         if len(txt_paths) == 1:
-             txt_paths_str = txt_paths[0]
-         else:
-             txt_paths_str = '\n • ' + '\n • '.join(txt_paths)
+         if len(txt_paths) == 1:
+             txt_paths_str = txt_paths[0]
+         else:
+             txt_paths_str = '\n • ' + '\n • '.join(txt_paths)

-         console.print(f':file_cabinet: Saved TXT reports to {txt_paths_str}')
+         info = Info(f'Saved TXT reports to {txt_paths_str}')
+         console.print(info)
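
Note: the change above routes the exporter's status message through the Info output type instead of printing a raw string. A minimal sketch of that pattern (the message text and path below are illustrative, not secator defaults):

    from secator.output_types import Info
    from secator.rich import console

    # Illustrative message; TxtExporter builds it from the report paths it just wrote.
    info = Info('Saved TXT reports to /tmp/report_url.txt')
    console.print(info)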
secator/hooks/gcs.py ADDED
@@ -0,0 +1,53 @@
+ from pathlib import Path
+ from time import time
+
+ from google.cloud import storage
+
+ from secator.config import CONFIG
+ from secator.runners import Task
+ from secator.thread import Thread
+ from secator.utils import debug
+
+
+ GCS_BUCKET_NAME = CONFIG.addons.gcs.bucket_name
+ ITEMS_TO_SEND = {
+     'url': ['screenshot_path']
+ }
+
+
+ def process_item(self, item):
+     if item._type not in ITEMS_TO_SEND.keys():
+         return item
+     if not GCS_BUCKET_NAME:
+         debug('skipped since addons.gcs.bucket_name is empty.', sub='hooks.gcs')
+         return item
+     to_send = ITEMS_TO_SEND[item._type]
+     for k, v in item.toDict().items():
+         if k in to_send and v:
+             path = Path(v)
+             if not path.exists():
+                 continue
+             ext = path.suffix
+             blob_name = f'{item._uuid}_{k}{ext}'
+             t = Thread(target=upload_blob, args=(GCS_BUCKET_NAME, v, blob_name))
+             t.start()
+             self.threads.append(t)
+             setattr(item, k, f'gs://{GCS_BUCKET_NAME}/{blob_name}')
+     return item
+
+
+ def upload_blob(bucket_name, source_file_name, destination_blob_name):
+     """Uploads a file to the bucket."""
+     start_time = time()
+     storage_client = storage.Client()
+     bucket = storage_client.bucket(bucket_name)
+     blob = bucket.blob(destination_blob_name)
+     blob.upload_from_filename(source_file_name)
+     end_time = time()
+     elapsed = end_time - start_time
+     debug(f'in {elapsed:.4f}s', obj={'blob': 'CREATED', 'blob_name': destination_blob_name, 'bucket': bucket_name}, obj_after=False, sub='hooks.gcs', verbose=True)  # noqa: E501
+
+
+ HOOKS = {
+     Task: {'on_item': [process_item]}
+ }
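
Note: this new hook uploads local screenshot files referenced by url findings to the bucket configured under addons.gcs.bucket_name and rewrites the field to a gs:// URI. A minimal standalone sketch of the upload helper it adds, assuming google-cloud-storage is installed and GCP credentials are configured; bucket and file names are placeholders:

    from secator.hooks.gcs import upload_blob

    # Placeholder names; upload_blob(bucket, source_file, destination_blob) as defined above.
    upload_blob('my-screenshots-bucket', '/tmp/screenshot.png', 'finding-uuid_screenshot_path.png')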
secator/hooks/mongodb.py CHANGED
@@ -6,7 +6,7 @@ from bson.objectid import ObjectId
  from celery import shared_task

  from secator.config import CONFIG
- from secator.output_types import OUTPUT_TYPES
+ from secator.output_types import FINDING_TYPES
  from secator.runners import Scan, Task, Workflow
  from secator.utils import debug, escape_mongodb_url

@@ -15,11 +15,27 @@ from secator.utils import debug, escape_mongodb_url

  MONGODB_URL = CONFIG.addons.mongodb.url
  MONGODB_UPDATE_FREQUENCY = CONFIG.addons.mongodb.update_frequency
- MAX_POOL_SIZE = 100
+ MONGODB_CONNECT_TIMEOUT = CONFIG.addons.mongodb.server_selection_timeout_ms
+ MONGODB_MAX_POOL_SIZE = CONFIG.addons.mongodb.max_pool_size

  logger = logging.getLogger(__name__)

- client = pymongo.MongoClient(escape_mongodb_url(MONGODB_URL), maxPoolSize=MAX_POOL_SIZE)
+ client = pymongo.MongoClient(
+     escape_mongodb_url(MONGODB_URL),
+     maxPoolSize=MONGODB_MAX_POOL_SIZE,
+     serverSelectionTimeoutMS=MONGODB_CONNECT_TIMEOUT
+ )
+
+
+ def get_runner_dbg(runner):
+     """Runner debug object"""
+     return {
+         runner.unique_name: runner.status,
+         'type': runner.config.type,
+         'class': runner.__class__.__name__,
+         'caller': runner.config.name,
+         **runner.context
+     }


  def update_runner(self):
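
Note: the client construction above now reads the pool size and server-selection timeout from the addons.mongodb config instead of a hardcoded pool size. For reference, a standalone pymongo sketch with illustrative values (secator pulls the real values from CONFIG.addons.mongodb):

    import pymongo

    # Illustrative values only; a short serverSelectionTimeoutMS makes an unreachable MongoDB fail fast.
    client = pymongo.MongoClient(
        'mongodb://localhost:27017',
        maxPoolSize=10,
        serverSelectionTimeoutMS=5000,
    )
    client.admin.command('ping')  # raises ServerSelectionTimeoutError if no server is reachable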
@@ -27,25 +43,19 @@ def update_runner(self):
      type = self.config.type
      collection = f'{type}s'
      update = self.toDict()
-     debug_obj = {'type': 'runner', 'name': self.name, 'status': self.status}
      chunk = update.get('chunk')
      _id = self.context.get(f'{type}_chunk_id') if chunk else self.context.get(f'{type}_id')
-     debug('update', sub='hooks.mongodb', id=_id, obj=update, obj_after=True, level=4)
+     debug('to_update', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=True, obj_breaklines=False, verbose=True)  # noqa: E501
      start_time = time.time()
      if _id:
-         delta = start_time - self.last_updated if self.last_updated else MONGODB_UPDATE_FREQUENCY
-         if self.last_updated and delta < MONGODB_UPDATE_FREQUENCY and self.status == 'RUNNING':
-             debug(f'skipped ({delta:>.2f}s < {MONGODB_UPDATE_FREQUENCY}s)',
-                   sub='hooks.mongodb', id=_id, obj=debug_obj, obj_after=False, level=3)
-             return
          db = client.main
         start_time = time.time()
         db[collection].update_one({'_id': ObjectId(_id)}, {'$set': update})
         end_time = time.time()
         elapsed = end_time - start_time
         debug(
-             f'[dim gold4]updated in {elapsed:.4f}s[/]', sub='hooks.mongodb', id=_id, obj=debug_obj, obj_after=False, level=2)
-         self.last_updated = start_time
+             f'[dim gold4]updated in {elapsed:.4f}s[/]', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=False)  # noqa: E501
+         self.last_updated_db = start_time
      else: # sync update and save result to runner object
          runner = db[collection].insert_one(update)
          _id = str(runner.inserted_id)
@@ -55,13 +65,16 @@ def update_runner(self):
              self.context[f'{type}_id'] = _id
      end_time = time.time()
      elapsed = end_time - start_time
-     debug(f'created in {elapsed:.4f}s', sub='hooks.mongodb', id=_id, obj=debug_obj, obj_after=False, level=2)
+     debug(f'in {elapsed:.4f}s', sub='hooks.mongodb', id=_id, obj=get_runner_dbg(self), obj_after=False)


  def update_finding(self, item):
+     if type(item) not in FINDING_TYPES:
+         return item
      start_time = time.time()
      db = client.main
      update = item.toDict()
+     _type = item._type
      _id = ObjectId(item._uuid) if ObjectId.is_valid(item._uuid) else None
      if _id:
          finding = db['findings'].update_one({'_id': _id}, {'$set': update})
@@ -72,7 +85,14 @@ def update_finding(self, item):
          status = 'CREATED'
      end_time = time.time()
      elapsed = end_time - start_time
-     debug(f'in {elapsed:.4f}s', sub='hooks.mongodb', id=str(item._uuid), obj={'finding': status}, obj_after=False)
+     debug_obj = {
+         _type: status,
+         'type': 'finding',
+         'class': self.__class__.__name__,
+         'caller': self.config.name,
+         **self.context
+     }
+     debug(f'in {elapsed:.4f}s', sub='hooks.mongodb', id=str(item._uuid), obj=debug_obj, obj_after=False)  # noqa: E501
      return item


@@ -80,20 +100,23 @@ def find_duplicates(self):
      ws_id = self.toDict().get('context', {}).get('workspace_id')
      if not ws_id:
          return
-     celery_id = tag_duplicates.delay(ws_id)
-     debug(f'running duplicate check on workspace {ws_id}', id=celery_id, sub='hooks.mongodb')
+     if self.sync:
+         debug(f'running duplicate check on workspace {ws_id}', sub='hooks.mongodb')
+         tag_duplicates(ws_id)
+     else:
+         celery_id = tag_duplicates.delay(ws_id)
+         debug(f'running duplicate check on workspace {ws_id}', id=celery_id, sub='hooks.mongodb')


  def load_finding(obj):
      finding_type = obj['_type']
      klass = None
-     for otype in OUTPUT_TYPES:
+     for otype in FINDING_TYPES:
          if finding_type == otype.get_name():
              klass = otype
              item = klass.load(obj)
              item._uuid = str(obj['_id'])
              return item
-     debug('could not load Secator output type from MongoDB object', obj=obj, sub='hooks.mongodb')
      return None


@@ -149,18 +172,19 @@ def tag_duplicates(ws_id: str = None):
                  'seen dupes': len(seen_dupes)
              },
              id=ws_id,
-             sub='hooks.mongodb')
+             sub='hooks.mongodb.duplicates',
+             verbose=True)
          tmp_duplicates_ids = list(dict.fromkeys([i._uuid for i in tmp_duplicates]))
-         debug(f'duplicate ids: {tmp_duplicates_ids}', id=ws_id, sub='hooks.mongodb')
+         debug(f'duplicate ids: {tmp_duplicates_ids}', id=ws_id, sub='hooks.mongodb.duplicates', verbose=True)

          # Update latest object as non-duplicate
          if tmp_duplicates:
              duplicates.extend([f for f in tmp_duplicates])
              db.findings.update_one({'_id': ObjectId(item._uuid)}, {'$set': {'_related': tmp_duplicates_ids}})
-             debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb')
+             debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb.duplicates', verbose=True)
              non_duplicates.append(item)
          else:
-             debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb')
+             debug(f'adding {item._uuid} as non-duplicate', id=ws_id, sub='hooks.mongodb.duplicates', verbose=True)
              non_duplicates.append(item)

      # debug(f'found {len(duplicates)} total duplicates')
@@ -184,19 +208,21 @@
          'duplicates': len(duplicates_ids),
          'non-duplicates': len(non_duplicates_ids)
      },
-     sub='hooks.mongodb')
+     sub='hooks.mongodb.duplicates')


- MONGODB_HOOKS = {
+ HOOKS = {
      Scan: {
+         'on_init': [update_runner],
          'on_start': [update_runner],
-         'on_iter': [update_runner],
+         'on_interval': [update_runner],
          'on_duplicate': [update_finding],
          'on_end': [update_runner],
      },
      Workflow: {
+         'on_init': [update_runner],
          'on_start': [update_runner],
-         'on_iter': [update_runner],
+         'on_interval': [update_runner],
          'on_duplicate': [update_finding],
          'on_end': [update_runner],
      },
@@ -205,7 +231,7 @@ MONGODB_HOOKS = {
          'on_start': [update_runner],
          'on_item': [update_finding],
          'on_duplicate': [update_finding],
-         'on_iter': [update_runner],
+         'on_interval': [update_runner],
          'on_end': [update_runner, find_duplicates]
      }
  }
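
Note: both hook modules in this diff (gcs.py and mongodb.py) now expose a module-level HOOKS dict mapping runner classes to lifecycle events ('on_init', 'on_start', 'on_interval', 'on_item', 'on_duplicate', 'on_end') and lists of callables. A minimal sketch of that shape, with a purely illustrative callback:

    from secator.runners import Task

    def print_item(self, item):
        # Illustrative hook: runs for every item a Task yields; returns the item like the hooks above.
        print(f'{self.config.name}: {item._type}')
        return item

    HOOKS = {
        Task: {'on_item': [print_item]}
    }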