secator 0.15.1 → 0.16.1 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- secator/celery.py +40 -24
- secator/celery_signals.py +71 -68
- secator/celery_utils.py +43 -27
- secator/cli.py +520 -280
- secator/cli_helper.py +394 -0
- secator/click.py +87 -0
- secator/config.py +67 -39
- secator/configs/profiles/http_headless.yaml +6 -0
- secator/configs/profiles/http_record.yaml +6 -0
- secator/configs/profiles/tor.yaml +1 -1
- secator/configs/scans/domain.yaml +4 -2
- secator/configs/scans/host.yaml +1 -1
- secator/configs/scans/network.yaml +1 -4
- secator/configs/scans/subdomain.yaml +13 -1
- secator/configs/scans/url.yaml +1 -2
- secator/configs/workflows/cidr_recon.yaml +6 -4
- secator/configs/workflows/code_scan.yaml +1 -1
- secator/configs/workflows/host_recon.yaml +29 -3
- secator/configs/workflows/subdomain_recon.yaml +67 -16
- secator/configs/workflows/url_crawl.yaml +44 -15
- secator/configs/workflows/url_dirsearch.yaml +4 -4
- secator/configs/workflows/url_fuzz.yaml +25 -17
- secator/configs/workflows/url_params_fuzz.yaml +7 -0
- secator/configs/workflows/url_vuln.yaml +33 -8
- secator/configs/workflows/user_hunt.yaml +4 -2
- secator/configs/workflows/wordpress.yaml +5 -3
- secator/cve.py +718 -0
- secator/decorators.py +0 -454
- secator/definitions.py +49 -30
- secator/exporters/_base.py +2 -2
- secator/exporters/console.py +2 -2
- secator/exporters/table.py +4 -3
- secator/exporters/txt.py +1 -1
- secator/hooks/mongodb.py +2 -4
- secator/installer.py +77 -49
- secator/loader.py +116 -0
- secator/output_types/_base.py +3 -0
- secator/output_types/certificate.py +63 -63
- secator/output_types/error.py +4 -5
- secator/output_types/info.py +2 -2
- secator/output_types/ip.py +3 -1
- secator/output_types/progress.py +5 -9
- secator/output_types/state.py +17 -17
- secator/output_types/tag.py +3 -0
- secator/output_types/target.py +10 -2
- secator/output_types/url.py +19 -7
- secator/output_types/vulnerability.py +11 -7
- secator/output_types/warning.py +2 -2
- secator/report.py +27 -15
- secator/rich.py +18 -10
- secator/runners/_base.py +446 -233
- secator/runners/_helpers.py +133 -24
- secator/runners/command.py +182 -102
- secator/runners/scan.py +33 -5
- secator/runners/task.py +13 -7
- secator/runners/workflow.py +105 -72
- secator/scans/__init__.py +2 -2
- secator/serializers/dataclass.py +20 -20
- secator/tasks/__init__.py +4 -4
- secator/tasks/_categories.py +39 -27
- secator/tasks/arjun.py +9 -5
- secator/tasks/bbot.py +53 -21
- secator/tasks/bup.py +19 -5
- secator/tasks/cariddi.py +24 -3
- secator/tasks/dalfox.py +26 -7
- secator/tasks/dirsearch.py +10 -4
- secator/tasks/dnsx.py +70 -25
- secator/tasks/feroxbuster.py +11 -3
- secator/tasks/ffuf.py +42 -6
- secator/tasks/fping.py +20 -8
- secator/tasks/gau.py +3 -1
- secator/tasks/gf.py +3 -3
- secator/tasks/gitleaks.py +2 -2
- secator/tasks/gospider.py +7 -1
- secator/tasks/grype.py +5 -4
- secator/tasks/h8mail.py +2 -1
- secator/tasks/httpx.py +18 -5
- secator/tasks/katana.py +35 -15
- secator/tasks/maigret.py +4 -4
- secator/tasks/mapcidr.py +3 -3
- secator/tasks/msfconsole.py +4 -4
- secator/tasks/naabu.py +2 -2
- secator/tasks/nmap.py +12 -14
- secator/tasks/nuclei.py +3 -3
- secator/tasks/searchsploit.py +4 -5
- secator/tasks/subfinder.py +2 -2
- secator/tasks/testssl.py +264 -263
- secator/tasks/trivy.py +5 -5
- secator/tasks/wafw00f.py +21 -3
- secator/tasks/wpprobe.py +90 -83
- secator/tasks/wpscan.py +6 -5
- secator/template.py +218 -104
- secator/thread.py +15 -15
- secator/tree.py +196 -0
- secator/utils.py +131 -123
- secator/utils_test.py +60 -19
- secator/workflows/__init__.py +2 -2
- {secator-0.15.1.dist-info → secator-0.16.1.dist-info}/METADATA +36 -36
- secator-0.16.1.dist-info/RECORD +132 -0
- secator/configs/profiles/default.yaml +0 -8
- secator/configs/workflows/url_nuclei.yaml +0 -11
- secator/tasks/dnsxbrute.py +0 -42
- secator-0.15.1.dist-info/RECORD +0 -128
- {secator-0.15.1.dist-info → secator-0.16.1.dist-info}/WHEEL +0 -0
- {secator-0.15.1.dist-info → secator-0.16.1.dist-info}/entry_points.txt +0 -0
- {secator-0.15.1.dist-info → secator-0.16.1.dist-info}/licenses/LICENSE +0 -0
secator/celery.py
CHANGED
@@ -1,8 +1,6 @@
-import gc
 import json
 import logging
 import os
-import uuid
 
 from time import time
 
@@ -33,7 +31,7 @@ logging.basicConfig(
 	handlers=[rich_handler],
 	force=True)
 logging.getLogger('kombu').setLevel(logging.ERROR)
-logging.getLogger('celery').setLevel(logging.
+logging.getLogger('celery').setLevel(logging.DEBUG if 'celery.debug' in CONFIG.debug or 'celery.*' in CONFIG.debug else logging.WARNING)  # noqa: E501
 logger = logging.getLogger(__name__)
 trace.LOG_SUCCESS = "Task %(name)s[%(id)s] succeeded in %(runtime)ss"
 
@@ -169,9 +167,17 @@ def run_scan(self, args=[], kwargs={}):
 @app.task(bind=True)
 def run_command(self, results, name, targets, opts={}):
 	if IN_CELERY_WORKER_PROCESS:
-
-
-
+		quiet = not CONFIG.cli.worker_command_verbose
+		opts.update({
+			'print_item': True,
+			'print_line': True,
+			'print_cmd': True,
+			'print_target': True,
+			'print_profiles': True,
+			'quiet': quiet
+		})
+		routing_key = self.request.delivery_info['routing_key']
+		debug(f'Task "{name}" running with routing key "{routing_key}"', sub='celery.state')
 
 	# Flatten + dedupe + filter results
 	results = forward_results(results)
@@ -188,11 +194,13 @@ def run_command(self, results, name, targets, opts={}):
 	sync = not IN_CELERY_WORKER_PROCESS
 	task_cls = Task.get_task_class(name)
 	task = task_cls(targets, **opts)
+	chunk_it = task.needs_chunking(sync)
+	task.has_children = chunk_it
 	task.mark_started()
 	update_state(self, task, force=True)
 
 	# Chunk task if needed
-	if
+	if chunk_it:
 		if IN_CELERY_WORKER_PROCESS:
 			console.print(Info(message=f'Task {name} requires chunking, breaking into {len(targets)} tasks'))
 		tasks = break_task(task, opts, results=results)
@@ -200,12 +208,10 @@
 		return self.replace(tasks)
 
 	# Update state live
-
+	for _ in task:
+		update_state(self, task)
 	update_state(self, task, force=True)
 
-	# Garbage collection to save RAM
-	gc.collect()
-
 	return task.results
 
 
@@ -238,8 +244,7 @@ def mark_runner_started(results, runner, enable_hooks=True):
 	if results:
 		runner.results = forward_results(results)
 	runner.enable_hooks = enable_hooks
-
-	runner.mark_started()
+	runner.mark_started()
 	return runner.results
 
 
@@ -258,9 +263,9 @@ def mark_runner_completed(results, runner, enable_hooks=True):
 	debug(f'Runner {runner.unique_name} has finished, running mark_completed', sub='celery')
 	results = forward_results(results)
 	runner.enable_hooks = enable_hooks
-
-
-
+	for item in results:
+		runner.add_result(item, print=False)
+	runner.mark_completed()
 	return runner.results
 
 
@@ -294,7 +299,7 @@ def break_task(task, task_opts, results=[]):
 	)
 
 	# Clone opts
-
+	base_opts = task_opts.copy()
 
 	# Build signatures
 	sigs = []
@@ -302,17 +307,28 @@
 	for ix, chunk in enumerate(chunks):
 		if not isinstance(chunk, list):
 			chunk = [chunk]
-
-
-
-
+
+		# Add chunk info to opts
+		opts = base_opts.copy()
+		opts.update({'chunk': ix + 1, 'chunk_count': len(chunks)})
+		debug('', obj={
+			task.unique_name: 'CHUNK',
+			'chunk': f'{ix + 1} / {len(chunks)}',
+			'target_count': len(chunk),
+			'targets': chunk
+		}, sub='celery.state')  # noqa: E501
+
+		# Construct chunked signature
 		opts['has_parent'] = True
 		opts['enable_duplicate_check'] = False
 		opts['results'] = results
-
+		if 'targets_' in opts:
+			del opts['targets_']
+		sig = type(task).si(chunk, **opts)
+		task_id = sig.freeze().task_id
 		full_name = f'{task.name}_{ix + 1}'
-		task.add_subtask(task_id, task.name,
-		info = Info(message=f'Celery chunked task created: {task_id}'
+		task.add_subtask(task_id, task.name, full_name)
+		info = Info(message=f'Celery chunked task created: {task_id}')
 		task.add_result(info)
 		sigs.append(sig)
 
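The `break_task` rewrite above is a standard Celery fan-out: clone the base opts per chunk, build an immutable signature with `si()`, and `freeze()` it so the chunk's task id is known before dispatch; that id is what `task.add_subtask()` registers so the parent can track its children. A minimal standalone sketch of the same pattern (the `app`, `scan_chunk`, and `chunk_size` names are illustrative, not secator's):

```python
from celery import Celery, group

app = Celery('sketch', broker='memory://')


@app.task(bind=True)
def scan_chunk(self, targets, chunk=1, chunk_count=1):
	# Stand-in for secator's run_command task; chunk/chunk_count mirror
	# the opts injected per chunk by break_task in the diff above.
	return [f'{t}:scanned' for t in targets]


def break_into_chunks(targets, chunk_size=2):
	chunks = [targets[i:i + chunk_size] for i in range(0, len(targets), chunk_size)]
	sigs = []
	for ix, chunk in enumerate(chunks):
		# si() builds an immutable signature: parent results are not
		# prepended to the chunk's arguments when it runs in a chain.
		sig = scan_chunk.si(chunk, chunk=ix + 1, chunk_count=len(chunks))
		# freeze() assigns the task id before anything reaches the
		# broker, so it can be registered as a subtask right away.
		task_id = sig.freeze().task_id
		print(f'subtask {task_id}: chunk {ix + 1}/{len(chunks)} ({len(chunk)} targets)')
		sigs.append(sig)
	return group(sigs)
```

In the diff itself, the resulting signatures are returned through `self.replace(tasks)`, which substitutes the chunked group for the current task within the Celery canvas.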
secator/celery_signals.py
CHANGED
@@ -19,116 +19,119 @@ STATE_DIR.mkdir(exist_ok=True, parents=True)
 
 
 def get_lock_file_path():
-
-
+	worker_name = os.environ.get("WORKER_NAME", f"unknown_{os.getpid()}")
+	return Path(f"/tmp/celery_worker_{worker_name}.lock")
 
 
 def set_task_running(task_id):
-
-
-
+	"""Mark that a task is running in current worker"""
+	with open(get_lock_file_path(), "w") as f:
+		f.write(task_id)
 
 
 def clear_task_running():
-
-
-
-
+	"""Clear the task running state"""
+	lock_file = get_lock_file_path()
+	if lock_file.exists():
+		lock_file.unlink()
 
 
 def is_task_running():
-
-
+	"""Check if a task is currently running"""
+	return get_lock_file_path().exists()
 
 
 def kill_worker(parent=False):
-
-
+	"""Kill current worker using its pid by sending a SIGTERM to Celery master process."""
+	worker_name = os.environ.get('WORKER_NAME', 'unknown')
 
-
-
-
-
-
-
-
+	# Check if a task is running via the lock file
+	if not is_task_running():
+		pid = os.getppid() if parent else os.getpid()
+		console.print(Info(message=f'Sending SIGTERM to worker {worker_name} with pid {pid}'))
+		os.kill(pid, signal.SIGTERM)
+	else:
+		console.print(Info(message=f'Cancelling worker shutdown of {worker_name} since a task is running'))
 
 
 def setup_idle_timer(timeout):
-
-
-
+	"""Setup a timer to kill the worker after being idle"""
+	if timeout == -1:
+		return
 
-
-
-
-
+	console.print(Info(message=f'Starting inactivity timer for {timeout} seconds ...'))
+	timer = threading.Timer(timeout, kill_worker)
+	timer.daemon = True  # Make sure timer is killed when worker exits
+	timer.start()
 
 
 def setup_logging(*args, **kwargs):
-
-
-
-
+	"""Override celery's logging setup to prevent it from altering our settings.
+	github.com/celery/celery/issues/1867
+	"""
+	pass
 
 
 def capture_worker_name(sender, instance, **kwargs):
-
+	os.environ["WORKER_NAME"] = '{0}'.format(sender)
 
 
 def worker_init_handler(**kwargs):
-
-
+	if IDLE_TIMEOUT != -1:
+		setup_idle_timer(IDLE_TIMEOUT)
 
 
 def task_prerun_handler(task_id, **kwargs):
-
-
+	# Mark that a task is running
+	set_task_running(task_id)
 
 
 def task_postrun_handler(**kwargs):
-
-
+	# Mark that no task is running
+	clear_task_running()
 
-
-
+	# Get sender name from kwargs
+	sender_name = kwargs['sender'].name
 
-
-
-
-
-
+	if CONFIG.celery.worker_kill_after_task and sender_name.startswith('secator.'):
+		worker_name = os.environ.get('WORKER_NAME', 'unknown')
+		console.print(Info(message=f'Shutdown worker {worker_name} since config celery.worker_kill_after_task is set.'))
+		kill_worker(parent=True)
+		return
 
-
-
-
-
+	# Set up a new idle timer
+	if IDLE_TIMEOUT != -1:
+		console.print(Info(message=f'Reset inactivity timer to {IDLE_TIMEOUT} seconds'))
+		setup_idle_timer(IDLE_TIMEOUT)
 
 
 def task_revoked_handler(request=None, **kwargs):
-
-
-
+	"""Handle revoked tasks by clearing the task running state"""
+	console.print(Info(message='Task was revoked, clearing running state'))
+	clear_task_running()
 
-
-
-
-
+	# Set up a new idle timer
+	if IDLE_TIMEOUT != -1:
+		console.print(Info(message=f'Reset inactivity timer to {IDLE_TIMEOUT} seconds after task revocation'))
+		setup_idle_timer(IDLE_TIMEOUT)
 
 
 def worker_shutdown_handler(**kwargs):
-
-
-
-
+	"""Cleanup lock files when worker shuts down"""
+	lock_file = get_lock_file_path()
+	if lock_file.exists():
+		lock_file.unlink()
 
 
 def setup_handlers():
-
-
-
-
-
-
-
-
+	if CONFIG.celery.override_default_logging:
+		signals.setup_logging.connect(setup_logging)
+
+	# Register common handlers when either task- or idle-based termination is enabled
+	if CONFIG.celery.worker_kill_after_task or CONFIG.celery.worker_kill_after_idle_seconds != -1:
+		signals.celeryd_after_setup.connect(capture_worker_name)
+		signals.task_postrun.connect(task_postrun_handler)
+		signals.task_prerun.connect(task_prerun_handler)
+		signals.task_revoked.connect(task_revoked_handler)
+		signals.worker_ready.connect(worker_init_handler)
+		signals.worker_shutdown.connect(worker_shutdown_handler)
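Taken together, the rewritten handlers implement a small idle-reaper protocol: a per-worker lock file marks that a task is in flight, and a daemonized `threading.Timer` sends SIGTERM only if that lock file is absent when the timer fires. A condensed sketch of the same pattern, with illustrative paths and names rather than secator's exact ones:

```python
import os
import signal
import threading
from pathlib import Path

LOCK = Path(f'/tmp/worker_{os.getpid()}.lock')


def task_prerun(task_id):
	# Same role as set_task_running(): the lock file is the
	# "a task is in flight" flag.
	LOCK.write_text(task_id)


def task_postrun(idle_timeout=300):
	LOCK.unlink(missing_ok=True)  # clear_task_running()
	arm_idle_timer(idle_timeout)  # re-arm after every task


def arm_idle_timer(timeout):
	if timeout == -1:
		return  # feature disabled, as in setup_idle_timer()
	timer = threading.Timer(timeout, reap_if_idle)
	timer.daemon = True  # never outlive the worker process
	timer.start()


def reap_if_idle():
	# Re-check the flag at fire time: an old timer may fire while a
	# new task runs, so the lock file is the source of truth.
	if not LOCK.exists():
		os.kill(os.getpid(), signal.SIGTERM)
```

The fire-time re-check is the load-bearing part: timers armed after earlier tasks are never cancelled, so a stale timer can fire mid-task; `kill_worker()` above performs the same lock-file check before signalling.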
secator/celery_utils.py
CHANGED
@@ -1,3 +1,5 @@
+import gc
+
 from contextlib import nullcontext
 from time import sleep
 
@@ -5,6 +7,7 @@ import kombu
 import kombu.exceptions
 
 from celery.result import AsyncResult, GroupResult
+from celery.exceptions import TaskRevokedError
 from greenlet import GreenletExit
 from rich.panel import Panel
 from rich.padding import Padding
@@ -24,6 +27,7 @@ class CeleryData(object):
 			result,
 			ids_map={},
 			description=True,
+			revoked=False,
 			refresh_interval=CONFIG.runners.poll_frequency,
 			print_remote_info=True,
 			print_remote_title='Results'
@@ -33,6 +37,7 @@ class CeleryData(object):
 		Args:
 			result (Union[AsyncResult, GroupResult]): Celery result.
 			description (bool): Whether to show task description.
+			revoked (bool): Whether the task was revoked.
 			refresh_interval (int): Refresh interval.
 			print_remote_info (bool): Whether to display live results.
 			print_remote_title (str): Title for the progress panel.
@@ -59,7 +64,7 @@ class CeleryData(object):
 			TextColumn('{task.fields[count]}'),
 			TextColumn('{task.fields[progress]}%'),
 			# TextColumn('\[[bold magenta]{task.fields[id]:<30}[/]]'),  # noqa: W605
-
+			auto_refresh=False,
 			transient=False,
 			console=console,
 			# redirect_stderr=True,
@@ -75,7 +80,7 @@ class CeleryData(object):
 		progress_cache = CeleryData.init_progress(progress, ids_map)
 
 		# Get live results and print progress
-		for data in CeleryData.poll(result, ids_map, refresh_interval):
+		for data in CeleryData.poll(result, ids_map, refresh_interval, revoked):
 			for result in data['results']:
 
 				# Add dynamic subtask to ids_map
@@ -93,6 +98,7 @@ class CeleryData(object):
 						'progress': 0
 					}
 				yield result
+				del result
 
 			if print_remote_info:
 				task_id = data['id']
@@ -103,11 +109,17 @@
 					continue
 				progress_id = progress_cache[task_id]
 				CeleryData.update_progress(progress, progress_id, data)
+				progress.refresh()
+
+			# Garbage collect between polls
+			del data
+			gc.collect()
 
 		# Update all tasks to 100 %
 		if print_remote_info:
 			for progress_id in progress_cache.values():
 				progress.update(progress_id, advance=100)
+			progress.refresh()
 
 	@staticmethod
 	def init_progress(progress, ids_map):
@@ -130,36 +142,22 @@
 		progress.update(progress_id, **pdata)
 
 	@staticmethod
-	def poll(result, ids_map, refresh_interval):
+	def poll(result, ids_map, refresh_interval, revoked=False):
 		"""Poll Celery subtasks results in real-time. Fetch task metadata and partial results from each task that runs.
 
 		Yields:
			dict: Subtasks state and results.
 		"""
-
+		exit_loop = False
+		while not exit_loop:
 			try:
-
-
-					state=result.state,
-					_source='celery'
-				)
-				debug(f"Main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
-				yield {'id': result.id, 'state': result.state, 'results': [main_task]}
-				yield from CeleryData.get_all_data(result, ids_map)
-
-				if result.ready():
+				yield from CeleryData.get_all_data(result, ids_map, revoked=revoked)
+				if result.ready() or revoked:
 					debug('result is ready', sub='celery.poll', id=result.id)
-
-						task_id=result.id,
-						state=result.state,
-						_source='celery'
-					)
-					debug(f"Final main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
-					yield {'id': result.id, 'state': result.state, 'results': [main_task]}
-					yield from CeleryData.get_all_data(result, ids_map)
-					break
+					exit_loop = True
 			except (KeyboardInterrupt, GreenletExit):
 				debug('encounted KeyboardInterrupt or GreenletExit', sub='celery.poll')
+				yield from CeleryData.get_all_data(result, ids_map, revoked=revoked)
 				raise
 			except Exception as e:
 				error = Error.from_exception(e)
@@ -169,7 +167,18 @@
 			sleep(refresh_interval)
 
 	@staticmethod
-	def get_all_data(result, ids_map):
+	def get_all_data(result, ids_map, revoked=False):
+		main_task = State(
+			task_id=result.id,
+			state='REVOKED' if revoked and result.state == 'PENDING' else result.state,
+			_source='celery'
+		)
+		debug(f"Main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
+		yield {'id': result.id, 'state': result.state, 'results': [main_task]}
+		yield from CeleryData.get_tasks_data(ids_map, revoked=revoked)
+
+	@staticmethod
+	def get_tasks_data(ids_map, revoked=False):
 		"""Get Celery results from main result object, AND all subtasks results.
 
 		Yields:
@@ -180,6 +189,8 @@
 			data = CeleryData.get_task_data(task_id, ids_map)
 			if not data:
 				continue
+			if revoked and data['state'] == 'PENDING':
+				data['state'] = 'REVOKED'
 			debug(
 				'POLL',
 				sub='celery.poll',
@@ -238,13 +249,18 @@
 		info = res.info
 
 		# Depending on the task state, info will be either an Exception (FAILURE), a list (SUCCESS), or a dict (RUNNING).
-		# - If it's an Exception, it's an unhandled error.
+		# - If it's an Exception, it's a TaskRevokedError or an unhandled error.
 		# - If it's a list, it's the task results.
 		# - If it's a dict, it's the custom user metadata.
 
 		if isinstance(info, Exception):
-
-
+			if isinstance(info, TaskRevokedError):
+				data['results'] = [Error(message='Task was revoked', _source=data['name'])]
+				data['state'] = 'REVOKED'
+				data['ready'] = True
+			else:
+				debug('unhandled exception', obj={'msg': str(info), 'tb': traceback_as_string(info)}, sub='celery.data', id=task_id)
+				raise info
 
 		elif isinstance(info, list):
 			data['results'] = info