secator 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150) hide show
  1. secator/.gitignore +162 -0
  2. secator/__init__.py +0 -0
  3. secator/celery.py +453 -0
  4. secator/celery_signals.py +138 -0
  5. secator/celery_utils.py +320 -0
  6. secator/cli.py +2035 -0
  7. secator/cli_helper.py +395 -0
  8. secator/click.py +87 -0
  9. secator/config.py +670 -0
  10. secator/configs/__init__.py +0 -0
  11. secator/configs/profiles/__init__.py +0 -0
  12. secator/configs/profiles/aggressive.yaml +8 -0
  13. secator/configs/profiles/all_ports.yaml +7 -0
  14. secator/configs/profiles/full.yaml +31 -0
  15. secator/configs/profiles/http_headless.yaml +7 -0
  16. secator/configs/profiles/http_record.yaml +8 -0
  17. secator/configs/profiles/insane.yaml +8 -0
  18. secator/configs/profiles/paranoid.yaml +8 -0
  19. secator/configs/profiles/passive.yaml +11 -0
  20. secator/configs/profiles/polite.yaml +8 -0
  21. secator/configs/profiles/sneaky.yaml +8 -0
  22. secator/configs/profiles/tor.yaml +5 -0
  23. secator/configs/scans/__init__.py +0 -0
  24. secator/configs/scans/domain.yaml +31 -0
  25. secator/configs/scans/host.yaml +23 -0
  26. secator/configs/scans/network.yaml +30 -0
  27. secator/configs/scans/subdomain.yaml +27 -0
  28. secator/configs/scans/url.yaml +19 -0
  29. secator/configs/workflows/__init__.py +0 -0
  30. secator/configs/workflows/cidr_recon.yaml +48 -0
  31. secator/configs/workflows/code_scan.yaml +29 -0
  32. secator/configs/workflows/domain_recon.yaml +46 -0
  33. secator/configs/workflows/host_recon.yaml +95 -0
  34. secator/configs/workflows/subdomain_recon.yaml +120 -0
  35. secator/configs/workflows/url_bypass.yaml +15 -0
  36. secator/configs/workflows/url_crawl.yaml +98 -0
  37. secator/configs/workflows/url_dirsearch.yaml +62 -0
  38. secator/configs/workflows/url_fuzz.yaml +68 -0
  39. secator/configs/workflows/url_params_fuzz.yaml +66 -0
  40. secator/configs/workflows/url_secrets_hunt.yaml +23 -0
  41. secator/configs/workflows/url_vuln.yaml +91 -0
  42. secator/configs/workflows/user_hunt.yaml +29 -0
  43. secator/configs/workflows/wordpress.yaml +38 -0
  44. secator/cve.py +718 -0
  45. secator/decorators.py +7 -0
  46. secator/definitions.py +168 -0
  47. secator/exporters/__init__.py +14 -0
  48. secator/exporters/_base.py +3 -0
  49. secator/exporters/console.py +10 -0
  50. secator/exporters/csv.py +37 -0
  51. secator/exporters/gdrive.py +123 -0
  52. secator/exporters/json.py +16 -0
  53. secator/exporters/table.py +36 -0
  54. secator/exporters/txt.py +28 -0
  55. secator/hooks/__init__.py +0 -0
  56. secator/hooks/gcs.py +80 -0
  57. secator/hooks/mongodb.py +281 -0
  58. secator/installer.py +694 -0
  59. secator/loader.py +128 -0
  60. secator/output_types/__init__.py +49 -0
  61. secator/output_types/_base.py +108 -0
  62. secator/output_types/certificate.py +78 -0
  63. secator/output_types/domain.py +50 -0
  64. secator/output_types/error.py +42 -0
  65. secator/output_types/exploit.py +58 -0
  66. secator/output_types/info.py +24 -0
  67. secator/output_types/ip.py +47 -0
  68. secator/output_types/port.py +55 -0
  69. secator/output_types/progress.py +36 -0
  70. secator/output_types/record.py +36 -0
  71. secator/output_types/stat.py +41 -0
  72. secator/output_types/state.py +29 -0
  73. secator/output_types/subdomain.py +45 -0
  74. secator/output_types/tag.py +69 -0
  75. secator/output_types/target.py +38 -0
  76. secator/output_types/url.py +112 -0
  77. secator/output_types/user_account.py +41 -0
  78. secator/output_types/vulnerability.py +101 -0
  79. secator/output_types/warning.py +30 -0
  80. secator/report.py +140 -0
  81. secator/rich.py +130 -0
  82. secator/runners/__init__.py +14 -0
  83. secator/runners/_base.py +1240 -0
  84. secator/runners/_helpers.py +218 -0
  85. secator/runners/celery.py +18 -0
  86. secator/runners/command.py +1178 -0
  87. secator/runners/python.py +126 -0
  88. secator/runners/scan.py +87 -0
  89. secator/runners/task.py +81 -0
  90. secator/runners/workflow.py +168 -0
  91. secator/scans/__init__.py +29 -0
  92. secator/serializers/__init__.py +8 -0
  93. secator/serializers/dataclass.py +39 -0
  94. secator/serializers/json.py +45 -0
  95. secator/serializers/regex.py +25 -0
  96. secator/tasks/__init__.py +8 -0
  97. secator/tasks/_categories.py +487 -0
  98. secator/tasks/arjun.py +113 -0
  99. secator/tasks/arp.py +53 -0
  100. secator/tasks/arpscan.py +70 -0
  101. secator/tasks/bbot.py +372 -0
  102. secator/tasks/bup.py +118 -0
  103. secator/tasks/cariddi.py +193 -0
  104. secator/tasks/dalfox.py +87 -0
  105. secator/tasks/dirsearch.py +84 -0
  106. secator/tasks/dnsx.py +186 -0
  107. secator/tasks/feroxbuster.py +93 -0
  108. secator/tasks/ffuf.py +135 -0
  109. secator/tasks/fping.py +85 -0
  110. secator/tasks/gau.py +102 -0
  111. secator/tasks/getasn.py +60 -0
  112. secator/tasks/gf.py +36 -0
  113. secator/tasks/gitleaks.py +96 -0
  114. secator/tasks/gospider.py +84 -0
  115. secator/tasks/grype.py +109 -0
  116. secator/tasks/h8mail.py +75 -0
  117. secator/tasks/httpx.py +167 -0
  118. secator/tasks/jswhois.py +36 -0
  119. secator/tasks/katana.py +203 -0
  120. secator/tasks/maigret.py +87 -0
  121. secator/tasks/mapcidr.py +42 -0
  122. secator/tasks/msfconsole.py +179 -0
  123. secator/tasks/naabu.py +85 -0
  124. secator/tasks/nmap.py +487 -0
  125. secator/tasks/nuclei.py +151 -0
  126. secator/tasks/search_vulns.py +225 -0
  127. secator/tasks/searchsploit.py +109 -0
  128. secator/tasks/sshaudit.py +299 -0
  129. secator/tasks/subfinder.py +48 -0
  130. secator/tasks/testssl.py +283 -0
  131. secator/tasks/trivy.py +130 -0
  132. secator/tasks/trufflehog.py +240 -0
  133. secator/tasks/urlfinder.py +100 -0
  134. secator/tasks/wafw00f.py +106 -0
  135. secator/tasks/whois.py +34 -0
  136. secator/tasks/wpprobe.py +116 -0
  137. secator/tasks/wpscan.py +202 -0
  138. secator/tasks/x8.py +94 -0
  139. secator/tasks/xurlfind3r.py +83 -0
  140. secator/template.py +294 -0
  141. secator/thread.py +24 -0
  142. secator/tree.py +196 -0
  143. secator/utils.py +922 -0
  144. secator/utils_test.py +297 -0
  145. secator/workflows/__init__.py +29 -0
  146. secator-0.22.0.dist-info/METADATA +447 -0
  147. secator-0.22.0.dist-info/RECORD +150 -0
  148. secator-0.22.0.dist-info/WHEEL +4 -0
  149. secator-0.22.0.dist-info/entry_points.txt +2 -0
  150. secator-0.22.0.dist-info/licenses/LICENSE +60 -0
@@ -0,0 +1,138 @@
1
+ import os
2
+ import signal
3
+ import sys
4
+ import threading
5
+ from pathlib import Path
6
+
7
+ from celery import signals
8
+
9
+ from secator.config import CONFIG
10
+ from secator.output_types import Info
11
+ from secator.rich import console
12
+
13
# Idle timeout in seconds after which an idle worker shuts itself down; -1 disables it.
IDLE_TIMEOUT = CONFIG.celery.worker_kill_after_idle_seconds
# Heuristic: true when this process was launched as a Celery worker
# (the CLI args mention the app module or the 'worker' subcommand).
IN_CELERY_WORKER_PROCESS = sys.argv and ('secator.celery.app' in sys.argv or 'worker' in sys.argv)

# File-based state management system
STATE_DIR = Path("/tmp/celery_state")
STATE_DIR.mkdir(exist_ok=True, parents=True)
19
+
20
+
21
def get_lock_file_path():
	"""Return the per-worker lock file path, keyed by the worker's name (or pid fallback)."""
	name = os.environ.get("WORKER_NAME", f"unknown_{os.getpid()}")
	return Path(f"/tmp/celery_worker_{name}.lock")
24
+
25
+
26
def set_task_running(task_id):
	"""Mark that a task is running in current worker.

	The task id written to the lock file doubles as the worker's "busy" flag.
	"""
	get_lock_file_path().write_text(task_id)
30
+
31
+
32
def clear_task_running():
	"""Clear the task running state by removing the worker's lock file, if any."""
	lock = get_lock_file_path()
	if lock.exists():
		lock.unlink()
37
+
38
+
39
def is_task_running():
	"""Check if a task is currently running in this worker (lock file present)."""
	lock_file = get_lock_file_path()
	return lock_file.exists()
42
+
43
+
44
def kill_worker(parent=False):
	"""Kill current worker using its pid by sending a SIGTERM to Celery master process.

	Args:
		parent (bool): Signal the parent process (Celery master) instead of this one.
	"""
	worker_name = os.environ.get('WORKER_NAME', 'unknown')

	# Guard clause: never kill the worker while a task is in flight.
	if is_task_running():
		console.print(Info(message=f'Cancelling worker shutdown of {worker_name} since a task is running'))
		return

	pid = os.getppid() if parent else os.getpid()
	console.print(Info(message=f'Sending SIGTERM to worker {worker_name} with pid {pid}'))
	os.kill(pid, signal.SIGTERM)
55
+
56
+
57
def setup_idle_timer(timeout):
	"""Setup a timer to kill the worker after being idle.

	Args:
		timeout (int): Idle delay in seconds; -1 disables the timer entirely.
	"""
	if timeout == -1:
		return

	console.print(Info(message=f'Starting inactivity timer for {timeout} seconds ...'))
	idle_timer = threading.Timer(timeout, kill_worker)
	idle_timer.daemon = True  # Make sure timer is killed when worker exits
	idle_timer.start()
66
+
67
+
68
def setup_logging(*args, **kwargs):
	"""Override celery's logging setup to prevent it from altering our settings.
	github.com/celery/celery/issues/1867
	"""
	return None
73
+
74
+
75
def capture_worker_name(sender, instance, **kwargs):
	"""Persist the worker name in the environment so other handlers can look it up."""
	os.environ["WORKER_NAME"] = str(sender)
77
+
78
+
79
def worker_init_handler(**kwargs):
	"""Arm the idle-shutdown timer when the worker becomes ready (worker_ready signal)."""
	# -1 means idle-based shutdown is disabled in config
	if IDLE_TIMEOUT != -1:
		setup_idle_timer(IDLE_TIMEOUT)
82
+
83
+
84
def task_prerun_handler(task_id, **kwargs):
	"""Record the running task id in the lock file before the task starts (task_prerun signal)."""
	set_task_running(task_id)
87
+
88
+
89
def task_postrun_handler(**kwargs):
	"""Tear down per-task state after a task finishes (task_postrun signal).

	Clears the lock file, optionally shuts the worker down when
	celery.worker_kill_after_task is set, otherwise re-arms the idle timer.
	"""
	# Mark that no task is running
	clear_task_running()

	# Get sender name from kwargs
	sender_name = kwargs['sender'].name
	# console.print(Info(message=f'Task postrun handler --> Sender name: {sender_name}'))

	# Only self-terminate after secator/api tasks, not Celery-internal ones
	if CONFIG.celery.worker_kill_after_task and (sender_name.startswith('secator.') or sender_name.startswith('api.')):
		worker_name = os.environ.get('WORKER_NAME', 'unknown')
		console.print(Info(message=f'Shutdown worker {worker_name} since config celery.worker_kill_after_task is set.'))
		# parent=True targets the Celery master process, not this child
		kill_worker(parent=True)
		return

	# Set up a new idle timer
	if IDLE_TIMEOUT != -1:
		console.print(Info(message=f'Reset inactivity timer to {IDLE_TIMEOUT} seconds'))
		setup_idle_timer(IDLE_TIMEOUT)
107
+
108
+
109
def task_revoked_handler(request=None, **kwargs):
	"""Handle revoked tasks by clearing the task running state (task_revoked signal)."""
	console.print(Info(message='Task was revoked, clearing running state'))
	clear_task_running()

	# Set up a new idle timer (revocation counts as the end of activity)
	if IDLE_TIMEOUT != -1:
		console.print(Info(message=f'Reset inactivity timer to {IDLE_TIMEOUT} seconds after task revocation'))
		setup_idle_timer(IDLE_TIMEOUT)
118
+
119
+
120
def worker_shutdown_handler(**kwargs):
	"""Cleanup lock files when worker shuts down (worker_shutdown signal)."""
	# Consistency fix: this previously duplicated the exists()/unlink() logic
	# inline; delegate to clear_task_running() so lock-file removal lives in
	# exactly one place.
	clear_task_running()
125
+
126
+
127
def setup_handlers():
	"""Connect Celery signal handlers according to the active configuration."""
	# Replace Celery's logging setup with a no-op so it can't clobber ours
	if CONFIG.celery.override_default_logging:
		signals.setup_logging.connect(setup_logging)

	# Register common handlers when either task‐ or idle‐based termination is enabled
	if CONFIG.celery.worker_kill_after_task or CONFIG.celery.worker_kill_after_idle_seconds != -1:
		signals.celeryd_after_setup.connect(capture_worker_name)
		signals.task_postrun.connect(task_postrun_handler)
		signals.task_prerun.connect(task_prerun_handler)
		signals.task_revoked.connect(task_revoked_handler)
		signals.worker_ready.connect(worker_init_handler)
		signals.worker_shutdown.connect(worker_shutdown_handler)
@@ -0,0 +1,320 @@
1
+ import gc
2
+
3
+ from contextlib import nullcontext
4
+ from time import sleep
5
+
6
+ import kombu
7
+ import kombu.exceptions
8
+
9
+ from celery.result import AsyncResult, GroupResult
10
+ from celery.exceptions import TaskRevokedError
11
+ from greenlet import GreenletExit
12
+ from rich.panel import Panel
13
+ from rich.padding import Padding
14
+
15
+ from rich.progress import Progress as RichProgress, SpinnerColumn, TextColumn, TimeElapsedColumn
16
+ from secator.config import CONFIG
17
+ from secator.definitions import STATE_COLORS
18
+ from secator.output_types import Error, Info, State
19
+ from secator.rich import console
20
+ from secator.utils import debug, traceback_as_string
21
+
22
+
23
class CeleryData(object):
	"""Utility to simplify tracking a Celery task and all of its subtasks."""

	@staticmethod  # added for consistency with every other method; call sites (CeleryData.iter_results(...)) are unchanged
	def iter_results(
		result,
		ids_map=None,
		description=True,
		revoked=False,
		refresh_interval=CONFIG.runners.poll_frequency,
		print_remote_info=True,
		print_remote_title='Results'
	):
		"""Generator to get results from Celery task.

		Args:
			result (Union[AsyncResult, GroupResult]): Celery result.
			ids_map (dict): Map of known task ids to their metadata. Defaults to a
				fresh empty dict (previously a shared mutable default, which leaked
				dynamically-added subtasks across calls).
			description (bool): Whether to show task description.
			revoked (bool): Whether the task was revoked.
			refresh_interval (int): Refresh interval.
			print_remote_info (bool): Whether to display live results.
			print_remote_title (str): Title for the progress panel.

		Yields:
			dict: Subtasks state and results.
		"""
		# Fix: ids_map={} as a default would be mutated below and shared across calls
		if ids_map is None:
			ids_map = {}

		# Display live results if print_remote_info is set
		if print_remote_info:
			class PanelProgress(RichProgress):
				def get_renderables(self):
					yield Padding(Panel(
						self.make_tasks_table(self.tasks),
						title=print_remote_title,
						border_style='bold gold3',
						expand=False,
						highlight=True), pad=(2, 0, 0, 0))

			progress = PanelProgress(
				SpinnerColumn('dots'),
				TextColumn('{task.fields[descr]} ') if description else '',
				TextColumn('[bold cyan]{task.fields[full_name]}[/]'),
				TextColumn('{task.fields[state]:<20}'),
				TimeElapsedColumn(),
				TextColumn('{task.fields[count]}'),
				TextColumn('{task.fields[progress]}%'),
				# TextColumn('\[[bold magenta]{task.fields[id]:<30}[/]]'), # noqa: W605
				auto_refresh=False,
				transient=False,
				console=console,
				# redirect_stderr=True,
				# redirect_stdout=False
			)
		else:
			progress = nullcontext()

		with progress:

			# Make initial progress
			if print_remote_info:
				progress_cache = CeleryData.init_progress(progress, ids_map)

			# Get live results and print progress
			for data in CeleryData.poll(result, ids_map, refresh_interval, revoked):
				# NOTE: this loop variable shadows the outer `result` argument;
				# `result` is not used again in this function after the loop starts.
				for result in data['results']:

					# Add dynamic subtask to ids_map so its progress row can be tracked
					if isinstance(result, Info):
						message = result.message
						if message.startswith('Celery chunked task created: '):
							task_id = message.split(' ')[-1]
							ids_map[task_id] = {
								'id': task_id,
								'name': result._source,
								'full_name': result._source,
								'descr': '',
								'state': 'PENDING',
								'count': 0,
								'progress': 0
							}
					yield result
					del result

				if print_remote_info:
					task_id = data['id']
					if task_id not in progress_cache:
						if CONFIG.runners.show_subtasks:
							progress_cache[task_id] = progress.add_task('', advance=0, **data)
						else:
							continue
					progress_id = progress_cache[task_id]
					CeleryData.update_progress(progress, progress_id, data)
					progress.refresh()

				# Garbage collect between polls
				del data
				gc.collect()

			# Update all tasks to 100 %
			if print_remote_info:
				for progress_id in progress_cache.values():
					progress.update(progress_id, advance=100)
				progress.refresh()

	@staticmethod
	def init_progress(progress, ids_map):
		"""Create one progress row per known task; return {task_id: progress row id}."""
		cache = {}
		for task_id, data in ids_map.items():
			pdata = data.copy()
			state = data['state']
			# Colorize the state cell using the project's per-state color map
			pdata['state'] = f'[{STATE_COLORS[state]}]{state}[/]'
			id = progress.add_task('', advance=0, **pdata)
			cache[task_id] = id
		return cache

	@staticmethod
	def update_progress(progress, progress_id, data):
		"""Update rich progress with fresh data."""
		pdata = data.copy()
		state = data['state']
		pdata['state'] = f'[{STATE_COLORS[state]}]{state}[/]'
		# Drop falsy fields so stale values are not overwritten with empties
		pdata = {k: v for k, v in pdata.items() if v}
		progress.update(progress_id, **pdata)

	@staticmethod
	def poll(result, ids_map, refresh_interval, revoked=False):
		"""Poll Celery subtasks results in real-time. Fetch task metadata and partial results from each task that runs.

		Yields:
			dict: Subtasks state and results.
		"""
		exit_loop = False
		while not exit_loop:
			try:
				yield from CeleryData.get_all_data(result, ids_map, revoked=revoked)
				if result.ready() or revoked:
					debug('result is ready', sub='celery.poll', id=result.id)
					exit_loop = True
			except (KeyboardInterrupt, GreenletExit):
				# Flush whatever data is available before propagating the interrupt
				debug('encounted KeyboardInterrupt or GreenletExit', sub='celery.poll')
				yield from CeleryData.get_all_data(result, ids_map, revoked=revoked)
				raise
			except Exception as e:
				# Best-effort polling: log and retry on the next tick
				error = Error.from_exception(e)
				debug(repr(error), sub='celery.poll')
				pass
			finally:
				sleep(refresh_interval)

	@staticmethod
	def get_all_data(result, ids_map, revoked=False):
		"""Yield the main task's state, then the state/results of every known subtask."""
		main_task = State(
			task_id=result.id,
			# A revoked-but-never-started task stays PENDING remotely; surface it as REVOKED
			state='REVOKED' if revoked and result.state == 'PENDING' else result.state,
			_source='celery'
		)
		debug(f"Main task state: {result.id} - {result.state}", sub='celery.poll', verbose=True)
		yield {'id': result.id, 'state': result.state, 'results': [main_task]}
		yield from CeleryData.get_tasks_data(ids_map, revoked=revoked)

	@staticmethod
	def get_tasks_data(ids_map, revoked=False):
		"""Get Celery results from main result object, AND all subtasks results.

		Yields:
			dict: Subtasks state and results.
		"""
		# Snapshot the keys: get_task_data may add entries to ids_map while we iterate
		task_ids = list(ids_map.keys())
		for task_id in task_ids:
			data = CeleryData.get_task_data(task_id, ids_map)
			if not data:
				continue
			if revoked and data['state'] == 'PENDING':
				data['state'] = 'REVOKED'
			debug(
				'POLL',
				sub='celery.poll',
				id=data['id'],
				obj={data['full_name']: data['state'], 'count': data['count']},
				verbose=True
			)
			yield data

		# Calculate and yield parent task progress
		# if not datas:
		# 	return
		# total = len(datas)
		# count_finished = sum([i['ready'] for i in datas if i])
		# percent = int(count_finished * 100 / total) if total > 0 else 0
		# parent_id = [c for c in ids_map.values() if c['full_name'] == datas[-1]]
		# data['progress'] = percent
		# yield data

	@staticmethod
	def get_task_data(task_id, ids_map):
		"""Get task info.

		Args:
			task_id (str): Celery task id.
			ids_map (dict): Map of known task ids to their metadata (mutated in place).

		Returns:
			dict: Task info (id, name, state, results, chunk_info, count, error, ready).
		"""

		# Get task data
		data = ids_map.get(task_id, {})
		if not data:
			ids_map[task_id] = {}
		elif data.get('ready', False):
			# Already finished on a previous poll: nothing new to report
			return

		# if not data:
		# 	debug('task not in ids_map', sub='debug.celery', id=task_id)
		# 	return

		# Get remote result
		res = AsyncResult(task_id)
		if not res:
			debug('empty response', sub='celery.data', id=task_id)
			return

		# Set up task state
		data.update({
			'state': res.state,
			'ready': False,
			'results': []
		})

		# Get remote task data
		info = res.info

		# Depending on the task state, info will be either an Exception (FAILURE), a list (SUCCESS), or a dict (RUNNING).
		# - If it's an Exception, it's a TaskRevokedError or an unhandled error.
		# - If it's a list, it's the task results.
		# - If it's a dict, it's the custom user metadata.

		if isinstance(info, Exception):
			if isinstance(info, TaskRevokedError):
				data['results'] = [Error(message='Task was revoked', _source=data['name'])]
				data['state'] = 'REVOKED'
				data['ready'] = True
			else:
				debug('unhandled exception', obj={'msg': str(info), 'tb': traceback_as_string(info)}, sub='celery.data', id=task_id)
				raise info

		elif isinstance(info, list):
			data['results'] = info
			errors = [e for e in info if e._type == 'error']
			status = 'FAILURE' if errors else 'SUCCESS'
			# NOTE(review): 'name' may be absent for tasks discovered here (fresh {}
			# entry above); a KeyError would be swallowed by poll()'s broad except — confirm
			data['count'] = len([c for c in info if c._source.startswith(data['name'])])
			data['state'] = status

		elif isinstance(info, dict):
			# RUNNING: info carries custom user metadata published by the task
			data.update(info)

		# Set ready flag and progress
		ready = data['state'] in ['FAILURE', 'SUCCESS', 'REVOKED']
		data['ready'] = ready
		ids_map[task_id]['ready'] = data['ready']
		if data['ready']:
			data['progress'] = 100
		elif data['results']:
			progresses = [e for e in data['results'] if e._type == 'progress' and e._source == data['full_name']]
			if progresses:
				data['progress'] = progresses[-1].percent

		debug('data', obj=data, sub='celery.data', id=task_id, verbose=True)
		return data

	@staticmethod
	def get_task_ids(result, ids=None):
		"""Get all Celery task ids recursively.

		Args:
			result (Union[AsyncResult, GroupResult]): Celery result object.
			ids (list): Accumulator list of ids, mutated in place. Defaults to a
				fresh empty list (previously a shared mutable default, which made
				ids accumulate across unrelated calls).
		"""
		# Fix: ids=[] as a default would be appended to and shared across calls
		if ids is None:
			ids = []

		if result is None:
			return

		try:
			if isinstance(result, GroupResult):
				CeleryData.get_task_ids(result.parent, ids=ids)

			elif isinstance(result, AsyncResult):
				if result.id not in ids:
					ids.append(result.id)

			if hasattr(result, 'children'):
				children = result.children
				if isinstance(children, list):
					for child in children:
						CeleryData.get_task_ids(child, ids=ids)

			# Browse parent
			if hasattr(result, 'parent') and result.parent:
				CeleryData.get_task_ids(result.parent, ids=ids)

		except kombu.exceptions.DecodeError:
			debug('kombu decode error', sub='celery.data')
			return