secator 0.5.2__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
This version of secator has been flagged as a potentially problematic release.
- secator/celery.py +160 -185
- secator/celery_utils.py +268 -0
- secator/cli.py +327 -106
- secator/config.py +27 -11
- secator/configs/workflows/host_recon.yaml +5 -3
- secator/configs/workflows/port_scan.yaml +7 -3
- secator/configs/workflows/url_bypass.yaml +10 -0
- secator/configs/workflows/url_vuln.yaml +1 -1
- secator/decorators.py +169 -92
- secator/definitions.py +10 -3
- secator/exporters/__init__.py +7 -5
- secator/exporters/console.py +10 -0
- secator/exporters/csv.py +27 -19
- secator/exporters/gdrive.py +16 -11
- secator/exporters/json.py +3 -1
- secator/exporters/table.py +30 -2
- secator/exporters/txt.py +20 -16
- secator/hooks/gcs.py +53 -0
- secator/hooks/mongodb.py +54 -28
- secator/output_types/__init__.py +29 -11
- secator/output_types/_base.py +11 -1
- secator/output_types/error.py +36 -0
- secator/output_types/exploit.py +1 -1
- secator/output_types/info.py +24 -0
- secator/output_types/ip.py +7 -0
- secator/output_types/port.py +8 -1
- secator/output_types/progress.py +6 -1
- secator/output_types/record.py +3 -1
- secator/output_types/stat.py +33 -0
- secator/output_types/tag.py +6 -4
- secator/output_types/url.py +6 -3
- secator/output_types/vulnerability.py +3 -2
- secator/output_types/warning.py +24 -0
- secator/report.py +55 -23
- secator/rich.py +44 -39
- secator/runners/_base.py +622 -635
- secator/runners/_helpers.py +5 -91
- secator/runners/celery.py +18 -0
- secator/runners/command.py +364 -211
- secator/runners/scan.py +8 -24
- secator/runners/task.py +21 -55
- secator/runners/workflow.py +41 -40
- secator/scans/__init__.py +28 -0
- secator/serializers/dataclass.py +6 -0
- secator/serializers/json.py +10 -5
- secator/serializers/regex.py +12 -4
- secator/tasks/_categories.py +6 -3
- secator/tasks/bbot.py +293 -0
- secator/tasks/bup.py +98 -0
- secator/tasks/cariddi.py +38 -49
- secator/tasks/dalfox.py +3 -0
- secator/tasks/dirsearch.py +12 -23
- secator/tasks/dnsx.py +49 -30
- secator/tasks/dnsxbrute.py +2 -0
- secator/tasks/feroxbuster.py +8 -17
- secator/tasks/ffuf.py +3 -2
- secator/tasks/fping.py +3 -3
- secator/tasks/gau.py +5 -0
- secator/tasks/gf.py +2 -2
- secator/tasks/gospider.py +4 -0
- secator/tasks/grype.py +9 -9
- secator/tasks/h8mail.py +31 -41
- secator/tasks/httpx.py +58 -21
- secator/tasks/katana.py +18 -22
- secator/tasks/maigret.py +26 -24
- secator/tasks/mapcidr.py +2 -3
- secator/tasks/msfconsole.py +4 -16
- secator/tasks/naabu.py +3 -1
- secator/tasks/nmap.py +50 -35
- secator/tasks/nuclei.py +9 -2
- secator/tasks/searchsploit.py +17 -9
- secator/tasks/subfinder.py +5 -1
- secator/tasks/wpscan.py +79 -93
- secator/template.py +61 -45
- secator/thread.py +24 -0
- secator/utils.py +330 -80
- secator/utils_test.py +48 -23
- secator/workflows/__init__.py +28 -0
- {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/METADATA +12 -6
- secator-0.7.0.dist-info/RECORD +115 -0
- {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/WHEEL +1 -1
- secator-0.5.2.dist-info/RECORD +0 -101
- {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/entry_points.txt +0 -0
- {secator-0.5.2.dist-info → secator-0.7.0.dist-info}/licenses/LICENSE +0 -0
secator/output_types/progress.py
CHANGED
@@ -8,7 +8,7 @@ from secator.utils import rich_to_ansi
 @dataclass
 class Progress(OutputType):
 	duration: str
-	percent: int
+	percent: int = 0
 	errors: list = field(default_factory=list)
 	extra_data: dict = field(default_factory=dict)
 	_source: str = field(default='', repr=True)
@@ -23,6 +23,11 @@ class Progress(OutputType):
 	_table_fields = ['percent', 'duration']
 	_sort_by = ('percent',)
 
+	def __post_init__(self):
+		super().__post_init__()
+		if not 0 <= self.percent <= 100:
+			self.percent = 0
+
 	def __str__(self) -> str:
 		return f'{self.percent}%'
 
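With `percent` now defaulting to 0 and `__post_init__` discarding out-of-range values, malformed progress updates can no longer yield nonsensical percentages. A minimal standalone sketch of the same guard (plain dataclass, not importing secator):

from dataclasses import dataclass

@dataclass
class ProgressSketch:
    duration: str
    percent: int = 0

    def __post_init__(self):
        # Mirrors the guard added above: anything outside 0-100 falls back to 0.
        if not 0 <= self.percent <= 100:
            self.percent = 0

print(ProgressSketch(duration='1s', percent=150).percent)  # 0
print(ProgressSketch(duration='1s', percent=42).percent)   # 42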
secator/output_types/record.py
CHANGED
@@ -28,7 +28,9 @@ class Record(OutputType):
 		return self.name
 
 	def __repr__(self) -> str:
-		s = f'🎤 [bold white]{self.name}[/] \[[green]{self.type}[/]]'
+		s = f'🎤 [bold white]{self.name}[/] \[[green]{self.type}[/]]'
+		if self.host:
+			s += f' \[[magenta]{self.host}[/]]'
 		if self.extra_data:
 			s += ' \[[bold yellow]' + ','.join(f'{k}={v}' for k, v in self.extra_data.items()) + '[/]]'
 		return rich_to_ansi(s)
secator/output_types/stat.py
ADDED
@@ -0,0 +1,33 @@
+import time
+from dataclasses import dataclass, field
+
+from secator.output_types import OutputType
+from secator.utils import rich_to_ansi
+
+
+@dataclass
+class Stat(OutputType):
+	name: str
+	pid: int
+	cpu: int
+	memory: int
+	net_conns: int = field(default=None, repr=True)
+	extra_data: dict = field(default_factory=dict)
+	_source: str = field(default='', repr=True)
+	_type: str = field(default='stat', repr=True)
+	_timestamp: int = field(default_factory=lambda: time.time(), compare=False)
+	_uuid: str = field(default='', repr=True, compare=False)
+	_context: dict = field(default_factory=dict, repr=True, compare=False)
+	_tagged: bool = field(default=False, repr=True, compare=False)
+	_duplicate: bool = field(default=False, repr=True, compare=False)
+	_related: list = field(default_factory=list, compare=False)
+
+	_table_fields = ['name', 'pid', 'cpu', 'memory']
+	_sort_by = ('name', 'pid')
+
+	def __repr__(self) -> str:
+		s = f'[dim yellow3]📊 {self.name} \[pid={self.pid}] \[cpu={self.cpu:.2f}%] \[memory={self.memory:.2f}%]'
+		if self.net_conns:
+			s += f' \[connections={self.net_conns}]'
+		s += ' [/]'
+		return rich_to_ansi(s)
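`Stat` is a new output type for per-process resource usage emitted while a command runs. The sketch below only relies on the field names shown above; using psutil to fill them, and instantiating the type directly outside a runner, are both assumptions:

import os

import psutil  # assumption: illustrative only, not necessarily what secator uses internally

from secator.output_types.stat import Stat  # module path taken from the diff above

proc = psutil.Process(os.getpid())
stat = Stat(
    name=proc.name(),
    pid=proc.pid,
    cpu=proc.cpu_percent(interval=0.1),
    memory=proc.memory_percent(),
)
print(repr(stat))  # e.g. 📊 python [pid=1234] [cpu=1.50%] [memory=0.42%]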
secator/output_types/tag.py
CHANGED
@@ -2,7 +2,7 @@ import time
 from dataclasses import dataclass, field
 
 from secator.output_types import OutputType
-from secator.utils import rich_to_ansi
+from secator.utils import rich_to_ansi, trim_string
 
 
 @dataclass
@@ -37,9 +37,11 @@ class Tag(OutputType):
 			sep = ' '
 			if not v:
 				continue
-			if …
-				v = v …
-
+			if isinstance(v, str):
+				v = trim_string(v, max_length=1000)
+				if len(v) > 1000:
+					v = v.replace('\n', '\n' + sep)
+					sep = '\n '
 			ed += f'\n [dim red]{k}[/]:{sep}[dim yellow]{v}[/]'
 		if ed:
 			s += ed
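Both `tag.py` and `url.py` now pass long strings through `secator.utils.trim_string` before rendering. Its exact truncation format is not visible in this diff, so the snippet below only exercises the call signature used above without assuming what the trimmed value looks like:

from secator.utils import trim_string  # only the signature used in the diff is assumed

long_value = 'A' * 5000
trimmed = trim_string(long_value, max_length=1000)
print(len(long_value), '->', len(trimmed))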
secator/output_types/url.py
CHANGED
@@ -4,7 +4,8 @@ from dataclasses import dataclass, field
 from secator.definitions import (CONTENT_LENGTH, CONTENT_TYPE, STATUS_CODE,
                                  TECH, TIME, TITLE, URL, WEBSERVER)
 from secator.output_types import OutputType
-from secator.utils import rich_to_ansi
+from secator.utils import rich_to_ansi, trim_string
+from secator.config import CONFIG
 
 
 @dataclass
@@ -64,7 +65,7 @@ class Url(OutputType):
 		else:
 			s += f' \[[red]{self.status_code}[/]]'
 		if self.title:
-			s += f' \[[green]{self.title}[/]]'
+			s += f' \[[green]{trim_string(self.title)}[/]]'
 		if self.webserver:
 			s += f' \[[magenta]{self.webserver}[/]]'
 		if self.tech:
@@ -73,7 +74,9 @@ class Url(OutputType):
 		if self.content_type:
 			s += f' \[[magenta]{self.content_type}[/]]'
 		if self.content_length:
-			s += f' \[[magenta]{self.content_length}[/]]'
+			cl = str(self.content_length)
+			cl += '[bold red]+[/]' if self.content_length == CONFIG.http.response_max_size_bytes else ''
+			s += f' \[[magenta]{cl}[/]]'
 		if self.screenshot_path:
 			s += f' \[[magenta]{self.screenshot_path}[/]]'
 		return rich_to_ansi(s)
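The URL repr now flags responses whose `content_length` sits exactly at `CONFIG.http.response_max_size_bytes` with a red `+`, hinting that the body was cut off at the configured cap rather than fully read. A standalone sketch of that marker logic, with a hard-coded stand-in for the config value:

# Illustrative only: mirrors the marker logic added above with a hard-coded cap.
RESPONSE_MAX_SIZE_BYTES = 100_000  # stand-in for CONFIG.http.response_max_size_bytes

def content_length_label(content_length: int) -> str:
    label = str(content_length)
    if content_length == RESPONSE_MAX_SIZE_BYTES:
        label += '+'  # body hit the cap, so the real length is probably larger
    return label

print(content_length_label(4096))     # 4096
print(content_length_label(100_000))  # 100000+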
secator/output_types/vulnerability.py
CHANGED
@@ -58,7 +58,8 @@ class Vulnerability(OutputType):
 			'unknown': 5,
 			None: 6
 		}
-		self.severity_nb = severity_map[self.severity]
+		self.severity = self.severity.lower()  # normalize severity
+		self.severity_nb = severity_map.get(self.severity, 6)
 		self.confidence_nb = severity_map[self.confidence]
 		if len(self.references) > 0:
 			self.reference = self.references[0]
@@ -78,7 +79,7 @@ class Vulnerability(OutputType):
 			'info': 'magenta',
 			'unknown': 'dim magenta'
 		}
-		c = colors[self.severity]
+		c = colors.get(self.severity, 'dim magenta')
 		s = f'🚨 \[[green]{self.name} [link={self.reference}]🡕[/link][/]] \[[{c}]{self.severity}[/]] {self.matched_at}'
 		if tags:
 			tags_str = ','.join(tags)
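Severity strings are now lower-cased before the lookup, and unrecognized severities fall back to rank 6 and the `dim magenta` color instead of raising a `KeyError`. A standalone sketch of that fallback (only the map entries visible in the hunk are included):

# Only the entries visible in the hunk above; the real maps contain the other severities too.
severity_map = {'unknown': 5, None: 6}
colors = {'info': 'magenta', 'unknown': 'dim magenta'}

severity = 'HIGH'.lower()                    # normalization added in the diff
severity_nb = severity_map.get(severity, 6)  # unknown severities no longer raise KeyError
color = colors.get(severity, 'dim magenta')  # same fallback idea for the repr color
print(severity, severity_nb, color)          # high 6 dim magenta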
secator/output_types/warning.py
ADDED
@@ -0,0 +1,24 @@
+from dataclasses import dataclass, field
+import time
+from secator.output_types import OutputType
+from secator.utils import rich_to_ansi
+
+
+@dataclass
+class Warning(OutputType):
+	message: str
+	task_id: str = field(default='', compare=False)
+	_source: str = field(default='', repr=True)
+	_type: str = field(default='warning', repr=True)
+	_timestamp: int = field(default_factory=lambda: time.time(), compare=False)
+	_uuid: str = field(default='', repr=True, compare=False)
+	_context: dict = field(default_factory=dict, repr=True, compare=False)
+	_duplicate: bool = field(default=False, repr=True, compare=False)
+	_related: list = field(default_factory=list, compare=False)
+
+	_table_fields = ['task_name', 'message']
+	_sort_by = ('_timestamp',)
+
+	def __repr__(self):
+		s = f"[orange4]⚠ {self.message}[/]"
+		return rich_to_ansi(s)
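`Warning` is a message-only output type for surfacing non-fatal runner issues (note that inside that module it shadows Python's built-in `Warning`). A minimal construction sketch, assuming the type can be instantiated directly outside a runner:

from secator.output_types.warning import Warning  # module path taken from the diff above

w = Warning(message='rate limit hit, backing off', _source='httpx')
print(repr(w))  # e.g. ⚠ rate limit hit, backing off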
secator/report.py
CHANGED
@@ -1,9 +1,31 @@
 import operator
 
 from secator.config import CONFIG
-from secator.output_types import …
-from secator.utils import merge_opts, get_file_timestamp
+from secator.output_types import FINDING_TYPES, OutputType
+from secator.utils import merge_opts, get_file_timestamp, traceback_as_string
 from secator.rich import console
+from secator.runners._helpers import extract_from_results
+
+import concurrent.futures
+from threading import Lock
+
+
+def remove_duplicates(objects):
+	unique_objects = []
+	lock = Lock()
+
+	def add_if_unique(obj):
+		nonlocal unique_objects
+		with lock:
+			# Perform linear search to check for duplicates
+			if all(obj != existing_obj for existing_obj in unique_objects):
+				unique_objects.append(obj)
+
+	with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
+		# Execute the function concurrently for each object
+		executor.map(add_if_unique, objects)
+
+	return unique_objects
 
 
 # TODO: initialize from data, not from runner
@@ -29,54 +51,64 @@ class Report:
 				report_cls(self).send()
 			except Exception as e:
 				console.print(
-					f'Could not create exporter {report_cls.__name__} for {self.__class__.__name__}: …
-					…
+					f'[bold red]Could not create exporter {report_cls.__name__} for {self.__class__.__name__}: '
+					f'{str(e)}[/]\n[dim]{traceback_as_string(e)}[/]',
+				)
 
-	def build(self):
+	def build(self, extractors=[], dedupe=False):
 		# Trim options
 		from secator.decorators import DEFAULT_CLI_OPTIONS
 		opts = merge_opts(self.runner.config.options, self.runner.run_opts)
 		opts = {
 			k: v for k, v in opts.items()
-			if k not in DEFAULT_CLI_OPTIONS
-			and not k.startswith('print_')
+			if k not in DEFAULT_CLI_OPTIONS and k not in self.runner.print_opts
 			and v is not None
 		}
+		runner_fields = {
+			'name',
+			'status',
+			'targets',
+			'start_time',
+			'end_time',
+			'elapsed',
+			'elapsed_human',
+			'run_opts',
+			'results_count'
+		}
 
 		# Prepare report structure
 		data = {
-			'info': {
-				…
-				'runner': self.runner.__class__.__name__,
-				'name': self.runner.config.name,
-				'targets': self.runner.targets,
-				'total_time': str(self.runner.elapsed),
-				'total_human': self.runner.elapsed_human,
-				'opts': opts,
-			},
-			'results': {},
+			'info': {k: v for k, v in self.runner.toDict().items() if k in runner_fields},
+			'results': {}
 		}
+		if 'results' in data['info']:
+			del data['info']['results']
+		data['info']['title'] = self.title
 
 		# Fill report
-		for output_type in …
-			if output_type.__name__ == 'Progress':
-				continue
+		for output_type in FINDING_TYPES:
 			output_name = output_type.get_name()
 			sort_by, _ = get_table_fields(output_type)
 			items = [
 				item for item in self.runner.results
 				if isinstance(item, OutputType) and item._type == output_name
 			]
-			if CONFIG.runners.remove_duplicates:
-				items = [item for item in items if not item._duplicate]
 			if items:
 				if sort_by and all(sort_by):
 					items = sorted(items, key=operator.attrgetter(*sort_by))
+				if dedupe and CONFIG.runners.remove_duplicates:
+					items = remove_duplicates(items)
+					# items = [item for item in items if not item._duplicate and item not in dedupe_from]
+				for extractor in extractors:
+					items = extract_from_results(items, extractors=[extractor])
 			data['results'][output_name] = items
 
 		# Save data
 		self.data = data
 
+	def is_empty(self):
+		return all(not items for items in self.data['results'].values())
+
@@ -89,7 +121,7 @@ def get_table_fields(output_type):
 	"""
 	sort_by = ()
 	output_fields = []
-	if output_type in …
+	if output_type in FINDING_TYPES:
 		sort_by = output_type._sort_by
 		output_fields = output_type._table_fields
 	return sort_by, output_fields
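`Report.build` now takes `extractors` and `dedupe` arguments, and deduplication moved into the new module-level `remove_duplicates` helper, which compares items by equality (so it works on the unhashable dataclass results) under a lock while a thread pool walks the list; output order is therefore not guaranteed. A quick usage sketch with plain values:

from secator.report import remove_duplicates  # helper added in this release

items = [1, 2, 2, 3, 1, 3, 3]
print(sorted(remove_duplicates(items)))  # [1, 2, 3]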
secator/rich.py
CHANGED
@@ -1,7 +1,6 @@
 import operator
 
 import yaml
-from rich import box
 from rich.console import Console
 from rich.table import Table
 
@@ -67,51 +66,57 @@ def build_table(items, output_fields=[], exclude_fields=[], sort_by=None):
 		items = sorted(items, key=operator.attrgetter(*sort_by))
 
 	# Create rich table
-	…
-	table = Table(show_lines=True, box=box_style)
+	table = Table(show_lines=True)
 
 	# Get table schema if any, default to first item keys
-	keys = …
-	… (removed lines not captured in this view)
+	keys = []
+	if output_fields:
+		keys = [k for k in output_fields if k not in exclude_fields]
+
+	# Remove meta fields not needed in output
+	if '_cls' in keys:
+		keys.remove('_cls')
+	if '_type' in keys:
+		keys.remove('_type')
+	if '_uuid' in keys:
+		keys.remove('_uuid')
+
+	# Add _source field
+	if '_source' not in keys:
+		keys.append('_source')
+
+	# Create table columns
+	for key in keys:
+		key_str = key
+		if not key.startswith('_'):
+			key_str = ' '.join(key.split('_')).title()
+		no_wrap = key in ['url', 'reference', 'references', 'matched_at']
+		overflow = None if no_wrap else 'fold'
+		table.add_column(
+			key_str,
+			overflow=overflow,
+			min_width=10,
+			no_wrap=no_wrap)
+
+	if not keys:
 		table.add_column(
-			…
-			overflow=…
+			'Extracted values',
+			overflow=False,
 			min_width=10,
-			no_wrap=…
-			header_style='bold blue')
+			no_wrap=False)
 
 	# Create table rows
 	for item in items:
 		values = []
-		… (removed lines not captured in this view)
-		value …
-		value …
+		if keys:
+			for key in keys:
+				value = getattr(item, key) if keys else item
+				value = FORMATTERS.get(key, lambda x: x)(value) if keys else item
+				if isinstance(value, dict) or isinstance(value, list):
+					value = yaml.dump(value)
+				elif isinstance(value, int) or isinstance(value, float):
+					value = str(value)
+				values.append(value)
+		else:
+			values = [item]
 		table.add_row(*values)
 	return table
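`build_table` now builds columns straight from `output_fields` (dropping `_cls`, `_type` and `_uuid`, and always appending `_source`) and falls back to a single 'Extracted values' column when no fields are passed. A hedged usage sketch for that fallback path, with plain strings as items:

from rich.console import Console

from secator.rich import build_table  # signature as shown in the diff above

# With no output_fields, every item lands in the single 'Extracted values' column.
table = build_table(['https://example.com/admin', 'https://example.com/login'])
Console().print(table)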