souleyez 2.16.0-py3-none-any.whl → 2.26.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- souleyez/__init__.py +1 -1
- souleyez/assets/__init__.py +1 -0
- souleyez/assets/souleyez-icon.png +0 -0
- souleyez/core/msf_sync_manager.py +15 -5
- souleyez/core/tool_chaining.py +221 -29
- souleyez/detection/validator.py +4 -2
- souleyez/docs/README.md +2 -2
- souleyez/docs/user-guide/installation.md +14 -1
- souleyez/engine/background.py +25 -1
- souleyez/engine/result_handler.py +129 -0
- souleyez/integrations/siem/splunk.py +58 -11
- souleyez/main.py +103 -4
- souleyez/parsers/crackmapexec_parser.py +101 -43
- souleyez/parsers/dnsrecon_parser.py +50 -35
- souleyez/parsers/enum4linux_parser.py +101 -21
- souleyez/parsers/http_fingerprint_parser.py +319 -0
- souleyez/parsers/hydra_parser.py +56 -5
- souleyez/parsers/impacket_parser.py +123 -44
- souleyez/parsers/john_parser.py +47 -14
- souleyez/parsers/msf_parser.py +20 -5
- souleyez/parsers/nmap_parser.py +145 -28
- souleyez/parsers/smbmap_parser.py +69 -25
- souleyez/parsers/sqlmap_parser.py +72 -26
- souleyez/parsers/theharvester_parser.py +21 -13
- souleyez/plugins/gobuster.py +96 -3
- souleyez/plugins/http_fingerprint.py +592 -0
- souleyez/plugins/msf_exploit.py +6 -3
- souleyez/plugins/nuclei.py +41 -17
- souleyez/ui/interactive.py +130 -20
- souleyez/ui/setup_wizard.py +424 -58
- souleyez/ui/tool_setup.py +52 -52
- souleyez/utils/tool_checker.py +75 -13
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/METADATA +16 -3
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/RECORD +38 -34
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/WHEEL +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/entry_points.txt +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/licenses/LICENSE +0 -0
- {souleyez-2.16.0.dist-info → souleyez-2.26.0.dist-info}/top_level.txt +0 -0
souleyez/plugins/nuclei.py
CHANGED
@@ -207,19 +207,51 @@ class NucleiPlugin(PluginBase):
             return True
         return False
 
-    def
-    """
-
+    def _normalize_target(self, target: str, args: List[str] = None, log_path: str = None) -> str:
+        """
+        Normalize target for Nuclei scanning.
+
+        - URLs are validated and passed through
+        - Bare IPs/domains get http:// prepended for web scanning
+
+        This fixes the issue where nmap chains pass bare IPs but Nuclei
+        needs URLs to properly scan web services.
+        """
+        import re
+
+        # Already a URL - validate and return
         if target.startswith(('http://', 'https://')):
             try:
-
+                return validate_url(target)
             except ValidationError as e:
                 if log_path:
                     with open(log_path, 'w') as f:
                         f.write(f"ERROR: Invalid URL: {e}\n")
                 return None
 
+        # Bare IP or domain - prepend http:// for web scanning
+        # This is needed because Nuclei web templates require a URL
+        ip_pattern = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?$'
+        domain_pattern = r'^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)*$'
+
+        if re.match(ip_pattern, target) or re.match(domain_pattern, target):
+            # Log the conversion
+            if log_path:
+                with open(log_path, 'a') as f:
+                    f.write(f"NOTE: Converting bare target '{target}' to 'http://{target}' for web scanning\n")
+            return f"http://{target}"
+
+        # Unknown format - return as-is
+        return target
+
+    def build_command(self, target: str, args: List[str] = None, label: str = "", log_path: str = None):
+        """Build nuclei command for background execution with PID tracking."""
         args = args or []
+
+        # Normalize target (convert bare IPs to URLs)
+        target = self._normalize_target(target, args, log_path)
+        if target is None:
+            return None
         args = [arg.replace("<target>", target) for arg in args]
 
         cmd = ["nuclei", "-target", target]
@@ -252,21 +284,13 @@ class NucleiPlugin(PluginBase):
 
     def run(self, target: str, args: List[str] = None, label: str = "", log_path: str = None) -> int:
         """Execute nuclei scan and write JSON output to log_path."""
-
-        # For URLs, validate them. For bare IPs/domains, let Nuclei auto-detect protocols
-        if target.startswith(('http://', 'https://')):
-            try:
-                target = validate_url(target)
-            except ValidationError as e:
-                if log_path:
-                    with open(log_path, 'w') as f:
-                        f.write(f"ERROR: Invalid URL: {e}\n")
-                    return 1
-                raise ValueError(f"Invalid URL: {e}")
-        # Otherwise keep target as-is (IP or domain) for Nuclei auto-detect protocols
-
         args = args or []
 
+        # Normalize target (convert bare IPs to URLs)
+        target = self._normalize_target(target, args, log_path)
+        if target is None:
+            return 1
+
         # Replace <target> placeholder
         args = [arg.replace("<target>", target) for arg in args]
 
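Note: the normalization above can be exercised on its own. The following is a minimal sketch of the bare-IP/domain branch using the regexes from the hunk; validate_url and ValidationError are souleyez internals, so the URL-validation path is only stubbed here and the helper name is hypothetical.

    import re

    # Regexes copied from the _normalize_target hunk above
    IP_PATTERN = r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(:\d+)?$'
    DOMAIN_PATTERN = r'^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?)*$'

    def normalize_target_sketch(target: str) -> str:
        """Sketch: URLs pass through, bare IPs/domains get http:// prepended."""
        if target.startswith(('http://', 'https://')):
            return target  # the real plugin runs validate_url() here
        if re.match(IP_PATTERN, target) or re.match(DOMAIN_PATTERN, target):
            return f"http://{target}"
        return target  # unknown format - return as-is

    print(normalize_target_sketch("10.0.0.5:8080"))        # http://10.0.0.5:8080
    print(normalize_target_sketch("example.com"))          # http://example.com
    print(normalize_target_sketch("https://example.com"))  # https://example.com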
souleyez/ui/interactive.py
CHANGED
@@ -130,6 +130,63 @@ def render_standard_header(title: str, width: int = None) -> None:
     click.echo()
 
 
+def parse_syslog_description(desc: str) -> str:
+    """
+    Extract meaningful message from syslog-formatted descriptions.
+
+    Syslog format: <timestamp> <host> [timestamp] <program>[pid]: <message>
+    Example input: "Jan 8 07:00:05 192.168.1.111 Jan 8 07:00:05 eyez CRON[537281]: pam_unix(cron:session): session closed for user yoda"
+    Example output: "CRON: pam_unix(cron:session): session closed for user yoda"
+    """
+    import re
+
+    if not desc:
+        return 'No description'
+
+    # Try to find the actual message after common syslog patterns
+    # Pattern 1: Look for process name with PID followed by colon (e.g., "CRON[537281]:")
+    pid_match = re.search(r'([A-Za-z_][A-Za-z0-9_-]*)\[(\d+)\]:\s*(.+)$', desc)
+    if pid_match:
+        process_name = pid_match.group(1)
+        message = pid_match.group(3)
+        return f"{process_name}: {message}"
+
+    # Pattern 2: Look for systemd-style messages (e.g., "systemd[1]: Started...")
+    systemd_match = re.search(r'(systemd(?:-[a-z]+)?)\[?\d*\]?:\s*(.+)$', desc, re.IGNORECASE)
+    if systemd_match:
+        return f"{systemd_match.group(1)}: {systemd_match.group(2)}"
+
+    # Pattern 3: Look for kernel messages
+    kernel_match = re.search(r'kernel:\s*(.+)$', desc)
+    if kernel_match:
+        return f"kernel: {kernel_match.group(1)}"
+
+    # Pattern 4: Generic - find content after last colon that has substance
+    colon_parts = desc.split(': ')
+    if len(colon_parts) > 1:
+        # Get the meaningful part (usually after the first "process:" pattern)
+        for i, part in enumerate(colon_parts):
+            # Skip parts that look like timestamps or IPs
+            if not re.match(r'^(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec|\d{1,3}\.\d{1,3}|\d{4}-\d{2})', part):
+                # Found something meaningful - join from here
+                meaningful = ': '.join(colon_parts[i:])
+                if len(meaningful) > 10:  # Ensure it's substantial
+                    return meaningful
+
+    # Pattern 5: Strip leading timestamp patterns
+    # Remove patterns like "Jan 8 07:00:05 192.168.1.111 Jan 8 07:00:05 hostname"
+    stripped = re.sub(
+        r'^(?:[A-Z][a-z]{2}\s+\d+\s+\d{2}:\d{2}:\d{2}\s+\S+\s*)+',
+        '', desc
+    ).strip()
+
+    if stripped and len(stripped) > 5:
+        return stripped
+
+    # Fallback: return original if no patterns matched
+    return desc
+
+
 def _show_upgrade_prompt(feature_name: str):
     """Show upgrade prompt when FREE user tries to access Pro feature."""
     from rich.panel import Panel
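Note: the docstring's example can be checked against Pattern 1 in isolation; the snippet below is just that first branch, not the full fallback chain.

    import re

    # Pattern 1 from parse_syslog_description: "<program>[<pid>]: <message>"
    line = ("Jan 8 07:00:05 192.168.1.111 Jan 8 07:00:05 eyez "
            "CRON[537281]: pam_unix(cron:session): session closed for user yoda")

    m = re.search(r'([A-Za-z_][A-Za-z0-9_-]*)\[(\d+)\]:\s*(.+)$', line)
    if m:
        print(f"{m.group(1)}: {m.group(3)}")
        # -> CRON: pam_unix(cron:session): session closed for user yoda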
@@ -5345,7 +5402,7 @@ def view_job_detail(job_id: int):
 
     # Check if tool has a parser - if yes, hide raw logs by default
     tool = job.get('tool', '')
-    has_parser = tool in ['dnsrecon', 'nmap', 'nuclei', 'nikto', 'dalfox', 'theharvester', 'sqlmap', 'ffuf', 'gobuster', 'wpscan', 'crackmapexec', 'hydra', 'whois', 'smbmap', 'enum4linux', 'msf_auxiliary', 'searchsploit']
+    has_parser = tool in ['dnsrecon', 'nmap', 'ard', 'nuclei', 'nikto', 'dalfox', 'theharvester', 'sqlmap', 'ffuf', 'gobuster', 'wpscan', 'crackmapexec', 'hydra', 'whois', 'smbmap', 'enum4linux', 'msf_auxiliary', 'searchsploit']
 
     # Show log file if exists
     log_path = job.get('log')
@@ -5941,7 +5998,9 @@ def view_job_detail(job_id: int):
             pass
 
     # Parse and display Nmap results if available (only when not showing raw logs)
-
+    # ARD plugin uses nmap under the hood, so include it here
+    nmap_based_tools = ['nmap', 'ard']
+    if not show_raw_logs and job.get('tool') in nmap_based_tools and job.get('status') in ['done', 'completed'] and log_path and os.path.exists(log_path):
         try:
             from souleyez.parsers.nmap_parser import parse_nmap_output
             with open(log_path, 'r', encoding='utf-8', errors='replace') as f:
@@ -8898,7 +8957,8 @@ def _view_wazuh_alerts(engagement_id: int):
             icon = get_level_icon(level)
             rule_id = str(alert.get('rule_id', 'N/A'))[:10]
             agent_name = alert.get('agent_name', 'N/A')[:15]
-
+            raw_desc = alert.get('description') or 'No description'
+            desc = parse_syslog_description(raw_desc)[:45]
             ts = alert.get('timestamp', 'N/A')
             if hasattr(ts, 'strftime'):
                 ts = ts.strftime('%Y-%m-%d %H:%M:%S')
@@ -9075,7 +9135,8 @@ def _view_alert_detail(alert: dict):
 
     # Get values from normalized format first, then fall back to raw_data
     rule_id = alert.get('rule_id') or rule.get('id', 'N/A')
-
+    raw_description = alert.get('description') or rule.get('description', 'N/A')
+    description = parse_syslog_description(raw_description)
     level = alert.get('level', 0) or rule.get('level', 0)
     severity = alert.get('severity', 'info')
 
@@ -9694,9 +9755,19 @@ def _view_all_job_alerts(item: dict):
             return f"{icon} {severity[:6].upper()}"
         elif key == 'rule_id':
             if is_wazuh_style:
-
+                # Wazuh: show first rule group (more descriptive) or rule ID
+                rule_data = alert.get('rule', {})
+                groups = rule_data.get('groups', [])
+                if groups:
+                    # Get most specific group (often last is most specific)
+                    return str(groups[-1])[:12]
+                return str(rule_data.get('id', 'N/A'))[:12]
             else:
-
+                # Splunk: show MITRE tactic if available, else sourcetype
+                mitre_tactics = alert.get('mitre_tactics', [])
+                if mitre_tactics:
+                    return str(mitre_tactics[0])[:12]
+                return str(alert.get('rule_id', 'N/A'))[:12]
         elif key == 'agent_name':
             if is_wazuh_style:
                 return alert.get('agent', {}).get('name', 'N/A')
@@ -9704,9 +9775,17 @@ def _view_all_job_alerts(item: dict):
                 return str(alert.get('source_ip', alert.get('host', 'N/A')))[:15]
         elif key == 'description':
             if is_wazuh_style:
-
+                # Wazuh: use rule description, or rule groups if more descriptive
+                rule_data = alert.get('rule', {})
+                desc = rule_data.get('description', '')
+                if not desc:
+                    groups = rule_data.get('groups', [])
+                    if groups:
+                        desc = ', '.join(groups[:2])
+                return str(desc)[:45] if desc else 'No description'
             else:
-
+                # Splunk: prefer actual description (log content) over rule_name
+                desc = alert.get('description', '') or alert.get('rule_name', '')
                 return str(desc)[:45] if desc else 'No description'
         elif key == 'timestamp':
             ts = alert.get('timestamp', 'N/A')
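Note: the two hunks above amount to a per-source fallback chain when filling the Type column. A condensed sketch of that selection logic; the alert dicts below are purely hypothetical examples.

    def type_column(alert: dict, is_wazuh_style: bool) -> str:
        # Wazuh-style: prefer a rule group over the bare rule ID
        if is_wazuh_style:
            rule = alert.get('rule', {})
            groups = rule.get('groups', [])
            return str(groups[-1] if groups else rule.get('id', 'N/A'))[:12]
        # Splunk-style: prefer a MITRE tactic, else the stored rule_id
        tactics = alert.get('mitre_tactics', [])
        return str(tactics[0] if tactics else alert.get('rule_id', 'N/A'))[:12]

    # Hypothetical alerts, for illustration only
    print(type_column({'rule': {'id': '5715', 'groups': ['syslog', 'sshd']}}, True))           # sshd
    print(type_column({'mitre_tactics': ['Credential Access'], 'rule_id': 'SPL-001'}, False))  # Credential A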
@@ -9718,9 +9797,9 @@ def _view_all_job_alerts(item: dict):
     columns = [
         {'name': '#', 'width': 5, 'key': '_idx'},
         {'name': 'Level', 'width': 10, 'key': 'level_display'},
-        {'name': '
+        {'name': 'Type', 'width': 14, 'key': 'rule_id'},
         {'name': 'Agent', 'width': 15, 'key': 'agent_name'},
-        {'name': 'Description', 'width':
+        {'name': 'Description', 'width': 42, 'key': 'description'},
         {'name': 'Time', 'width': 20, 'key': 'timestamp'},
     ]
 
@@ -15772,6 +15851,37 @@ def view_findings(engagement_id: int):
         summary_parts.append(f"Filters: {', '.join(active_filters)}")
 
     click.echo(" " + " | ".join(summary_parts))
+
+    # Show tool distribution legend
+    if findings:
+        tool_counts = {}
+        for f in findings:
+            tool = f.get('tool') or 'unknown'
+            tool_counts[tool] = tool_counts.get(tool, 0) + 1
+
+        # Sort by count (descending) and format
+        sorted_tools = sorted(tool_counts.items(), key=lambda x: x[1], reverse=True)
+        tool_parts = [f"{tool}({count})" for tool, count in sorted_tools]
+
+        # Display on one or more lines if needed
+        tool_legend = " Tools: " + " | ".join(tool_parts)
+        if len(tool_legend) > width - 4:
+            # Wrap to multiple lines if too long
+            lines = []
+            current_line = " Tools: "
+            for i, part in enumerate(tool_parts):
+                test_line = current_line + part + (" | " if i < len(tool_parts) - 1 else "")
+                if len(test_line) > width - 4 and current_line != " Tools: ":
+                    lines.append(current_line.rstrip(" | "))
+                    current_line = " " + part + (" | " if i < len(tool_parts) - 1 else "")
+                else:
+                    current_line = test_line
+            lines.append(current_line.rstrip(" | "))
+            for line in lines:
+                click.echo(click.style(line, fg='cyan'))
+        else:
+            click.echo(click.style(tool_legend, fg='cyan'))
+
     click.echo()
 
     if not findings:
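Note: the legend-wrapping logic above is easier to follow in isolation. Below is a standalone sketch of the same greedy wrapping idea; the function name, width, tool counts, and indent strings are made up for illustration.

    def build_tool_legend(tool_counts: dict, width: int) -> list:
        # Sort tools by count (descending) and render as "tool(count)" chunks
        parts = [f"{tool}({count})" for tool, count
                 in sorted(tool_counts.items(), key=lambda x: x[1], reverse=True)]
        lines, current = [], "  Tools: "
        for i, part in enumerate(parts):
            candidate = current + part + (" | " if i < len(parts) - 1 else "")
            # Start a new line once the candidate would overflow the usable width
            if len(candidate) > width - 4 and current != "  Tools: ":
                lines.append(current.rstrip(" |"))
                current = "         " + part + (" | " if i < len(parts) - 1 else "")
            else:
                current = candidate
        lines.append(current.rstrip(" |"))
        return lines

    # Hypothetical counts and a narrow width to force a wrap
    for line in build_tool_legend({'nmap': 12, 'nuclei': 7, 'gobuster': 4, 'hydra': 2}, 40):
        print(line)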
@@ -29508,11 +29618,11 @@ def _check_msfdb_ready() -> bool:
     click.echo(" The Metasploit database needs to be initialized for full functionality.")
     click.echo(" Without it, you won't be able to store hosts, credentials, or loot.")
     click.echo()
-    if click.confirm(" Initialize database now? (runs: msfdb init)", default=True):
+    if click.confirm(" Initialize database now? (runs: sudo msfdb init)", default=True):
         click.echo()
-        click.echo(click.style(" Running msfdb init...", fg='cyan'))
+        click.echo(click.style(" Running sudo msfdb init...", fg='cyan'))
         try:
-            result = subprocess.run(['msfdb', 'init'], capture_output=False, text=True)
+            result = subprocess.run(['sudo', 'msfdb', 'init'], capture_output=False, text=True)
             if result.returncode == 0:
                 click.echo(click.style(" Database initialized successfully!", fg='green'))
                 click.echo()
@@ -31347,13 +31457,13 @@ def run_interactive_menu():
     click.echo("└" + "─" * (width - 2) + "┘")
     click.echo("\n")
 
-    # ASCII Art Banner - SOULEYEZ
-    click.echo(click.style(" ███████╗ ██████╗ ██╗ ██╗██╗ ███████╗██╗ ██╗███████╗███████╗", fg='bright_cyan', bold=True))
-    click.echo(click.style(" ██╔════╝██╔═══██╗██║ ██║██║ ██╔════╝╚██╗ ██╔╝██╔════╝╚══███╔╝", fg='bright_cyan', bold=True))
-    click.echo(click.style(" ███████╗██║ ██║██║ ██║██║ █████╗ ╚████╔╝ █████╗ ███╔╝ ", fg='bright_cyan', bold=True))
-    click.echo(click.style(" ╚════██║██║ ██║██║ ██║██║ ██╔══╝ ╚██╔╝ ██╔══╝ ███╔╝ ", fg='bright_cyan', bold=True))
-    click.echo(click.style(" ███████║╚██████╔╝╚██████╔╝███████╗███████╗ ██║ ███████╗███████╗", fg='bright_cyan', bold=True))
-    click.echo(click.style(" ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚══════╝╚══════╝", fg='bright_cyan', bold=True))
+    # ASCII Art Banner - SOULEYEZ with all-seeing eye on the right
+    click.echo(click.style(" ███████╗ ██████╗ ██╗ ██╗██╗ ███████╗██╗ ██╗███████╗███████╗", fg='bright_cyan', bold=True) + click.style(" ▄██▄", fg='bright_blue', bold=True))
+    click.echo(click.style(" ██╔════╝██╔═══██╗██║ ██║██║ ██╔════╝╚██╗ ██╔╝██╔════╝╚══███╔╝", fg='bright_cyan', bold=True) + click.style(" ▄█▀ ▀█▄", fg='bright_blue', bold=True))
+    click.echo(click.style(" ███████╗██║ ██║██║ ██║██║ █████╗ ╚████╔╝ █████╗ ███╔╝ ", fg='bright_cyan', bold=True) + click.style(" █ ◉ █", fg='bright_blue', bold=True))
+    click.echo(click.style(" ╚════██║██║ ██║██║ ██║██║ ██╔══╝ ╚██╔╝ ██╔══╝ ███╔╝ ", fg='bright_cyan', bold=True) + click.style(" █ ═══ █", fg='bright_blue', bold=True))
+    click.echo(click.style(" ███████║╚██████╔╝╚██████╔╝███████╗███████╗ ██║ ███████╗███████╗", fg='bright_cyan', bold=True) + click.style(" ▀█▄ ▄█▀", fg='bright_blue', bold=True))
+    click.echo(click.style(" ╚══════╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚══════╝╚══════╝", fg='bright_cyan', bold=True) + click.style(" ▀██▀", fg='bright_blue', bold=True))
     click.echo()
 
     # Tagline and description