moriarty-project 0.1.24__py3-none-any.whl → 0.1.26__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -102,12 +102,20 @@ class DomainScanner:
     async def run(self):
         """Runs the full scan."""
         self._prepare_modules()
+        # Resolve the domain's IP
+        try:
+            import socket
+            ip = socket.gethostbyname(self.target)
+            target_display = f"{self.target} [{ip}]"
+        except Exception:
+            target_display = self.target
+
         # Professional banner
         banner = Panel(
-            f"[bold white]Target:[/bold white] [cyan]{self.target}[/cyan]\n"
+            f"[bold white]Target:[/bold white] [red]{target_display}[/red]\n"
             f"[dim]Modules: {', '.join(self.modules)} | Stealth: {self.stealth_level}[/dim]",
-            title="[bold cyan]🌐 Domain Scanner[/bold cyan]",
-            border_style="cyan",
+            title="[bold red]🌐 Domain Scanner[/bold red]",
+            border_style="red",
             padding=(1, 2),
         )
         console.print(banner)
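Note: the new lookup calls the blocking socket.gethostbyname from inside an async method, so the event loop stalls while the name resolves. A minimal non-blocking sketch of the same fallback logic (resolve_display is a hypothetical helper, not part of the package):

    import asyncio
    import socket

    async def resolve_display(target: str) -> str:
        # Resolve through the running event loop instead of blocking the coroutine.
        loop = asyncio.get_running_loop()
        try:
            infos = await loop.getaddrinfo(target, None, family=socket.AF_INET)
            return f"{target} [{infos[0][4][0]}]"  # IP of the first A record
        except OSError:
            # Mirror the except branch above: fall back to the bare target.
            return target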
@@ -148,7 +156,7 @@ class DomainScanner:

     async def _run_dns(self):
         """DNS module."""
-        console.print("\n[bold cyan]▶ DNS Enumeration[/bold cyan]")
+        console.print("\n[bold red]▶ DNS Enumeration[/bold red]")

         try:
             from moriarty.net.dns_client import DNSClient
@@ -179,7 +187,7 @@ class DomainScanner:

     async def _run_subdiscover(self):
         """Subdomain Discovery module."""
-        console.print("\n[bold cyan]▶ Subdomain Discovery[/bold cyan]")
+        console.print("\n[bold red]▶ Subdomain Discovery[/bold red]")

         try:
             from moriarty.modules.subdomain_discovery import SubdomainDiscovery
@@ -215,7 +223,7 @@ class DomainScanner:

     async def _run_wayback(self):
         """Wayback Machine module."""
-        console.print("\n[bold cyan]▶ Wayback Machine[/bold cyan]")
+        console.print("\n[bold red]▶ Wayback Machine[/bold red]")

         try:
             from moriarty.modules.wayback_discovery import WaybackDiscovery
@@ -244,37 +252,84 @@ class DomainScanner:

     async def _run_ports(self):
         """Port Scan module."""
-        console.print("\n[bold cyan]▶ Port Scanning[/bold cyan]")
+        console.print("\n[bold red]▶ Port Scanning[/bold red]")

         try:
-            from moriarty.modules.port_scanner import PortScanner
+            from moriarty.modules.port_scanner_nmap import PortScanner

+            # Set the profile based on the timeout
             profile = "extended" if self.timeout > 45 else "quick"
+
+            # Create the scanner with the appropriate settings
             scanner = PortScanner(
                 target=self.target,
-                profile=profile,
-                concurrency=max(40, self.threads * 30),
-                timeout=max(0.5, min(2.0, self.timeout / 12)),
+                ports=profile,
                 stealth_level=self.stealth_level,
+                resolve_services=True,
+                check_vulns=False
             )
+
             results = await scanner.scan()
+
+            if not results:
+                console.print("ℹ️ No open ports found.")
+                self.result.port_details = []
+                self.result.open_ports = []
+                return
+
+            # Process the results
             self.result.port_details = [asdict(entry) for entry in results]
             self.result.open_ports = [entry.port for entry in results]
-            console.print(f" [green]✓[/green] Found {len(results)} open ports")
-
-            if results:
-                preview = ", ".join(
-                    f"{entry.port} ({entry.banner[:18]}...)" if entry.banner else str(entry.port)
-                    for entry in results[:5]
-                )
-                console.print(f" [dim]→[/dim] {preview}")
+
+            # Display the results in a table
+            self._display_port_results(results)

         except Exception as e:
-            console.print(f" [red]✗[/red] Port scan failed")
+            import traceback
+            console.print(f" [red]✗[/red] Port scan failed: {str(e)}")
+            console.print(f"[yellow]Details:[/yellow] {traceback.format_exc()}")
+
+    def _display_port_results(self, results):
+        """Displays the port scan results as a table."""
+        from rich.table import Table, box
+
+        # Keep only open ports
+        open_ports = [r for r in results if getattr(r, 'status', '').lower() == 'open']
+
+        if not open_ports:
+            console.print("ℹ️ No open ports found.")
+            return
+
+        # Build the results table
+        table = Table(title="🚪 Open ports:", box=box.ROUNDED)
+        table.add_column("Port", style="cyan")
+        table.add_column("Status", style="green")
+        table.add_column("Service", style="yellow")
+        table.add_column("Details", style="white")
+
+        for entry in open_ports:
+            service = getattr(entry, 'service', None)
+            service_name = getattr(service, 'name', 'unknown') if service else 'unknown'
+            version = getattr(service, 'version', '')
+            details = version if version else ""
+
+            # Add vulnerability info when available
+            if service and hasattr(service, 'vulns') and service.vulns:
+                vulns = ", ".join(service.vulns[:2])
+                if len(service.vulns) > 2:
+                    vulns += f" (+{len(service.vulns)-2} more)"
+                details += f"\n🔴 {vulns}"
+
+            table.add_row(
+                str(entry.port),
+                "🟢 OPEN",
+                service_name,
+                details.strip() or "-"
+            )

     async def _run_ssl(self):
         """SSL/TLS module."""
-        console.print("\n[bold cyan]▶ SSL/TLS Analysis[/bold cyan]")
+        console.print("\n[bold red]▶ SSL/TLS Analysis[/bold red]")

         try:
             from moriarty.modules.tls_validator import TLSCertificateValidator
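Note: as published, _display_port_results builds the Rich table but never renders it; no console.print(table) follows the row loop, so the table is silently discarded. (The import from rich.table appears to work only because rich.table itself imports box; from rich import box is the documented path.) A minimal sketch of the presumably intended final step, using the module-level console this file already uses:

    # Hypothetical tail for _display_port_results: render the finished table
    # once every open port has been added as a row.
    console.print(table)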
@@ -287,7 +342,7 @@ class DomainScanner:

     async def _run_template_scan(self):
         """Template Scanner module."""
-        console.print("\n[bold cyan]▶ Template Scanner[/bold cyan]")
+        console.print("\n[bold red]▶ Template Scanner[/bold red]")

         try:
             from moriarty.modules.template_scanner import TemplateScanner
@@ -338,49 +393,109 @@ class DomainScanner:

     async def _run_crawl(self):
         """Runs a lightweight crawler to inventory routes."""
-        console.print("\n[bold cyan]▶ Web Crawler[/bold cyan]")
+        console.print("\n[bold red]▶ Web Crawler[/bold red]")
         try:
+            console.print("🔍 Starting Web Crawler configuration...")
            from moriarty.modules.web_crawler import WebCrawler

             base_url = self._default_base_url()
-            crawler = WebCrawler(
-                base_url=base_url,
-                max_pages=max(50, self.threads * 10),
-                max_depth=2,
-                concurrency=max(5, self.threads),
-                follow_subdomains=False,
-                stealth=self.stealth,
-            )
+            console.print(f"🌐 Base URL: {base_url}")
+
+            # Crawler settings
+            max_pages = max(50, self.threads * 10)
+            max_depth = 2
+            concurrency = max(5, self.threads)
+
+            console.print(f"⚙️ Settings: max_pages={max_pages}, max_depth={max_depth}, concurrency={concurrency}")
+
             try:
-                pages = await crawler.crawl()
-            finally:
-                await crawler.close()
-
-            self.result.crawl_map = {
-                url: {
-                    "status": page.status,
-                    "title": page.title,
-                    "forms": page.forms,
-                    "links": page.links,
-                }
-                for url, page in pages.items()
-            }
-
-            extracted = self._extract_targets_from_crawl(pages)
-            for definition in extracted:
-                self._register_web_target(definition["url"], definition["method"], definition["params"])
-
-            console.print(f" [green]✓[/green] Crawled {len(pages)} pages")
-            if extracted:
-                console.print(f" [dim]→[/dim] {len(extracted)} endpoints to fuzz")
+                console.print("🚀 Starting crawler...")
+                try:
+                    # Debug log: check whether the WebCrawler can be instantiated
+                    console.print("🔧 Instantiating WebCrawler...")
+                    crawler = WebCrawler(
+                        base_url=base_url,
+                        max_pages=max_pages,
+                        max_depth=max_depth,
+                        concurrency=concurrency,
+                        follow_subdomains=False,
+                        stealth=self.stealth,
+                    )
+                    console.print("✅ WebCrawler instantiated successfully!")
+                except Exception as e:
+                    console.print(f"❌ [red]Error instantiating WebCrawler: {str(e)}[/red]")
+                    logger.error("webcrawler.init_error", error=str(e), exc_info=True)
+                    raise
+
+                try:
+                    console.print("🔄 Running the crawl...")
+                    pages = await crawler.crawl()
+                    console.print(f"✅ Crawl finished! {len(pages)} pages found.")
+
+                    # Detailed log of the pages found
+                    for i, (url, page) in enumerate(pages.items(), 1):
+                        console.print(f" {i}. [blue]{url}[/blue] (Status: {page.status})")
+                        if page.error:
+                            console.print(f" [red]Error: {page.error}[/red]")
+                except Exception as e:
+                    console.print(f"❌ [red]Error during the crawl: {str(e)}[/red]")
+                    logger.error("webcrawler.crawl_error", error=str(e), exc_info=True)
+                    raise
+
+                try:
+                    self.result.crawl_map = {
+                        url: {
+                            "status": page.status,
+                            "title": page.title,
+                            "forms": page.forms,
+                            "links": page.links,
+                        }
+                        for url, page in pages.items()
+                    }
+                    console.print("📊 Crawl data processed successfully!")
+                except Exception as e:
+                    console.print(f"❌ [red]Error processing crawl results: {str(e)}[/red]")
+                    logger.error("webcrawler.process_error", error=str(e), exc_info=True)
+                    raise
+
+                try:
+                    extracted = self._extract_targets_from_crawl(pages)
+                    console.print(f"🔗 {len(extracted)} targets extracted for fuzzing")
+
+                    for definition in extracted:
+                        self._register_web_target(
+                            definition["url"],
+                            definition["method"],
+                            definition["params"]
+                        )
+
+                    console.print(f" [green]✓[/green] Crawled {len(pages)} pages")
+                    if extracted:
+                        console.print(f" [dim]→[/dim] {len(extracted)} endpoints to fuzz")
+                except Exception as e:
+                    console.print(f"❌ [red]Error extracting targets from the crawl: {str(e)}[/red]")
+                    logger.error("webcrawler.extract_error", error=str(e), exc_info=True)
+                    raise
+
+            except Exception as e:
+                console.print(f" [red]✗[/red] Crawl failed: {str(e)}")
+                logger.error("domain.crawl.failed", error=str(e), exc_info=True)
+                # Try to close the crawler even when an error occurred
+                if 'crawler' in locals():
+                    try:
+                        await crawler.close()
+                    except:
+                        pass
+                raise

         except Exception as exc:
-            console.print(" [red]✗[/red] Crawl failed")
-            logger.debug("domain.crawl.error", error=str(exc))
+            console.print(f" [red]✗[/red] Fatal error in the Web Crawler: {str(exc)}")
+            logger.error("domain.crawl.fatal_error", error=str(exc), exc_info=True)
+            raise
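Note: the rewritten _run_crawl awaits crawler.close() only inside its error handler; the try/finally cleanup deleted from 0.1.24 has no counterpart on the success path, so a crawl that completes cleanly never closes the crawler. A minimal sketch of a cleanup that covers both paths, assuming WebCrawler keeps the async close() method used above:

    crawler = WebCrawler(base_url=base_url, max_pages=max_pages,
                         max_depth=max_depth, concurrency=concurrency)
    try:
        pages = await crawler.crawl()
    finally:
        # Runs whether or not crawl() raised, so connections are always released.
        await crawler.close()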
@@ -406,7 +521,7 @@ class DomainScanner:

     async def _run_vuln_scan(self):
         """Runs XSS/SQLi vulnerability detection."""
-        console.print("\n[bold cyan]▶ Web Vulnerability Scan[/bold cyan]")
+        console.print("\n[bold red]▶ Web Vulnerability Scan[/bold red]")

         if not self.web_targets:
             console.print(" [yellow]⚠️ No endpoints collected to test[/yellow]")
@@ -430,7 +545,7 @@ class DomainScanner:

     async def _run_waf_detect(self):
         """WAF Detection module."""
-        console.print("\n[bold cyan]▶ WAF Detection[/bold cyan]")
+        console.print("\n[bold red]▶ WAF Detection[/bold red]")

         try:
             from moriarty.modules.waf_detector import WAFDetector
@@ -461,7 +576,7 @@ class DomainScanner:
     def _show_summary(self):
         """Shows the final summary."""
         # Results tree
-        tree = Tree(f"\n[bold cyan]📊 Scan Summary[/bold cyan]")
+        tree = Tree(f"\n[bold red]📊 Scan Summary[/bold red]")

         if self.result.dns_info:
             dns_node = tree.add("[bold]DNS Records[/bold]")
@@ -610,7 +725,15 @@ class DomainScanner:
         self.result.web_targets = self.web_targets

     def _default_base_url(self) -> str:
-        return f"https://{self.target}"
+        """Returns the base URL for the crawler, ensuring it has the correct scheme."""
+        target = self.target
+
+        # If the target already has a scheme, return it unchanged
+        if target.startswith(('http://', 'https://')):
+            return target
+
+        # Otherwise, prepend https://
+        return f"https://{target}"

     def export(self, output: str):
         """Exports results."""
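Note: the new _default_base_url only recognizes the literal lowercase prefixes http:// and https://, so a target such as HTTPS://example.com would be prefixed a second time. A scheme-aware sketch using the standard library (default_base_url is a hypothetical standalone variant, not the package's method):

    from urllib.parse import urlsplit

    def default_base_url(target: str) -> str:
        # urlsplit lowercases the scheme, so mixed-case prefixes are recognized too.
        if urlsplit(target).scheme in ("http", "https"):
            return target
        return f"https://{target}"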