moriarty-project 0.1.26__py3-none-any.whl → 0.1.27__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- moriarty/__init__.py +1 -1
- moriarty/cli/app.py +2 -2
- moriarty/cli/domain_cmd.py +127 -634
- moriarty/modules/domain_scanner.py +21 -16
- moriarty/modules/port_scanner_nmap.py +369 -102
- {moriarty_project-0.1.26.dist-info → moriarty_project-0.1.27.dist-info}/METADATA +5 -5
- {moriarty_project-0.1.26.dist-info → moriarty_project-0.1.27.dist-info}/RECORD +9 -11
- moriarty/cli/wifippler.py +0 -124
- moriarty/modules/port_scanner.py +0 -1050
- {moriarty_project-0.1.26.dist-info → moriarty_project-0.1.27.dist-info}/WHEEL +0 -0
- {moriarty_project-0.1.26.dist-info → moriarty_project-0.1.27.dist-info}/entry_points.txt +0 -0
@@ -76,6 +76,7 @@ class DomainScanner:
         stealth_level: int = 0,
         threads: int = 10,
         timeout: int = 30,
+        ports_profile: str = "quick",
         verbose: bool = False,
     ):
         self.target = target
@@ -83,6 +84,7 @@ class DomainScanner:
         self.stealth_level = stealth_level
         self.threads = threads
         self.timeout = timeout
+        self.ports_profile = ports_profile.lower()
         self.verbose = verbose
         self.result = ScanResult(target=target)
         self.stealth = None
@@ -257,8 +259,11 @@ class DomainScanner:
         try:
             from moriarty.modules.port_scanner_nmap import PortScanner

-            #
-            profile = "extended" if self.timeout > 45 else "quick"
+            # Usa o perfil de portas fornecido ou define baseado no timeout se não especificado
+            profile = self.ports_profile if hasattr(self, 'ports_profile') else ("extended" if self.timeout > 45 else "quick")
+
+            # Log do perfil que será usado
+            console.print(f" 🔧 Usando perfil de portas: [bold]{profile}[/]")

             # Cria o scanner com as configurações apropriadas
             scanner = PortScanner(
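For illustration only, a minimal sketch of how the new ports_profile argument might be passed when constructing the scanner directly. The parameter names mirror the constructor hunks above; the target value and the direct-call usage are hypothetical (the real call site is the CLI in moriarty/cli/domain_cmd.py and may pass additional arguments).

from moriarty.modules.domain_scanner import DomainScanner

# Hypothetical direct usage; "example.com" is a placeholder target.
scanner = DomainScanner(
    target="example.com",
    stealth_level=0,
    threads=10,
    timeout=30,
    ports_profile="extended",  # new in 0.1.27; lowercased in __init__, defaults to "quick"
    verbose=False,
)
# When ports_profile is provided, the timeout-based fallback shown above
# ("extended" if timeout > 45 else "quick") is bypassed.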
@@ -272,7 +277,7 @@ class DomainScanner:
             results = await scanner.scan()

             if not results:
-                console.print("ℹ️ Nenhuma porta aberta encontrada.")
+                console.print(" ℹ️ Nenhuma porta aberta encontrada.")
                 self.result.port_details = []
                 self.result.open_ports = []
                 return
@@ -395,24 +400,24 @@ class DomainScanner:
         """Executa crawler leve para inventário de rotas."""
         console.print("\n[bold red]▶ Web Crawler[/bold red]")
         try:
-            console.print("🔍 Iniciando configuração do Web Crawler...")
+            console.print(" 🔍 Iniciando configuração do Web Crawler...")
             from moriarty.modules.web_crawler import WebCrawler

             base_url = self._default_base_url()
-            console.print(f"🌐 URL base: {base_url}")
+            console.print(f" 🌐 URL base: {base_url}")

             # Configurações do crawler
             max_pages = max(50, self.threads * 10)
             max_depth = 2
             concurrency = max(5, self.threads)

-            console.print(f"⚙️ Configurações: max_pages={max_pages}, max_depth={max_depth}, concurrency={concurrency}")
+            console.print(f" ⚙️ Configurações: max_pages={max_pages}, max_depth={max_depth}, concurrency={concurrency}")

             try:
-                console.print("🚀 Iniciando crawler...")
+                console.print(" 🚀 Iniciando crawler...")
                 try:
                     # Log de debug: Verificando se o WebCrawler pode ser instanciado
-                    console.print("🔧 Instanciando WebCrawler...")
+                    console.print(" 🔧 Instanciando WebCrawler...")
                     crawler = WebCrawler(
                         base_url=base_url,
                         max_pages=max_pages,
@@ -421,16 +426,16 @@ class DomainScanner:
                         follow_subdomains=False,
                         stealth=self.stealth,
                     )
-                    console.print("✅ WebCrawler instanciado com sucesso!")
+                    console.print(" ✅ WebCrawler instanciado com sucesso!")
                 except Exception as e:
-                    console.print(f"❌ [red]Erro ao instanciar WebCrawler: {str(e)}[/red]")
+                    console.print(f" ❌ [red]Erro ao instanciar WebCrawler: {str(e)}[/red]")
                     logger.error("webcrawler.init_error", error=str(e), exc_info=True)
                     raise

                 try:
-                    console.print("🔄 Executando varredura...")
+                    console.print(" 🔄 Executando varredura...")
                     pages = await crawler.crawl()
-                    console.print(f"✅ Varredura concluída! {len(pages)} páginas encontradas.")
+                    console.print(f" ✅ Varredura concluída! {len(pages)} páginas encontradas.")

                     # Log detalhado das páginas encontradas
                     for i, (url, page) in enumerate(pages.items(), 1):
@@ -438,7 +443,7 @@ class DomainScanner:
                         if page.error:
                             console.print(f" [red]Erro: {page.error}[/red]")
                 except Exception as e:
-                    console.print(f"❌ [red]Erro durante o crawler: {str(e)}[/red]")
+                    console.print(f" ❌ [red]Erro durante o crawler: {str(e)}[/red]")
                     logger.error("webcrawler.crawl_error", error=str(e), exc_info=True)
                     raise

@@ -452,15 +457,15 @@ class DomainScanner:
                         }
                         for url, page in pages.items()
                     }
-                    console.print("📊 Dados do crawl processados com sucesso!")
+                    console.print(" 📊 Dados do crawl processados com sucesso!")
                 except Exception as e:
-                    console.print(f"❌ [red]Erro ao processar resultados do crawl: {str(e)}[/red]")
+                    console.print(f" ❌ [red]Erro ao processar resultados do crawl: {str(e)}[/red]")
                     logger.error("webcrawler.process_error", error=str(e), exc_info=True)
                     raise

                 try:
                     extracted = self._extract_targets_from_crawl(pages)
-                    console.print(f"🔗 {len(extracted)} alvos extraídos para fuzzing")
+                    console.print(f" 🔗 {len(extracted)} alvos extraídos para fuzzing")

                     for definition in extracted:
                         self._register_web_target(