spy-agent 4.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
spy_agent-4.0.0/pyproject.toml
ADDED
@@ -0,0 +1,13 @@
+[build-system]
+requires = ["flit_core >=3.2,<4"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "spy-agent"
+version = "4.0.0"
+description = "Advanced JS Miner & Secret Extractor Agent"
+authors = [{name = "Zain Ali"}]
+dependencies = ["cloudscraper", "beautifulsoup4", "requests"]
+
+[project.scripts]
+agent = "spy_agent.main:run_mission"
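
The [project.scripts] table registers a console command: after installation, typing agent in a shell resolves the entry point "spy_agent.main:run_mission" and calls it. A minimal sketch of what the installer-generated wrapper effectively does (exact wrapper contents vary by installer; this is illustrative, not the package's own code):

    # Rough equivalent of the installed `agent` console script:
    from spy_agent.main import run_mission  # "spy_agent.main:run_mission" = module path : callable

    if __name__ == "__main__":
        run_mission()
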
spy_agent-4.0.0/spy_agent/main.py
ADDED
@@ -0,0 +1,99 @@
+import cloudscraper
+from bs4 import BeautifulSoup
+import json
+import os
+import re
+import requests
+from datetime import datetime
+
+def print_banner():
+    os.system("clear")
+    print("\033[1;36m")
+    print("    .---.    ")
+    print("   /     \   ")
+    print("  | () () |  AGENT ")
+    print("   \  ^  /   JS-MINER ")
+    print("    |||||    ")
+    print("  --- DARK VISION --- ")
+    print("\033[1;31m [ VERSION 4.0 ] \033[0m")
+
+def setup_target_dir(domain):
+    folder_name = f"intel_{domain}"
+    if not os.path.exists(folder_name):
+        os.makedirs(folder_name)
+    if not os.path.exists(f"{folder_name}/downloads"):
+        os.makedirs(f"{folder_name}/downloads")
+    return folder_name
+
+def deep_miner(text):
+    patterns = {
+        "API_Endpoints": r"/(?:api|v1|v2|auth|v3|config|admin|dev)/[\w\-\._~%?#&=]*",
+        "Secrets": r"(?i)(key|token|secret|auth|pwd|password|api_key)[\s:=]+['\"]([\w\-\.]{12,})['\"]",
+        "Emails": r"[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}",
+        "S3_Buckets": r"[\w\-\.]+\.s3\.amazonaws\.com"
+    }
+    found = {}
+    for label, reg in patterns.items():
+        matches = re.findall(reg, text)
+        found[label] = list(set(matches))
+    return found
+
+def download_file(url, folder):
+    try:
+        name = url.split('/')[-1].split('?')[0]
+        if not name.endswith(('.js', '.json', '.xml', '.txt')): name += ".js"
+        path = os.path.join(folder, "downloads", name)
+        r = requests.get(url, timeout=10, stream=True)
+        with open(path, 'wb') as f:
+            for chunk in r.iter_content(chunk_size=8192):
+                f.write(chunk)
+        return True
+    except:
+        return False
+
+def run_mission():
+    print_banner()
+    target = input("\n🎯 TARGET DOMAIN: ").strip()
+    if not target: return
+    domain = target.replace('https://', '').replace('http://', '').split('/')[0]
+    base_url = "https://" + domain
+
+    work_dir = setup_target_dir(domain)
+    scraper = cloudscraper.create_scraper(browser={'browser': 'chrome','platform': 'android','desktop': False})
+
+    print(f"📡 [SCAN] Initiating Agent Recon on {domain}...")
+    try:
+        res = scraper.get(base_url, timeout=15)
+        soup = BeautifulSoup(res.text, 'html.parser')
+        main_intel = deep_miner(res.text)
+
+        js_files = []
+        for script in soup.find_all('script', src=True):
+            src = script['src']
+            full_url = src if src.startswith('http') else base_url + (src if src.startswith('/') else '/' + src)
+            js_files.append(full_url)
+
+        print(f"📦 Found {len(js_files)} JS Files. Analyzing...")
+        js_leaks = []
+        for js_url in js_files[:10]:
+            if download_file(js_url, work_dir):
+                js_res = scraper.get(js_url, timeout=10)
+                leaks = deep_miner(js_res.text)
+                if any(leaks.values()):
+                    js_leaks.append({"file": js_url, "found": leaks})
+
+        report_path = f"{work_dir}/summary_report.txt"
+        with open(report_path, 'w') as f:
+            f.write(f"--- AGENT JS-MINER REPORT: {domain} ---\n")
+            f.write(f"Scanned: {datetime.now()}\n\n[!] SECRETS:\n")
+            for s in main_intel['Secrets']: f.write(f" - {s}\n")
+            f.write("\n[!] ENDPOINTS:\n")
+            for e in main_intel['API_Endpoints']: f.write(f" - {e}\n")
+
+        print(f"\n✅ Mission Accomplished! Data in: {work_dir}/")
+
+    except Exception as e:
+        print(f"❌ Error: {e}")
+
+if __name__ == "__main__":
+    run_mission()
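
For context on the extraction logic in deep_miner: re.findall returns one tuple per match when a pattern contains multiple capture groups, so the "Secrets" pattern yields (keyword, value) pairs for any quoted run of 12 or more word characters following a credential-like keyword, and those tuples are what the report loop writes under [!] SECRETS in summary_report.txt. A minimal, self-contained sketch (not part of the package; the sample input is invented for illustration):

    import re

    # The "Secrets" pattern from deep_miner, copied verbatim:
    SECRET_RE = r"(?i)(key|token|secret|auth|pwd|password|api_key)[\s:=]+['\"]([\w\-\.]{12,})['\"]"

    sample = 'var api_key = "abcd1234efgh5678";'  # hypothetical JS snippet
    print(re.findall(SECRET_RE, sample))
    # -> [('api_key', 'abcd1234efgh5678')]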