anais-apk-forensic 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +249 -0
- package/anais.sh +669 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/dex_payload_hunter.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/entropy_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/find_encrypted_payload.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator_modular.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/so_string_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_enhanced_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_results_processor.cpython-314.pyc +0 -0
- package/analysis_tools/apk_basic_info.py +85 -0
- package/analysis_tools/check_zip_encryption.py +142 -0
- package/analysis_tools/detect_obfuscation.py +650 -0
- package/analysis_tools/dex_payload_hunter.py +734 -0
- package/analysis_tools/entropy_analyzer.py +335 -0
- package/analysis_tools/error_logger.py +75 -0
- package/analysis_tools/find_encrypted_payload.py +485 -0
- package/analysis_tools/fix_apk_headers.py +154 -0
- package/analysis_tools/manifest_analyzer.py +214 -0
- package/analysis_tools/network_analyzer.py +287 -0
- package/analysis_tools/report_generator.py +506 -0
- package/analysis_tools/report_generator_modular.py +885 -0
- package/analysis_tools/sast_scanner.py +412 -0
- package/analysis_tools/so_string_analyzer.py +406 -0
- package/analysis_tools/yara_enhanced_analyzer.py +330 -0
- package/analysis_tools/yara_results_processor.py +368 -0
- package/analyzer_config.json +113 -0
- package/apkid/__init__.py +32 -0
- package/apkid/__pycache__/__init__.cpython-313.pyc +0 -0
- package/apkid/__pycache__/__init__.cpython-314.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-313.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-314.pyc +0 -0
- package/apkid/__pycache__/main.cpython-313.pyc +0 -0
- package/apkid/__pycache__/main.cpython-314.pyc +0 -0
- package/apkid/__pycache__/output.cpython-313.pyc +0 -0
- package/apkid/__pycache__/rules.cpython-313.pyc +0 -0
- package/apkid/apkid.py +266 -0
- package/apkid/main.py +98 -0
- package/apkid/output.py +177 -0
- package/apkid/rules/apk/common.yara +68 -0
- package/apkid/rules/apk/obfuscators.yara +118 -0
- package/apkid/rules/apk/packers.yara +1197 -0
- package/apkid/rules/apk/protectors.yara +301 -0
- package/apkid/rules/dex/abnormal.yara +104 -0
- package/apkid/rules/dex/anti-vm.yara +568 -0
- package/apkid/rules/dex/common.yara +60 -0
- package/apkid/rules/dex/compilers.yara +434 -0
- package/apkid/rules/dex/obfuscators.yara +602 -0
- package/apkid/rules/dex/packers.yara +761 -0
- package/apkid/rules/dex/protectors.yara +520 -0
- package/apkid/rules/dll/common.yara +38 -0
- package/apkid/rules/dll/obfuscators.yara +43 -0
- package/apkid/rules/elf/anti-vm.yara +43 -0
- package/apkid/rules/elf/common.yara +54 -0
- package/apkid/rules/elf/obfuscators.yara +991 -0
- package/apkid/rules/elf/packers.yara +1128 -0
- package/apkid/rules/elf/protectors.yara +794 -0
- package/apkid/rules/res/common.yara +43 -0
- package/apkid/rules/res/obfuscators.yara +46 -0
- package/apkid/rules/res/protectors.yara +46 -0
- package/apkid/rules.py +77 -0
- package/bin/anais +3 -0
- package/dist/cli.js +82 -0
- package/dist/index.js +123 -0
- package/dist/types/index.js +2 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/output.js +44 -0
- package/dist/utils/paths.js +107 -0
- package/docs/ARCHITECTURE.txt +353 -0
- package/docs/Workflow and Reference.md +445 -0
- package/package.json +70 -0
- package/rules/yara_general_rules.yar +323 -0
- package/scripts/dynamic_analysis_helper.sh +334 -0
- package/scripts/frida/dpt_dex_dumper.js +145 -0
- package/scripts/frida/frida_dex_dump.js +145 -0
- package/scripts/frida/frida_hooks.js +437 -0
- package/scripts/frida/frida_websocket_extractor.js +154 -0
- package/scripts/setup.sh +206 -0
- package/scripts/validate_framework.sh +224 -0
- package/src/cli.ts +91 -0
- package/src/index.ts +123 -0
- package/src/types/index.ts +44 -0
- package/src/utils/index.ts +6 -0
- package/src/utils/output.ts +50 -0
- package/src/utils/paths.ts +72 -0
- package/tsconfig.json +14 -0
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
AndroidManifest.xml Analyzer
|
|
4
|
+
Detects security issues and misconfigurations in manifest
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
import json
|
|
9
|
+
import xml.etree.ElementTree as ET
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
class ManifestAnalyzer:
    """Static analyzer for AndroidManifest.xml security misconfigurations.

    Parses a decoded (plain-XML) manifest and collects findings for
    dangerous permissions, unprotected exported components, debuggable
    builds, allowed backups, and cleartext traffic.  Findings are dicts
    with at least 'severity', 'title' and 'description' keys.
    """

    # Clark-notation prefix for android: attributes in ElementTree.
    ANDROID_NS = '{http://schemas.android.com/apk/res/android}'
    # Namespace map for find()/findall() XPath queries.
    NSMAP = {'android': 'http://schemas.android.com/apk/res/android'}

    # Permissions considered dangerous, mapped to a human-readable risk.
    DANGEROUS_PERMS = {
        'READ_SMS': 'Can read SMS messages',
        'RECEIVE_SMS': 'Can intercept incoming SMS',
        'SEND_SMS': 'Can send SMS messages',
        'READ_CONTACTS': 'Can access contacts',
        'WRITE_CONTACTS': 'Can modify contacts',
        'READ_CALL_LOG': 'Can read call history',
        'WRITE_CALL_LOG': 'Can modify call history',
        'RECORD_AUDIO': 'Can record audio',
        'CAMERA': 'Can access camera',
        'ACCESS_FINE_LOCATION': 'Can access precise location',
        'READ_PHONE_STATE': 'Can read phone state and device ID',
        'SYSTEM_ALERT_WINDOW': 'Can draw over other apps',
        'REQUEST_INSTALL_PACKAGES': 'Can install packages',
        'REQUEST_DELETE_PACKAGES': 'Can delete packages',
        'BIND_ACCESSIBILITY_SERVICE': 'Accessibility service (high risk)',
        'BIND_DEVICE_ADMIN': 'Device administrator',
        'WRITE_EXTERNAL_STORAGE': 'Can write to external storage',
    }

    # Subset escalated to 'critical' (abuse enables overlay attacks,
    # silent installs, or full UI control).
    CRITICAL_PERMS = frozenset([
        'BIND_ACCESSIBILITY_SERVICE',
        'SYSTEM_ALERT_WINDOW',
        'REQUEST_INSTALL_PACKAGES',
    ])

    def __init__(self, manifest_path):
        """Parse the manifest; a parse failure becomes a critical finding."""
        self.manifest_path = Path(manifest_path)
        self.findings = []
        self.info = {}

        try:
            self.tree = ET.parse(manifest_path)
            self.root = self.tree.getroot()
        except Exception as e:
            # Record the failure instead of raising so analyze() still
            # returns a well-formed result for the report pipeline.
            self.findings.append({
                'severity': 'critical',
                'title': 'Failed to parse manifest',
                'description': str(e)
            })
            self.tree = None
            self.root = None

    def check_dangerous_permissions(self):
        """Flag requested permissions that appear in DANGEROUS_PERMS."""
        # BUGFIX: the original used `if not self.root:` — an ElementTree
        # Element with no children is falsy, so truth-testing an Element
        # is wrong; compare identity against None instead.
        if self.root is None:
            return

        for perm in self.root.findall('.//uses-permission', self.NSMAP):
            name = perm.get(self.ANDROID_NS + 'name', '')
            # Compare on the short name ("android.permission.READ_SMS" -> "READ_SMS").
            perm_name = name.split('.')[-1]

            if perm_name in self.DANGEROUS_PERMS:
                severity = 'critical' if perm_name in self.CRITICAL_PERMS else 'high'

                self.findings.append({
                    'severity': severity,
                    'category': 'dangerous_permission',
                    'title': f'Dangerous Permission: {perm_name}',
                    'description': self.DANGEROUS_PERMS[perm_name],
                    'permission': name
                })

    def check_exported_components(self):
        """Flag exported components that lack a permission requirement."""
        if self.root is None:
            return

        components = [
            ('activity', 'Activity'),
            ('service', 'Service'),
            ('receiver', 'Receiver'),
            ('provider', 'Provider')
        ]

        for comp_type, comp_name in components:
            for comp in self.root.findall(f'.//{comp_type}', self.NSMAP):
                exported = comp.get(self.ANDROID_NS + 'exported', 'false')
                permission = comp.get(self.ANDROID_NS + 'permission', None)
                name = comp.get(self.ANDROID_NS + 'name', 'Unknown')

                # An intent filter makes a component exported by default
                # on older API levels even without android:exported.
                has_intent_filter = len(comp.findall('.//intent-filter')) > 0

                if (exported == 'true' or has_intent_filter) and not permission:
                    self.findings.append({
                        'severity': 'high',
                        'category': 'exported_component',
                        'title': f'Exported {comp_name} Without Permission',
                        'description': f'{comp_name} is exported but lacks permission protection',
                        'component': name,
                        'type': comp_type
                    })

    def _application_attr(self, attr, default):
        """Return the android:`attr` value of <application>, or None.

        Returns None when the manifest failed to parse or has no
        <application> element; otherwise the attribute value, falling
        back to `default` when the attribute is absent.
        """
        if self.root is None:
            return None
        app = self.root.find('.//application', self.NSMAP)
        if app is None:
            return None
        return app.get(self.ANDROID_NS + attr, default)

    def check_debuggable(self):
        """Flag android:debuggable="true" on <application>."""
        if self._application_attr('debuggable', 'false') == 'true':
            self.findings.append({
                'severity': 'critical',
                'category': 'debuggable',
                'title': 'Application is Debuggable',
                'description': 'App can be debugged, allowing code injection and data extraction'
            })

    def check_backup_allowed(self):
        """Flag allowBackup (the platform default is true when absent)."""
        if self._application_attr('allowBackup', 'true') == 'true':
            self.findings.append({
                'severity': 'medium',
                'category': 'backup_allowed',
                'title': 'Backup Allowed',
                'description': 'App data can be backed up via adb, potentially exposing sensitive data'
            })

    def check_cleartext_traffic(self):
        """Flag usesCleartextTraffic (treated as true when absent)."""
        if self._application_attr('usesCleartextTraffic', 'true') == 'true':
            self.findings.append({
                'severity': 'medium',
                'category': 'cleartext_traffic',
                'title': 'Cleartext Traffic Allowed',
                'description': 'App can use HTTP connections, data can be intercepted'
            })

    def extract_info(self):
        """Record package id, versions and component counts in self.info."""
        if self.root is None:
            return

        self.info = {
            'package': self.root.get('package', 'Unknown'),
            'version_code': self.root.get(self.ANDROID_NS + 'versionCode', 'Unknown'),
            'version_name': self.root.get(self.ANDROID_NS + 'versionName', 'Unknown'),
        }

        # Count components
        self.info['component_counts'] = {
            'activities': len(self.root.findall('.//activity', self.NSMAP)),
            'services': len(self.root.findall('.//service', self.NSMAP)),
            'receivers': len(self.root.findall('.//receiver', self.NSMAP)),
            'providers': len(self.root.findall('.//provider', self.NSMAP)),
        }

    def analyze(self):
        """Run all checks and return {'info': ..., 'findings': ...}."""
        self.extract_info()
        self.check_dangerous_permissions()
        self.check_exported_components()
        self.check_debuggable()
        self.check_backup_allowed()
        self.check_cleartext_traffic()

        return {
            'info': self.info,
            'findings': self.findings
        }
|
|
196
|
+
|
|
197
|
+
def main():
    """CLI entry point: manifest_analyzer.py <manifest_path> <output_json>."""
    args = sys.argv[1:]
    if len(args) < 2:
        print("Usage: manifest_analyzer.py <manifest_path> <output_json>")
        sys.exit(1)

    manifest_path, output_json = args[0], args[1]

    # Run every manifest check and persist the structured result.
    results = ManifestAnalyzer(manifest_path).analyze()

    with open(output_json, 'w') as f:
        json.dump(results, f, indent=2)

    print(f"Manifest analysis complete: {len(results['findings'])} findings")

if __name__ == '__main__':
    main()
|
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Network Artifacts Analyzer
|
|
4
|
+
Extracts and analyzes network-related artifacts from APK
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
import json
|
|
9
|
+
import re
|
|
10
|
+
import argparse
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from collections import defaultdict
|
|
13
|
+
from urllib.parse import urlparse
|
|
14
|
+
|
|
15
|
+
class NetworkAnalyzer:
    """Extracts network indicators from decompiled APK output.

    Scans JADX Java sources and apktool SMALI sources for URLs, domains,
    IP addresses, WebSocket endpoints and potential C2 patterns, and
    accumulates them in the ``findings`` dict.
    """

    def __init__(self, decompiled_dir, jadx_dir):
        """Either directory may be None/empty; that scan is then skipped."""
        self.decompiled_dir = Path(decompiled_dir) if decompiled_dir else None
        self.jadx_dir = Path(jadx_dir) if jadx_dir else None

        self.findings = {
            'urls': [],
            'domains': [],
            'ip_addresses': [],
            'websockets': [],
            'api_endpoints': [],
            'suspicious_urls': [],
            'c2_indicators': []
        }

    def extract_urls(self, content):
        """Extract http(s)/ws(s) URLs from text, dropping obvious noise."""
        # URL patterns
        url_pattern = r'https?://[^\s\'"<>)}\]]+|wss?://[^\s\'"<>)}\]]+'
        urls = re.findall(url_pattern, content, re.IGNORECASE)

        # Filter and clean URLs
        cleaned_urls = []
        for url in urls:
            # Remove trailing punctuation the regex may have captured
            url = url.rstrip('.,;:!?')

            # Must have valid domain structure
            try:
                parsed = urlparse(url)
            except ValueError:
                # BUGFIX: was a bare `except:` which also swallows
                # SystemExit/KeyboardInterrupt; urlparse raises
                # ValueError on malformed input.
                continue

            if parsed.netloc and '.' in parsed.netloc:
                # Skip if netloc looks like a template variable or a
                # local/loopback placeholder
                if not any([
                    parsed.netloc.startswith('$'),
                    parsed.netloc.startswith('{'),
                    'localhost' in parsed.netloc.lower(),
                    '127.0.0.1' in parsed.netloc,
                    parsed.netloc.endswith('.local'),
                ]):
                    cleaned_urls.append(url)

        return cleaned_urls

    def extract_domains(self, content):
        """Extract domain names, filtering Java identifier look-alikes."""
        # Domain pattern (more flexible)
        domain_pattern = r'(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}'
        domains = re.findall(domain_pattern, content)

        # Filter out false positives
        filtered_domains = []
        for domain in domains:
            domain_lower = domain.lower()

            # Skip if it looks like a Java class/method pattern
            if any([
                domain.startswith('super.'),
                domain.startswith('this.'),
                '.on' in domain_lower and len(domain.split('.')) <= 3,  # onCancel, onCreate, etc
                domain.endswith('.java'),
                domain.endswith('.class'),
                domain.endswith('.xml'),
                '.get' in domain_lower and len(domain.split('.')) <= 3,
                '.set' in domain_lower and len(domain.split('.')) <= 3,
                '.is' in domain_lower and len(domain.split('.')) <= 3,
                all(part[0].isupper() for part in domain.split('.') if part),  # ClassName.MethodName
                domain.count('.') > 6,  # Too many dots, likely not a domain
                len(domain.split('.')[-1]) > 10,  # TLD too long
            ]):
                continue

            # Must have valid TLD
            tld = domain.split('.')[-1].lower()
            if tld in ['com', 'net', 'org', 'io', 'co', 'app', 'dev', 'ai', 'me', 'info',
                       'xyz', 'online', 'site', 'top', 'cn', 'ru', 'de', 'uk', 'jp', 'kr',
                       'in', 'br', 'au', 'fr', 'it', 'es', 'nl', 'pl', 'se', 'no', 'fi',
                       'id', 'my', 'sg', 'th', 'vn', 'ph', 'tw', 'hk']:
                filtered_domains.append(domain)

        return filtered_domains

    def extract_ip_addresses(self, content):
        """Extract dotted-quad IPv4 addresses with valid octet ranges."""
        ip_pattern = r'\b(?:\d{1,3}\.){3}\d{1,3}\b'
        ips = re.findall(ip_pattern, content)
        # Filter valid IPs (each octet 0-255)
        valid_ips = [ip for ip in ips if all(0 <= int(octet) <= 255 for octet in ip.split('.'))]
        return valid_ips

    def is_suspicious_url(self, url):
        """Return (True, reason) when the URL looks suspicious, else (False, None)."""
        suspicious_tlds = ['.top', '.xyz', '.tk', '.ml', '.ga', '.cf', '.gq', '.pw']
        suspicious_keywords = ['api', 'upload', 'download', 'c2', 'cmd', 'command', 'bot']

        parsed = urlparse(url)
        domain = parsed.netloc.lower()
        path = parsed.path.lower()

        # Check TLD
        for tld in suspicious_tlds:
            if domain.endswith(tld):
                return True, f'Suspicious TLD: {tld}'

        # Check keywords in domain or path
        for keyword in suspicious_keywords:
            if keyword in domain or keyword in path:
                return True, f'Suspicious keyword: {keyword}'

        # Check for WebSocket
        if url.startswith('wss://') or url.startswith('ws://'):
            return True, 'WebSocket connection'

        return False, None

    def scan_java_files(self):
        """Scan JADX Java sources and populate urls/domains/ip_addresses."""
        if not self.jadx_dir or not self.jadx_dir.exists():
            return

        print("Scanning Java files for network artifacts...")

        java_files = list(self.jadx_dir.rglob('*.java'))

        all_urls = set()
        all_domains = set()
        all_ips = set()

        for java_file in java_files:
            try:
                content = java_file.read_text(errors='ignore')

                # Extract URLs
                urls = self.extract_urls(content)
                for url in urls:
                    all_urls.add(url)

                    # Check if suspicious
                    is_susp, reason = self.is_suspicious_url(url)
                    if is_susp:
                        self.findings['suspicious_urls'].append({
                            'url': url,
                            'reason': reason,
                            'file': str(java_file.relative_to(self.jadx_dir))
                        })

                    # Check for WebSocket
                    if url.startswith('ws'):
                        self.findings['websockets'].append({
                            'url': url,
                            'file': str(java_file.relative_to(self.jadx_dir))
                        })

                # Extract domains
                domains = self.extract_domains(content)
                all_domains.update(domains)

                # Extract IPs
                ips = self.extract_ip_addresses(content)
                all_ips.update(ips)

            except Exception:
                # Best-effort scan: skip unreadable/undecodable files.
                continue

        self.findings['urls'] = list(all_urls)
        self.findings['domains'] = list(all_domains)
        self.findings['ip_addresses'] = list(all_ips)

    def scan_smali_files(self):
        """Scan SMALI string constants for URLs."""
        # BUGFIX: also require the directory to exist — iterdir() raises
        # FileNotFoundError on a missing path (scan_java_files already
        # guarded against this; keep the two scans consistent).
        if not self.decompiled_dir or not self.decompiled_dir.exists():
            return

        print("Scanning SMALI files for network artifacts...")

        smali_dirs = [d for d in self.decompiled_dir.iterdir() if d.is_dir() and d.name.startswith('smali')]

        for smali_dir in smali_dirs:
            smali_files = list(smali_dir.rglob('*.smali'))[:200]  # Limit

            for smali_file in smali_files:
                try:
                    content = smali_file.read_text(errors='ignore')

                    # Look for string constants
                    string_pattern = r'const-string.*?"([^"]+)"'
                    strings = re.findall(string_pattern, content)

                    for string in strings:
                        # Check if it's a URL
                        if re.match(r'https?://', string) or re.match(r'wss?://', string):
                            self.findings['urls'].append(string)

                            is_susp, reason = self.is_suspicious_url(string)
                            if is_susp:
                                self.findings['suspicious_urls'].append({
                                    'url': string,
                                    'reason': reason,
                                    'file': str(smali_file.relative_to(self.decompiled_dir))
                                })

                except Exception:
                    continue

    def detect_c2_patterns(self):
        """Detect C2 communication patterns in collected URLs/domains."""

        # Check URLs for C2 indicators
        c2_patterns = [
            r'/api/.*/(upload|download|command|cmd)',
            r'/bot/',
            r'/c2/',
            r'/panel/',
            r'/gate/',
        ]

        for url in self.findings['urls']:
            for pattern in c2_patterns:
                if re.search(pattern, url, re.IGNORECASE):
                    self.findings['c2_indicators'].append({
                        'url': url,
                        'pattern': pattern,
                        'description': 'Potential C2 endpoint'
                    })

        # Check for DGA-like domains (Domain Generation Algorithm)
        for domain in self.findings['domains']:
            # Simple heuristic: long subdomain with random-looking characters
            parts = domain.split('.')
            for part in parts[:-2]:  # Exclude TLD and main domain
                if len(part) > 15 and sum(c.isdigit() for c in part) > 3:
                    self.findings['c2_indicators'].append({
                        'domain': domain,
                        'description': 'Potential DGA domain (long random subdomain)'
                    })

    def analyze(self):
        """Run all scans, deduplicate flat lists, and return findings."""
        self.scan_java_files()
        self.scan_smali_files()
        self.detect_c2_patterns()

        # Deduplicate
        self.findings['urls'] = list(set(self.findings['urls']))
        self.findings['domains'] = list(set(self.findings['domains']))
        self.findings['ip_addresses'] = list(set(self.findings['ip_addresses']))

        return self.findings
|
|
264
|
+
|
|
265
|
+
def main():
    """CLI entry point for the network artifacts analyzer."""
    parser = argparse.ArgumentParser(description='Network Artifacts Analyzer')
    parser.add_argument('--decompiled', help='Path to decompiled directory')
    parser.add_argument('--jadx', help='Path to JADX output')
    parser.add_argument('--output', required=True, help='Output JSON file')
    args = parser.parse_args()

    # Run every scan and persist the structured result.
    findings = NetworkAnalyzer(args.decompiled, args.jadx).analyze()

    with open(args.output, 'w') as f:
        json.dump(findings, f, indent=2)

    # Console summary of what was collected.
    print(f"\nNetwork Analysis Results:")
    print(f"  URLs found: {len(findings['urls'])}")
    print(f"  Domains found: {len(findings['domains'])}")
    print(f"  IP addresses: {len(findings['ip_addresses'])}")
    print(f"  Suspicious URLs: {len(findings['suspicious_urls'])}")
    print(f"  C2 indicators: {len(findings['c2_indicators'])}")

if __name__ == '__main__':
    main()
|