anais-apk-forensic 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +249 -0
- package/anais.sh +669 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/dex_payload_hunter.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/entropy_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/find_encrypted_payload.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator_modular.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/so_string_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_enhanced_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_results_processor.cpython-314.pyc +0 -0
- package/analysis_tools/apk_basic_info.py +85 -0
- package/analysis_tools/check_zip_encryption.py +142 -0
- package/analysis_tools/detect_obfuscation.py +650 -0
- package/analysis_tools/dex_payload_hunter.py +734 -0
- package/analysis_tools/entropy_analyzer.py +335 -0
- package/analysis_tools/error_logger.py +75 -0
- package/analysis_tools/find_encrypted_payload.py +485 -0
- package/analysis_tools/fix_apk_headers.py +154 -0
- package/analysis_tools/manifest_analyzer.py +214 -0
- package/analysis_tools/network_analyzer.py +287 -0
- package/analysis_tools/report_generator.py +506 -0
- package/analysis_tools/report_generator_modular.py +885 -0
- package/analysis_tools/sast_scanner.py +412 -0
- package/analysis_tools/so_string_analyzer.py +406 -0
- package/analysis_tools/yara_enhanced_analyzer.py +330 -0
- package/analysis_tools/yara_results_processor.py +368 -0
- package/analyzer_config.json +113 -0
- package/apkid/__init__.py +32 -0
- package/apkid/__pycache__/__init__.cpython-313.pyc +0 -0
- package/apkid/__pycache__/__init__.cpython-314.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-313.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-314.pyc +0 -0
- package/apkid/__pycache__/main.cpython-313.pyc +0 -0
- package/apkid/__pycache__/main.cpython-314.pyc +0 -0
- package/apkid/__pycache__/output.cpython-313.pyc +0 -0
- package/apkid/__pycache__/rules.cpython-313.pyc +0 -0
- package/apkid/apkid.py +266 -0
- package/apkid/main.py +98 -0
- package/apkid/output.py +177 -0
- package/apkid/rules/apk/common.yara +68 -0
- package/apkid/rules/apk/obfuscators.yara +118 -0
- package/apkid/rules/apk/packers.yara +1197 -0
- package/apkid/rules/apk/protectors.yara +301 -0
- package/apkid/rules/dex/abnormal.yara +104 -0
- package/apkid/rules/dex/anti-vm.yara +568 -0
- package/apkid/rules/dex/common.yara +60 -0
- package/apkid/rules/dex/compilers.yara +434 -0
- package/apkid/rules/dex/obfuscators.yara +602 -0
- package/apkid/rules/dex/packers.yara +761 -0
- package/apkid/rules/dex/protectors.yara +520 -0
- package/apkid/rules/dll/common.yara +38 -0
- package/apkid/rules/dll/obfuscators.yara +43 -0
- package/apkid/rules/elf/anti-vm.yara +43 -0
- package/apkid/rules/elf/common.yara +54 -0
- package/apkid/rules/elf/obfuscators.yara +991 -0
- package/apkid/rules/elf/packers.yara +1128 -0
- package/apkid/rules/elf/protectors.yara +794 -0
- package/apkid/rules/res/common.yara +43 -0
- package/apkid/rules/res/obfuscators.yara +46 -0
- package/apkid/rules/res/protectors.yara +46 -0
- package/apkid/rules.py +77 -0
- package/bin/anais +3 -0
- package/dist/cli.js +82 -0
- package/dist/index.js +123 -0
- package/dist/types/index.js +2 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/output.js +44 -0
- package/dist/utils/paths.js +107 -0
- package/docs/ARCHITECTURE.txt +353 -0
- package/docs/Workflow and Reference.md +445 -0
- package/package.json +70 -0
- package/rules/yara_general_rules.yar +323 -0
- package/scripts/dynamic_analysis_helper.sh +334 -0
- package/scripts/frida/dpt_dex_dumper.js +145 -0
- package/scripts/frida/frida_dex_dump.js +145 -0
- package/scripts/frida/frida_hooks.js +437 -0
- package/scripts/frida/frida_websocket_extractor.js +154 -0
- package/scripts/setup.sh +206 -0
- package/scripts/validate_framework.sh +224 -0
- package/src/cli.ts +91 -0
- package/src/index.ts +123 -0
- package/src/types/index.ts +44 -0
- package/src/utils/index.ts +6 -0
- package/src/utils/output.ts +50 -0
- package/src/utils/paths.ts +72 -0
- package/tsconfig.json +14 -0
|
@@ -0,0 +1,506 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Comprehensive Report Generator
|
|
4
|
+
Generates detailed markdown and JSON reports from analysis results
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
import json
|
|
9
|
+
import argparse
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
|
|
13
|
+
class ReportGenerator:
    """Aggregates per-tool analysis JSON into markdown and JSON reports."""

    def __init__(self, workspace, temp_dir, apk_name, sha256):
        # Identity of the sample being reported on.
        self.apk_name = apk_name
        self.sha256 = sha256
        # workspace = analysis tree; temp_dir = where tool JSON outputs live.
        self.workspace = Path(workspace)
        self.temp_dir = Path(temp_dir)
        # One report-wide timestamp so every section agrees.
        self.timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        # Eagerly pull in all per-tool JSON results.
        self.load_results()
|
|
23
|
+
|
|
24
|
+
def load_results(self):
    """Load every per-tool JSON result file from the temp directory.

    Populates self.results with one entry per analysis stage. A missing
    file yields an empty dict; an unreadable/unparsable file yields
    {'error': <message>} so report sections can surface the failure.
    """
    source_map = {
        'basic_info': 'basic_info.json',
        'encryption_check': 'encryption_check.json',
        'obfuscation': 'obfuscation_report.json',
        'sast': 'sast_findings.json',
        'manifest': 'manifest_findings.json',
        'network': 'network_findings.json',
    }

    def read_one(path):
        # Absent file is a normal condition (stage skipped), not an error.
        if not path.exists():
            return {}
        try:
            return json.loads(path.read_text())
        except Exception as exc:
            return {'error': str(exc)}

    self.results = {key: read_one(self.temp_dir / filename)
                    for key, filename in source_map.items()}
|
|
47
|
+
|
|
48
|
+
def generate_markdown(self, output_file):
    """Render the full markdown report and write it to *output_file*.

    Parent directories are created on demand. Section order is fixed.
    """
    md = [
        "# Android Malware Analysis Report\n",
        f"**Generated:** {self.timestamp}\n",
        f"**APK:** `{self.apk_name}`\n",
        f"**SHA256:** `{self.sha256}`\n",
        "\n---\n",
    ]

    # (section header, renderer) pairs, in report order.
    sections = [
        ("\n## 📊 Executive Summary\n", self._add_executive_summary),
        ("\n## 📱 APK Information\n", self._add_apk_info),
        ("\n## 🔒 Protection & Obfuscation Analysis\n", self._add_obfuscation_info),
        ("\n## 🚨 Security Findings (SAST)\n", self._add_sast_findings),
        ("\n## 📋 AndroidManifest Analysis\n", self._add_manifest_findings),
        ("\n## 🌐 Network Analysis\n", self._add_network_findings),
        ("\n## 🎯 YARA Scan Results\n", self._add_yara_results),
        ("\n## 💡 Recommendations\n", self._add_recommendations),
    ]
    for header, render in sections:
        md.append(header)
        render(md)

    # Ensure the destination directory exists, then write once.
    target = Path(output_file)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text('\n'.join(md))
|
|
98
|
+
|
|
99
|
+
def _add_executive_summary(self, md):
|
|
100
|
+
"""Add executive summary"""
|
|
101
|
+
|
|
102
|
+
# Calculate risk level
|
|
103
|
+
risk_score = self.results.get('sast', {}).get('summary', {}).get('risk_score', 0)
|
|
104
|
+
|
|
105
|
+
if risk_score >= 70:
|
|
106
|
+
risk_level = "🔴 **CRITICAL**"
|
|
107
|
+
elif risk_score >= 40:
|
|
108
|
+
risk_level = "🟠 **HIGH**"
|
|
109
|
+
elif risk_score >= 20:
|
|
110
|
+
risk_level = "🟡 **MEDIUM**"
|
|
111
|
+
else:
|
|
112
|
+
risk_level = "🟢 **LOW**"
|
|
113
|
+
|
|
114
|
+
md.append(f"**Overall Risk Level:** {risk_level} (Score: {risk_score}/100)\n")
|
|
115
|
+
|
|
116
|
+
# Key findings
|
|
117
|
+
obf_type = self.results.get('obfuscation', {}).get('type', 'none')
|
|
118
|
+
total_findings = self.results.get('sast', {}).get('summary', {}).get('total_findings', 0)
|
|
119
|
+
critical_findings = len(self.results.get('sast', {}).get('critical', []))
|
|
120
|
+
|
|
121
|
+
md.append(f"- **Obfuscation Type:** `{obf_type.upper()}`")
|
|
122
|
+
md.append(f"- **Total Security Findings:** {total_findings}")
|
|
123
|
+
md.append(f"- **Critical Issues:** {critical_findings}")
|
|
124
|
+
md.append("")
|
|
125
|
+
|
|
126
|
+
def _add_apk_info(self, md):
|
|
127
|
+
"""Add APK basic information"""
|
|
128
|
+
|
|
129
|
+
info = self.results.get('basic_info', {})
|
|
130
|
+
|
|
131
|
+
if 'error' in info:
|
|
132
|
+
md.append(f"⚠️ Error extracting APK info: {info['error']}\n")
|
|
133
|
+
return
|
|
134
|
+
|
|
135
|
+
md.append("| Property | Value |")
|
|
136
|
+
md.append("|----------|-------|")
|
|
137
|
+
md.append(f"| Package Name | `{info.get('package_name', 'N/A')}` |")
|
|
138
|
+
md.append(f"| App Name | {info.get('app_name', 'N/A')} |")
|
|
139
|
+
md.append(f"| Version | {info.get('version_name', 'N/A')} ({info.get('version_code', 'N/A')}) |")
|
|
140
|
+
md.append(f"| Min SDK | {info.get('min_sdk', 'N/A')} |")
|
|
141
|
+
md.append(f"| Target SDK | {info.get('target_sdk', 'N/A')} |")
|
|
142
|
+
md.append(f"| Signed | {info.get('is_signed', False)} |")
|
|
143
|
+
md.append(f"| Permissions | {len(info.get('permissions', []))} |")
|
|
144
|
+
md.append(f"| Activities | {len(info.get('activities', []))} |")
|
|
145
|
+
md.append(f"| Services | {len(info.get('services', []))} |")
|
|
146
|
+
md.append(f"| Receivers | {len(info.get('receivers', []))} |")
|
|
147
|
+
md.append("")
|
|
148
|
+
|
|
149
|
+
# Dangerous permissions
|
|
150
|
+
if info.get('permissions'):
|
|
151
|
+
md.append("\n### Key Permissions\n")
|
|
152
|
+
dangerous = [p for p in info['permissions'] if any(d in p for d in ['SMS', 'CALL', 'CONTACTS', 'LOCATION', 'CAMERA', 'MICROPHONE', 'ACCESSIBILITY'])]
|
|
153
|
+
if dangerous:
|
|
154
|
+
for perm in dangerous[:10]:
|
|
155
|
+
md.append(f"- `{perm}`")
|
|
156
|
+
md.append("")
|
|
157
|
+
|
|
158
|
+
def _add_obfuscation_info(self, md):
|
|
159
|
+
"""Add obfuscation analysis"""
|
|
160
|
+
|
|
161
|
+
obf = self.results.get('obfuscation', {})
|
|
162
|
+
|
|
163
|
+
obf_type = obf.get('type', 'none')
|
|
164
|
+
confidence = obf.get('confidence', 0)
|
|
165
|
+
|
|
166
|
+
md.append(f"**Type:** `{obf_type.upper()}`")
|
|
167
|
+
md.append(f"**Confidence:** {confidence}%\n")
|
|
168
|
+
|
|
169
|
+
# Indicators
|
|
170
|
+
indicators = obf.get('indicators', [])
|
|
171
|
+
if indicators:
|
|
172
|
+
md.append("### Indicators Detected\n")
|
|
173
|
+
for indicator in indicators:
|
|
174
|
+
md.append(f"- {indicator}")
|
|
175
|
+
md.append("")
|
|
176
|
+
|
|
177
|
+
# Recommendations
|
|
178
|
+
recommendations = obf.get('recommendations', [])
|
|
179
|
+
if recommendations:
|
|
180
|
+
md.append("### Analysis Recommendations\n")
|
|
181
|
+
for rec in recommendations:
|
|
182
|
+
md.append(f"{rec} ")
|
|
183
|
+
md.append("")
|
|
184
|
+
|
|
185
|
+
def _add_sast_findings(self, md):
    """Append the SAST section: severity summary table, a 5-item preview,
    then every finding grouped by severity and category.

    Reads self.results['sast'], which holds a 'summary' dict plus one list
    of finding dicts per severity key ('critical', 'high', ...).
    """

    sast = self.results.get('sast', {})
    summary = sast.get('summary', {})

    # Summary table
    md.append("### Summary\n")
    md.append("| Severity | Count |")
    md.append("|----------|-------|")
    total_findings = 0
    for severity in ['critical', 'high', 'medium', 'low']:
        count = summary.get('by_severity', {}).get(severity, 0)
        total_findings += count
        emoji = {'critical': '🔴', 'high': '🟠', 'medium': '🟡', 'low': '🔵'}
        md.append(f"| {emoji[severity]} {severity.title()} | {count} |")
    md.append("")

    md.append(f"**Total Findings:** {total_findings}\n")

    # Sneak peek - top 5 critical/high findings
    md.append("### 👀 Top Priority Issues (Sneak Peek)\n")
    preview_count = 0
    for severity in ['critical', 'high']:
        findings = sast.get(severity, [])
        for finding in findings[:3]:  # Max 3 per severity for preview
            if preview_count >= 5:
                break
            preview_count += 1
            md.append(f"**{preview_count}. [{severity.upper()}] {finding.get('title', 'Unknown')}**")
            md.append(f"- {finding.get('description', 'N/A')}")
            md.append(f"- File: `{finding.get('file', 'N/A')}`")
            md.append("")
        if preview_count >= 5:
            break

    if total_findings > 5:
        md.append(f"*📜 See all {total_findings} findings in detail below*\n")

    md.append("---\n")

    # Detailed findings - ALL items grouped by severity
    # NOTE(review): 'low' findings are counted in the summary total but not
    # detailed here — looks intentional (noise reduction); confirm.
    md.append("### 📋 All SAST Findings (Detailed)\n")

    for severity in ['critical', 'high', 'medium']:
        findings = sast.get(severity, [])
        if findings:
            emoji = {'critical': '🔴', 'high': '🟠', 'medium': '🟡'}
            md.append(f"\n#### {emoji[severity]} {severity.upper()} Severity ({len(findings)} findings)\n")

            # Group by category
            by_category = {}
            for finding in findings:
                cat = finding.get('category', 'other')
                if cat not in by_category:
                    by_category[cat] = []
                by_category[cat].append(finding)

            for category, cat_findings in by_category.items():
                md.append(f"\n##### {category.replace('_', ' ').title()} ({len(cat_findings)} issues)\n")

                for i, finding in enumerate(cat_findings, 1):
                    md.append(f"**{i}. {finding.get('title', 'Unknown')}**")
                    md.append(f"- **Description:** {finding.get('description', 'N/A')}")
                    md.append(f"- **File:** `{finding.get('file', 'N/A')}`")
                    if finding.get('line'):
                        md.append(f"- **Line:** {finding['line']}")

                    # Fenced as java — decompiled APK sources; snippet verbatim.
                    if finding.get('code'):
                        md.append(f"\n```java")
                        md.append(finding['code'])
                        md.append(f"```\n")
                    else:
                        md.append("")
|
|
259
|
+
|
|
260
|
+
def _add_manifest_findings(self, md):
|
|
261
|
+
"""Add manifest analysis findings"""
|
|
262
|
+
|
|
263
|
+
manifest = self.results.get('manifest', {})
|
|
264
|
+
findings = manifest.get('findings', [])
|
|
265
|
+
|
|
266
|
+
if not findings:
|
|
267
|
+
md.append("✅ No significant issues found in AndroidManifest.xml\n")
|
|
268
|
+
return
|
|
269
|
+
|
|
270
|
+
# Count by severity
|
|
271
|
+
total = len(findings)
|
|
272
|
+
by_severity = {'critical': [], 'high': [], 'medium': [], 'low': []}
|
|
273
|
+
for finding in findings:
|
|
274
|
+
sev = finding.get('severity', 'low')
|
|
275
|
+
by_severity[sev].append(finding)
|
|
276
|
+
|
|
277
|
+
md.append(f"**Total Issues:** {total}\n")
|
|
278
|
+
|
|
279
|
+
# Sneak peek - top 5 issues
|
|
280
|
+
md.append("### 👀 Top Issues (Sneak Peek)\n")
|
|
281
|
+
preview_count = 0
|
|
282
|
+
for severity in ['critical', 'high', 'medium']:
|
|
283
|
+
for finding in by_severity[severity][:5]:
|
|
284
|
+
if preview_count >= 5:
|
|
285
|
+
break
|
|
286
|
+
preview_count += 1
|
|
287
|
+
emoji = {'critical': '🔴', 'high': '🟠', 'medium': '🟡'}
|
|
288
|
+
md.append(f"**{preview_count}. [{severity.upper()}] {finding.get('title', 'Unknown')}**")
|
|
289
|
+
md.append(f"- {finding.get('description', 'N/A')}")
|
|
290
|
+
md.append("")
|
|
291
|
+
if preview_count >= 5:
|
|
292
|
+
break
|
|
293
|
+
|
|
294
|
+
if total > 5:
|
|
295
|
+
md.append(f"*📜 See all {total} issues in detail below*\n")
|
|
296
|
+
|
|
297
|
+
md.append("---\n")
|
|
298
|
+
|
|
299
|
+
# All findings grouped by severity
|
|
300
|
+
md.append("### 📋 All Manifest Issues (Detailed)\n")
|
|
301
|
+
|
|
302
|
+
for severity in ['critical', 'high', 'medium']:
|
|
303
|
+
if by_severity[severity]:
|
|
304
|
+
emoji = {'critical': '🔴', 'high': '🟠', 'medium': '🟡'}
|
|
305
|
+
md.append(f"\n#### {emoji[severity]} {severity.title()} Issues ({len(by_severity[severity])})\n")
|
|
306
|
+
|
|
307
|
+
for i, finding in enumerate(by_severity[severity], 1):
|
|
308
|
+
md.append(f"**{i}. {finding.get('title', 'Unknown')}**")
|
|
309
|
+
md.append(f"- **Description:** {finding.get('description', 'N/A')}")
|
|
310
|
+
if finding.get('permission'):
|
|
311
|
+
md.append(f"- **Permission:** `{finding['permission']}`")
|
|
312
|
+
if finding.get('component'):
|
|
313
|
+
md.append(f"- **Component:** `{finding['component']}`")
|
|
314
|
+
md.append("")
|
|
315
|
+
|
|
316
|
+
def _add_network_findings(self, md):
|
|
317
|
+
"""Add network analysis findings"""
|
|
318
|
+
|
|
319
|
+
network = self.results.get('network', {})
|
|
320
|
+
|
|
321
|
+
# URLs
|
|
322
|
+
urls = network.get('urls', [])
|
|
323
|
+
if urls:
|
|
324
|
+
md.append(f"### 🔗 URLs Found\n")
|
|
325
|
+
md.append(f"**Total URLs:** {len(urls)}\n")
|
|
326
|
+
|
|
327
|
+
# Sneak peek - first 10
|
|
328
|
+
md.append("#### 👀 Sample URLs (First 10)\n")
|
|
329
|
+
for url in urls[:10]:
|
|
330
|
+
md.append(f"- `{url}`")
|
|
331
|
+
|
|
332
|
+
if len(urls) > 10:
|
|
333
|
+
md.append(f"\n*📜 See all {len(urls)} URLs below*\n")
|
|
334
|
+
md.append("---\n")
|
|
335
|
+
md.append(f"#### 📋 All URLs ({len(urls)} total)\n")
|
|
336
|
+
for i, url in enumerate(urls, 1):
|
|
337
|
+
md.append(f"{i}. `{url}`")
|
|
338
|
+
md.append("")
|
|
339
|
+
|
|
340
|
+
# Suspicious URLs
|
|
341
|
+
suspicious = network.get('suspicious_urls', [])
|
|
342
|
+
if suspicious:
|
|
343
|
+
md.append(f"\n### 🚨 Suspicious URLs\n")
|
|
344
|
+
md.append(f"**Total Suspicious:** {len(suspicious)}\n")
|
|
345
|
+
|
|
346
|
+
for i, susp in enumerate(suspicious, 1):
|
|
347
|
+
md.append(f"**{i}. {susp.get('url', 'N/A')}**")
|
|
348
|
+
md.append(f"- **Reason:** {susp.get('reason', 'N/A')}")
|
|
349
|
+
if susp.get('file'):
|
|
350
|
+
md.append(f"- **File:** `{susp.get('file', 'N/A')}`")
|
|
351
|
+
md.append("")
|
|
352
|
+
|
|
353
|
+
# WebSockets
|
|
354
|
+
websockets = network.get('websockets', [])
|
|
355
|
+
if websockets:
|
|
356
|
+
md.append(f"\n### 🔌 WebSocket Connections\n")
|
|
357
|
+
md.append(f"**Total WebSockets:** {len(websockets)}\n")
|
|
358
|
+
|
|
359
|
+
for i, ws in enumerate(websockets, 1):
|
|
360
|
+
md.append(f"**{i}. {ws.get('url', 'N/A')}**")
|
|
361
|
+
md.append(f"- **File:** `{ws.get('file', 'N/A')}`")
|
|
362
|
+
md.append("")
|
|
363
|
+
|
|
364
|
+
# C2 Indicators
|
|
365
|
+
c2 = network.get('c2_indicators', [])
|
|
366
|
+
if c2:
|
|
367
|
+
md.append(f"\n### 🎯 C2 Communication Indicators\n")
|
|
368
|
+
md.append(f"**Total Indicators:** {len(c2)}\n")
|
|
369
|
+
|
|
370
|
+
for i, indicator in enumerate(c2, 1):
|
|
371
|
+
if 'url' in indicator:
|
|
372
|
+
md.append(f"**{i}. URL:** `{indicator['url']}`")
|
|
373
|
+
elif 'domain' in indicator:
|
|
374
|
+
md.append(f"**{i}. Domain:** `{indicator['domain']}`")
|
|
375
|
+
md.append(f"- {indicator.get('description', 'N/A')}")
|
|
376
|
+
md.append("")
|
|
377
|
+
|
|
378
|
+
# Domains and IPs
|
|
379
|
+
domains = network.get('domains', [])
|
|
380
|
+
ips = network.get('ip_addresses', [])
|
|
381
|
+
|
|
382
|
+
if domains:
|
|
383
|
+
unique_domains = list(set([d for d in domains if d and len(d) > 3]))
|
|
384
|
+
md.append(f"\n### 🌍 Unique Domains\n")
|
|
385
|
+
md.append(f"**Total Domains:** {len(unique_domains)}\n")
|
|
386
|
+
|
|
387
|
+
# Sneak peek - first 15
|
|
388
|
+
if len(unique_domains) > 15:
|
|
389
|
+
md.append(f"#### 👀 Sample Domains (First 15)\n")
|
|
390
|
+
for domain in unique_domains[:15]:
|
|
391
|
+
md.append(f"- `{domain}`")
|
|
392
|
+
md.append(f"\n*📜 See all {len(unique_domains)} domains below*\n")
|
|
393
|
+
md.append("---\n")
|
|
394
|
+
md.append(f"#### 📋 All Domains ({len(unique_domains)} total)\n")
|
|
395
|
+
for i, domain in enumerate(unique_domains, 1):
|
|
396
|
+
md.append(f"{i}. `{domain}`")
|
|
397
|
+
else:
|
|
398
|
+
for domain in unique_domains:
|
|
399
|
+
md.append(f"- `{domain}`")
|
|
400
|
+
md.append("")
|
|
401
|
+
|
|
402
|
+
if ips:
|
|
403
|
+
unique_ips = list(set(ips))
|
|
404
|
+
md.append(f"\n### 📍 IP Addresses\n")
|
|
405
|
+
md.append(f"**Total IPs:** {len(unique_ips)}\n")
|
|
406
|
+
|
|
407
|
+
for i, ip in enumerate(unique_ips, 1):
|
|
408
|
+
md.append(f"{i}. `{ip}`")
|
|
409
|
+
md.append("")
|
|
410
|
+
|
|
411
|
+
def _add_yara_results(self, md):
|
|
412
|
+
"""Add YARA scan results"""
|
|
413
|
+
|
|
414
|
+
yara_files = [
|
|
415
|
+
'yara_apk_results.txt',
|
|
416
|
+
'yara_decompiled_results.txt',
|
|
417
|
+
'yara_jadx_results.txt'
|
|
418
|
+
]
|
|
419
|
+
|
|
420
|
+
has_results = False
|
|
421
|
+
|
|
422
|
+
for yara_file in yara_files:
|
|
423
|
+
filepath = self.temp_dir / yara_file
|
|
424
|
+
if filepath.exists():
|
|
425
|
+
content = filepath.read_text(errors='ignore').strip()
|
|
426
|
+
if content:
|
|
427
|
+
has_results = True
|
|
428
|
+
md.append(f"\n### {yara_file.replace('_', ' ').replace('.txt', '').title()}\n")
|
|
429
|
+
md.append(f"```")
|
|
430
|
+
md.append(content[:2000]) # Limit output
|
|
431
|
+
md.append(f"```\n")
|
|
432
|
+
|
|
433
|
+
if not has_results:
|
|
434
|
+
md.append("No YARA rule matches found.\n")
|
|
435
|
+
|
|
436
|
+
def _add_recommendations(self, md):
|
|
437
|
+
"""Add analysis recommendations"""
|
|
438
|
+
|
|
439
|
+
obf = self.results.get('obfuscation', {})
|
|
440
|
+
recommendations = obf.get('recommendations', [])
|
|
441
|
+
|
|
442
|
+
if recommendations:
|
|
443
|
+
md.append("### Based on Protection Type\n")
|
|
444
|
+
for rec in recommendations:
|
|
445
|
+
md.append(f"{rec} ")
|
|
446
|
+
md.append("")
|
|
447
|
+
|
|
448
|
+
# General recommendations
|
|
449
|
+
md.append("\n### General Security Recommendations\n")
|
|
450
|
+
md.append("1. **Dynamic Analysis:** Run the app in a controlled environment with Frida hooks")
|
|
451
|
+
md.append("2. **Network Monitoring:** Capture and analyze network traffic")
|
|
452
|
+
md.append("3. **Behavioral Analysis:** Monitor file system, IPC, and system calls")
|
|
453
|
+
md.append("4. **Memory Dump:** Extract and analyze runtime memory")
|
|
454
|
+
md.append("5. **API Hooking:** Hook sensitive APIs to understand behavior")
|
|
455
|
+
md.append("")
|
|
456
|
+
|
|
457
|
+
def generate_json(self, output_file):
    """Serialize report metadata plus all loaded results to *output_file*."""
    report = {
        'metadata': {
            'apk_name': self.apk_name,
            'sha256': self.sha256,
            'timestamp': self.timestamp,
            'analyzer_version': '1.0',
        },
        'results': self.results,
    }

    # Create the destination directory on demand, then write pretty-printed JSON.
    target = Path(output_file)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(json.dumps(report, indent=2))
|
|
476
|
+
|
|
477
|
+
def main():
    """CLI entry point: parse arguments, then emit both report formats."""
    parser = argparse.ArgumentParser(description='Report Generator')
    # All flags are mandatory: (flag, help text) pairs.
    for flag, desc in (
        ('--workspace', 'Workspace directory'),
        ('--temp', 'Temp directory with results'),
        ('--output', 'Output markdown file'),
        ('--json', 'Output JSON file'),
        ('--apk-name', 'APK filename'),
        ('--sha256', 'APK SHA256 hash'),
    ):
        parser.add_argument(flag, required=True, help=desc)
    args = parser.parse_args()

    generator = ReportGenerator(
        args.workspace,
        args.temp,
        args.apk_name,
        args.sha256
    )

    print("Generating markdown report...")
    generator.generate_markdown(args.output)

    print("Generating JSON report...")
    generator.generate_json(args.json)

    print("Reports generated successfully!")
    print(f"  Markdown: {args.output}")
    print(f"  JSON: {args.json}")


if __name__ == '__main__':
    main()
|