anais-apk-forensic 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +249 -0
- package/anais.sh +669 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/dex_payload_hunter.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/entropy_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/find_encrypted_payload.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator_modular.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/so_string_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_enhanced_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_results_processor.cpython-314.pyc +0 -0
- package/analysis_tools/apk_basic_info.py +85 -0
- package/analysis_tools/check_zip_encryption.py +142 -0
- package/analysis_tools/detect_obfuscation.py +650 -0
- package/analysis_tools/dex_payload_hunter.py +734 -0
- package/analysis_tools/entropy_analyzer.py +335 -0
- package/analysis_tools/error_logger.py +75 -0
- package/analysis_tools/find_encrypted_payload.py +485 -0
- package/analysis_tools/fix_apk_headers.py +154 -0
- package/analysis_tools/manifest_analyzer.py +214 -0
- package/analysis_tools/network_analyzer.py +287 -0
- package/analysis_tools/report_generator.py +506 -0
- package/analysis_tools/report_generator_modular.py +885 -0
- package/analysis_tools/sast_scanner.py +412 -0
- package/analysis_tools/so_string_analyzer.py +406 -0
- package/analysis_tools/yara_enhanced_analyzer.py +330 -0
- package/analysis_tools/yara_results_processor.py +368 -0
- package/analyzer_config.json +113 -0
- package/apkid/__init__.py +32 -0
- package/apkid/__pycache__/__init__.cpython-313.pyc +0 -0
- package/apkid/__pycache__/__init__.cpython-314.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-313.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-314.pyc +0 -0
- package/apkid/__pycache__/main.cpython-313.pyc +0 -0
- package/apkid/__pycache__/main.cpython-314.pyc +0 -0
- package/apkid/__pycache__/output.cpython-313.pyc +0 -0
- package/apkid/__pycache__/rules.cpython-313.pyc +0 -0
- package/apkid/apkid.py +266 -0
- package/apkid/main.py +98 -0
- package/apkid/output.py +177 -0
- package/apkid/rules/apk/common.yara +68 -0
- package/apkid/rules/apk/obfuscators.yara +118 -0
- package/apkid/rules/apk/packers.yara +1197 -0
- package/apkid/rules/apk/protectors.yara +301 -0
- package/apkid/rules/dex/abnormal.yara +104 -0
- package/apkid/rules/dex/anti-vm.yara +568 -0
- package/apkid/rules/dex/common.yara +60 -0
- package/apkid/rules/dex/compilers.yara +434 -0
- package/apkid/rules/dex/obfuscators.yara +602 -0
- package/apkid/rules/dex/packers.yara +761 -0
- package/apkid/rules/dex/protectors.yara +520 -0
- package/apkid/rules/dll/common.yara +38 -0
- package/apkid/rules/dll/obfuscators.yara +43 -0
- package/apkid/rules/elf/anti-vm.yara +43 -0
- package/apkid/rules/elf/common.yara +54 -0
- package/apkid/rules/elf/obfuscators.yara +991 -0
- package/apkid/rules/elf/packers.yara +1128 -0
- package/apkid/rules/elf/protectors.yara +794 -0
- package/apkid/rules/res/common.yara +43 -0
- package/apkid/rules/res/obfuscators.yara +46 -0
- package/apkid/rules/res/protectors.yara +46 -0
- package/apkid/rules.py +77 -0
- package/bin/anais +3 -0
- package/dist/cli.js +82 -0
- package/dist/index.js +123 -0
- package/dist/types/index.js +2 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/output.js +44 -0
- package/dist/utils/paths.js +107 -0
- package/docs/ARCHITECTURE.txt +353 -0
- package/docs/Workflow and Reference.md +445 -0
- package/package.json +70 -0
- package/rules/yara_general_rules.yar +323 -0
- package/scripts/dynamic_analysis_helper.sh +334 -0
- package/scripts/frida/dpt_dex_dumper.js +145 -0
- package/scripts/frida/frida_dex_dump.js +145 -0
- package/scripts/frida/frida_hooks.js +437 -0
- package/scripts/frida/frida_websocket_extractor.js +154 -0
- package/scripts/setup.sh +206 -0
- package/scripts/validate_framework.sh +224 -0
- package/src/cli.ts +91 -0
- package/src/index.ts +123 -0
- package/src/types/index.ts +44 -0
- package/src/utils/index.ts +6 -0
- package/src/utils/output.ts +50 -0
- package/src/utils/paths.ts +72 -0
- package/tsconfig.json +14 -0
package/analysis_tools/yara_results_processor.py
@@ -0,0 +1,368 @@
#!/usr/bin/env python3
"""
YARA Results Processor
Parses and enhances YARA scan results with detailed analysis and categorization
"""

import sys
import os
import re
import json
import argparse
from pathlib import Path
from collections import defaultdict


# YARA rule severity mapping
SEVERITY_MAP = {
    'AndroidMalware': 'CRITICAL',
    'Backdoor': 'CRITICAL',
    'Trojan': 'CRITICAL',
    'Ransomware': 'CRITICAL',
    'BankingMalware': 'CRITICAL',
    'Spyware': 'HIGH',
    'Adware': 'HIGH',
    'DexProtector': 'HIGH',
    'DPTShell': 'HIGH',
    'Packer': 'HIGH',
    'Obfuscator': 'MEDIUM',
    'SuspiciousAPI': 'MEDIUM',
    'NetworkArtifact': 'MEDIUM',
    'CryptoMining': 'HIGH',
    'PrivilegeEscalation': 'CRITICAL',
    'RootDetection': 'MEDIUM',
    'EmulatorDetection': 'MEDIUM',
    'DynamicCodeLoading': 'HIGH',
    'NativeCode': 'LOW',
    'Reflection': 'MEDIUM',
}


def parse_yara_output(yara_file):
    """Parse YARA output file"""
    if not os.path.exists(yara_file):
        return []

    matches = []
    current_match = None

    try:
        with open(yara_file, 'r', encoding='utf-8', errors='ignore') as f:
            for line in f:
                line = line.strip()

                if not line:
                    continue

                # Match line format: "RuleName file_path"
                if not line.startswith('0x') and not line.startswith('$'):
                    parts = line.split(None, 1)
                    if len(parts) >= 1:
                        if current_match:
                            matches.append(current_match)

                        rule_name = parts[0]
                        file_path = parts[1] if len(parts) > 1 else "Unknown"

                        current_match = {
                            'rule': rule_name,
                            'file': file_path,
                            'strings': []
                        }

                # String match format: "0x123:$string_name: matched content"
                elif line.startswith('0x'):
                    if current_match:
                        # Parse offset and string info
                        match_info = re.match(r'(0x[0-9a-fA-F]+):\$([^:]+):\s*(.+)', line)
                        if match_info:
                            offset = match_info.group(1)
                            string_name = match_info.group(2)
                            content = match_info.group(3)

                            current_match['strings'].append({
                                'offset': offset,
                                'name': string_name,
                                'content': content
                            })

        # Add last match
        if current_match:
            matches.append(current_match)

    except Exception as e:
        print(f"[!] Error parsing {yara_file}: {e}")

    return matches


def categorize_matches(matches):
    """Categorize YARA matches by severity"""
    categorized = {
        'CRITICAL': [],
        'HIGH': [],
        'MEDIUM': [],
        'LOW': [],
        'INFO': []
    }

    for match in matches:
        rule_name = match['rule']

        # Determine severity
        severity = 'INFO'
        for keyword, sev in SEVERITY_MAP.items():
            if keyword.lower() in rule_name.lower():
                severity = sev
                break

        # Enhance match with metadata
        enhanced_match = {
            **match,
            'severity': severity,
            'description': get_rule_description(rule_name),
            'recommendation': get_recommendation(rule_name)
        }

        categorized[severity].append(enhanced_match)

    return categorized


def get_rule_description(rule_name):
    """Get human-readable description for YARA rule"""
    descriptions = {
        'AndroidMalware': 'Known Android malware signature detected',
        'Backdoor': 'Backdoor functionality identified',
        'Trojan': 'Trojan behavior patterns found',
        'Ransomware': 'Ransomware indicators present',
        'BankingMalware': 'Banking trojan signatures detected',
        'Spyware': 'Spyware capabilities identified',
        'Adware': 'Advertising/adware libraries detected',
        'DexProtector': 'DexProtector packer detected',
        'DPTShell': 'DPT-Shell obfuscation detected',
        'Packer': 'Code packing/obfuscation detected',
        'Obfuscator': 'Code obfuscation patterns found',
        'SuspiciousAPI': 'Suspicious API calls detected',
        'NetworkArtifact': 'Suspicious network artifacts found',
        'CryptoMining': 'Cryptocurrency mining code detected',
        'PrivilegeEscalation': 'Privilege escalation attempts detected',
        'RootDetection': 'Root detection mechanisms found',
        'EmulatorDetection': 'Emulator detection code present',
        'DynamicCodeLoading': 'Dynamic code loading detected',
        'NativeCode': 'Native code (JNI) usage detected',
        'Reflection': 'Java reflection usage detected',
    }

    for keyword, desc in descriptions.items():
        if keyword.lower() in rule_name.lower():
            return desc

    return f"YARA rule '{rule_name}' matched"


def get_recommendation(rule_name):
    """Get security recommendations based on YARA rule"""
    recommendations = {
        'Backdoor': 'URGENT: Review for unauthorized remote access capabilities. Check for command & control infrastructure.',
        'Trojan': 'URGENT: Analyze malicious payload. Identify data exfiltration mechanisms.',
        'Ransomware': 'CRITICAL: Check for encryption routines and ransom payment mechanisms.',
        'BankingMalware': 'CRITICAL: Review for credential theft, overlay attacks, and SMS interception.',
        'Spyware': 'HIGH: Analyze data collection capabilities. Check for keylogging and screen capture.',
        'DexProtector': 'Dynamic analysis required - DEX will be unpacked at runtime.',
        'DPTShell': 'Advanced packer detected - use Frida to dump unpacked DEX from memory.',
        'Packer': 'Packed code detected - static analysis may be limited. Consider dynamic analysis.',
        'DynamicCodeLoading': 'Monitor runtime behavior for dynamically loaded code execution.',
        'CryptoMining': 'Check for unauthorized resource usage and background mining operations.',
        'PrivilegeEscalation': 'Review for root exploits and privilege escalation vulnerabilities.',
    }

    for keyword, rec in recommendations.items():
        if keyword.lower() in rule_name.lower():
            return rec

    return 'Further investigation recommended.'


def generate_summary_stats(categorized):
    """Generate summary statistics"""
    stats = {
        'total_matches': sum(len(matches) for matches in categorized.values()),
        'by_severity': {
            severity: len(matches)
            for severity, matches in categorized.items()
        },
        'unique_rules': len(set(
            match['rule']
            for matches in categorized.values()
            for match in matches
        )),
        'affected_files': len(set(
            match['file']
            for matches in categorized.values()
            for match in matches
        ))
    }
    return stats


def generate_markdown_report(categorized, stats, output_file):
    """Generate detailed Markdown report"""
    md_lines = [
        "# YARA Scan Results",
        "",
        "## Summary",
        "",
        f"- **Total Matches**: {stats['total_matches']}",
        f"- **Unique Rules**: {stats['unique_rules']}",
        f"- **Affected Files**: {stats['affected_files']}",
        "",
        "### Severity Breakdown",
        ""
    ]

    # Severity breakdown
    for severity in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO']:
        count = stats['by_severity'][severity]
        if count > 0:
            emoji = {'CRITICAL': '🔴', 'HIGH': '🟠', 'MEDIUM': '🟡', 'LOW': '🔵', 'INFO': '⚪'}
            md_lines.append(f"- {emoji.get(severity, '•')} **{severity}**: {count} match(es)")

    md_lines.append("")

    # Detailed findings by severity
    for severity in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO']:
        matches = categorized[severity]
        if not matches:
            continue

        md_lines.extend([
            f"## {severity} Severity Findings",
            ""
        ])

        # Group by rule
        rules = defaultdict(list)
        for match in matches:
            rules[match['rule']].append(match)

        for rule_name, rule_matches in rules.items():
            md_lines.extend([
                f"### {rule_name}",
                "",
                f"**Description**: {rule_matches[0]['description']}",
                "",
                f"**Recommendation**: {rule_matches[0]['recommendation']}",
                "",
                f"**Matches**: {len(rule_matches)} file(s)",
                ""
            ])

            # List affected files
            for i, match in enumerate(rule_matches, 1):
                md_lines.append(f"{i}. `{match['file']}`")

                # Show matched strings (limit to first 5)
                if match['strings']:
                    md_lines.append("")
                    md_lines.append(" **Matched Patterns**:")
                    for string_match in match['strings'][:5]:
                        content = string_match['content'][:100]  # Truncate long strings
                        md_lines.append(f" - `{string_match['name']}` at `{string_match['offset']}`: `{content}`")

                    if len(match['strings']) > 5:
                        md_lines.append(f" - ... and {len(match['strings']) - 5} more pattern(s)")

                md_lines.append("")

        md_lines.append("")

    # Write report
    try:
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write('\n'.join(md_lines))
        return True
    except Exception as e:
        print(f"[!] Error writing Markdown report: {e}")
        return False


def process_yara_results(yara_files, output_dir):
    """Process multiple YARA result files"""
    all_matches = []

    print("[*] Processing YARA results...")

    for yara_file in yara_files:
        if not os.path.exists(yara_file):
            print(f"[!] File not found: {yara_file}")
            continue

        print(f"[*] Parsing: {yara_file}")
        matches = parse_yara_output(yara_file)
        all_matches.extend(matches)
        print(f" Found {len(matches)} match(es)")

    if not all_matches:
        print("[!] No YARA matches found")
        return False

    # Categorize matches
    categorized = categorize_matches(all_matches)

    # Generate statistics
    stats = generate_summary_stats(categorized)

    # Print console summary
    print("\n" + "="*70)
    print("YARA SCAN SUMMARY")
    print("="*70)
    print(f"Total Matches: {stats['total_matches']}")
    print(f"Unique Rules: {stats['unique_rules']}")
    print(f"Affected Files: {stats['affected_files']}")
    print("\nBy Severity:")
    for severity in ['CRITICAL', 'HIGH', 'MEDIUM', 'LOW', 'INFO']:
        count = stats['by_severity'][severity]
        if count > 0:
            emoji = {'CRITICAL': '🔴', 'HIGH': '🟠', 'MEDIUM': '🟡', 'LOW': '🔵', 'INFO': '⚪'}
            print(f" {emoji.get(severity, '•')} {severity}: {count}")
    print("="*70 + "\n")

    # Save JSON
    json_output = os.path.join(output_dir, 'yara_processed.json')
    try:
        with open(json_output, 'w') as f:
            json.dump({
                'statistics': stats,
                'findings': categorized
            }, f, indent=2)
        print(f"[✓] JSON report saved: {json_output}")
    except Exception as e:
        print(f"[!] Error saving JSON: {e}")

    # Generate Markdown report
    md_output = os.path.join(output_dir, 'yara_detailed_report.md')
    if generate_markdown_report(categorized, stats, md_output):
        print(f"[✓] Markdown report saved: {md_output}")

    return True


def main():
    parser = argparse.ArgumentParser(
        description='YARA Results Processor - Enhanced YARA scan analysis'
    )
    parser.add_argument('yara_files', nargs='+', help='YARA result files to process')
    parser.add_argument('--output-dir', required=True, help='Output directory for reports')

    args = parser.parse_args()

    # Create output directory if needed
    os.makedirs(args.output_dir, exist_ok=True)

    success = process_yara_results(args.yara_files, args.output_dir)
    sys.exit(0 if success else 1)


if __name__ == '__main__':
    main()
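The script above is normally invoked via its CLI (`yara_files... --output-dir DIR`, per the argparse definition), but its functions can also be driven directly. The following is only a minimal sketch, assuming the script is reachable at `analysis_tools/yara_results_processor.py` and that `yara_scan.txt` is output from a `yara -s` scan; the file names are illustrative, not part of the package.

# Hypothetical driver; the scan-file name and directory layout are assumptions.
import sys
sys.path.insert(0, "analysis_tools")          # make the script importable as a module
import yara_results_processor as yrp

matches = yrp.parse_yara_output("yara_scan.txt")    # raw YARA output with string matches
categorized = yrp.categorize_matches(matches)       # bucket matches by mapped severity
stats = yrp.generate_summary_stats(categorized)     # totals, unique rules, affected files
yrp.generate_markdown_report(categorized, stats, "yara_detailed_report.md")
print(stats["total_matches"], stats["by_severity"])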
package/analyzer_config.json
@@ -0,0 +1,113 @@
{
  "analyzer": {
    "version": "1.0",
    "author": "Mobile CySec Expert",
    "description": "Comprehensive Anais Static Core"
  },
  "paths": {
    "workspace": "~/Documents/Anais-Reports",
    "tools": "./analysis_tools",
    "reports": "~/Documents/Anais-Reports/reports",
    "yara_rules": "./rules/yara_general_rules.yar"
  },
  "tools": {
    "apktool": {
      "enabled": true,
      "force": true,
      "no_src": false,
      "no_res": false
    },
    "jadx": {
      "enabled": true,
      "deobf": true,
      "deobf_min": 3,
      "deobf_max": 40,
      "threads": 4
    },
    "yara": {
      "enabled": true,
      "recursive": true,
      "scan_apk": true,
      "scan_decompiled": true,
      "scan_jadx": true
    },
    "androguard": {
      "enabled": true,
      "analyze_dex": true,
      "extract_strings": true,
      "analyze_manifest": true
    }
  },
  "analysis": {
    "check_protection": true,
    "detect_obfuscation": true,
    "run_sast": true,
    "analyze_manifest": true,
    "analyze_network": true,
    "scan_native_libs": true,
    "extract_strings": true,
    "check_crypto": true
  },
  "protection_bypass": {
    "fix_headers": true,
    "remove_encryption_flags": true,
    "max_attempts": 3
  },
  "sast": {
    "max_files_to_scan": 1000,
    "include_low_severity": false,
    "deep_scan": true,
    "scan_smali": true,
    "scan_java": true,
    "scan_native": true,
    "scan_assets": true
  },
  "reporting": {
    "format": ["markdown", "json"],
    "include_code_snippets": true,
    "max_snippet_length": 500,
    "include_yara_results": true,
    "detailed_findings": true
  },
  "patterns": {
    "malware_indicators": [
      "accessibility service abuse",
      "crypto wallet targeting",
      "c2 communication",
      "data exfiltration",
      "remote code execution",
      "root detection bypass"
    ],
    "suspicious_tlds": [
      ".top",
      ".xyz",
      ".tk",
      ".ml",
      ".ga",
      ".cf",
      ".gq",
      ".pw"
    ],
    "suspicious_permissions": [
      "BIND_ACCESSIBILITY_SERVICE",
      "SYSTEM_ALERT_WINDOW",
      "REQUEST_INSTALL_PACKAGES",
      "READ_SMS",
      "RECEIVE_SMS",
      "READ_CONTACTS"
    ]
  },
  "thresholds": {
    "risk_score": {
      "critical": 70,
      "high": 40,
      "medium": 20,
      "low": 0
    },
    "obfuscation_confidence": {
      "high": 80,
      "medium": 50,
      "low": 30
    }
  }
}
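How the package's own scripts consume this config is not shown in this hunk; the snippet below is only a sketch of how a reader might load it, expand the `~` in the workspace paths, and apply the risk-score thresholds. The function and variable names here are assumptions for illustration.

# Illustrative loader; not taken from the package's actual code.
import json
import os

with open("analyzer_config.json", "r", encoding="utf-8") as f:
    cfg = json.load(f)

workspace = os.path.expanduser(cfg["paths"]["workspace"])   # "~" must be expanded by the consumer
thresholds = cfg["thresholds"]["risk_score"]

def risk_label(score):
    # Map a numeric risk score onto the configured buckets (critical=70, high=40, medium=20, low=0).
    for label in ("critical", "high", "medium", "low"):
        if score >= thresholds[label]:
            return label.upper()
    return "LOW"

print(workspace, risk_label(55))   # e.g. <home>/Documents/Anais-Reports HIGH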
package/apkid/__init__.py
@@ -0,0 +1,32 @@
#!/usr/bin/env python
"""
Copyright (C) 2023 RedNaga. https://rednaga.io
All rights reserved. Contact: rednaga@protonmail.com


This file is part of APKiD


Commercial License Usage
------------------------
Licensees holding valid commercial APKiD licenses may use this file
in accordance with the commercial license agreement provided with the
Software or, alternatively, in accordance with the terms contained in
a written agreement between you and RedNaga.


GNU General Public License Usage
--------------------------------
Alternatively, this file may be used under the terms of the GNU General
Public License version 3.0 as published by the Free Software Foundation
and appearing in the file LICENSE.GPL included in the packaging of this
file. Please visit http://www.gnu.org/copyleft/gpl.html and review the
information to ensure the GNU General Public License version 3.0
requirements will be met.
"""

__title__ = 'apkid'
__version__ = '3.0.0'
__author__ = 'Caleb Fenton & Tim Strazzere'
__license__ = 'GPL & Commercial'
__copyright__ = 'Copyright (C) 2025 RedNaga'
Binary files (8 compiled .pyc entries) — contents not shown in the diff.