anais-apk-forensic 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +249 -0
- package/anais.sh +669 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/apk_basic_info.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/check_zip_encryption.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/detect_obfuscation.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/dex_payload_hunter.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/entropy_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/error_logger.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/find_encrypted_payload.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/fix_apk_headers.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/manifest_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/network_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/report_generator_modular.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-313.pyc +0 -0
- package/analysis_tools/__pycache__/sast_scanner.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/so_string_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_enhanced_analyzer.cpython-314.pyc +0 -0
- package/analysis_tools/__pycache__/yara_results_processor.cpython-314.pyc +0 -0
- package/analysis_tools/apk_basic_info.py +85 -0
- package/analysis_tools/check_zip_encryption.py +142 -0
- package/analysis_tools/detect_obfuscation.py +650 -0
- package/analysis_tools/dex_payload_hunter.py +734 -0
- package/analysis_tools/entropy_analyzer.py +335 -0
- package/analysis_tools/error_logger.py +75 -0
- package/analysis_tools/find_encrypted_payload.py +485 -0
- package/analysis_tools/fix_apk_headers.py +154 -0
- package/analysis_tools/manifest_analyzer.py +214 -0
- package/analysis_tools/network_analyzer.py +287 -0
- package/analysis_tools/report_generator.py +506 -0
- package/analysis_tools/report_generator_modular.py +885 -0
- package/analysis_tools/sast_scanner.py +412 -0
- package/analysis_tools/so_string_analyzer.py +406 -0
- package/analysis_tools/yara_enhanced_analyzer.py +330 -0
- package/analysis_tools/yara_results_processor.py +368 -0
- package/analyzer_config.json +113 -0
- package/apkid/__init__.py +32 -0
- package/apkid/__pycache__/__init__.cpython-313.pyc +0 -0
- package/apkid/__pycache__/__init__.cpython-314.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-313.pyc +0 -0
- package/apkid/__pycache__/apkid.cpython-314.pyc +0 -0
- package/apkid/__pycache__/main.cpython-313.pyc +0 -0
- package/apkid/__pycache__/main.cpython-314.pyc +0 -0
- package/apkid/__pycache__/output.cpython-313.pyc +0 -0
- package/apkid/__pycache__/rules.cpython-313.pyc +0 -0
- package/apkid/apkid.py +266 -0
- package/apkid/main.py +98 -0
- package/apkid/output.py +177 -0
- package/apkid/rules/apk/common.yara +68 -0
- package/apkid/rules/apk/obfuscators.yara +118 -0
- package/apkid/rules/apk/packers.yara +1197 -0
- package/apkid/rules/apk/protectors.yara +301 -0
- package/apkid/rules/dex/abnormal.yara +104 -0
- package/apkid/rules/dex/anti-vm.yara +568 -0
- package/apkid/rules/dex/common.yara +60 -0
- package/apkid/rules/dex/compilers.yara +434 -0
- package/apkid/rules/dex/obfuscators.yara +602 -0
- package/apkid/rules/dex/packers.yara +761 -0
- package/apkid/rules/dex/protectors.yara +520 -0
- package/apkid/rules/dll/common.yara +38 -0
- package/apkid/rules/dll/obfuscators.yara +43 -0
- package/apkid/rules/elf/anti-vm.yara +43 -0
- package/apkid/rules/elf/common.yara +54 -0
- package/apkid/rules/elf/obfuscators.yara +991 -0
- package/apkid/rules/elf/packers.yara +1128 -0
- package/apkid/rules/elf/protectors.yara +794 -0
- package/apkid/rules/res/common.yara +43 -0
- package/apkid/rules/res/obfuscators.yara +46 -0
- package/apkid/rules/res/protectors.yara +46 -0
- package/apkid/rules.py +77 -0
- package/bin/anais +3 -0
- package/dist/cli.js +82 -0
- package/dist/index.js +123 -0
- package/dist/types/index.js +2 -0
- package/dist/utils/index.js +21 -0
- package/dist/utils/output.js +44 -0
- package/dist/utils/paths.js +107 -0
- package/docs/ARCHITECTURE.txt +353 -0
- package/docs/Workflow and Reference.md +445 -0
- package/package.json +70 -0
- package/rules/yara_general_rules.yar +323 -0
- package/scripts/dynamic_analysis_helper.sh +334 -0
- package/scripts/frida/dpt_dex_dumper.js +145 -0
- package/scripts/frida/frida_dex_dump.js +145 -0
- package/scripts/frida/frida_hooks.js +437 -0
- package/scripts/frida/frida_websocket_extractor.js +154 -0
- package/scripts/setup.sh +206 -0
- package/scripts/validate_framework.sh +224 -0
- package/src/cli.ts +91 -0
- package/src/index.ts +123 -0
- package/src/types/index.ts +44 -0
- package/src/utils/index.ts +6 -0
- package/src/utils/output.ts +50 -0
- package/src/utils/paths.ts +72 -0
- package/tsconfig.json +14 -0
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Entropy Analysis Tool for APK Files
|
|
4
|
+
Visualizes entropy distribution to identify encrypted/obfuscated regions
|
|
5
|
+
Useful for detecting hidden payloads in protected APKs (DexProtector, DPT-Shell, etc.)
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import sys
|
|
9
|
+
import os
|
|
10
|
+
import zipfile
|
|
11
|
+
import math
|
|
12
|
+
import json
|
|
13
|
+
import argparse
|
|
14
|
+
from collections import Counter
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def calculate_entropy(data):
    """Compute the Shannon entropy (bits per symbol) of a byte sequence.

    Returns 0.0 for empty input; otherwise a value in [0.0, 8.0] for
    byte-valued data.
    """
    if not data:
        return 0.0

    total = len(data)
    # Sum -p*log2(p) over the observed symbol frequencies.
    return -sum(
        (count / total) * math.log2(count / total)
        for count in Counter(data).values()
    )
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def analyze_chunk_entropy(data, chunk_size=1024):
    """Slice *data* into fixed-size chunks and report per-chunk entropy.

    Each record carries the chunk's byte offset, its size, its Shannon
    entropy, and a 'suspicious' flag set when entropy exceeds 7.0
    (typical of encrypted or compressed content).
    """
    records = []
    offset = 0
    total = len(data)

    while offset < total:
        piece = data[offset:offset + chunk_size]
        ent = calculate_entropy(piece)
        records.append({
            'offset': offset,
            'size': len(piece),
            'entropy': ent,
            'suspicious': ent > 7.0,  # High entropy indicates encryption/compression
        })
        offset += chunk_size

    return records
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def generate_ascii_chart(chunks, width=80, height=20):
|
|
51
|
+
"""Generate ASCII art entropy chart"""
|
|
52
|
+
if not chunks:
|
|
53
|
+
return "No data to visualize"
|
|
54
|
+
|
|
55
|
+
max_entropy = 8.0 # Max theoretical entropy for byte data
|
|
56
|
+
chart = []
|
|
57
|
+
|
|
58
|
+
# Header
|
|
59
|
+
chart.append("Entropy Distribution Chart (0.0 - 8.0)")
|
|
60
|
+
chart.append("=" * width)
|
|
61
|
+
chart.append("ā = High Entropy (>7.0, likely encrypted)")
|
|
62
|
+
chart.append("ā = Medium-High (6.0-7.0)")
|
|
63
|
+
chart.append("ā = Medium (5.0-6.0)")
|
|
64
|
+
chart.append("ā = Low (<5.0, plaintext)")
|
|
65
|
+
chart.append("-" * width)
|
|
66
|
+
|
|
67
|
+
# Data visualization
|
|
68
|
+
chunk_groups = []
|
|
69
|
+
group_size = max(1, len(chunks) // width)
|
|
70
|
+
|
|
71
|
+
for i in range(0, len(chunks), group_size):
|
|
72
|
+
group = chunks[i:i+group_size]
|
|
73
|
+
avg_entropy = sum(c['entropy'] for c in group) / len(group)
|
|
74
|
+
chunk_groups.append(avg_entropy)
|
|
75
|
+
|
|
76
|
+
# Build chart rows
|
|
77
|
+
for row in range(height):
|
|
78
|
+
threshold = max_entropy * (1 - row / height)
|
|
79
|
+
line = ""
|
|
80
|
+
for entropy in chunk_groups:
|
|
81
|
+
if entropy >= threshold:
|
|
82
|
+
if entropy > 7.0:
|
|
83
|
+
line += "ā"
|
|
84
|
+
elif entropy > 6.0:
|
|
85
|
+
line += "ā"
|
|
86
|
+
elif entropy > 5.0:
|
|
87
|
+
line += "ā"
|
|
88
|
+
else:
|
|
89
|
+
line += "ā"
|
|
90
|
+
else:
|
|
91
|
+
line += " "
|
|
92
|
+
chart.append(f"{threshold:4.1f} |{line}|")
|
|
93
|
+
|
|
94
|
+
chart.append(" " + "-" * width)
|
|
95
|
+
chart.append(f" 0{'':>{width-20}}File Length")
|
|
96
|
+
|
|
97
|
+
return "\n".join(chart)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def analyze_file_entropy(file_path, chunk_size=1024):
    """Compute whole-file and per-chunk entropy for one file on disk.

    Returns a dict with the overall entropy, the per-chunk breakdown,
    the chunks flagged as suspicious (entropy > 7.0), the fraction of
    flagged chunks, and an ASCII chart.  On any failure a dict with only
    the file path and the error text is returned instead of raising.
    """
    try:
        with open(file_path, 'rb') as handle:
            payload = handle.read()

        chunk_records = analyze_chunk_entropy(payload, chunk_size)

        # Fraction of chunks that look encrypted/compressed.
        flagged = [rec for rec in chunk_records if rec['suspicious']]
        ratio = len(flagged) / len(chunk_records) if chunk_records else 0

        return {
            'file': file_path,
            'size': len(payload),
            'overall_entropy': calculate_entropy(payload),
            'chunks': chunk_records,
            'suspicious_regions': flagged,
            'high_entropy_ratio': ratio,
            'chart': generate_ascii_chart(chunk_records),
        }
    except Exception as exc:
        # Best-effort tool: report the failure rather than crash.
        return {
            'file': file_path,
            'error': str(exc),
        }
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def analyze_apk_entropy(apk_path, output_json, chunk_size=1024):
    """Analyze entropy of interesting members inside an APK (zip archive).

    Scans DEX/SO/DAT/BIN/JAR entries plus any asset larger than 50 KB,
    records per-file entropy statistics and a severity assessment,
    writes the aggregate results as JSON to *output_json*, and prints a
    console summary.

    Returns True on success, False when the APK cannot be read or the
    results cannot be written.
    """
    results = {
        'apk': apk_path,
        'chunk_size': chunk_size,
        'files': [],
        'summary': {}
    }

    try:
        with zipfile.ZipFile(apk_path, 'r') as zf:
            # Target files: DEX, SO libraries, and large assets
            target_extensions = ['.dex', '.so', '.dat', '.bin', '.jar']
            target_files = []

            for file_info in zf.filelist:
                filename = file_info.filename

                # Skip directories
                if filename.endswith('/'):
                    continue

                # Select by extension, or any sizeable asset that could
                # hide an embedded payload.
                is_target = any(filename.endswith(ext) for ext in target_extensions)
                is_large_asset = filename.startswith('assets/') and file_info.file_size > 50000

                if is_target or is_large_asset:
                    target_files.append(file_info)

            print(f"[*] Analyzing {len(target_files)} files in APK...")

            for file_info in target_files:
                filename = file_info.filename
                # Fix: include the member name in progress output (the
                # string previously contained a literal "(unknown)"
                # placeholder instead of the filename).
                print(f"[*] Processing: {filename} ({file_info.file_size} bytes)")

                try:
                    data = zf.read(filename)
                    overall_entropy = calculate_entropy(data)
                    chunks = analyze_chunk_entropy(data, chunk_size)

                    suspicious_regions = [c for c in chunks if c['suspicious']]
                    high_entropy_ratio = len(suspicious_regions) / len(chunks) if chunks else 0

                    file_result = {
                        'filename': filename,
                        'size': len(data),
                        'overall_entropy': overall_entropy,
                        'high_entropy_ratio': high_entropy_ratio,
                        'suspicious_regions_count': len(suspicious_regions),
                        'chunks': chunks[:100],  # Limit to first 100 chunks for JSON size
                        'assessment': classify_entropy(overall_entropy, high_entropy_ratio)
                    }

                    # Add chart for highly suspicious files
                    if overall_entropy > 7.0 or high_entropy_ratio > 0.5:
                        file_result['chart'] = generate_ascii_chart(chunks, width=60)

                    results['files'].append(file_result)

                except Exception as e:
                    # Fix: report which member failed (was "(unknown)").
                    print(f"[!] Error processing {filename}: {e}")
                    results['files'].append({
                        'filename': filename,
                        'error': str(e)
                    })

            # Generate summary
            results['summary'] = generate_summary(results['files'])

    except Exception as e:
        results['error'] = str(e)
        print(f"[!] Error analyzing APK: {e}")
        return False

    # Save results
    try:
        with open(output_json, 'w') as f:
            json.dump(results, f, indent=2)
        print(f"[ā] Results saved to: {output_json}")

        # Print summary
        print_summary(results)
        return True

    except Exception as e:
        print(f"[!] Error saving results: {e}")
        return False
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def classify_entropy(overall_entropy, high_entropy_ratio):
    """Map entropy statistics to a severity assessment.

    Returns a dict with 'level' (CRITICAL / HIGH / MEDIUM / LOW),
    'description', and 'recommendation'.
    """
    if overall_entropy > 7.5 and high_entropy_ratio > 0.8:
        level = 'CRITICAL'
        description = 'Heavily encrypted/obfuscated - likely encrypted payload'
        recommendation = 'Dynamic analysis required - this file is encrypted'
    elif overall_entropy > 7.0 and high_entropy_ratio > 0.5:
        level = 'HIGH'
        description = 'Significant encryption detected'
        recommendation = 'Contains encrypted sections - monitor for runtime unpacking'
    elif overall_entropy > 6.5 or high_entropy_ratio > 0.3:
        level = 'MEDIUM'
        description = 'Moderate obfuscation or compression'
        recommendation = 'May contain compressed or partially encrypted data'
    else:
        level = 'LOW'
        description = 'Low entropy - mostly plaintext or uncompressed'
        recommendation = 'Safe for static analysis'

    return {
        'level': level,
        'description': description,
        'recommendation': recommendation,
    }
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def generate_summary(files):
    """Bucket per-file results into highly_suspicious / suspicious / normal.

    Entries containing an 'error' key are skipped.  CRITICAL files go to
    'highly_suspicious', HIGH and MEDIUM to 'suspicious', and everything
    else to 'normal'.
    """
    buckets = {
        'total_files': len(files),
        'highly_suspicious': [],
        'suspicious': [],
        'normal': []
    }

    for record in files:
        if 'error' in record:
            continue

        level = record.get('assessment', {}).get('level', 'UNKNOWN')

        entry = {
            'filename': record['filename'],
            'entropy': record.get('overall_entropy', 0),
            'high_entropy_ratio': record.get('high_entropy_ratio', 0),
        }

        if level == 'CRITICAL':
            buckets['highly_suspicious'].append(entry)
        elif level in ('HIGH', 'MEDIUM'):
            buckets['suspicious'].append(entry)
        else:
            buckets['normal'].append(entry)

    return buckets
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def print_summary(results):
    """Write a human-readable summary of the entropy analysis to stdout.

    Expects *results* as built by analyze_apk_entropy: a 'summary' dict
    of bucketed entries plus the per-file 'files' list (charts of
    CRITICAL files are echoed at the end).
    """
    summary = results['summary']
    banner = "=" * 70

    print("\n" + banner)
    print("ENTROPY ANALYSIS SUMMARY")
    print(banner)

    print(f"\nš Total Files Analyzed: {summary['total_files']}")

    def _list_entries(entries):
        # Shared formatting for the per-file detail lines.
        for file in entries:
            print(f" ⢠{file['filename']}")
            print(f" Entropy: {file['entropy']:.2f} | High Entropy Ratio: {file['high_entropy_ratio']:.1%}")

    if summary['highly_suspicious']:
        print(f"\nš“ HIGHLY SUSPICIOUS ({len(summary['highly_suspicious'])} files):")
        _list_entries(summary['highly_suspicious'])

    if summary['suspicious']:
        print(f"\nš” SUSPICIOUS ({len(summary['suspicious'])} files):")
        _list_entries(summary['suspicious'])

    if summary['normal']:
        print(f"\nš¢ NORMAL ({len(summary['normal'])} files)")

    print("\n" + banner)

    # Echo the charts of files assessed as CRITICAL.
    for file_data in results['files']:
        if file_data.get('assessment', {}).get('level') == 'CRITICAL':
            print(f"\nš CHART: {file_data['filename']}")
            print(file_data.get('chart', 'No chart available'))
            print()
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def _build_parser():
    """Construct the command-line argument parser."""
    parser = argparse.ArgumentParser(
        description='Entropy Analysis Tool - Detect encrypted/obfuscated regions in APK files'
    )
    parser.add_argument('apk_path', help='Path to APK file')
    parser.add_argument('output_json', help='Path to output JSON file')
    parser.add_argument('--chunk-size', type=int, default=1024,
                        help='Chunk size for entropy analysis (default: 1024 bytes)')
    parser.add_argument('--verbose', action='store_true',
                        help='Verbose output')
    return parser


def main():
    """CLI entry point: validate arguments and run the APK analysis."""
    args = _build_parser().parse_args()

    # Fail fast on a missing input file before touching the zip layer.
    if not os.path.exists(args.apk_path):
        print(f"[!] Error: APK file not found: {args.apk_path}")
        sys.exit(1)

    ok = analyze_apk_entropy(args.apk_path, args.output_json, args.chunk_size)
    sys.exit(0 if ok else 1)


if __name__ == '__main__':
    main()
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Error Logging Utility
|
|
4
|
+
Centralized error logging for analysis tools
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
import logging
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
|
|
12
|
+
class AnalysisLogger:
    """Logger for analysis tools: full detail goes to a log file, brief
    messages go to stderr (info lines are echoed only in verbose mode).
    """

    def __init__(self, tool_name, log_file=None, verbose=False):
        self.tool_name = tool_name
        self.verbose = verbose

        # Default log file sits two levels above this module.
        if log_file is None:
            log_file = Path(__file__).parent.parent / 'analysis_errors.log'
        self.log_file = Path(log_file)

        # Named per tool; DEBUG level so the file captures everything.
        self.file_logger = logging.getLogger(f'{tool_name}_file')
        self.file_logger.setLevel(logging.DEBUG)

        # Attach the file handler only once, even if the same tool name
        # is constructed repeatedly within one process.
        if not self.file_logger.handlers:
            handler = logging.FileHandler(self.log_file, encoding='utf-8')
            handler.setLevel(logging.DEBUG)
            handler.setFormatter(logging.Formatter(
                '[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s',
                datefmt='%Y-%m-%d %H:%M:%S'
            ))
            self.file_logger.addHandler(handler)

    def info(self, message):
        """Log an info message; echo to stderr only when verbose."""
        if self.verbose:
            print(f"[INFO] {message}", file=sys.stderr)
        self.file_logger.info(message)

    def warning(self, message):
        """Log a warning to the file and echo it to stderr."""
        print(f"[WARNING] {message}", file=sys.stderr)
        self.file_logger.warning(message)

    def error(self, message, detail=None):
        """Log an error; optional *detail* goes to the file only."""
        print(f"[ERROR] {message}", file=sys.stderr)
        self.file_logger.error(message)
        if detail:
            self.file_logger.error(f"Error details: {detail}")

    def exception(self, message, exc_info=True):
        """Log an exception with traceback to the file only."""
        print(f"[ERROR] {message} (details in {self.log_file})", file=sys.stderr)
        self.file_logger.exception(message, exc_info=exc_info)

    def critical(self, message, detail=None):
        """Log a critical error; optional *detail* goes to the file only."""
        print(f"[CRITICAL] {message}", file=sys.stderr)
        self.file_logger.critical(message)
        if detail:
            self.file_logger.critical(f"Critical details: {detail}")
|
|
72
|
+
|
|
73
|
+
def setup_logger(tool_name, log_file=None, verbose=False):
    """Convenience factory: build and return an AnalysisLogger instance."""
    logger = AnalysisLogger(tool_name, log_file=log_file, verbose=verbose)
    return logger
|