CAPE-parsers 0.1.42__py3-none-any.whl → 0.1.54__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cape_parsers/CAPE/community/AgentTesla.py +25 -10
- cape_parsers/CAPE/community/Amadey.py +199 -29
- cape_parsers/CAPE/community/Arkei.py +13 -15
- cape_parsers/CAPE/community/AsyncRAT.py +4 -2
- cape_parsers/CAPE/community/AuroraStealer.py +9 -6
- cape_parsers/CAPE/community/Carbanak.py +7 -7
- cape_parsers/CAPE/community/CobaltStrikeBeacon.py +5 -4
- cape_parsers/CAPE/community/CobaltStrikeStager.py +4 -1
- cape_parsers/CAPE/community/DCRat.py +4 -2
- cape_parsers/CAPE/community/Fareit.py +8 -9
- cape_parsers/CAPE/community/KoiLoader.py +3 -3
- cape_parsers/CAPE/community/LokiBot.py +11 -8
- cape_parsers/CAPE/community/Lumma.py +58 -40
- cape_parsers/CAPE/community/MonsterV2.py +93 -0
- cape_parsers/CAPE/community/MyKings.py +52 -0
- cape_parsers/CAPE/community/NanoCore.py +9 -9
- cape_parsers/CAPE/community/Nighthawk.py +1 -0
- cape_parsers/CAPE/community/Njrat.py +4 -4
- cape_parsers/CAPE/community/PhemedroneStealer.py +2 -0
- cape_parsers/CAPE/community/Snake.py +31 -18
- cape_parsers/CAPE/community/SparkRAT.py +3 -1
- cape_parsers/CAPE/community/Stealc.py +95 -63
- cape_parsers/CAPE/community/VenomRAT.py +4 -2
- cape_parsers/CAPE/community/WinosStager.py +75 -0
- cape_parsers/CAPE/community/XWorm.py +4 -2
- cape_parsers/CAPE/community/XenoRAT.py +4 -2
- cape_parsers/CAPE/core/AdaptixBeacon.py +7 -5
- cape_parsers/CAPE/core/AuraStealer.py +100 -0
- cape_parsers/CAPE/core/Azorult.py +5 -3
- cape_parsers/CAPE/core/BitPaymer.py +5 -2
- cape_parsers/CAPE/core/BlackDropper.py +10 -5
- cape_parsers/CAPE/core/Blister.py +12 -10
- cape_parsers/CAPE/core/BruteRatel.py +20 -7
- cape_parsers/CAPE/core/BumbleBee.py +34 -22
- cape_parsers/CAPE/core/DarkGate.py +3 -3
- cape_parsers/CAPE/core/DoppelPaymer.py +4 -2
- cape_parsers/CAPE/core/DridexLoader.py +4 -3
- cape_parsers/CAPE/core/Formbook.py +2 -2
- cape_parsers/CAPE/core/GuLoader.py +2 -5
- cape_parsers/CAPE/core/IcedID.py +5 -5
- cape_parsers/CAPE/core/IcedIDLoader.py +4 -4
- cape_parsers/CAPE/core/Latrodectus.py +14 -10
- cape_parsers/CAPE/core/NitroBunnyDownloader.py +151 -0
- cape_parsers/CAPE/core/Oyster.py +8 -6
- cape_parsers/CAPE/core/PikaBot.py +6 -6
- cape_parsers/CAPE/core/PlugX.py +3 -1
- cape_parsers/CAPE/core/QakBot.py +2 -1
- cape_parsers/CAPE/core/Quickbind.py +7 -11
- cape_parsers/CAPE/core/RedLine.py +2 -2
- cape_parsers/CAPE/core/Remcos.py +59 -51
- cape_parsers/CAPE/core/Rhadamanthys.py +175 -36
- cape_parsers/CAPE/core/SmokeLoader.py +2 -2
- cape_parsers/CAPE/core/Socks5Systemz.py +5 -5
- cape_parsers/CAPE/core/SquirrelWaffle.py +3 -3
- cape_parsers/CAPE/core/Strrat.py +1 -1
- cape_parsers/CAPE/core/WarzoneRAT.py +3 -2
- cape_parsers/CAPE/core/Zloader.py +21 -15
- cape_parsers/RATDecoders/test_rats.py +1 -0
- cape_parsers/__init__.py +14 -5
- cape_parsers/deprecated/BlackNix.py +59 -0
- cape_parsers/{CAPE/core → deprecated}/BuerLoader.py +1 -1
- cape_parsers/{CAPE/core → deprecated}/ChChes.py +3 -3
- cape_parsers/{CAPE/core → deprecated}/Enfal.py +1 -1
- cape_parsers/{CAPE/core → deprecated}/EvilGrab.py +5 -6
- cape_parsers/{CAPE/community → deprecated}/Greame.py +3 -1
- cape_parsers/{CAPE/core → deprecated}/HttpBrowser.py +7 -8
- cape_parsers/{CAPE/community → deprecated}/Pandora.py +2 -0
- cape_parsers/{CAPE/community → deprecated}/Punisher.py +2 -1
- cape_parsers/{CAPE/core → deprecated}/RCSession.py +7 -9
- cape_parsers/{CAPE/community → deprecated}/REvil.py +10 -5
- cape_parsers/{CAPE/core → deprecated}/RedLeaf.py +5 -7
- cape_parsers/{CAPE/community → deprecated}/Retefe.py +0 -2
- cape_parsers/{CAPE/community → deprecated}/Rozena.py +2 -5
- cape_parsers/{CAPE/community → deprecated}/SmallNet.py +6 -2
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info}/METADATA +24 -3
- cape_parsers-0.1.54.dist-info/RECORD +117 -0
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info}/WHEEL +1 -1
- cape_parsers/CAPE/community/BlackNix.py +0 -57
- cape_parsers/CAPE/core/Stealc.py +0 -21
- cape_parsers-0.1.42.dist-info/RECORD +0 -113
- /cape_parsers/{CAPE/community → deprecated}/BackOffLoader.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/BackOffPOS.py +0 -0
- /cape_parsers/{CAPE/core → deprecated}/Emotet.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/PoisonIvy.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/TSCookie.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/TrickBot.py +0 -0
- /cape_parsers/{CAPE/core → deprecated}/UrsnifV3.py +0 -0
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info/licenses}/LICENSE +0 -0
@@ -6,8 +6,10 @@ except ImportError as e:
     print(f"Problem to import extract_strings: {e}")


-def extract_config(data):
+def extract_config(data: bytes):
+    config = {}
     config_dict = {}
+    is_c2_found = False
     with suppress(Exception):
         if data[:2] == b"MZ":
             lines = extract_strings(data=data, on_demand=True, minchars=3)
@@ -22,20 +24,24 @@ def extract_config(data):
                 # Data Exfiltration via Telegram
                 if "api.telegram.org" in lines[base + x]:
                     config_dict["Protocol"] = "Telegram"
-
+                    config["CNCs"] = lines[base + x]
                     config_dict["Password"] = lines[base + x + 1]
+                    is_c2_found = True
                     break
                 # Data Exfiltration via Discord
                 elif "discord" in lines[base + x]:
                     config_dict["Protocol"] = "Discord"
-
+                    config["CNCs"] = [lines[base + x]]
+                    is_c2_found = True
                     break
                 # Data Exfiltration via FTP
                 elif "ftp:" in lines[base + x]:
                     config_dict["Protocol"] = "FTP"
-
-
-
+                    hostname = lines[base + x]
+                    username = lines[base + x + 1]
+                    password = lines[base + x + 2]
+                    config["CNCs"] = [f"ftp://{username}:{password}@{hostname}"]
+                    is_c2_found = True
                     break
                 # Data Exfiltration via SMTP
                 elif "@" in lines[base + x]:
@@ -45,15 +51,17 @@ def extract_config(data):
                        config_dict["Port"] = lines[base + x - 2]
                    elif lines[base + x - 2] in {"true", "false"} and lines[base + x - 3].isdigit() and len(lines[base + x - 3]) <= 5:
                        config_dict["Port"] = lines[base + x - 3]
-                   config_dict["
+                   config_dict["CNCs"] = [lines[base + x - 1]]
                    config_dict["Username"] = lines[base + x]
                    config_dict["Password"] = lines[base + x + 1]
                    if "@" in lines[base + x + 2]:
                        config_dict["EmailTo"] = lines[base + x + 2]
+                   is_c2_found = True
                    break
             # Get Persistence Payload Filename
             for x in range(2, 22):
-
+                # Only extract Persistence Filename when a C2 is detected.
+                if ".exe" in lines[base + x] and is_c2_found:
                     config_dict["Persistence_Filename"] = lines[base + x]
                     break
             # Get External IP Check Services
@@ -72,6 +80,13 @@ def extract_config(data):
             for x in range(1, 8):
                 if any(s in lines[base + index + x] for s in temp_match):
                     config_dict["Protocol"] = "HTTP(S)"
-
+                    config["CNCs"] = lines[base + index + x]
                     break
-
+    if config or config_dict:
+        return config.setdefault("raw", config_dict)
+
+if __name__ == "__main__":
+    import sys
+
+    with open(sys.argv[1], "rb") as f:
+        print(extract_config(f.read()))
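The hunks above (evidently the AgentTesla community parser, given the Telegram/Discord/FTP/SMTP exfiltration fields) show the refactor that recurs through most of the parsers in this release: normalized values such as C2 endpoints move to top-level keys like `CNCs`, while parser-specific fields stay in, or move into, a `raw` sub-dictionary. A hedged illustration of the resulting shape, with invented values rather than real sample output:

```python
# Illustration only: the approximate dict shape produced by parsers that follow
# the CNCs/raw split visible in the hunks above. All values here are made up.
example_config = {
    "CNCs": ["https://api.telegram.org/bot<token>/sendDocument"],  # normalized C2 list
    "raw": {                                                       # parser-specific fields
        "Protocol": "Telegram",
        "Password": "<telegram bot token>",
        "Persistence_Filename": "update.exe",
    },
}
```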
@@ -1,43 +1,213 @@
 import base64
+import yara
+import pefile
+import json
+import struct
 import re

-str_hash_data = 'd6052c4fe86a6346964a6bbbe2423e20'
-str_alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 '

-
-
+RULE_SOURCE_KEY = """
+rule Amadey_Key_String
+{
+    meta:
+        author = "YungBinary"
+        description = "Find decryption key in Amadey."
+    strings:
+        $chunk_1 = {
+            6A 20
+            68 ?? ?? ?? ??
+            B9 ?? ?? ?? ??
+            E8 ?? ?? ?? ??
+            68 ?? ?? ?? ??
+            E8 ?? ?? ?? ??
+            59
+            C3
+        }
+    condition:
+        $chunk_1
+}
+"""

-
-
+RULE_SOURCE_ENCODED_STRINGS = """
+rule Amadey_Encoded_Strings
+{
+    meta:
+        author = "YungBinary"
+        description = "Find encoded strings in Amadey."
+    strings:
+        $chunk_1 = {
+            6A ??
+            68 ?? ?? ?? ??
+            B9 ?? ?? ?? ??
+            E8 ?? ?? ?? ??
+            68 ?? ?? ?? ??
+            E8 ?? ?? ?? ??
+            59
+            C3
+        }
+    condition:
+        $chunk_1
+}
+"""

-    for i in range(len(str_data)):
-        str_hash += str_hash_data[i % len(str_hash_data)]

-
+def contains_non_printable(byte_array):
+    for byte in byte_array:
+        if not chr(byte).isprintable():
+            return True
+    return False

-
-
-
+
+def yara_scan_generator(raw_data, rule_source):
+    yara_rules = yara.compile(source=rule_source)
+    matches = yara_rules.match(data=raw_data)
+
+    for match in matches:
+        for block in match.strings:
+            for instance in block.instances:
+                yield instance.offset, block.identifier
+
+
+def get_keys(pe, data):
+    image_base = pe.OPTIONAL_HEADER.ImageBase
+    keys = []
+    for offset, _ in yara_scan_generator(data, RULE_SOURCE_KEY):
+        try:
+            key_string_rva = struct.unpack('i', data[offset + 3 : offset + 7])[0]
+            key_string_dword_offset = pe.get_offset_from_rva(key_string_rva - image_base)
+            key_string = pe.get_string_from_data(key_string_dword_offset, data)
+
+            if b"=" not in key_string:
+                keys.append(key_string.decode())
+
+            if len(keys) == 2:
+                return keys
+
+        except Exception:
+            continue
+
+    return []
+
+
+def get_encoded_strings(pe, data):
+    encoded_strings = []
+    image_base = pe.OPTIONAL_HEADER.ImageBase
+    for offset, _ in yara_scan_generator(data, RULE_SOURCE_ENCODED_STRINGS):
+
+        try:
+            encoded_string_size = data[offset + 1]
+            encoded_string_rva = struct.unpack('i', data[offset + 3 : offset + 7])[0]
+            encoded_string_dword_offset = pe.get_offset_from_rva(encoded_string_rva - image_base)
+            encoded_string = pe.get_string_from_data(encoded_string_dword_offset, data)
+
+            # Make sure the string matches length from operand
+            if encoded_string_size != len(encoded_string):
+                continue
+
+            encoded_strings.append(encoded_string.decode())
+
+        except Exception:
+            continue
+
+    return encoded_strings
+
+
+def decode_amadey_string(key: str, encoded_str: str) -> bytes:
+    """
+    Decode Amadey encoded strings that look like base64
+    """
+    alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 "
+
+    decoded = ""
+    for i in range(len(encoded_str)):
+        if encoded_str[i] == "=":
+            decoded += "="
             continue
-        alphabet_count = str_alphabet.find(str_data[i])
-        hash_count = str_alphabet.find(str_hash[i])
-        index_calc = (alphabet_count + len(str_alphabet) - hash_count) % len(str_alphabet)
-        out += str_alphabet[index_calc]

-
+        index_1 = alphabet.index(encoded_str[i % len(encoded_str)])
+        index_2 = alphabet.index(key[i % len(key)])
+
+        index_result = (index_1 + (0x3F - index_2) + 0x3F) % 0x3F
+
+        decoded += alphabet[index_result]
+
+    decoded = base64.b64decode(decoded)
+
+    return decoded
+
+
+def find_campaign_id(data):
+    pattern = br'\x00\x00\x00([0-9a-f]{6})\x00\x00'
+    matches = re.findall(pattern, data)
+    if matches:
+        return matches[0]
+
+
+def extract_config(data):
+    pe = pefile.PE(data=data, fast_load=True)
+    # image_base = pe.OPTIONAL_HEADER.ImageBase
+
+    keys = get_keys(pe, data)
+    if not keys:
+        return {}
+
+    decode_key = keys[0]
+    rc4_key = keys[1]
+    encoded_strings = get_encoded_strings(pe, data)
+
+    decoded_strings = []
+    for encoded_string in encoded_strings:
+        try:
+            decoded_string = decode_amadey_string(decode_key, encoded_string)
+            if not decoded_string or contains_non_printable(decoded_string):
+                continue
+            decoded_strings.append(decoded_string.decode())
+        except Exception:
+            continue
+
+    if not decoded_strings:
+        return {}
+
+    decoded_strings = decoded_strings[:10]
+    final_config = {}
+    version = ""
+    install_dir = ""
+    install_file = ""
+    version_pattern = r"^\d+\.\d{1,2}$"
+    install_dir_pattern = r"^[0-9a-f]{10}$"
+
+    for i in range(len(decoded_strings)):
+        s = decoded_strings[i]
+        if s.endswith(".php"):
+            c2 = decoded_strings[i-1]
+            final_config.setdefault("CNCs", []).append(f"http://{c2}{s}")
+        elif re.match(version_pattern, s):
+            version = s
+        elif re.match(install_dir_pattern, s):
+            install_dir = s
+        elif s.endswith(".exe"):
+            install_file = s
+
+    if version:
+        final_config["version"] = version
+    if install_dir:
+        final_config.setdefault("raw", {})["install_dir"] = install_dir
+    if install_file:
+        final_config.setdefault("raw", {})["install_file"] = install_file
+
+    final_config["cryptokey"] = rc4_key
+    final_config["cryptokey_type"] = "RC4"
+
+    campaign_id = find_campaign_id(data)
+    if campaign_id:
+        final_config["campaign_id"] = campaign_id.decode()
+
+    return final_config

-file_data = open('/tmp/amadey.bin','rb').read()

-strings = []
-for m in re.finditer(rb'[a-zA-Z =0-9]{4,}',file_data):
-    strings.append(m.group().decode('utf-8'))

-
-
-        temp = decrypt(s, str_hash_data, str_alphabet)
-        if is_ascii(temp) and len(temp) > 3:
-            print(temp.decode('utf-8'))
-        except:
-            continue
+if __name__ == "__main__":
+    import sys

-
+    with open(sys.argv[1], "rb") as f:
+        print(json.dumps(extract_config(f.read()), indent=4))
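Two details of the Amadey rewrite above are worth spelling out. First, the yara hits anchor fixed instruction encodings: in `get_keys` and `get_encoded_strings`, `offset + 1` is the imm8 operand of the `6A ??` push (the expected string length) and `offset + 3` is the imm32 operand of the `68 ?? ?? ?? ??` push (the string's virtual address), which is why a dword is unpacked there and the image base subtracted before `get_offset_from_rva`. Second, `decode_amadey_string` is a per-character rotation over a 63-character alphabet followed by a base64 decode; `(index_1 + (0x3F - index_2) + 0x3F) % 0x3F` is simply `(index_1 - index_2) mod 63`. Below is a minimal sketch of the inverse operation, useful only for sanity-checking the decoder. It is not part of the package and assumes the base64 form of the plaintext contains no `+` or `/`, which fall outside the alphabet:

```python
import base64

# Same 63-character alphabet as decode_amadey_string in the diff above.
ALPHABET = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 "


def encode_amadey_string(key: str, plaintext: bytes) -> str:
    """Hypothetical inverse of decode_amadey_string: base64-encode, then rotate
    each character forward by the alphabet index of the matching key character."""
    b64 = base64.b64encode(plaintext).decode()
    out = ""
    for i, ch in enumerate(b64):
        if ch == "=":
            out += "="
            continue
        # Raises ValueError if the base64 text contains '+' or '/' (not in ALPHABET).
        out += ALPHABET[(ALPHABET.index(ch) + ALPHABET.index(key[i % len(key)])) % 0x3F]
    return out
```

Round-tripping `encode_amadey_string(key, b"http://example.com/index.php")` through `decode_amadey_string` should return the original bytes.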
@@ -1,7 +1,7 @@
 import struct
 import pefile
 import yara
-
+from contextlib import suppress

 # Hash = 69ba4e2995d6b11bb319d7373d150560ea295c02773fe5aa9c729bfd2c334e1e

@@ -46,10 +46,10 @@ def xor_data(data, key):


 def extract_config(data):
-
+    config = {}

     # Attempt to extract via old method
-
+    with suppress(Exception):
         domain = ""
         uri = ""
         lines = data.decode().split("\n")
@@ -59,14 +59,12 @@ def extract_config(data):
             if line.startswith("/") and line[-4] == ".":
                 uri = line
             if domain and uri:
-
-                return
-    except Exception:
-        pass
+                config.setdefault("CNCs", []).append(f"{domain}{uri}")
+                return config

     # Try with new method

-    #config_dict["Strings"] = []
+    # config_dict["Strings"] = []
     pe = pefile.PE(data=data, fast_load=True)
     image_base = pe.OPTIONAL_HEADER.ImageBase
     domain = ""
@@ -84,17 +82,17 @@ def extract_config(data):
            if rule_str_name.startswith("$decode"):
                key_rva = data[str_decode_offset + 3 : str_decode_offset + 7]
                encoded_str_rva = data[str_decode_offset + 8 : str_decode_offset + 12]
-               #dword_rva = data[str_decode_offset + 21 : str_decode_offset + 25]
+               # dword_rva = data[str_decode_offset + 21 : str_decode_offset + 25]

                key_offset = pe.get_offset_from_rva(struct.unpack("i", key_rva)[0] - image_base)
                encoded_str_offset = pe.get_offset_from_rva(struct.unpack("i", encoded_str_rva)[0] - image_base)
-               #dword_offset = struct.unpack("i", dword_rva)[0]
-               #dword_name = f"dword_{hex(dword_offset)[2:]}"
+               # dword_offset = struct.unpack("i", dword_rva)[0]
+               # dword_name = f"dword_{hex(dword_offset)[2:]}"

                key = data[key_offset : key_offset + str_size]
                encoded_str = data[encoded_str_offset : encoded_str_offset + str_size]
                decoded_str = xor_data(encoded_str, key).decode()
-               #config_dict["Strings"].append({dword_name : decoded_str})
+               # config_dict["Strings"].append({dword_name : decoded_str})

                if last_str in ("http://", "https://"):
                    domain += decoded_str
@@ -114,12 +112,12 @@ def extract_config(data):
                continue

    if domain and uri:
-
+        config.setdefault("CNCs", []).append(f"{domain}{uri}")

    if botnet_id:
-
+        config.setdefault("botnet", botnet_id)

-    return
+    return config


 if __name__ == "__main__":
@@ -5,10 +5,10 @@ import os
 from rat_king_parser.rkp import RATConfigParser

 HAVE_ASYNCRAT_COMMON = False
-module_file_path =
+module_file_path = "/opt/CAPEv2/data/asyncrat_common.py"
 if os.path.exists(module_file_path):
     try:
-        module_name = os.path.basename(module_file_path).replace(
+        module_name = os.path.basename(module_file_path).replace(".py", "")
         spec = importlib.util.spec_from_file_location(module_name, module_file_path)
         asyncrat_common = importlib.util.module_from_spec(spec)
         sys.modules[module_name] = asyncrat_common
@@ -17,6 +17,7 @@ if os.path.exists(module_file_path):
     except Exception as e:
         print("Error loading asyncrat_common.py", e)

+
 def extract_config(data: bytes):
     config = RATConfigParser(data=data, remap_config=True).report.get("config", {})
     if config and HAVE_ASYNCRAT_COMMON:
@@ -24,6 +25,7 @@ def extract_config(data: bytes):

     return config

+
 if __name__ == "__main__":
     data = open(sys.argv[1], "rb").read()
     print(extract_config(data))
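The hunk above (and the identical one for the other rat_king_parser-based parser further down) hard-codes an optional helper path and loads it with `importlib`. Only part of the idiom is visible in the changed lines; the standard pattern it belongs to looks roughly like this (a generic sketch, not the package's exact code):

```python
import importlib.util
import sys


def load_optional_module(module_file_path: str):
    """Generic sketch of the importlib-from-path idiom used above:
    load a module from an arbitrary file path, or return None on failure."""
    module_name = module_file_path.rsplit("/", 1)[-1].replace(".py", "")
    spec = importlib.util.spec_from_file_location(module_name, module_file_path)
    if spec is None or spec.loader is None:
        return None
    module = importlib.util.module_from_spec(spec)
    sys.modules[module_name] = module  # register before executing, as the hunk does
    spec.loader.exec_module(module)    # runs the module body (the step outside this hunk's context)
    return module
```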
@@ -32,9 +32,12 @@ def extract_config(data):
         key = item.split(":")[0].strip("{").strip('"')
         value = item.split(":")[1].strip('"')
         if key == "IP":
-
-
-            config_dict[
+            config_dict["CNCs"] = [f"tcp://{value}"]
+        elif key == "BuildID":
+            config_dict["build"] = value
+        else:
+            if value:
+                config_dict.setdefault("raw", {})[key] = value

     grabber_found = False

@@ -47,13 +50,13 @@ def extract_config(data):
            data_dict = json.loads(decoded_str)
            for elem in data_dict:
                if elem["Method"] == "DW":
-                   config_dict["Loader module"] = elem
+                   config_dict.setdefault("raw", {})["Loader module"] = elem

        if b"PS" in decoded_str:
            data_dict = json.loads(decoded_str)
            for elem in data_dict:
                if elem["Method"] == "PS":
-                   config_dict["PowerShell module"] = elem
+                   config_dict.setdefault("raw", {})["PowerShell module"] = elem

        if b"Path" in decoded_str:
            grabber_found = True
@@ -68,6 +71,6 @@ def extract_config(data):

        if not grabber_found:
            grabber_found = True
-           config_dict["Grabber"] = cleanup_str
+           config_dict.setdefault("raw", {})["Grabber"] = cleanup_str

     return config_dict
@@ -158,22 +158,22 @@ def extract_config(filebuf):
     if dec:
         ver = re.findall(r"^(\d+\.\d+)$", dec)
         if ver:
-            cfg["
+            cfg["version"] = ver[0]

     data = data_sections[0].get_data()
     items = data.split(b"\x00")

     with suppress(IndexError, UnicodeDecodeError, ValueError):
-        cfg["Unknown 1"] = decode_string(items[0], sbox).decode("utf8")
-        cfg["Unknown 2"] = decode_string(items[8], sbox).decode("utf8")
+        cfg.setdefault("raw", {})["Unknown 1"] = decode_string(items[0], sbox).decode("utf8")
+        cfg.setdefault("raw", {})["Unknown 2"] = decode_string(items[8], sbox).decode("utf8")
         c2_dec = decode_string(items[10], sbox).decode("utf8")
         if "|" in c2_dec:
             c2_dec = c2_dec.split("|")
-        cfg["
-        if float(cfg["
-            cfg["
+        cfg["CNCs"] = c2_dec
+        if float(cfg["version"]) < 1.7:
+            cfg["campaign"] = decode_string(items[276], sbox).decode("utf8")
         else:
-            cfg["
+            cfg["campaign"] = decode_string(items[25], sbox).decode("utf8")

     return cfg

@@ -360,11 +360,11 @@ class cobaltstrikeConfig:
             if parsed_setting == "Not Found" and quiet:
                 continue
             if not isinstance(parsed_setting, list):
-                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val=parsed_setting))
+                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val=parsed_setting))  # noqa: G001
             elif parsed_setting == []:
-                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val="Empty"))
+                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val="Empty"))  # noqa: G001
             else:
-                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val=parsed_setting[0]))
+                log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val=parsed_setting[0]))  # noqa: G001
             for val in parsed_setting[1:]:
                 log.debug(" " * COLUMN_WIDTH, end="")
                 print(val)
@@ -460,4 +460,5 @@ def extract_config(data):
     output = cobaltstrikeConfig(data).parse_config(as_json=True)
     if output is None:
         output = cobaltstrikeConfig(data).parse_encrypted_config(as_json=True)
-
+    if output:
+        return {"raw": output}
@@ -5,10 +5,10 @@ import os
 from rat_king_parser.rkp import RATConfigParser

 HAVE_ASYNCRAT_COMMON = False
-module_file_path =
+module_file_path = "/opt/CAPEv2/data/asyncrat_common.py"
 if os.path.exists(module_file_path):
     try:
-        module_name = os.path.basename(module_file_path).replace(
+        module_name = os.path.basename(module_file_path).replace(".py", "")
         spec = importlib.util.spec_from_file_location(module_name, module_file_path)
         asyncrat_common = importlib.util.module_from_spec(spec)
         sys.modules[module_name] = asyncrat_common
@@ -17,6 +17,7 @@ if os.path.exists(module_file_path):
     except Exception as e:
         print("Error loading asyncrat_common.py", e)

+
 def extract_config(data: bytes):
     config = RATConfigParser(data=data, remap_config=True).report.get("config", {})
     if config and HAVE_ASYNCRAT_COMMON:
@@ -24,6 +25,7 @@ def extract_config(data: bytes):

     return config

+
 if __name__ == "__main__":
     data = open(sys.argv[1], "rb").read()
     print(extract_config(data))
@@ -35,10 +35,8 @@ def extract_config(memdump_path, read=False):
     if buf and len(buf[0]) > 200:
         cData = buf[0][200:]
     """
-
-
-        "downloads": [],
-    }
+    config = {}
+    artifacts_raw = {}

     start = F.find(b"YUIPWDFILE0YUIPKDFILE0YUICRYPTED0YUI1.0")
     if start:
@@ -53,15 +51,16 @@ def extract_config(memdump_path, read=False):
                # url = self._check_valid_url(url)
                if url is None:
                    continue
+               url = url.lower()
                if gate_url.match(url):
-
+                   config.setdefault("CNCs", []).append(url.decode())
                elif exe_url.match(url) or dll_url.match(url):
-                   artifacts_raw["downloads"].append(url.
+                   artifacts_raw["downloads"].append(url.decode())
        except Exception as e:
            print(e, sys.exc_info(), "PONY")
-
-
-    return
+    config["CNCs"] = list(set(config["controllers"]))
+    config.setdefault("raw", {})["downloads"] = list(set(artifacts_raw["downloads"]))
+    return config


 if __name__ == "__main__":
@@ -89,7 +89,7 @@ def xor_data(data, key):


 def extract_config(data):
-
+    config = {}

     xor_key = b""
     encoded_payload = b""
@@ -119,9 +119,9 @@ def extract_config(data):
     encoded_payload = remove_nulls(encoded_payload, encoded_payload_size)
     decoded_payload = xor_data(encoded_payload, xor_key)

-
+    config["CNCs"] = find_c2(decoded_payload)

-    return
+    return config


 if __name__ == "__main__":
@@ -23,7 +23,7 @@
 import re
 import struct
 import sys
-
+from contextlib import suppress
 import pefile
 from Cryptodome.Cipher import DES3
 from Cryptodome.Util.Padding import unpad
@@ -128,7 +128,8 @@ def decoder(data):
     else:
         x = bytearray(img)

-
+    # ToDo add \.php
+    url_re = rb"https?:\/\/[a-zA-Z0-9\/\.:?\-_]+\.php"
     urls = re.findall(url_re, x)
     if not urls:
         for i in range(len(x)):
@@ -144,21 +145,23 @@ def decoder(data):
     confs = find_conf(img)
     if iv and iv not in (b"", -1) and confs != []:
         for conf in confs:
-
+            with suppress(ValueError):
                 dec = DES3.new(key, DES3.MODE_CBC, iv)
                 temp = dec.decrypt(conf)
                 temp = unpad(temp, 8)
-
-
-
-                pass
+                if not temp.endswith(b".php"):
+                    continue
+                urls.append("http://" + temp.decode())
     return urls


 def extract_config(filebuf):

     urls = decoder(filebuf)
-
+    if urls:
+        return {"CNCs": urls}
+    else:
+        return {}


 if __name__ == "__main__":