CAPE-parsers 0.1.42__py3-none-any.whl → 0.1.54__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cape_parsers/CAPE/community/AgentTesla.py +25 -10
- cape_parsers/CAPE/community/Amadey.py +199 -29
- cape_parsers/CAPE/community/Arkei.py +13 -15
- cape_parsers/CAPE/community/AsyncRAT.py +4 -2
- cape_parsers/CAPE/community/AuroraStealer.py +9 -6
- cape_parsers/CAPE/community/Carbanak.py +7 -7
- cape_parsers/CAPE/community/CobaltStrikeBeacon.py +5 -4
- cape_parsers/CAPE/community/CobaltStrikeStager.py +4 -1
- cape_parsers/CAPE/community/DCRat.py +4 -2
- cape_parsers/CAPE/community/Fareit.py +8 -9
- cape_parsers/CAPE/community/KoiLoader.py +3 -3
- cape_parsers/CAPE/community/LokiBot.py +11 -8
- cape_parsers/CAPE/community/Lumma.py +58 -40
- cape_parsers/CAPE/community/MonsterV2.py +93 -0
- cape_parsers/CAPE/community/MyKings.py +52 -0
- cape_parsers/CAPE/community/NanoCore.py +9 -9
- cape_parsers/CAPE/community/Nighthawk.py +1 -0
- cape_parsers/CAPE/community/Njrat.py +4 -4
- cape_parsers/CAPE/community/PhemedroneStealer.py +2 -0
- cape_parsers/CAPE/community/Snake.py +31 -18
- cape_parsers/CAPE/community/SparkRAT.py +3 -1
- cape_parsers/CAPE/community/Stealc.py +95 -63
- cape_parsers/CAPE/community/VenomRAT.py +4 -2
- cape_parsers/CAPE/community/WinosStager.py +75 -0
- cape_parsers/CAPE/community/XWorm.py +4 -2
- cape_parsers/CAPE/community/XenoRAT.py +4 -2
- cape_parsers/CAPE/core/AdaptixBeacon.py +7 -5
- cape_parsers/CAPE/core/AuraStealer.py +100 -0
- cape_parsers/CAPE/core/Azorult.py +5 -3
- cape_parsers/CAPE/core/BitPaymer.py +5 -2
- cape_parsers/CAPE/core/BlackDropper.py +10 -5
- cape_parsers/CAPE/core/Blister.py +12 -10
- cape_parsers/CAPE/core/BruteRatel.py +20 -7
- cape_parsers/CAPE/core/BumbleBee.py +34 -22
- cape_parsers/CAPE/core/DarkGate.py +3 -3
- cape_parsers/CAPE/core/DoppelPaymer.py +4 -2
- cape_parsers/CAPE/core/DridexLoader.py +4 -3
- cape_parsers/CAPE/core/Formbook.py +2 -2
- cape_parsers/CAPE/core/GuLoader.py +2 -5
- cape_parsers/CAPE/core/IcedID.py +5 -5
- cape_parsers/CAPE/core/IcedIDLoader.py +4 -4
- cape_parsers/CAPE/core/Latrodectus.py +14 -10
- cape_parsers/CAPE/core/NitroBunnyDownloader.py +151 -0
- cape_parsers/CAPE/core/Oyster.py +8 -6
- cape_parsers/CAPE/core/PikaBot.py +6 -6
- cape_parsers/CAPE/core/PlugX.py +3 -1
- cape_parsers/CAPE/core/QakBot.py +2 -1
- cape_parsers/CAPE/core/Quickbind.py +7 -11
- cape_parsers/CAPE/core/RedLine.py +2 -2
- cape_parsers/CAPE/core/Remcos.py +59 -51
- cape_parsers/CAPE/core/Rhadamanthys.py +175 -36
- cape_parsers/CAPE/core/SmokeLoader.py +2 -2
- cape_parsers/CAPE/core/Socks5Systemz.py +5 -5
- cape_parsers/CAPE/core/SquirrelWaffle.py +3 -3
- cape_parsers/CAPE/core/Strrat.py +1 -1
- cape_parsers/CAPE/core/WarzoneRAT.py +3 -2
- cape_parsers/CAPE/core/Zloader.py +21 -15
- cape_parsers/RATDecoders/test_rats.py +1 -0
- cape_parsers/__init__.py +14 -5
- cape_parsers/deprecated/BlackNix.py +59 -0
- cape_parsers/{CAPE/core → deprecated}/BuerLoader.py +1 -1
- cape_parsers/{CAPE/core → deprecated}/ChChes.py +3 -3
- cape_parsers/{CAPE/core → deprecated}/Enfal.py +1 -1
- cape_parsers/{CAPE/core → deprecated}/EvilGrab.py +5 -6
- cape_parsers/{CAPE/community → deprecated}/Greame.py +3 -1
- cape_parsers/{CAPE/core → deprecated}/HttpBrowser.py +7 -8
- cape_parsers/{CAPE/community → deprecated}/Pandora.py +2 -0
- cape_parsers/{CAPE/community → deprecated}/Punisher.py +2 -1
- cape_parsers/{CAPE/core → deprecated}/RCSession.py +7 -9
- cape_parsers/{CAPE/community → deprecated}/REvil.py +10 -5
- cape_parsers/{CAPE/core → deprecated}/RedLeaf.py +5 -7
- cape_parsers/{CAPE/community → deprecated}/Retefe.py +0 -2
- cape_parsers/{CAPE/community → deprecated}/Rozena.py +2 -5
- cape_parsers/{CAPE/community → deprecated}/SmallNet.py +6 -2
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info}/METADATA +24 -3
- cape_parsers-0.1.54.dist-info/RECORD +117 -0
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info}/WHEEL +1 -1
- cape_parsers/CAPE/community/BlackNix.py +0 -57
- cape_parsers/CAPE/core/Stealc.py +0 -21
- cape_parsers-0.1.42.dist-info/RECORD +0 -113
- /cape_parsers/{CAPE/community → deprecated}/BackOffLoader.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/BackOffPOS.py +0 -0
- /cape_parsers/{CAPE/core → deprecated}/Emotet.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/PoisonIvy.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/TSCookie.py +0 -0
- /cape_parsers/{CAPE/community → deprecated}/TrickBot.py +0 -0
- /cape_parsers/{CAPE/core → deprecated}/UrsnifV3.py +0 -0
- {cape_parsers-0.1.42.dist-info → cape_parsers-0.1.54.dist-info/licenses}/LICENSE +0 -0
@@ -1,31 +1,36 @@
 import struct
 import pefile
 import yara
+import ipaddress
+from contextlib import suppress


-#
+# V1 hash = 619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470
+# V2 hash = 2f42dcf05dd87e6352491ff9d4ea3dc3f854df53d548a8da0c323be42df797b6 (32-bit payload)
+# V2 hash = 8301936f439f43579cffe98e11e3224051e2fb890ffe9df680bbbd8db0729387 (64-bit payload)

-RULE_SOURCE = """
+RULE_SOURCE = """
+rule StealC
 {
     meta:
         author = "Yung Binary"
     strings:
-        $decode_1 = {
-
-            68 ?? ?? ?? ??
-            68 ?? ?? ?? ??
-            E8 ?? ?? ?? ??
-        }
-        $decode_2 = {
-            6A ??
-            68 ?? ?? ?? ??
-            68 ?? ?? ?? ??
-            [0-5]
-            E8 ?? ?? ?? ??
-        }
+        $decode_1 = {6A ?? 68 [4] 68 [4] E8}
+        $decode_2 = {6A ?? 68 [4] 68 [4] [0-5] E8}
     condition:
         any of them
-}
+}
+rule StealcV2
+{
+    meta:
+        author = "kevoreilly"
+    strings:
+        $botnet32 = {AB AB AB AB 89 4B ?? C7 43 ?? 0F 00 00 00 88 0B A0 [4] EB 12 3C 20 74 0B 0F B6 06 8B CB 50 E8}
+        $botnet64 = {0F 11 01 48 C7 41 ?? 00 00 00 00 48 8B D9 48 C7 41 ?? 0F 00 00 00 C6 01 00 8A 05 [4] EB ?? 3C 20 74 ?? 48 8B 4B ?? 44 8A 0F}
+    condition:
+        any of them
+}
+"""


 def yara_scan(raw_data):
@@ -38,6 +43,13 @@ def yara_scan(raw_data):
                 yield block.identifier, instance.offset


+def _is_ip(ip):
+    try:
+        ipaddress.ip_address(ip)
+        return True
+    except Exception:
+        return False
+
 def xor_data(data, key):
     decoded = bytearray()
     for i in range(len(data)):
@@ -45,78 +57,98 @@ def xor_data(data, key):
     return decoded


-def
-
+def extract_ascii_string(data: bytes, offset: int, max_length=4096) -> str:
+    if offset >= len(data):
+        raise ValueError("Offset beyond data bounds")
+    end = data.find(b'\x00', offset, offset + max_length)
+    if end == -1:
+        end = offset + max_length
+    return data[offset:end].decode('ascii', errors='replace')

-
-
-
-
+
+def parse_text(data):
+    global domain, uri
+    with suppress(Exception):
        lines = data.decode().split("\n")
+        if not lines:
+            return
        for line in lines:
            if line.startswith("http") and "://" in line:
                domain = line
-
+            elif _is_ip(line):
+                domain = line
+            if line.startswith("/") and len(line) >= 4 and line[-4] == ".":
                uri = line
-        if domain and uri:
-            config_dict.setdefault("C2", []).append(f"{domain}{uri}")
-            return config_dict
-    except Exception:
-        pass

-    # Try with new method

-
-
-
-
-    uri = ""
-    botnet_id = ""
+def parse_pe(data):
+    global domain, uri, botnet_id
+    pe = None
+    image_base = 0
     last_str = ""
+    with suppress(Exception):
+        pe = pefile.PE(data=data, fast_load=True)
+    if not pe:
+        return
+    image_base = pe.OPTIONAL_HEADER.ImageBase
+    if not image_base:
+        return
     for match in yara_scan(data):
         try:
             rule_str_name, str_decode_offset = match
+            if rule_str_name.startswith("$botnet"):
+                botnet_var = struct.unpack("I", data[str_decode_offset - 4 : str_decode_offset])[0]
+                if hasattr(pe, 'OPTIONAL_HEADER'):
+                    magic = pe.OPTIONAL_HEADER.Magic
+                    if magic == 0x10b:  # 32-bit
+                        botnet_offset = pe.get_offset_from_rva(botnet_var - image_base)
+                    elif magic == 0x20b:  # 64-bit
+                        botnet_offset = pe.get_offset_from_rva(pe.get_rva_from_offset(str_decode_offset) + botnet_var)
+                    if botnet_offset:
+                        botnet_id = extract_ascii_string(data, botnet_offset)
             str_size = int(data[str_decode_offset + 1])
             # Ignore size 0 strings
             if not str_size:
                 continue
-
             if rule_str_name.startswith("$decode"):
                 key_rva = data[str_decode_offset + 3 : str_decode_offset + 7]
                 encoded_str_rva = data[str_decode_offset + 8 : str_decode_offset + 12]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                elif "http" in decoded_str and "://" in decoded_str:
-                    domain = decoded_str
-                elif uri == "" and decoded_str.startswith("/") and decoded_str[-4] == ".":
-                    uri = decoded_str
-                elif last_str[0] == '/' and last_str[-1] == '/':
-                    botnet_id = decoded_str
-
-                last_str = decoded_str
-
+                key_offset = pe.get_offset_from_rva(struct.unpack("i", key_rva)[0] - image_base)
+                encoded_str_offset = pe.get_offset_from_rva(struct.unpack("i", encoded_str_rva)[0] - image_base)
+                key = data[key_offset : key_offset + str_size]
+                encoded_str = data[encoded_str_offset : encoded_str_offset + str_size]
+                decoded_str = xor_data(encoded_str, key).decode()
+                if last_str in ("http://", "https://"):
+                    domain += decoded_str
+                elif decoded_str in ("http://", "https://"):
+                    domain = decoded_str
+                elif "http" in decoded_str and "://" in decoded_str:
+                    domain = decoded_str
+                elif uri is None and decoded_str.startswith("/") and decoded_str[-4] == ".":
+                    uri = decoded_str
+                elif last_str[0] == "/" and last_str[-1] == "/":
+                    botnet_id = decoded_str
+                last_str = decoded_str
         except Exception:
             continue
+    return
+
+
+def extract_config(data):
+    global domain, uri, botnet_id
+    domain = uri = botnet_id = None
+    config_dict = {}
+
+    if data[:2] == b'MZ':
+        parse_pe(data)
+    else:
+        parse_text(data)

     if domain and uri:
-        config_dict.setdefault("
+        config_dict.setdefault("CNCs", []).append(f"{domain}{uri}")

     if botnet_id:
-        config_dict.setdefault("
+        config_dict.setdefault("botnet", botnet_id)

     return config_dict

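The rewritten Stealc parser above splits extraction into parse_text and parse_pe, decodes each config string by XOR-ing it against a key of the same length referenced next to the matched instruction, and reads the botnet ID as a NUL-terminated ASCII string. Below is a minimal standalone sketch of those two helpers with fabricated sample bytes; the body of xor_data is only partially visible in the hunk, so the byte-wise XOR here is an assumption.

```python
def xor_data(data: bytes, key: bytes) -> bytearray:
    # Byte-wise XOR; in the parser, key and data are sliced to the same length (str_size).
    decoded = bytearray()
    for i in range(len(data)):
        decoded.append(data[i] ^ key[i % len(key)])
    return decoded


def extract_ascii_string(data: bytes, offset: int, max_length: int = 4096) -> str:
    # Read at most max_length bytes starting at offset, stopping at the first NUL.
    end = data.find(b"\x00", offset, offset + max_length)
    if end == -1:
        end = offset + max_length
    return data[offset:end].decode("ascii", errors="replace")


if __name__ == "__main__":
    key = bytes(range(18))                                      # fabricated key
    encoded = bytes(xor_data(b"http://198.51.100.7/", key))     # hypothetical plaintext
    print(xor_data(encoded, key).decode())                      # -> http://198.51.100.7/
    print(extract_ascii_string(b"\x00\x00button\x00junk", 2))   # -> button
```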
@@ -5,10 +5,10 @@ import os
 from rat_king_parser.rkp import RATConfigParser

 HAVE_ASYNCRAT_COMMON = False
-module_file_path =
+module_file_path = "/opt/CAPEv2/data/asyncrat_common.py"
 if os.path.exists(module_file_path):
     try:
-        module_name = os.path.basename(module_file_path).replace(
+        module_name = os.path.basename(module_file_path).replace(".py", "")
         spec = importlib.util.spec_from_file_location(module_name, module_file_path)
         asyncrat_common = importlib.util.module_from_spec(spec)
         sys.modules[module_name] = asyncrat_common
@@ -17,6 +17,7 @@ if os.path.exists(module_file_path):
     except Exception as e:
         print("Error loading asyncrat_common.py", e)

+
 def extract_config(data: bytes):
     config = RATConfigParser(data=data, remap_config=True).report.get("config", {})
     if config and HAVE_ASYNCRAT_COMMON:
@@ -24,6 +25,7 @@ def extract_config(data: bytes):

     return config

+
 if __name__ == "__main__":
     data = open(sys.argv[1], "rb").read()
     print(extract_config(data))

@@ -0,0 +1,75 @@
+"""
+Description: Winos 4.0 "OnlineModule" config parser
+Author: x.com/YungBinary
+"""
+
+from contextlib import suppress
+import re
+
+
+CONFIG_KEY_MAP = {
+    "dd": "execution_delay_seconds",
+    "cl": "communication_interval_seconds",
+    "bb": "version",
+    "bz": "comment",
+    "jp": "keylogger",
+    "bh": "end_bluescreen",
+    "ll": "anti_traffic_monitoring",
+    "dl": "entrypoint",
+    "sh": "process_daemon",
+    "kl": "process_hollowing"
+}
+
+
+def find_config(data):
+    start = ":db|".encode("utf-16le")
+    end = ":1p|".encode("utf-16le")
+    pattern = re.compile(re.escape(start) + b".*?" + re.escape(end), re.DOTALL)
+    match = pattern.search(data)
+    if match:
+        return match.group(0).decode("utf-16le")
+
+
+def extract_config(data: bytes) -> dict:
+    config_dict = {}
+    final_config = {}
+
+    with suppress(Exception):
+        config = find_config(data)
+        if not config:
+            return config_dict
+
+        # Reverse the config string, which is delimited by '|'
+        config = config[::-1]
+        # Remove leading/trailing pipes and split into key/value pairs
+        elements = [element for element in config.strip('|').split('|') if ':' in element]
+        # Split each element for key : value in a dictionary
+        config_dict = dict(element.split(':', 1) for element in elements)
+        if config_dict:
+            # Handle extraction and formatting of CNCs
+            for i in range(1, 4):
+                p, o, t = config_dict.get(f"p{i}"), config_dict.get(f"o{i}"), config_dict.get(f"t{i}")
+                if p and p != "127.0.0.1" and o:
+                    protocol = {"0": "udp", "1": "tcp"}.get(t)
+                    if protocol:
+                        cnc = f"{protocol}://{p}:{o}"
+                        final_config.setdefault("CNCs", []).append(cnc)
+
+            if "CNCs" not in final_config:
+                return {}
+
+            final_config["CNCs"] = list(set(final_config["CNCs"]))
+            # Extract campaign ID
+            final_config["campaign_id"] = "default" if config_dict["fz"] == "\u9ed8\u8ba4" else config_dict["fz"]
+
+            # Map keys, e.g. dd -> execution_delay_seconds
+            final_config["raw"] = {v: config_dict[k] for k, v in CONFIG_KEY_MAP.items() if k in config_dict}
+
+    return final_config
+
+
+if __name__ == "__main__":
+    import sys
+
+    with open(sys.argv[1], "rb") as f:
+        print(extract_config(f.read()))

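The new Winos "OnlineModule" parser locates a UTF-16LE blob bracketed by ":db|" and ":1p|", reverses it, and treats the result as a "|"-delimited list of key:value pairs; pN/oN/tN triples become C2 URLs and fz carries the campaign ID. A toy walk-through of that key:value handling on a fabricated, already-recovered string:

```python
# Fabricated config string for illustration; in a real sample this text is stored
# reversed and UTF-16LE encoded inside the payload.
sample = "p1:203.0.113.10|o1:6688|t1:1|p2:127.0.0.1|o2:80|t2:1|fz:test|dd:5"

pairs = dict(e.split(":", 1) for e in sample.strip("|").split("|") if ":" in e)

cncs = []
for i in range(1, 4):
    host, port, proto = pairs.get(f"p{i}"), pairs.get(f"o{i}"), pairs.get(f"t{i}")
    if host and host != "127.0.0.1" and port:
        scheme = {"0": "udp", "1": "tcp"}.get(proto)
        if scheme:
            cncs.append(f"{scheme}://{host}:{port}")

print(sorted(set(cncs)))   # ['tcp://203.0.113.10:6688']
print(pairs.get("fz"))     # campaign ID ('test' here)
```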
@@ -5,10 +5,10 @@ import os
 from rat_king_parser.rkp import RATConfigParser

 HAVE_ASYNCRAT_COMMON = False
-module_file_path =
+module_file_path = "/opt/CAPEv2/data/asyncrat_common.py"
 if os.path.exists(module_file_path):
     try:
-        module_name = os.path.basename(module_file_path).replace(
+        module_name = os.path.basename(module_file_path).replace(".py", "")
         spec = importlib.util.spec_from_file_location(module_name, module_file_path)
         asyncrat_common = importlib.util.module_from_spec(spec)
         sys.modules[module_name] = asyncrat_common
@@ -17,6 +17,7 @@ if os.path.exists(module_file_path):
     except Exception as e:
         print("Error loading asyncrat_common.py", e)

+
 def extract_config(data: bytes):
     config = RATConfigParser(data=data, remap_config=True).report.get("config", {})
     if config and HAVE_ASYNCRAT_COMMON:
@@ -24,6 +25,7 @@ def extract_config(data: bytes):

     return config

+
 if __name__ == "__main__":
     data = open(sys.argv[1], "rb").read()
     print(extract_config(data))

@@ -5,10 +5,10 @@ import os
 from rat_king_parser.rkp import RATConfigParser

 HAVE_ASYNCRAT_COMMON = False
-module_file_path =
+module_file_path = "/opt/CAPEv2/data/asyncrat_common.py"
 if os.path.exists(module_file_path):
     try:
-        module_name = os.path.basename(module_file_path).replace(
+        module_name = os.path.basename(module_file_path).replace(".py", "")
         spec = importlib.util.spec_from_file_location(module_name, module_file_path)
         asyncrat_common = importlib.util.module_from_spec(spec)
         sys.modules[module_name] = asyncrat_common
@@ -17,6 +17,7 @@ if os.path.exists(module_file_path):
     except Exception as e:
         print("Error loading asyncrat_common.py", e)

+
 def extract_config(data: bytes):
     config = RATConfigParser(data=data, remap_config=True).report.get("config", {})
     if config and HAVE_ASYNCRAT_COMMON:
@@ -24,6 +25,7 @@ def extract_config(data: bytes):

     return config

+
 if __name__ == "__main__":
     data = open(sys.argv[1], "rb").read()
     print(extract_config(data))

@@ -26,11 +26,12 @@ def parse_http_config(rc4_key: bytes, data: bytes) -> dict:

     def read_str(length: int):
         nonlocal offset
-        value = data[offset:offset + length].decode("utf-8", errors="replace")
+        value = data[offset : offset + length].decode("utf-8", errors="replace")
         offset += length
         return value

-    config["
+    config["cryptokey"] = rc4_key.hex()
+    config["cryptokey_type"] = "RC4"
     config["agent_type"] = f"{read('<I'):8X}"
     config["use_ssl"] = read("<B")
     host_count = read("<I")
@@ -58,7 +59,8 @@ def parse_http_config(rc4_key: bytes, data: bytes) -> dict:
     config["sleep_delay"] = read("<I")
     config["jitter_delay"] = read("<I")

-    return config
+    return {"raw": config}
+

 def extract_config(filebuf: bytes) -> dict:
     pe = pefile.PE(data=filebuf, fast_load=True)
@@ -78,9 +80,9 @@ def extract_config(filebuf: bytes) -> dict:
             pos = start_offset + 1
             continue

-        encrypted_data = data[pos:pos + key_offset]
+        encrypted_data = data[pos : pos + key_offset]
         pos += key_offset
-        rc4_key = data[pos:pos + 16]
+        rc4_key = data[pos : pos + 16]

         if key_offset == 787:
             pass

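The hunks above slice each embedded profile as key_offset bytes of ciphertext followed immediately by a 16-byte RC4 key, which is now reported as cryptokey / cryptokey_type. A sketch of splitting that layout and decrypting it, assuming plain RC4 via pycryptodome's ARC4; the parser's own decryption routine is not shown in these hunks.

```python
from Cryptodome.Cipher import ARC4


def split_profile(data: bytes, pos: int, key_offset: int):
    # key_offset bytes of ciphertext, then the 16-byte RC4 key right behind it
    encrypted_data = data[pos : pos + key_offset]
    rc4_key = data[pos + key_offset : pos + key_offset + 16]
    return encrypted_data, rc4_key


def decrypt_profile(encrypted_data: bytes, rc4_key: bytes) -> bytes:
    return ARC4.new(rc4_key).decrypt(encrypted_data)


if __name__ == "__main__":
    key = b"0123456789abcdef"                              # fabricated key
    blob = ARC4.new(key).encrypt(b"demo profile") + key    # fabricated layout
    print(decrypt_profile(*split_profile(blob, 0, len(blob) - 16)))  # -> b'demo profile'
```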
@@ -0,0 +1,100 @@
+import json
+import struct
+from contextlib import suppress
+from typing import Any, Dict, Tuple
+
+import pefile
+from Cryptodome.Cipher import AES
+from Cryptodome.Util.Padding import unpad
+
+# Define the format for the fixed-size header part.
+# <   : little-endian
+# 32s : 32-byte string (for aes_key)
+# 16s : 16-byte string (for iv)
+# I   : 4-byte unsigned int (for dword1)
+# I   : 4-byte unsigned int (for dword2)
+HEADER_FORMAT = "<32s16sII"
+HEADER_SIZE = struct.calcsize(HEADER_FORMAT)  # This will be 32 + 16 + 4 + 4 = 56 bytes
+
+def parse_blob(data: bytes):
+    """
+    Parse the blob according to the scheme:
+    - 32 bytes = AES key
+    - Next 16 bytes = IV
+    - Next 2 DWORDs (8 bytes total) = XOR to get cipher data size
+    - Remaining bytes = cipher data of that size
+    """
+    aes_key, iv, dword1, dword2 = struct.unpack_from(HEADER_FORMAT, data, 0)
+    ciphertext_size = dword1 ^ dword2
+    cipher_data = data[HEADER_SIZE : HEADER_SIZE + ciphertext_size]
+    return aes_key, iv, cipher_data
+
+
+def decrypt(data: bytes) -> Tuple[bytes, bytes, bytes]:
+    aes_key, iv, cipher_data = parse_blob(data)
+    cipher = AES.new(aes_key, AES.MODE_CBC, iv)
+    plaintext_padded = cipher.decrypt(cipher_data)
+    return aes_key, iv, unpad(plaintext_padded, AES.block_size)
+
+
+def extract_config(data: bytes) -> Dict[str, Any]:
+    cfg: Dict[str, Any] = {}
+    plaintext = b""
+
+    pe = pefile.PE(data=data, fast_load=True)
+    try:
+        data_section = [s for s in pe.sections if s.Name.find(b".data") != -1][0]
+    except IndexError:
+        return cfg
+
+    if not data_section:
+        return cfg
+
+    data = data_section.get_data()
+    block_size = 4096
+    zeros = b"\x00" * block_size
+    offset = data.find(zeros)
+    if offset == -1:
+        return cfg
+
+    while offset > 0:
+        with suppress(Exception):
+            aes_key, iv, plaintext = decrypt(data[offset : offset + block_size])
+            if plaintext and b"conf" in plaintext:
+                break
+
+        offset -= 1
+
+    if plaintext:
+        try:
+            parsed = json.loads(plaintext.decode("utf-8", errors="ignore").rstrip("\x00"))
+        except json.JSONDecodeError:
+            return cfg
+
+        conf = parsed.get("conf", {})
+        build = parsed.get("build", {})
+        if conf:
+            cfg = {
+                "CNCs": conf.get("hosts"),
+                "user_agent": conf.get("useragents"),
+                "version": build.get("ver"),
+                "build": build.get("build_id"),
+                "cryptokey": aes_key.hex(),
+                "cryptokey_type": "AES",
+                "raw": {
+                    "iv": iv.hex(),
+                    "anti_vm": conf.get("anti_vm"),
+                    "anti_dbg": conf.get("anti_dbg"),
+                    "self_del": conf.get("self_del"),
+                    "run_delay": conf.get("run_delay"),
+                }
+            }
+
+    return cfg
+
+
+if __name__ == "__main__":
+    import sys
+
+    with open(sys.argv[1], "rb") as f:
+        print(extract_config(f.read()))

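The new AuraStealer module above parses a .data blob laid out as a 32-byte AES key, a 16-byte IV, two DWORDs whose XOR gives the ciphertext length, and then the AES-CBC ciphertext holding a JSON config. A round-trip sketch of that layout; the key, IV, mask, and plaintext below are fabricated purely to exercise the format.

```python
import struct
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad, unpad

HEADER_FORMAT = "<32s16sII"


def build_blob(key: bytes, iv: bytes, plaintext: bytes) -> bytes:
    # Fabricated blob: header (key, iv, size^mask, mask) followed by the ciphertext.
    ct = AES.new(key, AES.MODE_CBC, iv).encrypt(pad(plaintext, AES.block_size))
    mask = 0xDEADBEEF  # arbitrary; only dword1 ^ dword2 matters
    return struct.pack(HEADER_FORMAT, key, iv, len(ct) ^ mask, mask) + ct


def parse_and_decrypt(blob: bytes) -> bytes:
    key, iv, d1, d2 = struct.unpack_from(HEADER_FORMAT, blob, 0)
    size = d1 ^ d2
    header_size = struct.calcsize(HEADER_FORMAT)
    ct = blob[header_size : header_size + size]
    return unpad(AES.new(key, AES.MODE_CBC, iv).decrypt(ct), AES.block_size)


blob = build_blob(b"K" * 32, b"I" * 16, b'{"conf": {"hosts": ["203.0.113.7"]}}')
print(parse_and_decrypt(blob))
```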
@@ -30,7 +30,7 @@ rule Azorult
         cape_type = "Azorult Payload"
     strings:
         $ref_c2 = {6A 00 6A 00 6A 00 6A 00 68 ?? ?? ?? ?? FF 55 F0 8B D8 C7 47 10 ?? ?? ?? ?? 90 C7 45 B0 C0 C6 2D 00 6A 04 8D 45 B0 50 6A 06 53 FF 55 D4}
-
+    condition:
         uint16(0) == 0x5A4D and all of them
 }
 """
@@ -48,16 +48,18 @@ def extract_config(filebuf):
         for instance in block.instances:
             try:
                 cnc_offset = struct.unpack("i", instance.matched_data[21:25])[0]
-                cnc = pe.get_data(cnc_offset-image_base, 32).split(b"\x00")[0]
+                cnc = pe.get_data(cnc_offset - image_base, 32).split(b"\x00")[0]
                 if cnc:
                     if not cnc.startswith(b"http"):
                         cnc = b"http://" + cnc
-                    return {"
+                    return {"CNCs": [cnc.decode()]}
             except Exception as e:
                 log.error("Error parsing Azorult config: %s", e)
     return {}

+
 if __name__ == "__main__":
     import sys
+
     with open(sys.argv[1], "rb") as f:
         print(extract_config(f.read()))

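The Azorult hunk resolves its C2 string by reading bytes 21-24 of the YARA match (the absolute virtual address pushed before the API call), subtracting the image base to get an RVA, and reading 32 bytes from there with pefile. A hedged, illustrative helper restating that logic; the pe object, matched_data, and image_base come from the surrounding parser, and resolve_cnc is a hypothetical name.

```python
import struct


def resolve_cnc(pe, matched_data: bytes, image_base: int) -> str:
    # Bytes 21-24 of the match hold the virtual address of the C2 string.
    cnc_va = struct.unpack("i", matched_data[21:25])[0]
    cnc = pe.get_data(cnc_va - image_base, 32).split(b"\x00")[0]
    if cnc and not cnc.startswith(b"http"):
        cnc = b"http://" + cnc
    return cnc.decode()
```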
@@ -86,7 +86,10 @@ def extract_config(file_data):
     for item in raw.split(b"\x00"):
         data = "".join(convert_char(c) for c in item)
         if len(data) == 760:
-            config
+            config.setdefault("cryptokey", data)
+            # ToDO proper naming here
+            config.setdefault("raw", {})["cryptokey_type"] = "RSA public key"
+
         elif len(data) > 1 and "\\x" not in data:
-            config["strings"] = data
+            config.setdefault("raw", {})["strings"] = data
     return config

@@ -55,7 +55,7 @@ def extract_config(data: bytes) -> dict:
         return {}

     rdata_data = rdata_section.get_data()
-    patterns = [
+    patterns = [rb"Builder\.dll\x00", rb"Builder\.exe\x00"]
     matches = []
     for pattern in patterns:
         matches.extend(re.finditer(pattern, rdata_data))
@@ -66,7 +66,7 @@ def extract_config(data: bytes) -> dict:
         end = min(len(rdata_data), match.end() + 1024)
         found_strings.update(re.findall(b"[\x20-\x7E]{4,}?\x00", rdata_data[start:end]))

-
+    config = {}
     urls = []
     directories = []
     campaign = ""
@@ -74,7 +74,7 @@ def extract_config(data: bytes) -> dict:
     if found_strings:
         key = get_year(pe)
         if not key:
-            return
+            return {}
         for string in found_strings:
             with suppress(UnicodeDecodeError):
                 decoded_string = string.decode("utf-8").rstrip("\x00")
@@ -88,9 +88,14 @@ def extract_config(data: bytes) -> dict:
                 elif re.match(r"^(?![A-Z]{6,}$)[a-zA-Z0-9\-=]{6,}$", decoded_string):
                     campaign = decoded_string

-
+    if urls:
+        config["CNCs"] = sorted(urls)
+    if campaign:
+        config["campaign"] = campaign
+    if directories:
+        config["raw"] = {"directories": directories}

-    return
+    return config


 if __name__ == "__main__":

@@ -542,16 +542,18 @@ def extract_config(data):
         injection_method = "Process hollowing IE or Werfault"

     config = {
-        "
-
-
-
-
-
-
-
-
-
+        "raw": {
+            "Flag": hex(flag),
+            "Payload export hash": hex(u32(payload_export_hash)),
+            "Payload filename": w_payload_filename_and_cmdline,
+            "Compressed data size": hex(u32(compressed_data_size)),
+            "Uncompressed data size": hex(u32(uncompressed_data_size)),
+            "Rabbit key": binascii.hexlify(key).decode(),
+            "Rabbit IV": binascii.hexlify(iv).decode(),
+            "Persistence": persistance,
+            "Sleep after injection": sleep_after_injection,
+            "Injection method": injection_method,
+        }
     }

     return config