CAPE-parsers 0.1.52__tar.gz → 0.1.54__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/PKG-INFO +1 -1
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Lumma.py +10 -5
- cape_parsers-0.1.54/cape_parsers/CAPE/core/NitroBunnyDownloader.py +151 -0
- cape_parsers-0.1.54/cape_parsers/CAPE/core/Rhadamanthys.py +319 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/pyproject.toml +1 -1
- cape_parsers-0.1.52/cape_parsers/CAPE/core/Rhadamanthys.py +0 -190
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/LICENSE +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/AgentTesla.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Amadey.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Arkei.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/AsyncRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/AuroraStealer.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Carbanak.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/CobaltStrikeBeacon.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/CobaltStrikeStager.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/DCRat.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Fareit.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/KoiLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/LokiBot.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/MonsterV2.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/MyKings.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/NanoCore.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Nighthawk.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Njrat.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/PhemedroneStealer.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/QuasarRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Snake.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/SparkRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Stealc.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/VenomRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/WinosStager.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/XWorm.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/XenoRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/AdaptixBeacon.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/AuraStealer.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Azorult.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/BitPaymer.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/BlackDropper.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Blister.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/BruteRatel.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/BumbleBee.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/DarkGate.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/DoppelPaymer.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/DridexLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Formbook.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/GuLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/IcedID.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/IcedIDLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Latrodectus.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Oyster.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/PikaBot.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/PlugX.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/QakBot.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Quickbind.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/RedLine.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Remcos.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/SmokeLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Socks5Systemz.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/SquirrelWaffle.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Strrat.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/WarzoneRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/Zloader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/core/test_cape.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/RATDecoders/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/RATDecoders/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/RATDecoders/test_rats.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/BackOffLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/BackOffPOS.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/BlackNix.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/BuerLoader.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/ChChes.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Emotet.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Enfal.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/EvilGrab.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Greame.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Hancitor.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/HttpBrowser.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/JavaDropper.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Nymaim.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Pandora.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/PoisonIvy.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/PredatorPain.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Punisher.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/RCSession.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/REvil.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/RedLeaf.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Retefe.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/Rozena.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/SmallNet.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/TSCookie.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/TrickBot.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/UrsnifV3.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/_ShadowTech.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/_VirusRat.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/_jRat.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/unrecom.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/deprecated/xRAT.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/malduck/LICENSE +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/malduck/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/malduck/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/malduck/test_malduck.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/mwcp/README.md +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/mwcp/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/mwcp/test_mwcp.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/__init__.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/aplib.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/blzpack.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/blzpack_lib.so +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/dotnet_utils.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/lznt1.py +0 -0
- {cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/utils/strings.py +0 -0
{cape_parsers-0.1.52 → cape_parsers-0.1.54}/cape_parsers/CAPE/community/Lumma.py

@@ -5,7 +5,7 @@ import struct
 from contextlib import suppress
 import pefile
 import yara
-
+from Cryptodome.Cipher import ChaCha20

 RULE_SOURCE_BUILD_ID = """rule LummaBuildId
 {
@@ -142,7 +142,7 @@ def contains_non_printable(byte_array):
             return True
     return False

-
+"""
 def mask32(x):
     return x & 0xFFFFFFFF

@@ -242,7 +242,7 @@ def chacha20_xor(message, key, nonce, counter):
         xor_key.append(message[i] ^ key_stream[i])

     return xor_key
-
+"""

 def extract_c2_domain(data):
     pattern = rb"([\w-]+\.[\w]+)\x00"
@@ -315,7 +315,9 @@ def extract_config(data):
     counter = 2
     for i in range(12):
         encrypted_string = data[encrypted_strings_offset : encrypted_strings_offset + 40]
-
+        chacha20_cipher = ChaCha20.new(key=key, nonce=nonce)
+        chacha20_cipher.seek(counter)
+        decoded_c2 = chacha20_cipher.decrypt(encrypted_string).split(b"\x00", 1)[0]
         if contains_non_printable(decoded_c2):
             break
         config.setdefault("CNCs", []).append("https://" + decoded_c2.decode())
@@ -348,7 +350,10 @@ def extract_config(data):
     c2_encrypted = data[c2_dword_offset : c2_dword_offset + 0x80]
     counters = [0, 2, 4, 6, 8, 10, 12, 14, 16]
     for counter in counters:
-        decrypted = chacha20_xor(c2_encrypted, key, nonce, counter)
+        # decrypted = chacha20_xor(c2_encrypted, key, nonce, counter)
+        chacha20_cipher = ChaCha20.new(key=key, nonce=nonce)
+        chacha20_cipher.seek(counter)
+        decrypted = chacha20_cipher.decrypt(c2_encrypted).split(b"\x00", 1)[0]
         c2 = extract_c2_domain(decrypted)
         if c2 is not None and len(c2) > 10:
             config["CNCs"].append("https://" + c2.decode())
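The Lumma change above replaces the parser's hand-rolled keystream helper with PyCryptodome's ChaCha20 object, seeking into the keystream before decrypting and keeping only the bytes up to the first NUL. A minimal sketch of that pattern, with placeholder key, nonce, and ciphertext rather than values from a real Lumma sample:

import os
from Cryptodome.Cipher import ChaCha20

# Placeholder inputs for illustration only -- the parser carves the real
# 32-byte key, 12-byte nonce and encrypted blob out of the Lumma binary.
key = os.urandom(32)
nonce = os.urandom(12)
encrypted_blob = os.urandom(40)

cipher = ChaCha20.new(key=key, nonce=nonce)
cipher.seek(2)  # skip keystream bytes, as the parser does with its counter values
plaintext = cipher.decrypt(encrypted_blob)

# Lumma C2 strings are NUL-terminated, so only the prefix is kept.
candidate = plaintext.split(b"\x00", 1)[0]
print(candidate)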
cape_parsers-0.1.54/cape_parsers/CAPE/core/NitroBunnyDownloader.py (new file)

@@ -0,0 +1,151 @@
+# Copyright (C) 2024 enzok
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+import struct
+
+import pefile
+import yara
+
+log = logging.getLogger(__name__)
+
+DESCRIPTION = "NitroBunnyDownloader configuration parser."
+AUTHOR = "enzok"
+
+yara_rule = """
+rule NitroBunnyDownloader
+{
+    meta:
+        author = "enzok"
+        description = "NitroBunnyDownloader Payload"
+        cape_type = "NitroBunnyDownloader Payload"
+        hash = "960e59200ec0a4b5fb3b44e6da763f5fec4092997975140797d4eec491de411b"
+    strings:
+        $config = {E8 [3] 00 41 B8 ?? ?? 00 00 48 8D 15 [3] 00 48 89 C1 48 89 ?? E8 [3] 00}
+        $string1 = "X-Amz-User-Agent:" wide
+        $string2 = "Amz-Security-Flag:" wide
+        $string3 = "/cart" wide
+        $string4 = "Cookie: " wide
+        $string5 = "wishlist" wide
+    condition:
+        uint16(0) == 0x5A4D and $config and 2 of ($string*)
+}
+"""
+
+yara_rules = yara.compile(source=yara_rule)
+
+
+def yara_scan(raw_data):
+    try:
+        return yara_rules.match(data=raw_data)
+    except Exception as e:
+        print(e)
+        return None
+
+
+def read_dword(data, off):
+    if off + 4 > len(data):
+        raise ValueError(f"EOF reading dword at {off}")
+    val = struct.unpack_from("<I", data, off)[0]
+    return val, off + 4
+
+
+def read_qword(data, off):
+    """Read a 64-bit unsigned little-endian value."""
+    if off + 8 > len(data):
+        raise ValueError(f"EOF reading qword at {off}")
+    val = struct.unpack_from("<Q", data, off)[0]
+    return val, off + 8
+
+
+def read_utf16le_string(data, off, length):
+    if off + length > len(data):
+        raise ValueError(f"EOF reading string at {off} len={length}")
+    raw = data[off:off + length]
+    s = raw.decode("utf-16le", errors="replace").rstrip("\x00")
+    return s, off + length
+
+
+def read_string_list(data, off, count):
+    items = []
+    for i in range(count):
+        length_words, off = read_qword(data, off)
+        s, off = read_utf16le_string(data, off, length_words)
+        items.append(s)
+    return items, off
+
+
+def extract_config(filebuf):
+    yara_hit = yara_scan(filebuf)
+    if not yara_hit:
+        return None
+
+    cfg = {}
+    config_code_offset = None
+    for hit in yara_hit:
+        if hit.rule != "NitroBunnyDownloader":
+            continue
+
+        for item in hit.strings:
+            for instance in item.instances:
+                if "$config" in item.identifier:
+                    config_code_offset = instance.offset
+                    break
+
+    if config_code_offset is None:
+        return None
+
+    try:
+        pe = pefile.PE(data=filebuf, fast_load=True)
+        config_length = pe.get_dword_from_offset(config_code_offset + 7)
+        config_offset = pe.get_dword_from_offset(config_code_offset + 14)
+        rva = pe.get_rva_from_offset(config_code_offset + 18)
+        config_rva = rva + config_offset
+        data = pe.get_data(config_rva, config_length)
+        off = 0
+        raw = cfg["raw"] = {}
+        port, off = read_dword(data, off)
+        num, off = read_dword(data, off)
+        cncs, off = read_string_list(data, off, num)
+        num, off = read_qword(data, off)
+        raw["user_agent"], off = read_utf16le_string(data, off, num)
+        num, off = read_dword(data, off)
+        raw["http_header_items"], off = read_string_list(data, off, num)
+        num, off = read_dword(data, off)
+        raw["uri_list"], off = read_string_list(data, off, num)
+        raw["unknown_1"], off = read_dword(data, off)
+        raw["unknown_2"], off = read_dword(data, off)
+
+        if cncs:
+            cfg["CNCs"] = []
+            schema = {80: "http", 443: "https"}.get(port, "tcp")
+            for cnc in cncs:
+                cnc = f"{schema}://{cnc}"
+                if port not in (80, 443):
+                    cnc += f":{port}"
+
+                cfg["CNCs"].append(cnc)
+
+    except Exception as e:
+        log.error("Error: %s", e)
+        return None
+
+    return cfg
+
+
+if __name__ == "__main__":
+    import sys
+
+    with open(sys.argv[1], "rb") as f:
+        print(extract_config(f.read()))
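The new NitroBunnyDownloader parser walks a little-endian, length-prefixed layout: a DWORD port, a DWORD count of C2 hosts, each host stored as a QWORD byte length followed by UTF-16LE text, then a user agent and further string lists in the same encoding. A rough standalone sketch of reading that layout, using a fabricated config blob rather than data carved from a real sample:

import struct

def build_string(s: str) -> bytes:
    # QWORD byte length followed by UTF-16LE bytes, mirroring the parser's reader
    raw = s.encode("utf-16le")
    return struct.pack("<Q", len(raw)) + raw

# Fabricated blob: port 8080, two C2 hosts, one user agent (illustrative values only)
blob = struct.pack("<I", 8080)
blob += struct.pack("<I", 2) + build_string("c2-one.example") + build_string("c2-two.example")
blob += build_string("Mozilla/5.0 (example)")

off = 0
port = struct.unpack_from("<I", blob, off)[0]; off += 4
count = struct.unpack_from("<I", blob, off)[0]; off += 4
cncs = []
for _ in range(count):
    length = struct.unpack_from("<Q", blob, off)[0]; off += 8
    cncs.append(blob[off:off + length].decode("utf-16le")); off += length
length = struct.unpack_from("<Q", blob, off)[0]; off += 8
user_agent = blob[off:off + length].decode("utf-16le"); off += length

# Non-standard ports get appended to the URL, as in the parser
schema = {80: "http", 443: "https"}.get(port, "tcp")
print([f"{schema}://{c}:{port}" for c in cncs], user_agent)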
cape_parsers-0.1.54/cape_parsers/CAPE/core/Rhadamanthys.py (new file)

@@ -0,0 +1,319 @@
+import struct
+import base64
+import re
+import json
+
+
+DESCRIPTION = "Rhadamanthys parser"
+AUTHOR = "kevoreilly, YungBinary"
+
+
+def mask32(x):
+    return x & 0xFFFFFFFF
+
+
+def add32(x, y):
+    return mask32(x + y)
+
+
+def left_rotate(x, n):
+    return mask32(x << n) | (x >> (32 - n))
+
+
+def quarter_round(block, a, b, c, d):
+    block[a] = add32(block[a], block[b])
+    block[d] ^= block[a]
+    block[d] = left_rotate(block[d], 16)
+    block[c] = add32(block[c], block[d])
+    block[b] ^= block[c]
+    block[b] = left_rotate(block[b], 12)
+    block[a] = add32(block[a], block[b])
+    block[d] ^= block[a]
+    block[d] = left_rotate(block[d], 8)
+    block[c] = add32(block[c], block[d])
+    block[b] ^= block[c]
+    block[b] = left_rotate(block[b], 7)
+
+
+def chacha20_permute(block):
+    for doubleround in range(10):
+        quarter_round(block, 0, 4, 8, 12)
+        quarter_round(block, 1, 5, 9, 13)
+        quarter_round(block, 2, 6, 10, 14)
+        quarter_round(block, 3, 7, 11, 15)
+        quarter_round(block, 0, 5, 10, 15)
+        quarter_round(block, 1, 6, 11, 12)
+        quarter_round(block, 2, 7, 8, 13)
+        quarter_round(block, 3, 4, 9, 14)
+
+
+def words_from_bytes(b):
+    assert len(b) % 4 == 0
+    return [int.from_bytes(b[4 * i : 4 * i + 4], "little") for i in range(len(b) // 4)]
+
+
+def bytes_from_words(w):
+    return b"".join(word.to_bytes(4, "little") for word in w)
+
+
+def chacha20_block(key, nonce, blocknum):
+    # This implementation doesn't support 16-byte keys.
+    assert len(key) == 32
+    assert len(nonce) == 12
+    assert blocknum < 2**32
+    constant_words = words_from_bytes(b"expand 32-byte k")
+    key_words = words_from_bytes(key)
+    nonce_words = words_from_bytes(nonce)
+    # fmt: off
+    original_block = [
+        constant_words[0], constant_words[1], constant_words[2], constant_words[3],
+        key_words[0], key_words[1], key_words[2], key_words[3],
+        key_words[4], key_words[5], key_words[6], key_words[7],
+        mask32(blocknum), nonce_words[0], nonce_words[1], nonce_words[2],
+    ]
+    # fmt: on
+    permuted_block = list(original_block)
+    chacha20_permute(permuted_block)
+    for i in range(len(permuted_block)):
+        permuted_block[i] = add32(permuted_block[i], original_block[i])
+    return bytes_from_words(permuted_block)
+
+
+def chacha20_stream(key, nonce, length, blocknum):
+    output = bytearray()
+    while length > 0:
+        block = chacha20_block(key, nonce, blocknum)
+        take = min(length, len(block))
+        output.extend(block[:take])
+        length -= take
+        blocknum += 1
+    return output
+
+
+def decrypt_config(data):
+    decrypted_config = b""
+    data_len = len(data)
+    v3 = 0
+    while True:
+        v8 = 4
+        while v8:
+            if data_len <= (v3 + 4):
+                return decrypted_config
+            a = data[v3]
+            b = data[v3 + 4]
+            c = a ^ b
+            decrypted_config += bytes([c])
+            v8 -= 1
+            v3 += 1
+
+
+def chacha20_xor(custom_b64_decoded, key, nonce):
+    message_len = len(custom_b64_decoded)
+    key_stream = chacha20_stream(key, nonce, message_len, 0x80)
+
+    xor_key = bytearray()
+    for i in range(message_len):
+        xor_key.append(custom_b64_decoded[i] ^ key_stream[i])
+
+    return xor_key
+
+
+def extract_base64_strings(data, minchars, maxchars):
+    apat = b"([A-Za-z0-9-|]{" + str(minchars).encode() + b"," + str(maxchars).encode() + b"})\x00"
+    strings = [s.decode() for s in re.findall(apat, data)]
+    upat = b"((?:[A-Za-z0-9-|]\x00){" + str(minchars).encode() + b"," + str(maxchars).encode() + b"})\x00\x00"
+    strings.extend(ws.decode("utf-16le") for ws in re.findall(upat, data))
+    return strings
+
+
+def extract_c2_url(data):
+    pattern = b"(http[\x20-\x7e]+)\x00"
+    match = re.search(pattern, data)
+    if match:
+        return match.group(1).decode()
+
+
+def custom_b64decode(data: bytes, custom_alphabet: bytes):
+    """Decodes base64 data using a custom alphabet."""
+    standard_alphabet = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+    # Translate the data back to the standard alphabet before decoding
+    table = bytes.maketrans(custom_alphabet, standard_alphabet)
+    return base64.b64decode(data.translate(table), validate=True)
+
+
+def lzo_noheader_decompress(data: bytes, decompressed_size: int):
+    src = 0
+    dst = bytearray()
+    length = len(data)
+
+    while src < length:
+        ctrl = data[src]
+        src += 1
+
+        # Special short match
+        # Copies exactly 3 bytes from dst starting match_len + 1 bytes back.
+        if ctrl == 0x20:
+            match_len = data[src]
+            src += 1
+            start = len(dst) - match_len - 1
+            end = start + 3
+            #print(f"Control code: {hex(ctrl)}, Offset backtrack length: {hex(match_len)}, Current offset: {hex(len(dst))}, New offset: {hex(start)}")
+            dst.extend(dst[start:end])
+
+        elif ctrl >= 0xE0 or ctrl == 0x40:
+            # Compute base copy length from the upper bits of ctrl
+            base_len = ((ctrl >> 5) - 1) + 3
+
+            if ctrl >= 0xE0:
+                # Long copy: extra length byte follows
+                copy_len = base_len + data[src]
+                # Offset is byte after
+                start = data[src + 1]
+                src += 2
+            elif ctrl == 0x40:
+                # Short copy: offset byte after control code
+                copy_len = base_len
+                start = data[src]
+                src += 1
+
+            # Calculate offset in output buffer
+            offset = len(dst) - start - 1
+
+            #print(f"Control code: {hex(ctrl)}, Offset backtrack length: {hex(start)}, Current offset: {hex(len(dst))}, New offset: {hex(len(dst) - start)}, Length to copy: {hex(copy_len)}")
+
+            # Copy from previously decompressed data
+            dst.extend(dst[offset:offset + copy_len])
+
+        else:
+            # Literal run
+            literal_len = (ctrl & 0x1F) + 1
+            #print(f"Control code: {hex(ctrl)}, Literal length: {hex(literal_len)}")
+            dst.extend(data[src:src+literal_len])
+            src += literal_len
+
+    if len(dst) == decompressed_size:
+        return bytes(dst)
+
+
+def parse_compression_header(config: bytes):
+    """Parse compressed size, decompressed size, and data offset from config"""
+
+    # 0x2A when looking at the config in memory
+    base_offset = 0x26
+
+    # Compressed data offset field, for calculating the offset to the compressed buffer
+    comp_offset_field = config[base_offset]
+    # Number of bytes the field spans
+    comp_offset_size_len = (comp_offset_field & 3) + 1
+    for i in range(1, comp_offset_size_len):
+        comp_offset_field |= config[base_offset + i] << (8 * i)
+
+    comp_size_offset = comp_offset_field >> 2
+
+    # Compressed size field, for finding the size of the compressed buffer
+    comp_offset = base_offset + comp_offset_size_len
+    comp_size_field = config[comp_offset]
+    # Number of bytes the field spans
+    comp_size_len = (comp_size_field & 3) + 1
+    for i in range(1, comp_size_len):
+        comp_size_field |= config[comp_offset + i] << (8 * i)
+
+    # Decompressed size field
+    decomp_field_offset = base_offset + comp_offset_size_len + comp_size_len
+    decomp_size_field = config[decomp_field_offset]
+    # Number of bytes the field spans
+    decomp_field_len = (decomp_size_field & 3) + 1
+    for i in range(1, decomp_field_len):
+        decomp_size_field |= config[decomp_field_offset + i] << (8 * i)
+
+    # Calculate return values
+    decompressed_size = decomp_size_field >> 2
+    compressed_data_offset = decomp_field_offset + decomp_field_len + comp_size_offset
+    compressed_size_key = config[0x28] << 8
+    compressed_size = (compressed_size_key | comp_size_field) >> 2
+    compressed_data = config[compressed_data_offset : compressed_data_offset + compressed_size]
+
+    return {
+        "compressed_size": compressed_size,
+        "decompressed_size": decompressed_size,
+        "compressed_data": compressed_data
+    }
+
+
+def extract_config(data):
+    config_dict = {}
+    magic = struct.unpack("I", data[:4])[0]
+    if magic == 0x59485221:
+        config_dict["CNCs"] = [data[24:].split(b"\0", 1)[0].decode()]
+        return config_dict
+    else:
+        key = b"\x52\xAB\xDF\x06\xB6\xB1\x3A\xC0\xDA\x2D\x22\xDC\x6C\xD2\xBE\x6C\x20\x17\x69\xE0\x12\xB5\xE6\xEC\x0E\xAB\x4C\x14\x73\x4A\xED\x51"
+        nonce = b"\x5F\x14\xD7\x9C\xFC\xFC\x43\x9E\xC3\x40\x6B\xBA"
+
+        custom_alphabets = [
+            b"ABC1fghijklmnop234NOPQRSTUVWXY567DEFGHIJKLMZ089abcdeqrstuvwxyz-|",
+            b"4NOPQRSTUVWXY567DdeEqrstuvwxyz-ABC1fghop23Fijkbc|lmnGHIJKLMZ089a",  # 0.9.2
+            b"3Fijkbc|l4NOPQRSTUVWXY567DdewxEqrstuvyz-ABC1fghop2mnGHIJKLMZ089a",  # 0.9.3
+        ]
+
+        # Extract base64 strings
+        extracted_strings = extract_base64_strings(data, 100, 256)
+        if not extracted_strings:
+            return config_dict
+
+        pattern = re.compile(b'.\x80')
+        for string in extracted_strings:
+            try:
+                custom_b64_decoded = custom_b64decode(string, custom_alphabets[0])
+
+                xor_key = chacha20_xor(custom_b64_decoded, key, nonce)
+
+                # Decrypted, but may still be the compressed malware configuration
+                config = decrypt_config(xor_key)
+                # Attempt to extract C2 url, only works in version prior to 0.9.2
+                c2_url = extract_c2_url(config)
+                if c2_url:
+                    config_dict = {"CNCs": [c2_url]}
+                    return config_dict
+                else:
+                    # Handle new variants that compress the Command and Control server(s)
+                    custom_b64_decoded = custom_b64decode(string, custom_alphabets[2])
+                    xor_key = chacha20_xor(custom_b64_decoded, key, nonce)
+                    config = decrypt_config(xor_key)
+
+                    parsed = parse_compression_header(config)
+                    if not parsed:
+                        return config_dict
+
+                    decompressed = lzo_noheader_decompress(parsed['compressed_data'], parsed['decompressed_size'])
+
+                    # Try old alphabet for 0.9.2
+                    if not decompressed:
+                        custom_b64_decoded = custom_b64decode(string, custom_alphabets[1])
+                        xor_key = chacha20_xor(custom_b64_decoded, key, nonce)
+                        config = decrypt_config(xor_key)
+
+                        parsed = parse_compression_header(config)
+                        if not parsed:
+                            return config_dict
+
+                        decompressed = lzo_noheader_decompress(parsed['compressed_data'], parsed['decompressed_size'])
+
+                    cncs = [f"https://{chunk.decode()}" for chunk in pattern.split(decompressed) if chunk]
+                    if cncs:
+                        config_dict = {"CNCs": cncs}
+                        return config_dict
+
+            except Exception:
+                continue
+
+    return config_dict
+
+
+if __name__ == "__main__":
+    import sys
+
+    with open(sys.argv[1], "rb") as f:
+        config_json = json.dumps(extract_config(f.read()), indent=4)
+        print(config_json)
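The rewritten parser's custom_b64decode uses bytes.maketrans to map the sample's shuffled base64 alphabet back onto the standard one before calling base64.b64decode. A small self-contained illustration of that idea, reusing the 0.9.x alphabet from the parser but with an invented plaintext (the encoder below is only a stand-in for whatever the malware builder does):

import base64

STANDARD = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
CUSTOM = b"ABC1fghijklmnop234NOPQRSTUVWXY567DEFGHIJKLMZ089abcdeqrstuvwxyz-|"

def demo_encode(data: bytes) -> bytes:
    # Encode normally, then rewrite the output into the custom alphabet.
    return base64.b64encode(data).translate(bytes.maketrans(STANDARD, CUSTOM))

def demo_decode(data: bytes) -> bytes:
    # Translate the custom alphabet back to the standard one, then decode,
    # which is the approach the parser takes.
    return base64.b64decode(data.translate(bytes.maketrans(CUSTOM, STANDARD)), validate=True)

blob = demo_encode(b"not a real Rhadamanthys config")
print(demo_decode(blob))  # b'not a real Rhadamanthys config'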
cape_parsers-0.1.52/cape_parsers/CAPE/core/Rhadamanthys.py (removed)

@@ -1,190 +0,0 @@
-import struct
-import base64
-import re
-import json
-
-
-DESCRIPTION = "Rhadamanthys parser"
-AUTHOR = "kevoreilly, YungBinary"
-
-
-def mask32(x):
-    return x & 0xFFFFFFFF
-
-
-def add32(x, y):
-    return mask32(x + y)
-
-
-def left_rotate(x, n):
-    return mask32(x << n) | (x >> (32 - n))
-
-
-def quarter_round(block, a, b, c, d):
-    block[a] = add32(block[a], block[b])
-    block[d] ^= block[a]
-    block[d] = left_rotate(block[d], 16)
-    block[c] = add32(block[c], block[d])
-    block[b] ^= block[c]
-    block[b] = left_rotate(block[b], 12)
-    block[a] = add32(block[a], block[b])
-    block[d] ^= block[a]
-    block[d] = left_rotate(block[d], 8)
-    block[c] = add32(block[c], block[d])
-    block[b] ^= block[c]
-    block[b] = left_rotate(block[b], 7)
-
-
-def chacha20_permute(block):
-    for doubleround in range(10):
-        quarter_round(block, 0, 4, 8, 12)
-        quarter_round(block, 1, 5, 9, 13)
-        quarter_round(block, 2, 6, 10, 14)
-        quarter_round(block, 3, 7, 11, 15)
-        quarter_round(block, 0, 5, 10, 15)
-        quarter_round(block, 1, 6, 11, 12)
-        quarter_round(block, 2, 7, 8, 13)
-        quarter_round(block, 3, 4, 9, 14)
-
-
-def words_from_bytes(b):
-    assert len(b) % 4 == 0
-    return [int.from_bytes(b[4 * i : 4 * i + 4], "little") for i in range(len(b) // 4)]
-
-
-def bytes_from_words(w):
-    return b"".join(word.to_bytes(4, "little") for word in w)
-
-
-def chacha20_block(key, nonce, blocknum):
-    # This implementation doesn't support 16-byte keys.
-    assert len(key) == 32
-    assert len(nonce) == 12
-    assert blocknum < 2**32
-    constant_words = words_from_bytes(b"expand 32-byte k")
-    key_words = words_from_bytes(key)
-    nonce_words = words_from_bytes(nonce)
-    # fmt: off
-    original_block = [
-        constant_words[0], constant_words[1], constant_words[2], constant_words[3],
-        key_words[0], key_words[1], key_words[2], key_words[3],
-        key_words[4], key_words[5], key_words[6], key_words[7],
-        mask32(blocknum), nonce_words[0], nonce_words[1], nonce_words[2],
-    ]
-    # fmt: on
-    permuted_block = list(original_block)
-    chacha20_permute(permuted_block)
-    for i in range(len(permuted_block)):
-        permuted_block[i] = add32(permuted_block[i], original_block[i])
-    return bytes_from_words(permuted_block)
-
-
-def chacha20_stream(key, nonce, length, blocknum):
-    output = bytearray()
-    while length > 0:
-        block = chacha20_block(key, nonce, blocknum)
-        take = min(length, len(block))
-        output.extend(block[:take])
-        length -= take
-        blocknum += 1
-    return output
-
-
-def decrypt_config(data):
-    decrypted_config = b"\x21\x52\x48\x59"
-    data_len = len(data)
-    v3 = 0
-    while True:
-        v8 = 4
-        while v8:
-            if data_len <= (v3 + 4):
-                return decrypted_config
-            a = data[v3]
-            b = data[v3 + 4]
-            c = a ^ b
-            decrypted_config += bytes([c])
-            v8 -= 1
-            v3 += 1
-
-
-def chacha20_xor(custom_b64_decoded, key, nonce):
-    message_len = len(custom_b64_decoded)
-    key_stream = chacha20_stream(key, nonce, message_len, 0x80)
-
-    xor_key = bytearray()
-    for i in range(message_len):
-        xor_key.append(custom_b64_decoded[i] ^ key_stream[i])
-
-    return xor_key
-
-
-def extract_strings(data, minchars, maxchars):
-    apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + str(maxchars).encode() + b"})\x00"
-    strings = [string.decode() for string in re.findall(apat, data)]
-    match = re.search(apat, data)
-    if not match:
-        return None
-    upat = b"((?:[\x20-\x7e][\x00]){" + str(minchars).encode() + b"," + str(maxchars).encode() + b"})\x00\x00"
-    strings.extend(str(ws.decode("utf-16le")) for ws in re.findall(upat, data))
-    return strings
-
-
-def extract_c2_url(data):
-    pattern = b"(http[\x20-\x7e]+)\x00"
-    match = re.search(pattern, data)
-    return match.group(1).decode()
-
-
-def is_potential_custom_base64(string):
-    custom_alphabet = "ABC1fghijklmnop234NOPQRSTUVWXY567DEFGHIJKLMZ089abcdeqrstuvwxyz-|"
-    for c in string:
-        if c not in custom_alphabet:
-            return False
-    return True
-
-
-def custom_b64decode(data):
-    """Decodes base64 data using a custom alphabet."""
-    standard_alphabet = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
-    custom_alphabet = b"ABC1fghijklmnop234NOPQRSTUVWXY567DEFGHIJKLMZ089abcdeqrstuvwxyz-|"
-    # Translate the data back to the standard alphabet before decoding
-    table = bytes.maketrans(custom_alphabet, standard_alphabet)
-    return base64.b64decode(data.translate(table), validate=True)
-
-
-def extract_config(data):
-    config_dict = {}
-    magic = struct.unpack("I", data[:4])[0]
-    if magic == 0x59485221:
-        config_dict["CNCs"] = [data[24:].split(b"\0", 1)[0].decode()]
-        return config_dict
-    else:
-        key = b"\x52\xAB\xDF\x06\xB6\xB1\x3A\xC0\xDA\x2D\x22\xDC\x6C\xD2\xBE\x6C\x20\x17\x69\xE0\x12\xB5\xE6\xEC\x0E\xAB\x4C\x14\x73\x4A\xED\x51"
-        nonce = b"\x5F\x14\xD7\x9C\xFC\xFC\x43\x9E\xC3\x40\x6B\xBA"
-
-        extracted_strings = extract_strings(data, 0x100, 0x100)
-        for string in extracted_strings:
-            try:
-                if not is_potential_custom_base64(string):
-                    continue
-
-                custom_b64_decoded = custom_b64decode(string)
-                xor_key = chacha20_xor(custom_b64_decoded, key, nonce)
-                decrypted_config = decrypt_config(xor_key)
-                reexecution_delay = int.from_bytes(decrypted_config[5:7], byteorder="little")
-
-                c2_url = extract_c2_url(decrypted_config)
-                if not c2_url:
-                    continue
-                config_dict = {"raw": {"Reexecution_delay": reexecution_delay}, "CNCs": [c2_url]}
-                return config_dict
-            except Exception:
-                continue
-
-
-if __name__ == "__main__":
-    import sys
-
-    with open(sys.argv[1], "rb") as f:
-        config_json = json.dumps(extract_config(f.read()), indent=4)
-        print(config_json)