elasticpot 2.0.1.dev0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- core/__init__.py +0 -0
- core/config.py +50 -0
- core/logfile.py +74 -0
- core/output.py +39 -0
- core/paths.py +54 -0
- core/protocol.py +451 -0
- core/tools.py +171 -0
- elasticpot/__init__.py +26 -0
- elasticpot/cli.py +519 -0
- elasticpot/data/Dockerfile +56 -0
- elasticpot/data/docs/INSTALL.md +424 -0
- elasticpot/data/docs/INSTALLWIN.md +435 -0
- elasticpot/data/docs/PLUGINS.md +21 -0
- elasticpot/data/docs/TODO.md +3 -0
- elasticpot/data/docs/datadog/README.md +32 -0
- elasticpot/data/docs/discord/README.md +58 -0
- elasticpot/data/docs/geoipupdtask.ps1 +270 -0
- elasticpot/data/docs/mysql/README.md +176 -0
- elasticpot/data/docs/mysql/READMEWIN.md +157 -0
- elasticpot/data/docs/mysql/mysql.sql +78 -0
- elasticpot/data/docs/postgres/README.md +184 -0
- elasticpot/data/docs/postgres/READMEWIN.md +196 -0
- elasticpot/data/docs/postgres/postgres.sql +72 -0
- elasticpot/data/docs/slack/README.md +68 -0
- elasticpot/data/docs/sqlite3/README.md +131 -0
- elasticpot/data/docs/sqlite3/READMEWIN.md +123 -0
- elasticpot/data/docs/sqlite3/sqlite3.sql +70 -0
- elasticpot/data/docs/telegram/README.md +103 -0
- elasticpot/data/etc/honeypot.cfg.base +472 -0
- elasticpot/data/responses/aliases.json +8 -0
- elasticpot/data/responses/banner.json +13 -0
- elasticpot/data/responses/cluster.json +17 -0
- elasticpot/data/responses/clusterstore.json +21 -0
- elasticpot/data/responses/error.json +21 -0
- elasticpot/data/responses/index1long.json +12 -0
- elasticpot/data/responses/index1short.json +3 -0
- elasticpot/data/responses/index2long.json +12 -0
- elasticpot/data/responses/index2short.json +3 -0
- elasticpot/data/responses/indices.txt +2 -0
- elasticpot/data/responses/mapping.json +41 -0
- elasticpot/data/responses/nodes.json +37 -0
- elasticpot/data/responses/nodes2.json +11 -0
- elasticpot/data/responses/nodes2.txt +1 -0
- elasticpot/data/responses/pluginhead.html +33 -0
- elasticpot/data/responses/search.json +25 -0
- elasticpot/data/responses/search2.json +28 -0
- elasticpot/data/responses/settings.json +30 -0
- elasticpot/data/responses/stats1.json +755 -0
- elasticpot/data/responses/stats2.json +163 -0
- elasticpot/data/responses/store.json +47 -0
- elasticpot/data/test/.gitignore +6 -0
- elasticpot/data/test/README.md +36 -0
- elasticpot/data/test/baseline +233 -0
- elasticpot/data/test/test.py +64 -0
- elasticpot/data/test/testurls.txt +39 -0
- elasticpot/honeypot.py +134 -0
- elasticpot-2.0.1.dev0.dist-info/METADATA +155 -0
- elasticpot-2.0.1.dev0.dist-info/RECORD +84 -0
- elasticpot-2.0.1.dev0.dist-info/WHEEL +6 -0
- elasticpot-2.0.1.dev0.dist-info/entry_points.txt +2 -0
- elasticpot-2.0.1.dev0.dist-info/licenses/LICENSE +674 -0
- elasticpot-2.0.1.dev0.dist-info/top_level.txt +3 -0
- output_plugins/__init__.py +0 -0
- output_plugins/couch.py +68 -0
- output_plugins/datadog.py +71 -0
- output_plugins/discord.py +121 -0
- output_plugins/elastic.py +137 -0
- output_plugins/hpfeed.py +43 -0
- output_plugins/influx2.py +64 -0
- output_plugins/jsonlog.py +36 -0
- output_plugins/kafka.py +57 -0
- output_plugins/localsyslog.py +65 -0
- output_plugins/mongodb.py +84 -0
- output_plugins/mysql.py +227 -0
- output_plugins/nlcvapi.py +125 -0
- output_plugins/postgres.py +160 -0
- output_plugins/redisdb.py +47 -0
- output_plugins/rethinkdblog.py +46 -0
- output_plugins/slack.py +79 -0
- output_plugins/socketlog.py +40 -0
- output_plugins/sqlite.py +158 -0
- output_plugins/telegram.py +128 -0
- output_plugins/textlog.py +34 -0
- output_plugins/xmpp.py +179 -0
core/__init__.py
ADDED
|
File without changes
|
core/config.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
|
|
2
|
+
from configparser import ConfigParser, ExtendedInterpolation
|
|
3
|
+
|
|
4
|
+
from os import environ
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def to_environ_key(key):
    """Translate a "section_option" lookup key into its environment-variable name."""
    env_key = key.upper()
    return env_key
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class EnvironmentConfigParser(ConfigParser):
    """ConfigParser whose options can be overridden from the environment.

    An environment variable named SECTION_OPTION (upper-cased, joined with
    an underscore) takes precedence over any value found in config files.
    """

    def has_option(self, section, option):
        """Report True when the option exists in the environment or the files."""
        env_key = to_environ_key('_'.join((section, option)))
        if env_key in environ:
            return True
        return super(EnvironmentConfigParser, self).has_option(section, option)

    def get(self, section, option, **kwargs):
        """Return the option value, preferring an environment override."""
        env_key = to_environ_key('_'.join((section, option)))
        try:
            return environ[env_key]
        except KeyError:
            return super(EnvironmentConfigParser, self).get(section, option, **kwargs)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def readConfigFile(cfgfile):
    """
    Read config files and return ConfigParser object

    @param cfgfile: filename or array of filenames
    @return: ConfigParser object
    """
    def _as_list(value):
        # 'list' converter: comma-separated string -> list of trimmed items
        return [item.strip() for item in value.split(',')]

    parser = EnvironmentConfigParser(
        interpolation=ExtendedInterpolation(),
        converters={'list': _as_list},
    )
    parser.read(cfgfile)
    return parser
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _config_files():
    """Return candidate config file locations, lowest to highest precedence."""
    # Import here (not at module top) to avoid any circular-import risk.
    from core.paths import workdir_path, bundled
    candidates = [
        bundled('etc', 'honeypot.cfg.base'),  # bundled read-only defaults
        workdir_path('etc', 'honeypot.cfg'),  # site-local overrides
        workdir_path('honeypot.cfg'),         # convenience root-level override
    ]
    return candidates


# Module-level singleton config, parsed once at import time.
CONFIG = readConfigFile(_config_files())
|
core/logfile.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
|
|
2
|
+
from sys import stdout
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
|
|
5
|
+
from pytz import timezone
|
|
6
|
+
|
|
7
|
+
from twisted.python import log, util
|
|
8
|
+
from twisted.python.logfile import DailyLogFile
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class HoneypotDailyLogFile(DailyLogFile):
    """
    Overload original Twisted with improved date formatting
    """

    def suffix(self, tupledate):
        """
        Return the suffix given a (year, month, day) tuple or unixtime
        """
        try:
            year, month, day = tupledate[0], tupledate[1], tupledate[2]
            return "{:02d}-{:02d}-{:02d}".format(year, month, day)
        except Exception:
            # tupledate was not indexable as a date tuple;
            # try taking a float unixtime
            return '_'.join(map(str, self.toDate(tupledate)))
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def myFLOemit(self, eventDict):
    """
    Format the given log event as text and write it to the output file.

    Installed onto twisted.python.log.FileLogObserver in set_logger(), so
    `self` here is the observer instance, not a class in this module.

    @param eventDict: a log event
    @type eventDict: L{dict} mapping L{str} (native string) to L{object}
    """

    # Custom emit for FileLogObserver
    text = log.textFromEventDict(eventDict)
    if text is None:
        # Event carried no renderable text; nothing to write.
        return
    timeStr = self.formatTime(eventDict['time'])
    fmtDict = {
        # Indent continuation lines so multi-line messages stay visually
        # grouped under their timestamp.
        'text': text.replace('\n', '\n\t')
    }
    # NOTE(review): log._safeFormat is a private Twisted helper (formats
    # without raising) — confirm availability when upgrading Twisted.
    msgStr = log._safeFormat('%(text)s\n', fmtDict)
    util.untilConcludes(self.write, timeStr + ' ' + msgStr)
    util.untilConcludes(self.flush)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def myFLOformatTime(self, when):
    """
    Log time in UTC

    By default it's formatted as an ISO8601-like string (ISO8601 date and
    ISO8601 time separated by a space). It can be customized using the
    C{timeFormat} attribute, which will be used as input for the underlying
    L{datetime.datetime.strftime} call.

    Installed onto twisted.python.log.FileLogObserver in set_logger(), so
    C{self.timeFormat} is the observer's optional custom format string.

    @type when: C{int}
    @param when: POSIX (ie, UTC) timestamp.

    @rtype: C{str}
    """
    timeFormatString = self.timeFormat
    if timeFormatString is None:
        # Default: bracketed UTC timestamp with microseconds and explicit 'Z'.
        timeFormatString = '[%Y-%m-%d %H:%M:%S.%fZ]'
    return datetime.fromtimestamp(when, tz=timezone('UTC')).strftime(timeFormatString)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def set_logger(cfg_options):
    """Install the custom observer methods and start Twisted logging.

    Logs to stdout when cfg_options['logfile'] is None, otherwise to a
    daily-rotated file at that path.
    """
    # Monkey-patch the observer class so every FileLogObserver uses our
    # UTC timestamp format and single-line emit.
    log.FileLogObserver.emit = myFLOemit
    log.FileLogObserver.formatTime = myFLOformatTime

    logfile = cfg_options['logfile']
    if logfile is None:
        log.startLogging(stdout)
    else:
        daily_log = HoneypotDailyLogFile.fromFullPath(logfile)
        log.startLogging(daily_log, setStdout=False)
|
core/output.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
|
|
2
|
+
from socket import gethostname
|
|
3
|
+
|
|
4
|
+
from core.config import CONFIG
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Output(object):
    """
    Abstract base class intended to be inherited by output plugins.

    Subclasses override start()/stop()/write(); __init__ resolves the
    sensor name and then calls start().
    """

    def __init__(self, general_options):
        self.cfg = general_options

        # Sensor name: explicit plugin config wins, then the honeypot-wide
        # 'sensor_name' setting, then this machine's hostname.
        self.sensor = (self.cfg['sensor'] if 'sensor' in self.cfg
                       else CONFIG.get('honeypot', 'sensor_name', fallback=gethostname()))

        self.start()

    def start(self):
        """
        Abstract method to initialize output plugin
        """
        pass

    def stop(self):
        """
        Abstract method to shut down output plugin
        """
        pass

    def write(self, event):
        """
        Handle a general event within the output plugin
        """
        pass
|
core/paths.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
"""
|
|
2
|
+
paths.py - Single source of truth for runtime path resolution.
|
|
3
|
+
|
|
4
|
+
The honeypot needs a "working directory" containing:
|
|
5
|
+
data/ geolocation databases, SQLite db, etc.
|
|
6
|
+
etc/ config files (honeypot-launch.cfg.base, honeypot.cfg.base)
|
|
7
|
+
log/ rotating log files (created on demand)
|
|
8
|
+
responses/ ElastiSearch wire-protocol response stubs
|
|
9
|
+
|
|
10
|
+
Priority for locating the working directory:
|
|
11
|
+
1. ELASTICPOT_WORKDIR environment variable
|
|
12
|
+
2. Current working directory
|
|
13
|
+
|
|
14
|
+
The bundled read-only defaults (etc/*.cfg.base, responses/*.json)
|
|
15
|
+
are installed inside the `elasticpot` package and located via the package's
|
|
16
|
+
own __file__ attribute, which works on all Python versions without
|
|
17
|
+
requiring pkg_resources or importlib.resources.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import absolute_import
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
from os import getcwd, environ
|
|
24
|
+
from os.path import abspath, dirname, join
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def get_workdir():
    """Return the absolute path to the runtime working directory."""
    # ELASTICPOT_WORKDIR wins when set to a non-blank value; otherwise fall
    # back to the process's current working directory.
    configured = environ.get('ELASTICPOT_WORKDIR', '').strip()
    return abspath(configured) if configured else getcwd()


def workdir_path(*parts):
    """Return an absolute path rooted at the working directory."""
    base = get_workdir()
    return join(base, *parts)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def bundled(*parts):
    """
    Return the filesystem path to a file bundled inside the installed package.

    Arguments are path components relative to the elasticpot/data/ directory,
    passed as separate strings (like os.path.join) to avoid hardcoded
    separators.  Location is derived from this module's own __file__, which
    works on all Python versions (2.7+) without pkg_resources or
    importlib.resources.
    """
    # core/ and elasticpot/ are sibling top-level packages, so the data
    # directory is <parent-of-core>/elasticpot/data/.
    core_dir = dirname(abspath(__file__))
    install_root = dirname(core_dir)
    return join(install_root, 'elasticpot', 'data', *parts)
|
core/protocol.py
ADDED
|
@@ -0,0 +1,451 @@
|
|
|
1
|
+
|
|
2
|
+
from __future__ import absolute_import
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
from collections import OrderedDict
|
|
6
|
+
from ipaddress import ip_address, ip_network
|
|
7
|
+
from json import dumps, load
|
|
8
|
+
from os import sep
|
|
9
|
+
from random import randint
|
|
10
|
+
from sys import version_info
|
|
11
|
+
from time import time
|
|
12
|
+
|
|
13
|
+
from core.tools import (
|
|
14
|
+
decode,
|
|
15
|
+
encode,
|
|
16
|
+
get_local_ip,
|
|
17
|
+
get_utc_time,
|
|
18
|
+
resolve_url,
|
|
19
|
+
write_event
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
from twisted.python import log
|
|
23
|
+
from twisted.web.resource import Resource
|
|
24
|
+
|
|
25
|
+
try:
|
|
26
|
+
from urllib.parse import unquote
|
|
27
|
+
except ImportError:
|
|
28
|
+
from urlparse import unquote # type: ignore
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# Python 3 has no builtin `unicode`; provide an identity shim so the
# `unicode(...)` calls below work unchanged on both major versions.
if version_info.major >= 3:
    def unicode(x):
        return x
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class Index(Resource):
    # Leaf resource: every URL path is dispatched inside the render_* methods.
    isLeaf = True
    # Canned response templates; values start empty and are filled lazily by
    # get_page()/get_json().  Class-level attribute, so the cache is shared.
    page_cache = {name: '' for name in (
        'aliases.json',
        'banner.json',
        'cluster.json',
        'clusterstore.json',
        'error.json',
        'index1long.json',
        'index1short.json',
        'index2long.json',
        'index2short.json',
        'indices.txt',
        'nodes.json',
        'nodes2.json',
        'nodes2.txt',
        'mapping.json',
        'pluginhead.html',
        'search.json',
        'search2.json',
        'settings.json',
        'stats1.json',
        'stats2.json',
        'store.json',
    )}

    def __init__(self, options):
        # NOTE(review): Resource.__init__ is not invoked; appears safe while
        # isLeaf is True (no child lookup), but confirm if children are added.
        self.cfg = options
|
|
64
|
+
|
|
65
|
+
def logger(self, request, log_level, msg):
|
|
66
|
+
ip = request.getClientAddress().host
|
|
67
|
+
for network in self.cfg['blacklist']:
|
|
68
|
+
if ip_address(unicode(ip)) in ip_network(unicode(network)):
|
|
69
|
+
return
|
|
70
|
+
log.msg('[{}] ({}:{}): {}'.format(log_level, ip, request.getClientAddress().port, msg))
|
|
71
|
+
|
|
72
|
+
def render_HEAD(self, request):
|
|
73
|
+
path = unquote(decode(request.uri))
|
|
74
|
+
collapsed_path = resolve_url(path)
|
|
75
|
+
|
|
76
|
+
self.logger(request, 'INFO', '{}: {}'.format(decode(request.method), path))
|
|
77
|
+
|
|
78
|
+
event = {
|
|
79
|
+
'eventid': 'elasticpot.recon',
|
|
80
|
+
'message': 'Head scan',
|
|
81
|
+
'url': collapsed_path
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
self.report_event(request, event)
|
|
85
|
+
|
|
86
|
+
return self.send_response(request)
|
|
87
|
+
|
|
88
|
+
    def render_GET(self, request):
        """
        Handle a GET request: log it, report a recon event, then dispatch on
        the leading path component to the matching canned-response method.
        """
        path = unquote(decode(request.uri))
        collapsed_path = resolve_url(path)
        # Non-empty path components, e.g. '/_cat/indices' -> ['_cat', 'indices'].
        # Query strings are NOT split off, so flags like '?format=json' remain
        # part of the last component and are matched by substring below.
        url_path = list(filter(None, collapsed_path.split('/')))

        self.logger(request, 'INFO', '{}: {}'.format(decode(request.method), path))

        event = {
            'eventid': 'elasticpot.recon',
            'message': 'Scan',
            'url': collapsed_path
        }

        self.report_event(request, event)

        if len(url_path) == 0:
            # /
            return self.fake_banner(request)
        elif url_path[0].startswith('_nodes'):
            # /_nodes
            # /_nodes/stats
            return self.fake_nodes(request)
            # Not handled:
            # /_nodes/_local
        elif url_path[0].startswith('_search'):
            # /_search
            # /_search?pretty
            # /_search?source
            return self.fake_search(request)
        elif url_path[0] == '_stats':
            # /_stats
            # /_stats/
            # /_stats/indexing
            return self.fake_stats1(request)
        elif url_path[0] == '_mapping':
            # /_mapping
            return self.fake_mapping(request)
        elif url_path[0].startswith('favicon.'):
            # /favicon.ico
            return self.send_response(request)
        elif 'alias' in collapsed_path:
            # /%2A/_alias
            # /_aliases
            # /_aliases?pretty
            # /_aliases?pretty=true
            # /_cat/aliases?format=json&h=alias
            return self.fake_alias(request)
        elif url_path[-1].startswith('_settings'):
            # /*/_settings
            return self.fake_settings(request)
            # Not handled (should return settings.json too):
            # /*
        elif len(url_path) >= 2:
            if url_path[0] == '_cat':
                if url_path[1].startswith('indices'):
                    # /_cat/indices
                    # /_cat/indices?pretty
                    # /_cat/indices?v
                    # /_cat/indices?format=json
                    # /_cat/indices?format=json&h=index
                    # /_cat/indices?format=text&v=true
                    # /_cat/indices?bytes=b&format=json
                    # /_cat/indices/1cf0aa9d61f185b59f643939f862c01f89b21360?bytes=b
                    # /_cat/indices/db18744ea5570fa9bf868df44fecd4b58332ff24?bytes=b
                    has_header = 'v' in url_path[1]
                    json_formatted = 'format=json' in url_path[1]
                    terse = 'h=index' in url_path[1]
                    return self.fake_indices(request, has_header, json_formatted, terse)
                elif url_path[1].startswith('nodes'):
                    # /_cat/nodes
                    # /_cat/nodes?format=json
                    # /_cat/nodes?h=name,id,i,po,v,m,u,dt,du,r,gto
                    json_formatted = 'format=json' in url_path[1]
                    return self.fake_nodes2(request, json_formatted)
                else:
                    return self.fake_error(request, url_path[0])
            elif url_path[-1] == 'store' or url_path[-2] == 'store':
                # /_all/_stats/store
                # /_stats/store
                # /_stats/store/?pretty&human&level=cluster
                cluster = 'level=cluster' in collapsed_path
                pretty = 'pretty' in collapsed_path
                return self.fake_store(request, cluster, pretty)
            elif url_path[0] == '_plugin' and url_path[1].startswith('head'):
                # /_plugin/head
                return self.fake_plugins(request)
            elif url_path[-1].startswith('_search'):
                # /1cf0aa9d61f185b59f643939f862c01f89b21360/_search?pretty=true&q=*:*
                # /1cf0aa9d61f185b59f643939f862c01f89b21360/_search?size=5000
                # /db18744ea5570fa9bf868df44fecd4b58332ff24/_search?pretty=true&q=*:*
                # /db18744ea5570fa9bf868df44fecd4b58332ff24/_search?size=5000
                json_formatted = 'pretty' in url_path[-1]
                index = url_path[0]
                return self.fake_search2(request, index, json_formatted)
            elif url_path[0] == '_cluster':
                if url_path[1].startswith('health'):
                    # /_cluster/health
                    return self.fake_cluster(request)
                elif url_path[1] == 'stats':
                    # /_cluster/stats
                    return self.fake_stats2(request)
                # Not handled:
                # /_cluster/state
                else:
                    return self.fake_error(request, url_path[0])
            else:
                return self.fake_error(request, url_path[0])
            # Not handled:
            # /evox/about
            # /stalker_portal/c/
            # /streaming/clients_live.php
            # /streaming/QxAvEzlK.php
            # /streaming/uo6jIDnf.php
            # These should return
            # {"error": "Incorrect HTTP method for uri [{path}] and method [GET], allowed: [POST]","status": 405}
        else:
            # /api.php
            # /client_area/
            # /HNAP1
            # /index/_search?pretty=true&q=*:*
            # /login.php
            # /Nmap/folder/check1592730162
            # /nmaplowercheck1592730162
            # /NmapUpperCheck1592730162
            # /nice%20ports,/Trinity.txt.bak
            # /robots.txt
            # /sitemap.xml
            # /stalker_portal/c/version.js
            # /stat
            # /streaming
            # /system_api.php
            # /4e5e5d7364f443e28fbf0d3ae744a59a
            # /?c=4e5e5d7364f443e28fbf0d3ae744a59a
            return self.fake_error(request, url_path[0])
            # Not handled:
            # /c
            # This should return:
            # {"error":{"root_cause":[{"type":"illegal_argument_exception","reason":"request [/] contains unrecognized parameter: [c]"}],
            # "type":"illegal_argument_exception","reason":"request [/] contains unrecognized parameter: [c]"},"status":400}
|
|
227
|
+
|
|
228
|
+
    def render_POST(self, request):
        """
        Handle a POST request: log it, capture any request body as an
        'elasticpot.attack' event, and reply with a fake search result for
        _search URLs or an empty response otherwise.
        """
        path = unquote(decode(request.uri))

        self.logger(request, 'INFO', '{}: {}'.format(decode(request.method), path))

        if request.getHeader('Content-Length'):
            collapsed_path = resolve_url(path)
            content_length = int(request.getHeader('Content-Length'))
            if content_length > 0:
                # Attacker-supplied body: log it and report it verbatim.
                post_data = decode(request.content.read())
                self.logger(request, 'INFO', 'POST body: {}'.format(post_data))
                event = {
                    'eventid': 'elasticpot.attack',
                    'message': 'Exploit',
                    'payload': post_data,
                    'url': path
                }

                self.report_event(request, event)

                # /_search
                # /_search?pretty
                # /_search?source
                # /1cf0aa9d61f185b59f643939f862c01f89b21360/_search
                # /db18744ea5570fa9bf868df44fecd4b58332ff24/_search
                if '/_search' in collapsed_path:
                    return self.fake_search(request)

        # Not handled:
        # /website/blog/
        # /info/info
        # /_sql?format=json
        # /sdk
        # send empty response as we're now done
        return self.send_response(request)
|
|
263
|
+
|
|
264
|
+
def fake_banner(self, request):
|
|
265
|
+
response = self.get_json('banner.json')
|
|
266
|
+
response['name'] = self.cfg['instance_name']
|
|
267
|
+
response['cluster_name'] = self.cfg['cluster_name']
|
|
268
|
+
response['version']['number'] = self.cfg['spoofed_version']
|
|
269
|
+
page = dumps(response, indent=2, separators=(',', ' : '), sort_keys=True)
|
|
270
|
+
return self.send_response(request, page)
|
|
271
|
+
|
|
272
|
+
def fake_indices(self, request, has_header, json_formatted, terse):
|
|
273
|
+
if json_formatted:
|
|
274
|
+
if terse:
|
|
275
|
+
index1 = self.get_page('index1short.json')
|
|
276
|
+
index2 = self.get_page('index2short.json')
|
|
277
|
+
else:
|
|
278
|
+
index1 = self.get_page('index1long.json')
|
|
279
|
+
index2 = self.get_page('index2long.json')
|
|
280
|
+
page = '[{},{}]'.format(index1, index2)
|
|
281
|
+
else:
|
|
282
|
+
page = self.get_page('indices.txt')
|
|
283
|
+
if has_header:
|
|
284
|
+
header = 'health status index uuid pri rep docs.count docs.deleted store.size pri.store.size'
|
|
285
|
+
page = header + '\n' + page
|
|
286
|
+
return self.send_response(request, page)
|
|
287
|
+
|
|
288
|
+
def fake_cluster(self, request):
|
|
289
|
+
response = self.get_json('cluster.json')
|
|
290
|
+
response['cluster_name'] = self.cfg['cluster_name']
|
|
291
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
292
|
+
return self.send_response(request, page)
|
|
293
|
+
|
|
294
|
+
def fake_alias(self, request):
|
|
295
|
+
page = self.get_page('aliases.json')
|
|
296
|
+
return self.send_response(request, page)
|
|
297
|
+
|
|
298
|
+
    def fake_nodes(self, request):
        """
        Answer /_nodes with the canned nodes.json template, personalized with
        the configured identity (IP, hostname, version, hardware counts).
        """
        public_ip = decode(self.cfg['public_ip'])
        # Node key used inside nodes.json; must match the template file.
        node_name = 'x1JG6g9PRHy6ClCOO2-C4g'
        response = self.get_json('nodes.json')
        response['cluster_name'] = self.cfg['cluster_name']
        response['nodes'][node_name]['name'] = self.cfg['instance_name']
        response['nodes'][node_name]['transport_address'] = 'inet[/{}:9300]'.format(public_ip)
        response['nodes'][node_name]['host'] = self.cfg['host_name']
        response['nodes'][node_name]['ip'] = public_ip
        response['nodes'][node_name]['version'] = self.cfg['spoofed_version']
        response['nodes'][node_name]['build'] = self.cfg['build']
        response['nodes'][node_name]['http_address'] = 'inet[/{}:9200]'.format(public_ip)
        response['nodes'][node_name]['os']['available_processors'] = self.cfg['total_processors']
        response['nodes'][node_name]['os']['cpu']['total_cores'] = self.cfg['total_cores']
        response['nodes'][node_name]['os']['cpu']['total_sockets'] = self.cfg['total_sockets']
        # Fresh random PID per request makes the node look live.
        response['nodes'][node_name]['process']['id'] = randint(100, 40000)
        response['nodes'][node_name]['network']['primary_interface']['address'] = public_ip
        response['nodes'][node_name]['network']['primary_interface']['mac_address'] = self.cfg['mac_address']
        # NOTE(review): transport publish_address uses port 9200 here while
        # transport_address above uses 9300 — confirm this is intentional.
        response['nodes'][node_name]['transport']['publish_address'] = 'inet[/{}:9200]'.format(public_ip)
        response['nodes'][node_name]['http']['publish_address'] = 'inet[/{}:9200]'.format(public_ip)
        page = dumps(response, separators=(',', ':'), sort_keys=True)
        return self.send_response(request, page)
|
|
320
|
+
|
|
321
|
+
def fake_nodes2(self, request, json_formatted):
|
|
322
|
+
public_ip = decode(self.cfg['public_ip'])
|
|
323
|
+
if json_formatted:
|
|
324
|
+
response = self.get_json('nodes2.json')
|
|
325
|
+
response['ip'] = public_ip
|
|
326
|
+
page = '[{}]'.format(dumps(response, separators=(',', ':')), sort_keys=True)
|
|
327
|
+
else:
|
|
328
|
+
page = '{} {}'.format(public_ip, self.get_page('nodes2.txt'))
|
|
329
|
+
return self.send_response(request, page)
|
|
330
|
+
|
|
331
|
+
def fake_search(self, request):
|
|
332
|
+
shards = randint(5, 50)
|
|
333
|
+
response = self.get_json('search.json')
|
|
334
|
+
response['took'] = randint(1, 25)
|
|
335
|
+
response['_shards']['total'] = shards
|
|
336
|
+
response['_shards']['successful'] = shards
|
|
337
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
338
|
+
return self.send_response(request, page)
|
|
339
|
+
|
|
340
|
+
def fake_search2(self, request, index, json_formatted):
|
|
341
|
+
response = self.get_json('search2.json')
|
|
342
|
+
response['hits']['hits'][0]['_index'] = index
|
|
343
|
+
if json_formatted:
|
|
344
|
+
page = dumps(response, indent=2, separators=(',', ' : '), sort_keys=True)
|
|
345
|
+
else:
|
|
346
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
347
|
+
return self.send_response(request, page)
|
|
348
|
+
|
|
349
|
+
def fake_plugins(self, request):
|
|
350
|
+
page = self.get_page('pluginhead.html')
|
|
351
|
+
return self.send_response(request, page)
|
|
352
|
+
|
|
353
|
+
def fake_stats1(self, request):
|
|
354
|
+
page = self.get_page('stats1.json')
|
|
355
|
+
return self.send_response(request, page)
|
|
356
|
+
|
|
357
|
+
def fake_stats2(self, request):
|
|
358
|
+
response = self.get_json('stats2.json')
|
|
359
|
+
response['cluster_name'] = self.cfg['cluster_name']
|
|
360
|
+
response['nodes']['os']['allocated_processors'] = self.cfg['total_processors']
|
|
361
|
+
response['nodes']['os']['available_processors'] = self.cfg['total_processors']
|
|
362
|
+
response['timestamp'] = int(time())
|
|
363
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
364
|
+
return self.send_response(request, page)
|
|
365
|
+
|
|
366
|
+
def fake_mapping(self, request):
|
|
367
|
+
page = self.get_page('mapping.json')
|
|
368
|
+
return self.send_response(request, page)
|
|
369
|
+
|
|
370
|
+
def fake_settings(self, request):
|
|
371
|
+
page = self.get_page('settings.json')
|
|
372
|
+
return self.send_response(request, page)
|
|
373
|
+
|
|
374
|
+
def fake_store(self, request, cluster, pretty):
|
|
375
|
+
if cluster:
|
|
376
|
+
response = self.get_json('clusterstore.json')
|
|
377
|
+
else:
|
|
378
|
+
response = self.get_json('store.json')
|
|
379
|
+
if pretty:
|
|
380
|
+
page = dumps(response, indent=2, separators=(',', ' : '), sort_keys=True)
|
|
381
|
+
else:
|
|
382
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
383
|
+
return self.send_response(request, page)
|
|
384
|
+
|
|
385
|
+
def fake_error(self, request, index):
|
|
386
|
+
response = self.get_json('error.json')
|
|
387
|
+
response['error']['root_cause'][0]['reason'] = 'no such index [{}]'.format(index)
|
|
388
|
+
response['error']['root_cause'][0]['resource.id'] = index
|
|
389
|
+
response['error']['root_cause'][0]['index'] = index
|
|
390
|
+
response['error']['reason'] = 'no such index [{}]'.format(index)
|
|
391
|
+
response['error']['resource.id'] = index
|
|
392
|
+
response['error']['index'] = index
|
|
393
|
+
page = dumps(response, separators=(',', ':'), sort_keys=True)
|
|
394
|
+
return self.send_response(request, page)
|
|
395
|
+
|
|
396
|
+
def report_event(self, request, event):
|
|
397
|
+
unix_time = time()
|
|
398
|
+
human_time = get_utc_time(unix_time)
|
|
399
|
+
local_ip = get_local_ip()
|
|
400
|
+
event['timestamp'] = human_time
|
|
401
|
+
event['unixtime'] = unix_time
|
|
402
|
+
event['src_ip'] = request.getClientAddress().host
|
|
403
|
+
event['src_port'] = request.getClientAddress().port
|
|
404
|
+
event['dst_port'] = self.cfg['port']
|
|
405
|
+
event['sensor'] = self.cfg['sensor']
|
|
406
|
+
event['request'] = decode(request.method)
|
|
407
|
+
user_agent = request.getHeader('User-Agent')
|
|
408
|
+
if user_agent:
|
|
409
|
+
event['user_agent'] = user_agent
|
|
410
|
+
content_type = request.getHeader('Content-Type')
|
|
411
|
+
if content_type:
|
|
412
|
+
event['content_type'] = content_type
|
|
413
|
+
accept_language = request.getHeader('Accept-Language')
|
|
414
|
+
if accept_language:
|
|
415
|
+
event['accept_language'] = accept_language
|
|
416
|
+
event['dst_ip'] = self.cfg['public_ip'] if self.cfg['report_public_ip'] else local_ip
|
|
417
|
+
write_event(event, self.cfg)
|
|
418
|
+
|
|
419
|
+
def get_json(self, page):
|
|
420
|
+
if page not in self.page_cache:
|
|
421
|
+
log.msg('Missing JSON file: "{}".'.format(page))
|
|
422
|
+
return {}
|
|
423
|
+
if self.page_cache[page] == '':
|
|
424
|
+
with open('{}{}{}'.format(self.cfg['responses_dir'], sep, page), 'r') as f:
|
|
425
|
+
self.page_cache[page] = load(f, object_pairs_hook=OrderedDict)
|
|
426
|
+
return self.page_cache[page]
|
|
427
|
+
|
|
428
|
+
# a simple wrapper to cache files from "responses" folder
|
|
429
|
+
def get_page(self, page):
|
|
430
|
+
if page not in self.page_cache:
|
|
431
|
+
log.msg('Missing file: "{}".'.format(page))
|
|
432
|
+
if page.lower().endswith('.json'):
|
|
433
|
+
return '{}'
|
|
434
|
+
else:
|
|
435
|
+
return ''
|
|
436
|
+
# if page is not in cache, load it from file
|
|
437
|
+
if self.page_cache[page] == '':
|
|
438
|
+
if page.lower().endswith('.json'):
|
|
439
|
+
self.page_cache[page] = dumps(self.get_json(page), separators=(',', ':'), sort_keys=True)
|
|
440
|
+
else:
|
|
441
|
+
with open('{}{}{}'.format(self.cfg['responses_dir'], sep, page), 'r') as f:
|
|
442
|
+
self.page_cache[page] = f.read()
|
|
443
|
+
return self.page_cache[page]
|
|
444
|
+
|
|
445
|
+
# overload base class's send_response() to set appropriate headers and server version
|
|
446
|
+
def send_response(self, request, page=''):
|
|
447
|
+
request.setHeader('Server', 'Apache')
|
|
448
|
+
request.setHeader('Content-Length', str(len(page)))
|
|
449
|
+
request.setHeader('Content-Type', 'application/json; charset=UTF-8')
|
|
450
|
+
request.setHeader('Connection', 'Close')
|
|
451
|
+
return encode('{}'.format(page))
|