pgsqlpot 2.0.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- core/__init__.py +0 -0
- core/config.py +50 -0
- core/logfile.py +74 -0
- core/output.py +39 -0
- core/paths.py +53 -0
- core/protocol.py +161 -0
- core/tools.py +170 -0
- output_plugins/__init__.py +0 -0
- output_plugins/couch.py +68 -0
- output_plugins/datadog.py +74 -0
- output_plugins/discord.py +133 -0
- output_plugins/elastic.py +137 -0
- output_plugins/hpfeed.py +43 -0
- output_plugins/influx2.py +66 -0
- output_plugins/jsonlog.py +36 -0
- output_plugins/kafka.py +57 -0
- output_plugins/localsyslog.py +66 -0
- output_plugins/mongodb.py +83 -0
- output_plugins/mysql.py +210 -0
- output_plugins/nlcvapi.py +119 -0
- output_plugins/postgres.py +154 -0
- output_plugins/redisdb.py +47 -0
- output_plugins/rethinkdblog.py +46 -0
- output_plugins/slack.py +94 -0
- output_plugins/socketlog.py +40 -0
- output_plugins/sqlite.py +141 -0
- output_plugins/telegram.py +141 -0
- output_plugins/textlog.py +46 -0
- output_plugins/xmpp.py +193 -0
- pgsqlpot/__init__.py +25 -0
- pgsqlpot/cli.py +512 -0
- pgsqlpot/data/Dockerfile +56 -0
- pgsqlpot/data/docs/INSTALL.md +400 -0
- pgsqlpot/data/docs/INSTALLWIN.md +411 -0
- pgsqlpot/data/docs/PLUGINS.md +21 -0
- pgsqlpot/data/docs/TODO.md +8 -0
- pgsqlpot/data/docs/datadog/README.md +32 -0
- pgsqlpot/data/docs/discord/README.md +58 -0
- pgsqlpot/data/docs/geoipupdtask.ps1 +270 -0
- pgsqlpot/data/docs/mysql/README.md +176 -0
- pgsqlpot/data/docs/mysql/READMEWIN.md +157 -0
- pgsqlpot/data/docs/mysql/mysql.sql +85 -0
- pgsqlpot/data/docs/postgres/README.md +184 -0
- pgsqlpot/data/docs/postgres/READMEWIN.md +196 -0
- pgsqlpot/data/docs/postgres/postgres.sql +73 -0
- pgsqlpot/data/docs/slack/README.md +68 -0
- pgsqlpot/data/docs/sqlite3/README.md +131 -0
- pgsqlpot/data/docs/sqlite3/READMEWIN.md +123 -0
- pgsqlpot/data/docs/sqlite3/sqlite3.sql +69 -0
- pgsqlpot/data/docs/telegram/README.md +103 -0
- pgsqlpot/data/etc/honeypot.cfg +415 -0
- pgsqlpot/data/etc/honeypot.cfg.base +418 -0
- pgsqlpot/data/test/.gitignore +3 -0
- pgsqlpot/data/test/test.py +51 -0
- pgsqlpot/honeypot.py +117 -0
- pgsqlpot-2.0.0.dist-info/METADATA +152 -0
- pgsqlpot-2.0.0.dist-info/RECORD +61 -0
- pgsqlpot-2.0.0.dist-info/WHEEL +6 -0
- pgsqlpot-2.0.0.dist-info/entry_points.txt +2 -0
- pgsqlpot-2.0.0.dist-info/licenses/LICENSE +674 -0
- pgsqlpot-2.0.0.dist-info/top_level.txt +3 -0
core/__init__.py
ADDED
|
File without changes
|
core/config.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
|
|
2
|
+
from configparser import ConfigParser, ExtendedInterpolation
|
|
3
|
+
|
|
4
|
+
from os import environ
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def to_environ_key(key):
    """Map a configuration key to its environment-variable form.

    The caller joins section and option with '_' first; this simply
    upper-cases the result (e.g. 'honeypot_port' -> 'HONEYPOT_PORT').
    """
    return key.upper()
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class EnvironmentConfigParser(ConfigParser):
    """ConfigParser that lets environment variables override file values.

    An option ``opt`` in section ``sec`` is overridden by the environment
    variable ``SEC_OPT`` (section and option joined by an underscore and
    upper-cased) whenever that variable is set.
    """

    @staticmethod
    def _environ_key(section, option):
        # Same scheme as module-level to_environ_key(): SECTION_OPTION.
        return '{}_{}'.format(section, option).upper()

    def has_option(self, section, option):
        """True when the option exists in the environment or the file."""
        if self._environ_key(section, option) in environ:
            return True
        return super(EnvironmentConfigParser, self).has_option(section, option)

    def get(self, section, option, **kwargs):
        """Return the environment override when present, else the file value."""
        try:
            return environ[self._environ_key(section, option)]
        except KeyError:
            return super(EnvironmentConfigParser, self).get(section, option, **kwargs)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def readConfigFile(cfgfile):
    """
    Read config files and return ConfigParser object

    @param cfgfile: filename or array of filenames
    @return: EnvironmentConfigParser with an extra ``getlist`` converter
    """
    def comma_list(value):
        # Backs parser.getlist(): split on ',' and strip whitespace.
        return [piece.strip() for piece in value.split(',')]

    parser = EnvironmentConfigParser(
        converters={'list': comma_list},
        interpolation=ExtendedInterpolation()
    )
    parser.read(cfgfile)
    return parser
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _config_files():
    """Return the ordered list of config files to load.

    Later entries override earlier ones (ConfigParser.read() semantics),
    so site-local files win over the bundled defaults.
    """
    # Import here (not at module top) to avoid any circular-import risk.
    from core.paths import workdir_path, bundled
    return [
        bundled('etc', 'honeypot.cfg.base'),  # bundled read-only defaults
        workdir_path('etc', 'honeypot.cfg'),  # site-local overrides
        workdir_path('honeypot.cfg'),  # convenience root-level override
    ]


# Module-level singleton: parsed configuration shared by the whole honeypot,
# built once at import time.
CONFIG = readConfigFile(_config_files())
|
core/logfile.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
|
|
2
|
+
from sys import stdout
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
|
|
5
|
+
from pytz import timezone
|
|
6
|
+
|
|
7
|
+
from twisted.python import log, util
|
|
8
|
+
from twisted.python.logfile import DailyLogFile
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class HoneypotDailyLogFile(DailyLogFile):
    """
    DailyLogFile variant whose rotated-file suffix is formatted
    as zero-padded YYYY-MM-DD instead of Twisted's default.
    """

    def suffix(self, tupledate):
        """
        Return the suffix given a (year, month, day) tuple or unixtime
        """
        try:
            year, month, day = tupledate[0], tupledate[1], tupledate[2]
            return "{:02d}-{:02d}-{:02d}".format(year, month, day)
        except Exception:
            # Not indexable/formattable: assume a float unixtime and let
            # the base class convert it to a date tuple first.
            return '_'.join(str(part) for part in self.toDate(tupledate))
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def myFLOemit(self, eventDict):
    """
    Format the given log event as text and write it to the output file.

    Patched onto twisted.python.log.FileLogObserver by set_logger(), so
    ``self`` is the observer instance.

    @param eventDict: a log event
    @type eventDict: L{dict} mapping L{str} (native string) to L{object}
    """

    # Custom emit for FileLogObserver
    text = log.textFromEventDict(eventDict)
    if text is None:
        # Event carries nothing printable: skip it entirely.
        return
    timeStr = self.formatTime(eventDict['time'])
    fmtDict = {
        # Indent continuation lines so multi-line events stay readable.
        'text': text.replace('\n', '\n\t')
    }
    msgStr = log._safeFormat('%(text)s\n', fmtDict)
    # untilConcludes retries the call when interrupted by a signal (EINTR).
    util.untilConcludes(self.write, timeStr + ' ' + msgStr)
    util.untilConcludes(self.flush)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def myFLOformatTime(self, when):
    """
    Log time in UTC

    Patched onto twisted.python.log.FileLogObserver by set_logger().

    By default it's formatted as an ISO8601-like string (ISO8601 date and
    ISO8601 time separated by a space). It can be customized using the
    C{timeFormat} attribute, which will be used as input for the underlying
    L{datetime.datetime.strftime} call.

    @type when: C{int}
    @param when: POSIX (ie, UTC) timestamp.

    @rtype: C{str}
    """
    timeFormatString = self.timeFormat
    if timeFormatString is None:
        # Bracketed UTC timestamp with microseconds, e.g.
        # [2024-01-31 23:59:59.123456Z]
        timeFormatString = '[%Y-%m-%d %H:%M:%S.%fZ]'
    return datetime.fromtimestamp(when, tz=timezone('UTC')).strftime(timeFormatString)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def set_logger(cfg_options):
    """
    Install the honeypot's log formatting and start Twisted logging.

    Patches FileLogObserver with the custom emit()/formatTime() above so
    every observer shares one format, then logs either to stdout (when no
    'logfile' is configured) or to a daily-rotating log file.

    @param cfg_options: dict of runtime options; reads 'logfile'.
    """
    log.FileLogObserver.emit = myFLOemit
    log.FileLogObserver.formatTime = myFLOformatTime
    # .get() tolerates a missing 'logfile' key (treated the same as None)
    # instead of raising KeyError before logging is even up.
    logfile = cfg_options.get('logfile')
    if logfile is None:
        log.startLogging(stdout)
    else:
        # setStdout=False: do not redirect sys.stdout into the log file.
        log.startLogging(HoneypotDailyLogFile.fromFullPath(logfile), setStdout=False)
|
core/output.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
|
|
2
|
+
from socket import gethostname
|
|
3
|
+
|
|
4
|
+
from core.config import CONFIG
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Output(object):
    """
    Abstract base class intended to be inherited by output plugins.

    The constructor stores the general options, resolves the sensor name
    (explicit 'sensor' option first, then the configured sensor_name,
    finally this host's name) and invokes start().
    """

    def __init__(self, general_options):
        self.cfg = general_options
        try:
            self.sensor = self.cfg['sensor']
        except KeyError:
            self.sensor = CONFIG.get('honeypot', 'sensor_name', fallback=gethostname())
        self.start()

    def start(self):
        """
        Abstract method to initialize output plugin
        """
        pass

    def stop(self):
        """
        Abstract method to shut down output plugin
        """
        pass

    def write(self, event):
        """
        Handle a general event within the output plugin
        """
        pass
|
core/paths.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
"""
|
|
2
|
+
paths.py - Single source of truth for runtime path resolution.
|
|
3
|
+
|
|
4
|
+
The honeypot needs a "working directory" containing:
|
|
5
|
+
data/ geolocation databases, SQLite db, etc.
|
|
6
|
+
etc/ config file (honeypot.cfg.base)
|
|
7
|
+
log/ rotating log files (created on demand)
|
|
8
|
+
|
|
9
|
+
Priority for locating the working directory:
|
|
10
|
+
1. PGSLQPOT_WORKDIR environment variable
|
|
11
|
+
2. Current working directory
|
|
12
|
+
|
|
13
|
+
The bundled read-only defaults (etc/*.cfg.base, responses/*.json)
|
|
14
|
+
are installed inside the `pgsqlpot` package and located via the package's
|
|
15
|
+
own __file__ attribute, which works on all Python versions without
|
|
16
|
+
requiring pkg_resources or importlib.resources.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import absolute_import
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
from os import getcwd, environ
|
|
23
|
+
from os.path import abspath, dirname, join
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def get_workdir():
    """Return the absolute path to the runtime working directory.

    Checks PGSQLPOT_WORKDIR first, then the historical misspelled
    PGSLQPOT_WORKDIR (kept for backward compatibility with existing
    deployments), and finally falls back to the current directory.
    """
    for var in ('PGSQLPOT_WORKDIR', 'PGSLQPOT_WORKDIR'):
        value = environ.get(var, '').strip()
        if value:
            return abspath(value)
    return getcwd()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def workdir_path(*parts):
    """Return an absolute path built by joining *parts* onto the
    runtime working directory."""
    root = get_workdir()
    return join(root, *parts)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def bundled(*parts):
    """
    Return the filesystem path to a file bundled inside the installed package.
    Arguments are path components relative to the pgsqlpot/data/ directory,
    passed as separate strings (like os.path.join) to avoid hardcoded separators.

    Locates the data directory via this module's own __file__, which works
    on all Python versions (2.7+) without pkg_resources or
    importlib.resources.
    """
    # core/ and pgsqlpot/ are sibling packages in the same install root,
    # so step up from core/ and descend into pgsqlpot/data/.
    core_dir = dirname(abspath(__file__))
    install_root = dirname(core_dir)
    return join(install_root, 'pgsqlpot', 'data', *parts)
|
core/protocol.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
|
|
2
|
+
from __future__ import absolute_import
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
from ipaddress import ip_address, ip_network
|
|
6
|
+
from sys import version_info
|
|
7
|
+
from time import time
|
|
8
|
+
from uuid import uuid4
|
|
9
|
+
|
|
10
|
+
from core.tools import (
|
|
11
|
+
decode,
|
|
12
|
+
encode,
|
|
13
|
+
get_local_ip,
|
|
14
|
+
get_utc_time,
|
|
15
|
+
printable,
|
|
16
|
+
to_int,
|
|
17
|
+
write_event
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
from twisted.internet.protocol import Factory, Protocol
|
|
21
|
+
from twisted.python.log import msg
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
if version_info[0] >= 3:
    # Python 3 has no ``unicode`` builtin: define a no-op stand-in so the
    # py2-compatible ip_address(unicode(...)) call sites work unchanged.
    def unicode(x):
        return x
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class PostgresServer(Protocol):
    """
    Fake PostgreSQL server protocol.

    Implements just enough of the frontend/backend startup handshake to
    harvest the client's startup parameters and password, then always
    answers "password authentication failed".

    State machine (self._state):
        1 -- fresh connection, expecting the client's SSL probe
        2 -- expecting the startup message (user/database/...)
        3 -- expecting the password message
        4 -- password received and rejected
    """

    def __init__(self, options):
        # Shared runtime options (port, sensor, blacklist, output plugins...).
        self.cfg = options
        # Startup parameters harvested from the client.
        self._variables = {}
        self._state = None

    def connectionMade(self):
        """Reset per-connection state and report a 'connect' event."""
        self._state = 1
        self._variables = {}
        # Short random id correlating all events of this connection.
        self.session = uuid4().hex[:12]
        self.report_event('connect')

    def connectionLost(self, reason):
        """Report an abandoned login attempt (if any) plus the disconnect,
        then reset state for safety."""
        if self._state == 3:
            # Client hung up before sending a password: log it with an
            # empty password so the attempt is still recorded.
            username = self._variables['user'] if 'user' in self._variables else ''
            self.report_event('login', username, '')
        self.report_event('disconnect', reason.value)
        self._state = 1
        self._variables = {}

    def dataReceived(self, data):
        """Drive the handshake state machine on each client packet."""
        if self._state == 1:
            # Client: I'd like to log in (SSLRequest probe)
            # '\x00\x00\x00\x08\x04\xD2\x16\x2F'
            self._state = 2
            # Server: Sure, go ahead ('N' = SSL not supported, continue plain)
            self.transport.write(b'N')
        elif self._state == 2:
            # Client: OK, logging in
            # '\x00\x00\x00\x36\x00\x03\x00\x00'
            # Here's the user name, 'foo'
            # 'user\x00foo\x00'
            # the database I need access to, 'bar'
            # 'database\x00bar\x00'
            # and some other stuff ('application': 'pgcli')
            # 'application_name\x00pgcli\x00'
            # End-of-stuff
            # '\x00'
            self.read_data_custom(data)
            self._state = 3
            # Server: M'kay, send password (AuthenticationCleartextPassword)
            self.transport.write(b'R\x00\x00\x00\x08\x00\x00\x00\x03')
        elif self._state == 3:
            message_type = to_int(data[0])
            if message_type == 0x70 and 'user' in self._variables:
                # Client: Here's the password
                # '\x70\x00\x00\x00\x08'
                # 'secret\x00'
                self.read_password_custom(data)
                self._state = 4
                username = self._variables['user']
                password = self._variables['password']
                self.report_event('login', username, password)
                # Server: Wrong credentials (ErrorResponse, SQLSTATE 28P01).
                # NOTE(review): the length field is fixed at 0x64 regardless
                # of the actual payload size -- confirm clients tolerate this.
                self.transport.write(
                    b'E\x00\x00\x00\x64' +
                    b'SFATAL\x00' +
                    b'VFATAL\x00' +
                    b'C28P01\x00' +
                    b'Mpassword authentication failed for user "' + encode(username) + b'"\x00' +
                    b'Fauth.c\x00' +
                    b'L323\x00' +
                    b'Rauth_failed\x00' +
                    b'\x00'
                )
                # Server: Try again with the correct one
                self.transport.write(b'R\x00\x00\x00\x08\x00\x00\x00\x03')

    def read_data_custom(self, data):
        """Parse the startup message: skip the 8-byte length/version header
        and pair up the NUL-separated key/value list into _variables."""
        data = decode(data)
        encoded_list = data[8:-1].split('\x00')
        # zip the same iterator with itself to pair consecutive items.
        self._variables = dict(zip(*([iter(encoded_list)] * 2)))

    def read_password_custom(self, data):
        """Parse the password message: skip the 5-byte type/length header
        and keep the NUL-terminated password."""
        data = decode(data)
        self._variables['password'] = data[5:].split('\x00')[0]

    def report_event(self, operation, username=None, password=None):
        """Build an event dict, log a human-readable line and hand the event
        to the output plugins.

        @param operation: 'connect', 'login' or 'disconnect'.
        @param username: credentials for 'login'; for 'disconnect' this
            carries the Twisted failure reason instead.
        @param password: password for 'login' events.
        """
        operation = operation.lower()
        unix_time = time()
        peer = self.transport.getPeer()
        ip = peer.host
        # Silently drop events from blacklisted networks.
        for network in self.cfg['blacklist']:
            if ip_address(unicode(ip)) in ip_network(unicode(network)):
                return
        port = peer.port
        event = {
            'eventid': 'pgsqlpot.' + operation,
            'operation': operation,
            'timestamp': get_utc_time(unix_time),
            'unixtime': unix_time,
            'src_ip': ip,
            'src_port': port,
            'dst_port': self.cfg['port'],
            'sensor': self.cfg['sensor'],
            'dst_ip': self.cfg['public_ip'] if self.cfg['report_public_ip'] else get_local_ip(),
            'session': self.session
        }
        if operation == 'login':
            event['username'] = printable(username)
            event['password'] = printable(password)
            # Keep the remaining startup parameters (minus the credentials),
            # sanitized for logging/storage.
            self._variables = {
                printable(key): printable(value)
                for key, value in self._variables.items()
                if key not in ['user', 'password']
            }
            if self._variables:
                event['variables'] = self._variables
            message = 'Login with username: "{}", password: "{}" from {}:{}.'.format(
                event['username'],
                event['password'],
                ip,
                port
            )
        elif operation == 'connect':
            message = 'Connection made from {}:{}.'.format(ip, port)
        elif operation == 'disconnect':
            message = '{}:{} disconnected. Reason: {}'.format(ip, port, username)
        else:
            # Unknown operation: treat username/password as a raw command.
            command = printable(username)
            if password:
                command += ' ' + printable(password)
            message = 'Unknown operation "{}" from {}:{}.'.format(command, ip, port)
        msg(message)
        write_event(event, self.cfg)
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
class PostgresFactory(Factory):
    """Twisted factory that builds one PostgresServer per incoming
    connection, all sharing the same runtime options dict."""

    def __init__(self, cfg):
        self.cfg = cfg

    def buildProtocol(self, addr):
        return PostgresServer(self.cfg)
|
core/tools.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from ipaddress import ip_address, ip_network
|
|
4
|
+
from os import makedirs, path
|
|
5
|
+
from string import printable as p
|
|
6
|
+
from socket import socket, AF_INET, SOCK_DGRAM
|
|
7
|
+
from sys import version_info
|
|
8
|
+
|
|
9
|
+
from core.config import CONFIG
|
|
10
|
+
|
|
11
|
+
from pytz import timezone
|
|
12
|
+
|
|
13
|
+
from twisted.python.log import msg
|
|
14
|
+
|
|
15
|
+
try:
|
|
16
|
+
from urllib.request import urlopen
|
|
17
|
+
except ImportError:
|
|
18
|
+
from urllib import urlopen
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
if version_info[0] >= 3:
    # Python 3: network data is ``bytes``; normalize the helpers so the
    # rest of the code can stay version-agnostic.
    def decode(x):
        return x.decode('utf-8', errors='ignore')
    def encode(x):
        return x.encode()
    def ord(x):
        # bytes indexing already yields int on py3; shadowing the builtin
        # keeps py2-style call sites working.
        return x
    def to_bytes(x):
        return bytes(x, 'ascii')
    def to_int(x):
        return x
    def unicode(x):
        # py3 has no unicode builtin; str is already unicode.
        return x
else:
    # Python 2: str is already bytes, so most helpers are identities.
    def decode(x):
        return x
    def encode(x):
        return x
    def to_bytes(x):
        return bytes(x)
    def to_int(x):
        return ord(x)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def mkdir(dir_path):
    """Create *dir_path* (including parents) if it does not already exist.

    No-op for an empty/None path or for an already-existing directory.

    @param dir_path: directory path to create.
    @raise OSError: when creation fails and the path is not a directory
        (e.g. it exists as a regular file, or permissions are missing).
    """
    if not dir_path:
        return
    # EAFP: create first and tolerate "already exists" afterwards, which
    # avoids the check-then-create race the old exists()/isdir() test had.
    try:
        makedirs(dir_path)
    except OSError:
        if not path.isdir(dir_path):
            raise
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def import_plugins(cfg):
    """Instantiate every enabled output plugin listed in the configuration.

    Scans CONFIG for [output_*] sections and, for each one with
    ``enabled = true``, imports output_plugins.<engine> and constructs its
    Output class with the shared runtime options.

    @param cfg: general options dict passed to each plugin constructor.
    @return: list of instantiated plugin objects; failures are logged and
        skipped so one bad plugin cannot block the rest.
    """
    # Load output modules (inspired by the Cowrie honeypot)
    msg('Loading the plugins...')
    output_plugins = []
    general_options = cfg
    for section in CONFIG.sections():
        if not section.startswith('output_'):
            continue
        # fallback=False: a section without an 'enabled' option is treated
        # as disabled instead of crashing plugin discovery with NoOptionError.
        if CONFIG.getboolean(section, 'enabled', fallback=False) is False:
            continue
        # Strip only the 'output_' prefix (maxsplit=1) so engine names that
        # themselves contain underscores still resolve to the right module.
        engine = section.split('_', 1)[1]
        try:
            module = __import__('output_plugins.{}'.format(engine),
                                globals(), locals(), ['output'], 0)
            output_plugins.append(module.Output(general_options))
            msg('Loaded output engine: {}'.format(engine))
        except ImportError as e:
            msg('Failed to load output engine: {} due to ImportError: {}'.format(engine, e))
        except Exception as e:
            msg('Failed to load output engine: {} {}'.format(engine, e))
    return output_plugins
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def stop_plugins(cfg):
    """Call stop() on every loaded output plugin.

    Per-plugin failures are logged and skipped so shutdown always runs to
    completion.

    @param cfg: options dict containing the 'output_plugins' list.
    """
    msg('Stopping the plugins...')  # fixed typo: was "Stoping"
    for plugin in cfg['output_plugins']:
        try:
            plugin.stop()
        except Exception as e:
            msg(e)
            continue
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def get_public_ip(ip_reporter):
    """Query the *ip_reporter* URL for this host's public IP address.

    @param ip_reporter: URL of a plain-text "what is my IP" service.
    @return: IP string (native str type on both py2/py3) or None when the
        lookup fails for any reason.
    """
    try:
        if version_info[0] < 3:
            return urlopen(ip_reporter).read().decode('latin1', errors='replace').encode('utf-8')
        else:
            return decode(urlopen(ip_reporter).read())
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
        # propagate; any network/decoding failure degrades to None.
        return None
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def get_local_ip():
    """Return this host's primary outbound IP address.

    Connects a UDP socket to an unroutable address -- no packet is sent --
    purely so the OS selects an outbound interface; falls back to loopback
    when that fails (e.g. no network configured).
    """
    s = socket(AF_INET, SOCK_DGRAM)
    try:
        s.connect(('10.255.255.255', 1))
        ip = s.getsockname()[0]
    except Exception:
        # Narrowed from a bare except: real socket errors still degrade to
        # loopback, but KeyboardInterrupt/SystemExit propagate.
        ip = '127.0.0.1'
    finally:
        s.close()
    return ip
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def get_utc_time(unix_time):
    """Return *unix_time* as an ISO8601 UTC string with a 'Z' suffix.

    The previous implementation appended 'Z' to an offset-aware
    isoformat(), producing malformed stamps like
    '2021-01-01T00:00:00+00:00Z'; using a naive UTC datetime yields the
    intended '2021-01-01T00:00:00Z'.

    @param unix_time: POSIX timestamp (int or float).
    """
    return datetime.utcfromtimestamp(unix_time).isoformat() + 'Z'
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def printable(x):
    """Return *x* unchanged when every byte is printable ASCII; otherwise
    return a fully hex-escaped form ('\\xNN' per byte) that is safe to log
    and store."""
    raw = encode(x)
    allowed = to_bytes(p)  # string.printable, as bytes
    if all(byte in allowed for byte in raw):
        return decode(raw)
    return ''.join('\\x{:02X}'.format(to_int(byte)) for byte in raw)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def write_event(event, cfg):
    """Dispatch *event* to every configured output plugin.

    Events whose source address falls inside a blacklisted network are
    silently dropped.

    @param event: event dict (must contain 'src_ip').
    @param cfg: options dict with 'blacklist' (networks) and
        'output_plugins' (plugin instances).
    """
    # Parse the source address once instead of once per blacklist entry.
    src = ip_address(unicode(event['src_ip']))
    for network in cfg['blacklist']:
        if src in ip_network(unicode(network)):
            return
    for plugin in cfg['output_plugins']:
        try:
            plugin.write(event)
        except Exception as e:
            # One misbehaving plugin must not stop the others.
            msg(e)
            continue
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def geolocate(remote_ip, reader_city, reader_asn):
    """Look up GeoIP city/country and ASN information for *remote_ip*.

    @param remote_ip: IP address string to resolve.
    @param reader_city: geoip2 City database reader (may be None/broken).
    @param reader_asn: geoip2 ASN database reader (may be None/broken).
    @return: (country, country_code, city, org, asn_num); failed lookups
        degrade to empty strings / 0 instead of raising.
    """
    try:
        response_city = reader_city.city(remote_ip)
        city = response_city.city.name
        if city is None:
            city = ''
        else:
            # encode/decode round-trip keeps py2 (bytes) and py3 (str)
            # call sites returning the same native string type.
            city = decode(city.encode('utf-8'))
        country = response_city.country.name
        if country is None:
            country = ''
            country_code = ''
        else:
            country = decode(country.encode('utf-8'))
            country_code = decode(response_city.country.iso_code.encode('utf-8'))
    except Exception as e:
        # Lookup failed (no reader, address not in DB, ...): log and fall
        # back to empty values.
        msg(e)
        city = ''
        country = ''
        country_code = ''

    try:
        response_asn = reader_asn.asn(remote_ip)
        if response_asn.autonomous_system_organization is None:
            org = ''
        else:
            org = decode(response_asn.autonomous_system_organization.encode('utf-8'))

        if response_asn.autonomous_system_number is not None:
            asn_num = response_asn.autonomous_system_number
        else:
            asn_num = 0
    except Exception as e:
        msg(e)
        org = ''
        asn_num = 0
    return country, country_code, city, org, asn_num
|
|
File without changes
|
output_plugins/couch.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
|
|
2
|
+
from __future__ import absolute_import
|
|
3
|
+
|
|
4
|
+
from core import output
|
|
5
|
+
from core.config import CONFIG
|
|
6
|
+
from core.tools import geolocate
|
|
7
|
+
|
|
8
|
+
from couchdb import Server
|
|
9
|
+
from geoip2.database import Reader
|
|
10
|
+
|
|
11
|
+
from twisted.python.log import msg
|
|
12
|
+
|
|
13
|
+
class Output(output.Output):
    """
    CouchDB output plugin: stores every honeypot event as a document in the
    configured database, optionally enriched with GeoIP data.
    """

    def start(self):
        """Connect to CouchDB and (optionally) open the GeoIP databases."""
        host = CONFIG.get('output_couch', 'host', fallback='localhost')
        port = CONFIG.getint('output_couch', 'port', fallback=5984)
        username = CONFIG.get('output_couch', 'username', fallback='pgsqlpot', raw=True)
        password = CONFIG.get('output_couch', 'password', fallback='', raw=True)
        db_name = CONFIG.get('output_couch', 'database', fallback='pgsqlpot')

        # Always define the attribute so a failed connection degrades to a
        # logged error in write() instead of an AttributeError.
        self.couch_db = None
        try:
            couchserver = Server('http://{}:{}@{}:{}'.format(username, password, host, port))
            if db_name in couchserver:
                self.couch_db = couchserver[db_name]
            else:
                self.couch_db = couchserver.create(db_name)
        except Exception as e:
            msg('output_couch: Error: {}'.format(e))

        self.geoip = CONFIG.getboolean('output_couch', 'geoip', fallback=True)
        self.reader_city = None
        self.reader_asn = None

        if self.geoip:
            geoipdb_city_path = CONFIG.get('output_couch', 'geoip_citydb', fallback='data/GeoLite2-City.mmdb')
            geoipdb_asn_path = CONFIG.get('output_couch', 'geoip_asndb', fallback='data/GeoLite2-ASN.mmdb')
            # Narrowed from bare excepts: a missing/corrupt database file is
            # logged and that reader stays None (geolocate tolerates this).
            try:
                self.reader_city = Reader(geoipdb_city_path)
            except Exception:
                msg('Failed to open City GeoIP database {}'.format(geoipdb_city_path))

            try:
                self.reader_asn = Reader(geoipdb_asn_path)
            except Exception:
                msg('Failed to open ASN GeoIP database {}'.format(geoipdb_asn_path))

    def stop(self):
        """Close any GeoIP readers that were successfully opened."""
        if self.geoip:
            if self.reader_city is not None:
                self.reader_city.close()
            if self.reader_asn is not None:
                self.reader_asn.close()

    def write(self, event):
        """Enrich *event* with GeoIP fields (when enabled) and persist it."""
        if self.geoip:
            country, country_code, city, org, asn_num = geolocate(event['src_ip'], self.reader_city, self.reader_asn)
            event['country'] = country
            event['country_code'] = country_code
            event['city'] = city
            event['org'] = org
            event['asn'] = asn_num

        if self.couch_db is None:
            # start() never obtained a connection: drop with a log message.
            msg('output_couch: Error: no database connection; event dropped')
            return
        try:
            self.couch_db.save(event)
        except Exception as e:
            msg('output_couch: Error: {}'.format(e))
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Simple Datadog HTTP logger.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import absolute_import
|
|
6
|
+
|
|
7
|
+
from io import BytesIO
|
|
8
|
+
from json import dumps
|
|
9
|
+
from platform import node
|
|
10
|
+
|
|
11
|
+
from core import output
|
|
12
|
+
from core.config import CONFIG
|
|
13
|
+
from core.tools import to_bytes
|
|
14
|
+
|
|
15
|
+
from twisted.internet import reactor
|
|
16
|
+
from twisted.python.log import msg
|
|
17
|
+
from twisted.web import client, http_headers
|
|
18
|
+
from twisted.web.client import FileBodyProducer
|
|
19
|
+
from twisted.internet.ssl import ClientContextFactory
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class WebClientContextFactory(ClientContextFactory):
    """SSL context factory for the HTTPS agent.

    NOTE(review): hostname/port are ignored and the default client context
    is returned, which looks like it skips per-host certificate checks --
    confirm that is intended for the Datadog intake endpoint.
    """

    def getContext(self, hostname, port):
        # hostname/port are required by the Agent API but unused here.
        return ClientContextFactory.getContext(self)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class QuietHTTP11ClientFactory(client._HTTP11ClientFactory):
    """HTTP/1.1 client factory with Twisted's per-connection
    "Starting/Stopping factory" log noise silenced."""
    noisy = False
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Output(output.Output):
    """
    Datadog output plugin: ships every event to the Datadog HTTP log intake
    as a JSON document, using a quiet (non-logging) Twisted agent.
    """

    def start(self):
        """Read the [output_datadog] section and build the HTTPS agent."""
        self.url = CONFIG.get('output_datadog', 'url')
        self.api_key = CONFIG.get('output_datadog', 'api_key', fallback='')
        if len(self.api_key) == 0:
            msg('Datadog output module: API key is not defined.')
        self.ddsource = CONFIG.get('output_datadog', 'ddsource', fallback='pgsqlpot')
        self.ddtags = CONFIG.get('output_datadog', 'ddtags', fallback='env:dev')
        self.service = CONFIG.get('output_datadog', 'service', fallback='honeypot')
        self.hostname = CONFIG.get('output_datadog', 'hostname', fallback=node())

        contextFactory = WebClientContextFactory()
        myQuietPool = client.HTTPConnectionPool(reactor)
        # Swap in the quiet factory to suppress per-connection log noise.
        myQuietPool._factory = QuietHTTP11ClientFactory
        self.agent = client.Agent(reactor, contextFactory=contextFactory, pool=myQuietPool)

    def stop(self):
        """Nothing to release: agent and pool die with the reactor."""
        pass

    def write(self, event):
        """Annotate *event* with a summary line and Datadog metadata, then
        POST it to the intake."""
        # Fixed summary line: it used to read "MS-SQL Pot" (copy-paste from
        # a sibling MSSQL honeypot); this is a PostgreSQL honeypot.
        # NOTE(review): the line pairs src_ip with dst_port -- confirm that
        # mixing is intentional.
        messg = '{} INFO [PostgreSQL Pot on {} ({})] {} from {}:{}'.format(
            event['timestamp'], event['sensor'], event['session'],
            event['operation'].capitalize(), event['src_ip'], event['dst_port']
        )
        if event['operation'].lower() == 'login':
            messg += ', username: "{}", password: "{}"'.format(event['username'], event['password'])
        messg += '.'
        event['message'] = messg
        event['ddsource'] = self.ddsource
        event['ddtags'] = self.ddtags
        event['hostname'] = self.hostname
        event['service'] = self.service
        self.postentry(event)

    def postentry(self, entry):
        """POST *entry* as JSON to the configured URL (fire-and-forget)."""
        base_headers = {
            b'Accept': [b'application/json'],
            b'Content-Type': [b'application/json'],
            b'DD-API-KEY': [to_bytes(self.api_key)],
        }
        headers = http_headers.Headers(base_headers)
        body = FileBodyProducer(BytesIO(to_bytes(dumps(entry, sort_keys=True))))
        self.agent.request(b'POST', to_bytes(self.url), headers, body)
|
|
74
|
+
|