atomicshop 2.16.10__py3-none-any.whl → 2.16.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of atomicshop might be problematic.
- atomicshop/__init__.py +1 -1
- atomicshop/basics/enums.py +2 -2
- atomicshop/basics/list_of_classes.py +29 -0
- atomicshop/dns.py +2 -4
- atomicshop/file_io/docxs.py +4 -4
- atomicshop/file_io/file_io.py +12 -0
- atomicshop/filesystem.py +265 -198
- atomicshop/mitm/config_static.py +7 -8
- atomicshop/mitm/connection_thread_worker.py +59 -39
- atomicshop/mitm/engines/__parent/parser___parent.py +0 -1
- atomicshop/mitm/engines/__parent/recorder___parent.py +5 -6
- atomicshop/mitm/engines/__parent/responder___parent.py +0 -1
- atomicshop/mitm/import_config.py +6 -4
- atomicshop/mitm/initialize_engines.py +6 -6
- atomicshop/mitm/message.py +1 -0
- atomicshop/mitm/{initialize_mitm_server.py → mitm_main.py} +57 -32
- atomicshop/mitm/recs_files.py +17 -17
- atomicshop/mitm/statistic_analyzer.py +2 -2
- atomicshop/ssh_remote.py +9 -9
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +1 -1
- atomicshop/wrappers/loggingw/reading.py +63 -100
- atomicshop/wrappers/pywin32w/wmis/helpers.py +5 -1
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +0 -32
- atomicshop/wrappers/socketw/dns_server.py +9 -10
- atomicshop/wrappers/socketw/exception_wrapper.py +5 -7
- atomicshop/wrappers/socketw/get_process.py +3 -3
- atomicshop/wrappers/socketw/receiver.py +3 -3
- atomicshop/wrappers/socketw/sender.py +1 -1
- atomicshop/wrappers/socketw/sni.py +1 -1
- atomicshop/wrappers/socketw/socket_server_tester.py +5 -5
- atomicshop/wrappers/winregw/__init__.py +0 -0
- atomicshop/wrappers/winregw/winreg_network.py +174 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/METADATA +1 -1
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/RECORD +37 -34
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/WHEEL +0 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/top_level.txt +0 -0
atomicshop/mitm/config_static.py
CHANGED
@@ -4,7 +4,7 @@ from dataclasses import dataclass
 from . import import_config
 
 
-SCRIPT_VERSION: str = '1.7.
+SCRIPT_VERSION: str = '1.7.3'
 
 
 # CONFIG = None
@@ -19,6 +19,7 @@ LIST_OF_BOOLEANS: list = [
     ('tcp', 'enable'),
     ('tcp', 'engines_usage'),
     ('tcp', 'server_response_mode'),
+    ('logrec', 'enable_request_response_recordings_in_logs'),
     ('certificates', 'default_server_certificate_usage'),
     ('certificates', 'sni_add_new_domains_to_default_server_certificate'),
     ('certificates', 'custom_server_certificate_usage'),
@@ -34,8 +35,7 @@ LIST_OF_BOOLEANS: list = [
 TOML_TO_STATIC_CATEGORIES: dict = {
     'dns': 'DNSServer',
     'tcp': 'TCPServer',
-    '
-    'recorder': 'Recorder',
+    'logrec': 'LogRec',
     'certificates': 'Certificates',
     'skip_extensions': 'SkipExtensions',
     'process_name': 'ProcessName'
@@ -101,13 +101,12 @@ class TCPServer:
 
 
 @dataclass
-class
+class LogRec:
     logs_path: str
-
-
-@dataclass
-class Recorder:
     recordings_path: str
+    enable_request_response_recordings_in_logs: bool
+
+    recordings_directory_name: str = 'recs'
 
 
 @dataclass
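
Taken together, these hunks fold the old `Recorder` category into a new `LogRec` dataclass and make the recordings directory a fixed sub-folder of the logs directory. A minimal sketch of the resulting configuration shape; the field names come from the diff, while the loader helper and sample values below are hypothetical:

```python
from dataclasses import dataclass


@dataclass
class LogRec:
    # Field names as introduced by this release.
    logs_path: str
    recordings_path: str
    enable_request_response_recordings_in_logs: bool

    recordings_directory_name: str = 'recs'


def logrec_from_toml(logrec_table: dict) -> LogRec:
    """Hypothetical helper showing how a [logrec] TOML table could map onto LogRec,
    mirroring the 'logrec' -> 'LogRec' entry in TOML_TO_STATIC_CATEGORIES."""
    return LogRec(
        logs_path=logrec_table['logs_path'],
        recordings_path='',  # derived later as logs_path + os.sep + recordings_directory_name
        enable_request_response_recordings_in_logs=bool(
            logrec_table['enable_request_response_recordings_in_logs']),
    )


print(logrec_from_toml({'logs_path': 'logs',
                        'enable_request_response_recordings_in_logs': True}))
```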

atomicshop/mitm/connection_thread_worker.py
CHANGED
@@ -24,7 +24,25 @@ def thread_worker_main(
         reference_module
 ):
     def output_statistics_csv_row():
-
+        # If there is no '.code' attribute in HTTPResponse, this means that this is not a HTTP message, so there is no
+        # status code.
+        try:
+            http_status_code: str = ','.join([str(x.code) for x in client_message.response_list_of_raw_decoded])
+        except AttributeError:
+            http_status_code: str = str()
+
+        # Same goes for the '.path' attribute, if it is not HTTP message then there will be no path.
+        try:
+            http_path: str = client_message.request_raw_decoded.path
+        except AttributeError:
+            http_path: str = str()
+
+        # Same goes for the '.command' attribute, if it is not HTTP message then there will be no command.
+        try:
+            http_command: str = client_message.request_raw_decoded.command
+        except AttributeError:
+            http_command: str = str()
+
         response_size_bytes = ','.join([str(len(x)) for x in client_message.response_list_of_raw_bytes])
 
         statistics_writer.write_row(
@@ -32,13 +50,13 @@ def thread_worker_main(
             tls_type=tls_type,
             tls_version=tls_version,
             protocol=client_message.protocol,
-            path=
-            status_code=
-            command=
+            path=http_path,
+            status_code=http_status_code,
+            command=http_command,
             request_time_sent=client_message.request_time_received,
             request_size_bytes=len(client_message.request_raw_bytes),
             response_size_bytes=response_size_bytes,
-            recorded_file_path=
+            recorded_file_path=client_message.recorded_file_path,
             process_cmd=process_commandline,
             error=str())
 
@@ -126,7 +144,7 @@ def thread_worker_main(
                 message = "There was an exception in HTTP Parsing module!"
                 print_api(
                     message, error_type=True, logger=network_logger, logger_method='critical',
-                    traceback_string=True
+                    traceback_string=True)
                 # Socket connection can be closed since we have a problem in current thread and break the loop
                 client_connection_boolean = False
                 break
@@ -160,10 +178,10 @@ def thread_worker_main(
                 message = "Exception in Parser"
                 print_api(
                     message, error_type=True, logger=parser.logger, logger_method='critical',
-                    traceback_string=True
+                    traceback_string=True)
                 print_api(
                     message, error_type=True, logger=network_logger, logger_method='critical',
-                    traceback_string=True
+                    traceback_string=True)
                 # At this point we can pass the exception and continue the script.
                 pass
                 # Socket connection can be closed since we have a problem in current thread and break the loop
@@ -193,10 +211,10 @@ def thread_worker_main(
                 message = "Exception in Responder"
                 print_api(
                     message, error_type=True, logger=responder.logger, logger_method='critical',
-                    traceback_string=True
+                    traceback_string=True)
                 print_api(
                     message, error_type=True, logger=network_logger, logger_method='critical',
-                    traceback_string=True
+                    traceback_string=True)
                 pass
                 # Socket connection can be closed since we have a problem in current thread and break the loop.
                 client_connection_boolean = False
@@ -220,8 +238,9 @@ def thread_worker_main(
                     service_client = socket_client.SocketClient(
                         service_name=client_message.server_name, service_port=client_message.destination_port,
                         tls=is_tls,
-                        dns_servers_list=
-
+                        dns_servers_list=(
+                            config_static.TCPServer.forwarding_dns_service_ipv4_list___only_for_localhost)
+                    )
                 # If we're not on localhost, then connect to domain directly.
                 else:
                     service_client = socket_client.SocketClient(
@@ -253,18 +272,19 @@ def thread_worker_main(
 
             # This is the point after the response mode check was finished.
             # Recording the message, doesn't matter what type of mode this is.
-
-
-
-
-
-
-            message
-
-
-
-
-
+            if config_static.LogRec.enable_request_response_recordings_in_logs:
+                try:
+                    recorded_file = recorder(class_client_message=client_message,
+                                             record_path=config_static.LogRec.recordings_path).record()
+                    client_message.recorded_file_path = recorded_file
+                except Exception:
+                    message = "Exception in Recorder"
+                    print_api(
+                        message, error_type=True, logger=recorder.logger, logger_method='critical',
+                        traceback_string=True)
+                    print_api(
+                        message, error_type=True, logger=network_logger, logger_method='critical',
+                        traceback_string=True)
 
             function_recorded = True
 
@@ -294,7 +314,7 @@ def thread_worker_main(
                     message = "Not sending anything to the client, since there is no response available"
                     print_api(
                         message, error_type=True, logger=network_logger, logger_method='critical',
-                        traceback_string=True
+                        traceback_string=True)
                     # Pass the exception
                     pass
                     # Break the while loop
@@ -312,18 +332,19 @@ def thread_worker_main(
         # === At this point while loop of 'client_connection_boolean' was broken =======================================
         # If recorder wasn't executed before, then execute it now
        if not function_recorded:
-
-
-
-
-
-
-            message
-
-
-
-
-
+            if config_static.LogRec.enable_request_response_recordings_in_logs:
+                try:
+                    recorded_file = recorder(
+                        class_client_message=client_message, record_path=config_static.LogRec.recordings_path).record()
+                    client_message.recorded_file_path = recorded_file
+                except Exception:
+                    message = "Exception in Recorder"
+                    print_api(
+                        message, error_type=True, logger=recorder.logger, logger_method='critical',
+                        traceback_string=True)
+                    print_api(
+                        message, error_type=True, logger=network_logger, logger_method='critical',
+                        traceback_string=True)
 
         # Save statistics file.
         output_statistics_csv_row()
@@ -343,5 +364,4 @@ def thread_worker_main(
     except Exception:
         message = "Undocumented exception in thread worker"
         print_api(
-            message, error_type=True, logger=network_logger, logger_method='critical',
-            traceback_string=True, oneline=True)
+            message, error_type=True, logger=network_logger, logger_method='critical', traceback_string=True)
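
The new statistics code never assumes a parsed HTTP object: each of `.code`, `.path` and `.command` is read inside `try/except AttributeError`, so non-HTTP traffic simply produces empty CSV columns. A standalone sketch of that pattern (the stand-in class is not a package object):

```python
class FakeHttpResponse:
    """Stand-in for a parsed HTTP response; real traffic may be raw bytes instead."""
    code = 200


def join_status_codes(responses: list) -> str:
    # Mirrors the new pattern: fall back to an empty string when the payload is not HTTP.
    try:
        return ','.join(str(x.code) for x in responses)
    except AttributeError:
        return ''


print(join_status_codes([FakeHttpResponse(), FakeHttpResponse()]))  # '200,200'
print(join_status_codes([b'\x16\x03\x01\x00']))                     # '' (e.g. a raw TLS record)
```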

atomicshop/mitm/engines/__parent/recorder___parent.py
CHANGED
@@ -3,8 +3,8 @@ from datetime import datetime
 
 from ...shared_functions import build_module_names, create_custom_logger, get_json
 from ... import message, recs_files
-from ....
-from .... import
+from .... import filesystem, urls
+from ....file_io import file_io
 
 
 # The class that is responsible for Recording Requests / Responses.
@@ -52,7 +52,7 @@ class RecorderParent:
         # This will happen if the message is not HTTP.
         try:
             # Parse the url to components.
-            http_path_parsed = url_parser(self.class_client_message.request_raw_decoded.path)
+            http_path_parsed = urls.url_parser(self.class_client_message.request_raw_decoded.path)
             # Get only directories.
             http_path_directories_string = '-'.join(http_path_parsed['directories'])
             # Add '_' character before 'http_path' to look better on the file name.
@@ -64,7 +64,7 @@ class RecorderParent:
         # If HTTP Path is not defined, 'http_path' will be empty, and it will not interfere with file name.
         self.record_file_path: str = \
             self.engine_record_path + os.sep + \
-            day_time_format + "_" + self.class_client_message.server_name +
+            day_time_format + "_" + self.class_client_message.server_name + self.file_extension
 
     def convert_messages(self):
         """
@@ -91,8 +91,7 @@ class RecorderParent:
         record_message = get_json(self.class_client_message)
 
         # Since we already dumped the object to dictionary string, we'll just save the object to regular file.
-
-        output_file.write(record_message)
+        file_io.write_file(record_message, self.record_file_path, enable_long_file_path=True)
 
         self.logger.info(f"Recorded to file: {self.record_file_path}")
 
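
Writing the recording now goes through `file_io.write_file(..., enable_long_file_path=True)` instead of a plain file handle, presumably so file names built from long hostnames and URL paths do not hit the Windows 260-character limit. A rough sketch of what such a helper typically does; the real atomicshop implementation may differ:

```python
import os


def write_file(content: str, file_path: str, enable_long_file_path: bool = False) -> None:
    # Sketch only: on Windows, prefix the absolute path with '\\?\' so writes
    # beyond MAX_PATH (260 characters) succeed, then write the text normally.
    if enable_long_file_path and os.name == 'nt' and not file_path.startswith('\\\\?\\'):
        file_path = '\\\\?\\' + os.path.abspath(file_path)
    with open(file_path, 'w', encoding='utf-8') as output_file:
        output_file.write(content)
```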
atomicshop/mitm/import_config.py
CHANGED
@@ -1,3 +1,4 @@
+import os
 from pathlib import Path
 
 from ..print_api import print_api
@@ -131,13 +132,14 @@ def manipulations_after_import():
     config_static.SkipExtensions.SKIP_EXTENSION_ID_LIST = skip_extensions
 
     # If the paths are relative, convert them to absolute paths.
-    config_static.
-    config_static.
-    config_static.Recorder.recordings_path = filesystem.check_absolute_path___add_full(
-        config_static.Recorder.recordings_path, config_static.MainConfig.SCRIPT_DIRECTORY)
+    config_static.LogRec.logs_path = filesystem.check_absolute_path___add_full(
+        config_static.LogRec.logs_path, config_static.MainConfig.SCRIPT_DIRECTORY)
     config_static.Certificates.custom_server_certificate_path = filesystem.check_absolute_path___add_full(
         config_static.Certificates.custom_server_certificate_path, config_static.MainConfig.SCRIPT_DIRECTORY)
 
+    config_static.LogRec.recordings_path = (
+        config_static.LogRec.logs_path + os.sep + config_static.LogRec.recordings_directory_name)
+
     # At this point the user that sets the config can set it to null or empty string ''. We will make sure
     # that the path is None if it's empty.
     if config_static.Certificates.custom_private_key_path:
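
In short, `recordings_path` is no longer configured on its own: it is derived from the (now absolute) `logs_path` plus the fixed `recordings_directory_name`. A small illustration with made-up values, approximating `check_absolute_path___add_full` with `os.path`:

```python
import os

script_directory = os.path.abspath('mitm')   # stand-in for MainConfig.SCRIPT_DIRECTORY
logs_path = 'logs'                           # relative value as it might appear in the config file

# Roughly what check_absolute_path___add_full does: prefix relative paths with the script directory.
if not os.path.isabs(logs_path):
    logs_path = script_directory + os.sep + logs_path

recordings_directory_name = 'recs'
recordings_path = logs_path + os.sep + recordings_directory_name
print(recordings_path)                       # e.g. <current dir>/mitm/logs/recs
```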

atomicshop/mitm/initialize_engines.py
CHANGED
@@ -52,12 +52,12 @@ class ModuleCategory:
             raise ValueError(f"Engine Configuration file doesn't contain any domains: {engine_config_file_path}")
 
         # Full path to file
-        self.parser_file_path = filesystem.
-            engine_directory_path, file_name_check_pattern=configuration_data['parser_file'])[0]
-        self.responder_file_path = filesystem.
-            engine_directory_path, file_name_check_pattern=configuration_data['responder_file'])[0]
-        self.recorder_file_path = filesystem.
-            engine_directory_path, file_name_check_pattern=configuration_data['recorder_file'])[0]
+        self.parser_file_path = filesystem.get_paths_from_directory(
+            engine_directory_path, get_file=True, file_name_check_pattern=configuration_data['parser_file'])[0].path
+        self.responder_file_path = filesystem.get_paths_from_directory(
+            engine_directory_path, get_file=True, file_name_check_pattern=configuration_data['responder_file'])[0].path
+        self.recorder_file_path = filesystem.get_paths_from_directory(
+            engine_directory_path, get_file=True, file_name_check_pattern=configuration_data['recorder_file'])[0].path
 
     def initialize_engine(self, logs_path: str, logger=None, reference_general: bool = False, **kwargs):
         if not reference_general:
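
Call sites now pass `get_file=True` and read `.path` off the first result, which implies `filesystem.get_paths_from_directory` returns path objects rather than plain strings. A toy stand-in to show why the `[0].path` suffix appears; the `PathEntry` class and the stub are assumptions, not the library's actual types:

```python
from dataclasses import dataclass


@dataclass
class PathEntry:
    """Hypothetical result object; the real filesystem module may expose more fields."""
    path: str
    name: str


def get_paths_from_directory_stub(directory_path: str, get_file: bool = False,
                                  file_name_check_pattern: str = '*') -> list[PathEntry]:
    # Stub: pretend exactly one file matched the pattern.
    matched_name = file_name_check_pattern.replace('*', 'example')
    return [PathEntry(path=f'{directory_path}/{matched_name}', name=matched_name)]


parser_file_path = get_paths_from_directory_stub(
    '/engines/demo', get_file=True, file_name_check_pattern='parser_*.py')[0].path
print(parser_file_path)  # /engines/demo/parser_example.py
```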
atomicshop/mitm/message.py
CHANGED

atomicshop/mitm/{initialize_mitm_server.py → mitm_main.py}
CHANGED
@@ -17,18 +17,22 @@ from .connection_thread_worker import thread_worker_main
 from . import config_static, recs_files
 
 
+NETWORK_INTERFACE_IS_DYNAMIC: bool = bool()
+NETWORK_INTERFACE_IPV4_ADDRESS_LIST: list[str] = list()
+
+
 def exit_cleanup():
-    if
+    if permissions.is_admin():
         is_dns_dynamic, current_dns_gateway = dns.get_default_dns_gateway()
         print_api(f'Current DNS Gateway: {current_dns_gateway}')
 
-    if
-
-
-
+        if is_dns_dynamic != NETWORK_INTERFACE_IS_DYNAMIC or \
+                (not is_dns_dynamic and current_dns_gateway != NETWORK_INTERFACE_IPV4_ADDRESS_LIST):
+            dns.set_connection_dns_gateway_dynamic(use_default_connection=True)
+            print_api("Returned default DNS gateway...", color='blue')
 
 
-def
+def mitm_server_main(config_file_path: str):
     on_exit.register_exit_handler(exit_cleanup)
 
     # Main function should return integer with error code, 0 is successful.
@@ -42,20 +46,22 @@ def initialize_mitm_server(config_file_path: str):
         return result
 
     # Create folders.
-    filesystem.create_directory(config_static.
-
+    filesystem.create_directory(config_static.LogRec.logs_path)
+
+    if config_static.LogRec.enable_request_response_recordings_in_logs:
+        filesystem.create_directory(config_static.LogRec.recordings_path)
+        # Compress recordings of the previous days if there are any.
+        recs_files.recs_archiver_in_process(config_static.LogRec.recordings_path)
+
     if config_static.Certificates.sni_get_server_certificate_from_server_socket:
         filesystem.create_directory(
             config_static.Certificates.sni_server_certificate_from_server_socket_download_directory)
 
-    # Compress recordings of the previous days if there are any.
-    recs_files.recs_archiver_in_process(config_static.Recorder.recordings_path)
-
     # Create a logger that will log messages to file, Initiate System logger.
     logger_name = "system"
     system_logger = loggingw.create_logger(
         logger_name=logger_name,
-        file_path=f"{config_static.
+        file_path=f"{config_static.LogRec.logs_path}{os.sep}{logger_name}.txt",
         add_stream=True,
         add_timedfile=True,
         formatter_streamhandler='DEFAULT',
@@ -68,8 +74,9 @@ def initialize_mitm_server(config_file_path: str):
     system_logger.info(f"Python Version: {get_current_python_version_string()}")
     system_logger.info(f"Script Version: {config_static.SCRIPT_VERSION}")
     system_logger.info(f"Atomic Workshop Version: {atomicshop.__version__}")
-    system_logger.info(f"Log folder: {config_static.
-
+    system_logger.info(f"Log folder: {config_static.LogRec.logs_path}")
+    if config_static.LogRec.enable_request_response_recordings_in_logs:
+        system_logger.info(f"Recordings folder for Requests/Responses: {config_static.LogRec.recordings_path}")
     system_logger.info(f"Loaded system logger: {system_logger}")
 
     system_logger.info(f"TCP Server Target IP: {config_static.DNSServer.target_tcp_server_ipv4}")
@@ -103,8 +110,9 @@ def initialize_mitm_server(config_file_path: str):
     system_logger.info("Importing engine modules.")
 
     # Get full paths of all the 'engine_config.ini' files.
-    engine_config_path_list = filesystem.
+    engine_config_path_list = filesystem.get_paths_from_directory(
         directory_path=config_static.MainConfig.ENGINES_DIRECTORY_PATH,
+        get_file=True,
         file_name_check_pattern=config_static.MainConfig.ENGINE_CONFIG_FILE_NAME)
 
     # Iterate through all the 'engine_config.ini' file paths.
@@ -113,8 +121,8 @@ def initialize_mitm_server(config_file_path: str):
     for engine_config_path in engine_config_path_list:
         # Initialize engine.
         current_module = ModuleCategory(config_static.MainConfig.SCRIPT_DIRECTORY)
-        current_module.fill_engine_fields_from_config(engine_config_path)
-        current_module.initialize_engine(logs_path=config_static.
+        current_module.fill_engine_fields_from_config(engine_config_path.path)
+        current_module.initialize_engine(logs_path=config_static.LogRec.logs_path,
                                          logger=system_logger)
 
         # Extending the full engine domain list with this list.
@@ -125,7 +133,7 @@ def initialize_mitm_server(config_file_path: str):
     # ==== Initialize Reference Module =============================================================================
     reference_module = ModuleCategory(config_static.MainConfig.SCRIPT_DIRECTORY)
     reference_module.fill_engine_fields_from_general_reference(config_static.MainConfig.ENGINES_DIRECTORY_PATH)
-    reference_module.initialize_engine(logs_path=config_static.
+    reference_module.initialize_engine(logs_path=config_static.LogRec.logs_path,
                                        logger=system_logger, stdout=False, reference_general=True)
     # === EOF Initialize Reference Module ==========================================================================
     # === Engine logging ===========================================================================================
@@ -151,8 +159,12 @@ def initialize_mitm_server(config_file_path: str):
         message = f"[*] Engine domains found, but the DNS routing is set not to use them for routing."
         print_api(message, color="yellow", logger=system_logger)
     elif not engines_list and config_static.DNSServer.resolve_to_tcp_server_only_engine_domains:
-
-
+        error_message = (
+            f"No engines were found in: [{config_static.MainConfig.ENGINES_DIRECTORY_PATH}]\n"
+            f"But the DNS routing is set to use them for routing.\n"
+            f"Please check your DNS configuration in the 'config.ini' file.")
+        print_api(error_message, color="red")
+        return 1
 
     if config_static.DNSServer.resolve_to_tcp_server_all_domains:
         print_api("All domains will be routed by the DNS server to Built-in TCP Server.", logger=system_logger)
@@ -175,8 +187,12 @@ def initialize_mitm_server(config_file_path: str):
         message = f"Engines found, and the TCP server is set to use them for processing."
         print_api(message, logger=system_logger)
     elif not engines_list and config_static.TCPServer.engines_usage:
-
-
+        error_message = (
+            f"No engines were found in: [{config_static.MainConfig.ENGINES_DIRECTORY_PATH}]\n"
+            f"But the TCP server is set to use them for processing.\n"
+            f"Please check your TCP configuration in the 'config.ini' file.")
+        print_api(error_message, color="red")
+        return 1
     else:
         print_api("TCP Server is disabled.", logger=system_logger, color="yellow")
 
@@ -188,7 +204,7 @@ def initialize_mitm_server(config_file_path: str):
     network_logger_name = "network"
     network_logger = loggingw.create_logger(
         logger_name=network_logger_name,
-        directory_path=config_static.
+        directory_path=config_static.LogRec.logs_path,
         add_stream=True,
         add_timedfile=True,
         formatter_streamhandler='DEFAULT',
@@ -216,7 +232,7 @@ def initialize_mitm_server(config_file_path: str):
             # Passing the engine domain list to DNS server to work with.
             # 'list' function re-initializes the current list, or else it will be the same instance object.
             tcp_resolve_domain_list=list(config_static.Certificates.domains_all_times),
-            log_directory_path=config_static.
+            log_directory_path=config_static.LogRec.logs_path,
             offline_mode=config_static.DNSServer.offline_mode,
             resolve_to_tcp_server_only_tcp_resolve_domains=(
                 config_static.DNSServer.resolve_to_tcp_server_only_engine_domains),
@@ -268,7 +284,7 @@ def initialize_mitm_server(config_file_path: str):
         ssh_pass=config_static.ProcessName.ssh_pass,
         ssh_script_to_execute=config_static.ProcessName.ssh_script_to_execute,
         logger=listener_logger,
-        statistics_logs_directory=config_static.
+        statistics_logs_directory=config_static.LogRec.logs_path,
         forwarding_dns_service_ipv4_list___only_for_localhost=(
             config_static.TCPServer.forwarding_dns_service_ipv4_list___only_for_localhost),
         skip_extension_id_list=config_static.SkipExtensions.SKIP_EXTENSION_ID_LIST,
@@ -298,11 +314,18 @@ def initialize_mitm_server(config_file_path: str):
             set_dns_gateway = True
 
     if set_dns_gateway:
-        #
-
-
-
-
+        # Get current network interface state.
+        global NETWORK_INTERFACE_IS_DYNAMIC, NETWORK_INTERFACE_IPV4_ADDRESS_LIST
+        NETWORK_INTERFACE_IS_DYNAMIC, NETWORK_INTERFACE_IPV4_ADDRESS_LIST = dns.get_default_dns_gateway()
+
+        # Set the DNS gateway to the specified one only if the DNS gateway is dynamic or it is static but different
+        # from the one specified in the configuration file.
+        if (NETWORK_INTERFACE_IS_DYNAMIC or (not NETWORK_INTERFACE_IS_DYNAMIC and
+                                             NETWORK_INTERFACE_IPV4_ADDRESS_LIST != dns_gateway_server_list)):
+            dns.set_connection_dns_gateway_static(
+                dns_servers=dns_gateway_server_list,
+                use_default_connection=True
+            )
 
     # General exception handler will catch all the exceptions that occurred in the threads and write it to the log.
     # noinspection PyBroadException
@@ -319,13 +342,14 @@ def initialize_mitm_server(config_file_path: str):
            socket_thread.start()
        except Exception:
            message = f"Unhandled Exception occurred in 'loop_for_incoming_sockets' function"
-           print_api(message, error_type=True, color="red", logger=network_logger, traceback_string=True
+           print_api(message, error_type=True, color="red", logger=network_logger, traceback_string=True)
 
     # Compress recordings each day in a separate process.
     recs_archiver_thread = threading.Thread(target=_loop_at_midnight_recs_archive)
     recs_archiver_thread.daemon = True
     recs_archiver_thread.start()
 
+    if config_static.DNSServer.enable or config_static.TCPServer.enable:
        # This is needed for Keyboard Exception.
        while True:
            time.sleep(1)
@@ -338,7 +362,8 @@ def _loop_at_midnight_recs_archive():
         current_date = datetime.datetime.now().strftime('%d')
         # If it's midnight, start the archiving process.
         if current_date != previous_date:
-
+            if config_static.LogRec.enable_request_response_recordings_in_logs:
+                recs_files.recs_archiver_in_process(config_static.LogRec.recordings_path)
             # Update the previous date.
             previous_date = current_date
         # Sleep for 1 minute.
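
The renamed entry point now snapshots the adapter's DNS state into the two new module globals before forcing a static gateway, and `exit_cleanup` only reverts the adapter if its state still differs from that snapshot. A self-contained sketch of the same save/override/restore flow, using `atexit` and stand-in functions instead of atomicshop's `dns` and `on_exit` helpers:

```python
import atexit

# Fake adapter state so the sketch is self-contained; the real code queries Windows.
_adapter = {'dynamic': True, 'servers': []}

SAVED_IS_DYNAMIC: bool = False
SAVED_DNS_SERVERS: list[str] = []


def get_dns_state() -> tuple[bool, list[str]]:
    return _adapter['dynamic'], list(_adapter['servers'])


def set_dns_static(servers: list[str]) -> None:
    _adapter.update(dynamic=False, servers=list(servers))
    print(f'DNS gateway set statically to {servers}')


def set_dns_dynamic() -> None:
    _adapter.update(dynamic=True, servers=[])
    print('DNS gateway restored to dynamic')


def exit_cleanup() -> None:
    is_dynamic, servers = get_dns_state()
    # Revert only if the run left the adapter in a different state than the snapshot.
    if is_dynamic != SAVED_IS_DYNAMIC or (not is_dynamic and servers != SAVED_DNS_SERVERS):
        set_dns_dynamic()


SAVED_IS_DYNAMIC, SAVED_DNS_SERVERS = get_dns_state()   # snapshot before changing anything
atexit.register(exit_cleanup)                           # the package uses on_exit.register_exit_handler
set_dns_static(['127.0.0.1'])                           # point the adapter at the local MITM DNS server
```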
atomicshop/mitm/recs_files.py
CHANGED
@@ -22,43 +22,44 @@ def recs_archiver(recs_directory: str) -> list:
     today_date_string = datetime.datetime.now().strftime(REC_FILE_DATE_FORMAT)
 
     # There should not be recording json files in recs root.
-    files_in_recs_root: list = filesystem.
-        recs_directory, file_name_check_pattern='
+    files_in_recs_root: list = filesystem.get_paths_from_directory(
+        recs_directory, get_file=True, file_name_check_pattern='*\\.json', recursive=False)
     if files_in_recs_root:
         raise NotImplementedError("The files in recs root directory are not implemented yet.")
 
     # Each engine should have its own directory inside recordings. We will find all the directories inside recs folder.
-    directory_paths_in_recs: list = filesystem.
+    directory_paths_in_recs: list = filesystem.get_paths_from_directory(
+        recs_directory, get_directory=True, recursive=False)
 
     file_list_per_directory: list = list()
     for directory_path in directory_paths_in_recs:
-        all_recs_files =
-
-
-
+        all_recs_files = filesystem.get_paths_from_directory(
+            directory_path=directory_path.path,
+            get_file=True,
+            file_name_check_pattern='*.json',
+            datetime_format=REC_FILE_DATE_FORMAT,
+            recursive=False
         )
         file_list_per_directory.append((directory_path, all_recs_files))
 
     archived_files: list = list()
     for directory_path, all_recs_files in file_list_per_directory:
-
-        for recs_file_dict in all_recs_files:
+        for recs_atomic_path in all_recs_files:
             # We don't need to archive today's files.
-            if today_date_string ==
+            if today_date_string == recs_atomic_path.datetime_string:
                 continue
 
-            target_directory_path: str = f"{directory_path}{os.sep}{
-            if target_directory_path not in archive_directories:
-                archive_directories.append(target_directory_path)
-
+            target_directory_path: str = f"{directory_path.path}{os.sep}{recs_atomic_path.datetime_string}"
             filesystem.create_directory(target_directory_path)
             filesystem.move_file(
-
+                recs_atomic_path.path, f'{target_directory_path}{os.sep}{recs_atomic_path.name}')
 
         # Archive directories.
+        archive_directories: list = filesystem.get_paths_from_directory(
+            directory_path.path, get_directory=True, recursive=False)
         for archive_directory in archive_directories:
             archived_file: str = zips.archive_directory(
-                archive_directory, remove_original=True, include_root_directory=True)
+                archive_directory.path, remove_original=True, include_root_directory=True)
             archived_files.append(archived_file)
 
     return archived_files
@@ -71,4 +72,3 @@ def recs_archiver_in_process(recs_directory: str):
 
     process = multiprocessing.Process(target=recs_archiver, args=(recs_directory,))
     process.start()
-
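
Functionally the archiver still does the same job: per engine directory, recordings from previous days are moved into a per-date sub-folder which is then zipped, now driven by the richer path objects. A standalone approximation using only the standard library; the '%Y_%m_%d' date stamp and the file layout are assumptions, not the package's actual REC_FILE_DATE_FORMAT:

```python
import datetime
import os
import shutil

REC_FILE_DATE_FORMAT = '%Y_%m_%d'  # assumed format of the date stamp in recording file names


def archive_old_recordings(engine_dir: str) -> list[str]:
    today = datetime.datetime.now().strftime(REC_FILE_DATE_FORMAT)

    # Move every recording that is not from today into a folder named after its date stamp.
    for name in os.listdir(engine_dir):
        if not name.endswith('.json'):
            continue
        file_date = name[:len(today)]
        if file_date == today:
            continue
        day_dir = os.path.join(engine_dir, file_date)
        os.makedirs(day_dir, exist_ok=True)
        shutil.move(os.path.join(engine_dir, name), os.path.join(day_dir, name))

    # Zip each per-date folder and remove the original, like archive_directory(remove_original=True).
    archives: list[str] = []
    day_dirs = [entry.path for entry in os.scandir(engine_dir) if entry.is_dir()]
    for day_dir in day_dirs:
        archives.append(shutil.make_archive(day_dir, 'zip', root_dir=engine_dir,
                                            base_dir=os.path.basename(day_dir)))
        shutil.rmtree(day_dir)
    return archives
```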

atomicshop/mitm/statistic_analyzer.py
CHANGED
@@ -28,9 +28,9 @@ def analyze(main_file_path: str):
     summary_path: str = filesystem.check_absolute_path___add_full(config['report_file_path'], script_directory)
 
     # Get the content from statistics files.
+    log_file_path_pattern: str = f"{config['statistic_files_path']}{os.sep}statistics.csv"
     statistics_content: list = reading.get_all_log_files_into_list(
-
-        file_name_pattern='statistics*.csv',
+        log_file_path=log_file_path_pattern,
         log_type='csv'
     )
 
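
The analyzer now hands `reading.get_all_log_files_into_list` a concrete `log_file_path` built from the configured directory instead of a directory plus a separate `file_name_pattern`. A rough standalone equivalent of reading such a statistics CSV (and any rotated siblings) with the standard library; the real reading module's behaviour is only assumed here:

```python
import csv
import glob
import os


def read_statistics_rows(statistic_files_path: str) -> list[dict]:
    log_file_path = f"{statistic_files_path}{os.sep}statistics.csv"
    rows: list[dict] = []
    # Pick up 'statistics.csv' plus rotated siblings such as 'statistics.csv.2024-05-01'.
    for file_path in sorted(glob.glob(log_file_path + '*')):
        with open(file_path, newline='', encoding='utf-8') as f:
            rows.extend(csv.DictReader(f))
    return rows
```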