atomicshop 2.15.11__py3-none-any.whl → 3.10.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atomicshop/__init__.py +1 -1
- atomicshop/{addons/mains → a_mains}/FACT/update_extract.py +3 -2
- atomicshop/a_mains/dns_gateway_setting.py +11 -0
- atomicshop/a_mains/get_local_tcp_ports.py +85 -0
- atomicshop/a_mains/github_wrapper.py +11 -0
- atomicshop/a_mains/install_ca_certificate.py +172 -0
- atomicshop/a_mains/process_from_port.py +119 -0
- atomicshop/a_mains/set_default_dns_gateway.py +90 -0
- atomicshop/a_mains/update_config_toml.py +38 -0
- atomicshop/basics/ansi_escape_codes.py +3 -1
- atomicshop/basics/argparse_template.py +2 -0
- atomicshop/basics/booleans.py +27 -30
- atomicshop/basics/bytes_arrays.py +43 -0
- atomicshop/basics/classes.py +149 -1
- atomicshop/basics/enums.py +2 -2
- atomicshop/basics/exceptions.py +5 -1
- atomicshop/basics/list_of_classes.py +29 -0
- atomicshop/basics/multiprocesses.py +374 -50
- atomicshop/basics/strings.py +72 -3
- atomicshop/basics/threads.py +14 -0
- atomicshop/basics/tracebacks.py +13 -3
- atomicshop/certificates.py +153 -52
- atomicshop/config_init.py +11 -6
- atomicshop/console_user_response.py +7 -14
- atomicshop/consoles.py +9 -0
- atomicshop/datetimes.py +1 -1
- atomicshop/diff_check.py +3 -3
- atomicshop/dns.py +128 -3
- atomicshop/etws/_pywintrace_fix.py +17 -0
- atomicshop/etws/trace.py +40 -42
- atomicshop/etws/traces/trace_dns.py +56 -44
- atomicshop/etws/traces/trace_tcp.py +130 -0
- atomicshop/file_io/csvs.py +27 -5
- atomicshop/file_io/docxs.py +34 -17
- atomicshop/file_io/file_io.py +31 -17
- atomicshop/file_io/jsons.py +49 -0
- atomicshop/file_io/tomls.py +139 -0
- atomicshop/filesystem.py +616 -291
- atomicshop/get_process_list.py +3 -3
- atomicshop/http_parse.py +149 -93
- atomicshop/ip_addresses.py +6 -1
- atomicshop/mitm/centered_settings.py +132 -0
- atomicshop/mitm/config_static.py +207 -0
- atomicshop/mitm/config_toml_editor.py +55 -0
- atomicshop/mitm/connection_thread_worker.py +875 -357
- atomicshop/mitm/engines/__parent/parser___parent.py +4 -17
- atomicshop/mitm/engines/__parent/recorder___parent.py +108 -51
- atomicshop/mitm/engines/__parent/requester___parent.py +116 -0
- atomicshop/mitm/engines/__parent/responder___parent.py +75 -114
- atomicshop/mitm/engines/__reference_general/parser___reference_general.py +10 -7
- atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +5 -5
- atomicshop/mitm/engines/__reference_general/requester___reference_general.py +47 -0
- atomicshop/mitm/engines/__reference_general/responder___reference_general.py +95 -13
- atomicshop/mitm/engines/create_module_template.py +58 -14
- atomicshop/mitm/import_config.py +359 -139
- atomicshop/mitm/initialize_engines.py +160 -80
- atomicshop/mitm/message.py +64 -23
- atomicshop/mitm/mitm_main.py +892 -0
- atomicshop/mitm/recs_files.py +183 -0
- atomicshop/mitm/shared_functions.py +4 -10
- atomicshop/mitm/ssh_tester.py +82 -0
- atomicshop/mitm/statistic_analyzer.py +136 -40
- atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +265 -83
- atomicshop/monitor/checks/dns.py +1 -1
- atomicshop/networks.py +671 -0
- atomicshop/on_exit.py +39 -9
- atomicshop/package_mains_processor.py +84 -0
- atomicshop/permissions/permissions.py +22 -0
- atomicshop/permissions/ubuntu_permissions.py +239 -0
- atomicshop/permissions/win_permissions.py +33 -0
- atomicshop/print_api.py +24 -42
- atomicshop/process.py +24 -6
- atomicshop/process_poller/process_pool.py +0 -1
- atomicshop/process_poller/simple_process_pool.py +204 -5
- atomicshop/python_file_patcher.py +1 -1
- atomicshop/python_functions.py +27 -75
- atomicshop/speech_recognize.py +8 -0
- atomicshop/ssh_remote.py +158 -172
- atomicshop/system_resource_monitor.py +61 -47
- atomicshop/system_resources.py +8 -8
- atomicshop/tempfiles.py +1 -2
- atomicshop/urls.py +6 -0
- atomicshop/venvs.py +28 -0
- atomicshop/versioning.py +27 -0
- atomicshop/web.py +98 -27
- atomicshop/web_apis/google_custom_search.py +44 -0
- atomicshop/web_apis/google_llm.py +188 -0
- atomicshop/websocket_parse.py +450 -0
- atomicshop/wrappers/certauthw/certauth.py +1 -0
- atomicshop/wrappers/cryptographyw.py +29 -8
- atomicshop/wrappers/ctyping/etw_winapi/const.py +97 -47
- atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py +178 -49
- atomicshop/wrappers/ctyping/file_details_winapi.py +67 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
- atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +2 -2
- atomicshop/wrappers/ctyping/setup_device.py +466 -0
- atomicshop/wrappers/ctyping/win_console.py +39 -0
- atomicshop/wrappers/dockerw/dockerw.py +113 -2
- atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
- atomicshop/wrappers/elasticsearchw/elastic_infra.py +75 -0
- atomicshop/wrappers/elasticsearchw/elasticsearchw.py +2 -20
- atomicshop/wrappers/factw/get_file_data.py +12 -5
- atomicshop/wrappers/factw/install/install_after_restart.py +89 -5
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +20 -14
- atomicshop/wrappers/githubw.py +537 -54
- atomicshop/wrappers/loggingw/consts.py +1 -1
- atomicshop/wrappers/loggingw/filters.py +23 -0
- atomicshop/wrappers/loggingw/formatters.py +12 -0
- atomicshop/wrappers/loggingw/handlers.py +214 -107
- atomicshop/wrappers/loggingw/loggers.py +19 -0
- atomicshop/wrappers/loggingw/loggingw.py +860 -22
- atomicshop/wrappers/loggingw/reading.py +134 -112
- atomicshop/wrappers/mongodbw/mongo_infra.py +31 -0
- atomicshop/wrappers/mongodbw/mongodbw.py +1324 -36
- atomicshop/wrappers/netshw.py +271 -0
- atomicshop/wrappers/playwrightw/engine.py +34 -19
- atomicshop/wrappers/playwrightw/infra.py +5 -0
- atomicshop/wrappers/playwrightw/javascript.py +7 -3
- atomicshop/wrappers/playwrightw/keyboard.py +14 -0
- atomicshop/wrappers/playwrightw/scenarios.py +172 -5
- atomicshop/wrappers/playwrightw/waits.py +9 -7
- atomicshop/wrappers/powershell_networking.py +80 -0
- atomicshop/wrappers/psutilw/processes.py +37 -1
- atomicshop/wrappers/psutilw/psutil_networks.py +85 -0
- atomicshop/wrappers/pyopensslw.py +9 -2
- atomicshop/wrappers/pywin32w/cert_store.py +116 -0
- atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +3 -105
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +3 -57
- atomicshop/wrappers/pywin32w/wmis/msft_netipaddress.py +113 -0
- atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +259 -0
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +112 -0
- atomicshop/wrappers/pywin32w/wmis/wmi_helpers.py +236 -0
- atomicshop/wrappers/socketw/accepter.py +21 -7
- atomicshop/wrappers/socketw/certificator.py +216 -150
- atomicshop/wrappers/socketw/creator.py +190 -50
- atomicshop/wrappers/socketw/dns_server.py +491 -182
- atomicshop/wrappers/socketw/exception_wrapper.py +45 -52
- atomicshop/wrappers/socketw/process_getter.py +86 -0
- atomicshop/wrappers/socketw/receiver.py +144 -102
- atomicshop/wrappers/socketw/sender.py +65 -35
- atomicshop/wrappers/socketw/sni.py +334 -165
- atomicshop/wrappers/socketw/socket_base.py +134 -0
- atomicshop/wrappers/socketw/socket_client.py +137 -95
- atomicshop/wrappers/socketw/socket_server_tester.py +11 -7
- atomicshop/wrappers/socketw/socket_wrapper.py +717 -116
- atomicshop/wrappers/socketw/ssl_base.py +15 -14
- atomicshop/wrappers/socketw/statistics_csv.py +148 -17
- atomicshop/wrappers/sysmonw.py +1 -1
- atomicshop/wrappers/ubuntu_terminal.py +65 -26
- atomicshop/wrappers/win_auditw.py +189 -0
- atomicshop/wrappers/winregw/__init__.py +0 -0
- atomicshop/wrappers/winregw/winreg_installed_software.py +58 -0
- atomicshop/wrappers/winregw/winreg_network.py +232 -0
- {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/METADATA +31 -51
- atomicshop-3.10.5.dist-info/RECORD +306 -0
- {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/WHEEL +1 -1
- atomicshop/_basics_temp.py +0 -101
- atomicshop/a_installs/win/fibratus.py +0 -9
- atomicshop/a_installs/win/mongodb.py +0 -9
- atomicshop/a_installs/win/pycharm.py +0 -9
- atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
- atomicshop/addons/a_setup_scripts/install_pywintrace_0.3.cmd +0 -2
- atomicshop/addons/mains/__pycache__/install_fibratus_windows.cpython-312.pyc +0 -0
- atomicshop/addons/mains/__pycache__/msi_unpacker.cpython-312.pyc +0 -0
- atomicshop/addons/mains/install_docker_rootless_ubuntu.py +0 -11
- atomicshop/addons/mains/install_docker_ubuntu_main_sudo.py +0 -11
- atomicshop/addons/mains/install_elastic_search_and_kibana_ubuntu.py +0 -10
- atomicshop/addons/mains/install_wsl_ubuntu_lts_admin.py +0 -9
- atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
- atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
- atomicshop/addons/package_setup/Setup.cmd +0 -7
- atomicshop/archiver/_search_in_zip.py +0 -189
- atomicshop/archiver/archiver.py +0 -34
- atomicshop/archiver/search_in_archive.py +0 -250
- atomicshop/archiver/sevenz_app_w.py +0 -86
- atomicshop/archiver/sevenzs.py +0 -44
- atomicshop/archiver/zips.py +0 -293
- atomicshop/file_types.py +0 -24
- atomicshop/mitm/config_editor.py +0 -37
- atomicshop/mitm/engines/create_module_template_example.py +0 -13
- atomicshop/mitm/initialize_mitm_server.py +0 -268
- atomicshop/pbtkmultifile_argparse.py +0 -88
- atomicshop/permissions.py +0 -151
- atomicshop/script_as_string_processor.py +0 -38
- atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
- atomicshop/ssh_scripts/process_from_port.py +0 -27
- atomicshop/wrappers/_process_wrapper_curl.py +0 -27
- atomicshop/wrappers/_process_wrapper_tar.py +0 -21
- atomicshop/wrappers/dockerw/install_docker.py +0 -209
- atomicshop/wrappers/elasticsearchw/infrastructure.py +0 -265
- atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -232
- atomicshop/wrappers/ffmpegw.py +0 -125
- atomicshop/wrappers/fibratusw/install.py +0 -81
- atomicshop/wrappers/mongodbw/infrastructure.py +0 -53
- atomicshop/wrappers/mongodbw/install_mongodb.py +0 -190
- atomicshop/wrappers/msiw.py +0 -149
- atomicshop/wrappers/nodejsw/install_nodejs.py +0 -139
- atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
- atomicshop/wrappers/psutilw/networks.py +0 -45
- atomicshop/wrappers/pycharmw.py +0 -81
- atomicshop/wrappers/socketw/base.py +0 -59
- atomicshop/wrappers/socketw/get_process.py +0 -107
- atomicshop/wrappers/wslw.py +0 -191
- atomicshop-2.15.11.dist-info/RECORD +0 -302
- /atomicshop/{addons/mains → a_mains}/FACT/factw_fact_extractor_docker_image_main_sudo.py +0 -0
- /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compile.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.dll +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.exp +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.lib +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +0 -0
- /atomicshop/{archiver → permissions}/__init__.py +0 -0
- /atomicshop/{wrappers/fibratusw → web_apis}/__init__.py +0 -0
- /atomicshop/wrappers/{nodejsw → pywin32w/wmis}/__init__.py +0 -0
- /atomicshop/wrappers/pywin32w/{wmi_win32process.py → wmis/win32process.py} +0 -0
- {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info/licenses}/LICENSE.txt +0 -0
- {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/top_level.txt +0 -0
atomicshop/archiver/zips.py
DELETED
@@ -1,293 +0,0 @@
import os
import time
import zipfile
from io import BytesIO
from typing import Union

from .. import filesystem
from ..print_api import print_api


def is_zip_zipfile(file_object: Union[str, bytes]) -> bool:
    """
    Function checks if the file is a zip file.
    :param file_object: can be two types:
        string, full path to the file.
        bytes or BytesIO, the bytes of the file.
    :return: boolean.
    """

    try:
        if isinstance(file_object, bytes):
            with BytesIO(file_object) as file_object:
                with zipfile.ZipFile(file_object) as zip_object:
                    zip_object.testzip()
                    return True
        elif isinstance(file_object, str):
            with zipfile.ZipFile(file_object) as zip_object:
                zip_object.testzip()
                return True
    except zipfile.BadZipFile:
        return False


def is_zip_magic_number(file_path: str) -> bool:
    """
    Function checks if the file is a zip file using magic number.
    :param file_path: string, full path to the file.
    :return: boolean.

    50 4B 03 04: This is the most common signature, found at the beginning of a ZIP file.
        It signifies the start of a file within the ZIP archive and is present in almost all ZIP files.
        Each file within the ZIP archive starts with this signature.
    50 4B 05 06: This is the end of central directory record signature.
        It's found at the end of a ZIP file and is essential for identifying the structure of the ZIP archive,
        especially in cases where the file is split or is a multi-part archive.
    50 4B 07 08: This signature is used for spanned ZIP archives (also known as split or multi-volume ZIP archives).
        It's found in the end of central directory locator for ZIP files that are split across multiple volumes.
    """

    with open(file_path, 'rb') as file:
        # Read the first 4 bytes of the file
        signature = file.read(4)

        # Check if the signature matches any of the ZIP signatures
        return signature in [b'PK\x03\x04', b'PK\x05\x06', b'PK\x07\x08']


def extract_archive_with_zipfile(
        archive_path: str,
        extract_directory: str = None,
        files_without_directories: bool = False,
        remove_first_directory: bool = False,
        print_kwargs: dict = None
) -> str:
    """
    Function will extract the archive using standard library 'zipfile'.
    This method preserves original date and time of the files inside the archive.

    :param archive_path: string, full path to archived file.
    :param extract_directory: string, full path to directory that the files will be extracted to.
        If not specified, the files will be extracted to the same directory as the archived file, using the file name
        without extension as the directory name.
    :param files_without_directories: boolean, default 'False'.
        'True': All the files in the archive will be extracted without subdirectories hierarchy.
            Meaning, that if there are duplicate file names, the latest file with the same file name will overwrite
            all the rest of the files with the same name.
        'False': Subdirectory hierarchy will be preserved as it is currently in the archived file.
    :param remove_first_directory: boolean, default is 'False'.
        'True': all the files will be extracted without first directory in the hierarchy.
            Example: package_some_name_1.1.1_build/subdir1/file.exe
            Will be extracted as: subdir/file.exe
    :param print_kwargs: dict, kwargs for print_api.

    :return: string, full path to directory that the files were extracted to.
    """

    if print_kwargs is None:
        print_kwargs = dict()

    # If 'extract_directory' is not specified, extract to the same directory as the archived file.
    if extract_directory is None:
        extract_directory = (
                filesystem.get_file_directory(archive_path) + os.sep +
                filesystem.get_file_name_without_extension(archive_path))

    print_api(f'Extracting to directory: {extract_directory}', **print_kwargs)

    # initiating the archived file path as 'zipfile.ZipFile' object.
    with zipfile.ZipFile(archive_path) as zip_object:
        # '.infolist()' method of the object contains all the directories and files that are in the archive including
        # information about each one, like date and time of archiving.
        for zip_info in zip_object.infolist():
            # '.filename' attribute of the 'infolist()' method is relative path to each directory and file.
            # If 'filename' ends with '/' it is a directory (it doesn't matter if it is windows or *nix)
            # If so, skip current iteration.
            if zip_info.filename[-1] == '/':
                continue

            if files_without_directories:
                # Put into 'filename' the string that contains only the filename without subdirectories.
                zip_info.filename = os.path.basename(zip_info.filename)
            elif remove_first_directory:
                # Cut the first directory from the filename.
                zip_info.filename = zip_info.filename.split('/', maxsplit=1)[1]

            print_api(f'Extracting: {zip_info.filename}', **print_kwargs)

            # Extract current file from the archive using 'zip_info' of the current file with 'filename' that we
            # updated under specified parameters to specified directory.
            zip_object.extract(zip_info, extract_directory)

            # === Change the date and time of extracted file from current time to the time specified in 'zip_info'.
            # Get full path to extracted file.
            extracted_file_path: str = extract_directory + os.sep + zip_info.filename
            # Create needed datetime object with original archived datetime from 'zip_info.date_time'.
            date_time = time.mktime(zip_info.date_time + (0, 0, -1))
            # Using 'os' library, changed the datetime of the file to the object created in previous step.
            os.utime(extracted_file_path, (date_time, date_time))
    print_api('Extraction done.', color="green", **print_kwargs)

    return extract_directory


def get_file_list_from_zip(file_path: str) -> list:
    """
    Function returns the list of file names and their relative directories inside the zip file.
    :param file_path: string, full path to the zip file.
    :return: list of strings.
    """

    with zipfile.ZipFile(file_path, 'r') as zip_object:
        return zip_object.namelist()


# def search_file_in_zip(
#         file_path: str = None,
#         file_bytes: bytes = None,
#         file_names_to_search: list[str] = None,
#         case_sensitive: bool = True,
#         return_first_only: bool = False,
#         return_empty_list_per_file_name: bool = False,
#         recursive: bool = False,
#         callback_functions: list = None,
#         extract_file_to_path: str = None
# ) -> dict[str, list[bytes]]:
#     """
#     Function searches for the file names inside the zip file and returns a dictionary where the keys are the
#     names of the callback functions and the values are lists of found file bytes.
#     :param file_path: string, full path to the zip file.
#     :param file_bytes: bytes, the bytes of the zip file.
#     :param file_names_to_search: list of strings, the names of the files to search.
#     :param case_sensitive: boolean, default is 'True'. Determines if file name search should be case sensitive.
#     :param return_first_only: boolean, default is 'False'. Return only the first found file for each file name.
#     :param return_empty_list_per_file_name: boolean, default is 'False'.
#         True: Return empty list for each file name that wasn't found.
#         False: Don't return empty list for each file name that wasn't found.
#     :param recursive: boolean, default is 'False'. If True, search for file names recursively in nested zip files.
#     :param callback_functions: list of callables, default is None. Each function takes a file name and should return a
#         boolean that will tell the main function if this file is 'found' or not.
#     :param extract_file_to_path: string, full path to the directory where the found files should be extracted.
#     :return: dictionary of lists of bytes.
#     """
#
#     def get_unique_filename(directory, filename):
#         """
#         Generates a unique filename by appending a number if the file already exists.
#         """
#         name, ext = os.path.splitext(filename)
#         counter = 1
#         unique_filename = filename
#         while os.path.exists(os.path.join(directory, unique_filename)):
#             unique_filename = f"{name}_{counter}{ext}"
#             counter += 1
#         return unique_filename
#
#     def is_zip_file(file, zip_obj):
#         try:
#             with zip_obj.open(file) as file_data:
#                 with zipfile.ZipFile(BytesIO(file_data.read())) as zip_file:
#                     if zip_file.testzip() is None:  # No errors found
#                         return True
#         except zipfile.BadZipFile:
#             return False
#         return False
#
#     def match_file_name(target, current):
#         if case_sensitive:
#             return current.endswith(target)
#         else:
#             return current.lower().endswith(target.lower())
#
#     def search_in_zip(zip_obj, file_names, results, found_set):
#         for item in zip_obj.infolist():
#             if item.filename.endswith('/'):  # Skip directories
#                 continue
#             is_nested_zip = recursive and is_zip_file(item.filename, zip_obj)
#
#             with zip_obj.open(item) as file_data:
#                 archived_file_bytes = file_data.read()
#
#             # This is needed to know if the file should be extracted to directory or not.
#             should_extract = False
#
#             name_matched = False
#             if file_names is not None:
#                 name_matched = any(match_file_name(file_name, item.filename) for file_name in file_names)
#                 if name_matched:
#                     should_extract = True
#
#             callback_matched = False
#             if callback_functions:
#                 for callback in callback_functions:
#                     callback_result = callback(archived_file_bytes)
#                     if callback_result:
#                         callback_matched = True
#                         # Initialize key for callback function name if not present
#                         if callback.__name__ not in results:
#                             results[callback.__name__] = []
#                         file_info = {
#                             'bytes': archived_file_bytes,
#                             'name': item.filename,
#                             'size': item.file_size,
#                             'modified_time': item.date_time
#                         }
#                         results[callback.__name__].append(file_info)
#                         if return_first_only:
#                             found_set.add(item.filename)
#
#                         should_extract = True
#                         break  # Stop checking other callbacks if one has found it
#
#             if should_extract and extract_file_to_path:
#                 unique_filename = get_unique_filename(extract_file_to_path, os.path.basename(item.filename))
#                 with open(os.path.join(extract_file_to_path, unique_filename), 'wb') as f:
#                     f.write(archived_file_bytes)
#
#             if not callback_matched:
#                 if is_nested_zip:
#                     # If the file is a nested ZIP and hasn't matched a callback, search recursively
#                     nested_zip_bytes = BytesIO(archived_file_bytes)
#                     with zipfile.ZipFile(nested_zip_bytes) as nested_zip:
#                         search_in_zip(nested_zip, file_names, results, found_set)
#                 elif name_matched:
#                     # Handle name match when no callbacks are provided or no callback matched
#                     if item.filename not in results:
#                         results[item.filename] = []
#                     file_info = {
#                         'bytes': archived_file_bytes,
#                         'name': item.filename,
#                         'size': item.file_size,
#                         'modified_time': item.date_time
#                     }
#                     results[item.filename].append(file_info)
#                     if return_first_only:
#                         found_set.add(item.filename)  # Mark as found
#
#             if file_names is not None and len(found_set) == len(file_names):
#                 return  # All files found, stop searching
#
#     if file_names_to_search is None and callback_functions is None:
#         raise ValueError("Either file_names_to_search or callback_functions must be provided.")
#
#     # Initialize results dictionary.
#     if callback_functions:
#         results = {callback.__name__: [] for callback in callback_functions}
#     else:
#         results = {}
#
#     found_set = set()
#     if file_bytes is not None:
#         with zipfile.ZipFile(BytesIO(file_bytes), 'r') as zip_ref:
#             search_in_zip(zip_ref, file_names_to_search, results, found_set)
#     elif file_path is not None:
#         with zipfile.ZipFile(file_path, 'r') as zip_ref:
#             search_in_zip(zip_ref, file_names_to_search, results, found_set)
#     else:
#         raise ValueError("Either file_path or file_bytes must be provided.")
#
#     if not return_empty_list_per_file_name:
#         # Filter out keys with empty lists
#         results = {key: value for key, value in results.items() if value}
#
#     return results
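For context, a minimal usage sketch of the removed helpers as they could be combined under the 2.15.11 layout (the archive path and output directory below are hypothetical):

from atomicshop.archiver import zips  # import path as it existed in 2.15.11

archive = r'C:\temp\example.zip'  # hypothetical input archive

# Cheap magic-number check first, then the stricter zipfile-based validation.
if zips.is_zip_magic_number(archive) and zips.is_zip_zipfile(archive):
    # List the members, then extract while preserving the original file timestamps.
    print(zips.get_file_list_from_zip(archive))
    extracted_to = zips.extract_archive_with_zipfile(archive, extract_directory=r'C:\temp\example_out')
    print(f'Extracted to: {extracted_to}')

The whole atomicshop/archiver package appears to be dropped in 3.10.5, so call sites like the above would need to be reworked.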
atomicshop/file_types.py
DELETED
@@ -1,24 +0,0 @@
from typing import Union

import magic


def get_mime_type(file_object: Union[str, bytes]):
    """
    Determine the MIME type of the given input.
    The input can be a file path (string) or a bytes object.

    :param file_object: File path as a string or bytes object.
    :return: MIME type as a string.
    """
    mime = magic.Magic(mime=True)

    # Check if input is a file path (str) or bytes
    if isinstance(file_object, str):
        # Assuming input_data is a file path
        return mime.from_file(file_object)
    elif isinstance(file_object, bytes):
        # Assuming input_data is bytes
        return mime.from_buffer(file_object)
    else:
        raise TypeError("Input must be a file path (str) or bytes object.")
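A short sketch of how the removed get_mime_type helper was called, assuming the third-party 'magic' dependency (python-magic/libmagic) is installed and using a hypothetical local file:

from atomicshop import file_types  # module removed in 3.10.5

# Path input: libmagic reads the file from disk.
print(file_types.get_mime_type(r'C:\temp\report.pdf'))  # e.g. 'application/pdf'

# Bytes input: libmagic inspects the buffer directly.
with open(r'C:\temp\report.pdf', 'rb') as pdf_file:
    print(file_types.get_mime_type(pdf_file.read()))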
atomicshop/mitm/config_editor.py
DELETED
@@ -1,37 +0,0 @@
import configparser


class CategoryNotFoundInConfigError(Exception):
    pass


def edit_property(category: str, category_property: str, value: str, config_file_path: str) -> None:
    """
    Edit a property in the config file.

    :param category: str, Category in the config file.
    :param category_property: str, Property in the category.
    :param value: str, Value to set to the property.
    :param config_file_path: str, Path to the config file.

    :return: None.

    -----------

    Config Example:
    [category]
    category_property = value
    """
    config = configparser.ConfigParser()
    config.read(config_file_path)

    if category not in config:
        raise CategoryNotFoundInConfigError(f"Category '{category}' not found in the config file.")

    # Change the value of the property if it is different from the current value.
    current_value = config[category][category_property]
    if current_value != value:
        config[category][category_property] = value

        with open(config_file_path, "w") as configfile:
            config.write(configfile)
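A minimal sketch of how config_editor.edit_property was invoked before its removal; the INI path, section, and property names below are hypothetical (3.10.5 adds a config_toml_editor.py in its place):

from atomicshop.mitm import config_editor  # module removed in 3.10.5

# Given an INI file that contains:
# [dns]
# enable_dns_server = True
# the call below sets dns/enable_dns_server to 'False'.
config_editor.edit_property(
    category='dns',
    category_property='enable_dns_server',
    value='False',
    config_file_path=r'C:\mitm\config.ini')  # hypothetical path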
atomicshop/mitm/engines/create_module_template_example.py
DELETED
@@ -1,13 +0,0 @@
engine_name = '_example'
domains = ['example.com']
# This script should be in 'engines' folder.


# === Create template from above settings. ===
# Get current file directory, should be the 'engines' directory.
# noinspection PyPep8
from atomicshop import filesystem
engines_path = filesystem.get_working_directory()
# Create the template.
from atomicshop.mitm.engines.create_module_template import CreateModuleTemplate
CreateModuleTemplate(engine_name, domains, engines_path)
atomicshop/mitm/initialize_mitm_server.py
DELETED
@@ -1,268 +0,0 @@
import os
import sys
import threading
import time

# Importing atomicshop package to get the version of the package.
import atomicshop

from .import_config import ImportConfig
from .initialize_engines import ModuleCategory
from .connection_thread_worker import thread_worker_main
from .. import filesystem, queues
from ..python_functions import get_current_python_version_string, check_python_version_compliance
from ..wrappers.socketw.socket_wrapper import SocketWrapper
from ..wrappers.socketw import dns_server
from ..wrappers.psutilw import networks
from ..basics import dicts_nested
from ..wrappers.loggingw import loggingw
from ..print_api import print_api


STATISTICS_HEADER: str = \
    'request_time_sent,host,path,command,status_code,request_size_bytes,response_size_bytes,file_path,process_cmd,error'


def initialize_mitm_server(config_static):
    # Main function should return integer with error code, 0 is successful.
    # Since listening server is infinite, this will not be reached.
    # After modules import - we check for python version.
    check_python_version_compliance(minimum_version='3.11')

    # Preparing everything for the logging module.
    # Log folder path is in the "config.ini" file, so we need to read it before setting loggers.
    config_importer = ImportConfig(
        file_name=config_static.CONFIG_INI_SERVER_FILE_NAME, directory_path=config_static.WORKING_DIRECTORY)
    config_importer.open()
    config = config_importer.config

    # Create folders.
    filesystem.create_directory(config['log']['logs_path'])
    filesystem.create_directory(config['recorder']['recordings_path'])
    if config['certificates']['sni_get_server_certificate_from_server_socket']:
        filesystem.create_directory(
            config['certificates']['sni_server_certificate_from_server_socket_download_directory'])

    # Create a logger that will log messages to file, Initiate System logger.
    logger_name = "system"
    system_logger = loggingw.create_logger(
        logger_name=logger_name,
        file_path=f"{config['log']['logs_path']}{os.sep}{logger_name}.txt",
        add_stream=True,
        add_timedfile=True,
        formatter_streamhandler='DEFAULT',
        formatter_filehandler='DEFAULT'
    )

    # Writing first log.
    system_logger.info("======================================")

    if config_importer.admin_rights is not None:
        if not config_importer.admin_rights:
            system_logger.error("User continued with errors on Command Line harvesting for system processes.")

    system_logger.info("Server Started.")
    system_logger.info(f"Python Version: {get_current_python_version_string()}")
    system_logger.info(f"Script Version: {config_static.SCRIPT_VERSION}")
    system_logger.info(f"Atomic Workshop Version: {atomicshop.__version__}")
    system_logger.info(f"Loaded config.ini: {config_importer.config_parser.file_path}")
    system_logger.info(f"Log folder: {config['log']['logs_path']}")
    system_logger.info(f"Recordings folder for Requests/Responses: {config['recorder']['recordings_path']}")
    system_logger.info(f"Loaded system logger: {system_logger}")

    system_logger.info(f"TCP Server Target IP: {config['dns']['target_tcp_server_ipv4']}")

    # Some 'config.ini' settings logging ===========================================================================
    if config['certificates']['default_server_certificate_usage']:
        system_logger.info(
            f"Default server certificate usage enabled, if no SNI available: "
            f"{config_static.CONFIG_EXTENDED['certificates']['default_server_certificate_directory']}"
            f"{os.sep}{config_static.CONFIG_EXTENDED['certificates']['default_server_certificate_name']}.pem")

    if config['certificates']['sni_server_certificates_cache_directory']:
        system_logger.info(
            f"SNI function certificates creation enabled. Certificates cache: "
            f"{config['certificates']['sni_server_certificates_cache_directory']}")
    else:
        system_logger.info(f"SNI function certificates creation disabled.")

    if config['certificates']['custom_server_certificate_usage']:
        system_logger.info(f"Custom server certificate usage is enabled.")
        system_logger.info(f"Custom Certificate Path: {config['certificates']['custom_server_certificate_path']}")

        # If 'custom_private_key_path' field was populated.
        if config['certificates']['custom_private_key_path']:
            system_logger.info(
                f"Custom Certificate Private Key Path: {config['certificates']['custom_private_key_path']}")
        else:
            system_logger.info(f"Custom Certificate Private Key Path wasn't provided in [advanced] section. "
                               f"Assuming the private key is inside the certificate file.")

    # === Importing engine modules =================================================================================
    system_logger.info("Importing engine modules.")

    # Get full paths of all the 'engine_config.ini' files.
    engine_config_path_list = filesystem.get_file_paths_from_directory(
        directory_path=config_static.ENGINES_DIRECTORY_PATH,
        file_name_check_pattern=config_static.ENGINE_CONFIG_FILE_NAME)

    # Iterate through all the 'engine_config.ini' file paths.
    domains_engine_list_full: list = list()
    engines_list: list = list()
    for engine_config_path in engine_config_path_list:
        # Initialize engine.
        current_module = ModuleCategory(config_static.WORKING_DIRECTORY)
        current_module.fill_engine_fields_from_config(engine_config_path)
        current_module.initialize_engine(logs_path=config['log']['logs_path'],
                                         logger=system_logger)

        # Extending the full engine domain list with this list.
        domains_engine_list_full.extend(current_module.domain_list)
        # Append the object to the engines list
        engines_list.append(current_module)
    # === EOF Importing engine modules =============================================================================
    # ==== Initialize Reference Module =============================================================================
    reference_module = ModuleCategory(config_static.WORKING_DIRECTORY)
    reference_module.fill_engine_fields_from_general_reference(config_static.ENGINES_DIRECTORY_PATH)
    reference_module.initialize_engine(logs_path=config['log']['logs_path'],
                                       logger=system_logger, stdout=False, reference_general=True)
    # === EOF Initialize Reference Module ==========================================================================
    # === Engine logging ===========================================================================================
    # Printing the parsers using "start=1" for index to start counting from "1" and not "0"
    print_api(f"[*] Found Engines:", logger=system_logger)
    for index, engine in enumerate(engines_list, start=1):
        message = f"[*] {index}: {engine.engine_name} | {engine.domain_list}"
        print_api(message, logger=system_logger)

        message = (f"[*] Modules: {engine.parser_class_object.__name__}, "
                   f"{engine.responder_class_object.__name__}, "
                   f"{engine.recorder_class_object.__name__}")
        print_api(message, logger=system_logger)

    if config['dns']['enable_dns_server']:
        print_api("DNS Server is enabled.", logger=system_logger)

        # If engines were found and dns is set to route by the engine domains.
        if engines_list and config['dns']['route_to_tcp_server_only_engine_domains']:
            print_api("Engine domains will be routed by the DNS server to Built-in TCP Server.", logger=system_logger)
        # If engines were found, but the dns isn't set to route to engines.
        elif engines_list and not config['dns']['route_to_tcp_server_only_engine_domains']:
            message = f"[*] Engine domains found, but the DNS routing is set not to use them for routing."
            print_api(message, color="yellow", logger=system_logger)
        elif not engines_list and config['dns']['route_to_tcp_server_only_engine_domains']:
            raise ValueError("No engines were found, but the DNS routing is set to use them for routing.\n"
                             "Please check your DNS configuration in the 'config.ini' file.")

        if config['dns']['route_to_tcp_server_all_domains']:
            print_api("All domains will be routed by the DNS server to Built-in TCP Server.", logger=system_logger)

        if config['dns']['regular_resolving']:
            print_api(
                "Regular DNS resolving is enabled. Built-in TCP server will not be routed to",
                logger=system_logger, color="yellow")
    else:
        print_api("DNS Server is disabled.", logger=system_logger, color="yellow")

    if config['tcp']['enable_tcp_server']:
        print_api("TCP Server is enabled.", logger=system_logger)

        if engines_list and not config['tcp']['engines_usage']:
            message = \
                f"Engines found, but the TCP server is set not to use them for processing. General responses only."
            print_api(message, color="yellow", logger=system_logger)
        elif engines_list and config['tcp']['engines_usage']:
            message = f"Engines found, and the TCP server is set to use them for processing."
            print_api(message, logger=system_logger)
        elif not engines_list and config['tcp']['engines_usage']:
            raise ValueError("No engines were found, but the TCP server is set to use them for processing.\n"
                             "Please check your TCP configuration in the 'config.ini' file.")
    else:
        print_api("TCP Server is disabled.", logger=system_logger, color="yellow")

    # === EOF Engine Logging =======================================================================================

    # Assigning all the engines domains to all time domains, that will be responsible for adding new domains.
    config_static.CONFIG_EXTENDED['certificates']['domains_all_times'] = list(domains_engine_list_full)

    # Creating Statistics logger.
    statistics_logger = loggingw.create_logger(
        logger_name="statistics",
        directory_path=config['log']['logs_path'],
        add_timedfile=True,
        formatter_filehandler='MESSAGE',
        file_type='csv',
        header=STATISTICS_HEADER
    )

    network_logger_name = "network"
    network_logger = loggingw.create_logger(
        logger_name=network_logger_name,
        directory_path=config['log']['logs_path'],
        add_stream=True,
        add_timedfile=True,
        formatter_streamhandler='DEFAULT',
        formatter_filehandler='DEFAULT'
    )
    system_logger.info(f"Loaded network logger: {network_logger}")

    # Initiate Listener logger, which is a child of network logger, so he uses the same settings and handlers
    listener_logger = loggingw.get_logger_with_level(f'{network_logger_name}.listener')
    system_logger.info(f"Loaded listener logger: {listener_logger}")

    # Create request domain queue.
    domain_queue = queues.NonBlockQueue()

    # === Initialize DNS module ====================================================================================
    if config['dns']['enable_dns_server']:
        # Check if the DNS server port is in use.
        port_in_use = networks.get_processes_using_port_list([config['dns']['listening_port']])
        if port_in_use:
            for port, process_info in port_in_use.items():
                message = f"Port [{port}] is already in use by process: {process_info}"
                print_api(message, error_type=True, logger_method='critical', logger=system_logger)

            # Wait for the message to be printed and saved to file.
            time.sleep(1)
            sys.exit(1)

        # before executing TCP sockets and after executing 'network' logger.
        dns_server_instance = dns_server.DnsServer(config)
        # Passing the engine domain list to DNS server to work with.
        # 'list' function re-initializes the current list, or else it will be the same instance object.
        dns_server_instance.domain_list = list(domains_engine_list_full)

        dns_server_instance.request_domain_queue = domain_queue
        # Initiate the thread.
        dns_thread = threading.Thread(target=dns_server_instance.start)
        dns_thread.daemon = True
        dns_thread.start()

    # === EOF Initialize DNS module ================================================================================
    # === Initialize TCP Server ====================================================================================
    if config['tcp']['enable_tcp_server']:
        port_in_use = networks.get_processes_using_port_list(config['tcp']['listening_port_list'])
        if port_in_use:
            for port, process_info in port_in_use.items():
                print_api(f"Port [{port}] is already in use by process: {process_info}", logger=system_logger,
                          error_type=True, logger_method='critical')
            # Wait for the message to be printed and saved to file.
            time.sleep(1)
            sys.exit(1)

        socket_wrapper = SocketWrapper(
            config=dicts_nested.merge(config, config_static.CONFIG_EXTENDED), logger=listener_logger,
            statistics_logger=statistics_logger)

        socket_wrapper.create_tcp_listening_socket_list()

        socket_wrapper.requested_domain_from_dns_server = domain_queue

        # General exception handler will catch all the exceptions that occurred in the threads and write it to the log.
        try:
            socket_wrapper.loop_for_incoming_sockets(function_reference=thread_worker_main, reference_args=(
                network_logger, statistics_logger, engines_list, reference_module, config,))
        except Exception:
            message = f"Unhandled Exception occurred in 'loop_for_incoming_sockets' function"
            print_api(message, error_type=True, color="red", logger=network_logger, traceback_string=True, oneline=True)

    # === EOF Initialize TCP Server ================================================================================
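The removed entry point checks its DNS and TCP listening ports via the package's own networks.get_processes_using_port_list helper before binding. As a rough, generic stand-in for that idea (it only reports whether a port can be bound, not which process owns it, and is not the package's API), a stdlib-only sketch:

import socket

def port_is_free(port: int, host: str = '127.0.0.1') -> bool:
    # Try to bind the port; an OSError (EADDRINUSE) means another socket already listens on it.
    # Binding ports below 1024 may also require elevated privileges on some systems.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as test_socket:
        try:
            test_socket.bind((host, port))
            return True
        except OSError:
            return False

print(port_is_free(8080))  # hypothetical listening port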