atomicshop-2.16.10-py3-none-any.whl → atomicshop-2.16.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of atomicshop has been flagged as potentially problematic.
- atomicshop/__init__.py +1 -1
- atomicshop/basics/enums.py +2 -2
- atomicshop/basics/list_of_classes.py +29 -0
- atomicshop/dns.py +2 -4
- atomicshop/file_io/docxs.py +4 -4
- atomicshop/file_io/file_io.py +12 -0
- atomicshop/filesystem.py +265 -198
- atomicshop/mitm/config_static.py +7 -8
- atomicshop/mitm/connection_thread_worker.py +59 -39
- atomicshop/mitm/engines/__parent/parser___parent.py +0 -1
- atomicshop/mitm/engines/__parent/recorder___parent.py +5 -6
- atomicshop/mitm/engines/__parent/responder___parent.py +0 -1
- atomicshop/mitm/import_config.py +6 -4
- atomicshop/mitm/initialize_engines.py +6 -6
- atomicshop/mitm/message.py +1 -0
- atomicshop/mitm/{initialize_mitm_server.py → mitm_main.py} +57 -32
- atomicshop/mitm/recs_files.py +17 -17
- atomicshop/mitm/statistic_analyzer.py +2 -2
- atomicshop/ssh_remote.py +9 -9
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +1 -1
- atomicshop/wrappers/loggingw/reading.py +63 -100
- atomicshop/wrappers/pywin32w/wmis/helpers.py +5 -1
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +0 -32
- atomicshop/wrappers/socketw/dns_server.py +9 -10
- atomicshop/wrappers/socketw/exception_wrapper.py +5 -7
- atomicshop/wrappers/socketw/get_process.py +3 -3
- atomicshop/wrappers/socketw/receiver.py +3 -3
- atomicshop/wrappers/socketw/sender.py +1 -1
- atomicshop/wrappers/socketw/sni.py +1 -1
- atomicshop/wrappers/socketw/socket_server_tester.py +5 -5
- atomicshop/wrappers/winregw/__init__.py +0 -0
- atomicshop/wrappers/winregw/winreg_network.py +174 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/METADATA +1 -1
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/RECORD +37 -34
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/WHEEL +0 -0
- {atomicshop-2.16.10.dist-info → atomicshop-2.16.12.dist-info}/top_level.txt +0 -0
atomicshop/ssh_remote.py
CHANGED
@@ -126,24 +126,24 @@ class SSHRemote:
         except paramiko.ssh_exception.NoValidConnectionsError as e:
             error = str(e)
             # Logging the error also. Since the process name isn't critical, we'll continue script execution.
-            print_api(error, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(error, logger=self.logger, logger_method='error', traceback_string=True)
             pass
         except paramiko.ssh_exception.SSHException as e:
             error = str(e)
             # Logging the error also. Since the process name isn't critical, we'll continue script execution.
-            print_api(error, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(error, logger=self.logger, logger_method='error', traceback_string=True)
             pass
         except ConnectionResetError:
             # Returning the error.
             error = "An existing connection was forcibly closed by the remote host."
             # Logging the error also. Since the process name isn't critical, we'll continue script execution.
-            print_api(error, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(error, logger=self.logger, logger_method='error', traceback_string=True)
             pass
         except TimeoutError:
             # Returning the error.
             error = "Connection timed out."
             # Logging the error also. Since the process name isn't critical, we'll continue script execution.
-            print_api(error, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(error, logger=self.logger, logger_method='error', traceback_string=True)
             pass
 
         return error
@@ -165,24 +165,24 @@ class SSHRemote:
         except AttributeError as function_exception_object:
             if function_exception_object.name == "open_session":
                 result_exception = "'SSHRemote().connect' wasn't executed."
-                print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True
+                print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True)
 
                 # Since getting Process name is not the main feature of the server, we can pass the exception
                 pass
             else:
                 result_exception = f"Couldn't execute script over SSH. Unknown yet exception with 'AttributeError' " \
                                    f"and name: {function_exception_object.name}"
-                print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True
+                print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True)
                 # Since getting Process name is not the main feature of the server, we can pass the exception
                 pass
         except socket.error:
             result_exception = "Couldn't execute script over SSH. SSH socket closed."
-            print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True)
             # Since getting Process name is not the main feature of the server, we can pass the exception
             pass
         except Exception:
             result_exception = "Couldn't execute script over SSH. Unknown yet exception."
-            print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(result_exception, logger=self.logger, logger_method='error', traceback_string=True)
             # Since getting Process name is not the main feature of the server, we can pass the exception
             pass
 
@@ -334,7 +334,7 @@ class SSHRemote:
         # Basically we don't care much about SSH exceptions. Just log them and pass to record.
         except Exception as function_exception_object:
             execution_error = function_exception_object
-            print_api(execution_error, logger=self.logger, logger_method='error', traceback_string=True
+            print_api(execution_error, logger=self.logger, logger_method='error', traceback_string=True)
             pass
 
         # Closing SSH connection at this stage.
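Every change in this file is the same repair: as rendered here, the 2.16.10 lines end without the closing parenthesis of the `print_api(...)` call, and 2.16.12 terminates them. A minimal sketch of the repaired call shape, with a stand-in `print_api` and logger (hypothetical definitions, since only the call sites appear in this diff):

```python
import logging
import traceback

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger("ssh_remote")

def print_api(message, logger=None, logger_method='info', traceback_string=False):
    # Minimal stand-in for atomicshop's print_api: route the message to the
    # requested logger method, optionally appending the current traceback.
    if traceback_string:
        message = f"{message}\n{traceback.format_exc()}"
    getattr(logger, logger_method)(message)

try:
    raise TimeoutError
except TimeoutError:
    error = "Connection timed out."
    # The 2.16.12 form: the call is properly terminated with ')'.
    print_api(error, logger=logger, logger_method='error', traceback_string=True)
```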
atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py
CHANGED
@@ -66,7 +66,7 @@ def install_before_restart(
 
     if not fact_source_archive_path:
         # Download the FACT_core repo.
-        if not filesystem.
+        if not filesystem.get_paths_from_directory(installation_directory, get_file=True):
             git_wrapper = githubw.GitHubWrapper(repo_url=config_install.FACT_CORE_GITHUB_URL)
             git_wrapper.build_links_from_repo_url()
             git_wrapper.download_and_extract_branch(
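The repaired guard downloads the repo only when the installation directory contains no files yet. A rough standard-library equivalent of that emptiness check (the real code uses atomicshop's `filesystem.get_paths_from_directory`, whose full semantics are not shown here):

```python
from pathlib import Path

def directory_contains_files(directory: str) -> bool:
    # Recursively check whether any regular file exists under the directory.
    return any(path.is_file() for path in Path(directory).rglob('*'))

installation_directory = '.'  # hypothetical; the real code checks the FACT_core install dir
if not directory_contains_files(installation_directory):
    print("Empty directory: this is where the FACT_core branch would be downloaded.")
```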
atomicshop/wrappers/loggingw/reading.py
CHANGED
@@ -4,18 +4,12 @@ from pathlib import Path
 import datetime
 
 from ... import filesystem, datetimes
-from ...basics import booleans
+from ...basics import booleans, list_of_classes
 from ...file_io import csvs
 
 
-class LogReaderTimeCouldntBeFoundInFileNameError(Exception):
-    pass
-
-
 def get_logs_paths(
-
-        log_file_path: str = None,
-        file_name_pattern: str = '*.*',
+        log_file_path: str,
         date_format: str = None,
         latest_only: bool = False,
         previous_day_only: bool = False,
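The new import pairs with `atomicshop/basics/list_of_classes.py` (+29 lines in the file list above); its `sort_by_attributes` is used further down to order `AtomicPath` objects. A plausible minimal implementation, assuming it wraps `operator.attrgetter` (the package's actual code for it is not shown in this diff):

```python
import operator

def sort_by_attributes(list_of_instances: list, attribute_names: list) -> list:
    # Sort class instances by one or more attribute names, in priority order.
    return sorted(list_of_instances, key=operator.attrgetter(*attribute_names))

class Item:
    def __init__(self, datetime_float: float):
        self.datetime_float = datetime_float

items = [Item(3.0), Item(1.0), Item(2.0)]
print([i.datetime_float for i in sort_by_attributes(items, ['datetime_float'])])  # [1.0, 2.0, 3.0]
```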
@@ -24,8 +18,6 @@ def get_logs_paths(
     """
     This function gets the logs file paths from the directory. Supports rotating files to get the logs by time.
 
-    :param log_files_directory_path: Path to the log files. If specified, the function will get all the files from the
-        directory by the 'file_name_pattern'.
     :param log_file_path: Path to the log file. If specified, the function will get the file and all the rotated logs
         associated with this file. The 'file_name_pattern' will become the file name using the file name and extension.
 
@@ -37,8 +29,6 @@ def get_logs_paths(
 
     # The 'log_files_directory_path' will also be taken from the 'log_file_path':
     log_files_directory_path = 'C:/logs'
-    :param file_name_pattern: Pattern to match the log files names.
-        Default file_name_pattern will match all the files.
     :param date_format: date format string pattern to match the date in the log file name.
         If specified, the function will get the log file by the date pattern.
         If not specified, the function will get the file date by file last modified time.
@@ -53,11 +43,6 @@ def get_logs_paths(
 
     """
 
-    if not log_files_directory_path and not log_file_path:
-        raise ValueError('Either "log_files_directory_path" or "log_file_path" must be specified.')
-    elif log_files_directory_path and log_file_path:
-        raise ValueError('Both "log_files_directory_path" and "log_file_path" cannot be specified at the same time.')
-
     if latest_only or previous_day_only or specific_date:
         booleans.check_3_booleans_when_only_1_can_be_true(
             (latest_only, 'latest_only'),
@@ -67,73 +52,63 @@ def get_logs_paths(
     if not date_format and specific_date:
         raise ValueError('If "specific_date" is specified, "date_format" must be specified.')
 
-    #
-
-
-
-    log_file_extension: str = Path(log_file_path).suffix
-    file_name_pattern = f'{log_file_name}*{log_file_extension}'
+    # Get the file_name_pattern from the file name. Build the file_name_pattern.
+    log_file_name: str = Path(log_file_path).stem
+    log_file_extension: str = Path(log_file_path).suffix
+    file_name_pattern: str = f'{log_file_name}*{log_file_extension}'
 
-
-
+    # Get the directory path from the file path.
+    log_files_directory_path: str = str(Path(log_file_path).parent)
 
-    # Get all the log file paths by the file_name_pattern.
-    logs_files: list = filesystem.
+    # Get all the log file paths by the file_name_pattern and the date_format string.
+    logs_files: list = filesystem.get_paths_from_directory(
         log_files_directory_path,
+        get_file=True,
         file_name_check_pattern=file_name_pattern,
         add_last_modified_time=True,
-        sort_by_last_modified_time=True
+        sort_by_last_modified_time=True,
+        datetime_format=date_format
+    )
+
+    # The above will not include the latest log file if it is not rotated yet.
+    # noinspection PyTypeChecker
+    last_log_file_atomic_path: filesystem.AtomicPath = None
+    if os.path.isfile(log_file_path):
+        last_log_file_atomic_path = filesystem.AtomicPath(log_file_path)
+        last_log_file_atomic_path.update(update_last_modified=True)
+
+    if logs_files and last_log_file_atomic_path and date_format:
+        # The problem here is the file name that doesn't contain the date string in the name.
+        # If it is regular log rotation, then there will be one file that doesn't have the date string in the name.
+        # If the function used to get the previous day log, then there will be no file that doesn't have the date
+        # string.
+
+        # Get the latest timestamp from the files with dates.
+        latest_datetime_float: float = 0
+        for file_index, single_file in enumerate(logs_files):
+            if single_file.datetime_float > latest_datetime_float:
+                latest_datetime_float = single_file.datetime_float
+
+        # We will add one day to the latest date that we found and assign to the latest file in rotation
+        # which is without the datetime string.
+        latest_datetime_float += 86400
+        last_log_file_atomic_path.datetime_float = latest_datetime_float
+        last_log_file_atomic_path.datetime_datetime = datetime.datetime.fromtimestamp(latest_datetime_float)
+        last_log_file_atomic_path.datetime_string = (
+            last_log_file_atomic_path.datetime_datetime.strftime(date_format))
+        last_log_file_atomic_path.datetime_format = date_format
+
+        # Add the last log file to the list.
+        logs_files.append(last_log_file_atomic_path)
+
+        # Sort the files by the last modified time.
+        logs_files = list_of_classes.sort_by_attributes(logs_files, ['datetime_float'])
+    elif last_log_file_atomic_path and logs_files and not date_format:
+        logs_files.append(last_log_file_atomic_path)
+    elif last_log_file_atomic_path and not logs_files:
+        logs_files = [last_log_file_atomic_path]
 
-    # Get the datetime object from the first file name by the date pattern.
-    first_date_string = None
     if logs_files:
-        first_file_name: str = Path(logs_files[0]['file_path']).name
-        first_datetime_object, first_date_string, first_timestamp_float = (
-            datetimes.get_datetime_from_complex_string_by_pattern(first_file_name, date_format))
-
-        # The problem here is the file name that doesn't contain the date string in the name.
-        # If it is regular log rotation, then there will be one file that doesn't have the date string in the name.
-        # If the function used to get the previous day log, then there will be no file that doesn't have the date string.
-        if len(logs_files) > 1 or (len(logs_files) == 1 and first_date_string):
-            if date_format:
-                latest_timestamp: float = 0
-                for file_index, single_file in enumerate(logs_files):
-                    # Get file name from current loop file path.
-                    current_file_name: str = Path(single_file['file_path']).name
-                    logs_files[file_index]['file_name'] = current_file_name
-
-                    # Get the datetime object from the file name by the date format pattern.
-                    datetime_object, date_string, timestamp_float = (
-                        datetimes.get_datetime_from_complex_string_by_pattern(current_file_name, date_format))
-
-                    # Update the last modified time to the dictionary.
-                    logs_files[file_index]['last_modified'] = timestamp_float
-                    logs_files[file_index]['datetime'] = datetime_object
-                    logs_files[file_index]['date_string'] = date_string
-
-                    if timestamp_float and timestamp_float > latest_timestamp:
-                        latest_timestamp = timestamp_float
-
-                # Check timestamps, if more than 1 file is None, then the function that gets the date from the file name
-                # didn't work properly, probably because of the string datetime format or the filenames.
-                none_timestamps = [single_file['last_modified'] for single_file in logs_files].count(None)
-                if none_timestamps > 1:
-                    raise LogReaderTimeCouldntBeFoundInFileNameError(
-                        'The date pattern could not be found in the file name. Check the date pattern and the file names.')
-
-                # Now, there should be a file that doesn't have the string date pattern in the file name.
-                # We will add one day to the latest date that we found and assign to that file path.
-                for file_index, single_file in enumerate(logs_files):
-                    if single_file['last_modified'] is None:
-                        latest_timestamp += 86400
-                        logs_files[file_index]['last_modified'] = latest_timestamp
-                        logs_files[file_index]['datetime'] = datetime.datetime.fromtimestamp(latest_timestamp)
-                        logs_files[file_index]['date_string'] = logs_files[file_index]['datetime'].strftime(date_format)
-                        break
-
-                # Sort the files by the last modified time.
-                logs_files = sorted(logs_files, key=lambda x: x['last_modified'], reverse=False)
-
         if latest_only:
             logs_files = [logs_files[-1]]
 
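The rewritten body gives the live, not-yet-rotated log file a synthetic timestamp one day (86400 seconds) past the newest dated rotation, so it always sorts last. A self-contained sketch of that ordering trick; `AtomicPath` here is a simplified stand-in for `filesystem.AtomicPath`:

```python
import datetime
from dataclasses import dataclass

@dataclass
class AtomicPath:
    # Simplified stand-in for filesystem.AtomicPath.
    path: str
    datetime_float: float = 0.0
    datetime_string: str = ''

date_format = '%Y_%m_%d'
rotated = [
    AtomicPath('app_2024_01_01.log', datetime.datetime(2024, 1, 1).timestamp(), '2024_01_01'),
    AtomicPath('app_2024_01_02.log', datetime.datetime(2024, 1, 2).timestamp(), '2024_01_02'),
]

# The live file has no date in its name; place it one day (86400 s) after the newest rotation.
latest = max(f.datetime_float for f in rotated) + 86400
current = AtomicPath('app.log', latest, datetime.datetime.fromtimestamp(latest).strftime(date_format))

logs_files = sorted(rotated + [current], key=lambda f: f.datetime_float)
print([f.path for f in logs_files])  # 'app.log' sorts last
```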
@@ -146,19 +121,13 @@ def get_logs_paths(
 
     if specific_date:
         # Check if there is a specific date log file.
-        logs_files = [single_file for single_file in logs_files if single_file
-    # If there is only one file, meaning it is the current day log.
-    # If the 'previous_day_only' is True, then there are no previous day logs to output.
-    elif len(logs_files) == 1 and previous_day_only:
-        logs_files = []
+        logs_files = [single_file for single_file in logs_files if single_file.datetime_string == specific_date]
 
     return logs_files
 
 
 def get_all_log_files_into_list(
-        log_files_directory_path: str = None,
         log_file_path: str = None,
-        file_name_pattern: str = '*.*',
         date_format: str = None,
         log_type: Literal['csv'] = 'csv',
         header_type_of_files: Literal['first', 'all'] = 'first',
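Putting the new signature together: callers now always pass the live log path, and `specific_date` filters on the parsed `datetime_string`. A usage sketch (hypothetical path and date format; assumes atomicshop 2.16.12 is installed):

```python
# Requires atomicshop 2.16.12; path and date format are hypothetical.
from atomicshop.wrappers.loggingw import reading

logs = reading.get_logs_paths(
    log_file_path='C:/logs/statistics.csv',
    date_format='%Y_%m_%d',
    specific_date='2024_01_02',  # compared against each file's parsed datetime_string
)
print([single_file.path for single_file in logs])  # AtomicPath objects expose .path
```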
@@ -170,10 +139,7 @@ def get_all_log_files_into_list(
     This function gets the logs contents from the log files. Supports rotating files to get the logs by time.
     All the contents will be merged into one list.
 
-    :param log_files_directory_path: Path to the log files. Check the 'get_logs_paths' function for more details.
     :param log_file_path: Path to the log file. Check the 'get_logs_paths' function for more details.
-    :param file_name_pattern: Pattern to match the log files names.
-        Default file_name_pattern will match all the files.
     :param date_format: date format string pattern to match the date in the log file name.
         If specified, the function will get the log file by the date pattern.
         If not specified, the function will get the file date by file last modified time.
@@ -202,9 +168,7 @@ def get_all_log_files_into_list(
 
     # Get all the log file paths by the file_name_pattern.
    logs_files: list = get_logs_paths(
-        log_files_directory_path=log_files_directory_path,
         log_file_path=log_file_path,
-        file_name_pattern=file_name_pattern,
         date_format=date_format)
 
     # Read all the logs.
@@ -213,13 +177,13 @@ def get_all_log_files_into_list(
     for single_file in logs_files:
         if log_type == 'csv':
             if header_type_of_files == 'all':
-                csv_content, _ = csvs.read_csv_to_list_of_dicts_by_header(single_file
+                csv_content, _ = csvs.read_csv_to_list_of_dicts_by_header(single_file.path, **print_kwargs)
                 logs_content.extend(csv_content)
             elif header_type_of_files == 'first':
                 # The function gets empty header to read it from the CSV file, the returns the header that it read.
                 # Then each time the header is fed once again to the function.
                 csv_content, header = csvs.read_csv_to_list_of_dicts_by_header(
-                    single_file
+                    single_file.path, header=header, **print_kwargs)
                 # Any way the first file will be read with header.
                 logs_content.extend(csv_content)
 
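With `header_type_of_files='first'`, the header returned from the first CSV read is fed back into every later read, so rotated files that lack a header row still parse, assuming that is what `read_csv_to_list_of_dicts_by_header` does with a non-empty header. A standard-library sketch of the same feedback loop:

```python
import csv
import io

def read_csv_rows(text: str, header=None):
    # With header=None, DictReader consumes the first line as the header;
    # with an explicit header, every line is treated as data.
    reader = csv.DictReader(io.StringIO(text), fieldnames=header)
    rows = list(reader)
    return rows, reader.fieldnames

first_file = "domain,count\nexample.com,3\n"  # first file carries the header row
later_file = "example.org,5\n"                # hypothetical rotation without a header row

header = None
logs_content = []
for text in (first_file, later_file):
    rows, header = read_csv_rows(text, header=header)
    logs_content.extend(rows)
print(logs_content)
```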
@@ -230,7 +194,7 @@ def get_all_log_files_into_list(
     if remove_logs:
         # Remove the statistics files.
         for single_file in logs_files:
-            filesystem.remove_file(single_file
+            filesystem.remove_file(single_file.path)
 
     if move_to_path:
         # Get formatted time stamp for file name.
@@ -243,9 +207,8 @@ def get_all_log_files_into_list(
         filesystem.create_directory(move_to_path_with_timestamp)
         # Move the statistics files.
         for single_file in logs_files:
-
-            move_to_path_with_file
-            filesystem.move_file(single_file['file_path'], move_to_path_with_file)
+            move_to_path_with_file = f'{move_to_path_with_timestamp}{os.sep}{single_file.name}'
+            filesystem.move_file(single_file.path, move_to_path_with_file)
 
     return logs_content
 
@@ -301,9 +264,9 @@ class LogReader:
     :param log_type: Type of log to get.
     :param get_previous_file: Boolean, if True, the function will get the previous log file.
         For example, your log is set to rotate every Midnight.
-        Meaning, once the day will change, the function will get the log file from the previous day in the
-        of the return tuple. This happens only once each 24 hours. Not from the time
-        the time the day changed.
+        Meaning, once the day will change, the function will get the log file from the previous day in the
+        third entry of the return tuple. This happens only once each 24 hours. Not from the time
+        the function was called, but from the time the day changed.
     :param header: List of strings that will be the header of the CSV file. Default is 'None'.
         None: the header from the CSV file will be used. The first row of the CSV file will be the header.
         Meaning, that the first line will be skipped and the second line will be the first row of the content.
@@ -362,7 +325,7 @@ class LogReader:
         # if not latest_statistics_file_path_object:
         #     return [], [], self.header
 
-        latest_statistics_file_path: str = latest_statistics_file_path_object[0]
+        latest_statistics_file_path: str = latest_statistics_file_path_object[0].path
 
         # Get the previous day statistics file path.
         previous_day_statistics_file_path: Union[str, None] = None
@@ -371,7 +334,7 @@ class LogReader:
                 log_file_path=self.log_file_path,
                 date_format=self.date_format,
                 previous_day_only=True
-            )[0]
+            )[0].path
         # If you get IndexError, it means that there are no previous day logs to read.
         except IndexError:
             pass
atomicshop/wrappers/pywin32w/wmis/helpers.py
CHANGED
@@ -114,7 +114,11 @@ def call_method(
     # Check if the method executed successfully.
     for result in results:
         if result != 0:
-
+            if result == 91:
+                raise PermissionError(f"Failed to execute method '{method_name}' with error code: {result}, "
+                                      f"Try with Admin rights.")
+            else:
+                raise WmiMethodExecutionError(f"Failed to execute method '{method_name}' with error code: {result}")
 
 
 """
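`call_method` now distinguishes WMI return code 91, which for `Win32_NetworkAdapterConfiguration` methods means access denied, and raises `PermissionError` instead of the generic wrapper exception. The same dispatch in isolation, with a stand-in for atomicshop's `WmiMethodExecutionError`:

```python
class WmiMethodExecutionError(Exception):
    # Stand-in for atomicshop's own wrapper exception.
    pass

def check_wmi_results(method_name: str, results) -> None:
    # WMI methods return numeric status codes; 0 means success.
    for result in results:
        if result != 0:
            if result == 91:  # 91 == access denied for these WMI methods
                raise PermissionError(
                    f"Failed to execute method '{method_name}' with error code: {result}, "
                    f"Try with Admin rights.")
            raise WmiMethodExecutionError(
                f"Failed to execute method '{method_name}' with error code: {result}")

check_wmi_results('SetDNSServerSearchOrder', (0,))  # success codes pass silently
```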
atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py
CHANGED
@@ -133,35 +133,3 @@ def set_dns_server(
 
     # Set DNS servers
     helpers.call_method(adapter_config, 'SetDNSServerSearchOrder', dns_servers)
-
-
-def is_adapter_dns_gateway_from_dhcp(
-        use_default_interface: bool = False,
-        connection_name: str = None,
-        mac_address: str = None
-) -> bool:
-    """
-    Check if the adapter is set to obtain the DNS servers automatically from DHCP.
-    :param use_default_interface: bool, if True, the default network interface will be used.
-        This is the adapter that your internet is being used from.
-    :param connection_name: string, adapter name as shown in the network settings.
-    :param mac_address: string, MAC address of the adapter. Format: '00:00:00:00:00:00'.
-    :return: bool, True if DHCP is enabled, False otherwise.
-    """
-
-    adapter_config, current_adapter = get_wmi_network_configuration(
-        use_default_interface=use_default_interface, connection_name=connection_name, mac_address=mac_address)
-
-    # If DHCP is not enabled.
-    if not adapter_config.DHCPEnabled:
-        # Then it is obvious that DNS Gateway is also Statis.
-        return False
-    # If DHCP is enabled.
-    else:
-        # Then we need to check if Default IP gateway is the same as DNS Gateway, if so.
-        if adapter_config.DefaultIPGateway == adapter_config.DNSServerSearchOrder:
-            # Then it is set dynamically from DHCP.
-            return True
-        else:
-            # If not, so it is static.
-            return False
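The file list shows this removal is paired with the new `atomicshop/wrappers/winregw/winreg_network.py` (+174 lines), suggesting the DHCP-versus-static DNS check moved from WMI to the registry. A sketch of how such a check can read static DNS settings with the standard library, as an illustration of the direction rather than atomicshop's actual implementation:

```python
import winreg

def interface_has_static_dns(interface_guid: str) -> bool:
    # Per-interface TCP/IP settings live under this key; a non-empty 'NameServer'
    # value means DNS servers were configured statically rather than via DHCP.
    key_path = (r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces"
                "\\" + interface_guid)
    with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key_path) as key:
        try:
            name_server, _ = winreg.QueryValueEx(key, "NameServer")
        except FileNotFoundError:
            name_server = ""
    return bool(name_server)

# interface_guid is the adapter GUID, e.g. '{...}', found under the same registry tree.
```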
atomicshop/wrappers/socketw/dns_server.py
CHANGED
@@ -230,17 +230,16 @@ class DnsServer:
                 # This error happens when the client closes the connection before the server.
                 # This is not an error for a DNS Server, but we'll log it anyway only with the full DNS logger.
                 message = "Error: to receive DNS request, An existing connection was forcibly closed"
-                # print_api(message, logger=self.logger, logger_method='error', traceback_string=True
+                # print_api(message, logger=self.logger, logger_method='error', traceback_string=True)
                 print_api(
-                    message, logger=self.dns_full_logger, logger_method='error', traceback_string=True
-                    oneline=True)
+                    message, logger=self.dns_full_logger, logger_method='error', traceback_string=True)
                 self.dns_full_logger.info("==========")
                 pass
                 continue
             except Exception:
                 message = "Unknown Exception: to receive DNS request"
                 print_api(
-                    message, logger=self.logger, logger_method='critical', traceback_string=True
+                    message, logger=self.logger, logger_method='critical', traceback_string=True)
                 self.logger.info("==========")
                 pass
                 continue
@@ -417,9 +416,9 @@ class DnsServer:
             except ValueError:
                 message = f"Looks like wrong type of response for QTYPE: {qtype_string}. Response: "
                 print_api(message, logger=self.logger, logger_method='critical',
-                          traceback_string=True
+                          traceback_string=True)
                 print_api(f"{dns_built_response}", logger=self.logger, logger_method='critical',
-                          traceback_string=True
+                          traceback_string=True)
                 # Pass the exception.
                 pass
                 # Continue to the next DNS request, since there's nothing to do here right now.
@@ -430,9 +429,9 @@ class DnsServer:
                     (f"Unknown exception while creating response for QTYPE: {qtype_string}. "
                      f"Response: ")
                 print_api(message, logger=self.logger, logger_method='critical',
-                          traceback_string=True
+                          traceback_string=True)
                 print_api(f"{dns_built_response}", logger=self.logger, logger_method='critical',
-                          traceback_string=True
+                          traceback_string=True)
                 # Pass the exception.
                 pass
                 # Continue to the next DNS request, since there's nothing to do here right now.
@@ -479,7 +478,7 @@ class DnsServer:
                 google_dns_ipv4_socket.recvfrom(self.buffer_size_receive)
             except TimeoutError as function_exception_object:
                 print_api(function_exception_object, logger=self.logger, logger_method='error',
-                          traceback_string=True
+                          traceback_string=True)
                 google_dns_ipv4_socket.close()
                 counter += 1
                 # Pass the exception.
@@ -725,7 +724,7 @@ class DnsServer:
             except Exception:
                 message = "Unknown Exception: to parse DNS request"
                 print_api(
-                    message, logger=self.logger, logger_method='critical', traceback_string=True
+                    message, logger=self.logger, logger_method='critical', traceback_string=True)
                 self.logger.info("==========")
                 pass
                 continue
atomicshop/wrappers/socketw/exception_wrapper.py
CHANGED
@@ -27,14 +27,12 @@ def connection_exception_decorator(function_name):
             message = f"Socket Accept: {kwargs['domain_from_dns_server']}:{port}: " \
                       f"* Established connection was aborted by software on the host..."
             wrapper_handle_connection_exceptions.message = message
-            print_api(message, logger_method='error', traceback_string=True, **kwargs['print_kwargs'])
-            pass
+            print_api(message, logger_method='error', traceback_string=True, oneline=True, **kwargs['print_kwargs'])
         except ConnectionResetError:
             message = f"Socket Accept: {kwargs['domain_from_dns_server']}:{port}: " \
                       f"* An existing connection was forcibly closed by the remote host..."
             wrapper_handle_connection_exceptions.message = message
             print_api(message, logger_method='error', traceback_string=True, oneline=True, **kwargs['print_kwargs'])
-            pass
         except ssl.SSLEOFError as e:
             # A subclass of SSLError raised when the SSL connection has been terminated abruptly. Generally, you
             # shouldn't try to reuse the underlying transport when this error is encountered.
@@ -47,7 +45,7 @@ def connection_exception_decorator(function_name):
             message = \
                 f"Socket Accept: {kwargs['domain_from_dns_server']}:{port}: {message}"
             wrapper_handle_connection_exceptions.message = message
-            print_api(message, error_type=True, logger_method='error', **kwargs['print_kwargs'])
+            print_api(message, error_type=True, logger_method='error', oneline=True, **kwargs['print_kwargs'])
         except Exception:
             message = f"Socket Accept: port {port}: {message}"
             wrapper_handle_connection_exceptions.message = message
@@ -61,7 +59,7 @@ def connection_exception_decorator(function_name):
             message = \
                 f"Socket Accept: {kwargs['domain_from_dns_server']}:{port}: {message}"
             wrapper_handle_connection_exceptions.message = message
-            print_api(message, logger_method='error', **kwargs['print_kwargs'])
+            print_api(message, logger_method='error', oneline=True, **kwargs['print_kwargs'])
         except Exception:
             message = f"Socket Accept: port {port}: {message}"
             wrapper_handle_connection_exceptions.message = message
@@ -88,14 +86,14 @@ def connection_exception_decorator(function_name):
 
             message = "SSLError on accept. Not documented..."
             wrapper_handle_connection_exceptions.message = message
-            print_api(message, logger_method='error', **kwargs['print_kwargs'])
+            print_api(message, logger_method='error', oneline=True, **kwargs['print_kwargs'])
 
             message = f'ssl.SSLError:{exception_object}'
             wrapper_handle_connection_exceptions.message = message
             message = \
                 f"Socket Accept: {kwargs['domain_from_dns_server']}:{port}: {message}"
             wrapper_handle_connection_exceptions.message = message
-            print_api(message, logger_method='error', **kwargs['print_kwargs'])
+            print_api(message, logger_method='error', oneline=True, **kwargs['print_kwargs'])
             pass
         except FileNotFoundError:
             message = "'SSLSocket.accept()' crashed: 'FileNotFoundError'. Some problem with SSL during Handshake - " \
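Each handler here now passes `oneline=True`, which, judging by the parameter name, collapses the logged message and traceback onto a single line. A stand-in illustrating the assumed behavior:

```python
import traceback

def print_api(message, logger_method='info', traceback_string=False, oneline=False, **print_kwargs):
    # Stand-in for atomicshop's print_api; only the assumed oneline behavior is sketched.
    if traceback_string:
        message = f"{message} | {traceback.format_exc()}"
    if oneline:
        message = " ".join(message.split())  # collapse newlines and indentation
    print(f"[{logger_method}] {message}")

try:
    raise ConnectionResetError
except ConnectionResetError:
    print_api("Socket Accept: example.com:443: * An existing connection was forcibly closed...",
              logger_method='error', traceback_string=True, oneline=True)
```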
atomicshop/wrappers/socketw/get_process.py
CHANGED
@@ -67,20 +67,20 @@ class GetCommandLine:
         except ModuleNotFoundError as function_exception_object:
             execution_error = f"Module not installed: {function_exception_object}"
             print_api(
-                execution_error, error_type=True, logger_method="error", traceback_string=True,
+                execution_error, error_type=True, logger_method="error", traceback_string=True,
                 **print_kwargs)
             pass
         except psutil.AccessDenied:
             execution_error = f"Access Denied for 'psutil' to read system process command line. " \
                               f"Run script with Admin Rights."
             print_api(
-                execution_error, error_type=True, logger_method="error", traceback_string=True,
+                execution_error, error_type=True, logger_method="error", traceback_string=True,
                 **print_kwargs)
             pass
         except Exception:
             execution_error = "There was undocumented exception in localhost script execution."
             print_api(
-                execution_error, error_type=True, logger_method="error", traceback_string=True,
+                execution_error, error_type=True, logger_method="error", traceback_string=True,
                 **print_kwargs)
             pass
 
atomicshop/wrappers/socketw/receiver.py
CHANGED
@@ -49,17 +49,17 @@ class Receiver:
             class_data = self.ssl_socket.recv(self.buffer_size_receive)
         except ConnectionAbortedError:
             message = "* Connection was aborted by the client. Exiting..."
-            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True
+            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True)
             # This will be treated as empty message - indicate that socket was closed and will be handled properly.
             pass
         except ConnectionResetError:
             message = "* Connection was forcibly closed by the client. Exiting..."
-            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True
+            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True)
             # This will be treated as empty message - indicate that socket was closed and will be handled properly.
             pass
         except ssl.SSLError:
             message = "* Encountered SSL error on packet receive. Exiting..."
-            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True
+            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True)
             # This will be treated as empty message - indicate that socket was closed and will be handled properly.
             pass
 
atomicshop/wrappers/socketw/sender.py
CHANGED
@@ -50,7 +50,7 @@ class Sender:
             self.logger.info(f"Sent the message to destination.")
         except ConnectionResetError:
             message = "* Couldn't reach the server - Connection was reset. Exiting..."
-            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True
+            print_api(message, logger=self.logger, logger_method='critical', traceback_string=True)
             # Since the connection is down, it will be handled in thread_worker_main
             function_result = False
             pass
atomicshop/wrappers/socketw/sni.py
CHANGED
@@ -267,7 +267,7 @@ class SNIHandler:
             print_api(message, **(print_kwargs or {}))
         except Exception as exception_object:
             message = f"SNI Handler: Undocumented exception general settings section: {exception_object}"
-            print_api(message, error_type=True, logger_method="error", traceback_string=True,
+            print_api(message, error_type=True, logger_method="error", traceback_string=True,
                       **(print_kwargs or {}))
             pass
 
atomicshop/wrappers/socketw/socket_server_tester.py
CHANGED
@@ -3,7 +3,7 @@ import threading
 from .socket_client import SocketClient
 from ..configparserw import ConfigParserWrapper
 from ..loggingw import loggingw
-from ...
+from ... import filesystem
 from ...file_io import jsons, file_io
 
 
@@ -57,14 +57,14 @@ def execute_test(config_static):
     loggingw.get_logger_with_stream_handler("network")
 
     # Get all the files in requests folder recursively.
-    request_file_list =
+    request_file_list = filesystem.get_paths_from_directory(config['requests_directory'], get_file=True)
     print(f"Found request files: {len(request_file_list)}")
 
     # Get contents of all request files to list of contents.
     requests_bytes_list: list = list()
     for request_file_path in request_file_list:
         if config['request_type'] == 'json':
-            request_file_content = jsons.read_json_file(request_file_path)
+            request_file_content = jsons.read_json_file(request_file_path.path)
 
             # If imported json is regular and not combined json.
             if isinstance(request_file_content, dict):
@@ -79,13 +79,13 @@ def execute_test(config_static):
             requests_bytes_list.extend(
                 get_key_values_from_json(json_dict, config['request_json_hex_key_list']))
         elif config['request_type'] == 'string':
-            request_file_content = file_io.read_file(request_file_path)
+            request_file_content = file_io.read_file(request_file_path.path)
             # Convert string content to bytes and append to list.
             requests_bytes_list.append(request_file_content.encode())
             print(f"Extracted 1 request.")
         elif config['request_type'] == 'binary':
             # The content is already in bytes, so just appending.
-            requests_bytes_list.append(file_io.read_file(request_file_path, 'rb'))
+            requests_bytes_list.append(file_io.read_file(request_file_path.path, 'rb'))
             print(f"Extracted 1 request.")
 
     print(f"Finished parsing. Parsed requests: {len(requests_bytes_list)}")
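The tester now receives `AtomicPath` objects and reads each file through `.path`. The per-`request_type` dispatch itself, reduced to the two simple branches (the 'json' branch, which extracts hex values by key, is omitted):

```python
from pathlib import Path

def load_request_bytes(file_path: str, request_type: str) -> bytes:
    # Mirrors the tester's dispatch on config['request_type'].
    if request_type == 'string':
        return Path(file_path).read_text().encode()
    elif request_type == 'binary':
        return Path(file_path).read_bytes()
    raise ValueError(f"Unsupported request_type: {request_type}")

print(load_request_bytes(__file__, 'binary')[:16])
```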