atomicshop 2.14.12__py3-none-any.whl → 2.14.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of atomicshop might be problematic. Click here for more details.
- atomicshop/__init__.py +1 -1
- atomicshop/config_init.py +1 -1
- atomicshop/filesystem.py +14 -3
- atomicshop/mitm/import_config.py +3 -3
- atomicshop/mitm/statistic_analyzer.py +79 -478
- atomicshop/mitm/statistic_analyzer_helper/__init__.py +0 -0
- atomicshop/mitm/statistic_analyzer_helper/analyzer_helper.py +136 -0
- atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +330 -0
- atomicshop/question_answer_engine.py +2 -2
- atomicshop/wrappers/elasticsearchw/infrastructure.py +1 -1
- atomicshop/wrappers/loggingw/reading.py +2 -3
- atomicshop/wrappers/socketw/socket_client.py +1 -1
- {atomicshop-2.14.12.dist-info → atomicshop-2.14.14.dist-info}/METADATA +1 -1
- {atomicshop-2.14.12.dist-info → atomicshop-2.14.14.dist-info}/RECORD +17 -14
- {atomicshop-2.14.12.dist-info → atomicshop-2.14.14.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.14.12.dist-info → atomicshop-2.14.14.dist-info}/WHEEL +0 -0
- {atomicshop-2.14.12.dist-info → atomicshop-2.14.14.dist-info}/top_level.txt +0 -0
|
File without changes
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
|
|
3
|
+
from ... import datetimes
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def get_the_last_day_number(statistics_content: list, stop_after_lines: int = None) -> int:
    """
    This function gets the last day number from the statistics content.

    The day number is the difference in days between the first line with a
    parsable 'request_time_sent' and the last such line processed.

    :param statistics_content: list, of lines (dicts) in the statistics content.
    :param stop_after_lines: integer, if specified, the function will stop after the specified number of lines.
    :return: integer, the last day number. Returns None when no line contained a
        parsable 'request_time_sent' value.
    """

    start_time_temp = None
    last_request_time = None
    for line_index, line in enumerate(statistics_content):
        try:
            request_time = datetime.datetime.strptime(line['request_time_sent'], '%Y-%m-%d %H:%M:%S.%f')
        except ValueError:
            # Skip lines whose timestamp doesn't parse (e.g. repeated header rows).
            continue

        # The first parsable timestamp is the reference point (day 0).
        if not start_time_temp:
            start_time_temp = request_time

        # Stop before processing the line at index 'stop_after_lines',
        # matching the original break-before-count semantics.
        if stop_after_lines:
            if line_index == stop_after_lines:
                break

        last_request_time = request_time

    # No valid timestamp was found at all.
    if last_request_time is None:
        return None

    # Compute the day difference once after the loop, instead of calling the
    # helper on every single iteration as before (same result, O(1) calls).
    return datetimes.get_difference_between_dates_in_days(start_time_temp, last_request_time)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def create_empty_features_dict() -> dict:
    """
    Build an empty daily-stats feature dictionary. One of these should be
    initiated for each 'host_type' of: 'domain', 'subdomain', 'url_no_parameters'.

    :return: dict, every known feature name mapped to a fresh empty dict.
    """

    # Feature names grouped as: counters, raw size lists, and averages.
    feature_names: tuple = (
        'total_count', 'normal_count', 'error_count',
        'request_0_byte_count', 'response_0_byte_count',
        'request_sizes_list', 'response_sizes_list',
        'request_sizes_no_0_bytes_list', 'response_sizes_no_0_bytes_list',
        'average_request_size', 'average_response_size',
        'average_request_size_no_0_bytes', 'average_response_size_no_0_bytes')

    # A comprehension guarantees a distinct empty dict per feature.
    return {feature_name: {} for feature_name in feature_names}
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def add_to_count_to_daily_stats(
        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str) -> None:
    """
    Increment the 'feature' counter of 'host_name' for the current day in the daily stats.

    :param daily_stats: dict, the daily statistics dict.
    :param current_day: integer, the current day number.
    :param last_day: integer, the last day number.
    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
    :param feature: string, the feature to add the count to. Can be: 'total_count', 'normal_count', 'error_count',
        'request_0_byte_count', 'response_0_byte_count'.
    :param host_name: string, the name of the host.

    :return: None.
    """

    feature_dict = daily_stats[host_type][feature]

    # First time this host is seen: zero-initialize a counter for every day
    # from day 0 up to and including the last day.
    if host_name not in feature_dict:
        feature_dict[host_name] = dict.fromkeys(range(last_day + 1), 0)

    # Increment the counter of the current day.
    feature_dict[host_name][current_day] += 1
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def add_to_list_to_daily_stats(
        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str,
        size: float) -> None:
    """
    Append the 'size' to the 'feature' list of 'host_name' for the current day in the daily stats.

    :param daily_stats: dict, the daily statistics dict.
    :param current_day: integer, the current day number.
    :param last_day: integer, the last day number.
    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
    :param feature: string, the feature to add the count to. Can be: 'request_sizes_list', 'response_sizes_list',
        'request_sizes_no_0_bytes_list', 'response_sizes_no_0_bytes_list'.
    :param host_name: string, the name of the host.
    :param size: float, the size in bytes to add to the list.

    :return: None.
    """

    feature_dict = daily_stats[host_type][feature]

    # First time this host is seen: create a distinct empty list for every day
    # from day 0 up to and including the last day (fromkeys would share one list).
    if host_name not in feature_dict:
        feature_dict[host_name] = {day: [] for day in range(last_day + 1)}

    # Append the size to the current day's list.
    feature_dict[host_name][current_day].append(size)
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def add_to_average_to_daily_stats(
        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str,
        list_of_sizes: list) -> None:
    """
    Store the average size in bytes calculated from the 'list_of_sizes' under the
    'feature' of 'host_name' for the current day in the daily stats.

    :param daily_stats: dict, the daily statistics dict.
    :param current_day: integer, the current day number.
    :param last_day: integer, the last day number.
    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
    :param feature: string, the feature to add the count to. Can be: 'average_request_size', 'average_response_size',
        'average_request_size_no_0_bytes', 'average_response_size_no_0_bytes'.
    :param host_name: string, the name of the host.
    :param list_of_sizes: list, the list of sizes to calculate the average from.

    :return: None.
    """

    feature_dict = daily_stats[host_type][feature]

    # First time this host is seen: zero-initialize every day from day 0 up to
    # and including the last day.
    if host_name not in feature_dict:
        feature_dict[host_name] = dict.fromkeys(range(last_day + 1), 0)

    # An empty list keeps the 0 default (avoid division by zero); otherwise
    # store the arithmetic mean.
    if not list_of_sizes:
        feature_dict[host_name][current_day] = 0
    else:
        feature_dict[host_name][current_day] = sum(list_of_sizes) / len(list_of_sizes)
|
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
import statistics
|
|
2
|
+
from typing import Literal
|
|
3
|
+
|
|
4
|
+
from ...print_api import print_api
|
|
5
|
+
from ...wrappers.loggingw import reading, consts
|
|
6
|
+
from ...file_io import csvs
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def calculate_moving_average(
        file_path: str,
        moving_average_window_days: int,
        top_bottom_deviation_percentage: float,
        get_deviation_for_last_day_only: bool = False,
        print_kwargs: dict = None
) -> list:
    """
    This function calculates the moving average of the daily statistics.

    :param file_path: string, the path to the 'statistics.csv' file.
    :param moving_average_window_days: integer, the window size for the moving average.
    :param top_bottom_deviation_percentage: float, the percentage of deviation from the moving average to the top or
        bottom.
    :param get_deviation_for_last_day_only: bool, if True, only the last day will be analyzed.
        Example: With 'moving_average_window_days=5', the last 6 days will be analyzed.
        5 days for moving average and the last day for deviation.
        File names example:
            statistics_2021-01-01.csv
            statistics_2021-01-02.csv
            statistics_2021-01-03.csv
            statistics_2021-01-04.csv
            statistics_2021-01-05.csv
            statistics_2021-01-06.csv
        Files 01 to 05 will be used for moving average and the file 06 for deviation.
        Meaning the average calculated for 2021-01-06 will be compared to the values moving average of 2021-01-01
        to 2021-01-05.
    :param print_kwargs: dict, the print_api arguments.
    :return: list, the deviation entries produced by 'find_deviation_from_moving_average'.
    """

    # Suffix pattern that the midnight log rotation appends to file names.
    date_pattern: str = consts.DEFAULT_ROTATING_SUFFIXES_FROM_WHEN['midnight']

    # Get all the file paths and their midnight rotations.
    logs_paths: list = reading.get_logs_paths(
        log_file_path=file_path,
        date_pattern=date_pattern
    )

    # Keep only the last window + 1 files: the window for the moving average
    # and one extra day to check for deviation.
    if get_deviation_for_last_day_only:
        days_back_to_analyze: int = moving_average_window_days + 1
        logs_paths = logs_paths[-days_back_to_analyze:]

    statistics_content: dict = {}
    # Read each file to its day.
    for log_path_dict in logs_paths:
        date_string = log_path_dict['date_string']
        statistics_content[date_string] = {}

        statistics_content[date_string]['file'] = log_path_dict

        log_file_content, log_file_header = (
            csvs.read_csv_to_list_of_dicts_by_header(log_path_dict['file_path'], **(print_kwargs or {})))
        statistics_content[date_string]['content'] = log_file_content
        statistics_content[date_string]['header'] = log_file_header

        # Drop header/error rows before computing the day's statistics.
        statistics_content[date_string]['content_no_errors'] = get_content_without_errors(log_file_content)

        # Get the data dictionary from the statistics content.
        statistics_content[date_string]['statistics_daily'] = compute_statistics_from_content(
            statistics_content[date_string]['content_no_errors']
        )

    # Per-day moving averages; days without a full window behind them are absent.
    moving_average_dict: dict = compute_moving_averages_from_average_statistics(
        statistics_content,
        moving_average_window_days
    )

    # Add the moving average to the statistics content.
    for day, day_dict in statistics_content.items():
        try:
            day_dict['moving_average'] = moving_average_dict[day]
        except KeyError:
            # Day has no full window yet - store an empty placeholder.
            day_dict['moving_average'] = {}

    # Find deviation from the moving average to the bottom or top by specified percentage.
    deviation_list: list = find_deviation_from_moving_average(
        statistics_content, top_bottom_deviation_percentage)

    return deviation_list
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def get_content_without_errors(content: list) -> list:
    """
    Filter the 'statistics.csv' file content, dropping header and error lines.

    :param content: list, the content list (dicts keyed by column name).
    :return: list, the content without errors.
    """

    # A line whose 'host' equals the literal 'host' is a repeated header row;
    # a line with an empty 'command' is an error record. Keep everything else.
    return [
        line for line in content
        if line['host'] != 'host' and line['command'] != ''
    ]
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def get_data_dict_from_statistics_content(content: list) -> dict:
    """
    Build the per-host request/response size data dictionary from the
    'statistics.csv' file content.

    :param content: list, the content list.
    :return: dict, host name mapped to {'request_sizes': [...], 'response_sizes': [...]}.
    """

    hosts_requests_responses: dict = {}
    for line in content:
        # Create the host entry on first sight, then reuse it.
        host_entry = hosts_requests_responses.setdefault(
            line['host'], {'request_sizes': [], 'response_sizes': []})

        # Append the sizes, converting the CSV strings to integers.
        try:
            host_entry['request_sizes'].append(int(line['request_size_bytes']))
            host_entry['response_sizes'].append(int(line['response_size_bytes']))
        except ValueError:
            # Surface the offending line before re-raising the conversion error.
            print_api(line, color='yellow')
            raise

    return hosts_requests_responses
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def compute_statistics_from_data_dict(data_dict: dict):
    """
    Compute per-host count/mean/median statistics and store them back on the
    data dictionary, in place.

    :param data_dict: dict, the data dictionary (host mapped to its
        'request_sizes' and 'response_sizes' lists). Mutated in place: each
        host entry gains 'count', 'avg_request_size', 'median_request_size',
        'avg_response_size' and 'median_response_size'.
    :return: None.
    """

    for host_dict in data_dict.values():
        request_sizes = host_dict['request_sizes']
        response_sizes = host_dict['response_sizes']
        count = len(request_sizes)

        host_dict['count'] = count
        if count > 0:
            host_dict['avg_request_size'] = statistics.mean(request_sizes)
            host_dict['median_request_size'] = statistics.median(request_sizes)
            host_dict['avg_response_size'] = statistics.mean(response_sizes)
            host_dict['median_response_size'] = statistics.median(response_sizes)
        else:
            # No samples at all: every statistic defaults to 0.
            host_dict['avg_request_size'] = 0
            host_dict['median_request_size'] = 0
            host_dict['avg_response_size'] = 0
            host_dict['median_response_size'] = 0
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def compute_statistics_from_content(content: list):
    """
    Compute the per-host statistics from the 'statistics.csv' file content.

    :param content: list, the content list.
    :return: dict, host name mapped to its size lists plus the computed
        count/mean/median statistics.
    """

    # Gather the raw per-host size lists, then enrich them in place.
    per_host_data: dict = get_data_dict_from_statistics_content(content)
    compute_statistics_from_data_dict(per_host_data)
    return per_host_data
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def compute_moving_averages_from_average_statistics(
        average_statistics_dict: dict,
        moving_average_window_days: int
):
    """
    This function computes the moving averages from the average statistics dictionary.

    :param average_statistics_dict: dict, the average statistics dictionary,
        keyed by day string, ordered from the earliest day to the latest.
    :param moving_average_window_days: integer, the window size for the moving average.
    :return: dict, the moving averages' dictionary, keyed by day. Days earlier than
        the first full window are omitted.
    """

    # Materialize the per-day values once. Previously this list was rebuilt
    # inside the loop on every iteration, making the loop accidentally O(n^2).
    day_values: list = list(average_statistics_dict.values())

    moving_average: dict = {}
    for day_index, day in enumerate(average_statistics_dict):
        current_day = day_index + 1
        # Skip days that don't yet have a full window of history behind them.
        if current_day < moving_average_window_days:
            continue

        # Slice of the last 'moving_average_window_days' days, including the current day.
        last_x_window_days_content_list = day_values[current_day - moving_average_window_days:current_day]

        # Compute the moving averages for this day's window.
        moving_average[day] = compute_average_for_current_day_from_past_x_days(last_x_window_days_content_list)

    return moving_average
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def compute_average_for_current_day_from_past_x_days(previous_days_content_list: list) -> dict:
    """
    Compute the per-host moving averages over the given past days' window.

    :param previous_days_content_list: list, the previous days' content dicts,
        each containing a 'statistics_daily' mapping of host to daily statistics.
    :return: dict, host mapped to its moving averages ('ma_count',
        'ma_request_size', 'ma_response_size') plus the raw per-day value lists.
    """

    # Collect each host's per-day values across the window.
    collected: dict = {}
    for entry in previous_days_content_list:
        for host, host_dict in entry['statistics_daily'].items():
            host_values = collected.setdefault(
                host, {'counts': [], 'avg_request_sizes': [], 'avg_response_sizes': []})

            host_values['counts'].append(int(host_dict['count']))
            host_values['avg_request_sizes'].append(float(host_dict['avg_request_size']))
            host_values['avg_response_sizes'].append(float(host_dict['avg_response_size']))

    # Average the collected values into the per-host moving-average result,
    # carrying the raw lists along for later inspection.
    return {
        host: {
            'ma_count': statistics.mean(host_values['counts']),
            'ma_request_size': statistics.mean(host_values['avg_request_sizes']),
            'ma_response_size': statistics.mean(host_values['avg_response_sizes']),
            'counts': host_values['counts'],
            'avg_request_sizes': host_values['avg_request_sizes'],
            'avg_response_sizes': host_values['avg_response_sizes']
        }
        for host, host_values in collected.items()
    }
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def find_deviation_from_moving_average(
        statistics_content: dict,
        top_bottom_deviation_percentage: float
) -> list:
    """
    This function finds the deviation from the moving average to the bottom or top by specified percentage.

    :param statistics_content: dict, the statistics content dictionary, keyed by day;
        each day dict carries 'statistics_daily' and 'moving_average' entries.
    :param top_bottom_deviation_percentage: float, the percentage of deviation from the moving average to the top or
        bottom.
    :return: list, the deviation list.
    """

    def _check_deviation(
            check_type: Literal['count', 'avg_request_size', 'avg_response_size'],
            ma_check_type: Literal['ma_count', 'ma_request_size', 'ma_response_size'],
            day_statistics_content_dict: dict,
            moving_averages_dict: dict
    ):
        """
        This function checks the deviation for the host.

        Reads 'host', 'day' and 'deviation_list' from the enclosing loop scope;
        appends a deviation record when the day's value falls outside the
        tolerance band around the moving average.
        """

        nonlocal message

        # Tolerance band: moving average +/- (moving average * percentage).
        host_moving_average_by_type = moving_averages_dict[host][ma_check_type]
        check_type_moving_by_percent = (
            host_moving_average_by_type * top_bottom_deviation_percentage)
        check_type_moving_above = host_moving_average_by_type + check_type_moving_by_percent
        check_type_moving_below = host_moving_average_by_type - check_type_moving_by_percent

        deviation_type = None
        if day_statistics_content_dict[check_type] > check_type_moving_above:
            deviation_type = 'above'
        elif day_statistics_content_dict[check_type] < check_type_moving_below:
            deviation_type = 'below'

        if deviation_type:
            message = f'[{check_type}] is [{deviation_type}] the moving average.'
            deviation_list.append({
                'day': day,
                'host': host,
                'message': message,
                'value': day_statistics_content_dict[check_type],
                'ma_value': host_moving_average_by_type,
                'check_type': check_type,
                'percentage': top_bottom_deviation_percentage,
                # NOTE(review): this always records the upper bound, even for
                # 'below' deviations - confirm whether the crossed bound was intended.
                'ma_value_checked': check_type_moving_above,
                'deviation_type': deviation_type,
                'data': day_statistics_content_dict,
                'ma_data': moving_averages_dict[host]
            })

    deviation_list: list = []
    for day_index, (day, day_dict) in enumerate(statistics_content.items()):
        # If it's the first day, there is no previous day moving average.
        if day_index == 0:
            previous_day_moving_average_dict = {}
        else:
            previous_day_moving_average_dict = list(statistics_content.values())[day_index-1].get('moving_average', {})

        # If there is no moving average for previous day continue to the next day.
        if not previous_day_moving_average_dict:
            continue

        for host, host_dict in day_dict['statistics_daily'].items():
            # If the host is not in the moving averages, then this is clear deviation.
            # It means that in the current day, there were no requests for this host.
            if host not in previous_day_moving_average_dict:
                message = f'Host not in the moving averages: {host}'
                deviation_list.append({
                    'day': day,
                    'host': host,
                    'data': host_dict,
                    'message': message,
                    'type': 'clear'
                })
                continue

            # Check each tracked metric against its previous-day moving average.
            _check_deviation(
                'count', 'ma_count', host_dict, previous_day_moving_average_dict)
            _check_deviation(
                'avg_request_size', 'ma_request_size', host_dict, previous_day_moving_average_dict)
            _check_deviation(
                'avg_response_size', 'ma_response_size', host_dict, previous_day_moving_average_dict)

    return deviation_list
|
|
@@ -4,7 +4,7 @@ import sys
|
|
|
4
4
|
import json
|
|
5
5
|
|
|
6
6
|
# Custom class imports.
|
|
7
|
-
from atomicshop
|
|
7
|
+
from atomicshop import filesystem
|
|
8
8
|
|
|
9
9
|
|
|
10
10
|
class QAEngine:
|
|
@@ -20,7 +20,7 @@ class QAEngine:
|
|
|
20
20
|
# Get 'qa.json' full path.
|
|
21
21
|
qa_fullpath: str = script_directory + os.sep + self.qa_filename
|
|
22
22
|
# Check if it exists.
|
|
23
|
-
if not
|
|
23
|
+
if not filesystem.is_file_exists(qa_fullpath):
|
|
24
24
|
print(f'File non-existent: {qa_fullpath}')
|
|
25
25
|
sys.exit()
|
|
26
26
|
|
|
@@ -89,7 +89,7 @@ def is_elastic_config_file_exists(
|
|
|
89
89
|
config_file_path = config_basic.ELASTIC_CONFIG_FILE
|
|
90
90
|
|
|
91
91
|
# if not ubuntu_terminal.is_sudo_file_exists(config_file_path):
|
|
92
|
-
if not filesystem.
|
|
92
|
+
if not filesystem.is_file_exists(config_file_path):
|
|
93
93
|
if output_message:
|
|
94
94
|
message = f"Configuration file does not exist at {config_file_path}."
|
|
95
95
|
print_api(message, color='red', error_type=True)
|
|
@@ -92,13 +92,13 @@ def get_logs_paths(
|
|
|
92
92
|
logs_files[file_index]['datetime'] = datetime_object
|
|
93
93
|
logs_files[file_index]['date_string'] = date_string
|
|
94
94
|
|
|
95
|
-
if timestamp_float > latest_timestamp:
|
|
95
|
+
if timestamp_float and timestamp_float > latest_timestamp:
|
|
96
96
|
latest_timestamp = timestamp_float
|
|
97
97
|
|
|
98
98
|
# Now, there should be a file that doesn't have the string date pattern in the file name.
|
|
99
99
|
# We will add one day to the latest date that we found and assign to that file path.
|
|
100
100
|
for file_index, single_file in enumerate(logs_files):
|
|
101
|
-
if single_file['last_modified']
|
|
101
|
+
if single_file['last_modified'] is None:
|
|
102
102
|
latest_timestamp += 86400
|
|
103
103
|
logs_files[file_index]['last_modified'] = latest_timestamp
|
|
104
104
|
logs_files[file_index]['datetime'] = datetime.datetime.fromtimestamp(latest_timestamp)
|
|
@@ -122,7 +122,6 @@ def get_logs_paths(
|
|
|
122
122
|
elif len(logs_files) == 1 and previous_day_only:
|
|
123
123
|
logs_files = []
|
|
124
124
|
|
|
125
|
-
|
|
126
125
|
return logs_files
|
|
127
126
|
|
|
128
127
|
|
|
@@ -344,7 +344,7 @@ class SocketClient:
|
|
|
344
344
|
elif save_as_file and cert_file_path:
|
|
345
345
|
# If certificate from socket exists, then we don't need to get it from the socket and write to file.
|
|
346
346
|
# and we will return None, since no certificate was fetched.
|
|
347
|
-
if filesystem.
|
|
347
|
+
if filesystem.is_file_exists(cert_file_path):
|
|
348
348
|
return None
|
|
349
349
|
else:
|
|
350
350
|
print_api("Certificate from socket doesn't exist, fetching.", logger=self.logger)
|
|
@@ -1,11 +1,11 @@
|
|
|
1
|
-
atomicshop/__init__.py,sha256=
|
|
1
|
+
atomicshop/__init__.py,sha256=U_puqMerMK4pch7eXDpGxyvJxHra5X7FFCvSLPeMSUQ,124
|
|
2
2
|
atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
|
|
3
3
|
atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
|
|
4
4
|
atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
|
|
5
5
|
atomicshop/appointment_management.py,sha256=BsYH_PClTGLVazcuNjt30--hpXKYjSmHp1R1iQbM4Hc,7330
|
|
6
6
|
atomicshop/certificates.py,sha256=J-cmd6Rpq3zZyzsOH-GcdqIXdg2UwM8_E9mg7XtUph8,3787
|
|
7
7
|
atomicshop/command_line_processing.py,sha256=u5yT9Ger_cu7ni5ID0VFlRbVD46ARHeNC9tRM-_YXrQ,1038
|
|
8
|
-
atomicshop/config_init.py,sha256=
|
|
8
|
+
atomicshop/config_init.py,sha256=BSxc2FhytQPv06g5z9wbAXuA6oYCAsAJLxu_mTExhwI,2491
|
|
9
9
|
atomicshop/console_output.py,sha256=AOSJjrRryE97PAGtgDL03IBtWSi02aNol8noDnW3k6M,4667
|
|
10
10
|
atomicshop/console_user_response.py,sha256=31HIy9QGXa7f-GVR8MzJauQ79E_ZqAeagF3Ks4GGdDU,3234
|
|
11
11
|
atomicshop/datetimes.py,sha256=XF-6PbMlXgxHAOCVBGWUnAwDlFuZS1YFUGk6STFWsq0,18362
|
|
@@ -14,7 +14,7 @@ atomicshop/dns.py,sha256=h4uZKoz4wbBlLOOduL1GtRcTm-YpiPnGOEGxUm7hhOI,2140
|
|
|
14
14
|
atomicshop/domains.py,sha256=Rxu6JhhMqFZRcoFs69IoEd1PtYca0lMCG6F1AomP7z4,3197
|
|
15
15
|
atomicshop/emails.py,sha256=I0KyODQpIMEsNRi9YWSOL8EUPBiWyon3HRdIuSj3AEU,1410
|
|
16
16
|
atomicshop/file_types.py,sha256=-0jzQMRlmU1AP9DARjk-HJm1tVE22E6ngP2mRblyEjY,763
|
|
17
|
-
atomicshop/filesystem.py,sha256=
|
|
17
|
+
atomicshop/filesystem.py,sha256=aTnO1bcRiNWwkD787pKPi7ze-H95cV8YTc2WmLxcSk4,54539
|
|
18
18
|
atomicshop/functions.py,sha256=pK8hoCE9z61PtWCxQJsda7YAphrLH1wxU5x-1QJP-sY,499
|
|
19
19
|
atomicshop/get_process_list.py,sha256=hi1NOG-i8S6EcyQ6LTfP4pdxqRfjEijz9SZ5nEbcM9Q,6076
|
|
20
20
|
atomicshop/get_process_name_cmd_dll.py,sha256=CtaSp3mgxxJKCCVW8BLx6BJNx4giCklU_T7USiCEwfc,5162
|
|
@@ -30,7 +30,7 @@ atomicshop/print_api.py,sha256=j0bZ9b2rFKCcr0TVx1ARraVKeEs6JaaSgIlBdndy1nI,11600
|
|
|
30
30
|
atomicshop/process.py,sha256=U2gyRl0bw2138y-rOMABMVptRvAL81ZfX1JyfxJI_Oo,15973
|
|
31
31
|
atomicshop/python_file_patcher.py,sha256=kd3rBWvTcosLEk-7TycNdfKW9fZbe161iVwmH4niUo0,5515
|
|
32
32
|
atomicshop/python_functions.py,sha256=zJg4ogUwECxrDD7xdDN5JikIUctITM5lsyabr_ZNsRw,4435
|
|
33
|
-
atomicshop/question_answer_engine.py,sha256=
|
|
33
|
+
atomicshop/question_answer_engine.py,sha256=7nM6kGDSFjQNi87b87-kP9lYM0vTjBHn1rEQGNAfdGA,825
|
|
34
34
|
atomicshop/queues.py,sha256=Al0fdC3ZJmdKfv-PyBeIck9lnfLr82BYchvzr189gsI,640
|
|
35
35
|
atomicshop/scheduling.py,sha256=NF1_csXwez4RbHoRyUXQg1pGdswGmS1WdSGAQ54m6R8,4550
|
|
36
36
|
atomicshop/script_as_string_processor.py,sha256=JKENiARNuKQ0UegP2GvIVgNPbr6CzGiMElUVTddOX3A,1776
|
|
@@ -122,12 +122,12 @@ atomicshop/mains/installs/pycharm.py,sha256=uYTfME7hOeNkAsOZxDDPj2hDqmkxrFqVV6Nv
|
|
|
122
122
|
atomicshop/mitm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
123
123
|
atomicshop/mitm/config_editor.py,sha256=9ZwD6NGqgsr1f85NyFwWwM7FDut2vGQ4xari3vS9UT0,1130
|
|
124
124
|
atomicshop/mitm/connection_thread_worker.py,sha256=PQ8bwOgrPudYP5oPnSi_DWaKXOi038M8TMImlLkxuPI,20486
|
|
125
|
-
atomicshop/mitm/import_config.py,sha256=
|
|
125
|
+
atomicshop/mitm/import_config.py,sha256=GS-VeeJiVDN7V2faxZCrlwWVVSAmznbZWq5XHci0ffk,7964
|
|
126
126
|
atomicshop/mitm/initialize_engines.py,sha256=YnXPK1UKrmULnfL4zLo2LOpKWq-aGKzc9p3n8tfcYCM,8170
|
|
127
127
|
atomicshop/mitm/initialize_mitm_server.py,sha256=j1yMUbHsnFh9l5rFiUgBQA0mRZqREOKviP0frRzYikM,14611
|
|
128
128
|
atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
|
|
129
129
|
atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
|
|
130
|
-
atomicshop/mitm/statistic_analyzer.py,sha256=
|
|
130
|
+
atomicshop/mitm/statistic_analyzer.py,sha256=FdUmKVmDZp0-2ohBVwubG9v0wz8Wb-NIi50Qk8BR85E,22913
|
|
131
131
|
atomicshop/mitm/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
132
132
|
atomicshop/mitm/engines/create_module_template.py,sha256=tRjVSm1sD6FzML71Qbuwvita0qsusdFGm8NZLsZ-XMs,4853
|
|
133
133
|
atomicshop/mitm/engines/create_module_template_example.py,sha256=X5xhvbV6-g9jU_bQVhf_crZmaH50LRWz3bS-faQ18ds,489
|
|
@@ -139,6 +139,9 @@ atomicshop/mitm/engines/__reference_general/__init__.py,sha256=47DEQpj8HBSa-_TIm
|
|
|
139
139
|
atomicshop/mitm/engines/__reference_general/parser___reference_general.py,sha256=QolWZKm8SiPxxSoyWY_UK7ODam7EUMAgVfOPFnXxODE,2987
|
|
140
140
|
atomicshop/mitm/engines/__reference_general/recorder___reference_general.py,sha256=KENDVf9OwXD9gwSh4B1XxACCe7iHYjrvnW1t6F64wdE,695
|
|
141
141
|
atomicshop/mitm/engines/__reference_general/responder___reference_general.py,sha256=1AM49UaFTKA0AHw-k3SV3uH3QbG-o6ux0c-GoWkKNU0,6993
|
|
142
|
+
atomicshop/mitm/statistic_analyzer_helper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
143
|
+
atomicshop/mitm/statistic_analyzer_helper/analyzer_helper.py,sha256=pk6L1t1ea1kvlBoR9QEJptOmaX-mumhwLsP2GCKukbk,5920
|
|
144
|
+
atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py,sha256=yi3-8xYZEpCIQiKYHrw4wt0T00CeCXwypY32wlXigLg,13542
|
|
142
145
|
atomicshop/monitor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
143
146
|
atomicshop/monitor/change_monitor.py,sha256=K5NlVp99XIDDPnQQMdru4BDmua_DtcDIhVAzkTOvD5s,7673
|
|
144
147
|
atomicshop/monitor/checks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -198,7 +201,7 @@ atomicshop/wrappers/dockerw/install_docker.py,sha256=IKHInhSb9iO-g9zOYRrE4EX4eA2
|
|
|
198
201
|
atomicshop/wrappers/elasticsearchw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
199
202
|
atomicshop/wrappers/elasticsearchw/config_basic.py,sha256=fDujtrjEjbWiYh_WQ3OcYp_8mXhXPYeKLy4wSPL5qM0,1177
|
|
200
203
|
atomicshop/wrappers/elasticsearchw/elasticsearchw.py,sha256=7TqFdEFznO8NlligJhEKk1vm641ALpCYdaRl1uoXdzM,9768
|
|
201
|
-
atomicshop/wrappers/elasticsearchw/infrastructure.py,sha256=
|
|
204
|
+
atomicshop/wrappers/elasticsearchw/infrastructure.py,sha256=at0sD-SFtmEvfGyIU_YBEKoU-MNeVtDQSNscPm0JWLc,10368
|
|
202
205
|
atomicshop/wrappers/elasticsearchw/install_elastic.py,sha256=pblQBJ6o4ymp-g1EioBoUA6R1s-ZhA-FAoIcvc9Rpr0,8620
|
|
203
206
|
atomicshop/wrappers/elasticsearchw/queries/__init__.py,sha256=KBjT-bAt75CJsx1Apko9mpuFU4pfZV8DcGWQvpX65RU,78
|
|
204
207
|
atomicshop/wrappers/elasticsearchw/queries/aggregation.py,sha256=N9a5yMMnb10sMa_x1qJBFQpgyJ49UWo8_vxuqmUtZ1A,1742
|
|
@@ -240,7 +243,7 @@ atomicshop/wrappers/loggingw/formatters.py,sha256=7XUJvlB0CK4DCkEp8NTL0S0dkyrZD0
|
|
|
240
243
|
atomicshop/wrappers/loggingw/handlers.py,sha256=yFYBeTkxnpmtlauoH3ZEFEHUYQYu9YL-ycd9sYTvOl4,16928
|
|
241
244
|
atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
|
|
242
245
|
atomicshop/wrappers/loggingw/loggingw.py,sha256=lo4OZPXCbYZi3GqpaaJSs9SOGFfqD2EgHzzTK7f5IR4,11275
|
|
243
|
-
atomicshop/wrappers/loggingw/reading.py,sha256=
|
|
246
|
+
atomicshop/wrappers/loggingw/reading.py,sha256=b4-ibM5WwjEOanvHY3hIsu9-4b2RAdPYiCxvl7745fk,17521
|
|
244
247
|
atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
245
248
|
atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
|
|
246
249
|
atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -280,13 +283,13 @@ atomicshop/wrappers/socketw/get_process.py,sha256=APw_oOXsuR5KljYesd4J8MuzR-kaw2
|
|
|
280
283
|
atomicshop/wrappers/socketw/receiver.py,sha256=m8hXKOa8dqEQGUdcbYjshH8-j0CsMGRkge2ifYKhaAw,9050
|
|
281
284
|
atomicshop/wrappers/socketw/sender.py,sha256=hHpBLc0LCfOIUErq2mc0ATfp0tDDQ5XhcYT4hRAZARU,3680
|
|
282
285
|
atomicshop/wrappers/socketw/sni.py,sha256=GIm5uUJCh5i-pjY4FhSkoK4oo9uL_fFq1Mbr6PKXpBg,10014
|
|
283
|
-
atomicshop/wrappers/socketw/socket_client.py,sha256=
|
|
286
|
+
atomicshop/wrappers/socketw/socket_client.py,sha256=IvRnxeqsj5RGm4lR3btUz3MSxTWhe2q1X_zNoZ8tK-M,20229
|
|
284
287
|
atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5eyTx0UGBOJi74viowtpU5Jvs,6291
|
|
285
288
|
atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
|
|
286
289
|
atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
|
|
287
290
|
atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
|
|
288
|
-
atomicshop-2.14.
|
|
289
|
-
atomicshop-2.14.
|
|
290
|
-
atomicshop-2.14.
|
|
291
|
-
atomicshop-2.14.
|
|
292
|
-
atomicshop-2.14.
|
|
291
|
+
atomicshop-2.14.14.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
|
|
292
|
+
atomicshop-2.14.14.dist-info/METADATA,sha256=9F0J0JZEwuLAVo78ASBM9KOu4CHZdXEXJ9l8b57buzw,10479
|
|
293
|
+
atomicshop-2.14.14.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
|
|
294
|
+
atomicshop-2.14.14.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
|
|
295
|
+
atomicshop-2.14.14.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|