atomicshop 2.2.8__py3-none-any.whl → 2.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of atomicshop has been flagged as potentially problematic by the registry.

@@ -104,7 +104,7 @@ def initialize_mitm_server(config_static):
     system_logger.info("Importing engine modules.")
 
     # Get full paths of all the 'engine_config.ini' files.
-    engine_config_path_list, _ = get_file_paths_and_relative_directories(
+    engine_config_path_list = get_file_paths_and_relative_directories(
         directory_fullpath=config_static.ENGINES_DIRECTORY_PATH,
         file_name_check_tuple=(config_static.ENGINE_CONFIG_FILE_NAME, ComparisonOperator.EQ))
 
@@ -114,7 +114,7 @@ def initialize_mitm_server(config_static):
     for engine_config_path in engine_config_path_list:
         # Initialize engine.
         current_module = ModuleCategory(config_static.WORKING_DIRECTORY)
-        current_module.fill_engine_fields_from_config(engine_config_path)
+        current_module.fill_engine_fields_from_config(engine_config_path['path'])
         current_module.initialize_engine(logs_path=config['log']['logs_path'],
                                          logger=system_logger)
 
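Taken together, the two hunks above suggest that the contract of get_file_paths_and_relative_directories changed in 2.3.0: instead of a (paths, relative_directories) tuple of plain path strings, it now appears to return a single list whose items are dicts carrying at least a 'path' key. A minimal sketch of the apparent change, using a hypothetical stand-in function (the real atomicshop signature and dict shape may differ):

# Hypothetical stand-in mirroring the 2.3.0 behaviour implied by the diff:
# one list of dicts, each holding a file's full path under the 'path' key.
def get_file_paths_stub(directory_fullpath: str) -> list:
    return [{'path': directory_fullpath + '/example_engine/engine_config.ini'}]


# 2.2.8 callers unpacked a tuple:  engine_config_path_list, _ = ...
# 2.3.0 callers take the list as-is and index each item, as the hunk above does:
for entry in get_file_paths_stub('/engines'):
    print(entry['path'])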
@@ -0,0 +1,465 @@
+import datetime
+
+from .. import filesystem, domains, datetimes, urls
+from ..basics import dicts
+from ..file_io import tomls, xlsxs
+from ..wrappers.loggingw import reading
+from ..print_api import print_api
+
+
+def get_the_last_day_number(statistics_content: list, stop_after_lines: int = None) -> int:
+    """
+    This function gets the last day number from the statistics content.
+
+    :param statistics_content: list, of lines in the statistics content.
+    :param stop_after_lines: integer, if specified, the function will stop after the specified number of lines.
+    :return: integer, the last day number.
+    """
+
+    last_day_number = None
+    start_time_temp = None
+    for line_index, line in enumerate(statistics_content):
+        try:
+            request_time = datetime.datetime.strptime(line['request_time_sent'], '%Y-%m-%d %H:%M:%S.%f')
+        except ValueError:
+            continue
+
+        if not start_time_temp:
+            start_time_temp = request_time
+
+        if stop_after_lines:
+            if line_index == stop_after_lines:
+                break
+
+        last_day_number = datetimes.get_difference_between_dates_in_days(start_time_temp, request_time)
+    return last_day_number
+
+
+def create_empty_features_dict() -> dict:
+    """
+    This function creates an empty dictionary for the daily stats. This should be initiated for each 'host_type' of:
+    'domain', 'subdomain', 'url_no_parameters'.
+    :return: dict
+    """
+
+    return {
+        'total_count': {}, 'normal_count': {}, 'error_count': {},
+        'request_0_byte_count': {}, 'response_0_byte_count': {},
+        'request_sizes_list': {}, 'response_sizes_list': {},
+        'request_sizes_no_0_bytes_list': {}, 'response_sizes_no_0_bytes_list': {},
+        'average_request_size': {}, 'average_response_size': {},
+        'average_request_size_no_0_bytes': {}, 'average_response_size_no_0_bytes': {}}
+
+
+def add_to_count_to_daily_stats(
+        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str) -> None:
+    """
+    This function adds 1 to the 'count' feature of the current day in the daily stats.
+
+    :param daily_stats: dict, the daily statistics dict.
+    :param current_day: integer, the current day number.
+    :param last_day: integer, the last day number.
+    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
+    :param feature: string, the feature to add the count to. Can be: 'total_count', 'normal_count', 'error_count',
+        'request_0_byte_count', 'response_0_byte_count'.
+    :param host_name: string, the name of the host.
+
+    :return: None.
+    """
+
+    # Aggregate daily domain hits.
+    if host_name not in daily_stats[host_type][feature].keys():
+        daily_stats[host_type][feature][host_name] = {}
+        # Iterate from first day to the last day.
+        for day in range(0, last_day + 1):
+            daily_stats[host_type][feature][host_name][day] = 0
+
+    # Add count to current day.
+    daily_stats[host_type][feature][host_name][current_day] += 1
+
+
+def add_to_list_to_daily_stats(
+        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str,
+        size: float) -> None:
+    """
+    This function adds the 'size' to the 'feature' list of the current day in the daily stats.
+
+    :param daily_stats: dict, the daily statistics dict.
+    :param current_day: integer, the current day number.
+    :param last_day: integer, the last day number.
+    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
+    :param feature: string, the feature to add the count to. Can be: 'request_sizes_list', 'response_sizes_list',
+        'request_sizes_no_0_bytes_list', 'response_sizes_no_0_bytes_list'.
+    :param host_name: string, the name of the host.
+    :param size: float, the size in bytes to add to the list.
+
+    :return: None.
+    """
+
+    # Aggregate daily domain hits.
+    if host_name not in daily_stats[host_type][feature].keys():
+        daily_stats[host_type][feature][host_name] = {}
+        # Iterate from first day to the last day.
+        for day in range(0, last_day + 1):
+            daily_stats[host_type][feature][host_name][day] = []
+
+    # Add count to current day.
+    daily_stats[host_type][feature][host_name][current_day].append(size)
+
+
+def add_to_average_to_daily_stats(
+        daily_stats: dict, current_day: int, last_day: int, host_type: str, feature: str, host_name: str,
+        list_of_sizes: list) -> None:
+    """
+    This function adds the average size in bytes calculated from the 'list_of_sizes' to the 'feature' of the current
+    day in the daily stats.
+
+    :param daily_stats: dict, the daily statistics dict.
+    :param current_day: integer, the current day number.
+    :param last_day: integer, the last day number.
+    :param host_type: string, the type of the host. Can be: 'domain', 'subdomain', 'url_no_parameters'.
+    :param feature: string, the feature to add the count to. Can be: 'average_request_size', 'average_response_size',
+        'average_request_size_no_0_bytes', 'average_response_size_no_0_bytes'.
+    :param host_name: string, the name of the host.
+    :param list_of_sizes: list, the list of sizes to calculate the average from.
+
+    :return: None.
+    """
+
+    # Aggregate daily domain hits.
+    if host_name not in daily_stats[host_type][feature].keys():
+        daily_stats[host_type][feature][host_name] = {}
+        # Iterate from first day to the last day.
+        for day in range(0, last_day + 1):
+            daily_stats[host_type][feature][host_name][day] = 0
+
+    # If the list of size is empty, add 0 to the average, since we cannot divide by 0.
+    if len(list_of_sizes) == 0:
+        daily_stats[host_type][feature][host_name][current_day] = 0
+    else:
+        daily_stats[host_type][feature][host_name][current_day] = sum(list_of_sizes) / len(list_of_sizes)
+
+
+def analyze(main_file_path: str):
+    """
+    This function is the main function for the statistic analyzer.
+    :param main_file_path: Path to the main file that is calling this function (__file__).
+    :return:
+    """
+
+    # Get the config and set variables.
+    script_directory: str = filesystem.get_file_directory(main_file_path)
+    config_path: str = filesystem.add_object_to_path(script_directory, 'config_stats.toml')
+    config: dict = tomls.read_toml_file(config_path)
+    summary_path: str = filesystem.check_absolute_path___add_full(config['report_file_path'], script_directory)
+
+    # Get the content from statistics files.
+    statistics_content: list = reading.get_logs(
+        config['statistic_files_path'], pattern='statistics*.csv', log_type='csv',
+    )
+
+    # Initialize loop.
+    line_total_count: int = len(statistics_content)
+    start_time = None
+    last_day_number = None
+    overall_stats: dict = {
+        'domain': {'total_count': {}, 'normal_count': {}, 'error_count': {}},
+        'subdomain': {'total_count': {}, 'normal_count': {}, 'error_count': {}}
+    }
+    daily_stats: dict = {
+        'domain': create_empty_features_dict(),
+        'subdomain': create_empty_features_dict(),
+        'url_no_parameters': create_empty_features_dict()
+    }
+
+    # Start the main loop.
+    for line_index, line in enumerate(statistics_content):
+        # Converting time string to object.
+        # If the time string is not of the specific format, continue to the next line.
+        try:
+            request_time = datetime.datetime.strptime(line['request_time_sent'], '%Y-%m-%d %H:%M:%S.%f')
+        except ValueError:
+            continue
+
+        if not start_time:
+            start_time = request_time
+
+        # For testing, you can set the 'break_after_lines' to an integer, which symbolizes the number of the line
+        # of the 'statistics_content' to stop the loop after.
+        break_after_lines = None
+
+        # Find the last day number. If 'break_after_lines' is specified, the loop will stop after the specified line.
+        if not last_day_number:
+            last_day_number = get_the_last_day_number(statistics_content, break_after_lines)
+
+        if break_after_lines:
+            if line_index == break_after_lines:
+                break
+
+        if config['strings_to_include_in_subdomain'] and config['strings_to_include_in_subdomain'] != ['']:
+            # Checking that 'strings_to_include_in_subdomain' are in the subdomain, if not, continue to the next line.
+            if not any(string in line['host'] for string in config['strings_to_include_in_subdomain']):
+                continue
+
+        if config['strings_to_exclude_from_subdomain'] and config['strings_to_exclude_from_subdomain'] != ['']:
+            # Checking that 'strings_to_exclude_from_subdomain' are not in the subdomain, if they are, continue.
+            if any(string in line['host'] for string in config['strings_to_exclude_from_subdomain']):
+                continue
+
+        # Get the subdomain with the main domain from the 'host' column of current line.
+        subdomain = line['host']
+        # Get the main domain from the subdomain.
+        # Check if suffix of the 'host' is '.com'.
+        if line['host'].endswith('.com'):
+            # Get only the main domain.
+            main_domain = line['host'].split('.')[-2] + '.com'
+        # If the suffix is not '.com', use the 'domains' library to get the main domain.
+        else:
+            # This is the slowest part of the whole loop.
+            main_domain = domains.get_registered_domain(line['host'])
+
+        # If the domain is empty, continue to the next line.
+        if not main_domain:
+            continue
+
+        # If the domain is already in the dict, add 1 to the counter, else add the key to the dict.
+        if main_domain in overall_stats['domain']['total_count'].keys():
+            overall_stats['domain']['total_count'][main_domain] = (
+                    overall_stats['domain']['total_count'][main_domain] + 1)
+        else:
+            overall_stats['domain']['total_count'][main_domain] = 1
+
+        # If the subdomain is already in the dict, add 1 to the counter, else add the key to the dict.
+        if subdomain in overall_stats['subdomain']['total_count'].keys():
+            overall_stats['subdomain']['total_count'][subdomain] = (
+                    overall_stats['subdomain']['total_count'][subdomain] + 1)
+        else:
+            # overall_stats['subdomain']['total_count'] = {}
+            overall_stats['subdomain']['total_count'][subdomain] = 1
+
+        # Check if there is an error in the line and count the domain under 'error_count' key.
+        if line['error'] != '':
+            # If the domain is already in the dict, add 1 to the counter, else add the key to the dict.
+            if main_domain in overall_stats['domain']['error_count'].keys():
+                overall_stats['domain']['error_count'][main_domain] = (
+                        overall_stats['domain']['error_count'][main_domain] + 1)
+            else:
+                # overall_stats['domain']['total_count'] = {}
+                overall_stats['domain']['error_count'][main_domain] = 1
+
+            # If the subdomain is already in the dict, add 1 to the counter, else add the key to the dict.
+            if subdomain in overall_stats['subdomain']['error_count'].keys():
+                overall_stats['subdomain']['error_count'][subdomain] = (
+                        overall_stats['subdomain']['error_count'][subdomain] + 1)
+            else:
+                # overall_stats['subdomain']['total_count'] = {}
+                overall_stats['subdomain']['error_count'][subdomain] = 1
+        else:
+            # If the domain is already in the dict, add 1 to the counter, else add the key to the dict.
+            if main_domain in overall_stats['domain']['normal_count'].keys():
+                overall_stats['domain']['normal_count'][main_domain] = (
+                        overall_stats['domain']['normal_count'][main_domain] + 1)
+            else:
+                # overall_stats['domain']['total_count'] = {}
+                overall_stats['domain']['normal_count'][main_domain] = 1
+
+            # If the subdomain is already in the dict, add 1 to the counter, else add the key to the dict.
+            if subdomain in overall_stats['subdomain']['normal_count'].keys():
+                overall_stats['subdomain']['normal_count'][subdomain] = (
+                        overall_stats['subdomain']['normal_count'][subdomain] + 1)
+            else:
+                # overall_stats['subdomain']['total_count'] = {}
+                overall_stats['subdomain']['normal_count'][subdomain] = 1
+
+        # Get the URL without parameters.
+        url = line['host'] + line['path']
+        url_no_parameters = urls.url_parser(url)['path']
+
+        # Get the request and response sizes.
+        # If the size is not numeric that can be converted to integer, set it to None.
+        # Since, probably there was an SSL 'error' in the line.
+        try:
+            request_size = int(line['request_size_bytes'])
+            response_size = int(line['response_size_bytes'])
+        except ValueError:
+            request_size = None
+            response_size = None
+
+        # Start Day aggregation ========================================================================================
+        # Daily stats.
+        day_number = datetimes.get_difference_between_dates_in_days(start_time, request_time)
+
+        # Add 1 to the total count of the current day.
+        add_to_count_to_daily_stats(
+            daily_stats, day_number, last_day_number, 'domain', 'total_count', main_domain)
+        add_to_count_to_daily_stats(
+            daily_stats, day_number, last_day_number, 'subdomain', 'total_count', subdomain)
+        add_to_count_to_daily_stats(
+            daily_stats, day_number, last_day_number, 'url_no_parameters', 'total_count', url_no_parameters)
+
+        # Handle line if it has error.
+        if line['error'] != '':
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'error_count', main_domain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'error_count', subdomain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'error_count', url_no_parameters)
+        else:
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'normal_count', main_domain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'normal_count', subdomain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'normal_count', url_no_parameters)
+
+        if request_size == 0:
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'request_0_byte_count',
+                main_domain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'request_0_byte_count',
+                subdomain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'request_0_byte_count',
+                url_no_parameters)
+
+        if response_size == 0:
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'response_0_byte_count',
+                main_domain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'response_0_byte_count',
+                subdomain)
+            add_to_count_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'response_0_byte_count',
+                url_no_parameters)
+
+        if request_size is not None and response_size is not None:
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'request_sizes_list', main_domain, request_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'request_sizes_list', subdomain, request_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'request_sizes_list', url_no_parameters,
+                request_size)
+
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'response_sizes_list', main_domain, response_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'response_sizes_list', subdomain, response_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'response_sizes_list', url_no_parameters,
+                response_size)
+
+        if request_size != 0 and request_size is not None:
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'request_sizes_no_0_bytes_list',
+                main_domain, request_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'request_sizes_no_0_bytes_list',
+                subdomain, request_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'request_sizes_no_0_bytes_list',
+                url_no_parameters, request_size)
+
+        if response_size != 0 and response_size is not None:
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'domain', 'response_sizes_no_0_bytes_list',
+                main_domain, response_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'subdomain', 'response_sizes_no_0_bytes_list',
+                subdomain, response_size)
+            add_to_list_to_daily_stats(
+                daily_stats, day_number, last_day_number, 'url_no_parameters', 'response_sizes_no_0_bytes_list',
+                url_no_parameters, response_size)
+
+        print_api(f'Processing line: {line_index+1}/{line_total_count}', print_end='\r')
+
+    # Calculate daily average request and response sizes.
+    for host_type, features in daily_stats.items():
+        for feature, hosts in features.items():
+            if feature == 'request_sizes_list':
+                feature_name = 'average_request_size'
+            elif feature == 'response_sizes_list':
+                feature_name = 'average_response_size'
+            elif feature == 'request_sizes_no_0_bytes_list':
+                feature_name = 'average_request_size_no_0_bytes'
+            elif feature == 'response_sizes_no_0_bytes_list':
+                feature_name = 'average_response_size_no_0_bytes'
+            else:
+                continue
+
+            for host_name, days in hosts.items():
+                for day, sizes in days.items():
+                    add_to_average_to_daily_stats(
+                        daily_stats, day, last_day_number, host_type, feature_name, host_name, sizes)
+
+    # Sorting overall stats.
+    sorted_overall_stats: dict = {
+        'domain': {'total_count': {}, 'normal_count': {}, 'error_count': {}},
+        'subdomain': {'total_count': {}, 'normal_count': {}, 'error_count': {}}
+    }
+    for feature_dict, feature_dict_value in overall_stats.items():
+        for feature, feature_value in feature_dict_value.items():
+            sorted_overall_stats[feature_dict][feature] = (
+                dicts.sort_by_values(feature_value, reverse=True))
+
+    # Create combined dictionary of the sorted statistics to export to XLSX file.
+    combined_sorted_stats = {}
+    # Add overall stats.
+    for feature_dict, feature_dict_value in sorted_overall_stats.items():
+        for feature, feature_value in feature_dict_value.items():
+            for feature_index, (host_name, counter) in enumerate(feature_value.items()):
+                if feature_index == 0:
+                    try:
+                        combined_sorted_stats[f'overall_stats']['host_name'].append('')
+                        combined_sorted_stats[f'overall_stats']['counter'].append('')
+                        combined_sorted_stats[f'overall_stats']['host_name'].append(f'{feature_dict}_{feature}')
+                        combined_sorted_stats[f'overall_stats']['counter'].append('counter')
+                    except KeyError:
+                        combined_sorted_stats[f'overall_stats'] = \
+                            {f'host_name': [f'{feature_dict}_{feature}'], 'counter': ['counter']}
+
+                combined_sorted_stats[f'overall_stats']['host_name'].append(host_name)
+                combined_sorted_stats[f'overall_stats']['counter'].append(counter)
+
+    feature_name = ''
+    # Add daily stats to combined dict. Each day will be a column.
+    for host_type, features in daily_stats.items():
+        for feature, hosts in features.items():
+            if 'count' in feature:
+                feature_name = 'counts'
+            elif 'list' in feature:
+                feature_name = 'lists'
+            elif 'average' in feature:
+                feature_name = 'averages'
+
+            for feature_index, (host_name, days) in enumerate(hosts.items()):
+                if feature_index == 0:
+                    try:
+                        combined_sorted_stats[f'daily_{feature_name}']['host_name'].append('')
+                        for day in days.keys():
+                            combined_sorted_stats[f'daily_{feature_name}']['Day' + str(day)].append('')
+                        combined_sorted_stats[f'daily_{feature_name}']['host_name'].append(f'{host_type}_{feature}')
+                        for day in days.keys():
+                            (combined_sorted_stats[f'daily_{feature_name}']['Day' + str(day)].
+                             append('Day' + str(day)))
+                    except KeyError:
+                        combined_sorted_stats[f'daily_{feature_name}'] = {f'host_name': [f'{host_type}_{feature}']}
+                        for day in days.keys():
+                            combined_sorted_stats[f'daily_{feature_name}']['Day' + str(day)] = ['Day' + str(day)]
+
+                combined_sorted_stats[f'daily_{feature_name}']['host_name'].append(host_name)
+                for day_number, counter in days.items():
+                    combined_sorted_stats[f'daily_{feature_name}']['Day' + str(day_number)].append(counter)
+
+    try:
+        xlsxs.write_xlsx(combined_sorted_stats, file_path=summary_path)
+    except FileNotFoundError:
+        directory_path = filesystem.get_file_directory(summary_path)
+        print_api(f'Directory does not exist, creating it: {directory_path}')
+        filesystem.create_directory(directory_path)
+        xlsxs.write_xlsx(combined_sorted_stats, file_path=summary_path)
+
+    return
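For orientation, analyze() is driven entirely by a 'config_stats.toml' placed next to the calling script: it loads 'statistics*.csv' logs from config['statistic_files_path'], aggregates overall and per-day counts and sizes, and writes the combined result to an XLSX at config['report_file_path']. A hypothetical caller sketch follows; the import path is an assumption, the TOML key names are the ones the code above reads, and all values are examples:

# run_stats.py - hypothetical driver script, saved next to config_stats.toml.
#
# Example config_stats.toml (key names taken from analyze() above; values illustrative):
#   report_file_path = "report/statistics_summary.xlsx"
#   statistic_files_path = "C:/mitm/logs"
#   strings_to_include_in_subdomain = [""]    # [""] effectively disables the filter
#   strings_to_exclude_from_subdomain = [""]

from atomicshop.mitm.statistic_analyzer import analyze  # import path is an assumption

# Passing __file__ lets analyze() resolve config_stats.toml relative to this script.
analyze(__file__)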
@@ -17,6 +17,11 @@ for connection in psutil.net_connections():
         command_line = psutil.Process(connection.pid).cmdline()
         # Command line object is returned as list of parameters. We need 'shlex.join' to join the iterables
         # to regular, readable string.
-        print(shlex.join(command_line))
+        result = shlex.join(command_line)
+        # If the result is still a PID, we'll try to get process name.
+        if result.isnumeric():
+            # Get the process name from the connection PID.
+            result = psutil.Process(connection.pid).name()
+        print(result)
         # Break the loop, when first match is found.
         break
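Per the hunk header, this script walks psutil.net_connections() looking for a matching connection; the change keeps the shlex.join() output but falls back to the process name when the joined command line is still just a numeric PID string. A self-contained sketch of the same pattern (the matching logic of the surrounding script is omitted):

import shlex
import psutil

for connection in psutil.net_connections():
    if connection.pid is None:  # some sockets have no owning process
        continue
    command_line = psutil.Process(connection.pid).cmdline()
    # Join the cmdline list into one readable string.
    result = shlex.join(command_line)
    # Fall back to the process name when the joined command line is only a number.
    if result.isnumeric():
        result = psutil.Process(connection.pid).name()
    print(result)
    break  # stop at the first match, as the original script does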
@@ -0,0 +1,2 @@
+FACT_ADDRESS: str = 'http://localhost:5000'
+FIRMWARE_ENDPOINT: str = '/rest/firmware'
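One small observation on these defaults: FIRMWARE_ENDPOINT already begins with '/', and the uploader below joins the two constants with another '/' (f'{FACT_ADDRESS}/{FIRMWARE_ENDPOINT}'), which yields a double slash. Most servers tolerate this, but a normalized join avoids it; a sketch:

FACT_ADDRESS = 'http://localhost:5000'
FIRMWARE_ENDPOINT = '/rest/firmware'

url = f'{FACT_ADDRESS}/{FIRMWARE_ENDPOINT}'
print(url)  # http://localhost:5000//rest/firmware  (double slash)

# Normalized join that strips the duplicate separator:
url = f"{FACT_ADDRESS.rstrip('/')}/{FIRMWARE_ENDPOINT.lstrip('/')}"
print(url)  # http://localhost:5000/rest/firmware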
@@ -0,0 +1,80 @@
+import requests
+import base64
+
+from . import fact_config
+from ... print_api import print_api
+from ... file_io import file_io
+
+
+def upload_firmware(firmware_file_path: str, params: dict, use_all_analysis_systems: bool = False):
+    """
+    Upload firmware binary file to the server.
+
+    :param firmware_file_path: Path to firmware file.
+    :param use_all_analysis_systems: Use all analysis systems.
+    :param params: Parameters:
+        {
+            "device_name": <string>,
+            "device_part": <string>,        # new in FACT 2.5
+            "device_class": <string>,
+            "file_name": <string>,
+            "version": <string>,            # supersedes firmware_version field
+            "vendor": <string>,
+            "release_date": <string>,
+            "tags": <string>,
+            "requested_analysis_systems": <list>,
+            "binary": <string(base64)>
+        }
+
+    'device_name' and 'tags' aren't required.
+    'binary' and 'file_name' is filled by this function from the firmware file.
+    'requested_analysis_systems' is filled by this function if 'use_all_analysis_systems' is True.
+
+    Example from https://github.com/fkie-cad/FACT_core/wiki/Rest-API#restfirmwareuid:
+        {
+            "device_name": "rest_test",
+            "device_part": <string>,
+            "device_class": "Router",
+            "file_name": "firmware.bin",
+            "version": "1.1",
+            "vendor": "AVM",
+            "release_date": "2011-01-01",
+            "tags": "tag1,tag2",
+            "requested_analysis_systems": ["file_type", "file_hashes"],
+            "binary": "dGVzdDEyMzQgdBzb21lIHRlc3QgZQ=="
+        }
+
+    :return: None.
+    """
+
+    url: str = f'{fact_config.FACT_ADDRESS}/{fact_config.FIRMWARE_ENDPOINT}'
+
+    # Add all analysis systems to the list.
+    if use_all_analysis_systems:
+        params['requested_analysis_systems'] = [
+            'binwalk', 'cpu_architecture', 'crypto_hints', 'crypto_material', 'cve_lookup', 'cwe_checker',
+            'device_tree', 'elf_analysis', 'exploit_mitigations', 'file_hashes', 'file_system_metadata',
+            'file_type', 'hardware_analysis', 'hashlookup', 'information_leaks', 'init_systems', 'input_vectors',
+            'interesting_uris', 'ip_and_uri_finder', 'ipc_analyzer', 'kernel_config', 'known_vulnerabilities',
+            'printable_strings', 'qemu_exec', 'software_components', 'source_code_analysis', 'string_evaluator',
+            'tlsh', 'unpacker', 'users_and_passwords'
+        ]
+
+    # Open firmware file.
+    firmware_binary_content = file_io.read_file(firmware_file_path, file_mode='rb')
+    # Encode firmware file to base64.
+    params['binary'] = base64.b64encode(firmware_binary_content)
+
+    # Send firmware file to the server.
+    response = requests.put(
+        url,
+        params=params,
+    )
+
+    # Check response status code.
+    if response.status_code == 200:
+        # Print response.
+        print_api(response.json())
+    else:
+        # Print error.
+        print_api('Error: ' + str(response.status_code), error_type=True, logger_method='critical')
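A hypothetical usage sketch, with metadata taken from the wiki example quoted in the docstring above. Two caveats in the new code are worth flagging: requests' params= keyword serializes the payload into the URL query string, while the FACT wiki example shows a JSON body (i.e. json=), and base64.b64encode() returns bytes, which would need .decode('ascii') before going into a JSON payload; whether either matters depends on the FACT server's tolerance.

# Illustrative call; the file path and metadata values are examples only.
firmware_params = {
    'device_name': 'rest_test',
    'device_class': 'Router',
    'version': '1.1',
    'vendor': 'AVM',
    'release_date': '2011-01-01',
}
upload_firmware('firmware.bin', params=firmware_params, use_all_analysis_systems=True)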