atomicshop 2.14.10a0__py3-none-any.whl → 2.14.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of atomicshop might be problematic.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
  """Atomic Basic functions and classes to make developer life easier"""
  
  __author__ = "Den Kras"
- __version__ = '2.14.10a0'
+ __version__ = '2.14.12'
atomicshop/basics/list_of_dicts.py CHANGED
@@ -1,5 +1,5 @@
  from operator import itemgetter
- from json import dumps, loads
+ import json
  
  from . import dicts, strings
  
@@ -150,7 +150,7 @@ def convert_to_set(list_of_dicts, sort_keys: bool = False) -> set:
      :return: set.
      """
  
-     return set(dumps(x, sort_keys=sort_keys) for x in list_of_dicts)
+     return set(json.dumps(x, sort_keys=sort_keys) for x in list_of_dicts)
  
  
  def convert_from_set(set_object: set) -> list:
@@ -161,4 +161,59 @@ def convert_from_set(set_object: set) -> list:
      :return: list of dicts.
      """
  
-     return [loads(x) for x in set_object]
+     return [json.loads(x) for x in set_object]
+ 
+ 
+ def summarize_entries(list_instance: list, list_of_keys_to_remove: list = None) -> list:
+     """
+     The function will summarize entries in a list of dicts.
+ 
+     :param list_instance: list of dicts, the entries to summarize.
+     :param list_of_keys_to_remove: list, the keys to remove from each entry before summarizing.
+     :return: list, of the summarized entries, each entry without the keys in 'list_of_keys_to_remove',
+         including the count of the entry.
+ 
+     --------------------------------------
+ 
+     Example:
+         list_instance = [
+             {'time': '2021-08-01 00:00:00', 'name': 'name1', 'cmdline': 'cmdline1', 'domain': 'domain1'},
+             {'time': '2021-08-01 00:00:00', 'name': 'name2', 'cmdline': 'cmdline2', 'domain': 'domain2'},
+             {'time': '2021-08-01 00:00:00', 'name': 'name1', 'cmdline': 'cmdline1', 'domain': 'domain1'}
+         ]
+ 
+         list_of_keys_to_remove = ['time', 'cmdline']
+ 
+         summarize_entries(list_instance, list_of_keys_to_remove)
+ 
+         Output:
+         [
+             {'name': 'name1', 'domain': 'domain1', 'count': 2},
+             {'name': 'name2', 'domain': 'domain2', 'count': 1}
+         ]
+     """
+ 
+     summed_entries: dict = dict()
+     for entry in list_instance:
+         # Copy the entry to new dict, since we're going to remove a key.
+         line_copied = entry.copy()
+ 
+         # Remove the keys in the 'list_of_keys_to_remove'.
+         if list_of_keys_to_remove:
+             for key in list_of_keys_to_remove:
+                 _ = line_copied.pop(key, None)
+ 
+         line_json_string = json.dumps(line_copied)
+         if line_json_string not in summed_entries:
+             summed_entries[line_json_string] = 1
+         else:
+             summed_entries[line_json_string] += 1
+ 
+     result_list: list = []
+     for json_string_record, count in summed_entries.items():
+         record = json.loads(json_string_record)
+         result_list.append(
+             {**record, 'count': count}
+         )
+ 
+     return result_list
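For orientation, a minimal usage sketch of the new summarize_entries helper, based only on the docstring shown in the diff above (the import path follows the atomicshop/basics/list_of_dicts.py layout listed in RECORD; the sample entries are hypothetical):

    from atomicshop.basics import list_of_dicts

    # Hypothetical entries; 'time' and 'cmdline' are dropped before counting duplicates.
    entries = [
        {'time': '2021-08-01 00:00:00', 'name': 'name1', 'cmdline': 'cmdline1', 'domain': 'domain1'},
        {'time': '2021-08-01 00:00:00', 'name': 'name2', 'cmdline': 'cmdline2', 'domain': 'domain2'},
        {'time': '2021-08-01 00:00:00', 'name': 'name1', 'cmdline': 'cmdline1', 'domain': 'domain1'},
    ]

    summary = list_of_dicts.summarize_entries(entries, ['time', 'cmdline'])
    # Expected per the docstring:
    # [{'name': 'name1', 'domain': 'domain1', 'count': 2},
    #  {'name': 'name2', 'domain': 'domain2', 'count': 1}]
    print(summary)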
atomicshop/datetimes.py CHANGED
@@ -70,13 +70,20 @@ DATE_TIME_STRING_FORMAT_SPECIFIERS_TO_REGEX: dict = {
  }
  
  
- def get_datetime_from_complex_string_by_pattern(complex_string: str, date_pattern: str) -> tuple[datetime, str, float]:
+ def get_datetime_from_complex_string_by_pattern(
+         complex_string: str,
+         date_pattern: str
+ ) -> tuple[
+     Union[datetime, None],
+     Union[str, None],
+     Union[float, None]
+ ]:
      """
      Function will get datetime object from a complex string by pattern.
  
      :param complex_string: string that contains date and time.
      :param date_pattern: pattern that will be used to extract date and time from the string.
-     :return: datetime object.
+     :return: tuple(datetime object, date string, timestamp float)
      """
  
      # Convert the date pattern to regex pattern
@@ -91,7 +98,7 @@ def get_datetime_from_complex_string_by_pattern(complex_string: str, date_patter
          date_timestamp = date_obj.timestamp()
          return date_obj, date_str.group(), date_timestamp
      else:
-         raise ValueError("No valid date found in the string")
+         return None, None, None
  
  
  def datetime_format_to_regex(format_str: str) -> str:
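A short sketch of the new failure behavior: per the hunk above, get_datetime_from_complex_string_by_pattern now returns a (None, None, None) tuple instead of raising ValueError when no date matches, so callers test the result rather than catching an exception (the file name and pattern below are illustrative):

    from atomicshop import datetimes

    dt_obj, date_str, timestamp = datetimes.get_datetime_from_complex_string_by_pattern(
        'service.log.2024-01-15', '%Y-%m-%d')

    if dt_obj is None:
        # No date matched the pattern; earlier releases raised ValueError here.
        print('no date found in the string')
    else:
        print(dt_obj, date_str, timestamp)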
atomicshop/etws/traces/trace_dns.py CHANGED
@@ -53,7 +53,11 @@ class DnsRequestResponseTrace:
          """
  
          self.attrs = attrs
-         self.skip_record_list = skip_record_list
+ 
+         if skip_record_list:
+             self.skip_record_list: list = skip_record_list
+         else:
+             self.skip_record_list: list = list()
  
          if not session_name:
              session_name = ETW_DEFAULT_SESSION_NAME
atomicshop/file_io/docxs.py CHANGED
@@ -62,7 +62,7 @@ def search_for_hyperlink_in_files(directory_path: str, hyperlink: str, relative_
          input('press Enter')
      """
  
-     if not filesystem.check_directory_existence(directory_path):
+     if not filesystem.is_directory_exists(directory_path):
          raise NotADirectoryError(f"Directory doesn't exist: {directory_path}")
  
      # Get all the docx files in the specified directory.
atomicshop/filesystem.py CHANGED
@@ -246,6 +246,16 @@ def check_absolute_path___add_full(filesystem_path: str, full_path_to_add: str)
  
  
  def check_file_existence(file_path: str) -> bool:
+     """This will be removed in future versions. Use 'is_file_exists' instead."""
+     return is_file_exists(file_path)
+ 
+ 
+ def check_directory_existence(directory_path: str) -> bool:
+     """This will be removed in future versions. Use 'is_directory_exists' instead."""
+     return is_directory_exists(directory_path)
+ 
+ 
+ def is_file_exists(file_path: str) -> bool:
      """
      Function to check if the path is a file.
  
@@ -260,7 +270,7 @@ def check_file_existence(file_path: str) -> bool:
      return False
  
  
- def check_directory_existence(directory_path: str) -> bool:
+ def is_directory_exists(directory_path: str) -> bool:
      """
      Function to check if a path is a directory.
  
@@ -459,7 +469,7 @@ def move_folder(source_directory: str, target_directory: str, overwrite: bool =
  
      # Check if 'overwrite' is set to 'True' and if the directory exists.
      if not overwrite:
-         if check_directory_existence(target_directory):
+         if is_directory_exists(target_directory):
              raise FileExistsError(f'Directory already exists: {target_directory}')
  
      # Move directory.
@@ -545,7 +555,7 @@ def copy_directory(source_directory: str, target_directory: str, overwrite: bool
  
      # Check if 'overwrite' is set to 'True' and if the directory exists.
      if overwrite:
-         if check_directory_existence(target_directory):
+         if is_directory_exists(target_directory):
              remove_directory(target_directory)
  
      # Copy directory.
@@ -1383,7 +1393,7 @@ def backup_folder(directory_path: str, backup_directory: str) -> None:
      Final path will look like: 'C:\\Users\\user1\\Downloads\\backup\\20231003-120000-000000_folder1'
      """
  
-     if check_directory_existence(directory_path):
+     if is_directory_exists(directory_path):
          timestamp: str = datetimes.TimeFormats().get_current_formatted_time_filename_stamp(True)
          directory_name = Path(directory_path).name
          backup_directory_path: str = str(Path(backup_directory) / f"{timestamp}_{directory_name}")
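The rename keeps backward compatibility: check_file_existence and check_directory_existence remain as thin shims that delegate to the new is_file_exists / is_directory_exists, so existing callers keep working while new code can migrate. A hedged migration sketch (the path is hypothetical):

    from atomicshop import filesystem

    path = '/tmp/some_directory'  # hypothetical path

    # Old name, still available but slated for removal per its docstring:
    legacy = filesystem.check_directory_existence(path)

    # New name introduced in this release:
    current = filesystem.is_directory_exists(path)

    assert legacy == current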
atomicshop/mitm/statistic_analyzer.py CHANGED
@@ -1,13 +1,12 @@
- import os
  import datetime
  import statistics
  import json
- from typing import Literal
+ from typing import Literal, Union
  
  from .. import filesystem, domains, datetimes, urls
  from ..basics import dicts
  from ..file_io import tomls, xlsxs, csvs, jsons
- from ..wrappers.loggingw import reading
+ from ..wrappers.loggingw import reading, consts
  from ..print_api import print_api
  
  
@@ -479,7 +478,7 @@ def calculate_moving_average(
          moving_average_window_days,
          top_bottom_deviation_percentage: float,
          print_kwargs: dict = None
- ):
+ ) -> list:
      """
      This function calculates the moving average of the daily statistics.
  
@@ -490,7 +489,7 @@ def calculate_moving_average(
      :param print_kwargs: dict, the print_api arguments.
      """
  
-     date_pattern: str = '%Y_%m_%d'
+     date_pattern: str = consts.DEFAULT_ROTATING_SUFFIXES_FROM_WHEN['midnight']
  
      # Get all the file paths and their midnight rotations.
      logs_paths: list = reading.get_logs_paths(
@@ -782,19 +781,19 @@ def find_deviation_from_moving_average(
  
  def moving_average_calculator_main(
          statistics_file_path: str,
-         output_directory: str,
          moving_average_window_days: int,
-         top_bottom_deviation_percentage: float
- ) -> int:
+         top_bottom_deviation_percentage: float,
+         output_json_file_path: str = None
+ ) -> Union[list, None]:
      """
      This function is the main function for the moving average calculator.
  
      :param statistics_file_path: string, the statistics file path.
-     :param output_directory: string, the output directory.
      :param moving_average_window_days: integer, the moving average window days.
      :param top_bottom_deviation_percentage: float, the top bottom deviation percentage. Example: 0.1 for 10%.
-     :return: integer, the return code.
+     :param output_json_file_path: string, if None, no json file will be written.
      -----------------------------
+     :return: the deviation list of dicts.
  
      Example:
          import sys
@@ -804,9 +803,9 @@ def moving_average_calculator_main(
          def main():
              return statistic_analyzer.moving_average_calculator_main(
                  statistics_file_path='statistics.csv',
-                 output_directory='output',
                  moving_average_window_days=7,
-                 top_bottom_deviation_percentage=0.1
+                 top_bottom_deviation_percentage=0.1,
+                 output_json_file='C:\\output\\deviation_list.json'
              )
  
  
@@ -828,13 +827,15 @@ def moving_average_calculator_main(
      )
  
      if deviation_list:
-         for deviation_list_index, deviation in enumerate(deviation_list):
-             convert_data_value_to_string('request_sizes', deviation_list_index)
-             convert_data_value_to_string('response_sizes', deviation_list_index)
-             convert_value_to_string('ma_data', deviation_list_index)
+         if output_json_file_path:
+             for deviation_list_index, deviation in enumerate(deviation_list):
+                 convert_data_value_to_string('request_sizes', deviation_list_index)
+                 convert_data_value_to_string('response_sizes', deviation_list_index)
+                 convert_value_to_string('ma_data', deviation_list_index)
+ 
+             print_api(f'Deviation Found, saving to file: {output_json_file_path}', color='blue')
+             jsons.write_json_file(deviation_list, output_json_file_path, use_default_indent=True)
  
-         file_path = output_directory + os.sep + 'deviation.json'
-         print_api(f'Deviation Found, saving to file: {file_path}', color='blue')
-         jsons.write_json_file(deviation_list, file_path, use_default_indent=True)
+         return deviation_list
  
-     return 0
+     return None
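The calculator no longer takes output_directory and no longer returns an integer code: it returns the deviation list (or None) and only writes JSON when output_json_file_path is given. A minimal sketch of the new call shape, derived from the signature in the hunk above, with hypothetical file paths:

    from atomicshop.mitm import statistic_analyzer

    deviations = statistic_analyzer.moving_average_calculator_main(
        statistics_file_path='statistics.csv',        # hypothetical input file
        moving_average_window_days=7,
        top_bottom_deviation_percentage=0.1,          # 0.1 means 10%
        output_json_file_path='deviation_list.json')  # omit to skip writing JSON

    if deviations:
        print(f'{len(deviations)} deviating entries found')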
atomicshop/wrappers/loggingw/consts.py ADDED
@@ -0,0 +1,49 @@
+ DEFAULT_ROTATING_SUFFIXES_FROM_WHEN: dict = {
+     'midnight': '%Y-%m-%d',
+     'S': '%Y-%m-%d_%H-%M-%S',
+     'M': '%Y-%m-%d_%H-%M',
+     'H': '%Y-%m-%d_%H',
+     'D': '%Y-%m-%d',
+     'W0': '%Y-%m-%d',
+     'W1': '%Y-%m-%d',
+     'W2': '%Y-%m-%d',
+     'W3': '%Y-%m-%d',
+     'W4': '%Y-%m-%d',
+     'W5': '%Y-%m-%d',
+     'W6': '%Y-%m-%d'
+ }
+ 
+ 
+ DEFAULT_STREAM_FORMATTER: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
+ DEFAULT_MESSAGE_FORMATTER: str = "%(message)s"
+ 
+ FORMAT_ELEMENT_TO_HEADER: dict = {
+     'asctime': 'Event Time [Y-M-D H:M:S]',
+     'created': 'Created',
+     'filename': "ModuleFileName ",
+     'funcName': 'Function',
+     'levelname': 'Log Level',
+     'levelno': 'Level Number',
+     'lineno': 'Line ',
+     'module': 'Module',
+     'msecs': '[MS.mS]',
+     'message': 'Message',
+     'name': 'Logger Name ',
+     'pathname': 'Path',
+     'process': 'Process',
+     'processName': 'Process Name',
+     'relativeCreated': 'Relative Created',
+     'thread': 'Thread',
+     'threadName': 'Thread Name'
+ }
+ 
+ DEFAULT_FORMATTER_TXT_FILE: str = \
+     "{asctime} | " \
+     "{levelname:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['levelname'])}" + "s} | " \
+     "{name:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['name'])}" + "s} | " \
+     "{filename:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['filename'])}" + "s} : " \
+     "{lineno:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['lineno'])}" + "d} | " \
+     "{threadName} | {message}"
+ 
+ DEFAULT_FORMATTER_CSV_FILE: str = \
+     '\"{asctime}\",{levelname},{name},{filename},{lineno},{threadName},\"{message}\"'
atomicshop/wrappers/loggingw/formatters.py CHANGED
@@ -1,6 +1,8 @@
  import logging
  import time
  
+ from . import consts
+ 
  
  # Log formatter, means how the log will look inside the file
  # Format for specific object: %(levelname)s
@@ -10,41 +12,6 @@ import time
  # ".40" truncating the string to only 40 characters. Example: %(message).250s
  
  
- DEFAULT_STREAM_FORMATTER: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
- DEFAULT_MESSAGE_FORMATTER: str = "%(message)s"
- 
- FORMAT_ELEMENT_TO_HEADER: dict = {
-     'asctime': 'Event Time [Y-M-D H:M:S]',
-     'created': 'Created',
-     'filename': "ModuleFileName ",
-     'funcName': 'Function',
-     'levelname': 'Log Level',
-     'levelno': 'Level Number',
-     'lineno': 'Line ',
-     'module': 'Module',
-     'msecs': '[MS.mS]',
-     'message': 'Message',
-     'name': 'Logger Name ',
-     'pathname': 'Path',
-     'process': 'Process',
-     'processName': 'Process Name',
-     'relativeCreated': 'Relative Created',
-     'thread': 'Thread',
-     'threadName': 'Thread Name'
- }
- 
- DEFAULT_FORMATTER_TXT_FILE: str = \
-     "{asctime} | " \
-     "{levelname:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['levelname'])}" + "s} | " \
-     "{name:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['name'])}" + "s} | " \
-     "{filename:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['filename'])}" + "s} : " \
-     "{lineno:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['lineno'])}" + "d} | " \
-     "{threadName} | {message}"
- 
- DEFAULT_FORMATTER_CSV_FILE: str = \
-     '\"{asctime}\",{levelname},{name},{filename},{lineno},{threadName},\"{message}\"'
- 
- 
  class NanosecondsFormatter(logging.Formatter):
      def __init__(self, fmt=None, datefmt=None, style='%', use_nanoseconds=False):
          super().__init__(fmt, datefmt, style)
@@ -73,24 +40,6 @@ class NanosecondsFormatter(logging.Formatter):
          return s
  
  
- 
- 
- 
-         # if datefmt is None:
-         #     # Use the default behavior if no datefmt is provided
-         #     return super().formatTime(record, datefmt)
-         # elif '%f' in datefmt:
-         #     # Format the time up to seconds
-         #     base_time = time.strftime(datefmt.replace('%f', ''), ct)
-         #     # Calculate nanoseconds from the fractional part of the timestamp
-         #     nanoseconds = f'{record.created:.9f}'.split('.')[1]
-         #     # Return the formatted string with nanoseconds appended
-         #     return base_time + nanoseconds
-         # else:
-         #     # Use the provided datefmt if it doesn't include %f
-         #     return time.strftime(datefmt, ct)
- 
- 
  class FormatterProcessor:
      """
      Class to process the formatter.
@@ -168,7 +117,7 @@ class FormatterProcessor:
          # Iterate through all the elements and get the header list.
          header_dict: dict = dict()
          for element in self.list_of_elements:
-             header_dict.update({element: FORMAT_ELEMENT_TO_HEADER[element]})
+             header_dict.update({element: consts.FORMAT_ELEMENT_TO_HEADER[element]})
  
          return header_dict
  
atomicshop/wrappers/loggingw/handlers.py CHANGED
@@ -9,7 +9,7 @@ from typing import Literal, Union
  import threading
  from datetime import datetime
  
- from . import loggers, formatters, filters
+ from . import loggers, formatters, filters, consts
  from ... import datetimes, filesystem
  
  
@@ -34,15 +34,15 @@ def _process_formatter_attribute(
      """
  
      if formatter == 'DEFAULT' and file_type is None:
-         return formatters.DEFAULT_STREAM_FORMATTER
+         return consts.DEFAULT_STREAM_FORMATTER
      elif formatter == 'DEFAULT' and file_type == 'txt':
-         return formatters.DEFAULT_FORMATTER_TXT_FILE
+         return consts.DEFAULT_FORMATTER_TXT_FILE
      elif formatter == 'DEFAULT' and file_type == 'csv':
-         return formatters.DEFAULT_FORMATTER_CSV_FILE
+         return consts.DEFAULT_FORMATTER_CSV_FILE
      elif formatter == 'DEFAULT' and file_type == 'json':
-         return formatters.DEFAULT_MESSAGE_FORMATTER
+         return consts.DEFAULT_MESSAGE_FORMATTER
      elif formatter == 'MESSAGE':
-         return formatters.DEFAULT_MESSAGE_FORMATTER
+         return consts.DEFAULT_MESSAGE_FORMATTER
      else:
          return formatter
  
atomicshop/wrappers/loggingw/reading.py CHANGED
@@ -65,7 +65,17 @@ def get_logs_paths(
          add_last_modified_time=True,
          sort_by_last_modified_time=True)
  
-     if len(logs_files) > 1:
+     # Get the datetime object from the first file name by the date pattern.
+     first_date_string = None
+     if logs_files:
+         first_file_name: str = Path(logs_files[0]['file_path']).name
+         first_datetime_object, first_date_string, first_timestamp_float = (
+             datetimes.get_datetime_from_complex_string_by_pattern(first_file_name, date_pattern))
+ 
+     # The problem here is the file name that doesn't contain the date string in the name.
+     # If it is regular log rotation, then there will be one file that doesn't have the date string in the name.
+     # If the function used to get the previous day log, then there will be no file that doesn't have the date string.
+     if len(logs_files) > 1 or (len(logs_files) == 1 and first_date_string):
          if date_pattern:
              latest_timestamp: float = 0
              for file_index, single_file in enumerate(logs_files):
@@ -74,14 +84,8 @@
                  logs_files[file_index]['file_name'] = current_file_name
  
                  # Get the datetime object from the file name by the date pattern.
-                 try:
-                     datetime_object, date_string, timestamp_float = (
-                         datetimes.get_datetime_from_complex_string_by_pattern(current_file_name, date_pattern))
-                 # ValueError will be raised if the date pattern does not match the file name.
-                 except ValueError:
-                     timestamp_float = 0
-                     datetime_object = None
-                     date_string = None
+                 datetime_object, date_string, timestamp_float = (
+                     datetimes.get_datetime_from_complex_string_by_pattern(current_file_name, date_pattern))
  
                  # Update the last modified time to the dictionary.
                  logs_files[file_index]['last_modified'] = timestamp_float
@@ -118,6 +122,7 @@ def get_logs_paths(
      elif len(logs_files) == 1 and previous_day_only:
          logs_files = []
  
+ 
      return logs_files
  
  
atomicshop-2.14.10a0.dist-info/METADATA → atomicshop-2.14.12.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: atomicshop
- Version: 2.14.10a0
+ Version: 2.14.12
  Summary: Atomic functions and classes to make developer life easier
  Author: Denis Kras
  License: MIT License
atomicshop-2.14.10a0.dist-info/RECORD → atomicshop-2.14.12.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- atomicshop/__init__.py,sha256=nSkABCdtmyIE9Hrt7epJjiL99KrAXtXkVSaz6all6rM,126
+ atomicshop/__init__.py,sha256=X3k111NJA7eZtjRpWoDPX-8UI7APCHhTzJOfr58o7vQ,124
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -8,13 +8,13 @@ atomicshop/command_line_processing.py,sha256=u5yT9Ger_cu7ni5ID0VFlRbVD46ARHeNC9t
  atomicshop/config_init.py,sha256=z2RXD_mw9nQlAOpuGry1h9QT-2LhNscXgGAktN3dCVQ,2497
  atomicshop/console_output.py,sha256=AOSJjrRryE97PAGtgDL03IBtWSi02aNol8noDnW3k6M,4667
  atomicshop/console_user_response.py,sha256=31HIy9QGXa7f-GVR8MzJauQ79E_ZqAeagF3Ks4GGdDU,3234
- atomicshop/datetimes.py,sha256=Rmn-Ag36xRI1xbyoMs2fe1PB5hzb_UHEIwRVPppz0M0,18278
+ atomicshop/datetimes.py,sha256=XF-6PbMlXgxHAOCVBGWUnAwDlFuZS1YFUGk6STFWsq0,18362
  atomicshop/diff_check.py,sha256=RJvzJhyYAZyRPKVDk1dS7UwZCx0kq__WDZ6N0rNfZUY,27110
  atomicshop/dns.py,sha256=h4uZKoz4wbBlLOOduL1GtRcTm-YpiPnGOEGxUm7hhOI,2140
  atomicshop/domains.py,sha256=Rxu6JhhMqFZRcoFs69IoEd1PtYca0lMCG6F1AomP7z4,3197
  atomicshop/emails.py,sha256=I0KyODQpIMEsNRi9YWSOL8EUPBiWyon3HRdIuSj3AEU,1410
  atomicshop/file_types.py,sha256=-0jzQMRlmU1AP9DARjk-HJm1tVE22E6ngP2mRblyEjY,763
- atomicshop/filesystem.py,sha256=202ue2LkjI1KdaxvB_RHV-2eIczy2-caZGLO4PSePik,53887
+ atomicshop/filesystem.py,sha256=emiwRQuM56yXATPTMFrhYmLp9LlQ9l_RA3TU6q5NoIg,54232
  atomicshop/functions.py,sha256=pK8hoCE9z61PtWCxQJsda7YAphrLH1wxU5x-1QJP-sY,499
  atomicshop/get_process_list.py,sha256=hi1NOG-i8S6EcyQ6LTfP4pdxqRfjEijz9SZ5nEbcM9Q,6076
  atomicshop/get_process_name_cmd_dll.py,sha256=CtaSp3mgxxJKCCVW8BLx6BJNx4giCklU_T7USiCEwfc,5162
@@ -92,7 +92,7 @@ atomicshop/basics/guids.py,sha256=iRx5n18ATZWhpo748BwEjuLWLsu9y3OwF5-Adp-Dtik,40
  atomicshop/basics/hexs.py,sha256=i8CTG-J0TGGa25yFSbWEvpVyHFnof_qSWUrmXY-ylKM,1054
  atomicshop/basics/if_else.py,sha256=MakivJChofZCpr0mOVjwCthzpiaBxXVB-zv7GwMOqVo,202
  atomicshop/basics/isinstancing.py,sha256=fQ35xfqbguQz2BUn-3a4KVGskhTcIn8JjRtxV2rFcRQ,876
- atomicshop/basics/list_of_dicts.py,sha256=qI2uoYIcHjR8RSD5vtkqhpMgL6XTYRGJDcr9cb2HbZM,6051
+ atomicshop/basics/list_of_dicts.py,sha256=tj0LNPf1ljNI_qpoO-PiOT4Ulmk1M-UpTGyn9twVcw8,8039
  atomicshop/basics/lists.py,sha256=I0C62vrDrNwCTNl0EjUZNa1Jsd8l0rTkp28GEx9QoEI,4258
  atomicshop/basics/multiprocesses.py,sha256=nSskxJSlEdalPM_Uf8cc9kAYYlVwYM1GonBLAhCL2mM,18831
  atomicshop/basics/numbers.py,sha256=ESX0z_7o_ok3sOmCKAUBoZinATklgMy2v-4RndqXlVM,1837
@@ -108,11 +108,11 @@ atomicshop/etws/providers.py,sha256=CXNx8pYdjtpLIpA66IwrnE64XhY4U5ExnFBMLEb8Uzk,
  atomicshop/etws/sessions.py,sha256=b_KeiOvgOBJezJokN81TRlrvJiQNJlIWN4Z6UVjuxP0,1335
  atomicshop/etws/trace.py,sha256=WMOjdazK97UcIdhVgcnjh98OCbuEJcnm1Z_yPp_nE2c,7258
  atomicshop/etws/traces/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- atomicshop/etws/traces/trace_dns.py,sha256=rWQ8bv8eMHBRRkA8oxO9caYqj0h4Emw4aZXmoI3Q6fg,6292
+ atomicshop/etws/traces/trace_dns.py,sha256=WvOZm7KNdP4r6ofkZhUGi9WjtYlkV3mUp_yxita3Qg4,6399
  atomicshop/etws/traces/trace_sysmon_process_creation.py,sha256=OM-bkK38uYMwWLZKNOTDa0Xdk3sO6sqsxoMUIiPvm5g,4656
  atomicshop/file_io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/file_io/csvs.py,sha256=eS2SSGwcC1MlRPPgoqyFyE-qqH2esUWQvv3wWLMiOuA,5876
- atomicshop/file_io/docxs.py,sha256=6tcYFGp0vRsHR47VwcRqwhdt2DQOwrAUYhrwN996n9U,5117
+ atomicshop/file_io/docxs.py,sha256=yNlNXKLIvPkHQNF544VvCrbxcXsHX6G-6_V-8Ixp2zI,5111
  atomicshop/file_io/file_io.py,sha256=FOZ6_PjOASxSDHESe4fwDv5miXYR10OHTxkxtEHoZYQ,6555
  atomicshop/file_io/jsons.py,sha256=q9ZU8slBKnHLrtn3TnbK1qxrRpj5ZvCm6AlsFzoANjo,5303
  atomicshop/file_io/tomls.py,sha256=oa0Wm8yMkPRXKN9jgBuTnKbioSOee4mABW5IMUFCYyU,3041
@@ -127,7 +127,7 @@ atomicshop/mitm/initialize_engines.py,sha256=YnXPK1UKrmULnfL4zLo2LOpKWq-aGKzc9p3
  atomicshop/mitm/initialize_mitm_server.py,sha256=j1yMUbHsnFh9l5rFiUgBQA0mRZqREOKviP0frRzYikM,14611
  atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
  atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
- atomicshop/mitm/statistic_analyzer.py,sha256=ctsf-MBIUvG4-R0K4gFQyi_b42-VCq-5s7hgO9jMOes,38415
+ atomicshop/mitm/statistic_analyzer.py,sha256=uUf6b6uLXxUMetEj5l225B1Id69acEmVRRX4zHsUs4M,38600
  atomicshop/mitm/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/mitm/engines/create_module_template.py,sha256=tRjVSm1sD6FzML71Qbuwvita0qsusdFGm8NZLsZ-XMs,4853
  atomicshop/mitm/engines/create_module_template_example.py,sha256=X5xhvbV6-g9jU_bQVhf_crZmaH50LRWz3bS-faQ18ds,489
@@ -234,12 +234,13 @@ atomicshop/wrappers/factw/rest/statistics.py,sha256=vznwzKP1gEF7uXz3HsuV66BU9wrp
  atomicshop/wrappers/factw/rest/status.py,sha256=4O3xS1poafwyUiLDkhyx4oMMe4PBwABuRPpOMnMKgIU,641
  atomicshop/wrappers/fibratusw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/fibratusw/install.py,sha256=PLVymDe0HuOvU0r2lje8BkQAgtiOWEeRO7n-1zKuL7A,3287
+ atomicshop/wrappers/loggingw/consts.py,sha256=JWiUJEydjhwatBxtIJsGTmDUSTLbmIRidtR6qRLMaIY,1608
  atomicshop/wrappers/loggingw/filters.py,sha256=CMs5PAMb68zxJgBcQobaOFDG5kLJBOVYnoBHjDgksO8,2859
- atomicshop/wrappers/loggingw/formatters.py,sha256=808R7K3e3ZJD2BXfqI6UMOyXGrCgt9SYh2Uv7sL_1KQ,7432
- atomicshop/wrappers/loggingw/handlers.py,sha256=bv3oCm_P0JdXaJKYjhyfFNMNong6Nc9LE4JGFxLN2As,16940
+ atomicshop/wrappers/loggingw/formatters.py,sha256=7XUJvlB0CK4DCkEp8NTL0S0dkyrZD0UTADgEwkStKOY,5483
+ atomicshop/wrappers/loggingw/handlers.py,sha256=yFYBeTkxnpmtlauoH3ZEFEHUYQYu9YL-ycd9sYTvOl4,16928
  atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
  atomicshop/wrappers/loggingw/loggingw.py,sha256=lo4OZPXCbYZi3GqpaaJSs9SOGFfqD2EgHzzTK7f5IR4,11275
- atomicshop/wrappers/loggingw/reading.py,sha256=yh7uNPxEdn6KsxSKrYny2C57XdI25F5gaByz77CO_pw,17038
+ atomicshop/wrappers/loggingw/reading.py,sha256=TUTzffs3hSgfKjoJlf0l8o-1MOA4ag7O7cMglaxJFDQ,17500
  atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
  atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -284,8 +285,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
  atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
  atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
- atomicshop-2.14.10a0.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
- atomicshop-2.14.10a0.dist-info/METADATA,sha256=VanW8oY3z7wC7NEucff9VcdAh0MBXkO1Qh7seIpikXg,10481
- atomicshop-2.14.10a0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- atomicshop-2.14.10a0.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
- atomicshop-2.14.10a0.dist-info/RECORD,,
+ atomicshop-2.14.12.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+ atomicshop-2.14.12.dist-info/METADATA,sha256=ml2p_-2MUkPlV2TOi-uFOF0ecFg7-1bf1mDSr16JxxE,10479
+ atomicshop-2.14.12.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ atomicshop-2.14.12.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+ atomicshop-2.14.12.dist-info/RECORD,,