atomicshop 2.12.0__py3-none-any.whl → 2.12.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic. Click here for more details.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  """Atomic Basic functions and classes to make developer life easier"""
2
2
 
3
3
  __author__ = "Den Kras"
4
- __version__ = '2.12.0'
4
+ __version__ = '2.12.3'
@@ -5,7 +5,7 @@ from .print_api import print_api
5
5
  from .basics.lists import remove_duplicates
6
6
  from .datetimes import convert_single_digit_to_zero_padded, create_date_range_for_year, \
7
7
  create_date_range_for_year_month
8
- from .file_io.csvs import read_csv_to_list
8
+ from .file_io import csvs
9
9
 
10
10
 
11
11
  class AppointmentManager:
@@ -38,7 +38,8 @@ class AppointmentManager:
38
38
  def read_latest_date_csv(self):
39
39
  try:
40
40
  # Read the csv to list of dicts.
41
- csv_list, _ = read_csv_to_list(file_path=self.latest_date_to_check_filepath, raise_exception=True)
41
+ csv_list, _ = csvs.read_csv_to_list_of_dicts_by_header(
42
+ file_path=self.latest_date_to_check_filepath, raise_exception=True)
42
43
  # It has only 1 line, so get it to dict.
43
44
  latest_date_dict = csv_list[0]
44
45
 
@@ -100,7 +101,8 @@ class BlacklistEngine:
100
101
  def read_blacklist_csv(self) -> None:
101
102
  try:
102
103
  # Read the csv to list of dicts.
103
- csv_list, _ = read_csv_to_list(file_path=self.blacklist_dates_filepath, raise_exception=True)
104
+ csv_list, _ = csvs.read_csv_to_list_of_dicts_by_header(
105
+ file_path=self.blacklist_dates_filepath, raise_exception=True)
104
106
 
105
107
  daterange = None
106
108
  # Iterate through all the rows.
atomicshop/diff_check.py CHANGED
@@ -38,7 +38,7 @@ class DiffChecker:
38
38
  The object is 'None' by default, since there are objects that are needed to be provided in the
39
39
  function input for that object. So, not always you know what your object type during class initialization.
40
40
  :param check_object_display_name: string, name of the object to display in the message.
41
- If not specified, the 'check_object' will be displayed.
41
+ If not specified, the provided 'check_object' will be displayed.
42
42
  :param aggregation: boolean, if True, the object will be aggregated with other objects in the list of objects.
43
43
  Meaning, that the object will be checked against the existing objects in the list, and if it is not
44
44
  in the list, it will be added to the list. If it is in the list, it will be ignored.
@@ -62,6 +62,74 @@ class DiffChecker:
62
62
 
63
63
  True: return updated dictionary on first cycle. This is the default.
64
64
  False: don't return updated dictionary on first cycle.
65
+
66
+ --------------------------------------------------
67
+
68
+ Working example:
69
+ from atomicshop import diff_check
70
+
71
+
72
+ # Example of checking list of dicts.
73
+ check_list_of_dicts = [
74
+ {'name': 'John', 'age': 25},
75
+ {'name': 'Alice', 'age': 30}
76
+ ]
77
+
78
+ diff_checker = diff_check.DiffChecker(
79
+ check_object=check_list_of_dicts,
80
+ check_object_display_name='List of Dicts',
81
+ aggregation=True,
82
+ input_file_path='D:\\input\\list_of_dicts.json',
83
+ input_file_write_only=True,
84
+ return_first_cycle=True
85
+ )
86
+
87
+ result, message = diff_checker.check_list_of_dicts(
88
+ sort_by_keys=['name']
89
+ )
90
+
91
+ # If result is not None, it means that the object was updated.
92
+ if result:
93
+ print(message)
94
+
95
+ --------------------------------------------------
96
+
97
+ Working example when you need to aggregate a list of dicts, meaning only new entries will be added to the list:
98
+ from atomicshop import diff_check
99
+
100
+
101
+ diff_checker = diff_check.DiffChecker(
102
+ check_object_display_name='List of Dicts',
103
+ aggregation=True,
104
+ input_file_path='D:\\input\\list_of_dicts.json',
105
+ input_file_write_only=True,
106
+ return_first_cycle=True
107
+ )
108
+
109
+ # Example of checking list of dicts.
110
+ check_list_of_dicts = [
111
+ {'name': 'John', 'age': 25},
112
+ {'name': 'Alice', 'age': 30}
113
+ ]
114
+
115
+ diff_checker.check_object = check_list_of_dicts
116
+ result, message = diff_checker.check_list_of_dicts()
117
+
118
+ # If result is not None, it means that the object was updated.
119
+ if result:
120
+ print(message)
121
+
122
+
123
+ check_list_of_dicts = [
124
+ {'name': 'John', 'age': 25},
125
+ {'name': 'Jessie', 'age': 50}
126
+ ]
127
+
128
+ diff_checker.check_object = check_list_of_dicts
129
+ result, message = diff_checker.check_list_of_dicts()
130
+
131
+ if result:
132
+ print(message)
65
133
  """
66
134
 
67
135
  # 'check_object' can be none, so checking if it not equals empty string.
@@ -130,13 +198,13 @@ class DiffChecker:
130
198
  try:
131
199
  if self.save_as == 'txt':
132
200
  self.previous_content = file_io.read_file(
133
- self.input_file_path, stderr=False, **print_kwargs)
201
+ self.input_file_path, stderr=False, **(print_kwargs or {}))
134
202
  elif self.save_as == 'json':
135
203
  self.previous_content = jsons.read_json_file(
136
- self.input_file_path, stderr=False, **print_kwargs)
204
+ self.input_file_path, stderr=False, **(print_kwargs or {}))
137
205
  except FileNotFoundError as except_object:
138
206
  message = f"Input File [{Path(except_object.filename).name}] doesn't exist - Will create new one."
139
- print_api(message, color='yellow', **print_kwargs)
207
+ print_api(message, color='yellow', **(print_kwargs or {}))
140
208
  pass
141
209
 
142
210
  # get the content of current function.
@@ -182,10 +250,10 @@ class DiffChecker:
182
250
  if self.input_file_path:
183
251
  if self.save_as == 'txt':
184
252
  # noinspection PyTypeChecker
185
- file_io.write_file(self.previous_content, self.input_file_path, **print_kwargs)
253
+ file_io.write_file(self.previous_content, self.input_file_path, **(print_kwargs or {}))
186
254
  elif self.save_as == 'json':
187
255
  jsons.write_json_file(
188
- self.previous_content, self.input_file_path, use_default_indent=True, **print_kwargs)
256
+ self.previous_content, self.input_file_path, use_default_indent=True, **(print_kwargs or {}))
189
257
  else:
190
258
  message = f"Object didn't change: {self.check_object_display_name}"
191
259
 
@@ -6,14 +6,25 @@ from . import file_io
6
6
 
7
7
 
8
8
  @read_file_decorator
9
- def read_csv_to_list(file_path: str,
10
- file_mode: str = 'r',
11
- encoding=None,
12
- header: list = None,
13
- file_object=None,
14
- **kwargs) -> Tuple[List, List | None]:
9
+ def read_csv_to_list_of_dicts_by_header(
10
+ file_path: str,
11
+ file_mode: str = 'r',
12
+ encoding=None,
13
+ header: list = None,
14
+ file_object=None,
15
+ **kwargs
16
+ ) -> Tuple[List, List | None]:
15
17
  """
16
18
  Function to read csv file and output its contents as list of dictionaries for each row.
19
+ Each key of the dictionary is a header field.
20
+
21
+ Example:
22
+ CSV file:
23
+ name,age,city
24
+ John,25,New York
25
+
26
+ Output:
27
+ [{'name': 'John', 'age': '25', 'city': 'New York'}]
17
28
 
18
29
  :param file_path: String with full file path to json file.
19
30
  :param file_mode: string, file reading mode. Examples: 'r', 'rb'. Default is 'r'.
@@ -39,25 +50,85 @@ def read_csv_to_list(file_path: str,
39
50
  return csv_list, header
40
51
 
41
52
 
42
- def write_list_to_csv(csv_list: list, csv_filepath: str) -> None:
53
+ @read_file_decorator
54
+ def read_csv_to_list_of_lists(
55
+ file_path: str,
56
+ file_mode: str = 'r',
57
+ encoding=None,
58
+ exclude_header_from_content: bool = False,
59
+ file_object=None,
60
+ **kwargs
61
+ ) -> Tuple[List, List | None]:
62
+ """
63
+ Function to read csv file and output its contents as list of lists for each row.
64
+
65
+ Example:
66
+ CSV file:
67
+ name,age,city
68
+ John,25,New York
69
+
70
+ Output:
71
+ [['name', 'age', 'city'], ['John', '25', 'New York']]
72
+
73
+ :param file_path: String with full file path to json file.
74
+ :param file_mode: string, file reading mode. Examples: 'r', 'rb'. Default is 'r'.
75
+ :param encoding: string, encoding of the file. Default is 'None'.
76
+ :param exclude_header_from_content: Boolean, if True, the header will be excluded from the content.
77
+ :param file_object: file object of the 'open()' function in the decorator. Decorator executes the 'with open()'
78
+ statement and passes to this function. That's why the default is 'None', since we get it from the decorator.
79
+ :param kwargs: Keyword arguments for 'read_file' function.
80
+ :return: list.
81
+ """
82
+
83
+ # Read CSV file to list of lists.
84
+ csv_reader = csv.reader(file_object)
85
+
86
+ csv_list = list(csv_reader)
87
+
88
+ # Get the header if there is only something in the content.
89
+ if csv_list:
90
+ header = csv_list[0]
91
+ else:
92
+ header = []
93
+
94
+ if exclude_header_from_content and csv_list:
95
+ csv_list.pop(0)
96
+
97
+ return csv_list, header
98
+
99
+
100
+ def write_list_to_csv(
101
+ file_path: str,
102
+ content_list: list,
103
+ mode: str = 'w'
104
+ ) -> None:
43
105
  """
44
106
  Function to write list object that each iteration of it contains dict object with same keys and different values.
45
107
 
46
- :param csv_list: List object that each iteration contains dictionary with same keys and different values.
47
- :param csv_filepath: Full file path to CSV file.
108
+ :param file_path: Full file path to CSV file.
109
+ :param content_list: List object that each iteration contains dictionary with same keys and different values.
110
+ :param mode: String, file writing mode. Default is 'w'.
48
111
  :return: None.
49
112
  """
50
113
 
51
- with open(csv_filepath, mode='w') as csv_file:
52
- # Create header from keys of the first dictionary in list.
53
- header = csv_list[0].keys()
54
- # Create CSV writer.
55
- writer = csv.DictWriter(csv_file, fieldnames=header, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
56
-
57
- # Write header.
58
- writer.writeheader()
59
- # Write list of dits as rows.
60
- writer.writerows(csv_list)
114
+ with open(file_path, mode=mode) as csv_file:
115
+ if len(content_list) > 0 and isinstance(content_list[0], dict):
116
+ # Treat the list as list of dictionaries.
117
+ header = content_list[0].keys()
118
+
119
+ # Create CSV writer.
120
+ writer = csv.DictWriter(csv_file, fieldnames=header, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
121
+
122
+ # Write header.
123
+ writer.writeheader()
124
 + # Write list of dicts as rows.
125
+ writer.writerows(content_list)
126
+ # Else, treat the list as list of lists.
127
+ else:
128
+ # Create CSV writer.
129
+ writer = csv.writer(csv_file)
130
+ # Write list of lists as rows.
131
+ writer.writerows(content_list)
61
132
 
62
133
 
63
134
  def get_header(file_path: str, print_kwargs: dict = None) -> list:
@@ -155,7 +155,9 @@ def analyze(main_file_path: str):
155
155
 
156
156
  # Get the content from statistics files.
157
157
  statistics_content: list = reading.get_logs(
158
- config['statistic_files_path'], pattern='statistics*.csv', log_type='csv',
158
+ config['statistic_files_path'],
159
+ pattern='statistics*.csv',
160
+ log_type='csv',
159
161
  )
160
162
 
161
163
  # Initialize loop.
@@ -245,6 +245,6 @@ def save_firmware_uids_as_csv(
245
245
 
246
246
  # Save UIDs as CSV file.
247
247
  file_path = directory_path + os.sep + 'uids.csv'
248
- csvs.write_list_to_csv(export_list, file_path)
248
+ csvs.write_list_to_csv(file_path, export_list)
249
249
 
250
250
  return None
@@ -511,6 +511,6 @@ def save_firmware_uids_as_csv(
511
511
 
512
512
  # Save UIDs as CSV file.
513
513
  file_path = directory_path + os.sep + 'uids.csv'
514
- csvs.write_list_to_csv(export_list, file_path)
514
+ csvs.write_list_to_csv(file_path, export_list)
515
515
 
516
516
  return None
@@ -1,12 +1,84 @@
1
1
  import os
2
2
  from typing import Literal
3
+ from pathlib import Path
3
4
 
4
5
  from ... import filesystem, datetimes
5
6
  from ...file_io import csvs
6
7
 
7
8
 
9
+ def get_logs_paths(
10
+ log_files_directory_path: str = None,
11
+ log_file_path: str = None,
12
+ pattern: str = '*.*',
13
+ log_type: Literal['csv'] = 'csv',
14
+ latest_only: bool = False,
15
+ previous_day_only: bool = False
16
+ ):
17
+ """
18
+ This function gets the logs file paths from the directory. Supports rotating files to get the logs by time.
19
+
20
+ :param log_files_directory_path: Path to the log files. If specified, the function will get all the files from the
21
+ directory by the 'pattern'.
22
+ :param log_file_path: Path to the log file. If specified, the function will get the file and all the rotated logs
23
+ associated with this file. The 'pattern' will become the file name using the file name and extension.
24
+
25
+ Example:
26
+ log_file_path = 'C:/logs/test_log.csv'
27
+
28
+ # The function will get all the files that start with 'test_log' and have '.csv' extension:
29
+ pattern = 'test_log*.csv'
30
+
31
+ # The 'log_files_directory_path' will also be taken from the 'log_file_path':
32
+ log_files_directory_path = 'C:/logs'
33
+ :param pattern: Pattern to match the log files names.
34
+ Default pattern will match all the files.
35
+ :param log_type: Type of log to get.
36
+ :param latest_only: Boolean, if True, only the latest log file path will be returned.
37
+ :param previous_day_only: Boolean, if True, only the log file path from the previous day will be returned.
38
+ """
39
+
40
+ if not log_files_directory_path and not log_file_path:
41
+ raise ValueError('Either "log_files_directory_path" or "log_file_path" must be specified.')
42
+ elif log_files_directory_path and log_file_path:
43
+ raise ValueError('Both "log_files_directory_path" and "log_file_path" cannot be specified at the same time.')
44
+
45
+ if log_type != 'csv':
46
+ raise ValueError('Only "csv" log type is supported.')
47
+
48
+ if latest_only and previous_day_only:
49
+ raise ValueError('Both "latest_only" and "previous_day_only" cannot be True at the same time.')
50
+
51
+ # If log file path is specified, get the pattern from the file name.
52
+ if log_file_path:
53
+ # Build the pattern.
54
+ log_file_name: str = Path(log_file_path).stem
55
+ log_file_extension: str = Path(log_file_path).suffix
56
+ pattern = f'{log_file_name}*{log_file_extension}'
57
+
58
+ # Get the directory path from the file path.
59
+ log_files_directory_path = Path(log_file_path).parent
60
+
61
+ # Get all the log file paths by the pattern.
62
+ logs_files: list = filesystem.get_file_paths_from_directory(
63
+ log_files_directory_path, file_name_check_pattern=pattern,
64
+ add_last_modified_time=True, sort_by_last_modified_time=True)
65
+
66
+ if latest_only:
67
+ logs_files = [logs_files[-1]]
68
+
69
+ if previous_day_only:
70
+ # Check if there is a previous day log file.
71
+ if len(logs_files) == 1:
72
+ logs_files = []
73
+ else:
74
+ logs_files = [logs_files[-2]]
75
+
76
+ return logs_files
77
+
78
+
8
79
  def get_logs(
9
- path: str,
80
+ log_files_directory_path: str = None,
81
+ log_file_path: str = None,
10
82
  pattern: str = '*.*',
11
83
  log_type: Literal['csv'] = 'csv',
12
84
  header_type_of_files: Literal['first', 'all'] = 'first',
@@ -17,7 +89,8 @@ def get_logs(
17
89
  """
18
90
  This function gets the logs from the log files. Supports rotating files to get the logs by time.
19
91
 
20
- :param path: Path to the log files.
92
+ :param log_files_directory_path: Path to the log files. Check the 'get_logs_paths' function for more details.
93
+ :param log_file_path: Path to the log file. Check the 'get_logs_paths' function for more details.
21
94
  :param pattern: Pattern to match the log files names.
22
95
  Default pattern will match all the files.
23
96
  :param log_type: Type of log to get.
@@ -36,9 +109,13 @@ def get_logs(
36
109
  if remove_logs and move_to_path:
37
110
  raise ValueError('Both "remove_logs" and "move_to_path" cannot be True/specified at the same time.')
38
111
 
39
- logs_files: list = filesystem.get_file_paths_from_directory(
40
- path, file_name_check_pattern=pattern,
41
- add_last_modified_time=True, sort_by_last_modified_time=True)
112
+ if header_type_of_files not in ['first', 'all']:
113
+ raise ValueError('Only "first" and "all" header types are supported.')
114
+
115
+ # Get all the log file paths by the pattern.
116
+ logs_files: list = get_logs_paths(
117
+ log_files_directory_path=log_files_directory_path, log_file_path=log_file_path,
118
+ pattern=pattern, log_type=log_type)
42
119
 
43
120
  # Read all the logs.
44
121
  logs_content: list = list()
@@ -46,12 +123,12 @@ def get_logs(
46
123
  for single_file in logs_files:
47
124
  if log_type == 'csv':
48
125
  if header_type_of_files == 'all':
49
- csv_content, _ = csvs.read_csv_to_list(single_file['file_path'], **print_kwargs)
126
+ csv_content, _ = csvs.read_csv_to_list_of_dicts_by_header(single_file['file_path'], **print_kwargs)
50
127
  logs_content.extend(csv_content)
51
128
  elif header_type_of_files == 'first':
52
129
  # The function gets an empty header to read it from the CSV file, then returns the header that it read.
53
130
  # Then each time the header is fed once again to the function.
54
- csv_content, header = csvs.read_csv_to_list(single_file['file_path'], header=header, **print_kwargs)
131
+ csv_content, header = csvs.read_csv_to_list_of_dicts_by_header(single_file['file_path'], header=header, **print_kwargs)
55
132
  # Any way the first file will be read with header.
56
133
  logs_content.extend(csv_content)
57
134
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: atomicshop
3
- Version: 2.12.0
3
+ Version: 2.12.3
4
4
  Summary: Atomic functions and classes to make developer life easier
5
5
  Author: Denis Kras
6
6
  License: MIT License
@@ -1,15 +1,15 @@
1
- atomicshop/__init__.py,sha256=8ZKqx9-Se0JtEyqegyhmE18yPrampj8_boZTeK7vSMc,123
1
+ atomicshop/__init__.py,sha256=IVAETwFBOuR1wwKN05VRVYWganSHrqXew86vdZG64O8,123
2
2
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
3
3
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
4
4
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
5
- atomicshop/appointment_management.py,sha256=N3wVGJgrqJfsj_lqiRfaL3FxMEe57by5Stzanh189mk,7263
5
+ atomicshop/appointment_management.py,sha256=BsYH_PClTGLVazcuNjt30--hpXKYjSmHp1R1iQbM4Hc,7330
6
6
  atomicshop/certificates.py,sha256=J-cmd6Rpq3zZyzsOH-GcdqIXdg2UwM8_E9mg7XtUph8,3787
7
7
  atomicshop/command_line_processing.py,sha256=u5yT9Ger_cu7ni5ID0VFlRbVD46ARHeNC9tRM-_YXrQ,1038
8
8
  atomicshop/config_init.py,sha256=z2RXD_mw9nQlAOpuGry1h9QT-2LhNscXgGAktN3dCVQ,2497
9
9
  atomicshop/console_output.py,sha256=AOSJjrRryE97PAGtgDL03IBtWSi02aNol8noDnW3k6M,4667
10
10
  atomicshop/console_user_response.py,sha256=31HIy9QGXa7f-GVR8MzJauQ79E_ZqAeagF3Ks4GGdDU,3234
11
11
  atomicshop/datetimes.py,sha256=ICr0_gQqWnIw4BuNtabrHzjSlwnZkBfhyCrOILs5xpU,14623
12
- atomicshop/diff_check.py,sha256=RON9cSTgy3jAnwUmAUkOyfF6bgrBKOq9Sbgyl3RYodw,12350
12
+ atomicshop/diff_check.py,sha256=R4G9QISkTQAnJDOKUE6pH6xJ-roMsUwlSd7eVjTkHWA,14517
13
13
  atomicshop/dns.py,sha256=bNZOo5jVPzq7OT2qCPukXoK3zb1oOsyaelUwQEyK1SA,2500
14
14
  atomicshop/domains.py,sha256=Rxu6JhhMqFZRcoFs69IoEd1PtYca0lMCG6F1AomP7z4,3197
15
15
  atomicshop/emails.py,sha256=I0KyODQpIMEsNRi9YWSOL8EUPBiWyon3HRdIuSj3AEU,1410
@@ -104,7 +104,7 @@ atomicshop/etw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
104
104
  atomicshop/etw/dns_trace.py,sha256=RaREpwJETAMZSd1Lhbg0sO3ugBMw3y1fSKdvP5NfTqM,5189
105
105
  atomicshop/etw/etw.py,sha256=xVJNbfCq4KgRfsDnul6CrIdAMl9xRBixZ-hUyqiB2g4,2403
106
106
  atomicshop/file_io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
107
- atomicshop/file_io/csvs.py,sha256=4R4Kij8FmxNwXFjDtlF_A0flAk0Hj5nZKlEnqC5VxgQ,3125
107
+ atomicshop/file_io/csvs.py,sha256=oc4ijOHYzayx89DfW2_cktrf81kcGVFKUvKQDAljVrA,5300
108
108
  atomicshop/file_io/docxs.py,sha256=6tcYFGp0vRsHR47VwcRqwhdt2DQOwrAUYhrwN996n9U,5117
109
109
  atomicshop/file_io/file_io.py,sha256=FR84ihjGlr7Eqejo-_js4nBICVst31axD0bwX19S2eM,6385
110
110
  atomicshop/file_io/jsons.py,sha256=q9ZU8slBKnHLrtn3TnbK1qxrRpj5ZvCm6AlsFzoANjo,5303
@@ -118,7 +118,7 @@ atomicshop/mitm/initialize_engines.py,sha256=UGdT5DKYNri3MNOxESP7oeSxYiUDrVilJ4j
118
118
  atomicshop/mitm/initialize_mitm_server.py,sha256=aXNZlRu1_RGjC7lagvs2Q8rjQiygxYucy-U4C_SBnsk,13871
119
119
  atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
120
120
  atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
121
- atomicshop/mitm/statistic_analyzer.py,sha256=1g5l6X-NbnHvh_TREJRumTDWgE4ixUNJ8pKGneKcf4Y,23524
121
+ atomicshop/mitm/statistic_analyzer.py,sha256=K6HN7iKMthpEZYmVS1aa0jpW2g5Owq4Jl-mZIQzxWYo,23542
122
122
  atomicshop/mitm/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
123
  atomicshop/mitm/engines/create_module_template.py,sha256=tRjVSm1sD6FzML71Qbuwvita0qsusdFGm8NZLsZ-XMs,4853
124
124
  atomicshop/mitm/engines/create_module_template_example.py,sha256=X5xhvbV6-g9jU_bQVhf_crZmaH50LRWz3bS-faQ18ds,489
@@ -196,14 +196,14 @@ atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py,sha2
196
196
  atomicshop/wrappers/factw/postgresql/__init__.py,sha256=xMBn2d3Exo23IPP2F_9-SXmOlhFbwWDgS9KwozSTjA0,162
197
197
  atomicshop/wrappers/factw/postgresql/analysis.py,sha256=2Rxzy2jyq3zEKIo53z8VkjuslKE_i5mq2ZpmJAvyd6U,716
198
198
  atomicshop/wrappers/factw/postgresql/file_object.py,sha256=VRiCXnsd6yDbnsE-TEKYPC-gkAgFVkE6rygRrJLQShI,713
199
- atomicshop/wrappers/factw/postgresql/firmware.py,sha256=_fiRv8biUxazfiCFXpqYDUgZHSNe4cgtZAKO69nIZ4M,10759
199
+ atomicshop/wrappers/factw/postgresql/firmware.py,sha256=wnohSnSOCmlTUCzzHIIPGkRrnownlGKgIFyhhdhNEoA,10759
200
200
  atomicshop/wrappers/factw/postgresql/fw_files.py,sha256=P1jq4AAZa7fygWdEZtFJOnfz4tyqmPpvFzEMDKrCRkU,1291
201
201
  atomicshop/wrappers/factw/postgresql/included_files.py,sha256=sn5YhLkrsvjhrVSA8O8YUNfbqR9STprSuQGEnHsK0jE,1025
202
202
  atomicshop/wrappers/factw/postgresql/virtual_file_path.py,sha256=iR68A_My_ohgRcYdueMaQF9EHOgBRN3bIi8Nq59g3kc,1098
203
203
  atomicshop/wrappers/factw/rest/__init__.py,sha256=MuzZDJ38myxmwLhNhHIsDk0DXkcNbsB_t4R4SSYl--Y,150
204
204
  atomicshop/wrappers/factw/rest/binary_search.py,sha256=AXMFTma3awymrSlE8T1MSV8Q-PCqk586WBDlBr4TbR4,826
205
205
  atomicshop/wrappers/factw/rest/file_object.py,sha256=E_CA9lYpUqpxPDJ8c9dAqQAkJq8NafTecKa3q3EKr40,3218
206
- atomicshop/wrappers/factw/rest/firmware.py,sha256=FezneouU1lUO9uZ6_8ZQNxr4MDlFIoTbBgjIZiNo3_k,20387
206
+ atomicshop/wrappers/factw/rest/firmware.py,sha256=MEdrupbZbjsAsCpTlSast2Y610WbXWCtuDT75rr0g3g,20387
207
207
  atomicshop/wrappers/factw/rest/router.py,sha256=fdGok5ESBxcZHIBgM93l4yTPRGoeooQNsrPWIETieGk,710
208
208
  atomicshop/wrappers/factw/rest/statistics.py,sha256=vznwzKP1gEF7uXz3HsuV66BU9wrp73N_eFqpFpye9Qw,653
209
209
  atomicshop/wrappers/factw/rest/status.py,sha256=4O3xS1poafwyUiLDkhyx4oMMe4PBwABuRPpOMnMKgIU,641
@@ -214,7 +214,7 @@ atomicshop/wrappers/loggingw/formatters.py,sha256=mUtcJJfmhLNrwUVYShXTmdu40dBaJu
214
214
  atomicshop/wrappers/loggingw/handlers.py,sha256=qm5Fbu8eDmlstMduUe5nKUlJU5IazFkSnQizz8Qt2os,5479
215
215
  atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
216
216
  atomicshop/wrappers/loggingw/loggingw.py,sha256=v9WAseZXB50LluT9rIUcRvvevg2nLVKPgz3dbGejfV0,12151
217
- atomicshop/wrappers/loggingw/reading.py,sha256=xs7L6Jo-vedrhCVP7m-cJo0VhWmoSoK86avR4Tm0kG4,3675
217
+ atomicshop/wrappers/loggingw/reading.py,sha256=XKQVggjleXqS-sjY8q7o_xzMBhWDdJO0A1d4DDE2rDA,7183
218
218
  atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
219
219
  atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
220
220
  atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -249,8 +249,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
249
249
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
250
250
  atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
251
251
  atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
252
- atomicshop-2.12.0.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
253
- atomicshop-2.12.0.dist-info/METADATA,sha256=yur7wBqT9rw3Av358zCJ1dGraOU-lnOLyUXzoRDu_yc,10447
254
- atomicshop-2.12.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
255
- atomicshop-2.12.0.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
256
- atomicshop-2.12.0.dist-info/RECORD,,
252
+ atomicshop-2.12.3.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
253
+ atomicshop-2.12.3.dist-info/METADATA,sha256=dlGv8-kBilmNG_mToWm8PUJK9XwcjNFCKZc2-jtzdvc,10447
254
+ atomicshop-2.12.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
255
+ atomicshop-2.12.3.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
256
+ atomicshop-2.12.3.dist-info/RECORD,,