atomicshop 2.14.5__py3-none-any.whl → 2.14.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic. Click here for more details.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
1
  """Atomic Basic functions and classes to make developer life easier"""
2
2
 
3
3
  __author__ = "Den Kras"
4
- __version__ = '2.14.5'
4
+ __version__ = '2.14.6'
atomicshop/datetimes.py CHANGED
@@ -3,6 +3,7 @@ from datetime import timedelta
3
3
  import time
4
4
  import random
5
5
  import re
6
+ from typing import Union
6
7
 
7
8
 
8
9
  class MonthToNumber:
@@ -47,6 +48,28 @@ class MonthToNumber:
47
48
  'דצמבר': '12'}
48
49
 
49
50
 
51
+ # Mapping of datetime format specifiers to regex patterns
52
+ DATE_TIME_STRING_FORMAT_SPECIFIERS_TO_REGEX: dict = {
53
+ '%Y': r'\d{4}', # Year with century
54
+ '%m': r'\d{2}', # Month as a zero-padded decimal number
55
+ '%d': r'\d{2}', # Day of the month as a zero-padded decimal number
56
+ '%H': r'\d{2}', # Hour (24-hour clock) as a zero-padded decimal number
57
+ '%I': r'\d{2}', # Hour (12-hour clock) as a zero-padded decimal number
58
+ '%M': r'\d{2}', # Minute as a zero-padded decimal number
59
+ '%S': r'\d{2}', # Second as a zero-padded decimal number
60
+ '%f': r'\d{6}', # Microsecond as a decimal number, zero-padded on the left
61
+ '%j': r'\d{3}', # Day of the year as a zero-padded decimal number
62
+ '%U': r'\d{2}', # Week number of the year (Sunday as the first day of the week)
63
+ '%W': r'\d{2}', # Week number of the year (Monday as the first day of the week)
64
+ '%w': r'\d', # Weekday as a decimal number (0 = Sunday, 6 = Saturday)
65
+ '%y': r'\d{2}', # Year without century
66
+ '%p': r'(AM|PM)', # AM or PM
67
+ '%z': r'[+-]\d{4}', # UTC offset in the form ±HHMM
68
+ '%Z': r'[A-Z]+', # Time zone name
69
+ '%%': r'%' # Literal '%'
70
+ }
71
+
72
+
50
73
  def get_datetime_from_complex_string_by_pattern(complex_string: str, date_pattern: str) -> tuple[datetime, str, float]:
51
74
  """
52
75
  Function will get datetime object from a complex string by pattern.
@@ -71,6 +94,49 @@ def get_datetime_from_complex_string_by_pattern(complex_string: str, date_patter
71
94
  raise ValueError("No valid date found in the string")
72
95
 
73
96
 
97
+ def datetime_format_to_regex(format_str: str) -> str:
98
+ """
99
+ Convert a datetime format string to a regex pattern.
100
+
101
+ :param format_str: The datetime format string to convert.
102
+ :return: The regex pattern that matches the format string.
103
+
104
+ Example:
105
+ datetime_format_to_regex("%Y-%m-%d")
106
+
107
+ Output:
108
+ '^\\d{4}-\\d{2}-\\d{2}$'
109
+ """
110
+
111
+ # Escape all non-format characters
112
+ escaped_format_str = re.escape(format_str)
113
+
114
+ # Replace escaped format specifiers with their regex equivalents
115
+ for specifier, regex in DATE_TIME_STRING_FORMAT_SPECIFIERS_TO_REGEX.items():
116
+ escaped_format_str = escaped_format_str.replace(re.escape(specifier), regex)
117
+
118
+ # Return the full regex pattern with start and end anchors
119
+ return f"^{escaped_format_str}$"
120
+
121
+
122
+ def extract_datetime_format_from_string(complex_string: str) -> Union[str, None]:
123
+ """
124
+ Extract the datetime format from the suffix used in TimedRotatingFileHandler.
125
+
126
+ Args:
127
+ - suffix: The suffix string from the handler.
128
+
129
+ Returns:
130
+ - The datetime format string, or None if it cannot be determined.
131
+ """
132
+ # Regular expression to match datetime format components in the suffix
133
+ datetime_format_regex = r"%[a-zA-Z]"
134
+ matches = re.findall(datetime_format_regex, complex_string)
135
+ if matches:
136
+ return "".join(matches)
137
+ return None
138
+
139
+
74
140
  def convert_single_digit_to_zero_padded(string: str):
75
141
  """
76
142
  Function will check if string is a single character digit and will add zero in front of it.
@@ -0,0 +1,37 @@
1
+ import configparser
2
+
3
+
4
+ class CategoryNotFoundInConfigError(Exception):
5
+ pass
6
+
7
+
8
+ def edit_property(category: str, category_property: str, value: str, config_file_path: str) -> None:
9
+ """
10
+ Edit a property in the config file.
11
+
12
+ :param category: str, Category in the config file.
13
+ :param category_property: str, Property in the category.
14
+ :param value: str, Value to set to the property.
15
+ :param config_file_path: str, Path to the config file.
16
+
17
+ :return: None.
18
+
19
+ -----------
20
+
21
+ Config Example:
22
+ [category]
23
+ category_property = value
24
+ """
25
+ config = configparser.ConfigParser()
26
+ config.read(config_file_path)
27
+
28
+ if category not in config:
29
+ raise CategoryNotFoundInConfigError(f"Category '{category}' not found in the config file.")
30
+
31
+ # Change the value of the property if it is different from the current value.
32
+ current_value = config[category][category_property]
33
+ if current_value != value:
34
+ config[category][category_property] = value
35
+
36
+ with open(config_file_path, "w") as configfile:
37
+ config.write(configfile)
@@ -83,7 +83,7 @@ class ModuleCategory:
83
83
 
84
84
  # Initiating logger for each engine by its name
85
85
  # initiate_logger(current_module.engine_name, log_file_extension)
86
- loggingw.get_complex_logger(
86
+ loggingw.create_logger(
87
87
  logger_name=self.engine_name,
88
88
  directory_path=logs_path,
89
89
  add_stream=True,
@@ -1,5 +1,7 @@
1
1
  import os
2
+ import sys
2
3
  import threading
4
+ import time
3
5
 
4
6
  # Importing atomicshop package to get the version of the package.
5
7
  import atomicshop
@@ -10,7 +12,8 @@ from .connection_thread_worker import thread_worker_main
10
12
  from .. import filesystem, queues
11
13
  from ..python_functions import get_current_python_version_string, check_python_version_compliance
12
14
  from ..wrappers.socketw.socket_wrapper import SocketWrapper
13
- from ..wrappers.socketw.dns_server import DnsServer
15
+ from ..wrappers.socketw import dns_server
16
+ from ..wrappers.psutilw import networks
14
17
  from ..basics import dicts_nested
15
18
  from ..wrappers.loggingw import loggingw
16
19
  from ..print_api import print_api
@@ -42,7 +45,7 @@ def initialize_mitm_server(config_static):
42
45
 
43
46
  # Create a logger that will log messages to file, Initiate System logger.
44
47
  logger_name = "system"
45
- system_logger = loggingw.get_complex_logger(
48
+ system_logger = loggingw.create_logger(
46
49
  logger_name=logger_name,
47
50
  file_path=f'{config['log']['logs_path']}{os.sep}{logger_name}.txt',
48
51
  add_stream=True,
@@ -182,7 +185,7 @@ def initialize_mitm_server(config_static):
182
185
  config_static.CONFIG_EXTENDED['certificates']['domains_all_times'] = list(domains_engine_list_full)
183
186
 
184
187
  # Creating Statistics logger.
185
- statistics_logger = loggingw.get_complex_logger(
188
+ statistics_logger = loggingw.create_logger(
186
189
  logger_name="statistics",
187
190
  directory_path=config['log']['logs_path'],
188
191
  add_timedfile=True,
@@ -192,7 +195,7 @@ def initialize_mitm_server(config_static):
192
195
  )
193
196
 
194
197
  network_logger_name = "network"
195
- network_logger = loggingw.get_complex_logger(
198
+ network_logger = loggingw.create_logger(
196
199
  logger_name=network_logger_name,
197
200
  directory_path=config['log']['logs_path'],
198
201
  add_stream=True,
@@ -211,19 +214,41 @@ def initialize_mitm_server(config_static):
211
214
 
212
215
  # === Initialize DNS module ====================================================================================
213
216
  if config['dns']['enable_dns_server']:
217
+ # Check if the DNS server port is in use.
218
+ port_in_use = networks.get_processes_using_port_list([config['dns']['listening_port']])
219
+ if port_in_use:
220
+ for port, process_info in port_in_use.items():
221
+ message = f"Port [{port}] is already in use by process: {process_info}"
222
+ print_api(message, error_type=True, logger_method='critical', logger=system_logger)
223
+
224
+ # Wait for the message to be printed and saved to file.
225
+ time.sleep(1)
226
+ sys.exit(1)
227
+
214
228
  # before executing TCP sockets and after executing 'network' logger.
215
- dns_server = DnsServer(config)
229
+ dns_server_instance = dns_server.DnsServer(config)
216
230
  # Passing the engine domain list to DNS server to work with.
217
231
  # 'list' function re-initializes the current list, or else it will be the same instance object.
218
- dns_server.domain_list = list(domains_engine_list_full)
232
+ dns_server_instance.domain_list = list(domains_engine_list_full)
219
233
 
220
- dns_server.request_domain_queue = domain_queue
234
+ dns_server_instance.request_domain_queue = domain_queue
221
235
  # Initiate the thread.
222
- threading.Thread(target=dns_server.start).start()
236
+ dns_thread = threading.Thread(target=dns_server_instance.start)
237
+ dns_thread.daemon = True
238
+ dns_thread.start()
223
239
 
224
240
  # === EOF Initialize DNS module ================================================================================
225
241
  # === Initialize TCP Server ====================================================================================
226
242
  if config['tcp']['enable_tcp_server']:
243
+ port_in_use = networks.get_processes_using_port_list(config['tcp']['listening_port_list'])
244
+ if port_in_use:
245
+ for port, process_info in port_in_use.items():
246
+ print_api(f"Port [{port}] is already in use by process: {process_info}", logger=system_logger,
247
+ error_type=True, logger_method='critical')
248
+ # Wait for the message to be printed and saved to file.
249
+ time.sleep(1)
250
+ sys.exit(1)
251
+
227
252
  socket_wrapper = SocketWrapper(
228
253
  config=dicts_nested.merge(config, config_static.CONFIG_EXTENDED), logger=listener_logger,
229
254
  statistics_logger=statistics_logger)
atomicshop/print_api.py CHANGED
@@ -99,15 +99,16 @@ def print_api(
99
99
  # If 'logger.error' should be outputted to console, and 'color' wasn't selected, then set color to 'yellow'.
100
100
  if logger_method == 'error' and not color:
101
101
  color = 'yellow'
102
- error_type = True
103
102
  # If 'logger.critical' should be outputted to console, and 'color' wasn't selected, then set color to 'red'.
104
103
  elif logger_method == 'critical' and not color:
105
104
  color = 'red'
106
- error_type = True
107
105
 
108
106
  if color:
109
107
  message = get_colors_basic_dict(color) + message + ColorsBasic.END
110
108
 
109
+ if logger_method == 'error' or logger_method == 'critical':
110
+ error_type = True
111
+
111
112
  # If exception was raised and 'stderr=True'.
112
113
  if sys.exc_info()[0] is not None and stderr:
113
114
  # If 'traceback' is set to 'True', we'll output traceback of exception.
@@ -1,15 +1,50 @@
1
1
  import logging
2
2
  from logging.handlers import TimedRotatingFileHandler, QueueListener, QueueHandler
3
+ from logging import FileHandler
4
+ import time
3
5
  import re
4
6
  import os
5
7
  from pathlib import Path
8
+ import queue
9
+ from typing import Literal, Union
10
+ import threading
11
+ from datetime import datetime
12
+
13
+ from . import loggers, formatters
14
+ from ... import datetimes, filesystem
6
15
 
7
16
 
8
17
  DEFAULT_DATE_STRING_FORMAT: str = "%Y_%m_%d"
9
- DEFAULT_DATE_REGEX_PATTERN: str = r"^\d{4}_\d{2}_\d{2}$"
18
+ # Not used, only for the reference:
19
+ # _DEFAULT_DATE_REGEX_PATTERN: str = r"^\d{4}_\d{2}_\d{2}$"
20
+
21
+
22
+ class ForceAtTimeRotationTimedRotatingFileHandler(TimedRotatingFileHandler):
23
+ def __init__(self, *args, **kwargs):
24
+ super().__init__(*args, **kwargs)
25
+ self._last_rotated_date = None
26
+ self._start_rotation_check()
27
+
28
+ def _start_rotation_check(self):
29
+ self._rotation_thread = threading.Thread(target=self._check_for_rotation)
30
+ self._rotation_thread.daemon = True
31
+ self._rotation_thread.start()
32
+
33
+ def _check_for_rotation(self):
34
+ while True:
35
+ now = datetime.now()
36
+ current_date = now.date()
37
+ # Check if it's midnight and the logs haven't been rotated today
38
+ if now.hour == 0 and now.minute == 0 and current_date != self._last_rotated_date:
39
+ self._last_rotated_date = current_date
40
+ self.doRollover()
41
+ time.sleep(0.1)
42
+
43
+ def doRollover(self):
44
+ super().doRollover()
10
45
 
11
46
 
12
- class TimedRotatingFileHandlerWithHeader(TimedRotatingFileHandler):
47
+ class TimedRotatingFileHandlerWithHeader(ForceAtTimeRotationTimedRotatingFileHandler):
13
48
  """
14
49
  Custom TimedRotatingFileHandler that writes a header to the log file each time there is a file rotation.
15
50
  Useful for writing CSV files.
@@ -36,6 +71,164 @@ class TimedRotatingFileHandlerWithHeader(TimedRotatingFileHandler):
36
71
  super().emit(record)
37
72
 
38
73
 
74
+ def _process_formatter_attribute(
75
+ formatter: Union[
76
+ Literal['DEFAULT', 'MESSAGE'],
77
+ str,
78
+ None],
79
+ file_type: Union[
80
+ Literal['txt', 'csv', 'json'],
81
+ None] = None
82
+ ):
83
+ """
84
+ Function to process the formatter attribute.
85
+ """
86
+
87
+ if formatter == 'DEFAULT' and file_type is None:
88
+ return formatters.DEFAULT_STREAM_FORMATTER
89
+ elif formatter == 'DEFAULT' and file_type == 'txt':
90
+ return formatters.DEFAULT_FORMATTER_TXT_FILE
91
+ elif formatter == 'DEFAULT' and file_type == 'csv':
92
+ return formatters.DEFAULT_FORMATTER_CSV_FILE
93
+ elif formatter == 'DEFAULT' and file_type == 'json':
94
+ return formatters.DEFAULT_MESSAGE_FORMATTER
95
+ elif formatter == 'MESSAGE':
96
+ return formatters.DEFAULT_MESSAGE_FORMATTER
97
+ else:
98
+ return formatter
99
+
100
+
101
+ def add_stream_handler(
102
+ logger: logging.Logger,
103
+ logging_level: str = "DEBUG",
104
+ formatter: Union[
105
+ Literal['DEFAULT', 'MESSAGE'],
106
+ str,
107
+ None] = None,
108
+ formatter_use_nanoseconds: bool = False
109
+ ):
110
+ """
111
+ Function to add StreamHandler to logger.
112
+ Stream formatter will output messages to the console.
113
+ """
114
+
115
+ # Getting the StreamHandler.
116
+ stream_handler = get_stream_handler()
117
+ # Setting log level for the handler, that will use the logger while initiated.
118
+ loggers.set_logging_level(stream_handler, logging_level)
119
+
120
+ # If formatter_message_only is set to True, then formatter will be used only for the 'message' part.
121
+ formatter = _process_formatter_attribute(formatter)
122
+
123
+ # If formatter was provided, then it will be used.
124
+ if formatter:
125
+ logging_formatter = formatters.get_logging_formatter_from_string(
126
+ formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
127
+ set_formatter(stream_handler, logging_formatter)
128
+
129
+ # Adding the handler to the main logger
130
+ loggers.add_handler(logger, stream_handler)
131
+
132
+ # Disable propagation from the 'root' logger, so we will not see the messages twice.
133
+ loggers.set_propagation(logger)
134
+
135
+
136
+ def add_timedfilehandler_with_queuehandler(
137
+ logger: logging.Logger,
138
+ file_path: str,
139
+ file_type: Literal[
140
+ 'txt',
141
+ 'csv',
142
+ 'json'] = 'txt',
143
+ logging_level="DEBUG",
144
+ formatter: Union[
145
+ Literal['DEFAULT', 'MESSAGE'],
146
+ str,
147
+ None] = None,
148
+ formatter_use_nanoseconds: bool = False,
149
+ when: str = 'midnight',
150
+ interval: int = 1,
151
+ delay: bool = True,
152
+ encoding=None,
153
+ header: str = None
154
+ ):
155
+ """
156
+ Function to add TimedRotatingFileHandler and QueueHandler to logger.
157
+ TimedRotatingFileHandler will output messages to the file through QueueHandler.
158
+ This is needed, since TimedRotatingFileHandler is not thread-safe, though official docs say it is.
159
+ """
160
+
161
+ # If file name wasn't provided we will use the logger name instead.
162
+ # if not file_name_no_extension:
163
+ # file_name_no_extension = logger.name
164
+
165
+ # Setting the TimedRotatingFileHandler, without adding it to the logger.
166
+ # It will be added to the QueueListener, which will use the TimedRotatingFileHandler to write logs.
167
+ # This is needed since there's a bug in TimedRotatingFileHandler, which won't let it be used with
168
+ # threads the same way it would be used for multiprocess.
169
+
170
+ # Creating file handler with log filename. At this stage the log file is created and locked by the handler,
171
+ # Unless we use "delay=True" to tell the class to write the file only if there's something to write.
172
+
173
+ filesystem.create_directory(os.path.dirname(file_path))
174
+
175
+ if file_type == "csv":
176
+ # If file extension is CSV, we'll set the header to the file.
177
+ # This is needed since the CSV file will be rotated, and we'll need to set the header each time.
178
+ # We'll use the custom TimedRotatingFileHandlerWithHeader class.
179
+ file_handler = get_timed_rotating_file_handler_with_header(
180
+ file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
181
+ else:
182
+ file_handler = get_timed_rotating_file_handler(
183
+ file_path, when=when, interval=interval, delay=delay, encoding=encoding)
184
+
185
+ loggers.set_logging_level(file_handler, logging_level)
186
+
187
+ formatter = _process_formatter_attribute(formatter, file_type=file_type)
188
+
189
+ # If formatter was passed to the function we'll add it to handler.
190
+ if formatter:
191
+ # Convert string to Formatter object. Moved to newer styling of python 3: style='{'
192
+ logging_formatter = formatters.get_logging_formatter_from_string(
193
+ formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
194
+ # Setting the formatter in file handler.
195
+ set_formatter(file_handler, logging_formatter)
196
+
197
+ # This function will change the suffix behavior of the rotated file name.
198
+ change_rotated_filename(file_handler)
199
+
200
+ queue_handler = start_queue_listener_for_file_handler_and_get_queue_handler(file_handler)
201
+ loggers.set_logging_level(queue_handler, logging_level)
202
+
203
+ # Add the QueueHandler to the logger.
204
+ loggers.add_handler(logger, queue_handler)
205
+
206
+ # Disable propagation from the 'root' logger, so we will not see the messages twice.
207
+ loggers.set_propagation(logger)
208
+
209
+
210
+ def start_queue_listener_for_file_handler_and_get_queue_handler(file_handler):
211
+ """
212
+ Function to start QueueListener, which will put the logs from FileHandler to the Queue.
213
+ QueueHandler will get the logs from the Queue and put them to the file that was set in the FileHandler.
214
+
215
+ :param file_handler: FileHandler object.
216
+ :return: QueueHandler object.
217
+ """
218
+
219
+ # Create the Queue between threads. "-1" means that there can infinite number of items that can be
220
+ # put in the Queue. if integer is bigger than 0, it means that this will be the maximum
221
+ # number of items.
222
+ queue_object = queue.Queue(-1)
223
+ # Create QueueListener, which will put the logs from FileHandler to the Queue and put the logs to the queue.
224
+ start_queue_listener_for_file_handler(file_handler, queue_object)
225
+
226
+ return get_queue_handler(queue_object)
227
+
228
+
229
+ # BASE FUNCTIONS =======================================================================================================
230
+
231
+
39
232
  def get_stream_handler() -> logging.StreamHandler:
40
233
  """
41
234
  Function to get a StreamHandler.
@@ -49,7 +242,7 @@ def get_stream_handler() -> logging.StreamHandler:
49
242
 
50
243
  def get_timed_rotating_file_handler(
51
244
  log_file_path: str, when: str = "midnight", interval: int = 1, delay: bool = False, encoding=None
52
- ) -> logging.handlers.TimedRotatingFileHandler:
245
+ ) -> ForceAtTimeRotationTimedRotatingFileHandler:
53
246
  """
54
247
  Function to get a TimedRotatingFileHandler.
55
248
  This handler will output messages to a file, rotating the log file at certain timed intervals.
@@ -67,7 +260,7 @@ def get_timed_rotating_file_handler(
67
260
  :return: TimedRotatingFileHandler.
68
261
  """
69
262
 
70
- return TimedRotatingFileHandler(
263
+ return ForceAtTimeRotationTimedRotatingFileHandler(
71
264
  filename=log_file_path, when=when, interval=interval, delay=delay, encoding=encoding)
72
265
 
73
266
 
@@ -157,8 +350,7 @@ def get_handler_name(handler: logging.Handler) -> str:
157
350
 
158
351
  def change_rotated_filename(
159
352
  file_handler: logging.Handler,
160
- date_format_string: str = None,
161
- date_regex_pattern: str = None
353
+ date_format_string: str = None
162
354
  ):
163
355
  """
164
356
  Function to change the way TimedRotatingFileHandler managing the rotating filename.
@@ -166,8 +358,6 @@ def change_rotated_filename(
166
358
  :param file_handler: FileHandler to change the rotating filename for.
167
359
  :param date_format_string: Date format string to use for the rotated log filename.
168
360
  If None, the default 'DEFAULT_DATE_STRING_FORMAT' will be used.
169
- :param date_regex_pattern: Regex pattern to match the rotated log filenames.
170
- If None, the default 'DEFAULT_DATE_REGEX_PATTERN' will be used.
171
361
  """
172
362
  # Changing the way TimedRotatingFileHandler managing the rotating filename
173
363
  # Default file suffix is only "Year_Month_Day" with addition of the dot (".") character to the
@@ -188,8 +378,10 @@ def change_rotated_filename(
188
378
  # file_handler.extMatch = re.compile(r"^\d{4}_\d{2}_\d{2}" + re.escape(log_file_extension) + r"$")
189
379
  # file_handler.extMatch = re.compile(r"^\d{4}_\d{2}_\d{2}.txt$")
190
380
 
191
- # Update the namer function to format the rotated filename correctly
192
- def namer(name):
381
+ def callback_namer(name):
382
+ """
383
+ Callback function to change the filename of the rotated log file on file rotation.
384
+ """
193
385
  # Currently the 'name' is full file path + '.' + logfile_suffix.
194
386
  # Example: 'C:\\path\\to\\file.log._2021_12_24'
195
387
  # Get the parent directory of the file: C:\path\to
@@ -205,22 +397,23 @@ def change_rotated_filename(
205
397
 
206
398
  return f"{parent_dir}{os.sep}{file_stem}{date_part}{file_extension}"
207
399
 
208
- # Construct the new suffix without the file extension
209
400
  if date_format_string is None:
210
- logfile_suffix = f"_{DEFAULT_DATE_STRING_FORMAT}"
211
- else:
212
- logfile_suffix = f"_{date_format_string}"
401
+ date_format_string = DEFAULT_DATE_STRING_FORMAT
402
+
403
+ # Construct the new suffix without the file extension
404
+ logfile_suffix = f"_{date_format_string}"
405
+
406
+ # Get regex pattern from string format.
407
+ # Example: '%Y_%m_%d' -> r'\d{4}_\d{2}_\d{2}'
408
+ date_regex_pattern = datetimes.datetime_format_to_regex(date_format_string)
213
409
 
214
410
  # Regex pattern to match the rotated log filenames
215
- if date_regex_pattern is None:
216
- logfile_regex_suffix = re.compile(DEFAULT_DATE_REGEX_PATTERN)
217
- else:
218
- logfile_regex_suffix = re.compile(date_regex_pattern)
411
+ logfile_regex_suffix = re.compile(date_regex_pattern)
219
412
 
220
413
  # Update the handler's suffix to include the date format
221
414
  file_handler.suffix = logfile_suffix
222
415
 
223
- file_handler.namer = namer
416
+ file_handler.namer = callback_namer
224
417
  # Update the handler's extMatch regex to match the new filename format
225
418
  file_handler.extMatch = logfile_regex_suffix
226
419
 
@@ -242,3 +435,23 @@ def has_handlers(logger: logging.Logger) -> bool:
242
435
  return False
243
436
  else:
244
437
  return True
438
+
439
+
440
+ def extract_datetime_format_from_file_handler(file_handler: FileHandler) -> Union[str, None]:
441
+ """
442
+ Extract the datetime string formats from all TimedRotatingFileHandlers in the logger.
443
+
444
+ Args:
445
+ - logger: The logger instance.
446
+
447
+ Returns:
448
+ - A list of datetime string formats used by the handlers.
449
+ """
450
+ # Extract the suffix
451
+ suffix = getattr(file_handler, 'suffix', None)
452
+ if suffix:
453
+ datetime_format = datetimes.extract_datetime_format_from_string(suffix)
454
+ if datetime_format:
455
+ return datetime_format
456
+
457
+ return None
@@ -1,12 +1,11 @@
1
1
  import logging
2
2
  import os
3
- import queue
4
3
  from typing import Literal, Union
5
4
 
6
5
  from . import loggers, handlers, formatters
7
6
 
8
7
 
9
- def get_complex_logger(
8
+ def create_logger(
10
9
  logger_name: str,
11
10
  file_path: str = None,
12
11
  directory_path: str = None,
@@ -18,20 +17,18 @@ def get_complex_logger(
18
17
  'json'] = 'txt',
19
18
  logging_level="DEBUG",
20
19
  formatter_streamhandler: Union[
21
- Literal[
22
- 'MESSAGE',
23
- 'DEFAULT'],
20
+ Literal['MESSAGE', 'DEFAULT'],
21
+ str,
24
22
  None] = None,
25
23
  formatter_filehandler: Union[
26
- Literal[
27
- 'MESSAGE',
28
- 'DEFAULT',],
24
+ Literal['MESSAGE', 'DEFAULT'],
25
+ str,
29
26
  None] = None,
30
27
  formatter_streamhandler_use_nanoseconds: bool = True,
31
28
  formatter_filehandler_use_nanoseconds: bool = True,
32
29
  when: str = "midnight",
33
30
  interval: int = 1,
34
- delay: bool = True,
31
+ delay: bool = False,
35
32
  encoding=None,
36
33
  header: str = None
37
34
  ) -> logging.Logger:
@@ -56,6 +53,7 @@ def get_complex_logger(
56
53
  'DEFAULT': Default formatter will be used:
57
54
  "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
58
55
  'MESSAGE': Formatter will be used only for the 'message' part.
56
+ string: Custom formatter, regular syntax for logging.Formatter.
59
57
  :param formatter_filehandler: string, Formatter to use for handler. It is template of how a message will look like.
60
58
  None: No formatter will be used.
61
59
  'DEFAULT': Default formatter will be used for each file extension:
@@ -64,6 +62,7 @@ def get_complex_logger(
64
62
  json: '{"time": "%(asctime)s", "level": "%(levelname)s", "thread": "%(threadName)s",
65
63
  "logger": "%(name)s", "message": "%(message)s"}'
66
64
  'MESSAGE': Formatter will be used only for the 'message' part.
65
+ string: Custom formatter, regular syntax for logging.Formatter.
67
66
  :param formatter_streamhandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
68
67
  in the formatter in case you provide 'asctime' element.
69
68
  :param formatter_filehandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
@@ -96,7 +95,7 @@ def get_complex_logger(
96
95
  header: str = "time,host,error"
97
96
  output_log_file: str = "D:\\logs\\log_file.csv"
98
97
 
99
- error_logger = loggingw.get_complex_logger(
98
+ error_logger = loggingw.create_logger(
100
99
  logger_name=f'{self.__class__.__name__}_CSV',
101
100
  file_path=output_log_file,
102
101
  add_timedfile=True,
@@ -121,7 +120,7 @@ def get_complex_logger(
121
120
  header: str = "time,host,error"
122
121
  output_log_file: str = "D:\\logs\\log_file.txt"
123
122
 
124
- error_logger = loggingw.get_complex_logger(
123
+ error_logger = loggingw.create_logger(
125
124
  logger_name=f'{self.__class__.__name__}',
126
125
  file_path=output_log_file,
127
126
  add_stream=True,
@@ -152,12 +151,12 @@ def get_complex_logger(
152
151
  logger = get_logger_with_level(logger_name, logging_level)
153
152
 
154
153
  if add_stream:
155
- add_stream_handler(
154
+ handlers.add_stream_handler(
156
155
  logger=logger, logging_level=logging_level, formatter=formatter_streamhandler,
157
156
  formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
158
157
 
159
158
  if add_timedfile:
160
- add_timedfilehandler_with_queuehandler(
159
+ handlers.add_timedfilehandler_with_queuehandler(
161
160
  logger=logger, file_path=file_path, logging_level=logging_level, formatter=formatter_filehandler,
162
161
  formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds, file_type=file_type,
163
162
  when=when, interval=interval, delay=delay, encoding=encoding, header=header)
@@ -187,156 +186,6 @@ def get_logger_with_level(
187
186
  return logger
188
187
 
189
188
 
190
- def _process_formatter_attribute(
191
- formatter: Union[
192
- Literal['DEFAULT', 'MESSAGE'],
193
- None],
194
- file_type: Union[
195
- Literal['txt', 'csv', 'json'],
196
- None] = None
197
- ):
198
- """
199
- Function to process the formatter attribute.
200
- """
201
-
202
- if formatter == 'DEFAULT' and file_type is None:
203
- return formatters.DEFAULT_STREAM_FORMATTER
204
- elif formatter == 'DEFAULT' and file_type == 'txt':
205
- return formatters.DEFAULT_FORMATTER_TXT_FILE
206
- elif formatter == 'DEFAULT' and file_type == 'csv':
207
- return formatters.DEFAULT_FORMATTER_CSV_FILE
208
- elif formatter == 'DEFAULT' and file_type == 'json':
209
- return formatters.DEFAULT_MESSAGE_FORMATTER
210
- elif formatter == 'MESSAGE':
211
- return formatters.DEFAULT_MESSAGE_FORMATTER
212
- else:
213
- return formatter
214
-
215
-
216
- def add_stream_handler(
217
- logger: logging.Logger,
218
- logging_level: str = "DEBUG",
219
- formatter: Union[
220
- Literal['DEFAULT', 'MESSAGE'],
221
- None] = None,
222
- formatter_use_nanoseconds: bool = False
223
- ):
224
- """
225
- Function to add StreamHandler to logger.
226
- Stream formatter will output messages to the console.
227
- """
228
-
229
- # Getting the StreamHandler.
230
- stream_handler = handlers.get_stream_handler()
231
- # Setting log level for the handler, that will use the logger while initiated.
232
- loggers.set_logging_level(stream_handler, logging_level)
233
-
234
- # If formatter_message_only is set to True, then formatter will be used only for the 'message' part.
235
- formatter = _process_formatter_attribute(formatter)
236
-
237
- # If formatter was provided, then it will be used.
238
- if formatter:
239
- logging_formatter = formatters.get_logging_formatter_from_string(
240
- formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
241
- handlers.set_formatter(stream_handler, logging_formatter)
242
-
243
- # Adding the handler to the main logger
244
- loggers.add_handler(logger, stream_handler)
245
-
246
- # Disable propagation from the 'root' logger, so we will not see the messages twice.
247
- loggers.set_propagation(logger)
248
-
249
-
250
- def add_timedfilehandler_with_queuehandler(
251
- logger: logging.Logger,
252
- file_path: str,
253
- file_type: Literal[
254
- 'txt',
255
- 'csv',
256
- 'json'] = 'txt',
257
- logging_level="DEBUG",
258
- formatter: Union[
259
- Literal['DEFAULT', 'MESSAGE'],
260
- None] = None,
261
- formatter_use_nanoseconds: bool = False,
262
- when: str = 'midnight',
263
- interval: int = 1,
264
- delay: bool = True,
265
- encoding=None,
266
- header: str = None
267
- ):
268
- """
269
- Function to add TimedRotatingFileHandler and QueueHandler to logger.
270
- TimedRotatingFileHandler will output messages to the file through QueueHandler.
271
- This is needed, since TimedRotatingFileHandler is not thread-safe, though official docs say it is.
272
- """
273
-
274
- # If file name wasn't provided we will use the logger name instead.
275
- # if not file_name_no_extension:
276
- # file_name_no_extension = logger.name
277
-
278
- # Setting the TimedRotatingFileHandler, without adding it to the logger.
279
- # It will be added to the QueueListener, which will use the TimedRotatingFileHandler to write logs.
280
- # This is needed since there's a bug in TimedRotatingFileHandler, which won't let it be used with
281
- # threads the same way it would be used for multiprocess.
282
-
283
- # Creating file handler with log filename. At this stage the log file is created and locked by the handler,
284
- # Unless we use "delay=True" to tell the class to write the file only if there's something to write.
285
-
286
- if file_type == "csv":
287
- # If file extension is CSV, we'll set the header to the file.
288
- # This is needed since the CSV file will be rotated, and we'll need to set the header each time.
289
- # We'll use the custom TimedRotatingFileHandlerWithHeader class.
290
- file_handler = handlers.get_timed_rotating_file_handler_with_header(
291
- file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
292
- else:
293
- file_handler = handlers.get_timed_rotating_file_handler(
294
- file_path, when=when, interval=interval, delay=delay, encoding=encoding)
295
-
296
- loggers.set_logging_level(file_handler, logging_level)
297
-
298
- formatter = _process_formatter_attribute(formatter, file_type=file_type)
299
-
300
- # If formatter was passed to the function we'll add it to handler.
301
- if formatter:
302
- # Convert string to Formatter object. Moved to newer styling of python 3: style='{'
303
- logging_formatter = formatters.get_logging_formatter_from_string(
304
- formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
305
- # Setting the formatter in file handler.
306
- handlers.set_formatter(file_handler, logging_formatter)
307
-
308
- # This function will change the suffix behavior of the rotated file name.
309
- handlers.change_rotated_filename(file_handler)
310
-
311
- queue_handler = start_queue_listener_for_file_handler_and_get_queue_handler(file_handler)
312
- loggers.set_logging_level(queue_handler, logging_level)
313
-
314
- # Add the QueueHandler to the logger.
315
- loggers.add_handler(logger, queue_handler)
316
-
317
- # Disable propagation from the 'root' logger, so we will not see the messages twice.
318
- loggers.set_propagation(logger)
319
-
320
-
321
- def start_queue_listener_for_file_handler_and_get_queue_handler(file_handler):
322
- """
323
- Function to start QueueListener, which will put the logs from FileHandler to the Queue.
324
- QueueHandler will get the logs from the Queue and put them to the file that was set in the FileHandler.
325
-
326
- :param file_handler: FileHandler object.
327
- :return: QueueHandler object.
328
- """
329
-
330
- # Create the Queue between threads. "-1" means that there can infinite number of items that can be
331
- # put in the Queue. if integer is bigger than 0, it means that this will be the maximum
332
- # number of items.
333
- queue_object = queue.Queue(-1)
334
- # Create QueueListener, which will put the logs from FileHandler to the Queue and put the logs to the queue.
335
- handlers.start_queue_listener_for_file_handler(file_handler, queue_object)
336
-
337
- return handlers.get_queue_handler(queue_object)
338
-
339
-
340
189
  def disable_default_logger():
341
190
  """
342
191
  Function to disable default logger.
@@ -353,3 +202,22 @@ def disable_default_logger():
353
202
 
354
203
  # Disabling the default logger in Python
355
204
  logging.disable(logging.CRITICAL)
205
+
206
+
207
+ def get_datetime_format_string_from_logger_file_handlers(logger: logging.Logger) -> list:
208
+ """
209
+ Function to get datetime format string from the logger's file handlers.
210
+ This is useful when you want to know the datetime format string that is used on file rotation.
211
+ :param logger: Logger to get the datetime format string from.
212
+ :return: List of datetime format strings.
213
+ """
214
+
215
+ datetime_format_strings = []
216
+
217
+ for handler in logger.handlers:
218
+ if isinstance(handler, logging.FileHandler):
219
+ date_time_format_string = handlers.extract_datetime_format_from_file_handler(handler)
220
+ if date_time_format_string:
221
+ datetime_format_strings.append(date_time_format_string)
222
+
223
+ return datetime_format_strings
@@ -75,8 +75,8 @@ def get_logs_paths(
75
75
 
76
76
  # Get the datetime object from the file name by the date pattern.
77
77
  try:
78
- datetime_object, date_string, timestamp_float = datetimes.get_datetime_from_complex_string_by_pattern(
79
- current_file_name, date_pattern)
78
+ datetime_object, date_string, timestamp_float = (
79
+ datetimes.get_datetime_from_complex_string_by_pattern(current_file_name, date_pattern))
80
80
  # ValueError will be raised if the date pattern does not match the file name.
81
81
  except ValueError:
82
82
  timestamp_float = 0
@@ -0,0 +1,40 @@
1
+ from typing import Union
2
+ import shlex
3
+
4
+ import psutil
5
+
6
+
7
def get_process_using_port(port: int) -> Union[dict, None]:
    """
    Find the first process that has an inet socket bound to the given port.

    :param port: Port number to look for.
    :return: dict with keys 'pid', 'name', 'cmdline' for the matching process,
        or None when no process uses the port.
    """
    for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
        try:
            connections = proc.connections(kind='inet')
            for conn in connections:
                # 'laddr' can be an empty tuple for some socket states, in
                # which case it has no '.port' attribute - guard before access.
                if conn.laddr and conn.laddr.port == port:
                    return {
                        'pid': proc.info['pid'],
                        'name': proc.info['name'],
                        # 'cmdline' can be None (or empty) when access is
                        # restricted; shlex.join requires an iterable.
                        'cmdline': shlex.join(proc.info['cmdline'] or [])
                    }
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Process ended or is inaccessible between iteration and query;
            # skip it and keep scanning.
            pass
    return None
+ return None
26
+
27
+
28
+ def get_processes_using_port_list(ports: list) -> Union[dict, None]:
29
+ """
30
+ Function to find the process using the port.
31
+ :param ports: List of port numbers.
32
+ :return: dict[port: {'pid', 'name', 'cmdline'}] or None.
33
+ """
34
+ port_process_map = {}
35
+ for port in ports:
36
+ process_info = get_process_using_port(port)
37
+ if process_info:
38
+ port_process_map[port] = process_info
39
+
40
+ return port_process_map
@@ -6,11 +6,16 @@ import socket
6
6
 
7
7
  from ...print_api import print_api
8
8
  from ..loggingw import loggingw
9
+ from ..psutilw import networks
9
10
 
10
11
  import dnslib
11
12
  from dnslib import DNSRecord, DNSHeader, RR, A
12
13
 
13
14
 
15
class DnsPortInUseError(Exception):
    """Raised when the DNS listening port is already occupied by another process."""
17
+
18
+
14
19
  class DnsServer:
15
20
  """
16
21
  DnsServer class is responsible to handle DNS Requests on port 53 based on configuration and send DNS Response back.
@@ -56,8 +61,12 @@ class DnsServer:
56
61
 
57
62
  # Logger that logs all the DNS Requests and responses in DNS format. These entries will not present in
58
63
  # network log of TCP Server module.
59
- self.dns_full_logger = loggingw.get_logger_with_timedfilehandler(
60
- logger_name="dns", directory_path=self.config['log']['logs_path'], disable_duplicate_ms=True)
64
+ self.dns_full_logger = loggingw.create_logger(
65
+ logger_name="dns",
66
+ directory_path=self.config['log']['logs_path'],
67
+ add_timedfile=True,
68
+ formatter_filehandler='DEFAULT'
69
+ )
61
70
 
62
71
  def thread_worker_empty_dns_cache(self, function_sleep_time: int):
63
72
  """
@@ -78,6 +87,11 @@ class DnsServer:
78
87
  :return: None.
79
88
  """
80
89
 
90
+ port_in_use = networks.get_processes_using_port_list([self.config['dns']['listening_port']])
91
+ if port_in_use:
92
+ for port, process_info in port_in_use.items():
93
+ raise DnsPortInUseError(f"Port [{port}] is already in use by process: {process_info}")
94
+
81
95
  self.logger.info("DNS Server Module Started.")
82
96
 
83
97
  # Define objects for global usage
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: atomicshop
3
- Version: 2.14.5
3
+ Version: 2.14.6
4
4
  Summary: Atomic functions and classes to make developer life easier
5
5
  Author: Denis Kras
6
6
  License: MIT License
@@ -1,4 +1,4 @@
1
- atomicshop/__init__.py,sha256=hgaCf3sGJGnktr5gfFAzZxI1NReX4ILvEOdMvHiDI40,123
1
+ atomicshop/__init__.py,sha256=hybMlQ9qVeOgdPaVMdVvrp-rXkVt1Ifm1_zQ95KG2kM,123
2
2
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
3
3
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
4
4
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -8,7 +8,7 @@ atomicshop/command_line_processing.py,sha256=u5yT9Ger_cu7ni5ID0VFlRbVD46ARHeNC9t
8
8
  atomicshop/config_init.py,sha256=z2RXD_mw9nQlAOpuGry1h9QT-2LhNscXgGAktN3dCVQ,2497
9
9
  atomicshop/console_output.py,sha256=AOSJjrRryE97PAGtgDL03IBtWSi02aNol8noDnW3k6M,4667
10
10
  atomicshop/console_user_response.py,sha256=31HIy9QGXa7f-GVR8MzJauQ79E_ZqAeagF3Ks4GGdDU,3234
11
- atomicshop/datetimes.py,sha256=wZ75JS6Aq5kTQrCSrjCwBrT-KPO7Xu9_BRWKrY8uU3c,15645
11
+ atomicshop/datetimes.py,sha256=Rmn-Ag36xRI1xbyoMs2fe1PB5hzb_UHEIwRVPppz0M0,18278
12
12
  atomicshop/diff_check.py,sha256=RJvzJhyYAZyRPKVDk1dS7UwZCx0kq__WDZ6N0rNfZUY,27110
13
13
  atomicshop/dns.py,sha256=h4uZKoz4wbBlLOOduL1GtRcTm-YpiPnGOEGxUm7hhOI,2140
14
14
  atomicshop/domains.py,sha256=Rxu6JhhMqFZRcoFs69IoEd1PtYca0lMCG6F1AomP7z4,3197
@@ -26,7 +26,7 @@ atomicshop/keyboard_press.py,sha256=1W5kRtOB75fulVx-uF2yarBhW0_IzdI1k73AnvXstk0,
26
26
  atomicshop/on_exit.py,sha256=Wf1iy2e0b0Zu7oRxrct3VkLdQ_x9B32-z_JerKTt9Z0,5493
27
27
  atomicshop/pbtkmultifile_argparse.py,sha256=aEk8nhvoQVu-xyfZosK3ma17CwIgOjzO1erXXdjwtS4,4574
28
28
  atomicshop/permissions.py,sha256=P6tiUKV-Gw-c3ePEVsst9bqWaHJbB4ZlJB4xbDYVpEs,4436
29
- atomicshop/print_api.py,sha256=DhbCQd0MWZZ5GYEk4oTu1opRFC-b31g1VWZgTGewG2Y,11568
29
+ atomicshop/print_api.py,sha256=j0bZ9b2rFKCcr0TVx1ARraVKeEs6JaaSgIlBdndy1nI,11600
30
30
  atomicshop/process.py,sha256=U2gyRl0bw2138y-rOMABMVptRvAL81ZfX1JyfxJI_Oo,15973
31
31
  atomicshop/python_file_patcher.py,sha256=kd3rBWvTcosLEk-7TycNdfKW9fZbe161iVwmH4niUo0,5515
32
32
  atomicshop/python_functions.py,sha256=zJg4ogUwECxrDD7xdDN5JikIUctITM5lsyabr_ZNsRw,4435
@@ -120,10 +120,11 @@ atomicshop/file_io/xlsxs.py,sha256=v_dyg9GD4LqgWi6wA1QuWRZ8zG4ZwB6Dz52ytdcmmmI,2
120
120
  atomicshop/file_io/xmls.py,sha256=zh3SuK-dNaFq2NDNhx6ivcf4GYCfGM8M10PcEwDSpxk,2104
121
121
  atomicshop/mains/installs/pycharm.py,sha256=uYTfME7hOeNkAsOZxDDPj2hDqmkxrFqVV6Nv6xnYNVk,141
122
122
  atomicshop/mitm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
123
+ atomicshop/mitm/config_editor.py,sha256=9ZwD6NGqgsr1f85NyFwWwM7FDut2vGQ4xari3vS9UT0,1130
123
124
  atomicshop/mitm/connection_thread_worker.py,sha256=PQ8bwOgrPudYP5oPnSi_DWaKXOi038M8TMImlLkxuPI,20486
124
125
  atomicshop/mitm/import_config.py,sha256=_V-IVJ7a1L6E-VOR4CDfZj-S1odbsIlBe13ij0NlpqY,7974
125
- atomicshop/mitm/initialize_engines.py,sha256=jiuA9BPLiUKG_7kE9FMCLrhf5W4LnPoYgc9FfV3AKRs,8175
126
- atomicshop/mitm/initialize_mitm_server.py,sha256=avC1lfS65dCdJ3cfF3olGIKycnz3ChlzCuXFmhah0Uk,13418
126
+ atomicshop/mitm/initialize_engines.py,sha256=YnXPK1UKrmULnfL4zLo2LOpKWq-aGKzc9p3n8tfcYCM,8170
127
+ atomicshop/mitm/initialize_mitm_server.py,sha256=0xjHleUFecU8nyhzf5z8On-nZtL26A80prH4GHSqNc8,14611
127
128
  atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
128
129
  atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
129
130
  atomicshop/mitm/statistic_analyzer.py,sha256=ctsf-MBIUvG4-R0K4gFQyi_b42-VCq-5s7hgO9jMOes,38415
@@ -234,10 +235,10 @@ atomicshop/wrappers/factw/rest/status.py,sha256=4O3xS1poafwyUiLDkhyx4oMMe4PBwABu
234
235
  atomicshop/wrappers/fibratusw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
235
236
  atomicshop/wrappers/fibratusw/install.py,sha256=PLVymDe0HuOvU0r2lje8BkQAgtiOWEeRO7n-1zKuL7A,3287
236
237
  atomicshop/wrappers/loggingw/formatters.py,sha256=808R7K3e3ZJD2BXfqI6UMOyXGrCgt9SYh2Uv7sL_1KQ,7432
237
- atomicshop/wrappers/loggingw/handlers.py,sha256=MzoVWE_WX2OZZ5nD9eJpEBK-SW9XJVkm1Hl_9B3gM8E,9769
238
+ atomicshop/wrappers/loggingw/handlers.py,sha256=0n4Bqr9yQ7GiQOBYtSVFtCFBeEUfcvkaAFYgWY1E3Tg,18014
238
239
  atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
239
- atomicshop/wrappers/loggingw/loggingw.py,sha256=JPNSglpuN7ryXsmUHLMCnJRsjvRPV3Q64CN3OgSEFsU,14863
240
- atomicshop/wrappers/loggingw/reading.py,sha256=wse-38zUDHB3HUB28R8Ah_Ig3Wxt2tChapKtu-yyy2E,17036
240
+ atomicshop/wrappers/loggingw/loggingw.py,sha256=irzlIYXTcCpSVjmb6NU6GJGCIy3V48ZVKNvKOlH70CU,9417
241
+ atomicshop/wrappers/loggingw/reading.py,sha256=yh7uNPxEdn6KsxSKrYny2C57XdI25F5gaByz77CO_pw,17038
241
242
  atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
242
243
  atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
243
244
  atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -253,6 +254,7 @@ atomicshop/wrappers/playwrightw/waits.py,sha256=308fdOu6YDqQ7K7xywj7R27sSmFanPBQ
253
254
  atomicshop/wrappers/psutilw/cpus.py,sha256=w6LPBMINqS-T_X8vzdYkLS2Wzuve28Ydp_GafTCngrc,236
254
255
  atomicshop/wrappers/psutilw/disks.py,sha256=3ZSVoommKH1TWo37j_83frB-NqXF4Nf5q5mBCX8G4jE,9221
255
256
  atomicshop/wrappers/psutilw/memories.py,sha256=_S0aL8iaoIHebd1vOFrY_T9aROM5Jx2D5CvDh_4j0Vc,528
257
+ atomicshop/wrappers/psutilw/networks.py,sha256=Q2EtyemncDhDsNYZREME0nOIxM-jQqIktFN3i5HtSog,1294
256
258
  atomicshop/wrappers/psutilw/psutilw.py,sha256=q3EwgprqyrR4zLCjl4l5DHFOQoukEvQMIPjNB504oQ0,21262
257
259
  atomicshop/wrappers/psycopgw/psycopgw.py,sha256=XJvVf0oAUjCHkrYfKeFuGCpfn0Oxj3u4SbKMKA1508E,7118
258
260
  atomicshop/wrappers/pywin32w/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -270,7 +272,7 @@ atomicshop/wrappers/socketw/accepter.py,sha256=HQC1EyZmyUtVEfFbaBkHCE-VZp6RWyd9m
270
272
  atomicshop/wrappers/socketw/base.py,sha256=1vvg8EhRGvnxdrRAm1VJSLCXkm2SZDHRjdpTuhkH3Mg,1844
271
273
  atomicshop/wrappers/socketw/certificator.py,sha256=SxCKFyBlwzs4uohugfBokOYQpZJyH8KY46m87Q23n6w,9017
272
274
  atomicshop/wrappers/socketw/creator.py,sha256=C-l57G854HAtWJonVbgfQge290-Dg0Ov4aurJAWIKls,11199
273
- atomicshop/wrappers/socketw/dns_server.py,sha256=laMR_opplvytVTvVcztNoDChZGMCDR31fDQejrCNaEI,42032
275
+ atomicshop/wrappers/socketw/dns_server.py,sha256=u4RSDIK4UR1aKOulwSGscchxYa7pTlPxMO-StHuBMag,42464
274
276
  atomicshop/wrappers/socketw/exception_wrapper.py,sha256=_YDnzyEcUnV6VISU3bk-UPdnsMvHjKJBHwxLMTsxQu8,8495
275
277
  atomicshop/wrappers/socketw/get_process.py,sha256=APw_oOXsuR5KljYesd4J8MuzR-kaw2ez3MN3oD_h9Qc,5226
276
278
  atomicshop/wrappers/socketw/receiver.py,sha256=m8hXKOa8dqEQGUdcbYjshH8-j0CsMGRkge2ifYKhaAw,9050
@@ -281,8 +283,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
281
283
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
282
284
  atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
283
285
  atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
284
- atomicshop-2.14.5.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
285
- atomicshop-2.14.5.dist-info/METADATA,sha256=gY_7i-PXmCnygtkJYX2xDi6Jl9jH8AeVRnaBhR2SsCk,10478
286
- atomicshop-2.14.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
287
- atomicshop-2.14.5.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
288
- atomicshop-2.14.5.dist-info/RECORD,,
286
+ atomicshop-2.14.6.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
287
+ atomicshop-2.14.6.dist-info/METADATA,sha256=GxNk1sCw7qnOT6BX5QRiy9KCqX1KQfwt4lU0ESSxaos,10478
288
+ atomicshop-2.14.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
289
+ atomicshop-2.14.6.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
290
+ atomicshop-2.14.6.dist-info/RECORD,,