atomicshop 2.14.4__py3-none-any.whl → 2.14.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of atomicshop might be problematic.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
  """Atomic Basic functions and classes to make developer life easier"""

  __author__ = "Den Kras"
- __version__ = '2.14.4'
+ __version__ = '2.14.5'
atomicshop/mitm/initialize_engines.py CHANGED
@@ -83,8 +83,14 @@ class ModuleCategory:

          # Initiating logger for each engine by its name
          # initiate_logger(current_module.engine_name, log_file_extension)
-         loggingw.get_logger_with_stream_handler_and_timedfilehandler(
-             logger_name=self.engine_name, directory_path=logs_path, disable_duplicate_ms=True)
+         loggingw.get_complex_logger(
+             logger_name=self.engine_name,
+             directory_path=logs_path,
+             add_stream=True,
+             add_timedfile=True,
+             formatter_streamhandler='DEFAULT',
+             formatter_filehandler='DEFAULT'
+         )


          # Assigning external class object by message domain received from client. If the domain is not in the list,
atomicshop/mitm/initialize_mitm_server.py CHANGED
@@ -41,8 +41,15 @@ def initialize_mitm_server(config_static):
          config['certificates']['sni_server_certificate_from_server_socket_download_directory'])

      # Create a logger that will log messages to file, Initiate System logger.
-     system_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
-         "system", config['log']['logs_path'], disable_duplicate_ms=True)
+     logger_name = "system"
+     system_logger = loggingw.get_complex_logger(
+         logger_name=logger_name,
+         file_path=f'{config['log']['logs_path']}{os.sep}{logger_name}.txt',
+         add_stream=True,
+         add_timedfile=True,
+         formatter_streamhandler='DEFAULT',
+         formatter_filehandler='DEFAULT'
+     )

      # Writing first log.
      system_logger.info("======================================")
@@ -175,14 +182,24 @@ def initialize_mitm_server(config_static):
      config_static.CONFIG_EXTENDED['certificates']['domains_all_times'] = list(domains_engine_list_full)

      # Creating Statistics logger.
-     statistics_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
-         logger_name="statistics", directory_path=config['log']['logs_path'],
-         file_extension=config_static.CSV_EXTENSION, formatter_message_only=True, header=STATISTICS_HEADER
+     statistics_logger = loggingw.get_complex_logger(
+         logger_name="statistics",
+         directory_path=config['log']['logs_path'],
+         add_timedfile=True,
+         formatter_filehandler='MESSAGE',
+         file_type='csv',
+         header=STATISTICS_HEADER
      )

      network_logger_name = "network"
-     network_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
-         logger_name=network_logger_name, directory_path=config['log']['logs_path'], disable_duplicate_ms=True)
+     network_logger = loggingw.get_complex_logger(
+         logger_name=network_logger_name,
+         directory_path=config['log']['logs_path'],
+         add_stream=True,
+         add_timedfile=True,
+         formatter_streamhandler='DEFAULT',
+         formatter_filehandler='DEFAULT'
+     )
      system_logger.info(f"Loaded network logger: {network_logger}")

      # Initiate Listener logger, which is a child of network logger, so he uses the same settings and handlers
atomicshop/wrappers/loggingw/formatters.py CHANGED
@@ -1,4 +1,5 @@
  import logging
+ import time


  # Log formatter, means how the log will look inside the file
@@ -8,32 +9,9 @@ import logging

  # ".40" truncating the string to only 40 characters. Example: %(message).250s

- # Adding '%(asctime)s.%(msecs)06f' will print milliseconds as well as nanoseconds:
- # 2022-02-17 15:15:51,913.335562
- # If you don't use custom 'datefmt' in your 'setFormatter' function,
- # it will print duplicate milliseconds:
- # 2022-02-17 15:15:51,913.913.335562
- # The setting should be like:
- # file_handler.setFormatter(logging.Formatter(log_formatter_file, datefmt='%Y-%m-%d,%H:%M:%S'))
- # 's' stands for string. 'd' stands for digits, a.k.a. 'int'. 'f' stands for float.
-
- # Old tryouts:
- # log_formatter_file: str = f"%(asctime)s.%(msecs)06f | " \
- # log_formatter_file: str = f"%(asctime)s.%(msecs) | " \
- # f"%(levelname)-{len(log_header_level)}s | " \
- # f"%(name)-{len(log_header_logger)}s | " \
- # f"%(filename)-{len(log_header_script)}s : " \
- # f"%(lineno)-{len(log_header_line)}d | " \
- # "%(threadName)s: %(message)s"
- # log_formatter_file: str = "{asctime}.{msecs:0<3.0f} | " \
- # log_formatter_file: str = "{asctime}.{msecs:0>3.0f}.{msecs:0>.6f} | " \
-
- # Old tryouts for reference:
- # file_formatter = logging.Formatter(log_formatter_file, style='{')
- # file_formatter.default_time_format = '%Y-%m-%d %H:%M:%S'
- # file_formatter.default_msec_format = '%s,%03d'
- # file_formatter.default_msec_format = '%s,%03f'

+ DEFAULT_STREAM_FORMATTER: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
+ DEFAULT_MESSAGE_FORMATTER: str = "%(message)s"

  FORMAT_ELEMENT_TO_HEADER: dict = {
      'asctime': 'Event Time [Y-M-D H:M:S]',
@@ -56,7 +34,7 @@ FORMAT_ELEMENT_TO_HEADER: dict = {
  }

  DEFAULT_FORMATTER_TXT_FILE: str = \
-     "{asctime},{msecs:013.9f} | " \
+     "{asctime} | " \
      "{levelname:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['levelname'])}" + "s} | " \
      "{name:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['name'])}" + "s} | " \
      "{filename:<" + f"{len(FORMAT_ELEMENT_TO_HEADER['filename'])}" + "s} : " \
@@ -64,7 +42,53 @@ DEFAULT_FORMATTER_TXT_FILE: str = \
      "{threadName} | {message}"

  DEFAULT_FORMATTER_CSV_FILE: str = \
-     '\"{asctime}.{msecs:010.6f}\",{levelname},{name},{filename},{lineno},{threadName},\"{message}\"'
+     '\"{asctime}\",{levelname},{name},{filename},{lineno},{threadName},\"{message}\"'
+
+
+ class NanosecondsFormatter(logging.Formatter):
+     def __init__(self, fmt=None, datefmt=None, style='%', use_nanoseconds=False):
+         super().__init__(fmt, datefmt, style)
+         self.use_nanoseconds = use_nanoseconds
+
+     def formatTime(self, record, datefmt=None):
+         ct = self.converter(record.created)
+
+         if datefmt:
+             # Remove unsupported %f from datefmt if present
+             if '%f' in datefmt:
+                 datefmt = datefmt.replace('%f', '')
+                 self.use_nanoseconds = True
+         else:
+             # Default time format if datefmt is not provided
+             datefmt = '%Y-%m-%d %H:%M:%S'
+
+         s = time.strftime(datefmt, ct)
+
+         if self.use_nanoseconds:
+             # Calculate nanoseconds from the fractional part of the timestamp
+             nanoseconds = f'{record.created:.9f}'.split('.')[1]
+             # Return the formatted string with nanoseconds appended
+             return f'{s}.{nanoseconds}'
+         else:
+             return s
+
+
+
+
+
+         # if datefmt is None:
+         #     # Use the default behavior if no datefmt is provided
+         #     return super().formatTime(record, datefmt)
+         # elif '%f' in datefmt:
+         #     # Format the time up to seconds
+         #     base_time = time.strftime(datefmt.replace('%f', ''), ct)
+         #     # Calculate nanoseconds from the fractional part of the timestamp
+         #     nanoseconds = f'{record.created:.9f}'.split('.')[1]
+         #     # Return the formatted string with nanoseconds appended
+         #     return base_time + nanoseconds
+         # else:
+         #     # Use the provided datefmt if it doesn't include %f
+         #     return time.strftime(datefmt, ct)


  class FormatterProcessor:
@@ -150,7 +174,11 @@ class FormatterProcessor:


  def get_logging_formatter_from_string(
-         formatter: str, style=None, datefmt=None, disable_duplicate_ms: bool = False) -> logging.Formatter:
+         formatter: str,
+         style=None,
+         datefmt=None,
+         use_nanoseconds: bool = False
+ ) -> logging.Formatter:
      """
      Function to get the logging formatter from the string.

@@ -160,12 +188,12 @@ def get_logging_formatter_from_string(
          '%': will use the '%' style.
          '{': will use the '{' style.
      :param datefmt: string, date format of 'asctime' element. Default is None.
-     :param disable_duplicate_ms: bool, if True, will disable the duplicate milliseconds in the 'asctime' element.
-         Example: If we're using '%(asctime)s.%(msecs)06f' msecs value in our time stamp, we need to use custom
-         'datefmt' to get rid of the additional duplicate milliseconds:
-         Instead of '2022-02-17 15:15:51,913.913.335562' print '2022-02-17 15:15:51,913.335562'
-         The problem with this method is that milliseconds aren't adjusted to 3 digits with zeroes (like 1 = 001).
-         We can use the regular strftime format: datefmt='%Y-%m-%d,%H:%M:%S:%f'
+         We use custom formatter that can process the date format with nanoseconds:
+         '%Y-%m-%d %H:%M:%S.%f' -> '2021-01-01 00:00:00.000000000'
+     :param use_nanoseconds: bool, if set to True, the formatter will use nanoseconds instead of milliseconds.
+         This will print 'asctime' in the following format: '2021-01-01 00:00:00.000000000', instead of
+         '2021-01-01 00:00:00.000'.
+
      :return: logging.Formatter, formatter.
      """

@@ -173,10 +201,8 @@ def get_logging_formatter_from_string(
      if not style:
          style = FormatterProcessor(formatter).get_style()['style']

-     # The regular 'datefmt' is '%Y-%m-%d,%H:%M:%S:%f'. If we want to use it with milliseconds 'msecs' element,
-     # we need to disable the duplicate milliseconds.
-     if disable_duplicate_ms:
-         datefmt = '%Y-%m-%d,%H:%M:%S'
-
      # Create the logging formatter.
-     return logging.Formatter(formatter, style=style, datefmt=datefmt)
+     if use_nanoseconds or '%f' in datefmt:
+         return NanosecondsFormatter(formatter, style=style, datefmt=datefmt, use_nanoseconds=use_nanoseconds)
+     else:
+         return logging.Formatter(formatter, style=style, datefmt=datefmt)
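For reference, a minimal standalone sketch of the nanosecond-timestamp approach that NanosecondsFormatter takes (standard library only; the class and logger names below are illustrative, not part of the package):

    import logging
    import time


    class NanoTimestampFormatter(logging.Formatter):
        # Illustrative stand-in that mirrors the idea behind NanosecondsFormatter.formatTime:
        # format the seconds with strftime, then append the fractional part of
        # record.created padded to nine digits.
        def formatTime(self, record, datefmt=None):
            ct = self.converter(record.created)
            base = time.strftime(datefmt or '%Y-%m-%d %H:%M:%S', ct)
            nanoseconds = f'{record.created:.9f}'.split('.')[1]
            return f'{base}.{nanoseconds}'


    handler = logging.StreamHandler()
    handler.setFormatter(NanoTimestampFormatter('{asctime} | {levelname} | {message}', style='{'))
    logger = logging.getLogger('nano_demo')
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    logger.info('asctime now carries a nine-digit fractional part')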
atomicshop/wrappers/loggingw/handlers.py CHANGED
@@ -2,6 +2,11 @@ import logging
  from logging.handlers import TimedRotatingFileHandler, QueueListener, QueueHandler
  import re
  import os
+ from pathlib import Path
+
+
+ DEFAULT_DATE_STRING_FORMAT: str = "%Y_%m_%d"
+ DEFAULT_DATE_REGEX_PATTERN: str = r"^\d{4}_\d{2}_\d{2}$"


  class TimedRotatingFileHandlerWithHeader(TimedRotatingFileHandler):
@@ -150,7 +155,20 @@ def get_handler_name(handler: logging.Handler) -> str:
      return handler.get_name()


- def change_rotated_filename(file_handler: logging.Handler, file_extension: str):
+ def change_rotated_filename(
+         file_handler: logging.Handler,
+         date_format_string: str = None,
+         date_regex_pattern: str = None
+ ):
+     """
+     Function to change the way TimedRotatingFileHandler managing the rotating filename.
+
+     :param file_handler: FileHandler to change the rotating filename for.
+     :param date_format_string: Date format string to use for the rotated log filename.
+         If None, the default 'DEFAULT_DATE_STRING_FORMAT' will be used.
+     :param date_regex_pattern: Regex pattern to match the rotated log filenames.
+         If None, the default 'DEFAULT_DATE_REGEX_PATTERN' will be used.
+     """
      # Changing the way TimedRotatingFileHandler managing the rotating filename
      # Default file suffix is only "Year_Month_Day" with addition of the dot (".") character to the
      # "file name + extension" that you provide it. Example: log file name:
@@ -170,18 +188,40 @@ def change_rotated_filename(file_handler: logging.Handler, file_extension: str):
      # file_handler.extMatch = re.compile(r"^\d{4}_\d{2}_\d{2}" + re.escape(log_file_extension) + r"$")
      # file_handler.extMatch = re.compile(r"^\d{4}_\d{2}_\d{2}.txt$")

-     # Set variables that are responsible for setting TimedRotatingFileHandler filename on rotation.
-     # Log files time format, need only date
-     format_date_log_filename: str = "%Y_%m_%d"
-     # Log file suffix.
-     logfile_suffix: str = "_" + format_date_log_filename + file_extension
-     # Regex object to match the TimedRotatingFileHandler file name suffix.
-     # "re.escape" is used to "escape" strings in regex and use them as is.
-     logfile_regex_suffix = re.compile(r"^\d{4}_\d{2}_\d{2}" + re.escape(file_extension) + r"$")
+     # Update the namer function to format the rotated filename correctly
+     def namer(name):
+         # Currently the 'name' is full file path + '.' + logfile_suffix.
+         # Example: 'C:\\path\\to\\file.log._2021_12_24'
+         # Get the parent directory of the file: C:\path\to
+         parent_dir: str = str(Path(name).parent)
+         # Get the base filename without the extension: file.log
+         filename: str = Path(name).stem
+         # Get the date part of the filename: _2021_12_24
+         date_part: str = str(Path(name).suffix).replace(".", "")
+         # Get the file extension: log
+         file_extension: str = Path(filename).suffix
+         # Get the file name without the extension: file
+         file_stem: str = Path(filename).stem
+
+         return f"{parent_dir}{os.sep}{file_stem}{date_part}{file_extension}"
+
+     # Construct the new suffix without the file extension
+     if date_format_string is None:
+         logfile_suffix = f"_{DEFAULT_DATE_STRING_FORMAT}"
+     else:
+         logfile_suffix = f"_{date_format_string}"
+
+     # Regex pattern to match the rotated log filenames
+     if date_regex_pattern is None:
+         logfile_regex_suffix = re.compile(DEFAULT_DATE_REGEX_PATTERN)
+     else:
+         logfile_regex_suffix = re.compile(date_regex_pattern)

-     # Changing the setting that we set above
+     # Update the handler's suffix to include the date format
      file_handler.suffix = logfile_suffix
-     file_handler.namer = lambda name: name.replace(file_extension + ".", "") + file_extension
+
+     file_handler.namer = namer
+     # Update the handler's extMatch regex to match the new filename format
      file_handler.extMatch = logfile_regex_suffix


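To make the effect of the new namer concrete: TimedRotatingFileHandler hands the rotated name over as the base file path plus a dot and the date suffix, and the namer moves the date in front of the file extension. A standalone reproduction of the logic added above (the path is only an example):

    import os
    from pathlib import Path


    def namer(name: str) -> str:
        # Same steps as the namer installed by change_rotated_filename.
        parent_dir = str(Path(name).parent)                    # /var/log
        filename = Path(name).stem                             # app.txt
        date_part = str(Path(name).suffix).replace(".", "")    # _2021_12_24
        file_extension = Path(filename).suffix                 # .txt
        file_stem = Path(filename).stem                        # app
        return f"{parent_dir}{os.sep}{file_stem}{date_part}{file_extension}"


    print(namer("/var/log/app.txt._2021_12_24"))  # -> /var/log/app_2021_12_24.txt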
atomicshop/wrappers/loggingw/loggingw.py CHANGED
@@ -1,80 +1,34 @@
- import os
  import logging
+ import os
  import queue
+ from typing import Literal, Union

  from . import loggers, handlers, formatters


- def get_logger_with_level(logger_name: str, logging_level="DEBUG") -> logging.Logger:
-     """
-     Function to get a logger and set logging level.
-
-     :param logger_name: Name of the logger.
-     :param logging_level: 'int' or 'str', Logging level to set to the logger.
-         None: if None, the logger level will not be set.
-     :return: Logger.
-     """
-
-     # Get the logger.
-     logger: logging.Logger = loggers.get_logger(logger_name)
-     # Set the logger level if it is not None.
-     if logging_level:
-         loggers.set_logging_level(logger, logging_level)
-
-     return logger
-
-
- def get_logger_with_stream_handler(
-         logger_name: str, logging_level="DEBUG",
-         formatter: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
- ) -> logging.Logger:
-     """
-     Function to get a logger and add StreamHandler to it.
-
-     :param logger_name: Name of the logger.
-     :param logging_level: 'int' or 'str', Logging level to set to the logger.
-         None: if None, the logger level will not be set.
-     :param formatter: Formatter to use for StreamHandler. It is template of how a message will look like.
-     :return: Logger.
-     """
-
-     # Get the logger.
-     logger: logging.Logger = loggers.get_logger(logger_name)
-     # Set the logger level if it is not None.
-     if logging_level:
-         loggers.set_logging_level(logger, logging_level)
-     # Add StreamHandler to the logger.
-     add_stream_handler(logger, logging_level, formatter)
-
-     return logger
-
-
- def get_logger_with_timedfilehandler(
+ def get_complex_logger(
          logger_name: str,
-         directory_path, file_name: str = None, file_extension: str = '.txt',
-         logging_level="DEBUG", formatter='default',
-         formatter_message_only: bool = False, disable_duplicate_ms: bool = False,
-         when: str = "midnight", interval: int = 1, delay: bool = True, encoding=None
- ) -> logging.Logger:
-     logger = get_logger_with_level(logger_name, logging_level)
-     add_timedfilehandler_with_queuehandler(
-         logger, directory_path, file_name, file_extension, logging_level, formatter,
-         formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding
-     )
-
-     return logger
-
-
- def get_logger_with_stream_handler_and_timedfilehandler(
-         logger_name: str,
-         directory_path,
-         file_name: str = None,
-         file_extension: str = '.txt',
+         file_path: str = None,
+         directory_path: str = None,
+         add_stream: bool = False,
+         add_timedfile: bool = False,
+         file_type: Literal[
+             'txt',
+             'csv',
+             'json'] = 'txt',
          logging_level="DEBUG",
-         formatter_filehandler='default',
-         formatter_streamhandler: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s",
-         formatter_message_only: bool = False,
-         disable_duplicate_ms: bool = False,
+         formatter_streamhandler: Union[
+             Literal[
+                 'MESSAGE',
+                 'DEFAULT'],
+             None] = None,
+         formatter_filehandler: Union[
+             Literal[
+                 'MESSAGE',
+                 'DEFAULT',],
+             None] = None,
+         formatter_streamhandler_use_nanoseconds: bool = True,
+         formatter_filehandler_use_nanoseconds: bool = True,
          when: str = "midnight",
          interval: int = 1,
          delay: bool = True,
@@ -85,26 +39,35 @@ def get_logger_with_stream_handler_and_timedfilehandler(
      Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.

      :param logger_name: Name of the logger.
-     :param directory_path: string, Path to the directory where the log file will be created.
-     :param file_name: string, Name of the log file without file extension, since we add it through separate argument.
-         If not provided, logger name will be used.
-     :param file_extension: string, Extension of the log file. Default is '.txt'.
-         '.txt': Text file.
-         '.csv': CSV file.
-         '.json': JSON file.
+     :param file_path: full path to the log file. If you don't want to use the file, set it to None.
+         You can set the directory_path only and then the 'logger_name' will be used as the file name with the
+         'file_type' as the file extension.
+     :param directory_path: full path to the directory where the log file will be saved.
+     :param add_stream: bool, If set to True, StreamHandler will be added to the logger.
+     :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger.
+     :param file_type: string, file type of the log file. Default is 'txt'.
+         'txt': Text file.
+         'csv': CSV file.
+         'json': JSON file.
      :param logging_level: str or int, Logging level for the handler, that will use the logger while initiated.
+     :param formatter_streamhandler: string, Formatter to use for StreamHandler. It is template of how a message will
+         look like.
+         None: No formatter will be used.
+         'DEFAULT': Default formatter will be used:
+             "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
+         'MESSAGE': Formatter will be used only for the 'message' part.
      :param formatter_filehandler: string, Formatter to use for handler. It is template of how a message will look like.
          None: No formatter will be used.
-         'default': Default formatter will be used for each file extension:
-             .txt: "%(asctime)s | %(levelname)s | %(threadName)s | %(name)s | %(message)s"
-             .csv: "%(asctime)s,%(levelname)s,%(threadName)s,%(name)s,%(message)s"
-             .json: '{"time": "%(asctime)s", "level": "%(levelname)s", "thread": "%(threadName)s",
+         'DEFAULT': Default formatter will be used for each file extension:
+             txt: "%(asctime)s | %(levelname)s | %(threadName)s | %(name)s | %(message)s"
+             csv: "%(asctime)s,%(levelname)s,%(threadName)s,%(name)s,%(message)s"
+             json: '{"time": "%(asctime)s", "level": "%(levelname)s", "thread": "%(threadName)s",
                  "logger": "%(name)s", "message": "%(message)s"}'
-     :param formatter_streamhandler: string, Formatter to use for StreamHandler. It is template of how a message will
-         look like.
-     :param formatter_message_only: bool, If set to True, formatter will be used only for the 'message' part.
-     :param disable_duplicate_ms: bool, If set to True, duplicate milliseconds will be removed from formatter
-         'asctime' element.
+         'MESSAGE': Formatter will be used only for the 'message' part.
+     :param formatter_streamhandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
+         in the formatter in case you provide 'asctime' element.
+     :param formatter_filehandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
+         in the formatter in case you provide 'asctime' element.
      :param when: string, When to rotate the log file. Default is 'midnight'.
          [when="midnight"] is set to rotate the filename at midnight. This means that the current file name will be
          added Yesterday's date to the end of the file and today's file will continue to write at the same
@@ -119,6 +82,7 @@ def get_logger_with_stream_handler_and_timedfilehandler(
      :param encoding: string, Encoding to use for the log file. Default is None.
      :param header: string, Header to write to the log file.
          Example: "time,host,error"
+         Useful for 'csv' file type format.

      :return: Logger.

@@ -130,11 +94,41 @@ def get_logger_with_stream_handler_and_timedfilehandler(

      def main():
          header: str = "time,host,error"
-         output_directory: str = "D:\\logs"
+         output_log_file: str = "D:\\logs\\log_file.csv"
+
+         error_logger = loggingw.get_complex_logger(
+             logger_name=f'{self.__class__.__name__}_CSV',
+             file_path=output_log_file,
+             add_timedfile=True,
+             file_type='csv',
+             formatter_filehandler='MESSAGE',
+             header=header
+         )
+
+         error_logger.info(error_message)
+
+
+     if __name__ == "__main__":
+         main()
+
+     ------------------------------
+
+     Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file:
+     from atomicshop.wrappers.loggingw import loggingw
+

-         error_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
-             logger_name="errors", directory_path=output_directory,
-             file_extension=".csv", formatter_message_only=True, header=header
+     def main():
+         header: str = "time,host,error"
+         output_log_file: str = "D:\\logs\\log_file.txt"
+
+         error_logger = loggingw.get_complex_logger(
+             logger_name=f'{self.__class__.__name__}',
+             file_path=output_log_file,
+             add_stream=True,
+             add_timedfile=True,
+             file_type='txt',
+             formatter_streamhandler='DEFAULT',
+             formatter_filehandler='DEFAULT'
          )

          error_logger.info(f"{datetime.now()},host1,/path/to/file,error message")
@@ -143,32 +137,93 @@ def get_logger_with_stream_handler_and_timedfilehandler(
      if __name__ == "__main__":
          main()
      """
+
+     if not directory_path and not file_path:
+         raise ValueError("You need to provide 'directory_path' or 'file_path'.")
+     if directory_path and file_path:
+         raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
+
+     if directory_path:
+         if directory_path.endswith(os.sep):
+             directory_path = directory_path[:-1]
+
+         file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
+
      logger = get_logger_with_level(logger_name, logging_level)
-     add_stream_handler(logger, logging_level, formatter_streamhandler, formatter_message_only)
-     add_timedfilehandler_with_queuehandler(
-         logger, directory_path, file_name, file_extension, logging_level, formatter_filehandler,
-         formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding, header
-     )
+
+     if add_stream:
+         add_stream_handler(
+             logger=logger, logging_level=logging_level, formatter=formatter_streamhandler,
+             formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
+
+     if add_timedfile:
+         add_timedfilehandler_with_queuehandler(
+             logger=logger, file_path=file_path, logging_level=logging_level, formatter=formatter_filehandler,
+             formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds, file_type=file_type,
+             when=when, interval=interval, delay=delay, encoding=encoding, header=header)
+
+     return logger
+
+
+ def get_logger_with_level(
+         logger_name: str,
+         logging_level="DEBUG"
+ ) -> logging.Logger:
+     """
+     Function to get a logger and set logging level.
+
+     :param logger_name: Name of the logger.
+     :param logging_level: 'int' or 'str', Logging level to set to the logger.
+         None: if None, the logger level will not be set.
+     :return: Logger.
+     """
+
+     # Get the logger.
+     logger: logging.Logger = loggers.get_logger(logger_name)
+     # Set the logger level if it is not None.
+     if logging_level:
+         loggers.set_logging_level(logger, logging_level)

      return logger


+ def _process_formatter_attribute(
+         formatter: Union[
+             Literal['DEFAULT', 'MESSAGE'],
+             None],
+         file_type: Union[
+             Literal['txt', 'csv', 'json'],
+             None] = None
+ ):
+     """
+     Function to process the formatter attribute.
+     """
+
+     if formatter == 'DEFAULT' and file_type is None:
+         return formatters.DEFAULT_STREAM_FORMATTER
+     elif formatter == 'DEFAULT' and file_type == 'txt':
+         return formatters.DEFAULT_FORMATTER_TXT_FILE
+     elif formatter == 'DEFAULT' and file_type == 'csv':
+         return formatters.DEFAULT_FORMATTER_CSV_FILE
+     elif formatter == 'DEFAULT' and file_type == 'json':
+         return formatters.DEFAULT_MESSAGE_FORMATTER
+     elif formatter == 'MESSAGE':
+         return formatters.DEFAULT_MESSAGE_FORMATTER
+     else:
+         return formatter
+
+
  def add_stream_handler(
-         logger: logging.Logger, logging_level: str = "DEBUG",
-         formatter: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s",
-         formatter_message_only: bool = False
+         logger: logging.Logger,
+         logging_level: str = "DEBUG",
+         formatter: Union[
+             Literal['DEFAULT', 'MESSAGE'],
+             None] = None,
+         formatter_use_nanoseconds: bool = False
  ):
      """
      Function to add StreamHandler to logger.
      Stream formatter will output messages to the console.
-
-     :param logger: Logger to add the handler to.
-     :param logging_level: Logging level for the handler, that will use the logger while initiated.
-     :param formatter: Formatter to use for StreamHandler. It is template of how a message will look like.
-         None: No formatter will be used.
-         'default': Default formatter will be used:
-             "%(levelname)s | %(threadName)s | %(name)s | %(message)s"
-     :param formatter_message_only: bool, If set to True, formatter will be used only for the 'message' part.
      """

      # Getting the StreamHandler.
@@ -177,12 +232,12 @@ def add_stream_handler(
      loggers.set_logging_level(stream_handler, logging_level)

      # If formatter_message_only is set to True, then formatter will be used only for the 'message' part.
-     if formatter_message_only:
-         formatter = "%(message)s"
+     formatter = _process_formatter_attribute(formatter)

      # If formatter was provided, then it will be used.
      if formatter:
-         logging_formatter = formatters.get_logging_formatter_from_string(formatter)
+         logging_formatter = formatters.get_logging_formatter_from_string(
+             formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
          handlers.set_formatter(stream_handler, logging_formatter)

      # Adding the handler to the main logger
@@ -194,13 +249,16 @@ def add_stream_handler(

  def add_timedfilehandler_with_queuehandler(
          logger: logging.Logger,
-         directory_path: str,
-         file_name_no_extension: str = None,
-         file_extension: str = '.txt',
+         file_path: str,
+         file_type: Literal[
+             'txt',
+             'csv',
+             'json'] = 'txt',
          logging_level="DEBUG",
-         formatter='default',
-         formatter_message_only: bool = False,
-         disable_duplicate_ms: bool = False,
+         formatter: Union[
+             Literal['DEFAULT', 'MESSAGE'],
+             None] = None,
+         formatter_use_nanoseconds: bool = False,
          when: str = 'midnight',
          interval: int = 1,
          delay: bool = True,
@@ -211,45 +269,11 @@ def add_timedfilehandler_with_queuehandler(
      Function to add TimedRotatingFileHandler and QueueHandler to logger.
      TimedRotatingFileHandler will output messages to the file through QueueHandler.
      This is needed, since TimedRotatingFileHandler is not thread-safe, though official docs say it is.
-
-     :param logger: Logger to add the handler to.
-     :param directory_path: string, Path to the directory where the log file will be created.
-     :param file_name_no_extension: string, Name of the log file without file extension, since we add it through
-         separate argument. If not provided, logger name will be used.
-     :param file_extension: string, Extension of the log file. Default is '.txt'.
-     :param logging_level: str or int, Logging level for the handler, that will use the logger while initiated.
-     :param formatter: string, Formatter to use for handler. It is template of how a message will look like.
-         None: No formatter will be used.
-         'default': Default formatter will be used for each file extension:
-             .txt: "%(asctime)s | %(levelname)s | %(threadName)s | %(name)s | %(message)s"
-             .csv: "%(asctime)s,%(levelname)s,%(threadName)s,%(name)s,%(message)s"
-             .json: '{"time": "%(asctime)s", "level": "%(levelname)s", "thread": "%(threadName)s",
-                 "logger": "%(name)s", "message": "%(message)s"}'
-     :param formatter_message_only: bool, If set to True, formatter will be used only for the 'message' part.
-     :param disable_duplicate_ms: bool, If set to True, duplicate milliseconds will be removed from formatter
-         'asctime' element.
-     :param when: string, When to rotate the log file. Default is 'midnight'.
-         [when="midnight"] is set to rotate the filename at midnight. This means that the current file name will be
-         added Yesterday's date to the end of the file and today's file will continue to write at the same
-         filename. Also, if the script finished working on 25.11.2021, the name of the log file will be "test.log"
-         If you run the script again on 28.11.2021, the logging module will take the last modification date of
-         the file "test.log" and assign a date to it: test.log.2021_11_25
-         The log filename of 28.11.2021 will be called "test.log" again.
-     :param interval: int, Interval to rotate the log file. Default is 1.
-         If 'when="midnight"' and 'interval=1', then the log file will be rotated every midnight.
-         If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
-     :param delay: bool, If set to True, the log file will be created only if there's something to write.
-     :param encoding: string, Encoding to use for the log file. Default is None.
-     :param header: string, Header to write to the log file.
-         Example: "time,host,error"
      """

      # If file name wasn't provided we will use the logger name instead.
-     if not file_name_no_extension:
-         file_name_no_extension = logger.name
-
-     # Set log file path.
-     log_file_path = f'{directory_path}{os.sep}{file_name_no_extension}{file_extension}'
+     # if not file_name_no_extension:
+     #     file_name_no_extension = logger.name

      # Setting the TimedRotatingFileHandler, without adding it to the logger.
      # It will be added to the QueueListener, which will use the TimedRotatingFileHandler to write logs.
@@ -259,41 +283,30 @@ def add_timedfilehandler_with_queuehandler(
      # Creating file handler with log filename. At this stage the log file is created and locked by the handler,
      # Unless we use "delay=True" to tell the class to write the file only if there's something to write.

-     if file_extension == ".csv":
+     if file_type == "csv":
          # If file extension is CSV, we'll set the header to the file.
          # This is needed since the CSV file will be rotated, and we'll need to set the header each time.
          # We'll use the custom TimedRotatingFileHandlerWithHeader class.
          file_handler = handlers.get_timed_rotating_file_handler_with_header(
-             log_file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
+             file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
      else:
          file_handler = handlers.get_timed_rotating_file_handler(
-             log_file_path, when=when, interval=interval, delay=delay, encoding=encoding)
+             file_path, when=when, interval=interval, delay=delay, encoding=encoding)

      loggers.set_logging_level(file_handler, logging_level)

-     if formatter == "default":
-         # Create file formatter based on extension
-         if file_extension == ".txt":
-             formatter = formatters.DEFAULT_FORMATTER_TXT_FILE
-         elif file_extension == ".csv":
-             formatter = formatters.DEFAULT_FORMATTER_CSV_FILE
-         elif file_extension == ".json":
-             formatter = "%(message)s"
-
-     # If 'formatter_message_only' is set to 'True', we'll use the formatter only for the message part.
-     if formatter_message_only:
-         formatter = "%(message)s"
+     formatter = _process_formatter_attribute(formatter, file_type=file_type)

      # If formatter was passed to the function we'll add it to handler.
      if formatter:
          # Convert string to Formatter object. Moved to newer styling of python 3: style='{'
          logging_formatter = formatters.get_logging_formatter_from_string(
-             formatter, disable_duplicate_ms=disable_duplicate_ms)
+             formatter=formatter, use_nanoseconds=formatter_use_nanoseconds)
          # Setting the formatter in file handler.
          handlers.set_formatter(file_handler, logging_formatter)

      # This function will change the suffix behavior of the rotated file name.
-     handlers.change_rotated_filename(file_handler, file_extension)
+     handlers.change_rotated_filename(file_handler)
      queue_handler = start_queue_listener_for_file_handler_and_get_queue_handler(file_handler)
      loggers.set_logging_level(queue_handler, logging_level)

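Taken together, the loggingw changes replace the get_logger_with_* helpers with a single get_complex_logger entry point. An illustrative migration sketch based on the call sites changed in this diff (the logger name and paths are placeholders, not part of the package):

    from atomicshop.wrappers.loggingw import loggingw

    # 2.14.4 style (helper removed in this release):
    # logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
    #     logger_name="system", directory_path="/var/log/myapp", disable_duplicate_ms=True)

    # 2.14.5 style: handlers and formatters are selected explicitly.
    logger = loggingw.get_complex_logger(
        logger_name="system",
        directory_path="/var/log/myapp",   # or file_path="/var/log/myapp/system.txt"
        add_stream=True,
        add_timedfile=True,
        formatter_streamhandler='DEFAULT',
        formatter_filehandler='DEFAULT'
    )

    logger.info("migrated to get_complex_logger")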
atomicshop-2.14.4.dist-info/METADATA → atomicshop-2.14.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: atomicshop
- Version: 2.14.4
+ Version: 2.14.5
  Summary: Atomic functions and classes to make developer life easier
  Author: Denis Kras
  License: MIT License
atomicshop-2.14.4.dist-info/RECORD → atomicshop-2.14.5.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- atomicshop/__init__.py,sha256=tunTjRW84u7Nao5t0Z6ZWFYrQDUW_zun_a86yEIubts,123
+ atomicshop/__init__.py,sha256=hgaCf3sGJGnktr5gfFAzZxI1NReX4ILvEOdMvHiDI40,123
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -122,8 +122,8 @@ atomicshop/mains/installs/pycharm.py,sha256=uYTfME7hOeNkAsOZxDDPj2hDqmkxrFqVV6Nv
  atomicshop/mitm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/mitm/connection_thread_worker.py,sha256=PQ8bwOgrPudYP5oPnSi_DWaKXOi038M8TMImlLkxuPI,20486
  atomicshop/mitm/import_config.py,sha256=_V-IVJ7a1L6E-VOR4CDfZj-S1odbsIlBe13ij0NlpqY,7974
- atomicshop/mitm/initialize_engines.py,sha256=UGdT5DKYNri3MNOxESP7oeSxYiUDrVilJ4jic_nwdew,8055
- atomicshop/mitm/initialize_mitm_server.py,sha256=5JGkyvAvz1sJVeRGMJWSQiQ-VOdrU-NJn633oxQe0cw,13143
+ atomicshop/mitm/initialize_engines.py,sha256=jiuA9BPLiUKG_7kE9FMCLrhf5W4LnPoYgc9FfV3AKRs,8175
+ atomicshop/mitm/initialize_mitm_server.py,sha256=avC1lfS65dCdJ3cfF3olGIKycnz3ChlzCuXFmhah0Uk,13418
  atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
  atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
  atomicshop/mitm/statistic_analyzer.py,sha256=ctsf-MBIUvG4-R0K4gFQyi_b42-VCq-5s7hgO9jMOes,38415
@@ -233,10 +233,10 @@ atomicshop/wrappers/factw/rest/statistics.py,sha256=vznwzKP1gEF7uXz3HsuV66BU9wrp
  atomicshop/wrappers/factw/rest/status.py,sha256=4O3xS1poafwyUiLDkhyx4oMMe4PBwABuRPpOMnMKgIU,641
  atomicshop/wrappers/fibratusw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/fibratusw/install.py,sha256=PLVymDe0HuOvU0r2lje8BkQAgtiOWEeRO7n-1zKuL7A,3287
- atomicshop/wrappers/loggingw/formatters.py,sha256=mUtcJJfmhLNrwUVYShXTmdu40dBaJu4TS8FiuTXI7ys,7189
- atomicshop/wrappers/loggingw/handlers.py,sha256=2A_3Qy1B0RvVWZmQocAB6CmpqlXoKJ-yi6iBWG2jNLo,8274
+ atomicshop/wrappers/loggingw/formatters.py,sha256=808R7K3e3ZJD2BXfqI6UMOyXGrCgt9SYh2Uv7sL_1KQ,7432
+ atomicshop/wrappers/loggingw/handlers.py,sha256=MzoVWE_WX2OZZ5nD9eJpEBK-SW9XJVkm1Hl_9B3gM8E,9769
  atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
- atomicshop/wrappers/loggingw/loggingw.py,sha256=m6YySEedP3_4Ik1S_uGMxETSbmRkmMYmAZxhHBlXSlo,16616
+ atomicshop/wrappers/loggingw/loggingw.py,sha256=JPNSglpuN7ryXsmUHLMCnJRsjvRPV3Q64CN3OgSEFsU,14863
  atomicshop/wrappers/loggingw/reading.py,sha256=wse-38zUDHB3HUB28R8Ah_Ig3Wxt2tChapKtu-yyy2E,17036
  atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
@@ -281,8 +281,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
  atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
  atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
- atomicshop-2.14.4.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
- atomicshop-2.14.4.dist-info/METADATA,sha256=fjxtj_SzmyjVK6o_Z08jX-lLJoz7m_vYNPvn0fCoFYU,10478
- atomicshop-2.14.4.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- atomicshop-2.14.4.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
- atomicshop-2.14.4.dist-info/RECORD,,
+ atomicshop-2.14.5.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+ atomicshop-2.14.5.dist-info/METADATA,sha256=gY_7i-PXmCnygtkJYX2xDi6Jl9jH8AeVRnaBhR2SsCk,10478
+ atomicshop-2.14.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ atomicshop-2.14.5.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+ atomicshop-2.14.5.dist-info/RECORD,,