atomicshop 2.12.20__py3-none-any.whl → 2.12.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
  """Atomic Basic functions and classes to make developer life easier"""

  __author__ = "Den Kras"
- __version__ = '2.12.20'
+ __version__ = '2.12.22'
@@ -105,7 +105,11 @@ def write_list_to_csv(
          mode: str = 'w'
  ) -> None:
      """
-     Function to write list object that each iteration of it contains dict object with same keys and different values.
+     This function got dual purpose:
+     1. Write list object that each iteration of it contains list object with same length.
+     2. Write list object that each iteration of it contains dict object with same keys and different values.
+     The dictionary inside the function will be identified by the first iteration of the list.
+     Other objects (inside the provided list) than dictionary will be identified as regular objects.

      :param file_path: Full file path to CSV file.
      :param content_list: List object that each iteration contains dictionary with same keys and different values.
@@ -113,7 +117,7 @@ def write_list_to_csv(
      :return: None.
      """

-     with open(file_path, mode=mode) as csv_file:
+     with open(file_path, mode=mode, newline='') as csv_file:
          if len(content_list) > 0 and isinstance(content_list[0], dict):
              # Treat the list as list of dictionaries.
              header = content_list[0].keys()
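
The reworked write_list_to_csv docstring above describes a dual-purpose writer: a list of same-length lists is written as plain rows, while a list of dicts uses the first element's keys as the CSV header. A minimal usage sketch, assuming the parameter names from the docstring and the module path listed in the RECORD section (atomicshop/file_io/csvs.py); file names and row values are illustrative only:

    from atomicshop.file_io.csvs import write_list_to_csv

    # List of dicts: the keys of the first dict become the CSV header row.
    write_list_to_csv(
        file_path='hosts.csv',
        content_list=[
            {'host': 'example.com', 'status_code': 200},
            {'host': 'example.org', 'status_code': 404},
        ]
    )

    # List of lists: each inner list is written as a plain row, no header is derived.
    write_list_to_csv(
        file_path='hosts_plain.csv',
        content_list=[['example.com', 200], ['example.org', 404]],
        mode='a'
    )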
@@ -16,26 +16,13 @@ from ..wrappers.loggingw import loggingw
  from ..print_api import print_api


+ STATISTICS_HEADER: str = \
+     'request_time_sent,host,path,command,status_code,request_size_bytes,response_size_bytes,file_path,process_cmd,error'
+
+
  def initialize_mitm_server(config_static):
      # Main function should return integer with error code, 0 is successful.
      # Since listening server is infinite, this will not be reached.
-     def output_statistics_csv_header():
-         # Since there is no implementation of header in logging file handler modules, we'll do it manually each time.
-         statistics_header: list = ['request_time_sent',
-                                    'host',
-                                    'path',
-                                    'command',
-                                    'status_code',
-                                    'request_size_bytes',
-                                    'response_size_bytes',
-                                    # 'request_hex',
-                                    # 'response_hex',
-                                    'file_path',
-                                    'process_cmd',
-                                    'error'
-                                    ]
-         statistics_logger.info(','.join(statistics_header))
-
      # After modules import - we check for python version.
      check_python_version_compliance(minimum_version='3.11')

@@ -190,9 +177,8 @@ def initialize_mitm_server(config_static):
      # Creating Statistics logger.
      statistics_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
          logger_name="statistics", directory_path=config['log']['logs_path'],
-         file_extension=config_static.CSV_EXTENSION, formatter_message_only=True
+         file_extension=config_static.CSV_EXTENSION, formatter_message_only=True, header=STATISTICS_HEADER
      )
-     output_statistics_csv_header()

      network_logger_name = "network"
      network_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
@@ -1,13 +1,11 @@
- import time
- from typing import Union
  import os

- from . import included_files, fw_files, virtual_file_path, file_object, analysis
- from .. import config_fact, get_file_data
+ from . import fw_files, virtual_file_path, file_object, analysis
+ from .. import config_fact
  from ....print_api import print_api, print_status_of_list
- from ....file_io import file_io, jsons, csvs
+ from ....file_io import jsons, csvs
  from ....basics import dicts
- from .... import filesystem, ip_addresses
+ from .... import ip_addresses
  from ...psycopgw import psycopgw


@@ -1,6 +1,34 @@
  import logging
  from logging.handlers import TimedRotatingFileHandler, QueueListener, QueueHandler
  import re
+ import os
+
+
+ class TimedRotatingFileHandlerWithHeader(TimedRotatingFileHandler):
+     """
+     Custom TimedRotatingFileHandler that writes a header to the log file each time there is a file rotation.
+     Useful for writing CSV files.
+
+     :param header: string, Header to write to the log file.
+         Example: "time,host,error"
+     """
+     def __init__(self, *args, **kwargs):
+         self.header = kwargs.pop('header', None)
+         super().__init__(*args, **kwargs)
+
+     def doRollover(self):
+         super().doRollover()
+         self._write_header()
+
+     def _write_header(self):
+         if self.header:
+             with open(self.baseFilename, 'a') as f:
+                 f.write(self.header + '\n')
+
+     def emit(self, record):
+         if not os.path.exists(self.baseFilename) or os.path.getsize(self.baseFilename) == 0:
+             self._write_header()
+         super().emit(record)


  def get_stream_handler() -> logging.StreamHandler:
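
The new TimedRotatingFileHandlerWithHeader pops the header keyword before delegating to the base TimedRotatingFileHandler, writes the header on emit() when the target file is missing or empty, and re-writes it after every doRollover(). A small direct-usage sketch, assuming the module path listed in the RECORD section; the logger name, file path, and log line are illustrative only:

    import logging
    from atomicshop.wrappers.loggingw.handlers import TimedRotatingFileHandlerWithHeader

    logger = logging.getLogger("csv_demo")
    logger.setLevel(logging.INFO)

    # 'header' is consumed by the subclass; the remaining kwargs are the standard
    # TimedRotatingFileHandler arguments.
    handler = TimedRotatingFileHandlerWithHeader(
        filename="demo.csv", when="midnight", interval=1, delay=True, header="time,host,error")
    handler.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(handler)

    # With delay=True the first emit() finds no file, so the header line is written before the record.
    logger.info("2024-01-01 00:00:01,host1,no error")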
@@ -38,6 +66,28 @@ def get_timed_rotating_file_handler(
          filename=log_file_path, when=when, interval=interval, delay=delay, encoding=encoding)


+ def get_timed_rotating_file_handler_with_header(
+         log_file_path: str, when: str = "midnight", interval: int = 1, delay: bool = False, encoding=None,
+         header: str = None) -> TimedRotatingFileHandlerWithHeader:
+     """
+     Function to get a TimedRotatingFileHandler with header.
+     This handler will output messages to a file, rotating the log file at certain timed intervals.
+     It will write a header to the log file each time there is a file rotation.
+
+     :param log_file_path: Path to the log file.
+     :param when: When to rotate the log file. Possible
+     :param interval: Interval to rotate the log file.
+     :param delay: bool, If set to True, the log file will be created only if there's something to write.
+     :param encoding: Encoding to use for the log file. Same as for the TimeRotatingFileHandler, which uses Default None.
+     :param header: Header to write to the log file.
+         Example: "time,host,error"
+     :return: TimedRotatingFileHandlerWithHeader.
+     """
+
+     return TimedRotatingFileHandlerWithHeader(
+         filename=log_file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
+
+
  def start_queue_listener_for_file_handler(
          file_handler: logging.FileHandler, queue_object) -> logging.handlers.QueueListener:
      """
@@ -133,3 +183,22 @@ def change_rotated_filename(file_handler: logging.Handler, file_extension: str):
      file_handler.suffix = logfile_suffix
      file_handler.namer = lambda name: name.replace(file_extension + ".", "") + file_extension
      file_handler.extMatch = logfile_regex_suffix
+
+
+ def has_handlers(logger: logging.Logger) -> bool:
+     """
+     Function to check if the logger has handlers.
+     :param logger: Logger to check
+     :return: True if logger has handlers, False otherwise
+     """
+
+     # Omitted the usage of "hasHandlers()" method, since sometimes returned "True" even when there were no handlers
+     # Didn't research the issue much, just used the "len(logger.handlers)" to check how many handlers there are
+     # in the logger.
+     # if not logging.getLogger(function_module_name).hasHandlers():
+     # if len(logging.getLogger(function_module_name).handlers) == 0:
+
+     if len(logger.handlers) == 0:
+         return False
+     else:
+         return True
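
has_handlers() has moved from the now-deleted checks.py (see the end of this diff) into handlers.py. A tiny sketch of the intended check, assuming the wrapper module path from the RECORD section; the logger name is illustrative only:

    import logging
    from atomicshop.wrappers.loggingw import handlers

    logger = logging.getLogger("some_logger")
    if not handlers.has_handlers(logger):
        # Only attach a handler if none were configured yet.
        logger.addHandler(logging.StreamHandler())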
@@ -78,7 +78,8 @@ def get_logger_with_stream_handler_and_timedfilehandler(
          when: str = "midnight",
          interval: int = 1,
          delay: bool = True,
-         encoding=None
+         encoding=None,
+         header: str = None
  ) -> logging.Logger:
      """
      Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.
@@ -116,6 +117,8 @@ def get_logger_with_stream_handler_and_timedfilehandler(
          If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
      :param delay: bool, If set to True, the log file will be created only if there's something to write.
      :param encoding: string, Encoding to use for the log file. Default is None.
+     :param header: string, Header to write to the log file.
+         Example: "time,host,error"

      :return: Logger.

@@ -126,25 +129,14 @@ def get_logger_with_stream_handler_and_timedfilehandler(


      def main():
-         def output_csv_header():
-             # Since there is no implementation of header in logging file handler modules, we'll do it manually each time.
-             header: list = ['time',
-                             'host',
-                             'path',
-                             'error'
-                             ]
-             error_logger.info(','.join(header))
-
-
+         header: str = "time,host,error"
          output_directory: str = "D:\\logs"

          error_logger = loggingw.get_logger_with_stream_handler_and_timedfilehandler(
              logger_name="errors", directory_path=output_directory,
-             file_extension=".csv", formatter_message_only=True
+             file_extension=".csv", formatter_message_only=True, header=header
          )

-         output_csv_header()
-
          error_logger.info(f"{datetime.now()},host1,/path/to/file,error message")


@@ -155,7 +147,7 @@ def get_logger_with_stream_handler_and_timedfilehandler(
      add_stream_handler(logger, logging_level, formatter_streamhandler, formatter_message_only)
      add_timedfilehandler_with_queuehandler(
          logger, directory_path, file_name, file_extension, logging_level, formatter_filehandler,
-         formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding
+         formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding, header
      )

      return logger
@@ -201,10 +193,20 @@ def add_stream_handler(


  def add_timedfilehandler_with_queuehandler(
-         logger: logging.Logger, directory_path, file_name_no_extension: str = None, file_extension: str = '.txt',
+         logger: logging.Logger,
+         directory_path: str,
+         file_name_no_extension: str = None,
+         file_extension: str = '.txt',
          logging_level="DEBUG",
-         formatter='default', formatter_message_only: bool = False, disable_duplicate_ms: bool = False,
-         when: str = 'midnight', interval: int = 1, delay: bool = True, encoding=None):
+         formatter='default',
+         formatter_message_only: bool = False,
+         disable_duplicate_ms: bool = False,
+         when: str = 'midnight',
+         interval: int = 1,
+         delay: bool = True,
+         encoding=None,
+         header: str = None
+ ):
      """
      Function to add TimedRotatingFileHandler and QueueHandler to logger.
      TimedRotatingFileHandler will output messages to the file through QueueHandler.
@@ -238,6 +240,8 @@ def add_timedfilehandler_with_queuehandler(
          If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
      :param delay: bool, If set to True, the log file will be created only if there's something to write.
      :param encoding: string, Encoding to use for the log file. Default is None.
+     :param header: string, Header to write to the log file.
+         Example: "time,host,error"
      """

      # If file name wasn't provided we will use the logger name instead.
@@ -254,8 +258,17 @@ def add_timedfilehandler_with_queuehandler(

      # Creating file handler with log filename. At this stage the log file is created and locked by the handler,
      # Unless we use "delay=True" to tell the class to write the file only if there's something to write.
-     file_handler = handlers.get_timed_rotating_file_handler(
-         log_file_path, when=when, interval=interval, delay=delay, encoding=encoding)
+
+     if file_extension == ".csv":
+         # If file extension is CSV, we'll set the header to the file.
+         # This is needed since the CSV file will be rotated, and we'll need to set the header each time.
+         # We'll use the custom TimedRotatingFileHandlerWithHeader class.
+         file_handler = handlers.get_timed_rotating_file_handler_with_header(
+             log_file_path, when=when, interval=interval, delay=delay, encoding=encoding, header=header)
+     else:
+         file_handler = handlers.get_timed_rotating_file_handler(
+             log_file_path, when=when, interval=interval, delay=delay, encoding=encoding)
+
      loggers.set_logging_level(file_handler, logging_level)

      if formatter == "default":
@@ -113,6 +113,10 @@ def get_logs_paths(
              logs_files = []
          else:
              logs_files = [logs_files[-2]]
+     # If there is only one file, meaning it is the current day log.
+     # If the 'previous_day_only' is True, then there are no previous day logs to output.
+     elif len(logs_files) == 1 and previous_day_only:
+         logs_files = []

      return logs_files

@@ -236,16 +240,25 @@ def get_latest_lines(
      return: List of new lines.

      Usage:
+         from typing import Union
+
+
+         # The header of the log file will be read from the first iteration of the log file.
+         # When the file is rotated, this header will be used to not read the header again.
+         header: Union[list, None] = None
          while True:
-             latest_lines, current_lines, existing_lines, last_24_hours_lines = get_latest_log_lines(
+             latest_lines, previous_day_24h_lines, header = reading.get_latest_lines(
                  log_file_path='/path/to/log.csv',
-                 log_type='csv'
+                 log_type='csv',
+                 date_pattern='%Y_%m_%d',
+                 get_previous_file=True,
+                 header=header
              )

              if latest_lines:
                  # Do something with the new lines.

-             if last_24_hours_lines:
+             if previous_day_24h_lines:
                  # Do something with the last 24 hours lines. Reminder, this will happen once a day on log rotation.

              time.sleep(1)
@@ -300,7 +313,8 @@ def get_latest_lines(
              log_type='csv',
              previous_day_only=True
          )[0]['file_path']
-     except KeyError:
+     # If you get IndexError, it means that there are no previous day logs to read.
+     except IndexError:
          pass

      # Count all the rotated files.
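
The exception type changes above because the lookup with previous_day_only=True now returns an empty list when only the current-day log exists (see the get_logs_paths hunk earlier), so indexing [0] raises IndexError rather than the previously caught KeyError. A hedged sketch of an equivalent caller, assuming the call is reading.get_logs_paths as defined earlier in this diff; the positional directory argument and its value are assumptions:

    from atomicshop.wrappers.loggingw import reading

    previous_day_file = None
    try:
        previous_day_file = reading.get_logs_paths(
            '/path/to/logs',          # positional directory argument, name assumed
            log_type='csv',
            previous_day_only=True
        )[0]['file_path']
    except IndexError:
        # No previous-day log has been rotated yet.
        pass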
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: atomicshop
- Version: 2.12.20
+ Version: 2.12.22
  Summary: Atomic functions and classes to make developer life easier
  Author: Denis Kras
  License: MIT License
@@ -1,4 +1,4 @@
- atomicshop/__init__.py,sha256=U5HBBCgZbCFjvJTS1UanW_wKk5yZesdp5sAnQ2RGDSg,124
+ atomicshop/__init__.py,sha256=g2vsW13t8sldcuaaZZDrssh2JaK2htDJxU8zZNrBFoM,124
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -104,7 +104,7 @@ atomicshop/etw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/etw/dns_trace.py,sha256=I4OZsiZUDyj7B4fKTOqsB1tcX1DUMw9uh4CwXlcmHfY,5571
  atomicshop/etw/etw.py,sha256=xVJNbfCq4KgRfsDnul6CrIdAMl9xRBixZ-hUyqiB2g4,2403
  atomicshop/file_io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- atomicshop/file_io/csvs.py,sha256=WOtDyVFhhA1RD3yrU9P33kDY1EKFEz8UmVCUkqYKuog,5503
+ atomicshop/file_io/csvs.py,sha256=y8cJtnlN-NNxNupzJgSeGq9aQ4wNxYLFPX9vNNlUiIc,5830
  atomicshop/file_io/docxs.py,sha256=6tcYFGp0vRsHR47VwcRqwhdt2DQOwrAUYhrwN996n9U,5117
  atomicshop/file_io/file_io.py,sha256=FOZ6_PjOASxSDHESe4fwDv5miXYR10OHTxkxtEHoZYQ,6555
  atomicshop/file_io/jsons.py,sha256=q9ZU8slBKnHLrtn3TnbK1qxrRpj5ZvCm6AlsFzoANjo,5303
@@ -116,7 +116,7 @@ atomicshop/mitm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/mitm/connection_thread_worker.py,sha256=PQ8bwOgrPudYP5oPnSi_DWaKXOi038M8TMImlLkxuPI,20486
  atomicshop/mitm/import_config.py,sha256=_V-IVJ7a1L6E-VOR4CDfZj-S1odbsIlBe13ij0NlpqY,7974
  atomicshop/mitm/initialize_engines.py,sha256=UGdT5DKYNri3MNOxESP7oeSxYiUDrVilJ4jic_nwdew,8055
- atomicshop/mitm/initialize_mitm_server.py,sha256=aXNZlRu1_RGjC7lagvs2Q8rjQiygxYucy-U4C_SBnsk,13871
+ atomicshop/mitm/initialize_mitm_server.py,sha256=5JGkyvAvz1sJVeRGMJWSQiQ-VOdrU-NJn633oxQe0cw,13143
  atomicshop/mitm/message.py,sha256=u2U2f2SOHdBNU-6r1Ik2W14ai2EOwxUV4wVfGZA098k,1732
  atomicshop/mitm/shared_functions.py,sha256=PaK_sbnEA5zo9k2ktEOKLmvo-6wRUunxzSNRr41uXIQ,1924
  atomicshop/mitm/statistic_analyzer.py,sha256=WvTal-Aox-enM-5jYtFqiTplNquS4VMnmQYNEIXvZZA,23552
@@ -201,7 +201,7 @@ atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py,sha2
  atomicshop/wrappers/factw/postgresql/__init__.py,sha256=xMBn2d3Exo23IPP2F_9-SXmOlhFbwWDgS9KwozSTjA0,162
  atomicshop/wrappers/factw/postgresql/analysis.py,sha256=2Rxzy2jyq3zEKIo53z8VkjuslKE_i5mq2ZpmJAvyd6U,716
  atomicshop/wrappers/factw/postgresql/file_object.py,sha256=VRiCXnsd6yDbnsE-TEKYPC-gkAgFVkE6rygRrJLQShI,713
- atomicshop/wrappers/factw/postgresql/firmware.py,sha256=wnohSnSOCmlTUCzzHIIPGkRrnownlGKgIFyhhdhNEoA,10759
+ atomicshop/wrappers/factw/postgresql/firmware.py,sha256=ywPn8yBAliX4FO7ZqfrPsGPZMxBTPLSwEmex4pz1CZ8,10668
  atomicshop/wrappers/factw/postgresql/fw_files.py,sha256=P1jq4AAZa7fygWdEZtFJOnfz4tyqmPpvFzEMDKrCRkU,1291
  atomicshop/wrappers/factw/postgresql/included_files.py,sha256=sn5YhLkrsvjhrVSA8O8YUNfbqR9STprSuQGEnHsK0jE,1025
  atomicshop/wrappers/factw/postgresql/virtual_file_path.py,sha256=iR68A_My_ohgRcYdueMaQF9EHOgBRN3bIi8Nq59g3kc,1098
@@ -214,12 +214,11 @@ atomicshop/wrappers/factw/rest/statistics.py,sha256=vznwzKP1gEF7uXz3HsuV66BU9wrp
  atomicshop/wrappers/factw/rest/status.py,sha256=4O3xS1poafwyUiLDkhyx4oMMe4PBwABuRPpOMnMKgIU,641
  atomicshop/wrappers/fibratusw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/fibratusw/install.py,sha256=PLVymDe0HuOvU0r2lje8BkQAgtiOWEeRO7n-1zKuL7A,3287
- atomicshop/wrappers/loggingw/checks.py,sha256=AGFsTsLxHQd1yAraa5popqLaGO9VM0KpcPGuSLn5ptU,719
  atomicshop/wrappers/loggingw/formatters.py,sha256=mUtcJJfmhLNrwUVYShXTmdu40dBaJu4TS8FiuTXI7ys,7189
- atomicshop/wrappers/loggingw/handlers.py,sha256=qm5Fbu8eDmlstMduUe5nKUlJU5IazFkSnQizz8Qt2os,5479
+ atomicshop/wrappers/loggingw/handlers.py,sha256=2A_3Qy1B0RvVWZmQocAB6CmpqlXoKJ-yi6iBWG2jNLo,8274
  atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
- atomicshop/wrappers/loggingw/loggingw.py,sha256=TJBT8n9wCcAgrnjvT3sz3OvRqJ0A3Q5GtYt7ZncvVv4,16146
- atomicshop/wrappers/loggingw/reading.py,sha256=CtYOwOLFHj_hqYyZx-dKUo5ZDrn3cO-f7vU4EX515xI,14980
+ atomicshop/wrappers/loggingw/loggingw.py,sha256=m6YySEedP3_4Ik1S_uGMxETSbmRkmMYmAZxhHBlXSlo,16616
+ atomicshop/wrappers/loggingw/reading.py,sha256=iRXwPHhwkzuBFz4nlRdO9fpfxnNCYRE09r8JvtqTcao,15671
  atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
  atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -255,8 +254,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
  atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
  atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
- atomicshop-2.12.20.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
- atomicshop-2.12.20.dist-info/METADATA,sha256=tpZ7mC5c8cpsmgMGGFRrXl5NUsNi8yfGpcMhH8HEYFY,10479
- atomicshop-2.12.20.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- atomicshop-2.12.20.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
- atomicshop-2.12.20.dist-info/RECORD,,
+ atomicshop-2.12.22.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+ atomicshop-2.12.22.dist-info/METADATA,sha256=754IBZW-ExlLv33i93QGyMKGQOEXOrw1uIsXmERVvKM,10479
+ atomicshop-2.12.22.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ atomicshop-2.12.22.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+ atomicshop-2.12.22.dist-info/RECORD,,
@@ -1,20 +0,0 @@
- import logging
-
-
- def has_handlers(logger: logging.Logger) -> bool:
-     """
-     Function to check if the logger has handlers.
-     :param logger: Logger to check
-     :return: True if logger has handlers, False otherwise
-     """
-
-     # Omitted the usage of "hasHandlers()" method, since sometimes returned "True" even when there were no handlers
-     # Didn't research the issue much, just used the "len(logger.handlers)" to check how many handlers there are
-     # in the logger.
-     # if not logging.getLogger(function_module_name).hasHandlers():
-     # if len(logging.getLogger(function_module_name).handlers) == 0:
-
-     if len(logger.handlers) == 0:
-         return False
-     else:
-         return True