atomicshop 3.2.12__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic. Click here for more details.

@@ -8,6 +8,7 @@ import contextlib
8
8
  import threading
9
9
  import queue
10
10
  import multiprocessing
11
+ import time
11
12
 
12
13
  from . import loggers, handlers, filters
13
14
  from ...file_io import csvs
@@ -15,6 +16,9 @@ from ...basics import tracebacks, ansi_escape_codes
15
16
  from ... import print_api
16
17
 
17
18
 
19
+ QUEUE_LISTENER_PROCESS_NAME_PREFIX: str = "QueueListener-"
20
+
21
+
18
22
  class LoggingwLoggerAlreadyExistsError(Exception):
19
23
  pass
20
24
 
@@ -23,6 +27,7 @@ class LoggingwLoggerAlreadyExistsError(Exception):
23
27
  def create_logger(
24
28
  logger_name: str = None,
25
29
  get_queue_listener: bool = False,
30
+ start_queue_listener_multiprocess_add_queue_handler: bool = False,
26
31
 
27
32
  add_stream: bool = False,
28
33
  add_timedfile: bool = False,
@@ -64,6 +69,8 @@ def create_logger(
64
69
  :param logger_name: Name of the logger.
65
70
  :param get_queue_listener: bool, If set to True, QueueListener will be started with all the handlers
66
71
  like 'add_timedfile' and 'add_stream', using the 'log_queue'.
72
+ :param start_queue_listener_multiprocess_add_queue_handler: bool, If set to True, the QueueListener will be
73
+ started in a separate multiprocessing process, without you handling this manually.
67
74
 
68
75
  Only one of the following parameters can be set at a time: 'logger_name', 'get_queue_listener'.
69
76
 
@@ -205,9 +212,12 @@ def create_logger(
205
212
  from atomicshop.wrappers.loggingw import loggingw
206
213
 
207
214
 
208
- def worker1(log_queue: multiprocessing.Queue):
215
+ def worker1(
216
+ log_queue: multiprocessing.Queue,
217
+ logger_name: str
218
+ ):
209
219
  error_logger = loggingw.create_logger(
210
- logger_name='network',
220
+ logger_name=logger_name,
211
221
  add_queue_handler=True,
212
222
  log_queue=log_queue
213
223
  )
@@ -215,9 +225,12 @@ def create_logger(
215
225
  error_logger.info("Worker1 log message for 'network' logger.")
216
226
 
217
227
 
218
- def worker2(log_queue: multiprocessing.Queue):
228
+ def worker2(
229
+ log_queue: multiprocessing.Queue,
230
+ logger_name: str
231
+ ):
219
232
  error_logger = loggingw.create_logger(
220
- logger_name='network',
233
+ logger_name=logger_name,
221
234
  add_queue_handler=True,
222
235
  log_queue=log_queue
223
236
  )
@@ -238,8 +251,8 @@ def create_logger(
238
251
  formatter_filehandler='DEFAULT'
239
252
  )
240
253
 
241
- process1 = multiprocessing.Process(target=worker1, args=(log_queue,))
242
- process2 = multiprocessing.Process(target=worker2, args=(log_queue,))
254
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
255
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
243
256
 
244
257
  process1.start()
245
258
  process2.start()
@@ -247,16 +260,199 @@ def create_logger(
247
260
  process1.join()
248
261
  process2.join()
249
262
 
263
+ # If we exit the function, we need to stop the listener
250
264
  queue_listener.stop()
251
265
 
252
266
  return 0
253
267
 
254
268
 
269
+ if __name__ == "__main__":
270
+ sys.exit(main())
271
+
272
+ --------------------------------------------------
273
+
274
+ Example if you need to start a QueueListener in multiprocessing, which requires less boilerplate code, and Python's
275
+ garbage collector handles the listener closing without the need to call 'stop()' method:
276
+
277
+ import sys
278
+ import multiprocessing
279
+ from atomicshop.wrappers.loggingw import loggingw
280
+
281
+
282
+ def worker1(
283
+ log_queue: multiprocessing.Queue,
284
+ logger_name: str
285
+ ):
286
+ error_logger = loggingw.create_logger(
287
+ logger_name=logger_name,
288
+ add_queue_handler=True,
289
+ log_queue=log_queue
290
+ )
291
+
292
+ error_logger.info("Worker1 log message for 'network' logger.")
293
+
294
+
295
+ def worker2(
296
+ log_queue: multiprocessing.Queue,
297
+ logger_name: str
298
+ ):
299
+ error_logger = loggingw.create_logger(
300
+ logger_name=logger_name,
301
+ add_queue_handler=True,
302
+ log_queue=log_queue
303
+ )
304
+
305
+ error_logger.info("Worker2 log message for 'network' logger.")
306
+
307
+
308
+ def main():
309
+ log_queue = multiprocessing.Queue()
310
+ logger_name: str = 'network'
311
+
312
+ loggingw.start_queue_listener_in_multiprocessing(
313
+ logger_name=logger_name,
314
+ add_stream=True,
315
+ add_timedfile=True,
316
+ log_queue=log_queue,
317
+ file_type='txt',
318
+ formatter_streamhandler='DEFAULT',
319
+ formatter_filehandler='DEFAULT'
320
+ )
321
+
322
+ # If you want you can get the QueueListener processes.
323
+ # listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
324
+ # Or if you started several listeners, you can get all of them:
325
+ # listener_processes_list: list = loggingw.get_listener_processes()
326
+
327
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, logger_name))
328
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, logger_name))
329
+
330
+ process1.start()
331
+ process2.start()
332
+
333
+ process1.join()
334
+ process2.join()
335
+
336
+ return 0
337
+
338
+
339
+ if __name__ == "__main__":
340
+ sys.exit(main())
341
+
342
+ ---------------------------------------------------
343
+
344
+ Or you can use the 'create_logger' function with 'start_queue_listener_multiprocess_add_queue_handler=True' parameter,
345
+ which will start the QueueListener in a separate multiprocessing process automatically if you want to use the
346
+ queue handler logger also in the main process:
347
+
348
+ import sys
349
+ import multiprocessing
350
+ from atomicshop.wrappers.loggingw import loggingw
351
+
352
+
353
+ def worker1(
354
+ log_queue: multiprocessing.Queue,
355
+ logger_name: str
356
+ ):
357
+ error_logger = loggingw.create_logger(
358
+ logger_name=logger_name,
359
+ add_queue_handler=True,
360
+ log_queue=log_queue
361
+ )
362
+
363
+ error_logger.info("Worker1 log message for 'network' logger.")
364
+
365
+
366
+ def worker2(
367
+ log_queue: multiprocessing.Queue,
368
+ logger_name: str
369
+ ):
370
+ error_logger = loggingw.create_logger(
371
+ logger_name=logger_name,
372
+ add_queue_handler=True,
373
+ log_queue=log_queue
374
+ )
375
+
376
+ error_logger.info("Worker2 log message for 'network' logger.")
377
+
378
+
379
+ def main():
380
+ log_queue = multiprocessing.Queue()
381
+
382
+ main_logger: Logger = loggingw.create_logger(
383
+ logger_name='network',
384
+ start_queue_listener_multiprocess_add_queue_handler=True,
385
+ add_stream=True,
386
+ add_timedfile=True,
387
+ log_queue=log_queue,
388
+ file_type='txt',
389
+ formatter_streamhandler='DEFAULT',
390
+ formatter_filehandler='DEFAULT'
391
+ )
392
+
393
+ main_logger.info("Main process log message for 'network' logger.")
394
+
395
+ # If you want you can get the QueueListener processes.
396
+ # listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
397
+ # Or if you started several listeners, you can get all of them:
398
+ # listener_processes_list: list = loggingw.get_listener_processes()
399
+
400
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
401
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
402
+
403
+ process1.start()
404
+ process2.start()
405
+
406
+ process1.join()
407
+ process2.join()
408
+
409
+ return 0
410
+
411
+
255
412
  if __name__ == "__main__":
256
413
  sys.exit(main())
257
414
  """
258
415
 
259
- if logger_name and get_queue_listener:
416
+ if start_queue_listener_multiprocess_add_queue_handler and (get_queue_listener or add_queue_handler):
417
+ raise ValueError("You don't need to set 'get_queue_listener' or 'add_queue_handler' "
418
+ "when setting 'start_queue_listener_multiprocess_add_queue_handler'.")
419
+
420
+ if start_queue_listener_multiprocess_add_queue_handler:
421
+ logger_instance: Logger = _create_logger_with_queue_handler(
422
+ logger_name=logger_name,
423
+ log_queue=log_queue
424
+ )
425
+
426
+ # Start the QueueListener in a separate multiprocessing process.
427
+ start_queue_listener_in_multiprocessing(
428
+ logger_name=logger_name,
429
+ add_stream=add_stream,
430
+ add_timedfile=add_timedfile,
431
+ add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,
432
+ log_queue=log_queue,
433
+ file_path=file_path,
434
+ directory_path=directory_path,
435
+ file_type=file_type,
436
+ logging_level=logging_level,
437
+ formatter_streamhandler=formatter_streamhandler,
438
+ formatter_filehandler=formatter_filehandler,
439
+ formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
440
+ formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
441
+ filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
442
+ filehandler_rotation_date_format=filehandler_rotation_date_format,
443
+ filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
444
+ filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
445
+ when=when,
446
+ interval=interval,
447
+ backupCount=backupCount,
448
+ delay=delay,
449
+ encoding=encoding,
450
+ header=header
451
+ )
452
+
453
+ return logger_instance
454
+
455
+ if logger_name and get_queue_listener and not start_queue_listener_multiprocess_add_queue_handler:
260
456
  raise ValueError("You can't set both 'logger_name' and 'get_queue_listener'.")
261
457
  if not logger_name and not get_queue_listener:
262
458
  raise ValueError("You need to provide 'logger_name' or 'get_queue_listener'.")
@@ -366,6 +562,23 @@ def create_logger(
366
562
  return queue_listener
367
563
 
368
564
 
565
+ def _create_logger_with_queue_handler(
566
+ logger_name: str,
567
+ log_queue: Union[queue.Queue, multiprocessing.Queue]
568
+ ) -> Logger:
569
+ """
570
+ The function to create a logger with QueueHandler so the QueueListener can be started later in multiprocessing.
571
+ """
572
+
573
+ logger_instance: Logger = create_logger(
574
+ logger_name=logger_name,
575
+ add_queue_handler=True,
576
+ log_queue=log_queue
577
+ )
578
+
579
+ return logger_instance
580
+
581
+
369
582
  def get_logger_with_level(
370
583
  logger_name: str,
371
584
  logging_level="DEBUG"
@@ -406,6 +619,135 @@ def disable_default_logger():
406
619
  logging.disable(logging.CRITICAL)
407
620
 
408
621
 
622
+ def start_queue_listener_in_multiprocessing(
623
+ logger_name: str = None,
624
+
625
+ add_stream: bool = False,
626
+ add_timedfile: bool = False,
627
+ add_timedfile_with_internal_queue: bool = False,
628
+
629
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
630
+ file_path: str = None,
631
+ directory_path: str = None,
632
+ file_type: Literal[
633
+ 'txt',
634
+ 'csv',
635
+ 'json'] = 'txt',
636
+ logging_level="DEBUG",
637
+ formatter_streamhandler: Union[
638
+ Literal['MESSAGE', 'DEFAULT'],
639
+ str,
640
+ None] = None,
641
+ formatter_filehandler: Union[
642
+ Literal['MESSAGE', 'DEFAULT'],
643
+ str,
644
+ None] = None,
645
+ formatter_streamhandler_use_nanoseconds: bool = True,
646
+ formatter_filehandler_use_nanoseconds: bool = True,
647
+ filehandler_rotate_at_rollover_time: bool = True,
648
+ filehandler_rotation_date_format: str = None,
649
+ filehandler_rotation_callback_namer_function: callable = None,
650
+ filehandler_rotation_use_default_namer_function: bool = True,
651
+ when: str = "midnight",
652
+ interval: int = 1,
653
+ backupCount: int = 0,
654
+ delay: bool = False,
655
+ encoding=None,
656
+ header: str = None
657
+ ) -> multiprocessing.Process:
658
+ """
659
+ Function to start a QueueListener in multiprocessing.
660
+ PARAMETERS are same as in 'create_logger' function.
661
+
662
+ logger_name: Name of the logger. Will be used only to name the QueueListener process.
663
+ """
664
+
665
+ if not file_path and directory_path and logger_name:
666
+ file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
667
+
668
+ worker_kwargs = dict(
669
+ get_queue_listener=True,
670
+
671
+ add_stream=add_stream,
672
+ add_timedfile=add_timedfile,
673
+ add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,
674
+
675
+ log_queue=log_queue,
676
+ file_path=file_path,
677
+ file_type=file_type,
678
+ logging_level=logging_level,
679
+ formatter_streamhandler=formatter_streamhandler,
680
+ formatter_filehandler=formatter_filehandler,
681
+ formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
682
+ formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
683
+ filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
684
+ filehandler_rotation_date_format=filehandler_rotation_date_format,
685
+ filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
686
+ filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
687
+ when=when,
688
+ interval=interval,
689
+ backupCount=backupCount,
690
+ delay=delay,
691
+ encoding=encoding,
692
+ header=header,
693
+ )
694
+
695
+ is_ready: multiprocessing.Event = multiprocessing.Event()
696
+
697
+ # Create a new process to run the QueueListener.
698
+ queue_listener_process = multiprocessing.Process(
699
+ target=_queue_listener_multiprocessing_worker,
700
+ name=f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}",
701
+ args=(is_ready,),
702
+ kwargs=worker_kwargs,
703
+ daemon=True
704
+ )
705
+ queue_listener_process.start()
706
+
707
+ # Wait until the QueueListener is loaded and ready.
708
+ is_ready.wait()
709
+
710
+ return queue_listener_process
711
+
712
+
713
+ def _queue_listener_multiprocessing_worker(
714
+ is_ready: multiprocessing.Event,
715
+ **kwargs
716
+ ):
717
+ network_logger_queue_listener = create_logger(**kwargs)
718
+ is_ready.set() # Signal that the logger is loaded and ready.
719
+
720
+ try:
721
+ while True:
722
+ time.sleep(1) # keep the process alive
723
+ except KeyboardInterrupt:
724
+ pass
725
+ finally:
726
+ network_logger_queue_listener.stop()
727
+
728
+
729
+ def get_listener_processes(
730
+ logger_name: str = None
731
+ ) -> list:
732
+ """
733
+ Function to get the list of QueueListener processes.
734
+ :param logger_name: Name of the logger to filter the listener processes.
735
+ If None, all listener processes will be returned.
736
+ If provided logger_name, only the listener processes for that logger will be returned.
737
+ :return: List of QueueListener multiprocessing processes.
738
+ """
739
+
740
+ listener_processes: list = []
741
+ for process in multiprocessing.active_children():
742
+ # If logger_name is provided, filter the processes by logger_name.
743
+ if logger_name and process.name == f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}":
744
+ listener_processes.append(process)
745
+ if not logger_name and process.name.startswith(QUEUE_LISTENER_PROCESS_NAME_PREFIX):
746
+ listener_processes.append(process)
747
+
748
+ return listener_processes
749
+
750
+
409
751
  def get_datetime_format_string_from_logger_file_handlers(logger: logging.Logger) -> list:
410
752
  """
411
753
  Function to get datetime format string from the logger's file handlers.
@@ -552,12 +894,15 @@ def temporary_change_logger_stream_record_color(logger: logging.Logger, color: s
552
894
  found_stream_handler.removeFilter(color_filter)
553
895
 
554
896
 
555
- class ExceptionCsvLogger:
897
+ class CsvLogger:
556
898
  def __init__(
557
899
  self,
558
900
  logger_name: str,
559
901
  directory_path: str = None,
560
- custom_header: str = None
902
+ custom_header: str = None,
903
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
904
+ add_queue_handler_start_listener_multiprocessing: bool = False,
905
+ add_queue_handler_no_listener_multiprocessing: bool = False
561
906
  ):
562
907
  """
563
908
  Initialize the ExceptionCsvLogger object.
@@ -571,12 +916,26 @@ class ExceptionCsvLogger:
571
916
  "custom1,custom2,custom3".
572
917
  These will be added to the default header as:
573
918
  "timestamp,custom1,custom2,custom3,exception".
919
+ :param log_queue: Queue to use for the logger, needed for the queue handler/listener.
920
+
921
+ :param add_queue_handler_start_listener_multiprocessing: bool, whether to add a queue handler that will use
922
+ the 'log_queue' and start the queue listener with the same 'log_queue' for multiprocessing.
923
+ :param add_queue_handler_no_listener_multiprocessing: bool, whether to add a queue handler that will use
924
+ the 'log_queue' but will not start the queue listener for multiprocessing. This is useful when you
925
+ already started the queue listener and want to add more handlers to the logger without
926
+ starting a new listener.
927
+
928
+ If you don't set any of 'add_queue_handler_start_listener_multiprocessing' or
929
+ 'add_queue_handler_no_listener_multiprocessing', the logger will be created without a queue handler.
574
930
  """
575
931
 
576
- if custom_header:
577
- self.header = f"timestamp,{custom_header},exception"
578
- else:
579
- self.header = "timestamp,exception"
932
+ if add_queue_handler_no_listener_multiprocessing and add_queue_handler_start_listener_multiprocessing:
933
+ raise ValueError(
934
+ "You can't set both 'add_queue_handler_start_listener_multiprocessing' and "
935
+ "'add_queue_handler_no_listener_multiprocessing' to True."
936
+ )
937
+
938
+ self.header = custom_header
580
939
 
581
940
  if is_logger_exists(logger_name):
582
941
  self.logger = get_logger_with_level(logger_name)
@@ -584,13 +943,95 @@ class ExceptionCsvLogger:
584
943
  if directory_path is None:
585
944
  raise ValueError("You need to provide 'directory_path' if the logger doesn't exist.")
586
945
 
587
- self.logger = create_logger(
588
- logger_name=logger_name,
589
- directory_path=directory_path,
590
- file_type="csv",
591
- add_timedfile=True,
592
- formatter_filehandler='MESSAGE',
593
- header=self.header)
946
+ if add_queue_handler_start_listener_multiprocessing:
947
+ if not log_queue:
948
+ raise ValueError(
949
+ "You need to provide 'logger_queue' if 'add_queue_handler_start_listener_multiprocess' is set to True.")
950
+
951
+ # Create a logger with a queue handler that starts a listener for multiprocessing.
952
+ self.logger = create_logger(
953
+ logger_name=logger_name,
954
+ start_queue_listener_multiprocess_add_queue_handler=True,
955
+ log_queue=log_queue,
956
+ directory_path=directory_path,
957
+ add_timedfile=True,
958
+ formatter_filehandler='MESSAGE',
959
+ file_type='csv',
960
+ header=self.header
961
+ )
962
+ elif add_queue_handler_no_listener_multiprocessing:
963
+ if not log_queue:
964
+ raise ValueError(
965
+ "You need to provide 'logger_queue' if 'add_queue_handler_no_listener_multiprocess' is set to True.")
966
+
967
+ # Create a logger with a queue handler that does not start a listener for multiprocessing.
968
+ self.logger = create_logger(
969
+ logger_name=logger_name,
970
+ add_queue_handler=True,
971
+ log_queue=log_queue
972
+ )
973
+ elif not add_queue_handler_start_listener_multiprocessing and not add_queue_handler_no_listener_multiprocessing:
974
+ self.logger = create_logger(
975
+ logger_name=logger_name,
976
+ directory_path=directory_path,
977
+ file_type="csv",
978
+ add_timedfile=True,
979
+ formatter_filehandler='MESSAGE',
980
+ header=self.header)
981
+
982
+ def write(
983
+ self,
984
+ row_of_cols: list
985
+ ):
986
+ """
987
+ Write a row of columns to the log file.
988
+
989
+ :param row_of_cols: List of columns to write to the csv log file.
990
+ """
991
+
992
+ output_csv_line: str = csvs.escape_csv_line_to_string(row_of_cols)
993
+
994
+ # If the number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header',
995
+ # raise an exception.
996
+ if (csvs.get_number_of_cells_in_string_line(output_csv_line) !=
997
+ csvs.get_number_of_cells_in_string_line(self.header)):
998
+ raise ValueError(
999
+ "Number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header'.")
1000
+
1001
+ self.logger.info(output_csv_line)
1002
+
1003
+ def get_logger(self):
1004
+ return self.logger
1005
+
1006
+
1007
+ class ExceptionCsvLogger(CsvLogger):
1008
+ def __init__(
1009
+ self,
1010
+ logger_name: str,
1011
+ directory_path: str = None,
1012
+ custom_header: str = None,
1013
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
1014
+ add_queue_handler_start_listener_multiprocessing: bool = False,
1015
+ add_queue_handler_no_listener_multiprocessing: bool = False
1016
+ ):
1017
+ """
1018
+ Initialize the ExceptionCsvLogger object.
1019
+ """
1020
+
1021
+ if custom_header:
1022
+ custom_header = f"timestamp,{custom_header},exception"
1023
+ else:
1024
+ custom_header = "timestamp,exception"
1025
+
1026
+ super().__init__(
1027
+ logger_name=logger_name,
1028
+ directory_path=directory_path,
1029
+ custom_header=custom_header,
1030
+ log_queue=log_queue,
1031
+ add_queue_handler_start_listener_multiprocessing=add_queue_handler_start_listener_multiprocessing,
1032
+ add_queue_handler_no_listener_multiprocessing=add_queue_handler_no_listener_multiprocessing
1033
+ )
1034
+
594
1035
 
595
1036
  def write(
596
1037
  self,
@@ -616,21 +1057,11 @@ class ExceptionCsvLogger:
616
1057
  message = tracebacks.get_as_string()
617
1058
 
618
1059
  if custom_csv_string:
619
- output_csv_line: str = csvs.escape_csv_line_to_string([datetime.datetime.now(), custom_csv_string, message])
1060
+ row_of_cols: list = [datetime.datetime.now(), custom_csv_string, message]
620
1061
  else:
621
- output_csv_line: str = csvs.escape_csv_line_to_string([datetime.datetime.now(), message])
622
-
623
- # If the number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header',
624
- # raise an exception.
625
- if (csvs.get_number_of_cells_in_string_line(output_csv_line) !=
626
- csvs.get_number_of_cells_in_string_line(self.header)):
627
- raise ValueError(
628
- "Number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header'.")
1062
+ row_of_cols: list = [datetime.datetime.now(), message]
629
1063
 
630
- self.logger.info(output_csv_line)
1064
+ super().write(row_of_cols)
631
1065
 
632
1066
  if stdout:
633
1067
  print_api.print_api('', error_type=True, color="red", traceback_string=True)
634
-
635
- def get_logger(self):
636
- return self.logger
@@ -327,10 +327,15 @@ class DnsServer:
327
327
  self.dns_questions_to_answers_cache = dict()
328
328
  self.logger.info("*** DNS cache cleared")
329
329
 
330
- def start(self):
330
+ def start(
331
+ self,
332
+ is_ready_multiprocessing: multiprocessing.Event = None
333
+ ):
331
334
  """
332
335
  Main DNS Server function to start it.
333
336
 
337
+ :param is_ready_multiprocessing: multiprocessing.Event: Event to signal that the DNS Server is ready.
338
+
334
339
  :return: None.
335
340
  """
336
341
 
@@ -383,6 +388,10 @@ class DnsServer:
383
388
  # receiving connections.
384
389
  main_socket_object.bind((self.listening_interface, self.listening_port))
385
390
 
391
+ if is_ready_multiprocessing:
392
+ # If the DNS Server is running in a separate process, signal that the DNS Server is ready.
393
+ is_ready_multiprocessing.set()
394
+
386
395
  while True:
387
396
  # Needed this logging line when DNS was separate process.
388
397
  # self.logger.info("Waiting to receive new requests...")
@@ -926,7 +935,8 @@ def start_dns_server_multiprocessing_worker(
926
935
  offline_mode: bool,
927
936
  cache_timeout_minutes: int,
928
937
  logging_queue: multiprocessing.Queue,
929
- logger_name: str
938
+ logger_name: str,
939
+ is_ready_multiprocessing: multiprocessing.Event=None
930
940
  ):
931
941
  # Setting the current thread name to the current process name.
932
942
  current_process_name = multiprocessing.current_process().name
@@ -953,4 +963,4 @@ def start_dns_server_multiprocessing_worker(
953
963
  time.sleep(1)
954
964
  return 1
955
965
 
956
- dns_server_instance.start()
966
+ dns_server_instance.start(is_ready_multiprocessing=is_ready_multiprocessing)
@@ -160,6 +160,13 @@ class SocketClient:
160
160
  f"Domain {self.service_name} doesn't exist - Couldn't resolve with {self.dns_servers_list}.")
161
161
  print_api.print_api(error_string, logger=self.logger, logger_method='error')
162
162
  return None, error_string
163
+ except dns.resolver.LifetimeTimeout as e:
164
+ exception_type: str = type(e).__name__
165
+ error_string = (
166
+ f"Socket Client Connect: {exception_type}: "
167
+ f"Timeout while resolving domain {self.service_name} with {self.dns_servers_list}.")
168
+ print_api.print_api(error_string, logger=self.logger, logger_method='error')
169
+ return None, error_string
163
170
 
164
171
  # If DNS was resolved correctly or DNS servers weren't specified - we can try connecting.
165
172
  # If 'connection_ip' was manually specified or resolved with 'dnspython' - the connection