atomicshop 2.19.19__py3-none-any.whl → 2.20.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic. Click here for more details.

@@ -1,9 +1,13 @@
1
1
  import logging
2
2
  import os
3
+ from logging import Logger
4
+ from logging.handlers import QueueListener
3
5
  from typing import Literal, Union
4
6
  import datetime
5
7
  import contextlib
6
8
  import threading
9
+ import queue
10
+ import multiprocessing
7
11
 
8
12
  from . import loggers, handlers, filters
9
13
  from ...file_io import csvs
@@ -17,11 +21,17 @@ class LoggingwLoggerAlreadyExistsError(Exception):
17
21
 
18
22
  # noinspection PyPep8Naming
19
23
  def create_logger(
20
- logger_name: str,
21
- file_path: str = None,
22
- directory_path: str = None,
24
+ logger_name: str = None,
25
+ get_queue_listener: bool = False,
26
+
23
27
  add_stream: bool = False,
24
28
  add_timedfile: bool = False,
29
+ add_timedfile_with_internal_queue: bool = False,
30
+ add_queue_handler: bool = False,
31
+
32
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
33
+ file_path: str = None,
34
+ directory_path: str = None,
25
35
  file_type: Literal[
26
36
  'txt',
27
37
  'csv',
@@ -47,17 +57,30 @@ def create_logger(
47
57
  delay: bool = False,
48
58
  encoding=None,
49
59
  header: str = None
50
- ) -> logging.Logger:
60
+ ) -> None | QueueListener | Logger:
51
61
  """
52
62
  Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.
53
63
 
54
64
  :param logger_name: Name of the logger.
65
+ :param get_queue_listener: bool, If set to True, QueueListener will be started with all the handlers
66
+ like 'add_timedfile' and 'add_stream', using the 'log_queue'.
67
+
68
+ Only one of the following parameters can be set at a time: 'logger_name', 'get_queue_listener'.
69
+
55
70
  :param file_path: full path to the log file. If you don't want to use the file, set it to None.
56
71
  You can set the directory_path only and then the 'logger_name' will be used as the file name with the
57
72
  'file_type' as the file extension.
58
73
  :param directory_path: full path to the directory where the log file will be saved.
59
74
  :param add_stream: bool, If set to True, StreamHandler will be added to the logger.
60
- :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger.
75
+ :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger directly.
76
+ :param add_timedfile_with_internal_queue: bool, If set to True, TimedRotatingFileHandler will be added
77
+ to the logger, but not directly.
78
+ Internal queue.Queue will be created, then used by the QueueListener, which will get the
79
+ TimedRotatingFileHandler as the handler.
80
+ Then the QueueHandler using the same internal queue will be added to the logger.
81
+ This is done to improve the multithreading compatibility.
82
+ :param add_queue_handler: bool, If set to True, QueueHandler will be added to the logger, using the 'log_queue'.
83
+ :param log_queue: queue.Queue or multiprocessing.Queue, Queue to use for the QueueHandler.
61
84
  :param file_type: string, file type of the log file. Default is 'txt'.
62
85
  'txt': Text file.
63
86
  'csv': CSV file.
@@ -133,7 +156,7 @@ def create_logger(
133
156
  error_logger = loggingw.create_logger(
134
157
  logger_name=f'{self.__class__.__name__}_CSV',
135
158
  file_path=output_log_file,
136
- add_timedfile=True,
159
+ add_timedfile_with_internal_queue=True,
137
160
  file_type='csv',
138
161
  formatter_filehandler='MESSAGE',
139
162
  header=header
@@ -159,7 +182,7 @@ def create_logger(
159
182
  logger_name=f'{self.__class__.__name__}',
160
183
  file_path=output_log_file,
161
184
  add_stream=True,
162
- add_timedfile=True,
185
+ add_timedfile_with_internal_queue=True,
163
186
  file_type='txt',
164
187
  formatter_streamhandler='DEFAULT',
165
188
  formatter_filehandler='DEFAULT'
@@ -170,16 +193,93 @@ def create_logger(
170
193
 
171
194
  if __name__ == "__main__":
172
195
  main()
196
+
197
+ ------------------------------
198
+
199
+ Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file in multiprocessing,
200
+ while the QueueListener in the main process writes to the file and outputs to the console, and the QueueHandler
201
+ in two child subprocesses sends the logs to the main process through the multiprocessing.Queue:
202
+
203
+ import sys
204
+ import multiprocessing
205
+ from atomicshop.wrappers.loggingw import loggingw
206
+
207
+
208
+ def worker1(log_queue: multiprocessing.Queue):
209
+ error_logger = loggingw.create_logger(
210
+ logger_name='network',
211
+ add_queue_handler=True,
212
+ log_queue=log_queue
213
+ )
214
+
215
+ error_logger.info("Worker1 log message for 'network' logger.")
216
+
217
+
218
+ def worker2(log_queue: multiprocessing.Queue):
219
+ error_logger = loggingw.create_logger(
220
+ logger_name='network',
221
+ add_queue_handler=True,
222
+ log_queue=log_queue
223
+ )
224
+
225
+ error_logger.info("Worker2 log message for 'network' logger.")
226
+
227
+
228
+ def main():
229
+ log_queue = multiprocessing.Queue()
230
+
231
+ queue_listener = loggingw.create_logger(
232
+ get_queue_listener=True,
233
+ add_stream=True,
234
+ add_timedfile=True,
235
+ log_queue=log_queue,
236
+ file_type='txt',
237
+ formatter_streamhandler='DEFAULT',
238
+ formatter_filehandler='DEFAULT'
239
+ )
240
+
241
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue,))
242
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue,))
243
+
244
+ process1.start()
245
+ process2.start()
246
+
247
+ process1.join()
248
+ process2.join()
249
+
250
+ queue_listener.stop()
251
+
252
+ return 0
253
+
254
+
255
+ if __name__ == "__main__":
256
+ sys.exit(main())
173
257
  """
174
258
 
259
+ if logger_name and get_queue_listener:
260
+ raise ValueError("You can't set both 'logger_name' and 'get_queue_listener'.")
261
+ if not logger_name and not get_queue_listener:
262
+ raise ValueError("You need to provide 'logger_name' or 'get_queue_listener'.")
263
+
175
264
  # Check if the logger exists before creating it.
176
- if loggers.is_logger_exists(logger_name):
177
- raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
265
+ if logger_name:
266
+ if loggers.is_logger_exists(logger_name):
267
+ raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
268
+
269
+ if not logger_name and not file_path:
270
+ raise ValueError("You need to provide 'file_path' if 'logger_name' is not set.")
271
+
272
+ if get_queue_listener and not log_queue:
273
+ raise ValueError("You need to provide 'log_queue' if 'get_queue_listener' is set to True.")
178
274
 
179
- if not directory_path and not file_path:
180
- raise ValueError("You need to provide 'directory_path' or 'file_path'.")
181
- if directory_path and file_path:
182
- raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
275
+ if add_queue_handler and not log_queue:
276
+ raise ValueError("You need to provide 'log_queue' if 'add_queue_handler' is set to True.")
277
+
278
+ if add_timedfile or add_timedfile_with_internal_queue:
279
+ if not directory_path and not file_path:
280
+ raise ValueError("You need to provide 'directory_path' or 'file_path'.")
281
+ if directory_path and file_path:
282
+ raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
183
283
 
184
284
  if directory_path:
185
285
  if directory_path.endswith(os.sep):
@@ -187,24 +287,83 @@ def create_logger(
187
287
 
188
288
  file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
189
289
 
190
- logger = get_logger_with_level(logger_name, logging_level)
290
+ # --- Add the handlers to a tuple ---
191
291
 
292
+ handlers_tuple: tuple = ()
192
293
  if add_stream:
193
- handlers.add_stream_handler(
194
- logger=logger, logging_level=logging_level, formatter=formatter_streamhandler,
294
+ stream_handler = handlers.get_stream_handler_extended(
295
+ logging_level=logging_level,
296
+ formatter=formatter_streamhandler,
195
297
  formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
196
298
 
299
+ handlers_tuple += (stream_handler,)
300
+
197
301
  if add_timedfile:
198
- handlers.add_timedfilehandler_with_queuehandler(
199
- logger=logger, file_path=file_path, logging_level=logging_level, formatter=formatter_filehandler,
200
- formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds, file_type=file_type,
302
+ timed_file_handler = handlers.get_timed_rotating_file_handler_extended(
303
+ file_path=file_path,
304
+ logging_level=logging_level,
305
+ formatter=formatter_filehandler,
306
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
307
+ file_type=file_type,
201
308
  rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
202
309
  rotation_date_format=filehandler_rotation_date_format,
203
310
  rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
204
311
  rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
205
- when=when, interval=interval, delay=delay, backupCount=backupCount, encoding=encoding, header=header)
312
+ when=when,
313
+ interval=interval,
314
+ delay=delay,
315
+ backupCount=backupCount,
316
+ encoding=encoding,
317
+ header=header
318
+ )
206
319
 
207
- return logger
320
+ handlers_tuple += (timed_file_handler,)
321
+
322
+ if add_timedfile_with_internal_queue:
323
+ timed_file_handler_with_queue = handlers.get_timed_rotating_file_handler_extended(
324
+ file_path=file_path,
325
+ logging_level=logging_level,
326
+ formatter=formatter_filehandler,
327
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
328
+ file_type=file_type,
329
+ rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
330
+ rotation_date_format=filehandler_rotation_date_format,
331
+ rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
332
+ rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
333
+ use_internal_queue_listener=True,
334
+ when=when,
335
+ interval=interval,
336
+ delay=delay,
337
+ backupCount=backupCount,
338
+ encoding=encoding,
339
+ header=header
340
+ )
341
+
342
+ handlers_tuple += (timed_file_handler_with_queue,)
343
+
344
+ if add_queue_handler:
345
+ queue_handler = handlers.get_queue_handler_extended(log_queue)
346
+ handlers_tuple += (queue_handler,)
347
+
348
+ # --- Create the logger ---
349
+
350
+ if logger_name:
351
+ logger = get_logger_with_level(logger_name, logging_level)
352
+
353
+ # Add the handlers to the logger.
354
+ for handler in handlers_tuple:
355
+ loggers.add_handler(logger, handler)
356
+
357
+ # Disable propagation from the 'root' logger, so we will not see the messages twice.
358
+ loggers.set_propagation(logger)
359
+
360
+ return logger
361
+
362
+ # --- create the QueueListener ---
363
+
364
+ if get_queue_listener:
365
+ queue_listener: logging.handlers.QueueListener = handlers.start_queue_listener_for_handlers(handlers_tuple, log_queue)
366
+ return queue_listener
208
367
 
209
368
 
210
369
  def get_logger_with_level(
@@ -276,7 +435,7 @@ def is_logger_exists(logger_name: str) -> bool:
276
435
  return loggers.is_logger_exists(logger_name)
277
436
 
278
437
 
279
- def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger:
438
+ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger | None:
280
439
  """
281
440
  Function to find the parent logger with StreamHandler.
282
441
  Example:
@@ -286,7 +445,7 @@ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> loggin
286
445
  StreamHandler from the 'parent' logger.
287
446
 
288
447
  :param logger: Logger to find the parent logger with StreamHandler.
289
- :return: Parent logger with StreamHandler.
448
+ :return: Parent logger with StreamHandler or None if the logger doesn't have StreamHandler.
290
449
  """
291
450
 
292
451
  # Start with current logger to see if it has a stream handler.
@@ -302,6 +461,10 @@ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> loggin
302
461
  # If the current logger doesn't have the stream handler, let's move to the parent.
303
462
  current_logger = current_logger.parent
304
463
 
464
+ # If none of the parent loggers have the stream handler, break the loop.
465
+ if current_logger is None:
466
+ break
467
+
305
468
  return current_logger
306
469
 
307
470
 
@@ -12,6 +12,7 @@ from bs4 import BeautifulSoup
12
12
 
13
13
  from . import engine, base, combos
14
14
  from ...basics import threads, multiprocesses
15
+ from ...web import USER_AGENTS
15
16
 
16
17
 
17
18
  def get_text_from_html_tag(url: str, tag_name: str, attribute: str, value: str) -> str:
@@ -218,10 +219,12 @@ def _fetch_content(
218
219
  with sync_playwright() as p:
219
220
  browser = p.chromium.launch(headless=headless) # Set headless=True if you don't want to see the browser
220
221
 
222
+ user_agent: str = USER_AGENTS['Windows_Chrome_Latest']
223
+
221
224
  if text_fetch_method == "playwright_copypaste":
222
- context = browser.new_context(permissions=["clipboard-read", "clipboard-write"])
225
+ context = browser.new_context(permissions=["clipboard-read", "clipboard-write"], user_agent=user_agent)
223
226
  else:
224
- context = browser.new_context()
227
+ context = browser.new_context(user_agent=user_agent)
225
228
 
226
229
  page = context.new_page()
227
230
 
@@ -87,3 +87,16 @@ def is_socket_closed(socket_object) -> bool:
87
87
  return False
88
88
  except socket.error:
89
89
  return False
90
+
91
+
92
+ def get_host_name_from_ip_address(ip_address: str) -> str:
93
+ """
94
+ Get the host name from the IP address.
95
+ :param ip_address: string, IP address.
96
+ :return: string, host name.
97
+ """
98
+
99
+ host_name, alias_list, ipaddr_list = socket.gethostbyaddr(ip_address)
100
+ _ = alias_list, ipaddr_list
101
+
102
+ return host_name
@@ -6,6 +6,7 @@ import socket
6
6
  import logging
7
7
  from pathlib import Path
8
8
  from typing import Literal
9
+ import multiprocessing
9
10
 
10
11
  from ...print_api import print_api
11
12
  from ..loggingw import loggingw
@@ -45,7 +46,7 @@ class DnsStatisticsCSVWriter:
45
46
  self.csv_logger = loggingw.create_logger(
46
47
  logger_name=LOGGER_NAME,
47
48
  directory_path=statistics_directory_path,
48
- add_timedfile=True,
49
+ add_timedfile_with_internal_queue=True,
49
50
  formatter_filehandler='MESSAGE',
50
51
  file_type='csv',
51
52
  header=DNS_STATISTICS_HEADER
@@ -156,12 +157,14 @@ class DnsServer:
156
157
  offline_mode: bool = False,
157
158
  tcp_target_server_ipv4: str = '127.0.0.1',
158
159
  tcp_resolve_domain_list: list = None,
159
- request_domain_queue: queues.NonBlockQueue = None,
160
+ request_domain_queue: multiprocessing.Queue = None,
160
161
  buffer_size_receive: int = 8192,
161
162
  response_ttl: int = 60,
162
163
  dns_service_retries: int = 5,
163
164
  cache_timeout_minutes: int = 60,
164
- logger: logging.Logger = None
165
+ logger: logging.Logger = None,
166
+ logging_queue: multiprocessing.Queue = None,
167
+ logger_name: str = None
165
168
  ):
166
169
  """
167
170
  Initialize the DNS Server object with all the necessary settings.
@@ -186,14 +189,20 @@ class DnsServer:
186
189
  the domains to.
187
190
  :param tcp_resolve_domain_list: list: List of domains that will be resolved to the TCP Server.
188
191
  This means that all the requests will be resolved to the specified offline IPv4 address.
189
- :param request_domain_queue: queues.NonBlockQueue: Queue to pass all the requested domains that hit the DNS
192
+ :param request_domain_queue: multiprocessing Queue to pass all the requested domains that hit the DNS
190
193
  :param buffer_size_receive: int: Buffer size of the connection while receiving messages.
191
194
  :param response_ttl: int, Time to live of the DNS Response that will be returned. Default is 60 seconds.
192
195
  :param dns_service_retries: int, How many times the request will be sent to forwarded DNS Service on errors:
193
196
  (socket connect / request send / response receive).
194
197
  :param cache_timeout_minutes: int: Timeout in minutes to clear the DNS Cache.
195
198
  server. Each domain will be passed in the queue as a string.
199
+
196
200
  :param logger: logging.Logger: Logger object to use for logging. If not provided, a new logger will be created.
201
+ :param logging_queue: multiprocessing.Queue: Queue to pass the logs to the QueueListener.
202
+ You will use this in case you run the DNS Server in a separate process.
203
+ Of course, you need to have a QueueListener to listen to this queue.
204
+
205
+ You can pass only one of the following: 'logger', 'logging_queue'.
197
206
  """
198
207
 
199
208
  self.listening_interface: str = listening_interface
@@ -207,11 +216,16 @@ class DnsServer:
207
216
  self.resolve_to_tcp_server_only_tcp_resolve_domains: bool = resolve_to_tcp_server_only_tcp_resolve_domains
208
217
  self.resolve_to_tcp_server_all_domains: bool = resolve_to_tcp_server_all_domains
209
218
  self.resolve_regular: bool = resolve_regular
210
- self.request_domain_queue: queues.NonBlockQueue = request_domain_queue
219
+ self.request_domain_queue: multiprocessing.Queue = request_domain_queue
211
220
  self.buffer_size_receive: int = buffer_size_receive
212
221
  self.response_ttl: int = response_ttl
213
222
  self.dns_service_retries: int = dns_service_retries
214
223
  self.cache_timeout_minutes: int = cache_timeout_minutes
224
+ self.logging_queue: multiprocessing.Queue = logging_queue
225
+ self.logging_name: str = logger_name
226
+
227
+ if logger and logging_queue:
228
+ raise ValueError("You can pass only one of the following: 'logger', 'logging_queue'.")
215
229
 
216
230
  if not tcp_resolve_domain_list:
217
231
  self.tcp_resolve_domain_list = list()
@@ -242,20 +256,31 @@ class DnsServer:
242
256
  # network log of TCP Server module.
243
257
  self.dns_statistics_csv_writer = DnsStatisticsCSVWriter(statistics_directory_path=log_directory_path)
244
258
 
259
+ if not logger_name and not logger and not logging_queue:
260
+ self.logger_name = Path(__file__).stem
261
+ elif logger_name and (logger or logging_queue):
262
+ self.logger_name = f'{logger_name}.{Path(__file__).stem}'
263
+
245
264
  # Check if the logger was provided, if not, create a new logger.
246
- if not logger:
247
- self.logger = loggingw.create_logger(
265
+ if not logger and not logging_queue:
266
+ self.logger: logging.Logger = loggingw.create_logger(
248
267
  logger_name=Path(__file__).stem,
249
268
  directory_path=self.log_directory_path,
250
269
  add_stream=True,
251
- add_timedfile=True,
270
+ add_timedfile_with_internal_queue=True,
252
271
  formatter_streamhandler='DEFAULT',
253
272
  formatter_filehandler='DEFAULT',
254
273
  backupCount=backupCount_log_files_x_days
255
274
  )
256
- else:
275
+ elif logger:
257
276
  # Create child logger for the provided logger with the module's name.
258
- self.logger: logging.Logger = loggingw.get_logger_with_level(f'{logger.name}.{Path(__file__).stem}')
277
+ self.logger: logging.Logger = loggingw.get_logger_with_level(self.logger_name)
278
+ elif logging_queue:
279
+ self.logger: logging.Logger = loggingw.create_logger(
280
+ logger_name=self.logger_name,
281
+ add_queue_handler=True,
282
+ log_queue=self.logging_queue
283
+ )
259
284
 
260
285
  self.test_config()
261
286
 
@@ -271,6 +296,9 @@ class DnsServer:
271
296
  raise_if_all_false=True
272
297
  )
273
298
  except ValueError as e:
299
+ print_api(f'DnsConfigurationValuesError: {str(e)}', error_type=True, color="red", logger=self.logger)
300
+ # Wait for the message to be printed and saved to file.
301
+ time.sleep(1)
274
302
  raise DnsConfigurationValuesError(e)
275
303
 
276
304
  ips_ports: list[str] = [f'{self.listening_interface}:{self.listening_port}']
@@ -279,7 +307,12 @@ class DnsServer:
279
307
  error_messages: list = list()
280
308
  for port, process_info in port_in_use.items():
281
309
  error_messages.append(f"Port [{port}] is already in use by process: {process_info}")
282
- raise DnsPortInUseError("\n".join(error_messages))
310
+
311
+ message = "\n".join(error_messages)
312
+ print_api(f'DnsPortInUseError: {str(e)}', error_type=True, color="red", logger=self.logger)
313
+ # Wait for the message to be printed and saved to file.
314
+ time.sleep(1)
315
+ raise DnsPortInUseError(message)
283
316
 
284
317
  def thread_worker_empty_dns_cache(self, function_sleep_time: int):
285
318
  """
@@ -457,7 +490,7 @@ class DnsServer:
457
490
  if forward_to_tcp_server:
458
491
  # If the request is forwarded to TCP server, then we'll put the domain in the domain queue.
459
492
  # self.request_domain_queue.put(question_domain)
460
- self.request_domain_queue.queue = question_domain
493
+ self.request_domain_queue.put(question_domain)
461
494
 
462
495
  # Make DNS response that will refer TCP traffic to our server
463
496
  dns_built_response = DNSRecord(
@@ -846,3 +879,58 @@ class DnsServer:
846
879
  self.logger.info("==========")
847
880
  pass
848
881
  continue
882
+
883
+
884
+ # noinspection PyPep8Naming
885
+ def start_dns_server_multiprocessing_worker(
886
+ listening_interface: str,
887
+ listening_port: int,
888
+ log_directory_path: str,
889
+ backupCount_log_files_x_days: int,
890
+ forwarding_dns_service_ipv4: str,
891
+ tcp_target_server_ipv4: str,
892
+ # Passing the engine domain list to DNS server to work with.
893
+ # 'list' function re-initializes the current list, or else it will be the same instance object.
894
+ tcp_resolve_domain_list: list,
895
+ offline_mode: bool,
896
+ resolve_to_tcp_server_only_tcp_resolve_domains: bool,
897
+ resolve_to_tcp_server_all_domains: bool,
898
+ resolve_regular: bool,
899
+ cache_timeout_minutes: int,
900
+ request_domain_queue: multiprocessing.Queue,
901
+ logging_queue: multiprocessing.Queue,
902
+ logger_name: str
903
+ ):
904
+ # Setting the current thread name to the current process name.
905
+ current_process_name = multiprocessing.current_process().name
906
+ threading.current_thread().name = current_process_name
907
+
908
+
909
+
910
+ try:
911
+ dns_server_instance = DnsServer(
912
+ listening_interface=listening_interface,
913
+ listening_port=listening_port,
914
+ log_directory_path=log_directory_path,
915
+ backupCount_log_files_x_days=backupCount_log_files_x_days,
916
+ forwarding_dns_service_ipv4=forwarding_dns_service_ipv4,
917
+ tcp_target_server_ipv4=tcp_target_server_ipv4,
918
+ # Passing the engine domain list to DNS server to work with.
919
+ # 'list' function re-initializes the current list, or else it will be the same instance object.
920
+ tcp_resolve_domain_list=tcp_resolve_domain_list,
921
+ offline_mode=offline_mode,
922
+ resolve_to_tcp_server_only_tcp_resolve_domains=resolve_to_tcp_server_only_tcp_resolve_domains,
923
+ resolve_to_tcp_server_all_domains=resolve_to_tcp_server_all_domains,
924
+ resolve_regular=resolve_regular,
925
+ cache_timeout_minutes=cache_timeout_minutes,
926
+ request_domain_queue=request_domain_queue,
927
+ logging_queue=logging_queue,
928
+ logger_name=logger_name
929
+ )
930
+ except (DnsPortInUseError, DnsConfigurationValuesError) as e:
931
+ print_api(e, error_type=True, color="red", logger=dns_server_instance.logger)
932
+ # Wait for the message to be printed and saved to file.
933
+ time.sleep(1)
934
+ return 1
935
+
936
+ dns_server_instance.start()