atomicshop 2.19.19__py3-none-any.whl → 2.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of atomicshop might be problematic. Click here for more details.

@@ -1,9 +1,13 @@
1
1
  import logging
2
2
  import os
3
+ from logging import Logger
4
+ from logging.handlers import QueueListener
3
5
  from typing import Literal, Union
4
6
  import datetime
5
7
  import contextlib
6
8
  import threading
9
+ import queue
10
+ import multiprocessing
7
11
 
8
12
  from . import loggers, handlers, filters
9
13
  from ...file_io import csvs
@@ -17,11 +21,17 @@ class LoggingwLoggerAlreadyExistsError(Exception):
17
21
 
18
22
  # noinspection PyPep8Naming
19
23
  def create_logger(
20
- logger_name: str,
21
- file_path: str = None,
22
- directory_path: str = None,
24
+ logger_name: str = None,
25
+ get_queue_listener: bool = False,
26
+
23
27
  add_stream: bool = False,
24
28
  add_timedfile: bool = False,
29
+ add_timedfile_with_internal_queue: bool = False,
30
+ add_queue_handler: bool = False,
31
+
32
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
33
+ file_path: str = None,
34
+ directory_path: str = None,
25
35
  file_type: Literal[
26
36
  'txt',
27
37
  'csv',
@@ -47,17 +57,30 @@ def create_logger(
47
57
  delay: bool = False,
48
58
  encoding=None,
49
59
  header: str = None
50
- ) -> logging.Logger:
60
+ ) -> None | QueueListener | Logger:
51
61
  """
52
62
  Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.
53
63
 
54
64
  :param logger_name: Name of the logger.
65
+ :param get_queue_listener: bool, If set to True, QueueListener will be started with all the handlers
66
+ like 'add_timedfile' and 'add_stream', using the 'log_queue'.
67
+
68
+ Only one of the following parameters can be set at a time: 'logger_name', 'get_queue_listener'.
69
+
55
70
  :param file_path: full path to the log file. If you don't want to use the file, set it to None.
56
71
  You can set the directory_path only and then the 'logger_name' will be used as the file name with the
57
72
  'file_type' as the file extension.
58
73
  :param directory_path: full path to the directory where the log file will be saved.
59
74
  :param add_stream: bool, If set to True, StreamHandler will be added to the logger.
60
- :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger.
75
+ :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger directly.
76
+ :param add_timedfile_with_internal_queue: bool, If set to True, TimedRotatingFileHandler will be added
77
+ to the logger, but not directly.
78
+ Internal queue.Queue will be created, then used by the QueueListener, which will get the
79
+ TimedRotatingFileHandler as the handler.
80
+ Then the QueueHandler using the same internal queue will be added to the logger.
81
+ This is done to improve the multithreading compatibility.
82
+ :param add_queue_handler: bool, If set to True, QueueHandler will be added to the logger, using the 'log_queue'.
83
+ :param log_queue: queue.Queue or multiprocessing.Queue, Queue to use for the QueueHandler.
61
84
  :param file_type: string, file type of the log file. Default is 'txt'.
62
85
  'txt': Text file.
63
86
  'csv': CSV file.
@@ -133,7 +156,7 @@ def create_logger(
133
156
  error_logger = loggingw.create_logger(
134
157
  logger_name=f'{self.__class__.__name__}_CSV',
135
158
  file_path=output_log_file,
136
- add_timedfile=True,
159
+ add_timedfile_with_internal_queue=True,
137
160
  file_type='csv',
138
161
  formatter_filehandler='MESSAGE',
139
162
  header=header
@@ -159,7 +182,7 @@ def create_logger(
159
182
  logger_name=f'{self.__class__.__name__}',
160
183
  file_path=output_log_file,
161
184
  add_stream=True,
162
- add_timedfile=True,
185
+ add_timedfile_with_internal_queue=True,
163
186
  file_type='txt',
164
187
  formatter_streamhandler='DEFAULT',
165
188
  formatter_filehandler='DEFAULT'
@@ -170,16 +193,93 @@ def create_logger(
170
193
 
171
194
  if __name__ == "__main__":
172
195
  main()
196
+
197
+ ------------------------------
198
+
199
+ Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file in multiprocessing,
200
+ while the QueueListener in the main process writes to the file and outputs to the console, and the QueueHandler
201
+ in two child subprocesses sends the logs to the main process through the multiprocessing.Queue:
202
+
203
+ import sys
204
+ import multiprocessing
205
+ from atomicshop.wrappers.loggingw import loggingw
206
+
207
+
208
+ def worker1(log_queue: multiprocessing.Queue):
209
+ error_logger = loggingw.create_logger(
210
+ logger_name='network',
211
+ add_queue_handler=True,
212
+ log_queue=log_queue
213
+ )
214
+
215
+ error_logger.info("Worker1 log message for 'network' logger.")
216
+
217
+
218
+ def worker2(log_queue: multiprocessing.Queue):
219
+ error_logger = loggingw.create_logger(
220
+ logger_name='network',
221
+ add_queue_handler=True,
222
+ log_queue=log_queue
223
+ )
224
+
225
+ error_logger.info("Worker2 log message for 'network' logger.")
226
+
227
+
228
+ def main():
229
+ log_queue = multiprocessing.Queue()
230
+
231
+ queue_listener = loggingw.create_logger(
232
+ get_queue_listener=True,
233
+ add_stream=True,
234
+ add_timedfile=True,
235
+ log_queue=log_queue,
236
+ file_type='txt',
237
+ formatter_streamhandler='DEFAULT',
238
+ formatter_filehandler='DEFAULT'
239
+ )
240
+
241
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue,))
242
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue,))
243
+
244
+ process1.start()
245
+ process2.start()
246
+
247
+ process1.join()
248
+ process2.join()
249
+
250
+ queue_listener.stop()
251
+
252
+ return 0
253
+
254
+
255
+ if __name__ == "__main__":
256
+ sys.exit(main())
173
257
  """
174
258
 
259
+ if logger_name and get_queue_listener:
260
+ raise ValueError("You can't set both 'logger_name' and 'get_queue_listener'.")
261
+ if not logger_name and not get_queue_listener:
262
+ raise ValueError("You need to provide 'logger_name' or 'get_queue_listener'.")
263
+
175
264
  # Check if the logger exists before creating it.
176
- if loggers.is_logger_exists(logger_name):
177
- raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
265
+ if logger_name:
266
+ if loggers.is_logger_exists(logger_name):
267
+ raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
268
+
269
+ if not logger_name and not file_path:
270
+ raise ValueError("You need to provide 'file_path' if 'logger_name' is not set.")
271
+
272
+ if get_queue_listener and not log_queue:
273
+ raise ValueError("You need to provide 'log_queue' if 'get_queue_listener' is set to True.")
178
274
 
179
- if not directory_path and not file_path:
180
- raise ValueError("You need to provide 'directory_path' or 'file_path'.")
181
- if directory_path and file_path:
182
- raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
275
+ if add_queue_handler and not log_queue:
276
+ raise ValueError("You need to provide 'log_queue' if 'add_queue_handler' is set to True.")
277
+
278
+ if add_timedfile or add_timedfile_with_internal_queue:
279
+ if not directory_path and not file_path:
280
+ raise ValueError("You need to provide 'directory_path' or 'file_path'.")
281
+ if directory_path and file_path:
282
+ raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
183
283
 
184
284
  if directory_path:
185
285
  if directory_path.endswith(os.sep):
@@ -187,24 +287,83 @@ def create_logger(
187
287
 
188
288
  file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
189
289
 
190
- logger = get_logger_with_level(logger_name, logging_level)
290
+ # --- Add the handlers to a tuple ---
191
291
 
292
+ handlers_tuple: tuple = ()
192
293
  if add_stream:
193
- handlers.add_stream_handler(
194
- logger=logger, logging_level=logging_level, formatter=formatter_streamhandler,
294
+ stream_handler = handlers.get_stream_handler_extended(
295
+ logging_level=logging_level,
296
+ formatter=formatter_streamhandler,
195
297
  formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
196
298
 
299
+ handlers_tuple += (stream_handler,)
300
+
197
301
  if add_timedfile:
198
- handlers.add_timedfilehandler_with_queuehandler(
199
- logger=logger, file_path=file_path, logging_level=logging_level, formatter=formatter_filehandler,
200
- formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds, file_type=file_type,
302
+ timed_file_handler = handlers.get_timed_rotating_file_handler_extended(
303
+ file_path=file_path,
304
+ logging_level=logging_level,
305
+ formatter=formatter_filehandler,
306
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
307
+ file_type=file_type,
201
308
  rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
202
309
  rotation_date_format=filehandler_rotation_date_format,
203
310
  rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
204
311
  rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
205
- when=when, interval=interval, delay=delay, backupCount=backupCount, encoding=encoding, header=header)
312
+ when=when,
313
+ interval=interval,
314
+ delay=delay,
315
+ backupCount=backupCount,
316
+ encoding=encoding,
317
+ header=header
318
+ )
206
319
 
207
- return logger
320
+ handlers_tuple += (timed_file_handler,)
321
+
322
+ if add_timedfile_with_internal_queue:
323
+ timed_file_handler_with_queue = handlers.get_timed_rotating_file_handler_extended(
324
+ file_path=file_path,
325
+ logging_level=logging_level,
326
+ formatter=formatter_filehandler,
327
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
328
+ file_type=file_type,
329
+ rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
330
+ rotation_date_format=filehandler_rotation_date_format,
331
+ rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
332
+ rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
333
+ use_internal_queue_listener=True,
334
+ when=when,
335
+ interval=interval,
336
+ delay=delay,
337
+ backupCount=backupCount,
338
+ encoding=encoding,
339
+ header=header
340
+ )
341
+
342
+ handlers_tuple += (timed_file_handler_with_queue,)
343
+
344
+ if add_queue_handler:
345
+ queue_handler = handlers.get_queue_handler_extended(log_queue)
346
+ handlers_tuple += (queue_handler,)
347
+
348
+ # --- Create the logger ---
349
+
350
+ if logger_name:
351
+ logger = get_logger_with_level(logger_name, logging_level)
352
+
353
+ # Add the handlers to the logger.
354
+ for handler in handlers_tuple:
355
+ loggers.add_handler(logger, handler)
356
+
357
+ # Disable propagation from the 'root' logger, so we will not see the messages twice.
358
+ loggers.set_propagation(logger)
359
+
360
+ return logger
361
+
362
+ # --- create the QueueListener ---
363
+
364
+ if get_queue_listener:
365
+ queue_listener: logging.handlers.QueueListener = handlers.start_queue_listener_for_handlers(handlers_tuple, log_queue)
366
+ return queue_listener
208
367
 
209
368
 
210
369
  def get_logger_with_level(
@@ -276,7 +435,7 @@ def is_logger_exists(logger_name: str) -> bool:
276
435
  return loggers.is_logger_exists(logger_name)
277
436
 
278
437
 
279
- def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger:
438
+ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger | None:
280
439
  """
281
440
  Function to find the parent logger with StreamHandler.
282
441
  Example:
@@ -286,7 +445,7 @@ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> loggin
286
445
  StreamHandler from the 'parent' logger.
287
446
 
288
447
  :param logger: Logger to find the parent logger with StreamHandler.
289
- :return: Parent logger with StreamHandler.
448
+ :return: Parent logger with StreamHandler or None if the logger doesn't have StreamHandler.
290
449
  """
291
450
 
292
451
  # Start with current logger to see if it has a stream handler.
@@ -302,6 +461,10 @@ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> loggin
302
461
  # If the current logger doesn't have the stream handler, let's move to the parent.
303
462
  current_logger = current_logger.parent
304
463
 
464
+ # If none of the parent loggers have the stream handler, break the loop.
465
+ if current_logger is None:
466
+ break
467
+
305
468
  return current_logger
306
469
 
307
470
 
@@ -87,3 +87,16 @@ def is_socket_closed(socket_object) -> bool:
87
87
  return False
88
88
  except socket.error:
89
89
  return False
90
+
91
+
92
+ def get_host_name_from_ip_address(ip_address: str) -> str:
93
+ """
94
+ Get the host name from the IP address.
95
+ :param ip_address: string, IP address.
96
+ :return: string, host name.
97
+ """
98
+
99
+ host_name, alias_list, ipaddr_list = socket.gethostbyaddr(ip_address)
100
+ _ = alias_list, ipaddr_list
101
+
102
+ return host_name
@@ -6,6 +6,7 @@ import socket
6
6
  import logging
7
7
  from pathlib import Path
8
8
  from typing import Literal
9
+ import multiprocessing
9
10
 
10
11
  from ...print_api import print_api
11
12
  from ..loggingw import loggingw
@@ -45,7 +46,7 @@ class DnsStatisticsCSVWriter:
45
46
  self.csv_logger = loggingw.create_logger(
46
47
  logger_name=LOGGER_NAME,
47
48
  directory_path=statistics_directory_path,
48
- add_timedfile=True,
49
+ add_timedfile_with_internal_queue=True,
49
50
  formatter_filehandler='MESSAGE',
50
51
  file_type='csv',
51
52
  header=DNS_STATISTICS_HEADER
@@ -156,12 +157,14 @@ class DnsServer:
156
157
  offline_mode: bool = False,
157
158
  tcp_target_server_ipv4: str = '127.0.0.1',
158
159
  tcp_resolve_domain_list: list = None,
159
- request_domain_queue: queues.NonBlockQueue = None,
160
+ request_domain_queue: multiprocessing.Queue = None,
160
161
  buffer_size_receive: int = 8192,
161
162
  response_ttl: int = 60,
162
163
  dns_service_retries: int = 5,
163
164
  cache_timeout_minutes: int = 60,
164
- logger: logging.Logger = None
165
+ logger: logging.Logger = None,
166
+ logging_queue: multiprocessing.Queue = None,
167
+ logger_name: str = None
165
168
  ):
166
169
  """
167
170
  Initialize the DNS Server object with all the necessary settings.
@@ -186,14 +189,20 @@ class DnsServer:
186
189
  the domains to.
187
190
  :param tcp_resolve_domain_list: list: List of domains that will be resolved to the TCP Server.
188
191
  This means that all the requests will be resolved to the specified offline IPv4 address.
189
- :param request_domain_queue: queues.NonBlockQueue: Queue to pass all the requested domains that hit the DNS
192
+ :param request_domain_queue: multiprocessing Queue to pass all the requested domains that hit the DNS
190
193
  :param buffer_size_receive: int: Buffer size of the connection while receiving messages.
191
194
  :param response_ttl: int, Time to live of the DNS Response that will be returned. Default is 60 seconds.
192
195
  :param dns_service_retries: int, How many times the request will be sent to forwarded DNS Service on errors:
193
196
  (socket connect / request send / response receive).
194
197
  :param cache_timeout_minutes: int: Timeout in minutes to clear the DNS Cache.
195
198
  server. Each domain will be passed in the queue as a string.
199
+
196
200
  :param logger: logging.Logger: Logger object to use for logging. If not provided, a new logger will be created.
201
+ :param logging_queue: multiprocessing.Queue: Queue to pass the logs to the QueueListener.
202
+ You will use this in case you run the DNS Server in a separate process.
203
+ Of course, you need to have a QueueListener to listen to this queue.
204
+
205
+ You can pass only one of the following: 'logger', 'logging_queue'.
197
206
  """
198
207
 
199
208
  self.listening_interface: str = listening_interface
@@ -207,11 +216,16 @@ class DnsServer:
207
216
  self.resolve_to_tcp_server_only_tcp_resolve_domains: bool = resolve_to_tcp_server_only_tcp_resolve_domains
208
217
  self.resolve_to_tcp_server_all_domains: bool = resolve_to_tcp_server_all_domains
209
218
  self.resolve_regular: bool = resolve_regular
210
- self.request_domain_queue: queues.NonBlockQueue = request_domain_queue
219
+ self.request_domain_queue: multiprocessing.Queue = request_domain_queue
211
220
  self.buffer_size_receive: int = buffer_size_receive
212
221
  self.response_ttl: int = response_ttl
213
222
  self.dns_service_retries: int = dns_service_retries
214
223
  self.cache_timeout_minutes: int = cache_timeout_minutes
224
+ self.logging_queue: multiprocessing.Queue = logging_queue
225
+ self.logging_name: str = logger_name
226
+
227
+ if logger and logging_queue:
228
+ raise ValueError("You can pass only one of the following: 'logger', 'logging_queue'.")
215
229
 
216
230
  if not tcp_resolve_domain_list:
217
231
  self.tcp_resolve_domain_list = list()
@@ -242,20 +256,31 @@ class DnsServer:
242
256
  # network log of TCP Server module.
243
257
  self.dns_statistics_csv_writer = DnsStatisticsCSVWriter(statistics_directory_path=log_directory_path)
244
258
 
259
+ if not logger_name and not logger and not logging_queue:
260
+ self.logger_name = Path(__file__).stem
261
+ elif logger_name and (logger or logging_queue):
262
+ self.logger_name = f'{logger_name}.{Path(__file__).stem}'
263
+
245
264
  # Check if the logger was provided, if not, create a new logger.
246
- if not logger:
247
- self.logger = loggingw.create_logger(
265
+ if not logger and not logging_queue:
266
+ self.logger: logging.Logger = loggingw.create_logger(
248
267
  logger_name=Path(__file__).stem,
249
268
  directory_path=self.log_directory_path,
250
269
  add_stream=True,
251
- add_timedfile=True,
270
+ add_timedfile_with_internal_queue=True,
252
271
  formatter_streamhandler='DEFAULT',
253
272
  formatter_filehandler='DEFAULT',
254
273
  backupCount=backupCount_log_files_x_days
255
274
  )
256
- else:
275
+ elif logger:
257
276
  # Create child logger for the provided logger with the module's name.
258
- self.logger: logging.Logger = loggingw.get_logger_with_level(f'{logger.name}.{Path(__file__).stem}')
277
+ self.logger: logging.Logger = loggingw.get_logger_with_level(self.logger_name)
278
+ elif logging_queue:
279
+ self.logger: logging.Logger = loggingw.create_logger(
280
+ logger_name=self.logger_name,
281
+ add_queue_handler=True,
282
+ log_queue=self.logging_queue
283
+ )
259
284
 
260
285
  self.test_config()
261
286
 
@@ -271,6 +296,9 @@ class DnsServer:
271
296
  raise_if_all_false=True
272
297
  )
273
298
  except ValueError as e:
299
+ print_api(f'DnsConfigurationValuesError: {str(e)}', error_type=True, color="red", logger=self.logger)
300
+ # Wait for the message to be printed and saved to file.
301
+ time.sleep(1)
274
302
  raise DnsConfigurationValuesError(e)
275
303
 
276
304
  ips_ports: list[str] = [f'{self.listening_interface}:{self.listening_port}']
@@ -279,7 +307,12 @@ class DnsServer:
279
307
  error_messages: list = list()
280
308
  for port, process_info in port_in_use.items():
281
309
  error_messages.append(f"Port [{port}] is already in use by process: {process_info}")
282
- raise DnsPortInUseError("\n".join(error_messages))
310
+
311
+ message = "\n".join(error_messages)
312
+ print_api(f'DnsPortInUseError: {str(e)}', error_type=True, color="red", logger=self.logger)
313
+ # Wait for the message to be printed and saved to file.
314
+ time.sleep(1)
315
+ raise DnsPortInUseError(message)
283
316
 
284
317
  def thread_worker_empty_dns_cache(self, function_sleep_time: int):
285
318
  """
@@ -457,7 +490,7 @@ class DnsServer:
457
490
  if forward_to_tcp_server:
458
491
  # If the request is forwarded to TCP server, then we'll put the domain in the domain queue.
459
492
  # self.request_domain_queue.put(question_domain)
460
- self.request_domain_queue.queue = question_domain
493
+ self.request_domain_queue.put(question_domain)
461
494
 
462
495
  # Make DNS response that will refer TCP traffic to our server
463
496
  dns_built_response = DNSRecord(
@@ -846,3 +879,58 @@ class DnsServer:
846
879
  self.logger.info("==========")
847
880
  pass
848
881
  continue
882
+
883
+
884
+ # noinspection PyPep8Naming
885
+ def start_dns_server_multiprocessing_worker(
886
+ listening_interface: str,
887
+ listening_port: int,
888
+ log_directory_path: str,
889
+ backupCount_log_files_x_days: int,
890
+ forwarding_dns_service_ipv4: str,
891
+ tcp_target_server_ipv4: str,
892
+ # Passing the engine domain list to DNS server to work with.
893
+ # 'list' function re-initializes the current list, or else it will be the same instance object.
894
+ tcp_resolve_domain_list: list,
895
+ offline_mode: bool,
896
+ resolve_to_tcp_server_only_tcp_resolve_domains: bool,
897
+ resolve_to_tcp_server_all_domains: bool,
898
+ resolve_regular: bool,
899
+ cache_timeout_minutes: int,
900
+ request_domain_queue: multiprocessing.Queue,
901
+ logging_queue: multiprocessing.Queue,
902
+ logger_name: str
903
+ ):
904
+ # Setting the current thread name to the current process name.
905
+ current_process_name = multiprocessing.current_process().name
906
+ threading.current_thread().name = current_process_name
907
+
908
+
909
+
910
+ try:
911
+ dns_server_instance = DnsServer(
912
+ listening_interface=listening_interface,
913
+ listening_port=listening_port,
914
+ log_directory_path=log_directory_path,
915
+ backupCount_log_files_x_days=backupCount_log_files_x_days,
916
+ forwarding_dns_service_ipv4=forwarding_dns_service_ipv4,
917
+ tcp_target_server_ipv4=tcp_target_server_ipv4,
918
+ # Passing the engine domain list to DNS server to work with.
919
+ # 'list' function re-initializes the current list, or else it will be the same instance object.
920
+ tcp_resolve_domain_list=tcp_resolve_domain_list,
921
+ offline_mode=offline_mode,
922
+ resolve_to_tcp_server_only_tcp_resolve_domains=resolve_to_tcp_server_only_tcp_resolve_domains,
923
+ resolve_to_tcp_server_all_domains=resolve_to_tcp_server_all_domains,
924
+ resolve_regular=resolve_regular,
925
+ cache_timeout_minutes=cache_timeout_minutes,
926
+ request_domain_queue=request_domain_queue,
927
+ logging_queue=logging_queue,
928
+ logger_name=logger_name
929
+ )
930
+ except (DnsPortInUseError, DnsConfigurationValuesError) as e:
931
+ print_api(e, error_type=True, color="red", logger=dns_server_instance.logger)
932
+ # Wait for the message to be printed and saved to file.
933
+ time.sleep(1)
934
+ return 1
935
+
936
+ dns_server_instance.start()
@@ -3,6 +3,8 @@ import select
3
3
  from typing import Literal, Union
4
4
  from pathlib import Path
5
5
  import logging
6
+ import socket
7
+ import multiprocessing
6
8
 
7
9
  from ..psutilw import networks
8
10
  from ..certauthw import certauthw
@@ -67,7 +69,8 @@ class SocketWrapper:
67
69
  logger: logging.Logger = None,
68
70
  exceptions_logger: loggingw.ExceptionCsvLogger = None,
69
71
  statistics_logs_directory: str = None,
70
- request_domain_from_dns_server_queue: queues.NonBlockQueue = None
72
+ request_domain_from_dns_server_queue: multiprocessing.Queue = None,
73
+ engines_domains: dict = None
71
74
  ):
72
75
  """
73
76
  Socket Wrapper class that will be used to create sockets, listen on them, accept connections and send them to
@@ -160,10 +163,18 @@ class SocketWrapper:
160
163
 
161
164
  statistics_writer: statistics_csv.StatisticsCSVWriter object, there is a logger object that
162
165
  will be used to write the statistics file.
163
- :param request_domain_from_dns_server_queue: queues.NonBlockQueue object, non-blocking queue that will be used
166
+ :param request_domain_from_dns_server_queue: multiprocessing queue that will be used
164
167
  to get the domain name that was requested from the DNS server (atomicshop.wrappers.socketw.dns_server).
165
168
  This is used to get the domain name that got to the DNS server and set it to the socket in case SNI
166
169
  was empty (in the SNIHandler class to set the 'server_hostname' for the socket).
170
+ :param engines_domains: dictionary of engines that will be used to process the requests. Example:
171
+ [
172
+ {'this_is_engine_name': ['example.com', 'example.org']},
173
+ {'this_is_engine_name2': ['example2.com', 'example2.org']}
174
+ ]
175
+
176
+ the 'engine_name' for statistics.csv file will be taken from the key of the dictionary, while correlated
177
+ by the domain name from the list in the dictionary.
167
178
  """
168
179
 
169
180
  self.listening_interface: str = listening_interface
@@ -199,7 +210,8 @@ class SocketWrapper:
199
210
  self.statistics_logs_directory: str = statistics_logs_directory
200
211
  self.forwarding_dns_service_ipv4_list___only_for_localhost = (
201
212
  forwarding_dns_service_ipv4_list___only_for_localhost)
202
- self.request_domain_from_dns_server_queue = request_domain_from_dns_server_queue
213
+ self.request_domain_from_dns_server_queue: multiprocessing.Queue = request_domain_from_dns_server_queue
214
+ self.engines_domains: dict = engines_domains
203
215
 
204
216
  self.socket_object = None
205
217
 
@@ -232,7 +244,7 @@ class SocketWrapper:
232
244
  logger_name='SocketWrapper',
233
245
  directory_path=self.statistics_logs_directory,
234
246
  add_stream=True,
235
- add_timedfile=True,
247
+ add_timedfile_with_internal_queue=True,
236
248
  formatter_streamhandler='DEFAULT',
237
249
  formatter_filehandler='DEFAULT'
238
250
  )
@@ -452,10 +464,9 @@ class SocketWrapper:
452
464
  # Get the domain queue. Tried using "Queue.Queue" object, but it stomped the SSL Sockets
453
465
  # from accepting connections.
454
466
  domain_from_dns_server = None
455
- if self.request_domain_from_dns_server_queue.queue:
456
- domain_from_dns_server = self.request_domain_from_dns_server_queue.queue
457
- self.logger.info(
458
- f"Requested domain from DNS Server: {self.request_domain_from_dns_server_queue.queue}")
467
+ if self.request_domain_from_dns_server_queue is not None:
468
+ domain_from_dns_server = self.request_domain_from_dns_server_queue.get()
469
+ self.logger.info(f"Requested domain from DNS Server: {domain_from_dns_server}")
459
470
 
460
471
  # Wait from any connection on "accept()".
461
472
  # 'client_socket' is socket or ssl socket, 'client_address' is a tuple (ip_address, port).
@@ -477,6 +488,11 @@ class SocketWrapper:
477
488
  logger=self.logger)
478
489
  process_name = get_command_instance.get_process_name(print_kwargs={'logger': self.logger})
479
490
 
491
+ source_ip: str = client_address[0]
492
+ source_hostname: str = socket.gethostbyaddr(source_ip)[0]
493
+ engine_name: str = get_engine_name(domain_from_dns_server, self.engines_domains)
494
+ dest_port: int = listening_socket_object.getsockname()[1]
495
+
480
496
  # If 'accept()' function worked well, SSL worked well, then 'client_socket' won't be empty.
481
497
  if client_socket:
482
498
  # Get the protocol type from the socket.
@@ -527,10 +543,21 @@ class SocketWrapper:
527
543
  print_kwargs={'logger': self.logger}
528
544
  )
529
545
 
546
+ # If the 'domain_from_dns_server' is empty, it means that the 'engine_name' is not set.
547
+ # In this case we will set the 'engine_name' to from the SNI.
548
+ if engine_name == '':
549
+ sni_hostname: str = ssl_client_socket.server_hostname
550
+ if sni_hostname:
551
+ engine_name = get_engine_name(sni_hostname, self.engines_domains)
552
+
530
553
  if accept_error_message:
531
554
  # Write statistics after wrap is there was an error.
532
555
  self.statistics_writer.write_accept_error(
556
+ engine=engine_name,
557
+ source_host=source_hostname,
558
+ source_ip=source_ip,
533
559
  error_message=accept_error_message,
560
+ dest_port=str(dest_port),
534
561
  host=domain_from_dns_server,
535
562
  process_name=process_name)
536
563
 
@@ -564,7 +591,11 @@ class SocketWrapper:
564
591
  else:
565
592
  # Write statistics after accept.
566
593
  self.statistics_writer.write_accept_error(
594
+ engine=engine_name,
595
+ source_host=source_hostname,
596
+ source_ip=source_ip,
567
597
  error_message=accept_error_message,
598
+ dest_port=str(dest_port),
568
599
  host=domain_from_dns_server,
569
600
  process_name=process_name)
570
601
  except Exception as e:
@@ -601,3 +632,22 @@ def before_socket_thread_worker(
601
632
  callable_function(*thread_args)
602
633
  except Exception as e:
603
634
  exceptions_logger.write(e)
635
+
636
+
637
+ def get_engine_name(domain: str, engines_domains: dict):
638
+ """
639
+ Function that will get the engine name from the domain name.
640
+ :param domain: string, domain name.
641
+ :param engines_domains: dictionary, dictionary that contains the engine names and domains. Example:
642
+ [
643
+ {'this_is_engine_name': ['example.com', 'example.org']},
644
+ {'this_is_engine_name2': ['example2.com', 'example2.org']}
645
+ ]
646
+ :return: string, engine name.
647
+ """
648
+
649
+ for engine_name, engine_domain_list in engines_domains.items():
650
+ if any(engine_domain in domain for engine_domain in engine_domain_list):
651
+ return engine_name
652
+
653
+ return ''