atomicshop 2.15.11__py3-none-any.whl → 3.10.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (221) hide show
  1. atomicshop/__init__.py +1 -1
  2. atomicshop/{addons/mains → a_mains}/FACT/update_extract.py +3 -2
  3. atomicshop/a_mains/dns_gateway_setting.py +11 -0
  4. atomicshop/a_mains/get_local_tcp_ports.py +85 -0
  5. atomicshop/a_mains/github_wrapper.py +11 -0
  6. atomicshop/a_mains/install_ca_certificate.py +172 -0
  7. atomicshop/a_mains/process_from_port.py +119 -0
  8. atomicshop/a_mains/set_default_dns_gateway.py +90 -0
  9. atomicshop/a_mains/update_config_toml.py +38 -0
  10. atomicshop/basics/ansi_escape_codes.py +3 -1
  11. atomicshop/basics/argparse_template.py +2 -0
  12. atomicshop/basics/booleans.py +27 -30
  13. atomicshop/basics/bytes_arrays.py +43 -0
  14. atomicshop/basics/classes.py +149 -1
  15. atomicshop/basics/enums.py +2 -2
  16. atomicshop/basics/exceptions.py +5 -1
  17. atomicshop/basics/list_of_classes.py +29 -0
  18. atomicshop/basics/multiprocesses.py +374 -50
  19. atomicshop/basics/strings.py +72 -3
  20. atomicshop/basics/threads.py +14 -0
  21. atomicshop/basics/tracebacks.py +13 -3
  22. atomicshop/certificates.py +153 -52
  23. atomicshop/config_init.py +11 -6
  24. atomicshop/console_user_response.py +7 -14
  25. atomicshop/consoles.py +9 -0
  26. atomicshop/datetimes.py +1 -1
  27. atomicshop/diff_check.py +3 -3
  28. atomicshop/dns.py +128 -3
  29. atomicshop/etws/_pywintrace_fix.py +17 -0
  30. atomicshop/etws/trace.py +40 -42
  31. atomicshop/etws/traces/trace_dns.py +56 -44
  32. atomicshop/etws/traces/trace_tcp.py +130 -0
  33. atomicshop/file_io/csvs.py +27 -5
  34. atomicshop/file_io/docxs.py +34 -17
  35. atomicshop/file_io/file_io.py +31 -17
  36. atomicshop/file_io/jsons.py +49 -0
  37. atomicshop/file_io/tomls.py +139 -0
  38. atomicshop/filesystem.py +616 -291
  39. atomicshop/get_process_list.py +3 -3
  40. atomicshop/http_parse.py +149 -93
  41. atomicshop/ip_addresses.py +6 -1
  42. atomicshop/mitm/centered_settings.py +132 -0
  43. atomicshop/mitm/config_static.py +207 -0
  44. atomicshop/mitm/config_toml_editor.py +55 -0
  45. atomicshop/mitm/connection_thread_worker.py +875 -357
  46. atomicshop/mitm/engines/__parent/parser___parent.py +4 -17
  47. atomicshop/mitm/engines/__parent/recorder___parent.py +108 -51
  48. atomicshop/mitm/engines/__parent/requester___parent.py +116 -0
  49. atomicshop/mitm/engines/__parent/responder___parent.py +75 -114
  50. atomicshop/mitm/engines/__reference_general/parser___reference_general.py +10 -7
  51. atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +5 -5
  52. atomicshop/mitm/engines/__reference_general/requester___reference_general.py +47 -0
  53. atomicshop/mitm/engines/__reference_general/responder___reference_general.py +95 -13
  54. atomicshop/mitm/engines/create_module_template.py +58 -14
  55. atomicshop/mitm/import_config.py +359 -139
  56. atomicshop/mitm/initialize_engines.py +160 -80
  57. atomicshop/mitm/message.py +64 -23
  58. atomicshop/mitm/mitm_main.py +892 -0
  59. atomicshop/mitm/recs_files.py +183 -0
  60. atomicshop/mitm/shared_functions.py +4 -10
  61. atomicshop/mitm/ssh_tester.py +82 -0
  62. atomicshop/mitm/statistic_analyzer.py +136 -40
  63. atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +265 -83
  64. atomicshop/monitor/checks/dns.py +1 -1
  65. atomicshop/networks.py +671 -0
  66. atomicshop/on_exit.py +39 -9
  67. atomicshop/package_mains_processor.py +84 -0
  68. atomicshop/permissions/permissions.py +22 -0
  69. atomicshop/permissions/ubuntu_permissions.py +239 -0
  70. atomicshop/permissions/win_permissions.py +33 -0
  71. atomicshop/print_api.py +24 -42
  72. atomicshop/process.py +24 -6
  73. atomicshop/process_poller/process_pool.py +0 -1
  74. atomicshop/process_poller/simple_process_pool.py +204 -5
  75. atomicshop/python_file_patcher.py +1 -1
  76. atomicshop/python_functions.py +27 -75
  77. atomicshop/speech_recognize.py +8 -0
  78. atomicshop/ssh_remote.py +158 -172
  79. atomicshop/system_resource_monitor.py +61 -47
  80. atomicshop/system_resources.py +8 -8
  81. atomicshop/tempfiles.py +1 -2
  82. atomicshop/urls.py +6 -0
  83. atomicshop/venvs.py +28 -0
  84. atomicshop/versioning.py +27 -0
  85. atomicshop/web.py +98 -27
  86. atomicshop/web_apis/google_custom_search.py +44 -0
  87. atomicshop/web_apis/google_llm.py +188 -0
  88. atomicshop/websocket_parse.py +450 -0
  89. atomicshop/wrappers/certauthw/certauth.py +1 -0
  90. atomicshop/wrappers/cryptographyw.py +29 -8
  91. atomicshop/wrappers/ctyping/etw_winapi/const.py +97 -47
  92. atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py +178 -49
  93. atomicshop/wrappers/ctyping/file_details_winapi.py +67 -0
  94. atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
  95. atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +2 -2
  96. atomicshop/wrappers/ctyping/setup_device.py +466 -0
  97. atomicshop/wrappers/ctyping/win_console.py +39 -0
  98. atomicshop/wrappers/dockerw/dockerw.py +113 -2
  99. atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
  100. atomicshop/wrappers/elasticsearchw/elastic_infra.py +75 -0
  101. atomicshop/wrappers/elasticsearchw/elasticsearchw.py +2 -20
  102. atomicshop/wrappers/factw/get_file_data.py +12 -5
  103. atomicshop/wrappers/factw/install/install_after_restart.py +89 -5
  104. atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +20 -14
  105. atomicshop/wrappers/githubw.py +537 -54
  106. atomicshop/wrappers/loggingw/consts.py +1 -1
  107. atomicshop/wrappers/loggingw/filters.py +23 -0
  108. atomicshop/wrappers/loggingw/formatters.py +12 -0
  109. atomicshop/wrappers/loggingw/handlers.py +214 -107
  110. atomicshop/wrappers/loggingw/loggers.py +19 -0
  111. atomicshop/wrappers/loggingw/loggingw.py +860 -22
  112. atomicshop/wrappers/loggingw/reading.py +134 -112
  113. atomicshop/wrappers/mongodbw/mongo_infra.py +31 -0
  114. atomicshop/wrappers/mongodbw/mongodbw.py +1324 -36
  115. atomicshop/wrappers/netshw.py +271 -0
  116. atomicshop/wrappers/playwrightw/engine.py +34 -19
  117. atomicshop/wrappers/playwrightw/infra.py +5 -0
  118. atomicshop/wrappers/playwrightw/javascript.py +7 -3
  119. atomicshop/wrappers/playwrightw/keyboard.py +14 -0
  120. atomicshop/wrappers/playwrightw/scenarios.py +172 -5
  121. atomicshop/wrappers/playwrightw/waits.py +9 -7
  122. atomicshop/wrappers/powershell_networking.py +80 -0
  123. atomicshop/wrappers/psutilw/processes.py +37 -1
  124. atomicshop/wrappers/psutilw/psutil_networks.py +85 -0
  125. atomicshop/wrappers/pyopensslw.py +9 -2
  126. atomicshop/wrappers/pywin32w/cert_store.py +116 -0
  127. atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
  128. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +3 -105
  129. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +3 -57
  130. atomicshop/wrappers/pywin32w/wmis/msft_netipaddress.py +113 -0
  131. atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +259 -0
  132. atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +112 -0
  133. atomicshop/wrappers/pywin32w/wmis/wmi_helpers.py +236 -0
  134. atomicshop/wrappers/socketw/accepter.py +21 -7
  135. atomicshop/wrappers/socketw/certificator.py +216 -150
  136. atomicshop/wrappers/socketw/creator.py +190 -50
  137. atomicshop/wrappers/socketw/dns_server.py +491 -182
  138. atomicshop/wrappers/socketw/exception_wrapper.py +45 -52
  139. atomicshop/wrappers/socketw/process_getter.py +86 -0
  140. atomicshop/wrappers/socketw/receiver.py +144 -102
  141. atomicshop/wrappers/socketw/sender.py +65 -35
  142. atomicshop/wrappers/socketw/sni.py +334 -165
  143. atomicshop/wrappers/socketw/socket_base.py +134 -0
  144. atomicshop/wrappers/socketw/socket_client.py +137 -95
  145. atomicshop/wrappers/socketw/socket_server_tester.py +11 -7
  146. atomicshop/wrappers/socketw/socket_wrapper.py +717 -116
  147. atomicshop/wrappers/socketw/ssl_base.py +15 -14
  148. atomicshop/wrappers/socketw/statistics_csv.py +148 -17
  149. atomicshop/wrappers/sysmonw.py +1 -1
  150. atomicshop/wrappers/ubuntu_terminal.py +65 -26
  151. atomicshop/wrappers/win_auditw.py +189 -0
  152. atomicshop/wrappers/winregw/__init__.py +0 -0
  153. atomicshop/wrappers/winregw/winreg_installed_software.py +58 -0
  154. atomicshop/wrappers/winregw/winreg_network.py +232 -0
  155. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/METADATA +31 -51
  156. atomicshop-3.10.5.dist-info/RECORD +306 -0
  157. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/WHEEL +1 -1
  158. atomicshop/_basics_temp.py +0 -101
  159. atomicshop/a_installs/win/fibratus.py +0 -9
  160. atomicshop/a_installs/win/mongodb.py +0 -9
  161. atomicshop/a_installs/win/pycharm.py +0 -9
  162. atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
  163. atomicshop/addons/a_setup_scripts/install_pywintrace_0.3.cmd +0 -2
  164. atomicshop/addons/mains/__pycache__/install_fibratus_windows.cpython-312.pyc +0 -0
  165. atomicshop/addons/mains/__pycache__/msi_unpacker.cpython-312.pyc +0 -0
  166. atomicshop/addons/mains/install_docker_rootless_ubuntu.py +0 -11
  167. atomicshop/addons/mains/install_docker_ubuntu_main_sudo.py +0 -11
  168. atomicshop/addons/mains/install_elastic_search_and_kibana_ubuntu.py +0 -10
  169. atomicshop/addons/mains/install_wsl_ubuntu_lts_admin.py +0 -9
  170. atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
  171. atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
  172. atomicshop/addons/package_setup/Setup.cmd +0 -7
  173. atomicshop/archiver/_search_in_zip.py +0 -189
  174. atomicshop/archiver/archiver.py +0 -34
  175. atomicshop/archiver/search_in_archive.py +0 -250
  176. atomicshop/archiver/sevenz_app_w.py +0 -86
  177. atomicshop/archiver/sevenzs.py +0 -44
  178. atomicshop/archiver/zips.py +0 -293
  179. atomicshop/file_types.py +0 -24
  180. atomicshop/mitm/config_editor.py +0 -37
  181. atomicshop/mitm/engines/create_module_template_example.py +0 -13
  182. atomicshop/mitm/initialize_mitm_server.py +0 -268
  183. atomicshop/pbtkmultifile_argparse.py +0 -88
  184. atomicshop/permissions.py +0 -151
  185. atomicshop/script_as_string_processor.py +0 -38
  186. atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
  187. atomicshop/ssh_scripts/process_from_port.py +0 -27
  188. atomicshop/wrappers/_process_wrapper_curl.py +0 -27
  189. atomicshop/wrappers/_process_wrapper_tar.py +0 -21
  190. atomicshop/wrappers/dockerw/install_docker.py +0 -209
  191. atomicshop/wrappers/elasticsearchw/infrastructure.py +0 -265
  192. atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -232
  193. atomicshop/wrappers/ffmpegw.py +0 -125
  194. atomicshop/wrappers/fibratusw/install.py +0 -81
  195. atomicshop/wrappers/mongodbw/infrastructure.py +0 -53
  196. atomicshop/wrappers/mongodbw/install_mongodb.py +0 -190
  197. atomicshop/wrappers/msiw.py +0 -149
  198. atomicshop/wrappers/nodejsw/install_nodejs.py +0 -139
  199. atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
  200. atomicshop/wrappers/psutilw/networks.py +0 -45
  201. atomicshop/wrappers/pycharmw.py +0 -81
  202. atomicshop/wrappers/socketw/base.py +0 -59
  203. atomicshop/wrappers/socketw/get_process.py +0 -107
  204. atomicshop/wrappers/wslw.py +0 -191
  205. atomicshop-2.15.11.dist-info/RECORD +0 -302
  206. /atomicshop/{addons/mains → a_mains}/FACT/factw_fact_extractor_docker_image_main_sudo.py +0 -0
  207. /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
  208. /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
  209. /atomicshop/{addons → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
  210. /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
  211. /atomicshop/{addons → a_mains/addons}/process_list/compile.cmd +0 -0
  212. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.dll +0 -0
  213. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.exp +0 -0
  214. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.lib +0 -0
  215. /atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +0 -0
  216. /atomicshop/{archiver → permissions}/__init__.py +0 -0
  217. /atomicshop/{wrappers/fibratusw → web_apis}/__init__.py +0 -0
  218. /atomicshop/wrappers/{nodejsw → pywin32w/wmis}/__init__.py +0 -0
  219. /atomicshop/wrappers/pywin32w/{wmi_win32process.py → wmis/win32process.py} +0 -0
  220. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info/licenses}/LICENSE.txt +0 -0
  221. {atomicshop-2.15.11.dist-info → atomicshop-3.10.5.dist-info}/top_level.txt +0 -0
@@ -1,16 +1,42 @@
1
1
  import logging
2
2
  import os
3
- from typing import Literal, Union
3
+ from logging import Logger
4
+ from logging.handlers import QueueListener
5
+ from typing import Literal, Union, Callable
6
+ import datetime
7
+ import contextlib
8
+ import threading
9
+ import queue
10
+ import multiprocessing
11
+ import time
4
12
 
5
- from . import loggers, handlers
13
+ from . import loggers, handlers, filters
14
+ from ...file_io import csvs
15
+ from ...basics import tracebacks, ansi_escape_codes
16
+ from ... import print_api
6
17
 
7
18
 
19
+ QUEUE_LISTENER_PROCESS_NAME_PREFIX: str = "QueueListener-"
20
+
21
+
22
+ class LoggingwLoggerAlreadyExistsError(Exception):
23
+ pass
24
+
25
+
26
+ # noinspection PyPep8Naming
8
27
  def create_logger(
9
- logger_name: str,
10
- file_path: str = None,
11
- directory_path: str = None,
28
+ logger_name: str = None,
29
+ get_queue_listener: bool = False,
30
+ start_queue_listener_multiprocess_add_queue_handler: bool = False,
31
+
12
32
  add_stream: bool = False,
13
33
  add_timedfile: bool = False,
34
+ add_timedfile_with_internal_queue: bool = False,
35
+ add_queue_handler: bool = False,
36
+
37
+ log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
38
+ file_path: str = None,
39
+ directory_path: str = None,
14
40
  file_type: Literal[
15
41
  'txt',
16
42
  'csv',
@@ -28,24 +54,40 @@ def create_logger(
28
54
  formatter_filehandler_use_nanoseconds: bool = True,
29
55
  filehandler_rotate_at_rollover_time: bool = True,
30
56
  filehandler_rotation_date_format: str = None,
31
- filehandler_rotation_callback_namer_function: callable = None,
57
+ filehandler_rotation_callback_namer_function: Callable = None,
32
58
  filehandler_rotation_use_default_namer_function: bool = True,
33
59
  when: str = "midnight",
34
60
  interval: int = 1,
61
+ backupCount: int = 0,
35
62
  delay: bool = False,
36
63
  encoding=None,
37
64
  header: str = None
38
- ) -> logging.Logger:
65
+ ) -> None | QueueListener | Logger:
39
66
  """
40
67
  Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.
41
68
 
42
69
  :param logger_name: Name of the logger.
70
+ :param get_queue_listener: bool, If set to True, QueueListener will be started with all the handlers
71
+ like 'add_timedfile' and 'add_stream', using the 'log_queue'.
72
+ :param start_queue_listener_multiprocess_add_queue_handler: bool, If set to True, the QueueListener will be
73
+ started in a separate multiprocessing process, without you handling this manually.
74
+
75
+ Only one of the following parameters can be set at a time: 'logger_name', 'get_queue_listener'.
76
+
43
77
  :param file_path: full path to the log file. If you don't want to use the file, set it to None.
44
78
  You can set the directory_path only and then the 'logger_name' will be used as the file name with the
45
79
  'file_type' as the file extension.
46
80
  :param directory_path: full path to the directory where the log file will be saved.
47
81
  :param add_stream: bool, If set to True, StreamHandler will be added to the logger.
48
- :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger.
82
+ :param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger directly.
83
+ :param add_timedfile_with_internal_queue: bool, If set to True, TimedRotatingFileHandler will be added
84
+ to the logger, but not directly.
85
+ Internal queue.Queue will be created, then used by the QueueListener, which will get the
86
+ TimedRotatingFileHandler as the handler.
87
+ Then the QueueHandler using the same internal queue will be added to the logger.
88
+ This is done to improve the multithreading compatibility.
89
+ :param add_queue_handler: bool, If set to True, QueueHandler will be added to the logger, using the 'log_queue'.
90
+ :param log_queue: queue.Queue or multiprocessing.Queue, Queue to use for the QueueHandler.
49
91
  :param file_type: string, file type of the log file. Default is 'txt'.
50
92
  'txt': Text file.
51
93
  'csv': CSV file.
@@ -73,7 +115,9 @@ def create_logger(
73
115
  in the formatter in case you provide 'asctime' element.
74
116
  :param filehandler_rotate_at_rollover_time: bool,
75
117
  If set to True, the log file will be rotated at the rollover time, even if there's nothing to write.
118
+ This behavior overrides the TimedRotatingFileHandler default behavior on doRollover.
76
119
  If set to False, the log file will be rotated after 'when' time, but only when event occurs.
120
+ This is the default doRollover behavior of the TimedRotatingFileHandler.
77
121
  :param filehandler_rotation_date_format: string, Date format to use for the log file rotation.
78
122
  Example for 'when="midnight"': the default date format is '%Y-%m-%d', resulting in filename on rotation like:
79
123
  "test.log.2021-11-25"
@@ -95,6 +139,9 @@ def create_logger(
95
139
  :param interval: int, Interval to rotate the log file. Default is 1.
96
140
  If 'when="midnight"' and 'interval=1', then the log file will be rotated every midnight.
97
141
  If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
142
+ :param backupCount: int, Number of backup files to keep. Default is 0.
143
+ If backupCount is > 0, when rollover is done, no more than backupCount files are kept, the oldest are deleted.
144
+ If backupCount is == 0, all the backup files will be kept.
98
145
  :param delay: bool, If set to True, the log file will be created only if there's something to write.
99
146
  :param encoding: string, Encoding to use for the log file. Default is None.
100
147
  :param header: string, Header to write to the log file.
@@ -116,7 +163,7 @@ def create_logger(
116
163
  error_logger = loggingw.create_logger(
117
164
  logger_name=f'{self.__class__.__name__}_CSV',
118
165
  file_path=output_log_file,
119
- add_timedfile=True,
166
+ add_timedfile_with_internal_queue=True,
120
167
  file_type='csv',
121
168
  formatter_filehandler='MESSAGE',
122
169
  header=header
@@ -142,7 +189,7 @@ def create_logger(
142
189
  logger_name=f'{self.__class__.__name__}',
143
190
  file_path=output_log_file,
144
191
  add_stream=True,
145
- add_timedfile=True,
192
+ add_timedfile_with_internal_queue=True,
146
193
  file_type='txt',
147
194
  formatter_streamhandler='DEFAULT',
148
195
  formatter_filehandler='DEFAULT'
@@ -153,12 +200,282 @@ def create_logger(
153
200
 
154
201
  if __name__ == "__main__":
155
202
  main()
203
+
204
+ ------------------------------
205
+
206
+ Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file in multiprocessing,
207
+ while QueueListener is in the main process writes to the file and outputs to the console and the QueueHandler
208
+ in two child subprocesses sends the logs to the main process through the multiprocessing.Queue:
209
+
210
+ import sys
211
+ import multiprocessing
212
+ from atomicshop.wrappers.loggingw import loggingw
213
+
214
+
215
+ def worker1(
216
+ log_queue: multiprocessing.Queue,
217
+ logger_name: str
218
+ ):
219
+ error_logger = loggingw.create_logger(
220
+ logger_name=logger_name,
221
+ add_queue_handler=True,
222
+ log_queue=log_queue
223
+ )
224
+
225
+ error_logger.info("Worker1 log message for 'network' logger.")
226
+
227
+
228
+ def worker2(
229
+ log_queue: multiprocessing.Queue,
230
+ logger_name: str
231
+ ):
232
+ error_logger = loggingw.create_logger(
233
+ logger_name=logger_name,
234
+ add_queue_handler=True,
235
+ log_queue=log_queue
236
+ )
237
+
238
+ error_logger.info("Worker2 log message for 'network' logger.")
239
+
240
+
241
+ def main():
242
+ log_queue = multiprocessing.Queue()
243
+
244
+ queue_listener = loggingw.create_logger(
245
+ get_queue_listener=True,
246
+ add_stream=True,
247
+ add_timedfile=True,
248
+ log_queue=log_queue,
249
+ file_type='txt',
250
+ formatter_streamhandler='DEFAULT',
251
+ formatter_filehandler='DEFAULT'
252
+ )
253
+
254
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
255
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
256
+
257
+ process1.start()
258
+ process2.start()
259
+
260
+ process1.join()
261
+ process2.join()
262
+
263
+ # If we exit the function, we need to stop the listener
264
+ queue_listener.stop()
265
+
266
+ return 0
267
+
268
+
269
+ if __name__ == "__main__":
270
+ sys.exit(main())
271
+
272
+ --------------------------------------------------
273
+
274
+ Example if you need to start a QueueListener in multiprocessing, which requires less boilerplate code, and Python's
275
+ garbage collector handles closing the listener without the need to call the 'stop()' method:
276
+
277
+ import sys
278
+ import multiprocessing
279
+ from atomicshop.wrappers.loggingw import loggingw
280
+
281
+
282
+ def worker1(
283
+ log_queue: multiprocessing.Queue,
284
+ logger_name: str
285
+ ):
286
+ error_logger = loggingw.create_logger(
287
+ logger_name=logger_name,
288
+ add_queue_handler=True,
289
+ log_queue=log_queue
290
+ )
291
+
292
+ error_logger.info("Worker1 log message for 'network' logger.")
293
+
294
+
295
+ def worker2(
296
+ log_queue: multiprocessing.Queue,
297
+ logger_name: str
298
+ ):
299
+ error_logger = loggingw.create_logger(
300
+ logger_name=logger_name,
301
+ add_queue_handler=True,
302
+ log_queue=log_queue
303
+ )
304
+
305
+ error_logger.info("Worker2 log message for 'network' logger.")
306
+
307
+
308
+ def main():
309
+ log_queue = multiprocessing.Queue()
310
+ logger_name: str = 'network'
311
+
312
+ loggingw.start_queue_listener_in_multiprocessing(
313
+ logger_name=logger_name,
314
+ add_stream=True,
315
+ add_timedfile=True,
316
+ log_queue=log_queue,
317
+ file_type='txt',
318
+ formatter_streamhandler='DEFAULT',
319
+ formatter_filehandler='DEFAULT'
320
+ )
321
+
322
+ # If you want you can get the QueueListener processes.
323
+ # listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
324
+ # Or if you started several listeners, you can get all of them:
325
+ # listener_processes_list: list = loggingw.get_listener_processes()
326
+
327
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, logger_name))
328
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, logger_name))
329
+
330
+ process1.start()
331
+ process2.start()
332
+
333
+ process1.join()
334
+ process2.join()
335
+
336
+ return 0
337
+
338
+
339
+ if __name__ == "__main__":
340
+ sys.exit(main())
341
+
342
+ ---------------------------------------------------
343
+
344
+ Or you can use the 'create_logger' function with the 'start_queue_listener_multiprocess_add_queue_handler=True' parameter,
345
+ which will start the QueueListener in a separate multiprocessing process automatically if you want to use the
346
+ queue handler logger also in the main process:
347
+
348
+ import sys
349
+ import multiprocessing
350
+ from atomicshop.wrappers.loggingw import loggingw
351
+
352
+
353
+ def worker1(
354
+ log_queue: multiprocessing.Queue,
355
+ logger_name: str
356
+ ):
357
+ error_logger = loggingw.create_logger(
358
+ logger_name=logger_name,
359
+ add_queue_handler=True,
360
+ log_queue=log_queue
361
+ )
362
+
363
+ error_logger.info("Worker1 log message for 'network' logger.")
364
+
365
+
366
+ def worker2(
367
+ log_queue: multiprocessing.Queue,
368
+ logger_name: str
369
+ ):
370
+ error_logger = loggingw.create_logger(
371
+ logger_name=logger_name,
372
+ add_queue_handler=True,
373
+ log_queue=log_queue
374
+ )
375
+
376
+ error_logger.info("Worker2 log message for 'network' logger.")
377
+
378
+
379
+ def main():
380
+ log_queue = multiprocessing.Queue()
381
+
382
+ main_logger: Logger = loggingw.create_logger(
383
+ logger_name='network',
384
+ start_queue_listener_multiprocess_add_queue_handler=True,
385
+ add_stream=True,
386
+ add_timedfile=True,
387
+ log_queue=log_queue,
388
+ file_type='txt',
389
+ formatter_streamhandler='DEFAULT',
390
+ formatter_filehandler='DEFAULT'
391
+ )
392
+
393
+ main_logger.info("Main process log message for 'network' logger.")
394
+
395
+ # If you want you can get the QueueListener processes.
396
+ # listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
397
+ # Or if you started several listeners, you can get all of them:
398
+ # listener_processes_list: list = loggingw.get_listener_processes()
399
+
400
+ process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
401
+ process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
402
+
403
+ process1.start()
404
+ process2.start()
405
+
406
+ process1.join()
407
+ process2.join()
408
+
409
+ return 0
410
+
411
+
412
+ if __name__ == "__main__":
413
+ sys.exit(main())
156
414
  """
157
415
 
158
- if not directory_path and not file_path:
159
- raise ValueError("You need to provide 'directory_path' or 'file_path'.")
160
- if directory_path and file_path:
161
- raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
416
+ if start_queue_listener_multiprocess_add_queue_handler and (get_queue_listener or add_queue_handler):
417
+ raise ValueError("You don't need to set 'get_queue_listener' or 'add_queue_handler' "
418
+ "when setting 'start_queue_listener_multiprocess_add_queue_handler'.")
419
+
420
+ if start_queue_listener_multiprocess_add_queue_handler:
421
+ logger_instance: Logger = _create_logger_with_queue_handler(
422
+ logger_name=logger_name,
423
+ log_queue=log_queue
424
+ )
425
+
426
+ # Start the QueueListener in a separate multiprocessing process.
427
+ start_queue_listener_in_multiprocessing(
428
+ logger_name=logger_name,
429
+ add_stream=add_stream,
430
+ add_timedfile=add_timedfile,
431
+ add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,
432
+ log_queue=log_queue,
433
+ file_path=file_path,
434
+ directory_path=directory_path,
435
+ file_type=file_type,
436
+ logging_level=logging_level,
437
+ formatter_streamhandler=formatter_streamhandler,
438
+ formatter_filehandler=formatter_filehandler,
439
+ formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
440
+ formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
441
+ filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
442
+ filehandler_rotation_date_format=filehandler_rotation_date_format,
443
+ filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
444
+ filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
445
+ when=when,
446
+ interval=interval,
447
+ backupCount=backupCount,
448
+ delay=delay,
449
+ encoding=encoding,
450
+ header=header
451
+ )
452
+
453
+ return logger_instance
454
+
455
+ if logger_name and get_queue_listener and not start_queue_listener_multiprocess_add_queue_handler:
456
+ raise ValueError("You can't set both 'logger_name' and 'get_queue_listener'.")
457
+ if not logger_name and not get_queue_listener:
458
+ raise ValueError("You need to provide 'logger_name' or 'get_queue_listener'.")
459
+
460
+ # Check if the logger exists before creating it.
461
+ if logger_name:
462
+ if loggers.is_logger_exists(logger_name):
463
+ raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
464
+
465
+ if not logger_name and not file_path:
466
+ raise ValueError("You need to provide 'file_path' if 'logger_name' is not set.")
467
+
468
+ if get_queue_listener and not log_queue:
469
+ raise ValueError("You need to provide 'log_queue' if 'get_queue_listener' is set to True.")
470
+
471
+ if add_queue_handler and not log_queue:
472
+ raise ValueError("You need to provide 'log_queue' if 'add_queue_handler' is set to True.")
473
+
474
+ if add_timedfile or add_timedfile_with_internal_queue:
475
+ if not directory_path and not file_path:
476
+ raise ValueError("You need to provide 'directory_path' or 'file_path'.")
477
+ if directory_path and file_path:
478
+ raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
162
479
 
163
480
  if directory_path:
164
481
  if directory_path.endswith(os.sep):
@@ -166,24 +483,100 @@ def create_logger(
166
483
 
167
484
  file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
168
485
 
169
- logger = get_logger_with_level(logger_name, logging_level)
486
+ # --- Add the handlers to a tuple ---
170
487
 
488
+ handlers_tuple: tuple = ()
171
489
  if add_stream:
172
- handlers.add_stream_handler(
173
- logger=logger, logging_level=logging_level, formatter=formatter_streamhandler,
490
+ stream_handler = handlers.get_stream_handler_extended(
491
+ logging_level=logging_level,
492
+ formatter=formatter_streamhandler,
174
493
  formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
175
494
 
495
+ handlers_tuple += (stream_handler,)
496
+
176
497
  if add_timedfile:
177
- handlers.add_timedfilehandler_with_queuehandler(
178
- logger=logger, file_path=file_path, logging_level=logging_level, formatter=formatter_filehandler,
179
- formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds, file_type=file_type,
498
+ timed_file_handler = handlers.get_timed_rotating_file_handler_extended(
499
+ file_path=file_path,
500
+ logging_level=logging_level,
501
+ formatter=formatter_filehandler,
502
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
503
+ file_type=file_type,
180
504
  rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
181
505
  rotation_date_format=filehandler_rotation_date_format,
182
506
  rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
183
507
  rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
184
- when=when, interval=interval, delay=delay, encoding=encoding, header=header)
508
+ when=when,
509
+ interval=interval,
510
+ delay=delay,
511
+ backupCount=backupCount,
512
+ encoding=encoding,
513
+ header=header
514
+ )
185
515
 
186
- return logger
516
+ handlers_tuple += (timed_file_handler,)
517
+
518
+ if add_timedfile_with_internal_queue:
519
+ timed_file_handler_with_queue = handlers.get_timed_rotating_file_handler_extended(
520
+ file_path=file_path,
521
+ logging_level=logging_level,
522
+ formatter=formatter_filehandler,
523
+ formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
524
+ file_type=file_type,
525
+ rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
526
+ rotation_date_format=filehandler_rotation_date_format,
527
+ rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
528
+ rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
529
+ use_internal_queue_listener=True,
530
+ when=when,
531
+ interval=interval,
532
+ delay=delay,
533
+ backupCount=backupCount,
534
+ encoding=encoding,
535
+ header=header
536
+ )
537
+
538
+ handlers_tuple += (timed_file_handler_with_queue,)
539
+
540
+ if add_queue_handler:
541
+ queue_handler = handlers.get_queue_handler_extended(log_queue)
542
+ handlers_tuple += (queue_handler,)
543
+
544
+ # --- Create the logger ---
545
+
546
+ if logger_name:
547
+ logger = get_logger_with_level(logger_name, logging_level)
548
+
549
+ # Add the handlers to the logger.
550
+ for handler in handlers_tuple:
551
+ loggers.add_handler(logger, handler)
552
+
553
+ # Disable propagation from the 'root' logger, so we will not see the messages twice.
554
+ loggers.set_propagation(logger)
555
+
556
+ return logger
557
+
558
+ # --- create the QueueListener ---
559
+
560
+ if get_queue_listener:
561
+ queue_listener: logging.handlers.QueueListener = handlers.start_queue_listener_for_handlers(handlers_tuple, log_queue)
562
+ return queue_listener
563
+
564
+
565
+ def _create_logger_with_queue_handler(
566
+ logger_name: str,
567
+ log_queue: Union[queue.Queue, multiprocessing.Queue]
568
+ ) -> Logger:
569
+ """
570
+ The function to create a logger with QueueHandler so the QueueListener can be started later in multiprocessing.
571
+ """
572
+
573
+ logger_instance: Logger = create_logger(
574
+ logger_name=logger_name,
575
+ add_queue_handler=True,
576
+ log_queue=log_queue
577
+ )
578
+
579
+ return logger_instance
187
580
 
188
581
 
189
582
  def get_logger_with_level(
@@ -226,6 +619,135 @@ def disable_default_logger():
226
619
  logging.disable(logging.CRITICAL)
227
620
 
228
621
 
622
def start_queue_listener_in_multiprocessing(
        logger_name: str = None,

        add_stream: bool = False,
        add_timedfile: bool = False,
        add_timedfile_with_internal_queue: bool = False,

        log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
        file_path: str = None,
        directory_path: str = None,
        file_type: Literal[
            'txt',
            'csv',
            'json'] = 'txt',
        logging_level="DEBUG",
        formatter_streamhandler: Union[
            Literal['MESSAGE', 'DEFAULT'],
            str,
            None] = None,
        formatter_filehandler: Union[
            Literal['MESSAGE', 'DEFAULT'],
            str,
            None] = None,
        formatter_streamhandler_use_nanoseconds: bool = True,
        formatter_filehandler_use_nanoseconds: bool = True,
        filehandler_rotate_at_rollover_time: bool = True,
        filehandler_rotation_date_format: str = None,
        filehandler_rotation_callback_namer_function: Callable = None,
        filehandler_rotation_use_default_namer_function: bool = True,
        when: str = "midnight",
        interval: int = 1,
        backupCount: int = 0,
        delay: bool = False,
        encoding=None,
        header: str = None
) -> multiprocessing.Process:
    """
    Start a logging QueueListener inside a dedicated daemon process and block until it is ready.

    The parameters mirror the 'create_logger' function and are forwarded to it (with
    'get_queue_listener=True') inside the spawned worker process, which builds the
    requested handlers and runs the QueueListener that drains 'log_queue'.

    :param logger_name: Name of the logger. Used to name the QueueListener process and,
        together with 'directory_path', to derive 'file_path' when 'file_path' is not given.
    :return: The started multiprocessing.Process that runs the QueueListener.
        NOTE(review): the process is daemonic, so it is killed abruptly at interpreter
        exit — records still queued at that moment are presumably lost; confirm callers
        terminate it explicitly when a clean flush is required.
    """

    # Derive the log file path from the directory and logger name when not given explicitly.
    if not file_path and directory_path and logger_name:
        file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"

    # Keyword arguments forwarded verbatim to 'create_logger' in the worker process.
    # 'get_queue_listener=True' makes 'create_logger' return a QueueListener built
    # around the handlers selected by the 'add_*' flags.
    worker_kwargs = dict(
        get_queue_listener=True,

        add_stream=add_stream,
        add_timedfile=add_timedfile,
        add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,

        log_queue=log_queue,
        file_path=file_path,
        file_type=file_type,
        logging_level=logging_level,
        formatter_streamhandler=formatter_streamhandler,
        formatter_filehandler=formatter_filehandler,
        formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
        formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
        filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
        filehandler_rotation_date_format=filehandler_rotation_date_format,
        filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
        filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
        when=when,
        interval=interval,
        backupCount=backupCount,
        delay=delay,
        encoding=encoding,
        header=header,
    )

    # NOTE: 'multiprocessing.Event' is a factory function, not a class, so this
    # annotation is informational only; the runtime object is a synchronize.Event.
    is_ready: multiprocessing.Event = multiprocessing.Event()

    # Create a new process to run the QueueListener.
    # The process name embeds the prefix so 'get_listener_processes' can find it later.
    queue_listener_process = multiprocessing.Process(
        target=_queue_listener_multiprocessing_worker,
        name=f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}",
        args=(is_ready,),
        kwargs=worker_kwargs,
        daemon=True
    )
    queue_listener_process.start()

    # Wait until the QueueListener is loaded and ready, so callers can log
    # immediately after this function returns without losing records.
    is_ready.wait()

    return queue_listener_process
711
+
712
+
713
def _queue_listener_multiprocessing_worker(
        is_ready: multiprocessing.Event,
        **kwargs
):
    """
    Entry point of the QueueListener process.

    Builds the QueueListener through 'create_logger' (called with the kwargs prepared
    by 'start_queue_listener_in_multiprocessing'), signals the parent process that the
    listener is up, then idles until interrupted, stopping the listener on the way out.

    :param is_ready: Event set once the listener is constructed and running.
    :param kwargs: Forwarded verbatim to 'create_logger'.
    """

    listener = create_logger(**kwargs)
    # Unblock the parent: from this point records put on the queue will be handled.
    is_ready.set()

    try:
        # Nothing to do in this process besides hosting the listener's thread,
        # so just sleep in a loop to keep it alive.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        pass
    finally:
        listener.stop()
727
+
728
+
729
def get_listener_processes(
        logger_name: str = None
) -> list:
    """
    Return the currently running QueueListener child processes.

    :param logger_name: Name of the logger to filter the listener processes.
        If None, all listener processes will be returned.
        If provided, only the listener processes for that logger will be returned.
    :return: List of QueueListener multiprocessing processes.
    """

    matched: list = []
    for child in multiprocessing.active_children():
        if logger_name:
            # Exact match: the process name is the prefix plus the logger name.
            if child.name == f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}":
                matched.append(child)
        elif child.name.startswith(QUEUE_LISTENER_PROCESS_NAME_PREFIX):
            # No filter given: collect every process carrying the listener prefix.
            matched.append(child)

    return matched
749
+
750
+
229
751
  def get_datetime_format_string_from_logger_file_handlers(logger: logging.Logger) -> list:
230
752
  """
231
753
  Function to get datetime format string from the logger's file handlers.
@@ -243,3 +765,319 @@ def get_datetime_format_string_from_logger_file_handlers(logger: logging.Logger)
243
765
  datetime_format_strings.append(date_time_format_string)
244
766
 
245
767
  return datetime_format_strings
768
+
769
+
770
def is_logger_exists(logger_name: str) -> bool:
    """
    Check whether a logger with the given name was already registered.

    Thin module-level convenience wrapper around 'loggers.is_logger_exists'.

    :param logger_name: Name of the logger to look up.
    :return: True when the logger exists, False otherwise.
    """

    exists: bool = loggers.is_logger_exists(logger_name)
    return exists
778
+
779
+
780
+ def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger | None:
781
+ """
782
+ Function to find the parent logger with StreamHandler.
783
+ Example:
784
+ logger_name = "parent.child.grandchild"
785
+ 'parent' logger has StreamHandler, but 'child' and 'grandchild' don't.
786
+ This function will return the 'parent' logger, since both 'child' and 'grandchild' will inherit the
787
+ StreamHandler from the 'parent' logger.
788
+
789
+ :param logger: Logger to find the parent logger with StreamHandler.
790
+ :return: Parent logger with StreamHandler or None if the logger doesn't have StreamHandler.
791
+ """
792
+
793
+ # Start with current logger to see if it has a stream handler.
794
+ current_logger = logger
795
+ found: bool = False
796
+ while current_logger and not current_logger.handlers:
797
+ for handler in current_logger.handlers:
798
+ if isinstance(handler, logging.StreamHandler):
799
+ found = True
800
+ break
801
+
802
+ if not found:
803
+ # If the current logger doesn't have the stream handler, let's move to the parent.
804
+ current_logger = current_logger.parent
805
+
806
+ # If none of the parent loggers have the stream handler, break the loop.
807
+ if current_logger is None:
808
+ break
809
+
810
+ return current_logger
811
+
812
+
813
@contextlib.contextmanager
def _temporary_change_logger_stream_handler_color(logger: logging.Logger, color: str):
    """
    THIS IS ONLY FOR REFERENCE.
    Better use 'temporary_change_logger_stream_record_color', since it is thread safe.
    Swapping the handler's formatter affects every thread logging through that handler,
    so messages from other threads could end up colored as well.

    Context manager that temporarily wraps the StreamHandler's format string with
    ANSI color codes, restoring the original formatter on exit.

    Example:
        with _temporary_change_logger_stream_handler_color(logger, color):
            # Do something with the temporary color.
            pass
    """

    # The StreamHandler may live on an ancestor logger, since child loggers
    # inherit handlers from their parents.
    owning_logger = find_the_parent_logger_with_stream_handler(logger)

    stream_handler = None
    for candidate in owning_logger.handlers:
        if isinstance(candidate, logging.StreamHandler):
            stream_handler = candidate
            break

    # Remember the original formatter (object and format string) for restoration.
    previous_formatter = stream_handler.formatter
    previous_format_string = handlers.get_formatter_string(stream_handler)

    # Build a formatter whose output is wrapped in the requested ANSI color.
    colored_formatter = logging.Formatter(
        ansi_escape_codes.get_colors_basic_dict(color) + previous_format_string +
        ansi_escape_codes.ColorsBasic.END)

    try:
        stream_handler.setFormatter(colored_formatter)
        yield
    finally:
        # Always restore the original formatter, even if the body raised.
        stream_handler.setFormatter(previous_formatter)
857
+
858
+
859
@contextlib.contextmanager
def temporary_change_logger_stream_record_color(logger: logging.Logger, color: str):
    """
    Temporarily color the records that THIS thread emits through the logger's StreamHandler.

    Thread safe: a filter keyed to the current thread id is attached for the duration
    of the context, so records produced by other threads keep their normal color.

    Example:
        with temporary_change_logger_stream_record_color(logger, "red"):
            # Do something with the temporary color.
            logger.error("This message will be colored with the 'red'.")
    """

    # The StreamHandler may live on an ancestor logger, since child loggers
    # inherit handlers from their parents.
    owning_logger = find_the_parent_logger_with_stream_handler(logger)

    stream_handler = None
    for candidate in owning_logger.handlers:
        if isinstance(candidate, logging.StreamHandler):
            stream_handler = candidate
            break

    # The filter colors only records whose emitting thread matches this id.
    color_filter = filters.ThreadColorLogFilter(color, threading.get_ident())
    stream_handler.addFilter(color_filter)

    try:
        # Do the logging within the context.
        yield
    finally:
        # Detach the filter even if the body raised, leaving the handler unchanged.
        stream_handler.removeFilter(color_filter)
895
+
896
+
897
class CsvLogger:
    def __init__(
            self,
            logger_name: str,
            directory_path: str = None,
            custom_header: str = None,
            log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
            add_queue_handler_start_listener_multiprocessing: bool = False,
            add_queue_handler_no_listener_multiprocessing: bool = False
    ):
        """
        Initialize the CsvLogger object.

        :param logger_name: Name of the logger.
        :param directory_path: Directory path where the log file will be saved.
            You can leave it as None, but if the logger doesn't exist, you will get an exception.
        :param custom_header: Custom header to write to the log file, e.g. "custom1,custom2,custom3".
            Each row passed to 'write' must have the same number of cells as this header.
        :param log_queue: Queue to use for the logger, needed for the queue handler/listener.

        :param add_queue_handler_start_listener_multiprocessing: bool, whether to add a queue handler that will use
            the 'log_queue' and start the queue listener with the same 'log_queue' for multiprocessing.
        :param add_queue_handler_no_listener_multiprocessing: bool, whether to add a queue handler that will use
            the 'log_queue' but will not start the queue listener for multiprocessing. This is useful when you
            already started the queue listener and want to add more handlers to the logger without
            starting a new listener.

        If you don't set any of 'add_queue_handler_start_listener_multiprocessing' or
        'add_queue_handler_no_listener_multiprocessing', the logger will be created without a queue handler.

        :raises ValueError: If both multiprocessing flags are set, if 'directory_path' is missing for a
            new logger, or if 'log_queue' is missing when a multiprocessing flag is set.
        """

        # The two multiprocessing modes are mutually exclusive.
        if add_queue_handler_no_listener_multiprocessing and add_queue_handler_start_listener_multiprocessing:
            raise ValueError(
                "You can't set both 'add_queue_handler_start_listener_multiprocessing' and "
                "'add_queue_handler_no_listener_multiprocessing' to True."
            )

        self.header = custom_header

        if is_logger_exists(logger_name):
            # Reuse the already-configured logger instead of adding duplicate handlers.
            self.logger = get_logger_with_level(logger_name)
        else:
            if directory_path is None:
                raise ValueError("You need to provide 'directory_path' if the logger doesn't exist.")

            if add_queue_handler_start_listener_multiprocessing:
                if not log_queue:
                    # BUGFIX: message previously referred to nonexistent parameter names
                    # ('logger_queue', '..._multiprocess').
                    raise ValueError(
                        "You need to provide 'log_queue' if "
                        "'add_queue_handler_start_listener_multiprocessing' is set to True.")

                # Create a logger with a queue handler that starts a listener for multiprocessing.
                self.logger = create_logger(
                    logger_name=logger_name,
                    start_queue_listener_multiprocess_add_queue_handler=True,
                    log_queue=log_queue,
                    directory_path=directory_path,
                    add_timedfile=True,
                    formatter_filehandler='MESSAGE',
                    file_type='csv',
                    header=self.header
                )
            elif add_queue_handler_no_listener_multiprocessing:
                if not log_queue:
                    raise ValueError(
                        "You need to provide 'log_queue' if "
                        "'add_queue_handler_no_listener_multiprocessing' is set to True.")

                # Create a logger with a queue handler that does not start a listener for multiprocessing.
                self.logger = create_logger(
                    logger_name=logger_name,
                    add_queue_handler=True,
                    log_queue=log_queue
                )
            else:
                # No multiprocessing requested: plain timed-rotating CSV file logger.
                self.logger = create_logger(
                    logger_name=logger_name,
                    directory_path=directory_path,
                    file_type="csv",
                    add_timedfile=True,
                    formatter_filehandler='MESSAGE',
                    header=self.header)

    def write(
            self,
            row_of_cols: list
    ):
        """
        Write a row of columns to the log file.

        :param row_of_cols: List of columns to write to the csv log file.
        :raises ValueError: If a header was provided and the row's cell count doesn't match it.
        """

        output_csv_line: str = csvs.escape_csv_line_to_string(row_of_cols)

        # Validate only when a header was provided; previously a None header was passed
        # straight into the cell-count helper.
        if self.header is not None:
            # If the number of cells in the 'output_csv_line' doesn't match the number of cells
            # in the 'header', raise an exception.
            if (csvs.get_number_of_cells_in_string_line(output_csv_line) !=
                    csvs.get_number_of_cells_in_string_line(self.header)):
                raise ValueError(
                    "Number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header'.")

        self.logger.info(output_csv_line)

    def get_logger(self):
        """Return the underlying logging.Logger instance."""
        return self.logger
1005
+
1006
+
1007
class ExceptionCsvLogger(CsvLogger):
    def __init__(
            self,
            logger_name: str,
            directory_path: str = None,
            custom_header: str = None,
            log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
            add_queue_handler_start_listener_multiprocessing: bool = False,
            add_queue_handler_no_listener_multiprocessing: bool = False
    ):
        """
        Initialize the ExceptionCsvLogger object.

        Wraps CsvLogger with an exception-oriented header: a 'timestamp' column is always
        prepended and an 'exception' column appended, with the optional 'custom_header'
        columns in between.
        """

        # Always frame the user columns with the mandatory timestamp/exception columns.
        if custom_header:
            custom_header = f"timestamp,{custom_header},exception"
        else:
            custom_header = "timestamp,exception"

        super().__init__(
            logger_name=logger_name,
            directory_path=directory_path,
            custom_header=custom_header,
            log_queue=log_queue,
            add_queue_handler_start_listener_multiprocessing=add_queue_handler_start_listener_multiprocessing,
            add_queue_handler_no_listener_multiprocessing=add_queue_handler_no_listener_multiprocessing
        )

    def write(
            self,
            message: Union[str, Exception] = None,
            custom_csv_string: str = None,
            custom_exception_attribute: str = None,
            custom_exception_attribute_placement: Literal['before', 'after'] = 'before',
            stdout: bool = True
    ):
        """
        Write the message to the log file.

        :param message: The message to write to the log file.
            If None, the message will be retrieved from current traceback frame.
        :param custom_csv_string: Custom CSV string to add between the timestamp and the exception.
            Currently, without the 'custom_csv_string', the csv line written as "timestamp,exception" as the header.
            If you add a 'custom_csv_string', the csv line will be written as "timestamp,custom_csv_string,exception".
            Meaning, that you need to provide the 'custom_header' during the initialization of the object.
            Of course, you can use as many commas as you need in the 'custom_csv_string': "custom1,custom2,custom3".
            This needs to be mirrored in the 'custom_header' as well: "custom1,custom2,custom3".
        :param custom_exception_attribute: If the 'message' is an Exception, you can provide a custom attribute
            name to extract from the Exception object and add it to the exception message.
            For example, if the Exception has an attribute 'engine_name', you can provide it here
            and the exception message will be appended with the value of that attribute.
        :param custom_exception_attribute_placement: 'before' or 'after', where to place
            the custom exception attribute value in the exception message.
        :param stdout: If set to True, the exception will be printed to the console.
        """

        if message is None or isinstance(message, Exception):
            # BUGFIX: getattr() raises TypeError when the attribute name is None, which
            # happened on every call without 'custom_exception_attribute'. Only perform
            # the lookup when an attribute name was actually provided.
            custom_attribute: str | None = (
                getattr(message, custom_exception_attribute, None)
                if custom_exception_attribute else None)
            traceback_string: str = tracebacks.get_as_string()
            if custom_attribute:
                if custom_exception_attribute_placement == 'before':
                    message = f"{custom_exception_attribute}: [{custom_attribute}] | {traceback_string}"
                else:
                    message = f"{traceback_string} | {custom_exception_attribute}: [{custom_attribute}]"
            else:
                message = traceback_string

        # Mirror the header layout: timestamp [, custom columns] , exception.
        if custom_csv_string:
            row_of_cols: list = [datetime.datetime.now(), custom_csv_string, message]
        else:
            row_of_cols: list = [datetime.datetime.now(), message]

        super().write(row_of_cols)

        if stdout:
            print_api.print_api('', error_type=True, color="red", traceback_string=True)