atomicshop 2.11.47__py3-none-any.whl → 3.10.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atomicshop/__init__.py +1 -1
- atomicshop/{addons/mains → a_mains}/FACT/update_extract.py +3 -2
- atomicshop/a_mains/addons/process_list/compile.cmd +7 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.dll +0 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.exp +0 -0
- atomicshop/a_mains/addons/process_list/compiled/Win10x64/process_list.lib +0 -0
- atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +8 -1
- atomicshop/a_mains/dns_gateway_setting.py +11 -0
- atomicshop/a_mains/get_local_tcp_ports.py +85 -0
- atomicshop/a_mains/github_wrapper.py +11 -0
- atomicshop/a_mains/install_ca_certificate.py +172 -0
- atomicshop/{addons/mains → a_mains}/msi_unpacker.py +3 -1
- atomicshop/a_mains/process_from_port.py +119 -0
- atomicshop/a_mains/set_default_dns_gateway.py +90 -0
- atomicshop/a_mains/update_config_toml.py +38 -0
- atomicshop/appointment_management.py +5 -3
- atomicshop/basics/ansi_escape_codes.py +3 -1
- atomicshop/basics/argparse_template.py +2 -0
- atomicshop/basics/booleans.py +27 -30
- atomicshop/basics/bytes_arrays.py +43 -0
- atomicshop/basics/classes.py +149 -1
- atomicshop/basics/dicts.py +12 -0
- atomicshop/basics/enums.py +2 -2
- atomicshop/basics/exceptions.py +5 -1
- atomicshop/basics/list_of_classes.py +29 -0
- atomicshop/basics/list_of_dicts.py +69 -5
- atomicshop/basics/lists.py +14 -0
- atomicshop/basics/multiprocesses.py +374 -50
- atomicshop/basics/package_module.py +10 -0
- atomicshop/basics/strings.py +160 -7
- atomicshop/basics/threads.py +14 -0
- atomicshop/basics/tracebacks.py +13 -4
- atomicshop/certificates.py +153 -52
- atomicshop/config_init.py +12 -7
- atomicshop/console_user_response.py +7 -14
- atomicshop/consoles.py +9 -0
- atomicshop/datetimes.py +98 -0
- atomicshop/diff_check.py +340 -40
- atomicshop/dns.py +128 -12
- atomicshop/etws/_pywintrace_fix.py +17 -0
- atomicshop/etws/const.py +38 -0
- atomicshop/etws/providers.py +21 -0
- atomicshop/etws/sessions.py +43 -0
- atomicshop/etws/trace.py +168 -0
- atomicshop/etws/traces/trace_dns.py +162 -0
- atomicshop/etws/traces/trace_sysmon_process_creation.py +126 -0
- atomicshop/etws/traces/trace_tcp.py +130 -0
- atomicshop/file_io/csvs.py +222 -24
- atomicshop/file_io/docxs.py +35 -18
- atomicshop/file_io/file_io.py +35 -19
- atomicshop/file_io/jsons.py +49 -0
- atomicshop/file_io/tomls.py +139 -0
- atomicshop/filesystem.py +864 -293
- atomicshop/get_process_list.py +133 -0
- atomicshop/{process_name_cmd.py → get_process_name_cmd_dll.py} +52 -19
- atomicshop/http_parse.py +149 -93
- atomicshop/ip_addresses.py +6 -1
- atomicshop/mitm/centered_settings.py +132 -0
- atomicshop/mitm/config_static.py +207 -0
- atomicshop/mitm/config_toml_editor.py +55 -0
- atomicshop/mitm/connection_thread_worker.py +875 -357
- atomicshop/mitm/engines/__parent/parser___parent.py +4 -17
- atomicshop/mitm/engines/__parent/recorder___parent.py +108 -51
- atomicshop/mitm/engines/__parent/requester___parent.py +116 -0
- atomicshop/mitm/engines/__parent/responder___parent.py +75 -114
- atomicshop/mitm/engines/__reference_general/parser___reference_general.py +10 -7
- atomicshop/mitm/engines/__reference_general/recorder___reference_general.py +5 -5
- atomicshop/mitm/engines/__reference_general/requester___reference_general.py +47 -0
- atomicshop/mitm/engines/__reference_general/responder___reference_general.py +95 -13
- atomicshop/mitm/engines/create_module_template.py +58 -14
- atomicshop/mitm/import_config.py +359 -139
- atomicshop/mitm/initialize_engines.py +160 -74
- atomicshop/mitm/message.py +64 -23
- atomicshop/mitm/mitm_main.py +892 -0
- atomicshop/mitm/recs_files.py +183 -0
- atomicshop/mitm/shared_functions.py +4 -10
- atomicshop/mitm/ssh_tester.py +82 -0
- atomicshop/mitm/statistic_analyzer.py +257 -166
- atomicshop/mitm/statistic_analyzer_helper/analyzer_helper.py +136 -0
- atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +525 -0
- atomicshop/monitor/change_monitor.py +96 -120
- atomicshop/monitor/checks/dns.py +139 -70
- atomicshop/monitor/checks/file.py +77 -0
- atomicshop/monitor/checks/network.py +81 -77
- atomicshop/monitor/checks/process_running.py +33 -34
- atomicshop/monitor/checks/url.py +94 -0
- atomicshop/networks.py +671 -0
- atomicshop/on_exit.py +205 -0
- atomicshop/package_mains_processor.py +84 -0
- atomicshop/permissions/permissions.py +22 -0
- atomicshop/permissions/ubuntu_permissions.py +239 -0
- atomicshop/permissions/win_permissions.py +33 -0
- atomicshop/print_api.py +24 -41
- atomicshop/process.py +63 -17
- atomicshop/process_poller/__init__.py +0 -0
- atomicshop/process_poller/pollers/__init__.py +0 -0
- atomicshop/process_poller/pollers/psutil_pywin32wmi_dll.py +95 -0
- atomicshop/process_poller/process_pool.py +207 -0
- atomicshop/process_poller/simple_process_pool.py +311 -0
- atomicshop/process_poller/tracer_base.py +45 -0
- atomicshop/process_poller/tracers/__init__.py +0 -0
- atomicshop/process_poller/tracers/event_log.py +46 -0
- atomicshop/process_poller/tracers/sysmon_etw.py +68 -0
- atomicshop/python_file_patcher.py +1 -1
- atomicshop/python_functions.py +27 -75
- atomicshop/question_answer_engine.py +2 -2
- atomicshop/scheduling.py +24 -5
- atomicshop/sound.py +4 -2
- atomicshop/speech_recognize.py +8 -0
- atomicshop/ssh_remote.py +158 -172
- atomicshop/startup/__init__.py +0 -0
- atomicshop/startup/win/__init__.py +0 -0
- atomicshop/startup/win/startup_folder.py +53 -0
- atomicshop/startup/win/task_scheduler.py +119 -0
- atomicshop/system_resource_monitor.py +61 -46
- atomicshop/system_resources.py +8 -8
- atomicshop/tempfiles.py +1 -2
- atomicshop/timer.py +30 -11
- atomicshop/urls.py +41 -0
- atomicshop/venvs.py +28 -0
- atomicshop/versioning.py +27 -0
- atomicshop/web.py +110 -25
- atomicshop/web_apis/__init__.py +0 -0
- atomicshop/web_apis/google_custom_search.py +44 -0
- atomicshop/web_apis/google_llm.py +188 -0
- atomicshop/websocket_parse.py +450 -0
- atomicshop/wrappers/certauthw/certauth.py +1 -0
- atomicshop/wrappers/cryptographyw.py +29 -8
- atomicshop/wrappers/ctyping/etw_winapi/__init__.py +0 -0
- atomicshop/wrappers/ctyping/etw_winapi/const.py +335 -0
- atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py +393 -0
- atomicshop/wrappers/ctyping/file_details_winapi.py +67 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
- atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +13 -9
- atomicshop/wrappers/ctyping/msi_windows_installer/tables.py +35 -0
- atomicshop/wrappers/ctyping/setup_device.py +466 -0
- atomicshop/wrappers/ctyping/win_console.py +39 -0
- atomicshop/wrappers/dockerw/dockerw.py +113 -2
- atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
- atomicshop/wrappers/elasticsearchw/elastic_infra.py +75 -0
- atomicshop/wrappers/elasticsearchw/elasticsearchw.py +2 -20
- atomicshop/wrappers/factw/get_file_data.py +12 -5
- atomicshop/wrappers/factw/install/install_after_restart.py +89 -5
- atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +20 -14
- atomicshop/wrappers/factw/postgresql/firmware.py +4 -6
- atomicshop/wrappers/githubw.py +583 -51
- atomicshop/wrappers/loggingw/consts.py +49 -0
- atomicshop/wrappers/loggingw/filters.py +102 -0
- atomicshop/wrappers/loggingw/formatters.py +58 -71
- atomicshop/wrappers/loggingw/handlers.py +459 -40
- atomicshop/wrappers/loggingw/loggers.py +19 -0
- atomicshop/wrappers/loggingw/loggingw.py +1010 -178
- atomicshop/wrappers/loggingw/reading.py +344 -19
- atomicshop/wrappers/mongodbw/__init__.py +0 -0
- atomicshop/wrappers/mongodbw/mongo_infra.py +31 -0
- atomicshop/wrappers/mongodbw/mongodbw.py +1432 -0
- atomicshop/wrappers/netshw.py +271 -0
- atomicshop/wrappers/playwrightw/engine.py +34 -19
- atomicshop/wrappers/playwrightw/infra.py +5 -0
- atomicshop/wrappers/playwrightw/javascript.py +7 -3
- atomicshop/wrappers/playwrightw/keyboard.py +14 -0
- atomicshop/wrappers/playwrightw/scenarios.py +172 -5
- atomicshop/wrappers/playwrightw/waits.py +9 -7
- atomicshop/wrappers/powershell_networking.py +80 -0
- atomicshop/wrappers/psutilw/processes.py +81 -0
- atomicshop/wrappers/psutilw/psutil_networks.py +85 -0
- atomicshop/wrappers/psutilw/psutilw.py +9 -0
- atomicshop/wrappers/pyopensslw.py +9 -2
- atomicshop/wrappers/pywin32w/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/cert_store.py +116 -0
- atomicshop/wrappers/pywin32w/console.py +34 -0
- atomicshop/wrappers/pywin32w/win_event_log/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribe.py +212 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +57 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +49 -0
- atomicshop/wrappers/pywin32w/win_event_log/subscribes/schannel_logging.py +97 -0
- atomicshop/wrappers/pywin32w/winshell.py +19 -0
- atomicshop/wrappers/pywin32w/wmis/__init__.py +0 -0
- atomicshop/wrappers/pywin32w/wmis/msft_netipaddress.py +113 -0
- atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +259 -0
- atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +112 -0
- atomicshop/wrappers/pywin32w/wmis/wmi_helpers.py +236 -0
- atomicshop/wrappers/socketw/accepter.py +21 -7
- atomicshop/wrappers/socketw/certificator.py +216 -150
- atomicshop/wrappers/socketw/creator.py +190 -50
- atomicshop/wrappers/socketw/dns_server.py +500 -173
- atomicshop/wrappers/socketw/exception_wrapper.py +45 -52
- atomicshop/wrappers/socketw/process_getter.py +86 -0
- atomicshop/wrappers/socketw/receiver.py +144 -102
- atomicshop/wrappers/socketw/sender.py +65 -35
- atomicshop/wrappers/socketw/sni.py +334 -165
- atomicshop/wrappers/socketw/socket_base.py +134 -0
- atomicshop/wrappers/socketw/socket_client.py +137 -95
- atomicshop/wrappers/socketw/socket_server_tester.py +14 -9
- atomicshop/wrappers/socketw/socket_wrapper.py +717 -116
- atomicshop/wrappers/socketw/ssl_base.py +15 -14
- atomicshop/wrappers/socketw/statistics_csv.py +148 -17
- atomicshop/wrappers/sysmonw.py +157 -0
- atomicshop/wrappers/ubuntu_terminal.py +65 -26
- atomicshop/wrappers/win_auditw.py +189 -0
- atomicshop/wrappers/winregw/__init__.py +0 -0
- atomicshop/wrappers/winregw/winreg_installed_software.py +58 -0
- atomicshop/wrappers/winregw/winreg_network.py +232 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/METADATA +31 -49
- atomicshop-3.10.5.dist-info/RECORD +306 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/WHEEL +1 -1
- atomicshop/_basics_temp.py +0 -101
- atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
- atomicshop/addons/a_setup_scripts/install_pywintrace_0.3.cmd +0 -2
- atomicshop/addons/mains/install_docker_rootless_ubuntu.py +0 -11
- atomicshop/addons/mains/install_docker_ubuntu_main_sudo.py +0 -11
- atomicshop/addons/mains/install_elastic_search_and_kibana_ubuntu.py +0 -10
- atomicshop/addons/mains/install_wsl_ubuntu_lts_admin.py +0 -9
- atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
- atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
- atomicshop/addons/package_setup/Setup.cmd +0 -7
- atomicshop/addons/process_list/compile.cmd +0 -2
- atomicshop/addons/process_list/compiled/Win10x64/process_list.dll +0 -0
- atomicshop/addons/process_list/compiled/Win10x64/process_list.exp +0 -0
- atomicshop/addons/process_list/compiled/Win10x64/process_list.lib +0 -0
- atomicshop/archiver/_search_in_zip.py +0 -189
- atomicshop/archiver/archiver.py +0 -34
- atomicshop/archiver/search_in_archive.py +0 -250
- atomicshop/archiver/sevenz_app_w.py +0 -86
- atomicshop/archiver/sevenzs.py +0 -44
- atomicshop/archiver/zips.py +0 -293
- atomicshop/etw/dns_trace.py +0 -118
- atomicshop/etw/etw.py +0 -61
- atomicshop/file_types.py +0 -24
- atomicshop/mitm/engines/create_module_template_example.py +0 -13
- atomicshop/mitm/initialize_mitm_server.py +0 -240
- atomicshop/monitor/checks/hash.py +0 -44
- atomicshop/monitor/checks/hash_checks/file.py +0 -55
- atomicshop/monitor/checks/hash_checks/url.py +0 -62
- atomicshop/pbtkmultifile_argparse.py +0 -88
- atomicshop/permissions.py +0 -110
- atomicshop/process_poller.py +0 -237
- atomicshop/script_as_string_processor.py +0 -38
- atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
- atomicshop/ssh_scripts/process_from_port.py +0 -27
- atomicshop/wrappers/_process_wrapper_curl.py +0 -27
- atomicshop/wrappers/_process_wrapper_tar.py +0 -21
- atomicshop/wrappers/dockerw/install_docker.py +0 -209
- atomicshop/wrappers/elasticsearchw/infrastructure.py +0 -265
- atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -232
- atomicshop/wrappers/ffmpegw.py +0 -125
- atomicshop/wrappers/loggingw/checks.py +0 -20
- atomicshop/wrappers/nodejsw/install_nodejs.py +0 -139
- atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
- atomicshop/wrappers/socketw/base.py +0 -59
- atomicshop/wrappers/socketw/get_process.py +0 -107
- atomicshop/wrappers/wslw.py +0 -191
- atomicshop-2.11.47.dist-info/RECORD +0 -251
- /atomicshop/{addons/mains → a_mains}/FACT/factw_fact_extractor_docker_image_main_sudo.py +0 -0
- /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
- /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
- /atomicshop/{addons/mains → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
- /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
- /atomicshop/{addons/mains → a_mains}/search_for_hyperlinks_in_docx.py +0 -0
- /atomicshop/{archiver → etws}/__init__.py +0 -0
- /atomicshop/{etw → etws/traces}/__init__.py +0 -0
- /atomicshop/{monitor/checks/hash_checks → mitm/statistic_analyzer_helper}/__init__.py +0 -0
- /atomicshop/{wrappers/nodejsw → permissions}/__init__.py +0 -0
- /atomicshop/wrappers/pywin32w/{wmi_win32process.py → wmis/win32process.py} +0 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info/licenses}/LICENSE.txt +0 -0
- {atomicshop-2.11.47.dist-info → atomicshop-3.10.5.dist-info}/top_level.txt +0 -0
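The largest single change below is the rewrite of atomicshop/wrappers/loggingw/loggingw.py (+1010 -178): create_logger() now builds the requested handlers into a tuple and either attaches them to a named logger, hands them to a QueueListener, or spawns that listener in a dedicated multiprocessing process. The wrapper is built on the standard library's QueueHandler/QueueListener pair; what follows is a minimal stdlib-only sketch of that underlying pattern, not atomicshop code (the worker/main names are illustrative):

import logging
import logging.handlers
import multiprocessing


def worker(log_queue: multiprocessing.Queue) -> None:
    logger = logging.getLogger("network")
    logger.setLevel(logging.DEBUG)
    # The child only enqueues records; no file or stream I/O happens here.
    logger.addHandler(logging.handlers.QueueHandler(log_queue))
    logger.info("hello from a child process")


def main() -> None:
    log_queue: multiprocessing.Queue = multiprocessing.Queue()

    # The listener owns the real handlers and drains the queue in a background thread.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(
        "%(levelname)s | %(processName)s | %(name)s | %(message)s"))
    listener = logging.handlers.QueueListener(log_queue, stream_handler)
    listener.start()

    process = multiprocessing.Process(target=worker, args=(log_queue,))
    process.start()
    process.join()

    listener.stop()  # flush remaining records before exit


if __name__ == "__main__":
    main()

Only the listener side owns the stream and file handlers, so child processes never contend for the same file handle; the new start_queue_listener_in_multiprocessing() in the diff automates exactly this wiring.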
|
@@ -1,40 +1,594 @@
|
|
|
1
|
-
import os
|
|
2
1
|
import logging
|
|
2
|
+
import os
|
|
3
|
+
from logging import Logger
|
|
4
|
+
from logging.handlers import QueueListener
|
|
5
|
+
from typing import Literal, Union, Callable
|
|
6
|
+
import datetime
|
|
7
|
+
import contextlib
|
|
8
|
+
import threading
|
|
3
9
|
import queue
|
|
10
|
+
import multiprocessing
|
|
11
|
+
import time
|
|
12
|
+
|
|
13
|
+
from . import loggers, handlers, filters
|
|
14
|
+
from ...file_io import csvs
|
|
15
|
+
from ...basics import tracebacks, ansi_escape_codes
|
|
16
|
+
from ... import print_api
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
QUEUE_LISTENER_PROCESS_NAME_PREFIX: str = "QueueListener-"
|
|
4
20
|
|
|
5
|
-
from . import loggers, handlers, formatters
|
|
6
21
|
|
|
22
|
+
class LoggingwLoggerAlreadyExistsError(Exception):
|
|
23
|
+
pass
|
|
7
24
|
|
|
8
|
-
|
|
25
|
+
|
|
26
|
+
# noinspection PyPep8Naming
|
|
27
|
+
def create_logger(
|
|
28
|
+
logger_name: str = None,
|
|
29
|
+
get_queue_listener: bool = False,
|
|
30
|
+
start_queue_listener_multiprocess_add_queue_handler: bool = False,
|
|
31
|
+
|
|
32
|
+
add_stream: bool = False,
|
|
33
|
+
add_timedfile: bool = False,
|
|
34
|
+
add_timedfile_with_internal_queue: bool = False,
|
|
35
|
+
add_queue_handler: bool = False,
|
|
36
|
+
|
|
37
|
+
log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
|
|
38
|
+
file_path: str = None,
|
|
39
|
+
directory_path: str = None,
|
|
40
|
+
file_type: Literal[
|
|
41
|
+
'txt',
|
|
42
|
+
'csv',
|
|
43
|
+
'json'] = 'txt',
|
|
44
|
+
logging_level="DEBUG",
|
|
45
|
+
formatter_streamhandler: Union[
|
|
46
|
+
Literal['MESSAGE', 'DEFAULT'],
|
|
47
|
+
str,
|
|
48
|
+
None] = None,
|
|
49
|
+
formatter_filehandler: Union[
|
|
50
|
+
Literal['MESSAGE', 'DEFAULT'],
|
|
51
|
+
str,
|
|
52
|
+
None] = None,
|
|
53
|
+
formatter_streamhandler_use_nanoseconds: bool = True,
|
|
54
|
+
formatter_filehandler_use_nanoseconds: bool = True,
|
|
55
|
+
filehandler_rotate_at_rollover_time: bool = True,
|
|
56
|
+
filehandler_rotation_date_format: str = None,
|
|
57
|
+
filehandler_rotation_callback_namer_function: Callable = None,
|
|
58
|
+
filehandler_rotation_use_default_namer_function: bool = True,
|
|
59
|
+
when: str = "midnight",
|
|
60
|
+
interval: int = 1,
|
|
61
|
+
backupCount: int = 0,
|
|
62
|
+
delay: bool = False,
|
|
63
|
+
encoding=None,
|
|
64
|
+
header: str = None
|
|
65
|
+
) -> None | QueueListener | Logger:
|
|
9
66
|
"""
|
|
10
|
-
Function to get a logger and
|
|
67
|
+
Function to get a logger and add StreamHandler and TimedRotatingFileHandler to it.
|
|
11
68
|
|
|
12
69
|
:param logger_name: Name of the logger.
|
|
13
|
-
:param
|
|
14
|
-
|
|
70
|
+
:param get_queue_listener: bool, If set to True, QueueListener will be started with all the handlers
|
|
71
|
+
like 'add_timedfile' and 'add_stream', using the 'log_queue'.
|
|
72
|
+
:param start_queue_listener_multiprocess_add_queue_handler: bool, If set to True, the QueueListener will be
|
|
73
|
+
started in a separate multiprocessing process, without you handling this manually.
|
|
74
|
+
|
|
75
|
+
Only one of the following parameters can be set at a time: 'logger_name', 'get_queue_listener'.
|
|
76
|
+
|
|
77
|
+
:param file_path: full path to the log file. If you don't want to use the file, set it to None.
|
|
78
|
+
You can set the directory_path only and then the 'logger_name' will be used as the file name with the
|
|
79
|
+
'file_type' as the file extension.
|
|
80
|
+
:param directory_path: full path to the directory where the log file will be saved.
|
|
81
|
+
:param add_stream: bool, If set to True, StreamHandler will be added to the logger.
|
|
82
|
+
:param add_timedfile: bool, If set to True, TimedRotatingFileHandler will be added to the logger directly.
|
|
83
|
+
:param add_timedfile_with_internal_queue: bool, If set to True, TimedRotatingFileHandler will be added
|
|
84
|
+
to the logger, but not directly.
|
|
85
|
+
Internal queue.Queue will be created, then used by the QueueListener, which will get the
|
|
86
|
+
TimerRotatingFileHandler as the handler.
|
|
87
|
+
Then the QueueHandler using the same internal queue will be added to the logger.
|
|
88
|
+
This is done to improve the multithreading compatibility.
|
|
89
|
+
:param add_queue_handler: bool, If set to True, QueueHandler will be added to the logger, using the 'log_queue'.
|
|
90
|
+
:param log_queue: queue.Queue or multiprocessing.Queue, Queue to use for the QueueHandler.
|
|
91
|
+
:param file_type: string, file type of the log file. Default is 'txt'.
|
|
92
|
+
'txt': Text file.
|
|
93
|
+
'csv': CSV file.
|
|
94
|
+
'json': JSON file.
|
|
95
|
+
:param logging_level: str or int, Logging level for the handler, that will use the logger while initiated.
|
|
96
|
+
:param formatter_streamhandler: string, Formatter to use for StreamHandler. It is template of how a message will
|
|
97
|
+
look like.
|
|
98
|
+
None: No formatter will be used.
|
|
99
|
+
'DEFAULT': Default formatter will be used:
|
|
100
|
+
"%(levelname)s | %(threadName)s | %(name)s | %(message)s"
|
|
101
|
+
'MESSAGE': Formatter will be used only for the 'message' part.
|
|
102
|
+
string: Custom formatter, regular syntax for logging.Formatter.
|
|
103
|
+
:param formatter_filehandler: string, Formatter to use for handler. It is template of how a message will look like.
|
|
104
|
+
None: No formatter will be used.
|
|
105
|
+
'DEFAULT': Default formatter will be used for each file extension:
|
|
106
|
+
txt: "%(asctime)s | %(levelname)s | %(threadName)s | %(name)s | %(message)s"
|
|
107
|
+
csv: "%(asctime)s,%(levelname)s,%(threadName)s,%(name)s,%(message)s"
|
|
108
|
+
json: '{"time": "%(asctime)s", "level": "%(levelname)s", "thread": "%(threadName)s",
|
|
109
|
+
"logger": "%(name)s", "message": "%(message)s"}'
|
|
110
|
+
'MESSAGE': Formatter will be used only for the 'message' part.
|
|
111
|
+
string: Custom formatter, regular syntax for logging.Formatter.
|
|
112
|
+
:param formatter_streamhandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
|
|
113
|
+
in the formatter in case you provide 'asctime' element.
|
|
114
|
+
:param formatter_filehandler_use_nanoseconds: bool, If set to True, the nanoseconds will be used
|
|
115
|
+
in the formatter in case you provide 'asctime' element.
|
|
116
|
+
:param filehandler_rotate_at_rollover_time: bool,
|
|
117
|
+
If set to True, the log file will be rotated at the rollover time, even if there's nothing to write.
|
|
118
|
+
This behavior overrides the TimedRotatingFileHandler default behavior on doRollover.
|
|
119
|
+
If set to False, the log file will be rotated after 'when' time, but only when event occurs.
|
|
120
|
+
This is the default doRollover behavior of the TimedRotatingFileHandler.
|
|
121
|
+
:param filehandler_rotation_date_format: string, Date format to use for the log file rotation.
|
|
122
|
+
Example for 'when="midnight"': the default date format is '%Y-%m-%d', resulting in filename on rotation like:
|
|
123
|
+
"test.log.2021-11-25"
|
|
124
|
+
If you want to change the date format to '%Y_%m_%d', the filename will be:
|
|
125
|
+
"test.log.2021_11_25"
|
|
126
|
+
:param filehandler_rotation_callback_namer_function: callable, Callback function to use for the log file naming
|
|
127
|
+
on rotation. If set to None, logging module default function will be used. With "when='midnight'",
|
|
128
|
+
and filename: "test.log" this will name the file on rotation similar to: "test.log.2021-11-25".
|
|
129
|
+
:param filehandler_rotation_use_default_namer_function: bool, If set to True, the default namer function will be
|
|
130
|
+
used for the log file naming on rotation. With "when='midnight'" and filename: "test.log",
|
|
131
|
+
this will name the file on rotation similar to: "test_2021-11-25.log".
|
|
132
|
+
:param when: string, When to rotate the log file. Default is 'midnight'.
|
|
133
|
+
[when="midnight"] is set to rotate the filename at midnight. This means that the current file name will be
|
|
134
|
+
added Yesterday's date to the end of the file and today's file will continue to write at the same
|
|
135
|
+
filename. Also, if the script finished working on 25.11.2021, the name of the log file will be "test.log"
|
|
136
|
+
If you run the script again on 28.11.2021, the logging module will take the last modification date of
|
|
137
|
+
the file "test.log" and assign a date to it: test.log.2021_11_25
|
|
138
|
+
The log filename of 28.11.2021 will be called "test.log" again.
|
|
139
|
+
:param interval: int, Interval to rotate the log file. Default is 1.
|
|
140
|
+
If 'when="midnight"' and 'interval=1', then the log file will be rotated every midnight.
|
|
141
|
+
If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
|
|
142
|
+
:param backupCount: int, Number of backup files to keep. Default is 0.
|
|
143
|
+
If backupCount is > 0, when rollover is done, no more than backupCount files are kept, the oldest are deleted.
|
|
144
|
+
If backupCount is == 0, all the backup files will be kept.
|
|
145
|
+
:param delay: bool, If set to True, the log file will be created only if there's something to write.
|
|
146
|
+
:param encoding: string, Encoding to use for the log file. Default is None.
|
|
147
|
+
:param header: string, Header to write to the log file.
|
|
148
|
+
Example: "time,host,error"
|
|
149
|
+
Useful for 'csv' file type format.
|
|
150
|
+
|
|
15
151
|
:return: Logger.
|
|
152
|
+
|
|
153
|
+
================================================================================================================
|
|
154
|
+
|
|
155
|
+
Working example to write CSV logs to the file and output messages to the console:
|
|
156
|
+
from atomicshop.wrappers.loggingw import loggingw
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def main():
|
|
160
|
+
header: str = "time,host,error"
|
|
161
|
+
output_log_file: str = "D:\\logs\\log_file.csv"
|
|
162
|
+
|
|
163
|
+
error_logger = loggingw.create_logger(
|
|
164
|
+
logger_name=f'{self.__class__.__name__}_CSV',
|
|
165
|
+
file_path=output_log_file,
|
|
166
|
+
add_timedfile_with_internal_queue=True,
|
|
167
|
+
file_type='csv',
|
|
168
|
+
formatter_filehandler='MESSAGE',
|
|
169
|
+
header=header
|
|
170
|
+
)
|
|
171
|
+
|
|
172
|
+
error_logger.info(error_message)
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
if __name__ == "__main__":
|
|
176
|
+
main()
|
|
177
|
+
|
|
178
|
+
------------------------------
|
|
179
|
+
|
|
180
|
+
Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file:
|
|
181
|
+
from atomicshop.wrappers.loggingw import loggingw
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def main():
|
|
185
|
+
header: str = "time,host,error"
|
|
186
|
+
output_log_file: str = "D:\\logs\\log_file.txt"
|
|
187
|
+
|
|
188
|
+
error_logger = loggingw.create_logger(
|
|
189
|
+
logger_name=f'{self.__class__.__name__}',
|
|
190
|
+
file_path=output_log_file,
|
|
191
|
+
add_stream=True,
|
|
192
|
+
add_timedfile_with_internal_queue=True,
|
|
193
|
+
file_type='txt',
|
|
194
|
+
formatter_streamhandler='DEFAULT',
|
|
195
|
+
formatter_filehandler='DEFAULT'
|
|
196
|
+
)
|
|
197
|
+
|
|
198
|
+
error_logger.info(f"{datetime.now()},host1,/path/to/file,error message")
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
if __name__ == "__main__":
|
|
202
|
+
main()
|
|
203
|
+
|
|
204
|
+
------------------------------
|
|
205
|
+
|
|
206
|
+
Example to use StreamHandler to output to console and TimedRotatingFileHandler to write to file in multiprocessing,
|
|
207
|
+
while QueueListener is in the main process writes to the file and outputs to the console and the QueueHandler
|
|
208
|
+
in two child subprocesses sends the logs to the main process through the multiprocessing.Queue:
|
|
209
|
+
|
|
210
|
+
import sys
|
|
211
|
+
import multiprocessing
|
|
212
|
+
from atomicshop.wrappers.loggingw import loggingw
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def worker1(
|
|
216
|
+
log_queue: multiprocessing.Queue,
|
|
217
|
+
logger_name: str
|
|
218
|
+
):
|
|
219
|
+
error_logger = loggingw.create_logger(
|
|
220
|
+
logger_name=logger_name,
|
|
221
|
+
add_queue_handler=True,
|
|
222
|
+
log_queue=log_queue
|
|
223
|
+
)
|
|
224
|
+
|
|
225
|
+
error_logger.info("Worker1 log message for 'network' logger.")
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def worker2(
|
|
229
|
+
log_queue: multiprocessing.Queue,
|
|
230
|
+
logger_name: str
|
|
231
|
+
):
|
|
232
|
+
error_logger = loggingw.create_logger(
|
|
233
|
+
logger_name=logger_name,
|
|
234
|
+
add_queue_handler=True,
|
|
235
|
+
log_queue=log_queue
|
|
236
|
+
)
|
|
237
|
+
|
|
238
|
+
error_logger.info("Worker2 log message for 'network' logger.")
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def main():
|
|
242
|
+
log_queue = multiprocessing.Queue()
|
|
243
|
+
|
|
244
|
+
queue_listener = loggingw.create_logger(
|
|
245
|
+
get_queue_listener=True,
|
|
246
|
+
add_stream=True,
|
|
247
|
+
add_timedfile=True,
|
|
248
|
+
log_queue=log_queue,
|
|
249
|
+
file_type='txt',
|
|
250
|
+
formatter_streamhandler='DEFAULT',
|
|
251
|
+
formatter_filehandler='DEFAULT'
|
|
252
|
+
)
|
|
253
|
+
|
|
254
|
+
process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
|
|
255
|
+
process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
|
|
256
|
+
|
|
257
|
+
process1.start()
|
|
258
|
+
process2.start()
|
|
259
|
+
|
|
260
|
+
process1.join()
|
|
261
|
+
process2.join()
|
|
262
|
+
|
|
263
|
+
# If we exit the function, we need to stop the listener
|
|
264
|
+
queue_listener.stop()
|
|
265
|
+
|
|
266
|
+
return 0
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
if __name__ == "__main__":
|
|
270
|
+
sys.exit(main())
|
|
271
|
+
|
|
272
|
+
--------------------------------------------------
|
|
273
|
+
|
|
274
|
+
Example if you need to start a QueueListener in multiprocessing, which is less garbage code and python's
|
|
275
|
+
garbage collector handles the listener closing without the need to call 'stop()' method:
|
|
276
|
+
|
|
277
|
+
import sys
|
|
278
|
+
import multiprocessing
|
|
279
|
+
from atomicshop.wrappers.loggingw import loggingw
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def worker1(
|
|
283
|
+
log_queue: multiprocessing.Queue,
|
|
284
|
+
logger_name: str
|
|
285
|
+
):
|
|
286
|
+
error_logger = loggingw.create_logger(
|
|
287
|
+
logger_name=logger_name,
|
|
288
|
+
add_queue_handler=True,
|
|
289
|
+
log_queue=log_queue
|
|
290
|
+
)
|
|
291
|
+
|
|
292
|
+
error_logger.info("Worker1 log message for 'network' logger.")
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
def worker2(
|
|
296
|
+
log_queue: multiprocessing.Queue,
|
|
297
|
+
logger_name: str
|
|
298
|
+
):
|
|
299
|
+
error_logger = loggingw.create_logger(
|
|
300
|
+
logger_name=logger_name,
|
|
301
|
+
add_queue_handler=True,
|
|
302
|
+
log_queue=log_queue
|
|
303
|
+
)
|
|
304
|
+
|
|
305
|
+
error_logger.info("Worker2 log message for 'network' logger.")
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def main():
|
|
309
|
+
log_queue = multiprocessing.Queue()
|
|
310
|
+
logger_name: str = 'network'
|
|
311
|
+
|
|
312
|
+
loggingw.start_queue_listener_in_multiprocessing(
|
|
313
|
+
logger_name=logger_name,
|
|
314
|
+
add_stream=True,
|
|
315
|
+
add_timedfile=True,
|
|
316
|
+
log_queue=log_queue,
|
|
317
|
+
file_type='txt',
|
|
318
|
+
formatter_streamhandler='DEFAULT',
|
|
319
|
+
formatter_filehandler='DEFAULT'
|
|
320
|
+
)
|
|
321
|
+
|
|
322
|
+
# If you want you can get the QueueListener processes.
|
|
323
|
+
# listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
|
|
324
|
+
# Or if you started several listeners, you can get all of them:
|
|
325
|
+
# listener_processes_list: list = loggingw.get_listener_processes()
|
|
326
|
+
|
|
327
|
+
process1 = multiprocessing.Process(target=worker1, args=(log_queue, logger_name))
|
|
328
|
+
process2 = multiprocessing.Process(target=worker2, args=(log_queue, logger_name))
|
|
329
|
+
|
|
330
|
+
process1.start()
|
|
331
|
+
process2.start()
|
|
332
|
+
|
|
333
|
+
process1.join()
|
|
334
|
+
process2.join()
|
|
335
|
+
|
|
336
|
+
return 0
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
if __name__ == "__main__":
|
|
340
|
+
sys.exit(main())
|
|
341
|
+
|
|
342
|
+
---------------------------------------------------
|
|
343
|
+
|
|
344
|
+
Or you can use the 'create_logger' function with 'start_queue_listener_multiprocess=True' parameter,
|
|
345
|
+
which will start the QueueListener in a separate multiprocessing process automatically if you want to use the
|
|
346
|
+
queue handler logger also in the main process:
|
|
347
|
+
|
|
348
|
+
import sys
|
|
349
|
+
import multiprocessing
|
|
350
|
+
from atomicshop.wrappers.loggingw import loggingw
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
def worker1(
|
|
354
|
+
log_queue: multiprocessing.Queue,
|
|
355
|
+
logger_name: str
|
|
356
|
+
):
|
|
357
|
+
error_logger = loggingw.create_logger(
|
|
358
|
+
logger_name=logger_name,
|
|
359
|
+
add_queue_handler=True,
|
|
360
|
+
log_queue=log_queue
|
|
361
|
+
)
|
|
362
|
+
|
|
363
|
+
error_logger.info("Worker1 log message for 'network' logger.")
|
|
364
|
+
|
|
365
|
+
|
|
366
|
+
def worker2(
|
|
367
|
+
log_queue: multiprocessing.Queue,
|
|
368
|
+
logger_name: str
|
|
369
|
+
):
|
|
370
|
+
error_logger = loggingw.create_logger(
|
|
371
|
+
logger_name=logger_name,
|
|
372
|
+
add_queue_handler=True,
|
|
373
|
+
log_queue=log_queue
|
|
374
|
+
)
|
|
375
|
+
|
|
376
|
+
error_logger.info("Worker2 log message for 'network' logger.")
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
def main():
|
|
380
|
+
log_queue = multiprocessing.Queue()
|
|
381
|
+
|
|
382
|
+
main_logger: Logger = loggingw.create_logger(
|
|
383
|
+
logger_name='network',
|
|
384
|
+
start_queue_listener_multiprocess_add_queue_handler=True,
|
|
385
|
+
add_stream=True,
|
|
386
|
+
add_timedfile=True,
|
|
387
|
+
log_queue=log_queue,
|
|
388
|
+
file_type='txt',
|
|
389
|
+
formatter_streamhandler='DEFAULT',
|
|
390
|
+
formatter_filehandler='DEFAULT'
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
main_logger.info("Main process log message for 'network' logger.")
|
|
394
|
+
|
|
395
|
+
# If you want you can get the QueueListener processes.
|
|
396
|
+
# listener_processes = loggingw.get_listener_processes(logger_name=logger_name)[0]
|
|
397
|
+
# Or if you started several listeners, you can get all of them:
|
|
398
|
+
# listener_processes_list: list = loggingw.get_listener_processes()
|
|
399
|
+
|
|
400
|
+
process1 = multiprocessing.Process(target=worker1, args=(log_queue, 'network'))
|
|
401
|
+
process2 = multiprocessing.Process(target=worker2, args=(log_queue, 'network'))
|
|
402
|
+
|
|
403
|
+
process1.start()
|
|
404
|
+
process2.start()
|
|
405
|
+
|
|
406
|
+
process1.join()
|
|
407
|
+
process2.join()
|
|
408
|
+
|
|
409
|
+
return 0
|
|
410
|
+
|
|
411
|
+
|
|
412
|
+
if __name__ == "__main__":
|
|
413
|
+
sys.exit(main())
|
|
16
414
|
"""
|
|
17
415
|
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
if logging_level:
|
|
22
|
-
loggers.set_logging_level(logger, logging_level)
|
|
416
|
+
if start_queue_listener_multiprocess_add_queue_handler and (get_queue_listener or add_queue_handler):
|
|
417
|
+
raise ValueError("You don't need to set 'get_queue_listener' or 'add_queue_handler' "
|
|
418
|
+
"when setting 'start_queue_listener_multiprocess_add_queue_handler'.")
|
|
23
419
|
|
|
24
|
-
|
|
420
|
+
if start_queue_listener_multiprocess_add_queue_handler:
|
|
421
|
+
logger_instance: Logger = _create_logger_with_queue_handler(
|
|
422
|
+
logger_name=logger_name,
|
|
423
|
+
log_queue=log_queue
|
|
424
|
+
)
|
|
425
|
+
|
|
426
|
+
# Start the QueueListener in a separate multiprocessing process.
|
|
427
|
+
start_queue_listener_in_multiprocessing(
|
|
428
|
+
logger_name=logger_name,
|
|
429
|
+
add_stream=add_stream,
|
|
430
|
+
add_timedfile=add_timedfile,
|
|
431
|
+
add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,
|
|
432
|
+
log_queue=log_queue,
|
|
433
|
+
file_path=file_path,
|
|
434
|
+
directory_path=directory_path,
|
|
435
|
+
file_type=file_type,
|
|
436
|
+
logging_level=logging_level,
|
|
437
|
+
formatter_streamhandler=formatter_streamhandler,
|
|
438
|
+
formatter_filehandler=formatter_filehandler,
|
|
439
|
+
formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
|
|
440
|
+
formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
|
|
441
|
+
filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
|
|
442
|
+
filehandler_rotation_date_format=filehandler_rotation_date_format,
|
|
443
|
+
filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
|
|
444
|
+
filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
|
|
445
|
+
when=when,
|
|
446
|
+
interval=interval,
|
|
447
|
+
backupCount=backupCount,
|
|
448
|
+
delay=delay,
|
|
449
|
+
encoding=encoding,
|
|
450
|
+
header=header
|
|
451
|
+
)
|
|
452
|
+
|
|
453
|
+
return logger_instance
|
|
454
|
+
|
|
455
|
+
if logger_name and get_queue_listener and not start_queue_listener_multiprocess_add_queue_handler:
|
|
456
|
+
raise ValueError("You can't set both 'logger_name' and 'get_queue_listener'.")
|
|
457
|
+
if not logger_name and not get_queue_listener:
|
|
458
|
+
raise ValueError("You need to provide 'logger_name' or 'get_queue_listener'.")
|
|
459
|
+
|
|
460
|
+
# Check if the logger exists before creating it.
|
|
461
|
+
if logger_name:
|
|
462
|
+
if loggers.is_logger_exists(logger_name):
|
|
463
|
+
raise LoggingwLoggerAlreadyExistsError(f"Logger '{logger_name}' already exists.")
|
|
464
|
+
|
|
465
|
+
if not logger_name and not file_path:
|
|
466
|
+
raise ValueError("You need to provide 'file_path' if 'logger_name' is not set.")
|
|
467
|
+
|
|
468
|
+
if get_queue_listener and not log_queue:
|
|
469
|
+
raise ValueError("You need to provide 'log_queue' if 'get_queue_listener' is set to True.")
|
|
470
|
+
|
|
471
|
+
if add_queue_handler and not log_queue:
|
|
472
|
+
raise ValueError("You need to provide 'log_queue' if 'add_queue_handler' is set to True.")
|
|
473
|
+
|
|
474
|
+
if add_timedfile or add_timedfile_with_internal_queue:
|
|
475
|
+
if not directory_path and not file_path:
|
|
476
|
+
raise ValueError("You need to provide 'directory_path' or 'file_path'.")
|
|
477
|
+
if directory_path and file_path:
|
|
478
|
+
raise ValueError("You can't provide both 'directory_path' and 'file_path'.")
|
|
479
|
+
|
|
480
|
+
if directory_path:
|
|
481
|
+
if directory_path.endswith(os.sep):
|
|
482
|
+
directory_path = directory_path[:-1]
|
|
483
|
+
|
|
484
|
+
file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
|
|
485
|
+
|
|
486
|
+
# --- Add the handlers to a tuple ---
|
|
487
|
+
|
|
488
|
+
handlers_tuple: tuple = ()
|
|
489
|
+
if add_stream:
|
|
490
|
+
stream_handler = handlers.get_stream_handler_extended(
|
|
491
|
+
logging_level=logging_level,
|
|
492
|
+
formatter=formatter_streamhandler,
|
|
493
|
+
formatter_use_nanoseconds=formatter_streamhandler_use_nanoseconds)
|
|
494
|
+
|
|
495
|
+
handlers_tuple += (stream_handler,)
|
|
496
|
+
|
|
497
|
+
if add_timedfile:
|
|
498
|
+
timed_file_handler = handlers.get_timed_rotating_file_handler_extended(
|
|
499
|
+
file_path=file_path,
|
|
500
|
+
logging_level=logging_level,
|
|
501
|
+
formatter=formatter_filehandler,
|
|
502
|
+
formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
|
|
503
|
+
file_type=file_type,
|
|
504
|
+
rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
|
|
505
|
+
rotation_date_format=filehandler_rotation_date_format,
|
|
506
|
+
rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
|
|
507
|
+
rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
|
|
508
|
+
when=when,
|
|
509
|
+
interval=interval,
|
|
510
|
+
delay=delay,
|
|
511
|
+
backupCount=backupCount,
|
|
512
|
+
encoding=encoding,
|
|
513
|
+
header=header
|
|
514
|
+
)
|
|
515
|
+
|
|
516
|
+
handlers_tuple += (timed_file_handler,)
|
|
517
|
+
|
|
518
|
+
if add_timedfile_with_internal_queue:
|
|
519
|
+
timed_file_handler_with_queue = handlers.get_timed_rotating_file_handler_extended(
|
|
520
|
+
file_path=file_path,
|
|
521
|
+
logging_level=logging_level,
|
|
522
|
+
formatter=formatter_filehandler,
|
|
523
|
+
formatter_use_nanoseconds=formatter_filehandler_use_nanoseconds,
|
|
524
|
+
file_type=file_type,
|
|
525
|
+
rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
|
|
526
|
+
rotation_date_format=filehandler_rotation_date_format,
|
|
527
|
+
rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
|
|
528
|
+
rotation_use_default_callback_namer_function=filehandler_rotation_use_default_namer_function,
|
|
529
|
+
use_internal_queue_listener=True,
|
|
530
|
+
when=when,
|
|
531
|
+
interval=interval,
|
|
532
|
+
delay=delay,
|
|
533
|
+
backupCount=backupCount,
|
|
534
|
+
encoding=encoding,
|
|
535
|
+
header=header
|
|
536
|
+
)
|
|
537
|
+
|
|
538
|
+
handlers_tuple += (timed_file_handler_with_queue,)
|
|
539
|
+
|
|
540
|
+
if add_queue_handler:
|
|
541
|
+
queue_handler = handlers.get_queue_handler_extended(log_queue)
|
|
542
|
+
handlers_tuple += (queue_handler,)
|
|
25
543
|
|
|
544
|
+
# --- Create the logger ---
|
|
26
545
|
|
|
27
|
-
|
|
28
|
-
logger_name
|
|
29
|
-
|
|
546
|
+
if logger_name:
|
|
547
|
+
logger = get_logger_with_level(logger_name, logging_level)
|
|
548
|
+
|
|
549
|
+
# Add the handlers to the logger.
|
|
550
|
+
for handler in handlers_tuple:
|
|
551
|
+
loggers.add_handler(logger, handler)
|
|
552
|
+
|
|
553
|
+
# Disable propagation from the 'root' logger, so we will not see the messages twice.
|
|
554
|
+
loggers.set_propagation(logger)
|
|
555
|
+
|
|
556
|
+
return logger
|
|
557
|
+
|
|
558
|
+
# --- create the QueueListener ---
|
|
559
|
+
|
|
560
|
+
if get_queue_listener:
|
|
561
|
+
queue_listener: logging.handlers.QueueListener = handlers.start_queue_listener_for_handlers(handlers_tuple, log_queue)
|
|
562
|
+
return queue_listener
|
|
563
|
+
|
|
564
|
+
|
|
565
|
+
def _create_logger_with_queue_handler(
|
|
566
|
+
logger_name: str,
|
|
567
|
+
log_queue: Union[queue.Queue, multiprocessing.Queue]
|
|
568
|
+
) -> Logger:
|
|
569
|
+
"""
|
|
570
|
+
The function to create a logger with QueueHandler so the QueueListener can be started later in multiprocessing.
|
|
571
|
+
"""
|
|
572
|
+
|
|
573
|
+
logger_instance: Logger = create_logger(
|
|
574
|
+
logger_name=logger_name,
|
|
575
|
+
add_queue_handler=True,
|
|
576
|
+
log_queue=log_queue
|
|
577
|
+
)
|
|
578
|
+
|
|
579
|
+
return logger_instance
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def get_logger_with_level(
|
|
583
|
+
logger_name: str,
|
|
584
|
+
logging_level="DEBUG"
|
|
30
585
|
) -> logging.Logger:
|
|
31
586
|
"""
|
|
32
|
-
Function to get a logger and
|
|
587
|
+
Function to get a logger and set logging level.
|
|
33
588
|
|
|
34
589
|
:param logger_name: Name of the logger.
|
|
35
590
|
:param logging_level: 'int' or 'str', Logging level to set to the logger.
|
|
36
591
|
None: if None, the logger level will not be set.
|
|
37
|
-
:param formatter: Formatter to use for StreamHandler. It is template of how a message will look like.
|
|
38
592
|
:return: Logger.
|
|
39
593
|
"""
|
|
40
594
|
|
|
@@ -43,209 +597,487 @@ def get_logger_with_stream_handler(
|
|
|
43
597
|
# Set the logger level if it is not None.
|
|
44
598
|
if logging_level:
|
|
45
599
|
loggers.set_logging_level(logger, logging_level)
|
|
46
|
-
# Add StreamHandler to the logger.
|
|
47
|
-
add_stream_handler(logger, logging_level, formatter)
|
|
48
600
|
|
|
49
601
|
return logger
|
|
50
602
|
|
|
51
603
|
|
|
52
|
-
def
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
formatter_message_only: bool = False, disable_duplicate_ms: bool = False,
|
|
57
|
-
when: str = "midnight", interval: int = 1, delay: bool = True, encoding=None
|
|
58
|
-
) -> logging.Logger:
|
|
59
|
-
logger = get_logger_with_level(logger_name, logging_level)
|
|
60
|
-
add_timedfilehandler_with_queuehandler(
|
|
61
|
-
logger, directory_path, file_name, file_extension, logging_level, formatter,
|
|
62
|
-
formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding
|
|
63
|
-
)
|
|
604
|
+
def disable_default_logger():
|
|
605
|
+
"""
|
|
606
|
+
Function to disable default logger.
|
|
607
|
+
"""
|
|
64
608
|
|
|
65
|
-
|
|
609
|
+
# # Get the default logger.
|
|
610
|
+
# logger = logging.getLogger()
|
|
611
|
+
# # Remove all handlers from the logger.
|
|
612
|
+
# logger.handlers.clear()
|
|
613
|
+
# # Set the logger level to 'NOTSET'.
|
|
614
|
+
# logger.setLevel(logging.NOTSET)
|
|
615
|
+
# # Disable propagation from the 'root' logger, so we will not see the messages twice.
|
|
616
|
+
# loggers.set_propagation(logger)
|
|
66
617
|
|
|
618
|
+
# Disabling the default logger in Python
|
|
619
|
+
logging.disable(logging.CRITICAL)
|
|
67
620
|
|
|
68
|
-
def get_logger_with_stream_handler_and_timedfilehandler(
|
|
69
|
-
logger_name: str,
|
|
70
|
-
directory_path, file_name: str = None, file_extension: str = '.txt',
|
|
71
|
-
logging_level="DEBUG", formatter_filehandler='default',
|
|
72
|
-
formatter_streamhandler: str = "%(levelname)s | %(threadName)s | %(name)s | %(message)s",
|
|
73
|
-
formatter_message_only: bool = False, disable_duplicate_ms: bool = False,
|
|
74
|
-
when: str = "midnight", interval: int = 1, delay: bool = True, encoding=None
|
|
75
|
-
) -> logging.Logger:
|
|
76
|
-
logger = get_logger_with_level(logger_name, logging_level)
|
|
77
|
-
add_stream_handler(logger, logging_level, formatter_streamhandler, formatter_message_only)
|
|
78
|
-
add_timedfilehandler_with_queuehandler(
|
|
79
|
-
logger, directory_path, file_name, file_extension, logging_level, formatter_filehandler,
|
|
80
|
-
formatter_message_only, disable_duplicate_ms, when, interval, delay, encoding
|
|
81
|
-
)
|
|
82
621
|
|
|
83
|
-
|
|
622
|
+
def start_queue_listener_in_multiprocessing(
|
|
623
|
+
logger_name: str = None,
|
|
84
624
|
|
|
625
|
+
add_stream: bool = False,
|
|
626
|
+
add_timedfile: bool = False,
|
|
627
|
+
add_timedfile_with_internal_queue: bool = False,
|
|
85
628
|
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
629
|
+
log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
|
|
630
|
+
file_path: str = None,
|
|
631
|
+
directory_path: str = None,
|
|
632
|
+
file_type: Literal[
|
|
633
|
+
'txt',
|
|
634
|
+
'csv',
|
|
635
|
+
'json'] = 'txt',
|
|
636
|
+
logging_level="DEBUG",
|
|
637
|
+
formatter_streamhandler: Union[
|
|
638
|
+
Literal['MESSAGE', 'DEFAULT'],
|
|
639
|
+
str,
|
|
640
|
+
None] = None,
|
|
641
|
+
formatter_filehandler: Union[
|
|
642
|
+
Literal['MESSAGE', 'DEFAULT'],
|
|
643
|
+
str,
|
|
644
|
+
None] = None,
|
|
645
|
+
formatter_streamhandler_use_nanoseconds: bool = True,
|
|
646
|
+
formatter_filehandler_use_nanoseconds: bool = True,
|
|
647
|
+
filehandler_rotate_at_rollover_time: bool = True,
|
|
648
|
+
filehandler_rotation_date_format: str = None,
|
|
649
|
+
filehandler_rotation_callback_namer_function: Callable = None,
|
|
650
|
+
filehandler_rotation_use_default_namer_function: bool = True,
|
|
651
|
+
when: str = "midnight",
|
|
652
|
+
interval: int = 1,
|
|
653
|
+
backupCount: int = 0,
|
|
654
|
+
delay: bool = False,
|
|
655
|
+
encoding=None,
|
|
656
|
+
header: str = None
|
|
657
|
+
) -> multiprocessing.Process:
|
|
91
658
|
"""
|
|
92
|
-
Function to
|
|
93
|
-
|
|
659
|
+
Function to start a QueueListener in multiprocessing.
|
|
660
|
+
PARAMETERS are same as in 'create_logger' function.
|
|
94
661
|
|
|
95
|
-
:
|
|
96
|
-
:param logging_level: Logging level for the handler, that will use the logger while initiated.
|
|
97
|
-
:param formatter: Formatter to use for StreamHandler. It is template of how a message will look like.
|
|
98
|
-
None: No formatter will be used.
|
|
99
|
-
'default': Default formatter will be used:
|
|
100
|
-
"%(levelname)s | %(threadName)s | %(name)s | %(message)s"
|
|
101
|
-
:param formatter_message_only: bool, If set to True, formatter will be used only for the 'message' part.
|
|
662
|
+
logger_name: Name of the logger. Will be used only to name the QueueListener process.
|
|
102
663
|
"""
|
|
103
664
|
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
# Setting log level for the handler, that will use the logger while initiated.
|
|
107
|
-
loggers.set_logging_level(stream_handler, logging_level)
|
|
665
|
+
if not file_path and directory_path and logger_name:
|
|
666
|
+
file_path = f"{directory_path}{os.sep}{logger_name}.{file_type}"
|
|
108
667
|
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
formatter = "%(message)s"
|
|
668
|
+
worker_kwargs = dict(
|
|
669
|
+
get_queue_listener=True,
|
|
112
670
|
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
handlers.set_formatter(stream_handler, logging_formatter)
|
|
671
|
+
add_stream=add_stream,
|
|
672
|
+
add_timedfile=add_timedfile,
|
|
673
|
+
add_timedfile_with_internal_queue=add_timedfile_with_internal_queue,
|
|
117
674
|
|
|
118
|
-
|
|
119
|
-
|
|
675
|
+
log_queue=log_queue,
|
|
676
|
+
file_path=file_path,
|
|
677
|
+
file_type=file_type,
|
|
678
|
+
logging_level=logging_level,
|
|
679
|
+
formatter_streamhandler=formatter_streamhandler,
|
|
680
|
+
formatter_filehandler=formatter_filehandler,
|
|
681
|
+
formatter_streamhandler_use_nanoseconds=formatter_streamhandler_use_nanoseconds,
|
|
682
|
+
formatter_filehandler_use_nanoseconds=formatter_filehandler_use_nanoseconds,
|
|
683
|
+
filehandler_rotate_at_rollover_time=filehandler_rotate_at_rollover_time,
|
|
684
|
+
filehandler_rotation_date_format=filehandler_rotation_date_format,
|
|
685
|
+
filehandler_rotation_callback_namer_function=filehandler_rotation_callback_namer_function,
|
|
686
|
+
filehandler_rotation_use_default_namer_function=filehandler_rotation_use_default_namer_function,
|
|
687
|
+
when=when,
|
|
688
|
+
interval=interval,
|
|
689
|
+
backupCount=backupCount,
|
|
690
|
+
delay=delay,
|
|
691
|
+
encoding=encoding,
|
|
692
|
+
header=header,
|
|
693
|
+
)
|
|
120
694
|
|
|
121
|
-
|
|
122
|
-
loggers.set_propagation(logger)
|
|
695
|
+
is_ready: multiprocessing.Event = multiprocessing.Event()
|
|
123
696
|
|
|
697
|
+
# Create a new process to run the QueueListener.
|
|
698
|
+
queue_listener_process = multiprocessing.Process(
|
|
699
|
+
target=_queue_listener_multiprocessing_worker,
|
|
700
|
+
name=f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}",
|
|
701
|
+
args=(is_ready,),
|
|
702
|
+
kwargs=worker_kwargs,
|
|
703
|
+
daemon=True
|
|
704
|
+
)
|
|
705
|
+
queue_listener_process.start()
|
|
124
706
|
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
the file "test.log" and assign a date to it: test.log.2021_11_25
|
|
157
|
-
The log filename of 28.11.2021 will be called "test.log" again.
|
|
158
|
-
:param interval: int, Interval to rotate the log file. Default is 1.
|
|
159
|
-
If 'when="midnight"' and 'interval=1', then the log file will be rotated every midnight.
|
|
160
|
-
If 'when="midnight"' and 'interval=2', then the log file will be rotated every 2nd midnights.
|
|
161
|
-
:param delay: bool, If set to True, the log file will be created only if there's something to write.
|
|
162
|
-
:param encoding: string, Encoding to use for the log file. Default is None.
|
|
707
|
+
# Wait until the QueueListener is loaded and ready.
|
|
708
|
+
is_ready.wait()
|
|
709
|
+
|
|
710
|
+
return queue_listener_process
|
|
711
|
+
|
|
712
|
+
|
|
713
|
+
def _queue_listener_multiprocessing_worker(
|
|
714
|
+
is_ready: multiprocessing.Event,
|
|
715
|
+
**kwargs
|
|
716
|
+
):
|
|
717
|
+
network_logger_queue_listener = create_logger(**kwargs)
|
|
718
|
+
is_ready.set() # Signal that the logger is loaded and ready.
|
|
719
|
+
|
|
720
|
+
try:
|
|
721
|
+
while True:
|
|
722
|
+
time.sleep(1) # keep the process alive
|
|
723
|
+
except KeyboardInterrupt:
|
|
724
|
+
pass
|
|
725
|
+
finally:
|
|
726
|
+
network_logger_queue_listener.stop()
|
|
727
|
+
|
|
728
|
+
|
|
729
|
+
def get_listener_processes(
|
|
730
|
+
logger_name: str = None
|
|
731
|
+
) -> list:
|
|
732
|
+
"""
|
|
733
|
+
Function to get the list of QueueListener processes.
|
|
734
|
+
:param logger_name: Name of the logger to filter the listener processes.
|
|
735
|
+
If None, all listener processes will be returned.
|
|
736
|
+
If provided logger_name, only the listener processes for that logger will be returned.
|
|
737
|
+
:return: List of QueueListener multiprocessing processes.
|
|
163
738
|
"""
|
|
164
739
|
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
740
|
+
listener_processes: list = []
|
|
741
|
+
for process in multiprocessing.active_children():
|
|
742
|
+
# If logger_name is provided, filter the processes by logger_name.
|
|
743
|
+
if logger_name and process.name == f"{QUEUE_LISTENER_PROCESS_NAME_PREFIX}{logger_name}":
|
|
744
|
+
listener_processes.append(process)
|
|
745
|
+
if not logger_name and process.name.startswith(QUEUE_LISTENER_PROCESS_NAME_PREFIX):
|
|
746
|
+
listener_processes.append(process)
|
|
168
747
|
|
|
169
|
-
|
|
170
|
-
log_file_path = f'{directory_path}{os.sep}{file_name_no_extension}{file_extension}'
|
|
748
|
+
return listener_processes
|
|
171
749
|
|
|
172
|
-
# Setting the TimedRotatingFileHandler, without adding it to the logger.
|
|
173
|
-
# It will be added to the QueueListener, which will use the TimedRotatingFileHandler to write logs.
|
|
174
|
-
# This is needed since there's a bug in TimedRotatingFileHandler, which won't let it be used with
|
|
175
|
-
# threads the same way it would be used for multiprocess.
|
|
176
750
|
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
751
|
+
def get_datetime_format_string_from_logger_file_handlers(logger: logging.Logger) -> list:
|
|
752
|
+
"""
|
|
753
|
+
Function to get datetime format string from the logger's file handlers.
|
|
754
|
+
This is useful when you want to know the datetime format string that is used on file rotation.
|
|
755
|
+
:param logger: Logger to get the datetime format string from.
|
|
756
|
+
:return: List of datetime format strings.
|
|
757
|
+
"""
|
|
182
758
|
|
|
183
|
-
|
|
184
|
-
# Create file formatter based on extension
|
|
185
|
-
if file_extension == ".txt":
|
|
186
|
-
formatter = formatters.DEFAULT_FORMATTER_TXT_FILE
|
|
187
|
-
elif file_extension == ".csv":
|
|
188
|
-
formatter = formatters.DEFAULT_FORMATTER_CSV_FILE
|
|
189
|
-
elif file_extension == ".json":
|
|
190
|
-
formatter = "%(message)s"
|
|
759
|
+
datetime_format_strings = []
|
|
191
760
|
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
761
|
+
for handler in logger.handlers:
|
|
762
|
+
if isinstance(handler, logging.FileHandler):
|
|
763
|
+
date_time_format_string = handlers.extract_datetime_format_from_file_handler(handler)
|
|
764
|
+
if date_time_format_string:
|
|
765
|
+
datetime_format_strings.append(date_time_format_string)
|
|
195
766
|
|
|
196
|
-
|
|
197
|
-
if formatter:
|
|
198
|
-
# Convert string to Formatter object. Moved to newer styling of python 3: style='{'
|
|
199
|
-
logging_formatter = formatters.get_logging_formatter_from_string(
|
|
200
|
-
formatter, disable_duplicate_ms=disable_duplicate_ms)
|
|
201
|
-
# Setting the formatter in file handler.
|
|
202
|
-
handlers.set_formatter(file_handler, logging_formatter)
|
|
767
|
+
return datetime_format_strings
|
|
203
768
|
|
|
204
|
-
# This function will change the suffix behavior of the rotated file name.
|
|
205
|
-
handlers.change_rotated_filename(file_handler, file_extension)
|
|
206
769
|
|
|
207
|
-
|
|
208
|
-
|
|
770
|
+
def is_logger_exists(logger_name: str) -> bool:
|
|
771
|
+
"""
|
|
772
|
+
Function to check if the logger exists.
|
|
773
|
+
:param logger_name: Name of the logger.
|
|
774
|
+
:return: True if the logger exists, False if it doesn't.
|
|
775
|
+
"""
|
|
209
776
|
|
|
210
|
-
|
|
211
|
-
loggers.add_handler(logger, queue_handler)
|
|
777
|
+
return loggers.is_logger_exists(logger_name)
|
|
212
778
|
|
|
213
|
-
# Disable propagation from the 'root' logger, so we will not see the messages twice.
|
|
214
|
-
loggers.set_propagation(logger)
|
|
215
779
|
|
|
780
|
+
def find_the_parent_logger_with_stream_handler(logger: logging.Logger) -> logging.Logger | None:
|
|
781
|
+
"""
|
|
782
|
+
Function to find the parent logger with StreamHandler.
|
|
783
|
+
Example:
|
|
784
|
+
logger_name = "parent.child.grandchild"
|
|
785
|
+
'parent' logger has StreamHandler, but 'child' and 'grandchild' don't.
|
|
786
|
+
This function will return the 'parent' logger, since both 'child' and 'grandchild' will inherit the
|
|
787
|
+
StreamHandler from the 'parent' logger.
|
|
216
788
|
|
|
217
|
-
|
|
789
|
+
:param logger: Logger to find the parent logger with StreamHandler.
|
|
790
|
+
:return: Parent logger with StreamHandler or None if the logger doesn't have StreamHandler.
|
|
218
791
|
"""
|
|
219
|
-
Function to start QueueListener, which will put the logs from FileHandler to the Queue.
|
|
220
|
-
QueueHandler will get the logs from the Queue and put them to the file that was set in the FileHandler.
|
|
221
792
|
|
|
222
|
-
|
|
223
|
-
|
|
793
|
+
# Start with current logger to see if it has a stream handler.
|
|
794
|
+
current_logger = logger
|
|
795
|
+
found: bool = False
|
|
796
|
+
while current_logger and not current_logger.handlers:
|
|
797
|
+
for handler in current_logger.handlers:
|
|
798
|
+
if isinstance(handler, logging.StreamHandler):
|
|
799
|
+
found = True
|
|
800
|
+
break
|
|
801
|
+
|
|
802
|
+
if not found:
|
|
803
|
+
# If the current logger doesn't have the stream handler, let's move to the parent.
|
|
804
|
+
current_logger = current_logger.parent
|
|
805
|
+
|
|
806
|
+
# If none of the parent loggers have the stream handler, break the loop.
|
|
807
|
+
if current_logger is None:
|
|
808
|
+
break
|
|
809
|
+
|
|
810
|
+
return current_logger
|
|
811
|
+
|
|
812
|
+
|
|
+@contextlib.contextmanager
+def _temporary_change_logger_stream_handler_color(logger: logging.Logger, color: str):
     """
+    THIS IS ONLY FOR REFERENCE.
+    Better use 'temporary_change_logger_stream_record_color', since it is thread-safe.
+    If several threads use this logger, there could be a problem, since unwanted messages
+    could be colored with the color of another thread.
+
+    Context manager to temporarily change the color of the logger's StreamHandler formatter.

-
-
-
-
-
-    handlers.start_queue_listener_for_file_handler(file_handler, queue_object)
+    Example:
+        with _temporary_change_logger_stream_handler_color(logger, color):
+            # Do something with the temporary color.
+            pass
+    """

-
+    # Find the current or the topmost logger's StreamHandler.
+    # It could be a child logger that inherits its handlers from the parent.
+    logger_with_handlers = find_the_parent_logger_with_stream_handler(logger)

+    found_stream_handler = None
+    for handler in logger_with_handlers.handlers:
+        if isinstance(handler, logging.StreamHandler):
+            found_stream_handler = handler
+            break

-
+    # Save the original formatter.
+    original_formatter = found_stream_handler.formatter
+    original_formatter_string = handlers.get_formatter_string(found_stream_handler)
+
+    # Create a colored formatter.
+    color_formatter = logging.Formatter(
+        ansi_escape_codes.get_colors_basic_dict(color) + original_formatter_string +
+        ansi_escape_codes.ColorsBasic.END)
+
+    # thread_id = threading.get_ident()
+    # color_filter = filters.ThreadColorLogFilter(color, thread_id)
+    # found_stream_handler.addFilter(color_filter)
+    try:
+        found_stream_handler.setFormatter(color_formatter)
+        yield
+    finally:
+        found_stream_handler.setFormatter(original_formatter)
+        # found_stream_handler.removeFilter(color_filter)
+
+
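
Swapping the formatter mutates the shared handler, which is exactly why this version is not thread-safe: any thread that logs while the context is open picks up the temporary color. A self-contained illustration of the same formatter-swapping idea, with ANSI codes hard-coded in place of the package's `ansi_escape_codes` helpers and the private `Formatter._fmt` attribute standing in for `handlers.get_formatter_string` (an assumption about what that helper reads):

```python
import contextlib
import logging

RED = "\033[31m"
END = "\033[0m"

@contextlib.contextmanager
def temp_red_formatter(handler: logging.StreamHandler):
    original = handler.formatter
    # '_fmt' is private API; used here only for the sketch.
    format_string = original._fmt if original else "%(message)s"
    handler.setFormatter(logging.Formatter(RED + format_string + END))
    try:
        yield
    finally:
        handler.setFormatter(original)

logger = logging.getLogger("color_demo")
stream_handler = logging.StreamHandler()
logger.addHandler(stream_handler)

with temp_red_formatter(stream_handler):
    logger.warning("red")  # any other thread logging right now is also red
logger.warning("back to default")
```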
+@contextlib.contextmanager
+def temporary_change_logger_stream_record_color(logger: logging.Logger, color: str):
     """
-
+    This function will temporarily change the color of the logger's StreamHandler record message.
+
+    Example:
+        with temporary_change_logger_stream_record_color(logger, "red"):
+            # Do something with the temporary color.
+            logger.error("This message will be colored red.")
     """

-    #
-    # logger
-
-    # logger.handlers.clear()
-    # # Set the logger level to 'NOTSET'.
-    # logger.setLevel(logging.NOTSET)
-    # # Disable propagation from the 'root' logger, so we will not see the messages twice.
-    # loggers.set_propagation(logger)
+    # Find the current or the topmost logger's StreamHandler.
+    # It could be a child logger that inherits its handlers from the parent.
+    logger_with_handlers = find_the_parent_logger_with_stream_handler(logger)

-
-
+    found_stream_handler = None
+    for handler in logger_with_handlers.handlers:
+        if isinstance(handler, logging.StreamHandler):
+            found_stream_handler = handler
+            break
+
+    # Save the original state of the handler.
+    # original_filters = found_stream_handler.filters.copy()  # To restore the original filters.
+
+    # Create a thread-specific color filter.
+    thread_id = threading.get_ident()
+    color_filter = filters.ThreadColorLogFilter(color, thread_id)
+
+    # Add the filter to the handler.
+    found_stream_handler.addFilter(color_filter)
+
+    try:
+        yield  # Do the logging within the context.
+    finally:
+        # Restore the original filters, ensuring thread safety.
+        found_stream_handler.removeFilter(color_filter)
+
+
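
`filters.ThreadColorLogFilter` comes from elsewhere in the package and is not shown in this diff. A plausible reconstruction wraps the record message in ANSI codes only for the thread that installed the filter, which is what makes this context manager thread-safe:

```python
import logging
import threading

END = "\033[0m"

class ThreadColorLogFilter(logging.Filter):
    """Sketch of a thread-scoped color filter; the real class may differ."""

    def __init__(self, color_code: str, thread_id: int):
        super().__init__()
        self.color_code = color_code
        self.thread_id = thread_id

    def filter(self, record: logging.LogRecord) -> bool:
        # record.thread is the ident of the thread that emitted the record.
        if record.thread == self.thread_id:
            record.msg = f"{self.color_code}{record.msg}{END}"
        return True  # never drop the record, only decorate it

logger = logging.getLogger("filter_demo")
handler = logging.StreamHandler()
logger.addHandler(handler)

color_filter = ThreadColorLogFilter("\033[31m", threading.get_ident())
handler.addFilter(color_filter)
logger.error("red in this thread only")
handler.removeFilter(color_filter)
```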
+class CsvLogger:
+    def __init__(
+            self,
+            logger_name: str,
+            directory_path: str = None,
+            custom_header: str = None,
+            log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
+            add_queue_handler_start_listener_multiprocessing: bool = False,
+            add_queue_handler_no_listener_multiprocessing: bool = False
+    ):
+        """
+        Initialize the CsvLogger object.
+
+        :param logger_name: Name of the logger.
+        :param directory_path: Directory path where the log file will be saved.
+            You can leave it as None, but if the logger doesn't exist, you will get an exception.
+        :param custom_header: Custom header to write to the log file.
+            If None, the default header will be used by the ExceptionCsvLogger subclass:
+            "timestamp,exception", since that is what is written to the log file.
+            If you want to add more columns to the csv file, you can provide a custom header:
+            "custom1,custom2,custom3".
+            These will be added to the default header as:
+            "timestamp,custom1,custom2,custom3,exception".
+        :param log_queue: Queue to use for the logger, needed for the queue handler/listener.
+
+        :param add_queue_handler_start_listener_multiprocessing: bool, whether to add a queue handler that will use
+            the 'log_queue' and start the queue listener with the same 'log_queue' for multiprocessing.
+        :param add_queue_handler_no_listener_multiprocessing: bool, whether to add a queue handler that will use
+            the 'log_queue' but will not start the queue listener for multiprocessing. This is useful when you
+            already started the queue listener and want to add more handlers to the logger without
+            starting a new listener.
+
+        If you don't set any of 'add_queue_handler_start_listener_multiprocessing' or
+        'add_queue_handler_no_listener_multiprocessing', the logger will be created without a queue handler.
+        """
+
+        if add_queue_handler_no_listener_multiprocessing and add_queue_handler_start_listener_multiprocessing:
+            raise ValueError(
+                "You can't set both 'add_queue_handler_start_listener_multiprocessing' and "
+                "'add_queue_handler_no_listener_multiprocessing' to True."
+            )
+
+        self.header = custom_header
+
+        if is_logger_exists(logger_name):
+            self.logger = get_logger_with_level(logger_name)
+        else:
+            if directory_path is None:
+                raise ValueError("You need to provide 'directory_path' if the logger doesn't exist.")
+
+            if add_queue_handler_start_listener_multiprocessing:
+                if not log_queue:
+                    raise ValueError(
+                        "You need to provide 'log_queue' if "
+                        "'add_queue_handler_start_listener_multiprocessing' is set to True.")
+
+                # Create a logger with a queue handler that starts a listener for multiprocessing.
+                self.logger = create_logger(
+                    logger_name=logger_name,
+                    start_queue_listener_multiprocess_add_queue_handler=True,
+                    log_queue=log_queue,
+                    directory_path=directory_path,
+                    add_timedfile=True,
+                    formatter_filehandler='MESSAGE',
+                    file_type='csv',
+                    header=self.header
+                )
+            elif add_queue_handler_no_listener_multiprocessing:
+                if not log_queue:
+                    raise ValueError(
+                        "You need to provide 'log_queue' if "
+                        "'add_queue_handler_no_listener_multiprocessing' is set to True.")
+
+                # Create a logger with a queue handler that does not start a listener for multiprocessing.
+                self.logger = create_logger(
+                    logger_name=logger_name,
+                    add_queue_handler=True,
+                    log_queue=log_queue
+                )
+            else:
+                # Neither queue-handler option was requested, create a plain file logger.
+                self.logger = create_logger(
+                    logger_name=logger_name,
+                    directory_path=directory_path,
+                    file_type="csv",
+                    add_timedfile=True,
+                    formatter_filehandler='MESSAGE',
+                    header=self.header)
+
+    def write(
+            self,
+            row_of_cols: list
+    ):
+        """
+        Write a row of columns to the log file.
+
+        :param row_of_cols: List of columns to write to the csv log file.
+        """
+
+        output_csv_line: str = csvs.escape_csv_line_to_string(row_of_cols)
+
+        # If the number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header',
+        # raise an exception.
+        if (csvs.get_number_of_cells_in_string_line(output_csv_line) !=
+                csvs.get_number_of_cells_in_string_line(self.header)):
+            raise ValueError(
+                "Number of cells in the 'output_csv_line' doesn't match the number of cells in the 'header'.")
+
+        self.logger.info(output_csv_line)
+
+    def get_logger(self):
+        return self.logger
+
+
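
A minimal usage sketch for the single-process case (logger name, directory path, and column names are placeholders; a row must contain exactly as many cells as the header):

```python
csv_logger = CsvLogger(
    logger_name="requests_csv",
    directory_path="/var/log/myapp",
    custom_header="timestamp,endpoint,status,duration_ms",
)

# Four cells, matching the four header columns.
csv_logger.write(["2024-01-01 12:00:00", "/api/items", "200", "35"])
```

For multiprocessing, the intended split appears to be: the parent process constructs the instance with `add_queue_handler_start_listener_multiprocessing=True` and a shared `multiprocessing.Queue`, while workers pass the same queue with `add_queue_handler_no_listener_multiprocessing=True`, so that only the single listener writes the file.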
+class ExceptionCsvLogger(CsvLogger):
+    def __init__(
+            self,
+            logger_name: str,
+            directory_path: str = None,
+            custom_header: str = None,
+            log_queue: Union[queue.Queue, multiprocessing.Queue] = None,
+            add_queue_handler_start_listener_multiprocessing: bool = False,
+            add_queue_handler_no_listener_multiprocessing: bool = False
+    ):
+        """
+        Initialize the ExceptionCsvLogger object.
+        The 'custom_header' is wrapped as "timestamp,<custom_header>,exception".
+        """
+
+        if custom_header:
+            custom_header = f"timestamp,{custom_header},exception"
+        else:
+            custom_header = "timestamp,exception"
+
+        super().__init__(
+            logger_name=logger_name,
+            directory_path=directory_path,
+            custom_header=custom_header,
+            log_queue=log_queue,
+            add_queue_handler_start_listener_multiprocessing=add_queue_handler_start_listener_multiprocessing,
+            add_queue_handler_no_listener_multiprocessing=add_queue_handler_no_listener_multiprocessing
+        )
+
+    def write(
+            self,
+            message: Union[str, Exception] = None,
+            custom_csv_string: str = None,
+            custom_exception_attribute: str = None,
+            custom_exception_attribute_placement: Literal['before', 'after'] = 'before',
+            stdout: bool = True
+    ):
+        """
+        Write the message to the log file.
+
+        :param message: The message to write to the log file.
+            If None, the message will be retrieved from the current traceback frame.
+        :param custom_csv_string: Custom CSV string to add between the timestamp and the exception.
+            Without the 'custom_csv_string', the csv line is written as "timestamp,exception", matching
+            the default header.
+            If you add a 'custom_csv_string', the csv line will be written as "timestamp,custom_csv_string,exception".
+            This means that you need to provide the 'custom_header' during the initialization of the object.
+            Of course, you can use as many commas as you need in the 'custom_csv_string': "custom1,custom2,custom3".
+            This needs to be mirrored in the 'custom_header' as well: "custom1,custom2,custom3".
+        :param custom_exception_attribute: If the 'message' is an Exception, you can provide a custom attribute
+            name to extract from the Exception object and add it to the exception message.
+            For example, if the Exception has an attribute 'engine_name', you can provide it here
+            and the exception message will be appended with the value of that attribute.
+        :param custom_exception_attribute_placement: 'before' or 'after', where to place
+            the custom exception attribute value in the exception message.
+        :param stdout: If set to True, the exception will be printed to the console.
+        """
+
+        if message is None or isinstance(message, Exception):
+            # 'getattr' with a None attribute name raises TypeError, so only look it up when provided.
+            custom_attribute: str | None = None
+            if custom_exception_attribute:
+                custom_attribute = getattr(message, custom_exception_attribute, None)
+            traceback_string: str = tracebacks.get_as_string()
+            if custom_attribute:
+                if custom_exception_attribute_placement == 'before':
+                    message = f"{custom_exception_attribute}: [{custom_attribute}] | {traceback_string}"
+                else:
+                    message = f"{traceback_string} | {custom_exception_attribute}: [{custom_attribute}]"
+            else:
+                message = traceback_string
+
+        if custom_csv_string:
+            row_of_cols: list = [datetime.datetime.now(), custom_csv_string, message]
+        else:
+            row_of_cols: list = [datetime.datetime.now(), message]
+
+        super().write(row_of_cols)
+
+        if stdout:
+            print_api.print_api('', error_type=True, color="red", traceback_string=True)
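
And a usage sketch for `ExceptionCsvLogger`, which fills in the timestamp and the traceback itself (names and paths are placeholders):

```python
error_logger = ExceptionCsvLogger(
    logger_name="errors_csv",
    directory_path="/var/log/myapp",
    custom_header="component",  # stored as "timestamp,component,exception"
)

try:
    1 / 0
except ZeroDivisionError as error:
    # 'custom_csv_string' must mirror the extra header column(s).
    error_logger.write(error, custom_csv_string="billing")
```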