pythonLogs 5.0.2__cp313-cp313-macosx_15_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pythonLogs/.env.example +32 -0
- pythonLogs/__init__.py +91 -0
- pythonLogs/basic_log.py +68 -0
- pythonLogs/constants.py +56 -0
- pythonLogs/factory.py +446 -0
- pythonLogs/log_utils.py +359 -0
- pythonLogs/memory_utils.py +182 -0
- pythonLogs/settings.py +56 -0
- pythonLogs/size_rotating.py +137 -0
- pythonLogs/thread_safety.py +156 -0
- pythonLogs/timed_rotating.py +126 -0
- pythonlogs-5.0.2.dist-info/METADATA +578 -0
- pythonlogs-5.0.2.dist-info/RECORD +15 -0
- pythonlogs-5.0.2.dist-info/WHEEL +4 -0
- pythonlogs-5.0.2.dist-info/licenses/LICENSE +21 -0
pythonLogs/log_utils.py
ADDED
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
import errno
|
|
2
|
+
import gzip
|
|
3
|
+
import logging.handlers
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import sys
|
|
7
|
+
import threading
|
|
8
|
+
import time
|
|
9
|
+
from datetime import datetime, timedelta, timezone as dttz
|
|
10
|
+
from functools import lru_cache
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Callable, Optional, Set
|
|
13
|
+
from zoneinfo import ZoneInfo
|
|
14
|
+
from pythonLogs.constants import DEFAULT_FILE_MODE, LEVEL_MAP
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Global cache for checked directories with thread safety and size limits
# All writes happen under _directory_lock; lock-free reads are safe because
# entries are only added after a directory has been successfully validated.
_checked_directories: Set[str] = set()
_directory_lock = threading.Lock()
_max_cached_directories = 500  # Limit cache size to prevent unbounded growth
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_stream_handler(
    level: int,
    formatter: logging.Formatter,
) -> logging.StreamHandler:
    """Build a console StreamHandler configured with the given level and formatter."""
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(formatter)
    return handler
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_logger_and_formatter(
    name: str,
    datefmt: str,
    show_location: bool,
    timezone_: str,
) -> tuple[logging.Logger, logging.Formatter]:
    """Return a named logger (with stale handlers removed) and a timezone-aware formatter.

    Args:
        name: Logger name; embedded in the log format when non-empty.
        datefmt: strftime-style date format passed to the formatter.
        show_location: When True, include file/function/line in the format.
        timezone_: Timezone name ("UTC", "localtime", or an IANA zone).

    Returns:
        Tuple of (logger, formatter) ready to be wired together by the caller.
    """
    logger = logging.getLogger(name)

    # Delegate handler teardown to the centralized helper defined in this
    # module so cleanup semantics stay identical everywhere (the original
    # inlined a duplicate of cleanup_logger_handlers' close/remove loop).
    cleanup_logger_handlers(logger)

    formatt = get_format(show_location, name, timezone_)
    formatter = logging.Formatter(formatt, datefmt=datefmt)
    # Route asctime conversion through the configured timezone.
    formatter.converter = get_timezone_function(timezone_)
    return logger, formatter
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def check_filename_instance(filenames: list | tuple) -> None:
|
|
58
|
+
if not isinstance(filenames, (list, tuple)):
|
|
59
|
+
err_msg = f"Unable to parse filenames. Filename instance is not list or tuple. | {filenames}"
|
|
60
|
+
write_stderr(err_msg)
|
|
61
|
+
raise TypeError(err_msg)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def check_directory_permissions(directory_path: str) -> None:
    """Ensure *directory_path* exists and is writable, creating it if missing.

    Successful checks are recorded in a bounded, thread-safe module-level set
    so repeated calls for the same directory are near free.

    Raises:
        PermissionError: If the directory is not accessible or cannot be created.
    """
    # Lock-free fast path: entries are only ever added after validation,
    # so a positive hit is always safe to trust.
    if directory_path in _checked_directories:
        return

    with _directory_lock:
        # Double-checked locking: another thread may have validated it
        # between the fast path and acquiring the lock.
        if directory_path in _checked_directories:
            return

        path_obj = Path(directory_path)

        if path_obj.exists():
            if not os.access(directory_path, os.W_OK | os.X_OK):
                err_msg = f"Unable to access directory | {directory_path}"
                write_stderr(err_msg)
                raise PermissionError(err_msg)
        else:
            try:
                path_obj.mkdir(mode=DEFAULT_FILE_MODE, parents=True, exist_ok=True)
            except PermissionError as e:
                err_msg = f"Unable to create directory | {directory_path}"
                write_stderr(f"{err_msg} | {repr(e)}")
                # FIX: chain the original exception so the root cause is
                # preserved as __cause__ (the original raise discarded it).
                raise PermissionError(err_msg) from e

        # Bounded cache: evict an arbitrary entry once the limit is hit
        # (set.pop removes an unspecified element).
        if len(_checked_directories) >= _max_cached_directories:
            _checked_directories.pop()
        _checked_directories.add(directory_path)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def remove_old_logs(logs_dir: str, days_to_keep: int) -> None:
    """Delete gzipped log archives in *logs_dir* older than *days_to_keep* days.

    A non-positive *days_to_keep* disables deletion. Per-file failures are
    reported to stderr without aborting the scan.
    """
    if days_to_keep <= 0:
        return

    oldest_allowed = (datetime.now() - timedelta(days=days_to_keep)).timestamp()

    try:
        for gz_file in Path(logs_dir).glob("*.gz"):
            try:
                if gz_file.stat().st_mtime < oldest_allowed:
                    gz_file.unlink()
            except (OSError, IOError) as e:
                write_stderr(f"Unable to delete old log | {gz_file} | {repr(e)}")
    except OSError as e:
        write_stderr(f"Unable to scan directory for old logs | {logs_dir} | {repr(e)}")
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def delete_file(path: str) -> bool:
    """Remove *path* — regular file, directory tree, or special file.

    Returns:
        True when the path was successfully removed.

    Raises:
        FileNotFoundError: If the path does not exist.
        OSError: If removal fails (also reported to stderr).
    """
    target = Path(path)

    try:
        if target.is_file():
            target.unlink()
        elif target.is_dir():
            shutil.rmtree(target)
        elif target.exists():
            # Non-regular entries (FIFOs, sockets, ...) are unlinked directly.
            target.unlink()
        else:
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path)
    except OSError as e:
        write_stderr(repr(e))
        raise e
    return True
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def is_older_than_x_days(path: str, days: int) -> bool:
    """Check if a file or directory is older than the specified number of days.

    With days == 0 the cutoff is "now", so any existing path counts as old.

    Raises:
        FileNotFoundError: If *path* does not exist.
        ValueError: If *days* cannot be converted to int (also logged to stderr).
    """
    target = Path(path)

    if not target.exists():
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path)

    try:
        days_int = int(days)
        cutoff_time = datetime.now() if days_int == 0 else datetime.now() - timedelta(days=days_int)
    except ValueError as e:
        write_stderr(repr(e))
        raise e

    modified_at = datetime.fromtimestamp(target.stat().st_mtime)
    return modified_at < cutoff_time
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
# Cache stderr timezone for better performance
|
|
154
|
+
@lru_cache(maxsize=1)
def get_stderr_timezone():
    """Resolve the timezone for stderr timestamps from the LOG_TIMEZONE env var.

    Returns None for "localtime" — or when the zone cannot be loaded — which
    tells callers to fall back to the system local timezone. Cached so the
    environment is only consulted once.
    """
    tz_name = os.getenv("LOG_TIMEZONE", "UTC")
    if tz_name.lower() == "localtime":
        return None
    try:
        return ZoneInfo(tz_name)
    except Exception:
        # Missing tzdata (common on bare Windows hosts) — use local time.
        return None
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def write_stderr(msg: str) -> None:
    """Write *msg* to stderr prefixed with a timezone-aware timestamp."""
    try:
        tz = get_stderr_timezone()
        # tz is None => use the naive local clock; otherwise convert from UTC.
        now = datetime.now() if tz is None else datetime.now(dttz.utc).astimezone(tz)
        stamp = now.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
        sys.stderr.write(f"[{stamp}]:[ERROR]:{msg}\n")
    except (OSError, ValueError, KeyError):
        # Timezone handling failed — emit a plain timestamp rather than lose the message.
        sys.stderr.write(f"[{datetime.now().isoformat()}]:[ERROR]:{msg}\n")
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def get_level(level: str) -> int:
    """Map a level name (case-insensitive) to a logging constant, defaulting to INFO."""
    if isinstance(level, str):
        return LEVEL_MAP.get(level.lower(), logging.INFO)
    # Non-string input: warn and fall back rather than raising.
    write_stderr(f"Unable to get log level. Setting default level to: 'INFO' ({logging.INFO})")
    return logging.INFO
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def get_log_path(directory: str, filename: str) -> str:
    """Join *directory* and *filename* after validating the directory is writable.

    Raises:
        PermissionError: If the directory cannot be written to.
    """
    # Cached, thread-safe existence/permission check (creates dir if missing).
    check_directory_permissions(directory)

    # Validate write access only — the log file itself is created lazily.
    if not os.access(directory, os.W_OK):
        err_message = f"Unable to write to log directory | {directory}"
        write_stderr(err_message)
        raise PermissionError(err_message)

    return str(Path(directory) / filename)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
@lru_cache(maxsize=32)
def get_timezone_offset(timezone_: str) -> str:
    """Return the UTC offset string (e.g. "+0000") for a timezone name, cached.

    "localtime" uses the system zone. Unknown zones also fall back to the
    localtime offset — common on Windows hosts without full tzdata.
    """
    if timezone_.lower() != "localtime":
        try:
            return datetime.now(ZoneInfo(timezone_)).strftime("%z")
        except Exception:
            pass  # fall through to the localtime offset below
    return time.strftime("%z")
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def get_format(show_location: bool, name: str, timezone_: str) -> str:
    """Build the log format string, embedding the cached UTC offset for *timezone_*."""
    logger_part = f"[{name}]:" if name else ""
    location_part = "[%(filename)s:%(funcName)s:%(lineno)d]:" if show_location else ""
    utc_offset = get_timezone_offset(timezone_)
    return f"[%(asctime)s.%(msecs)03d{utc_offset}]:[%(levelname)s]:{logger_part}{location_part}%(message)s"
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def gzip_file_with_sufix(file_path: str, sufix: str) -> str | None:
|
|
237
|
+
"""gzip file with improved error handling and performance"""
|
|
238
|
+
path_obj = Path(file_path)
|
|
239
|
+
|
|
240
|
+
if not path_obj.is_file():
|
|
241
|
+
return None
|
|
242
|
+
|
|
243
|
+
# Use pathlib for cleaner path operations
|
|
244
|
+
renamed_dst = path_obj.with_name(f"{path_obj.stem}_{sufix}{path_obj.suffix}.gz")
|
|
245
|
+
|
|
246
|
+
# Windows-specific retry mechanism for file locking issues
|
|
247
|
+
max_retries = 3 if sys.platform == "win32" else 1
|
|
248
|
+
retry_delay = 0.1 # 100ms delay between retries
|
|
249
|
+
|
|
250
|
+
for attempt in range(max_retries):
|
|
251
|
+
try:
|
|
252
|
+
with open(file_path, "rb") as fin:
|
|
253
|
+
with gzip.open(renamed_dst, "wb", compresslevel=6) as fout: # Balanced compression
|
|
254
|
+
shutil.copyfileobj(fin, fout, length=64 * 1024) # type: ignore # 64KB chunks for better performance
|
|
255
|
+
break # Success, exit retry loop
|
|
256
|
+
except PermissionError as e:
|
|
257
|
+
# Windows file locking issue - retry with delay
|
|
258
|
+
if attempt < max_retries - 1 and sys.platform == "win32":
|
|
259
|
+
time.sleep(retry_delay)
|
|
260
|
+
continue
|
|
261
|
+
# Final attempt failed or not Windows - treat as regular error
|
|
262
|
+
write_stderr(f"Unable to gzip log file | {file_path} | {repr(e)}")
|
|
263
|
+
raise e
|
|
264
|
+
except (OSError, IOError) as e:
|
|
265
|
+
write_stderr(f"Unable to gzip log file | {file_path} | {repr(e)}")
|
|
266
|
+
raise e
|
|
267
|
+
|
|
268
|
+
try:
|
|
269
|
+
path_obj.unlink() # Use pathlib for deletion
|
|
270
|
+
except OSError as e:
|
|
271
|
+
write_stderr(f"Unable to delete source log file | {file_path} | {repr(e)}")
|
|
272
|
+
raise e
|
|
273
|
+
|
|
274
|
+
return str(renamed_dst)
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
@lru_cache(maxsize=32)
def get_timezone_function(time_zone: str) -> Callable:
    """Return a time-converter suitable for logging.Formatter.converter, cached.

    "utc" maps to time.gmtime, "localtime" to time.localtime, and any other
    name to a closure converting through that zone. Missing tzdata falls back
    to time.localtime.
    """
    zone_key = time_zone.lower()

    if zone_key == "localtime":
        return time.localtime

    if zone_key == "utc":
        try:
            ZoneInfo("UTC")  # probe that tzdata is actually available
            return time.gmtime
        except Exception:
            return time.localtime

    try:
        tz = ZoneInfo(time_zone)  # resolved once; captured by the closure
        return lambda *args: datetime.now(tz=tz).timetuple()
    except Exception:
        return time.localtime
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
# Shared handler cleanup utility
|
|
302
|
+
def cleanup_logger_handlers(logger: Optional[logging.Logger]) -> None:
    """Close and detach every handler on *logger*; a None logger is a no-op.

    Centralized so all logger types share identical teardown semantics;
    close() errors are swallowed to avoid cascading failures during cleanup.

    Args:
        logger: The logger to clean up (can be None).
    """
    if logger is None:
        return

    # Snapshot first: removeHandler mutates logger.handlers mid-iteration.
    for handler in tuple(logger.handlers):
        try:
            handler.close()
        except (OSError, ValueError):
            pass  # best-effort close; detach regardless
        finally:
            logger.removeHandler(handler)
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
# Public API for directory cache management
|
|
327
|
+
# Public API for directory cache management
def set_directory_cache_limit(max_directories: int) -> None:
    """Set the maximum number of directories kept in the validation cache.

    Args:
        max_directories: Maximum number of directories to keep in cache.
    """
    global _max_cached_directories

    with _directory_lock:
        _max_cached_directories = max_directories
        # Evict arbitrary entries until the cache fits the new limit.
        while len(_checked_directories) > max_directories:
            _checked_directories.pop()
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def clear_directory_cache() -> None:
    """Drop every cached directory entry to free memory."""
    with _directory_lock:
        _checked_directories.clear()
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
def get_directory_cache_stats() -> dict:
    """Return a consistent snapshot of the directory cache.

    Returns:
        Dict with cache statistics: current size, configured limit, and the
        cached directory paths.
    """
    with _directory_lock:
        return {
            "cached_directories": len(_checked_directories),
            "max_directories": _max_cached_directories,
            "directories": list(_checked_directories),
        }
|
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import threading
|
|
3
|
+
import weakref
|
|
4
|
+
from functools import lru_cache
|
|
5
|
+
from typing import Any, Dict, Optional, Set
|
|
6
|
+
|
|
7
|
+
from . import log_utils
|
|
8
|
+
from .log_utils import cleanup_logger_handlers
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
# Formatter cache to reduce memory usage for identical formatters
_formatter_cache: Dict[str, logging.Formatter] = {}
_formatter_cache_lock = threading.Lock()
_max_formatters = 50  # Limit formatter cache size


def get_cached_formatter(format_string: str, datefmt: Optional[str] = None) -> logging.Formatter:
    """Return a shared formatter for (format_string, datefmt), creating it once.

    Identical configurations reuse one instance, keeping memory flat when many
    loggers share a format. The cache is bounded (FIFO eviction) and guarded
    by a lock for thread safety.

    Args:
        format_string: The format string for the formatter.
        datefmt: Optional date format string.

    Returns:
        Cached or newly created formatter instance.
    """
    cache_key = f"{format_string}|{datefmt or ''}"

    with _formatter_cache_lock:
        cached = _formatter_cache.get(cache_key)
        if cached is not None:
            return cached

        if len(_formatter_cache) >= _max_formatters:
            # FIFO eviction: dict preserves insertion order, so the first
            # key is the oldest entry.
            _formatter_cache.pop(next(iter(_formatter_cache)))

        fresh = logging.Formatter(fmt=format_string, datefmt=datefmt)
        _formatter_cache[cache_key] = fresh
        return fresh
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def clear_formatter_cache() -> None:
    """Empty the shared formatter cache, releasing the cached instances."""
    with _formatter_cache_lock:
        _formatter_cache.clear()
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# Directory cache utilities with memory management
|
|
58
|
+
# Directory cache utilities with memory management
def set_directory_cache_limit(max_directories: int) -> None:
    """Forward the directory-cache size limit to log_utils (single source of truth).

    Args:
        max_directories: Maximum number of directories to keep in cache.
    """
    log_utils.set_directory_cache_limit(max_directories)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def clear_directory_cache() -> None:
    """Forward directory-cache clearing to log_utils (single source of truth)."""
    log_utils.clear_directory_cache()
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
# Weak reference registry for tracking active loggers without preventing GC
_active_loggers: Set[weakref.ReferenceType] = set()
_weak_ref_lock = threading.Lock()


def register_logger_weakref(logger: logging.Logger) -> None:
    """Track *logger* via a weak reference for memory diagnostics.

    The weakref callback prunes the entry when the logger is garbage
    collected, so registration never extends a logger's lifetime.

    Args:
        logger: Logger to track.
    """

    def _on_collect(dead_ref):
        # Invoked by the GC when the logger dies; remove the stale reference.
        with _weak_ref_lock:
            _active_loggers.discard(dead_ref)

    with _weak_ref_lock:
        _active_loggers.add(weakref.ref(logger, _on_collect))
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def get_active_logger_count() -> int:
    """Count live tracked loggers, pruning dead weak references first.

    Returns:
        Number of active logger instances.
    """
    with _weak_ref_lock:
        # Remove references whose targets have already been collected.
        _active_loggers.difference_update({ref for ref in _active_loggers if ref() is None})
        return len(_active_loggers)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def get_memory_stats() -> Dict[str, Any]:
    """Collect size/limit statistics for every cache in the logging system.

    Returns:
        Dictionary containing memory usage statistics.
    """
    # Imported lazily to avoid a circular import at module load time.
    from . import factory

    # Registry and limits via the factory's public API.
    registry_size = len(factory.LoggerFactory.get_registered_loggers())
    factory_limits = factory.LoggerFactory.get_memory_limits()

    with _formatter_cache_lock:
        formatter_cache_size = len(_formatter_cache)

    directory_stats = log_utils.get_directory_cache_stats()

    return {
        'registry_size': registry_size,
        'formatter_cache_size': formatter_cache_size,
        'directory_cache_size': directory_stats['cached_directories'],
        'active_logger_count': get_active_logger_count(),
        'max_registry_size': factory_limits['max_loggers'],
        'max_formatter_cache': _max_formatters,
        'max_directory_cache': directory_stats['max_directories'],
    }
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
# LRU cache size optimization
|
|
143
|
+
def optimize_lru_cache_sizes() -> None:
    """Optimize LRU cache sizes based on typical usage patterns."""
    # Each step clears a log_utils lru_cache and rebinds the module attribute
    # to the unwrapped function re-wrapped with a smaller maxsize.
    # NOTE(review): rebinding log_utils.<fn> only affects lookups through the
    # module attribute; code that imported the function directly
    # ("from .log_utils import get_timezone_function") keeps the old wrapper —
    # confirm all callers resolve through the module.

    # Clear and recreate timezone function cache with smaller size
    log_utils.get_timezone_function.cache_clear()
    log_utils.get_timezone_function = lru_cache(maxsize=8)(log_utils.get_timezone_function.__wrapped__)

    # Clear and recreate timezone offset cache with smaller size
    log_utils.get_timezone_offset.cache_clear()
    log_utils.get_timezone_offset = lru_cache(maxsize=8)(log_utils.get_timezone_offset.__wrapped__)

    # Clear and recreate stderr timezone cache with smaller size
    log_utils.get_stderr_timezone.cache_clear()
    log_utils.get_stderr_timezone = lru_cache(maxsize=4)(log_utils.get_stderr_timezone.__wrapped__)
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def force_garbage_collection() -> Dict[str, int]:
    """Clear module caches, run the garbage collector, and report statistics.

    Useful for leak hunting in tests or forcing cleanup in long-running
    applications.

    Returns:
        Dictionary with garbage collection statistics.
    """
    import gc

    # Drop our own caches first so their contents become collectable.
    clear_formatter_cache()
    clear_directory_cache()

    collected = gc.collect()

    # NOTE(review): gc.get_count() returns per-generation allocation counts,
    # not cycle data, despite the key name — key kept for compatibility.
    return {
        'objects_collected': collected,
        'garbage_count': len(gc.garbage),
        'reference_cycles': gc.get_count(),
    }
|
pythonLogs/settings.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
from functools import lru_cache
|
|
2
|
+
from typing import Optional
|
|
3
|
+
from dotenv import load_dotenv
|
|
4
|
+
from pydantic import Field
|
|
5
|
+
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
6
|
+
from pythonLogs.constants import (
|
|
7
|
+
DEFAULT_BACKUP_COUNT,
|
|
8
|
+
DEFAULT_DATE_FORMAT,
|
|
9
|
+
DEFAULT_ENCODING,
|
|
10
|
+
DEFAULT_ROTATE_SUFFIX,
|
|
11
|
+
DEFAULT_TIMEZONE,
|
|
12
|
+
LogLevel,
|
|
13
|
+
RotateWhen,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Lazy loading flag for dotenv
# Flipped to True by get_log_settings() after the first load_dotenv() call,
# so the .env file is only read once per process.
_dotenv_loaded = False
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class LogSettings(BaseSettings):
    """If any ENV variable is omitted, it falls back to default values here"""

    # Core logging configuration; env vars are read with the LOG_ prefix
    # (e.g. LOG_LEVEL, LOG_TIMEZONE) per model_config below.
    level: Optional[LogLevel] = Field(default=LogLevel.INFO)
    timezone: Optional[str] = Field(default=DEFAULT_TIMEZONE)
    encoding: Optional[str] = Field(default=DEFAULT_ENCODING)
    appname: Optional[str] = Field(default="app")
    filename: Optional[str] = Field(default="app.log")
    directory: Optional[str] = Field(default="./logs")
    days_to_keep: Optional[int] = Field(default=DEFAULT_BACKUP_COUNT)
    date_format: Optional[str] = Field(default=DEFAULT_DATE_FORMAT)
    stream_handler: Optional[bool] = Field(default=True)
    show_location: Optional[bool] = Field(default=False)
    # Memory management
    max_loggers: Optional[int] = Field(default=100)
    logger_ttl_seconds: Optional[int] = Field(default=3600)

    # SizeRotatingLog
    max_file_size_mb: Optional[int] = Field(default=10)

    # TimedRotatingLog
    rotate_when: Optional[RotateWhen] = Field(default=RotateWhen.MIDNIGHT)
    rotate_at_utc: Optional[bool] = Field(default=True)
    rotate_file_sufix: Optional[str] = Field(default=DEFAULT_ROTATE_SUFFIX)

    # extra="allow" tolerates unrelated LOG_-prefixed variables in the environment
    model_config = SettingsConfigDict(env_prefix="LOG_", env_file=".env", extra="allow")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@lru_cache(maxsize=1)
def get_log_settings() -> LogSettings:
    """Return the process-wide LogSettings instance, built at most once.

    The .env file is loaded on first use only (guarded by the module flag),
    and lru_cache keeps the constructed settings for subsequent calls.
    """
    global _dotenv_loaded
    if not _dotenv_loaded:
        load_dotenv()  # populate os.environ from .env before pydantic reads it
        _dotenv_loaded = True
    return LogSettings()
|