winiutils-2.3.12-py3-none-any.whl
- winiutils/__init__.py +1 -0
- winiutils/dev/__init__.py +1 -0
- winiutils/dev/builders/__init__.py +1 -0
- winiutils/dev/cli/__init__.py +1 -0
- winiutils/dev/cli/subcommands.py +6 -0
- winiutils/dev/configs/__init__.py +1 -0
- winiutils/dev/tests/__init__.py +1 -0
- winiutils/dev/tests/fixtures/__init__.py +1 -0
- winiutils/dev/tests/fixtures/fixtures.py +32 -0
- winiutils/main.py +9 -0
- winiutils/py.typed +0 -0
- winiutils/resources/__init__.py +1 -0
- winiutils/src/__init__.py +4 -0
- winiutils/src/data/__init__.py +8 -0
- winiutils/src/data/dataframe/__init__.py +7 -0
- winiutils/src/data/dataframe/cleaning.py +734 -0
- winiutils/src/data/structures/__init__.py +8 -0
- winiutils/src/data/structures/dicts.py +40 -0
- winiutils/src/data/structures/text/__init__.py +7 -0
- winiutils/src/data/structures/text/string.py +157 -0
- winiutils/src/iterating/__init__.py +8 -0
- winiutils/src/iterating/concurrent/__init__.py +9 -0
- winiutils/src/iterating/concurrent/concurrent.py +301 -0
- winiutils/src/iterating/concurrent/multiprocessing.py +186 -0
- winiutils/src/iterating/concurrent/multithreading.py +132 -0
- winiutils/src/iterating/iterate.py +45 -0
- winiutils/src/oop/__init__.py +7 -0
- winiutils/src/oop/mixins/__init__.py +8 -0
- winiutils/src/oop/mixins/meta.py +217 -0
- winiutils/src/oop/mixins/mixin.py +58 -0
- winiutils/src/security/__init__.py +8 -0
- winiutils/src/security/cryptography.py +100 -0
- winiutils/src/security/keyring.py +167 -0
- winiutils-2.3.12.dist-info/METADATA +283 -0
- winiutils-2.3.12.dist-info/RECORD +38 -0
- winiutils-2.3.12.dist-info/WHEEL +4 -0
- winiutils-2.3.12.dist-info/entry_points.txt +4 -0
- winiutils-2.3.12.dist-info/licenses/LICENSE +21 -0
winiutils/src/iterating/concurrent/multiprocessing.py
@@ -0,0 +1,186 @@
```python
"""Multiprocessing utilities for CPU-bound parallel execution.

This module provides functions for parallel processing using Python's
multiprocessing module. It includes utilities for handling timeouts,
managing process pools, and organizing parallel execution of CPU-bound
functions.

Use multiprocessing for CPU-bound tasks that benefit from true parallelism
by bypassing Python's Global Interpreter Lock (GIL).

Example:
    >>> from winiutils.src.iterating.concurrent.multiprocessing import (
    ...     multiprocess_loop,
    ... )
    >>> def square(x):
    ...     return x * x
    >>> results = multiprocess_loop(
    ...     process_function=square,
    ...     process_args=[[1], [2], [3]],
    ...     process_args_len=3,
    ... )
    >>> results
    [1, 4, 9]
"""

import logging
import multiprocessing
from collections.abc import Callable, Iterable
from functools import wraps
from multiprocessing.pool import Pool
from typing import Any

from winiutils.src.iterating.concurrent.concurrent import concurrent_loop

logger = logging.getLogger(__name__)


def get_spwan_pool(*args: Any, **kwargs: Any) -> Pool:
    """Create a multiprocessing pool with the spawn context.

    Uses the 'spawn' start method which creates a fresh Python interpreter
    process. This is safer than 'fork' as it avoids issues with inherited
    file descriptors and locks.

    Args:
        *args: Positional arguments passed to ``Pool`` constructor.
        **kwargs: Keyword arguments passed to ``Pool`` constructor.

    Returns:
        A multiprocessing Pool configured with the spawn context.

    Example:
        >>> pool = get_spwan_pool(processes=4)
        >>> with pool:
        ...     results = pool.map(square, [1, 2, 3])
    """
    return multiprocessing.get_context("spawn").Pool(*args, **kwargs)


def cancel_on_timeout(seconds: float, message: str) -> Callable[..., Any]:
    """Create a decorator that cancels function execution on timeout.

    Creates a wrapper that executes the decorated function in a separate
    process and terminates it if execution time exceeds the specified
    timeout.

    Args:
        seconds: Maximum execution time in seconds before timeout.
        message: Error message to include in the warning log when timeout
            occurs.

    Returns:
        A decorator function that wraps the target function with timeout
        functionality.

    Raises:
        multiprocessing.TimeoutError: When function execution exceeds the
            timeout.

    Warning:
        Only works with functions that are pickle-able. This means it may
        not work as a decorator on methods or closures. Instead, use it as
        a wrapper function::

            my_func = cancel_on_timeout(
                seconds=2,
                message="Test timeout",
            )(my_func)

    Example:
        >>> def slow_function():
        ...     import time
        ...     time.sleep(10)
        ...     return "done"
        >>> timed_func = cancel_on_timeout(
        ...     seconds=1,
        ...     message="Function took too long",
        ... )(slow_function)
        >>> timed_func()  # Raises TimeoutError after 1 second
    """

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        @wraps(func)
        def wrapper(*args: object, **kwargs: object) -> object:
            spawn_pool = get_spwan_pool(processes=1)
            with spawn_pool as pool:
                async_result = pool.apply_async(func, args, kwargs)
                try:
                    return async_result.get(timeout=seconds)
                except multiprocessing.TimeoutError:
                    logger.warning(
                        "%s -> Execution exceeded %s seconds: %s",
                        func,
                        seconds,
                        message,
                    )
                    raise
                finally:
                    pool.terminate()  # Ensure the worker process is killed
                    pool.join()  # Wait for cleanup

        return wrapper

    return decorator


def multiprocess_loop(
    process_function: Callable[..., Any],
    process_args: Iterable[Iterable[Any]],
    process_args_static: Iterable[Any] | None = None,
    deepcopy_static_args: Iterable[Any] | None = None,
    process_args_len: int = 1,
) -> list[Any]:
    """Execute a function in parallel using multiprocessing Pool.

    Executes the given function with the provided arguments in parallel
    using multiprocessing Pool, which is suitable for CPU-bound tasks.

    Args:
        process_function: The function to execute in parallel. Must be
            pickle-able.
        process_args: Iterable of argument lists for each parallel call.
            Each inner iterable contains the arguments for one function
            call. Example: ``[(1, 2), (3, 4), (5, 6)]``
        process_args_static: Optional constant arguments to append to each
            call. These are shared across all calls without copying.
            Defaults to None.
        deepcopy_static_args: Optional arguments that should be deep-copied
            for each process. Use this for mutable objects that should not
            be shared between processes. Defaults to None.
        process_args_len: Length of ``process_args``. Used for progress bar
            and worker pool sizing. Defaults to 1.

    Returns:
        List of results from the function executions, in the original
        submission order.

    Note:
        - Use multiprocessing for CPU-bound tasks as it bypasses Python's
          GIL by creating separate processes.
        - Multiprocessing is not safe for mutable objects; use
          ``deepcopy_static_args`` for mutable data.
        - If ConnectionErrors occur during debugging, try reducing the
          number of processes.
        - All functions and arguments must be pickle-able.

    Example:
        >>> def add(a, b, c):
        ...     return a + b + c
        >>> results = multiprocess_loop(
        ...     process_function=add,
        ...     process_args=[[1, 2], [3, 4]],
        ...     process_args_static=[10],
        ...     process_args_len=2,
        ... )
        >>> results
        [13, 17]
    """
    return concurrent_loop(
        threading=False,
        process_function=process_function,
        process_args=process_args,
        process_args_static=process_args_static,
        deepcopy_static_args=deepcopy_static_args,
        process_args_len=process_args_len,
    )
```
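Because `get_spwan_pool` uses the spawn start method, anything handed to `cancel_on_timeout` must be pickle-able and the calling module must be import-safe. A minimal usage sketch, assuming nothing beyond the API above (`busy_sleep` is a hypothetical stand-in for any long-running, pickle-able function):

```python
import multiprocessing
import time

from winiutils.src.iterating.concurrent.multiprocessing import cancel_on_timeout


def busy_sleep(seconds: float) -> str:
    """Hypothetical stand-in for a long-running, pickle-able task."""
    time.sleep(seconds)
    return "done"


if __name__ == "__main__":  # spawn re-imports this module, so guard the entry point
    guarded = cancel_on_timeout(seconds=1, message="busy_sleep took too long")(
        busy_sleep
    )
    try:
        guarded(10)
    except multiprocessing.TimeoutError:
        print("worker terminated after 1 second")  # pool was terminated and joined
```

Using it as a plain wrapper rather than a `@decorator` keeps the undecorated function importable by the worker process, which is why the Warning above recommends this form.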
winiutils/src/iterating/concurrent/multithreading.py
@@ -0,0 +1,132 @@
```python
"""Multithreading utilities for I/O-bound parallel execution.

This module provides functions for parallel processing using thread pools.
It includes utilities for handling thread pools, managing futures, and
organizing parallel execution of I/O-bound tasks.

Use multithreading for I/O-bound tasks such as network requests, file
operations, or database queries where threads spend most of their time
waiting for external resources.

Example:
    >>> from winiutils.src.iterating.concurrent.multithreading import (
    ...     multithread_loop,
    ... )
    >>> def fetch_url(url):
    ...     import requests
    ...     return requests.get(url).status_code
    >>> results = multithread_loop(
    ...     process_function=fetch_url,
    ...     process_args=[["https://example.com"], ["https://google.com"]],
    ...     process_args_len=2,
    ... )
"""

from collections.abc import Callable, Generator, Iterable
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from typing import Any

from winiutils.src.iterating.concurrent.concurrent import concurrent_loop


def get_future_results_as_completed(
    futures: Iterable[Future[Any]],
) -> Generator[Any, None, None]:
    """Yield future results as they complete.

    Yields results from futures in the order they complete, not in the
    order they were submitted. This allows processing results as soon as
    they're available.

    Args:
        futures: Iterable of Future objects to get results from.

    Yields:
        The result of each completed future.

    Example:
        >>> with ThreadPoolExecutor() as executor:
        ...     futures = [executor.submit(square, i) for i in range(3)]
        ...     for result in get_future_results_as_completed(futures):
        ...         print(result)
    """
    for future in as_completed(futures):
        yield future.result()


def multithread_loop(
    process_function: Callable[..., Any],
    process_args: Iterable[Iterable[Any]],
    process_args_static: Iterable[Any] | None = None,
    process_args_len: int = 1,
) -> list[Any]:
    """Execute a function in parallel using ThreadPoolExecutor.

    Executes the given function with the provided arguments in parallel
    using ThreadPoolExecutor, which is suitable for I/O-bound tasks.

    Args:
        process_function: The function to execute in parallel.
        process_args: Iterable of argument lists for each parallel call.
            Each inner iterable contains the arguments for one function
            call. Example: ``[["url1"], ["url2"], ["url3"]]``
        process_args_static: Optional constant arguments to append to each
            call. These are shared across all calls. Defaults to None.
        process_args_len: Length of ``process_args``. Used for progress bar
            and worker pool sizing. Defaults to 1.

    Returns:
        List of results from the function executions, in the original
        submission order.

    Note:
        Use ThreadPoolExecutor for I/O-bound tasks (network requests, file
        I/O, database queries). For CPU-bound tasks, use
        ``multiprocess_loop()`` instead.

    Example:
        >>> def download(url, timeout):
        ...     import requests
        ...     return requests.get(url, timeout=timeout).text
        >>> results = multithread_loop(
        ...     process_function=download,
        ...     process_args=[["https://example.com"], ["https://google.com"]],
        ...     process_args_static=[30],  # 30 second timeout for all
        ...     process_args_len=2,
        ... )
    """
    return concurrent_loop(
        threading=True,
        process_function=process_function,
        process_args=process_args,
        process_args_static=process_args_static,
        process_args_len=process_args_len,
    )


def imap_unordered(
    executor: ThreadPoolExecutor,
    func: Callable[..., Any],
    iterable: Iterable[Any],
) -> Generator[Any, None, None]:
    """Apply a function to each item in an iterable in parallel.

    Similar to ``multiprocessing.Pool.imap_unordered()``, this function
    submits all items to the executor and yields results as they complete.

    Args:
        executor: ThreadPoolExecutor to use for parallel execution.
        func: Function to apply to each item in the iterable.
        iterable: Iterable of items to apply the function to.

    Yields:
        Results of applying the function to each item, in completion order
        (not submission order).

    Example:
        >>> with ThreadPoolExecutor(max_workers=4) as executor:
        ...     for result in imap_unordered(executor, square, [1, 2, 3]):
        ...         print(result)
    """
    results = [executor.submit(func, item) for item in iterable]
    yield from get_future_results_as_completed(results)
```
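A self-contained sketch of the completion-order behavior of `imap_unordered` above; `slow_double` is a made-up task standing in for real I/O work:

```python
import random
import time
from concurrent.futures import ThreadPoolExecutor

from winiutils.src.iterating.concurrent.multithreading import imap_unordered


def slow_double(x: int) -> int:
    """Hypothetical I/O-ish task: sleep a random amount, then return."""
    time.sleep(random.uniform(0.0, 0.2))
    return x * 2


with ThreadPoolExecutor(max_workers=4) as executor:
    # Results are yielded as workers finish, so the printed order may
    # differ from the submission order [0, 2, 4, 6, 8].
    for result in imap_unordered(executor, slow_double, range(5)):
        print(result)
```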
winiutils/src/iterating/iterate.py
@@ -0,0 +1,45 @@
```python
"""Iterating utilities for handling iterables.

This module provides utility functions for working with iterables,
including getting the length of an iterable with a default value.
These utilities help with iterable operations and manipulations.
"""

from collections.abc import Iterable
from typing import Any


def get_len_with_default(iterable: Iterable[Any], default: int | None = None) -> int:
    """Get the length of an iterable, falling back to a default value.

    Attempts to get the length of an iterable using ``len()``. If the
    iterable doesn't support ``len()`` (e.g., generators), returns the
    provided default value instead.

    Args:
        iterable: The iterable to get the length of.
        default: Default value to return if the iterable doesn't support
            ``len()``. If None and the iterable doesn't support ``len()``,
            a TypeError is raised.

    Returns:
        The length of the iterable, or the default value if the iterable
        doesn't support ``len()``.

    Raises:
        TypeError: If the iterable doesn't support ``len()`` and no default
            value is provided.

    Example:
        >>> get_len_with_default([1, 2, 3])
        3
        >>> get_len_with_default((x for x in range(10)), default=10)
        10
    """
    try:
        return len(iterable)  # type: ignore[arg-type]
    except TypeError as e:
        if default is None:
            msg = "Can't get length of iterable and no default value provided"
            raise TypeError(msg) from e
        return default
```
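Both paths in one short sketch, useful for instance when computing a `process_args_len` for the loops above from an argument source that may or may not be sized:

```python
from winiutils.src.iterating.iterate import get_len_with_default

print(get_len_with_default([1, 2, 3]))        # 3: lists support len()
print(get_len_with_default(iter("abc"), 99))  # 99: iterators do not

try:
    get_len_with_default(x for x in range(5))  # generator, no default given
except TypeError as err:
    print(err)  # Can't get length of iterable and no default value provided
```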
winiutils/src/oop/mixins/__init__.py
@@ -0,0 +1,8 @@
```python
"""Mixins and metaclasses package.

This package provides metaclasses and mixins for class behavior modification:

Modules:
    meta: Metaclasses for automatic method logging and instrumentation.
    mixin: Mixin classes that provide composable behavior extensions.
"""
```
winiutils/src/oop/mixins/meta.py
@@ -0,0 +1,217 @@
```python
"""Metaclass utilities for class behavior modification and enforcement.

This module provides metaclasses that can be used to modify class behavior
at creation time. These metaclasses can be used individually or combined
to create classes with enhanced capabilities and stricter implementation
requirements.

Example:
    >>> from winiutils.src.oop.mixins.meta import ABCLoggingMeta
    >>> class MyClass(metaclass=ABCLoggingMeta):
    ...     def my_method(self, x):
    ...         return x * 2
    >>> obj = MyClass()
    >>> obj.my_method(5)  # Logs: "MyClass - Calling my_method with ..."
    10
"""

import logging
import time
from abc import ABCMeta
from collections.abc import Callable
from functools import wraps
from typing import Any

from pyrig.src.modules.function import is_func

from winiutils.src.data.structures.text.string import value_to_truncated_string

logger = logging.getLogger(__name__)


class ABCLoggingMeta(ABCMeta):
    """Metaclass that automatically adds logging to class methods.

    Wraps non-magic methods with a logging decorator that tracks method
    calls, arguments, execution time, and return values. Includes rate
    limiting to prevent log flooding.

    This metaclass extends ``ABCMeta``, so classes using it can also define
    abstract methods.

    Attributes:
        Inherits all attributes from ``ABCMeta``.

    Example:
        >>> class Calculator(metaclass=ABCLoggingMeta):
        ...     def add(self, a, b):
        ...         return a + b
        >>> calc = Calculator()
        >>> calc.add(2, 3)  # Logs method call and result
        5

    Note:
        - Magic methods (``__init__``, ``__str__``, etc.) are not logged.
        - Properties are not logged.
        - Logging is rate-limited to once per second per method to prevent
          log flooding.
    """

    def __new__(
        mcs: type["ABCLoggingMeta"],
        name: str,
        bases: tuple[type, ...],
        dct: dict[str, Any],
    ) -> "ABCLoggingMeta":
        """Create a new class with logging-wrapped methods.

        Intercepts class creation to wrap all non-magic methods with logging
        functionality. Handles regular methods, class methods, and static
        methods.

        Args:
            mcs: The metaclass instance.
            name: The name of the class being created.
            bases: The base classes of the class being created.
            dct: The attribute dictionary of the class being created.

        Returns:
            A new class with logging functionality added to its methods.
        """
        # Wrap all callables of the class with a logging wrapper

        for attr_name, attr_value in dct.items():
            if mcs.is_loggable_method(attr_value):
                if isinstance(attr_value, classmethod):
                    wrapped_method = mcs.wrap_with_logging(
                        func=attr_value.__func__, class_name=name, call_times={}
                    )
                    dct[attr_name] = classmethod(wrapped_method)
                elif isinstance(attr_value, staticmethod):
                    wrapped_method = mcs.wrap_with_logging(
                        func=attr_value.__func__, class_name=name, call_times={}
                    )
                    dct[attr_name] = staticmethod(wrapped_method)
                else:
                    dct[attr_name] = mcs.wrap_with_logging(
                        func=attr_value, class_name=name, call_times={}
                    )

        return super().__new__(mcs, name, bases, dct)

    @staticmethod
    def is_loggable_method(method: Callable[..., Any]) -> bool:
        """Determine if a method should have logging applied.

        Checks whether a method is a valid candidate for logging. Methods
        are logged if they are callable, have a name, and are not magic
        methods (those starting with ``__``).

        Args:
            method: The method to check.

        Returns:
            True if the method should be wrapped with logging, False
            otherwise.

        Note:
            Properties are not logged as they are not callable in the
            traditional sense and cause issues with the wrapping mechanism.
        """
        return (
            is_func(method)  # must be a method-like attribute
            and not getattr(method, "__name__", "__").startswith(
                "__"
            )  # must not be a magic method
        )

    @staticmethod
    def wrap_with_logging(
        func: Callable[..., Any],
        class_name: str,
        call_times: dict[str, float],
    ) -> Callable[..., Any]:
        """Wrap a function with logging functionality.

        Creates a wrapper that logs method calls, arguments, execution time,
        and return values. Includes rate limiting to prevent excessive
        logging (once per second per method).

        Args:
            func: The function to wrap with logging.
            class_name: The name of the class containing the function. Used
                in log messages.
            call_times: Dictionary to track when methods were last called.
                Used for rate limiting. This dictionary is mutated by the
                wrapper.

        Returns:
            A wrapped function with logging capabilities.

        Note:
            Arguments and return values are truncated to 20 characters in
            log messages to prevent excessively long log lines.
        """
        time_time = time.time  # Cache the time.time function for performance

        @wraps(func)
        def wrapper(*args: object, **kwargs: object) -> object:
            # call_times as a dictionary to store the call times of the function
            # we only log if the time since the last call is greater than the threshold
            # this is to avoid spamming the logs

            func_name = func.__name__  # ty:ignore[unresolved-attribute]

            threshold = 1

            last_call_time = call_times.get(func_name, 0)

            current_time = time_time()

            do_logging = (current_time - last_call_time) > threshold

            max_log_length = 20

            if do_logging:
                args_str = value_to_truncated_string(
                    value=args, max_length=max_log_length
                )

                kwargs_str = value_to_truncated_string(
                    value=kwargs, max_length=max_log_length
                )

                logger.info(
                    "%s - Calling %s with %s and %s",
                    class_name,
                    func_name,
                    args_str,
                    kwargs_str,
                )

            # Execute the function and return the result

            result = func(*args, **kwargs)

            if do_logging:
                duration = time_time() - current_time

                result_str = value_to_truncated_string(
                    value=result, max_length=max_log_length
                )

                logger.info(
                    "%s - %s finished with %s seconds -> returning %s",
                    class_name,
                    func_name,
                    duration,
                    result_str,
                )

            # save the call time for the next call

            call_times[func_name] = current_time

            return result

        return wrapper
```
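A hypothetical class (not from the package) exercising the three branches of `__new__` above: a regular method, a `classmethod`, and a `staticmethod` all come out wrapped, while the magic `__init__` is left alone:

```python
import logging

from winiutils.src.oop.mixins.meta import ABCLoggingMeta

logging.basicConfig(level=logging.INFO)


class Scaler(metaclass=ABCLoggingMeta):
    """Hypothetical example class for illustration only."""

    def __init__(self, factor: float) -> None:  # magic method: not logged
        self.factor = factor

    def scale(self, x: float) -> float:  # regular method: logged
        return x * self.factor

    @classmethod
    def identity(cls) -> "Scaler":  # classmethod: unwrapped, wrapped, re-wrapped
        return cls(1.0)

    @staticmethod
    def clamp(x: float) -> float:  # staticmethod: same treatment
        return max(0.0, min(1.0, x))


s = Scaler.identity()   # INFO: "Scaler - Calling identity with ..."
print(s.scale(2.5))     # logged call and timing, prints 2.5
print(Scaler.clamp(3))  # logged, prints 1.0
```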
winiutils/src/oop/mixins/mixin.py
@@ -0,0 +1,58 @@
```python
"""Mixin utilities for class composition and behavior extension.

This module provides mixin classes that facilitate class composition through
the mixin pattern. It includes utilities for automatic method logging with
performance tracking.

These utilities help create robust class hierarchies with built-in logging
capabilities without requiring explicit decorator usage.

Example:
    >>> from winiutils.src.oop.mixins.mixin import ABCLoggingMixin
    >>> class MyService(ABCLoggingMixin):
    ...     def process(self, data):
    ...         return data.upper()
    >>> service = MyService()
    >>> service.process("hello")  # Logs method call automatically
    'HELLO'
"""

import logging

from winiutils.src.oop.mixins.meta import ABCLoggingMeta

logger = logging.getLogger(__name__)


class ABCLoggingMixin(metaclass=ABCLoggingMeta):
    """Mixin class that provides automatic method logging.

    This mixin can be used as a base class for any class that needs
    automatic method logging with performance tracking. All non-magic
    methods will be automatically wrapped with logging functionality.

    The logging includes:
    - Method name and class name
    - Arguments passed to the method (truncated)
    - Execution time
    - Return value (truncated)

    Inheriting from this class is equivalent to using ``ABCLoggingMeta``
    as the metaclass, but provides a cleaner inheritance syntax.

    Example:
        >>> class DataProcessor(ABCLoggingMixin):
        ...     def transform(self, data):
        ...         return [x * 2 for x in data]
        >>> processor = DataProcessor()
        >>> processor.transform([1, 2, 3])
        [2, 4, 6]
        # Logs: "DataProcessor - Calling transform with ..."
        # Logs: "DataProcessor - transform finished with 0.001 seconds -> ..."

    Note:
        - Magic methods (``__init__``, ``__str__``, etc.) are not logged.
        - Logging is rate-limited to once per second per method.
        - This class can be combined with abstract methods since it uses
          ``ABCLoggingMeta`` which extends ``ABCMeta``.
    """
```
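Since `ABCLoggingMeta` extends `ABCMeta`, the mixin composes with `@abstractmethod` as the Note says. A sketch with hypothetical class names; this relies on `functools.wraps` carrying the `__isabstractmethod__` flag over to the logging wrapper (it copies it via `__dict__`):

```python
from abc import abstractmethod

from winiutils.src.oop.mixins.mixin import ABCLoggingMixin


class Exporter(ABCLoggingMixin):
    @abstractmethod
    def export(self, rows: list[str]) -> str: ...


class CsvExporter(Exporter):
    def export(self, rows: list[str]) -> str:
        return ",".join(rows)


# Exporter() raises TypeError (abstract method not implemented);
# the concrete subclass instantiates fine and its calls are logged.
print(CsvExporter().export(["a", "b"]))  # prints: a,b
```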