qa-testing-utils 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qa_testing_utils-0.1.0.dist-info/METADATA +15 -0
- qa_testing_utils-0.1.0.dist-info/RECORD +14 -0
- qa_testing_utils-0.1.0.dist-info/WHEEL +4 -0
- utils/__init__.py +3 -0
- utils/exception_utils.py +50 -0
- utils/exceptions.py +12 -0
- utils/file_utils.py +196 -0
- utils/logger.py +140 -0
- utils/matchers.py +371 -0
- utils/object_utils.py +185 -0
- utils/stream_utils.py +24 -0
- utils/string_utils.py +69 -0
- utils/thread_utils.py +13 -0
- utils/tuple_utils.py +49 -0
@@ -0,0 +1,15 @@
|
|
1
|
+
Metadata-Version: 2.4
|
2
|
+
Name: qa-testing-utils
|
3
|
+
Version: 0.1.0
|
4
|
+
Summary: Testing utilities library for QA automation
|
5
|
+
Author-email: Adrian Herscu <adrian.herscu@gmail.com>
|
6
|
+
Requires-Python: >=3.13
|
7
|
+
Requires-Dist: allure-pytest
|
8
|
+
Requires-Dist: more-itertools
|
9
|
+
Requires-Dist: ppretty
|
10
|
+
Requires-Dist: pyfunctional
|
11
|
+
Requires-Dist: pyhamcrest
|
12
|
+
Requires-Dist: returns
|
13
|
+
Description-Content-Type: text/markdown
|
14
|
+
|
15
|
+
# qa-testing-utils
|
@@ -0,0 +1,14 @@
|
|
1
|
+
utils/__init__.py,sha256=evRL7IFTg2oPJ1I9Xh10Iz6x6CmVp7QhpOKjtfcf8oA,85
|
2
|
+
utils/exception_utils.py,sha256=e-AT0jZmtCoNd3esv9U-A8YElu7Nj4qVwJ1aHqyPJyE,1340
|
3
|
+
utils/exceptions.py,sha256=_s7es20G9-ET2HeLqU0yhuDAXpnQQs_ecjBmztz94Pk,441
|
4
|
+
utils/file_utils.py,sha256=UITm-8RbEajIeCKASJVqyK-l0xN29iNwOl5UeZngTKg,6311
|
5
|
+
utils/logger.py,sha256=iuar3asVT4VnK4pqaOTdUetP7vaBsan9-Za_4nDMsCI,4123
|
6
|
+
utils/matchers.py,sha256=7O22-tNiS-j1k8xBHDeAyH5aEEpwWlq_fopL-e-AlRY,12510
|
7
|
+
utils/object_utils.py,sha256=0CtfGPrR-s9d2OHlOwfzUfWH-NLvS7rwelSvM2mS6Rg,5807
|
8
|
+
utils/stream_utils.py,sha256=nbAwL7Nh2jhA6qoTkyAfCX5UNJHjHBNo-4kOGszLqjc,729
|
9
|
+
utils/string_utils.py,sha256=L2hRnwnRciaW5rwY_kmuBRb9zC65VPyAGcYt-HnXt18,2616
|
10
|
+
utils/thread_utils.py,sha256=73oW55OAJNqoZ-6y7B7te07CLLT4y-9sQJ831fcWpUk,293
|
11
|
+
utils/tuple_utils.py,sha256=pIcJntr-PNvaOIP0Pv4sBwO7oIbTVFmGwr9Ic5nJDA0,1851
|
12
|
+
qa_testing_utils-0.1.0.dist-info/METADATA,sha256=L9rw_9O9NlKG7VFuwYiGrq3m3B6WHxFPNq5Q7eL-pGc,410
|
13
|
+
qa_testing_utils-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
14
|
+
qa_testing_utils-0.1.0.dist-info/RECORD,,
|
utils/__init__.py
ADDED
utils/exception_utils.py
ADDED
@@ -0,0 +1,50 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
import functools
|
6
|
+
import logging
|
7
|
+
from typing import Any, Callable
|
8
|
+
|
9
|
+
from returns.maybe import Maybe, Nothing, Some
|
10
|
+
from qa.testing.utils.object_utils import Supplier
|
11
|
+
|
12
|
+
|
13
|
+
def safely[T](supplier: Supplier[T]) -> Maybe[T]:
|
14
|
+
"""
|
15
|
+
Calls a function safely, wrapping its result in Maybe, and swallowing any exceptions.
|
16
|
+
The function should be a no-argument callable::
|
17
|
+
|
18
|
+
safely(lambda: call_something_that_may_fail(params))
|
19
|
+
|
20
|
+
Args:
|
21
|
+
supplier (Supplier[T]): The supplier to be called.
|
22
|
+
|
23
|
+
Returns:
|
24
|
+
Maybe[T]: The result wrapped in Maybe, or Nothing if an exception occurs.
|
25
|
+
"""
|
26
|
+
try:
|
27
|
+
result = supplier()
|
28
|
+
return Some(result)
|
29
|
+
except Exception as e:
|
30
|
+
logging.exception(f"Exception occurred: {e}")
|
31
|
+
return Nothing
|
32
|
+
|
33
|
+
|
34
|
+
def swallow(func: Callable[..., Any]) -> Callable[..., Any]:
    """
    Decorator that swallows any exception raised by the wrapped function.

    When an exception occurs it is logged (via `safely`) and None is
    returned instead.

    Args:
        func (Callable): the function, supplied by the run-time

    Returns:
        Callable: the decorated function
    """
    @functools.wraps(func)
    def swallowing_wrapper(*args: Any, **kwargs: Any) -> Any:
        return safely(lambda: func(*args, **kwargs)).value_or(None)

    return swallowing_wrapper
|
utils/exceptions.py
ADDED
@@ -0,0 +1,12 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
class TestException(Exception):
    """
    Raised by test-infrastructure code.

    Distinguishes legitimate run-time failures (time-out, not found, etc.)
    from unexpected exceptions that indicate programming errors; the former
    may be handled via a retry mechanism, the latter should not.
    """
|
utils/file_utils.py
ADDED
@@ -0,0 +1,196 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
import csv
|
6
|
+
from io import DEFAULT_BUFFER_SIZE, BufferedReader, RawIOBase
|
7
|
+
from lzma import LZMADecompressor
|
8
|
+
from pathlib import Path
|
9
|
+
from tarfile import TarInfo, open
|
10
|
+
from typing import BinaryIO, Iterable, Iterator, Tuple, final, override
|
11
|
+
from zlib import crc32
|
12
|
+
|
13
|
+
from more_itertools import peekable
|
14
|
+
from qa.testing.utils.logger import *
|
15
|
+
from qa.testing.utils.object_utils import *
|
16
|
+
from qa.testing.utils.string_utils import *
|
17
|
+
|
18
|
+
# Working directory captured once at import time (where the process was
# launched from); later chdir() calls do not affect it.
LAUNCHING_DIR = Path.cwd()
|
19
|
+
|
20
|
+
|
21
|
+
@final
class IterableReader(RawIOBase, LoggerMixin, ImmutableMixin):
    """
    Read-only raw I/O stream backed by an iterable of byte chunks,
    enabling streaming-mode consumption.
    """

    def __init__(self, chunks: Iterable[bytes]):
        self._chunks = iter(chunks)
        self._accumulated_buffer = bytearray()

    @override
    def readable(self) -> bool:
        return True

    @override
    def readinto(self, output_buffer: memoryview) -> int:  # type: ignore
        # Pull chunks until enough bytes are buffered to satisfy the
        # request, or the source iterator is exhausted.
        requested = len(output_buffer)
        while len(self._accumulated_buffer) < requested:
            chunk = next(self._chunks, None)
            if chunk is None:
                break
            self.log.debug(f"buffered chunk with length={len(chunk)}")
            self._accumulated_buffer.extend(chunk)

        # Hand over as many buffered bytes as fit into the output buffer.
        consumed_bytes = min(len(self._accumulated_buffer), requested)
        output_buffer[:consumed_bytes] = \
            self._accumulated_buffer[:consumed_bytes]

        # Drop the consumed prefix, shifting the remainder left.
        del self._accumulated_buffer[:consumed_bytes]

        self.log.debug(f"consumed {consumed_bytes} bytes")
        return consumed_bytes

    @staticmethod
    def from_(
            chunks: Iterable[bytes],
            buffer_size: int = DEFAULT_BUFFER_SIZE) -> BinaryIO:
        """
        Wraps a stream of binary chunks in a BufferedReader.

        The caller is responsible for closing it.

        Args:
            chunks (Iterable[bytes]): stream of binary chunks

        Returns:
            io.BufferedReader: buffered reader around stream of binary chunks.
        """
        return BufferedReader(IterableReader(chunks), buffer_size)
|
69
|
+
|
70
|
+
|
71
|
+
# TODO perhaps there should be a writable stream to iterator utility too...
|
72
|
+
|
73
|
+
|
74
|
+
def stream_file(
        file_path: Path,
        chunk_size: int = DEFAULT_BUFFER_SIZE) -> Iterator[bytes]:
    """
    Streams a binary file from disk as an iterator of chunks.

    If the iterator is not fully consumed, the file is closed when the
    iterator is garbage collected.

    Args:
        file_path (Path): path to file
        chunk_size (int, optional): the chunk size. Defaults to 8192.

    Yields:
        Iterator[bytes]: the binary chunks stream
    """
    with file_path.open('rb') as f:
        while (chunk := f.read(chunk_size)) != EMPTY_BYTE_STRING:
            yield chunk
|
92
|
+
|
93
|
+
|
94
|
+
def read_lines(
        byte_stream: Iterable[bytes],
        encoding: str = UTF_8,
        eol: str = LF) -> Iterator[str]:
    """
    Converts a stream of binary chunks into stream of text lines.
    Handles cases where lines are split across chunks.

    Args:
        byte_stream (Iterable[bytes]): the binary (chunks) stream
        encoding (str, optional): expected text encoding. Defaults to 'utf-8'.
        eol (str, optional): expected line-ending. Default to LF.

    Yields:
        Iterator[str]: stream of text lines, not terminated by EOL marker
    """
    has_content = False
    buffer = bytearray()
    eol_bytes = eol.encode(encoding)

    for chunk in byte_stream:
        # NOTE(review): prints a progress dot to stdout for every chunk --
        # a side effect in a library function; confirm intended.
        print(DOT, end=SPACE)
        has_content = True
        buffer.extend(chunk)
        # bytearray.split yields bytearrays, so rebinding `buffer` to the
        # last (partial) piece keeps it extendable on the next iteration.
        *lines, buffer = buffer.split(eol_bytes)  # keep partial line in buffer
        trace(f"streaming {len(lines)} lines; leftover {len(buffer)} chars")
        yield from (line.decode(encoding) for line in lines)

    if buffer:  # yield the leftover
        yield buffer.decode(encoding)

    if not has_content:
        trace("no lines")
|
127
|
+
|
128
|
+
|
129
|
+
def decompress_xz_stream(compressed_chunks: Iterable[bytes]) -> Iterator[bytes]:
    """
    Decompresses an XZ stream chunk by chunk.

    Args:
        compressed_chunks (Iterable[bytes]): stream of binary compressed chunks

    Yields:
        Iterator[bytes]: the decompressed stream
    """
    decompressor = LZMADecompressor()
    return (decompressor.decompress(chunk) for chunk in compressed_chunks)
|
141
|
+
|
142
|
+
|
143
|
+
def extract_files_from_tar(tar_chunks: Iterable[bytes]) -> Iterator[Tuple[TarInfo, bytes]]:
    """
    Extracts regular files from a decompressed TAR stream.

    Args:
        tar_chunks (Iterable[bytes]): stream of decompressed TAR chunks

    Yields:
        Iterator[Tuple[tarfile.TarInfo, bytes]]: \
            streams tuples of meta-data and data for each file
    """
    # Local import so this function does not rely on the module-level
    # `from tarfile import open`, which shadows the builtin open().
    import tarfile
    with tarfile.open(fileobj=IterableReader.from_(tar_chunks),
                      mode='r|*') as tar:
        for member in tar:
            if member.isfile():
                extracted_file = tar.extractfile(member)
                if extracted_file:
                    yield member, extracted_file.read()
|
161
|
+
|
162
|
+
|
163
|
+
def crc32_of(file: BinaryIO, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """
    Calculates the CRC-32 of a binary stream, from its current position to
    its end, reading in chunks.

    Args:
        file (BinaryIO): The file object to read data from, starting from its current position.
        chunk_size (int): The size of chunks to read at a time (default is 8192).

    Returns:
        int: Calculated CRC value of the remaining file content.
    """
    checksum = 0
    # iter() with an empty-bytes sentinel stops at EOF, same as the
    # falsy-chunk test would
    for chunk in iter(lambda: file.read(chunk_size), b''):
        checksum = crc32(chunk, checksum)

    return checksum & 0xFFFFFFFF  # mask down to 32-bit unsigned
|
181
|
+
|
182
|
+
|
183
|
+
def write_csv(file_path: Path, data_stream: Iterable[dict]):
    """
    Writes a stream of flattened telemetry packets to a CSV file.

    The header row is derived from the keys of the first packet. If the
    stream is empty, nothing is written and the file is not created
    (previously an empty stream raised StopIteration from peeking).

    Args:
        file_path: Path to the CSV file to be written.
        data_stream: Iterable of dictionaries representing the rows to be written.
    """
    rows = iter(data_stream)
    try:
        # first row supplies the header; stdlib-only replacement for
        # more_itertools.peekable
        first_row = next(rows)
    except StopIteration:
        return  # empty stream: nothing to write

    with file_path.open(mode="w", newline="") as csv_file:
        writer = csv.DictWriter(csv_file, fieldnames=list(first_row.keys()))
        writer.writeheader()
        writer.writerow(first_row)
        writer.writerows(rows)
|
utils/logger.py
ADDED
@@ -0,0 +1,140 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
import inspect
|
6
|
+
import logging
|
7
|
+
from functools import cached_property, wraps
|
8
|
+
from typing import Callable, ParamSpec, TypeVar, cast, final
|
9
|
+
|
10
|
+
import allure
|
11
|
+
from qa.testing.utils.string_utils import EMPTY, LF
|
12
|
+
|
13
|
+
|
14
|
+
def trace[T](value: T) -> T:
|
15
|
+
"""Logs at debug level using the invoking module name as the logger."""
|
16
|
+
frame = inspect.currentframe()
|
17
|
+
try:
|
18
|
+
if frame is not None:
|
19
|
+
caller_frame = frame.f_back
|
20
|
+
if caller_frame is not None:
|
21
|
+
caller_module = inspect.getmodule(caller_frame)
|
22
|
+
logger_name = caller_module.__name__ if caller_module else '__main__'
|
23
|
+
logger = logging.getLogger(logger_name)
|
24
|
+
logger.debug(f"=== {value}")
|
25
|
+
else:
|
26
|
+
logging.getLogger(__name__).debug(f"=== {value}")
|
27
|
+
else:
|
28
|
+
logging.getLogger(__name__).debug(f"=== {value}")
|
29
|
+
finally:
|
30
|
+
del frame
|
31
|
+
|
32
|
+
return value
|
33
|
+
|
34
|
+
|
35
|
+
def logger[T:type](cls: T) -> T:
|
36
|
+
"""
|
37
|
+
Class decorator that injects a logger into annotated class.
|
38
|
+
|
39
|
+
Args:
|
40
|
+
cls (type): automatically provided by the runtime
|
41
|
+
|
42
|
+
Returns:
|
43
|
+
_type_: the decorated class
|
44
|
+
"""
|
45
|
+
cls._logger = logging.getLogger(cls.__name__)
|
46
|
+
|
47
|
+
@property
|
48
|
+
def log(self: T) -> logging.Logger:
|
49
|
+
return cast(logging.Logger, getattr(self, '_logger', None))
|
50
|
+
|
51
|
+
cls.log = log
|
52
|
+
|
53
|
+
return cls
|
54
|
+
|
55
|
+
|
56
|
+
class LoggerMixin:
|
57
|
+
"""
|
58
|
+
Yet another way of adding logging by deriving from this one.
|
59
|
+
"""
|
60
|
+
@final
|
61
|
+
@cached_property
|
62
|
+
def log(self) -> logging.Logger:
|
63
|
+
return logging.getLogger(self.__class__.__name__)
|
64
|
+
|
65
|
+
@final
|
66
|
+
def trace[T](self, value: T) -> T:
|
67
|
+
"""
|
68
|
+
Logs value at DEBUG level using this logger.
|
69
|
+
|
70
|
+
Use to log something as a value, usually in a lambda expression::
|
71
|
+
|
72
|
+
then.eventually_assert_that(
|
73
|
+
lambda: self.trace(...call some API...),
|
74
|
+
greater_that(0)) \
|
75
|
+
|
76
|
+
.and_....other verifications may follow...
|
77
|
+
|
78
|
+
Args:
|
79
|
+
value (T): the value
|
80
|
+
|
81
|
+
Returns:
|
82
|
+
T: the value
|
83
|
+
"""
|
84
|
+
self.log.debug(f"=== {value}")
|
85
|
+
return value
|
86
|
+
|
87
|
+
|
88
|
+
# ParamSpec/TypeVar pair capturing a decorated function's full signature;
# used by the traced() decorator below.
P = ParamSpec('P')
R = TypeVar('R')
|
90
|
+
|
91
|
+
|
92
|
+
def traced(func: Callable[P, R]) -> Callable[P, R]:
    """
    Method decorator that logs the function call with its arguments and the
    return value.

    Args:
        func (Callable[P, R]): The function to be decorated.
        *args (Any): Positional arguments to be passed to the function.
        **kwargs (Any): Keyword arguments to be passed to the function.

    Returns:
        Callable[P, R]: The result of the function call.
    """
    # Computed once at decoration time; previously the signature was
    # re-inspected on every single call.
    signature = inspect.signature(func)
    parameters = list(signature.parameters.keys())
    is_method = bool(parameters) and parameters[0] == 'self'

    @wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        if is_method and len(args) > 0:
            instance = args[0]
            logger = logging.getLogger(f"{instance.__class__.__name__}")
            # pre-render the argument fragments to keep the f-string simple
            args_repr = ", ".join(str(arg) for arg in args[1:])
            kwargs_repr = LF.join(
                f"{key}={str(value)}"
                for key, value in kwargs.items()) if kwargs else EMPTY
            logger.debug(f">>> {func.__name__} {args_repr} {kwargs_repr}")

            with allure.step(  # type: ignore
                    f"{func.__name__} {args_repr}"):
                result = func(*args, **kwargs)

            if result == instance:
                # fluent API returning self: don't echo the instance
                logger.debug(f"<<< {func.__name__}")
            else:
                logger.debug(f"<<< {func.__name__} {result}")

            return result
        else:
            logger = logging.getLogger(func.__name__)
            logger.debug(f">>> {func.__name__} {args} {kwargs}")
            result = func(*args, **kwargs)
            logger.debug(f"<<< {func.__name__} {result}")
            return result

    return wrapper
|
utils/matchers.py
ADDED
@@ -0,0 +1,371 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
from datetime import date, datetime
|
6
|
+
from typing import (Any, Callable, Iterable, Iterator, List, Optional, Sequence,
|
7
|
+
Union, cast, final, override)
|
8
|
+
|
9
|
+
from hamcrest.core.base_matcher import BaseMatcher
|
10
|
+
from hamcrest.core.description import Description
|
11
|
+
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
|
12
|
+
from hamcrest.core.matcher import Matcher
|
13
|
+
|
14
|
+
from qa.testing.utils.logger import LoggerMixin
|
15
|
+
|
16
|
+
|
17
|
+
class TracingMatcher[T](BaseMatcher[T], LoggerMixin):
|
18
|
+
"""
|
19
|
+
A matcher wrapper that adds debug logging around another matcher.
|
20
|
+
"""
|
21
|
+
|
22
|
+
def __init__(self, matcher: Matcher[T]) -> None:
|
23
|
+
self._matcher = matcher
|
24
|
+
|
25
|
+
def _matches(self, item: Any) -> bool:
|
26
|
+
result = self._matcher.matches(item)
|
27
|
+
self.log.debug(f"{item!r} -> {result}")
|
28
|
+
return result
|
29
|
+
|
30
|
+
def describe_to(self, description: Description) -> None:
|
31
|
+
self._matcher.describe_to(description)
|
32
|
+
|
33
|
+
|
34
|
+
def traced[T](matcher: Matcher[T]) -> TracingMatcher[T]:
|
35
|
+
"""
|
36
|
+
Wraps a matcher with TraceMatcher to enable debug logging.
|
37
|
+
|
38
|
+
Usage:
|
39
|
+
assert_that(actual, trace(contains_string("hello")))
|
40
|
+
"""
|
41
|
+
return TracingMatcher(matcher)
|
42
|
+
|
43
|
+
|
44
|
+
class ContainsStringIgnoringCase(BaseMatcher[str]):
    """Matches strings that contain a given substring, ignoring case."""

    def __init__(self, substring: str) -> None:
        # normalized once so each match only lowercases the candidate
        self.substring: str = substring.lower()

    def _matches(self, item: Any) -> bool:
        return isinstance(item, str) and self.substring in item.lower()

    def describe_to(self, description: Description) -> None:
        description.append_text(
            f"a string containing (case-insensitive) '{self.substring}'")
|
56
|
+
|
57
|
+
|
58
|
+
def contains_string_ignoring_case(substring: str) -> ContainsStringIgnoringCase:
    """Factory for ContainsStringIgnoringCase matchers."""
    return ContainsStringIgnoringCase(substring)
|
60
|
+
|
61
|
+
|
62
|
+
@final
|
63
|
+
class IsIteratorYielding[T](BaseMatcher[Iterator[T]]):
|
64
|
+
"""
|
65
|
+
Matcher for data yielded by iterators.
|
66
|
+
"""
|
67
|
+
|
68
|
+
def __init__(self, element_matcher: Matcher[T]) -> None:
|
69
|
+
self.element_matcher = element_matcher
|
70
|
+
|
71
|
+
@override
|
72
|
+
def _matches(self, item: Iterable[T]) -> bool:
|
73
|
+
try:
|
74
|
+
for element in item:
|
75
|
+
if self.element_matcher.matches(element):
|
76
|
+
return True
|
77
|
+
except TypeError: # not an iterator
|
78
|
+
pass
|
79
|
+
return False
|
80
|
+
|
81
|
+
@override
|
82
|
+
def describe_to(self, description: Description) -> None:
|
83
|
+
description.append_text("a stream containing ") \
|
84
|
+
.append_description_of(self.element_matcher)
|
85
|
+
|
86
|
+
# TODO IsStreamContainingEvery
|
87
|
+
|
88
|
+
|
89
|
+
@final
|
90
|
+
class IsStreamContainingEvery[T](BaseMatcher[Iterator[T]]):
|
91
|
+
"""
|
92
|
+
Matcher to ensure every element yielded by an iterator matches a given matcher.
|
93
|
+
"""
|
94
|
+
|
95
|
+
def __init__(self, element_matcher: Matcher[T]) -> None:
|
96
|
+
self.element_matcher = element_matcher
|
97
|
+
|
98
|
+
@override
|
99
|
+
def _matches(self, item: Iterable[T]) -> bool:
|
100
|
+
try:
|
101
|
+
for element in item:
|
102
|
+
if not self.element_matcher.matches(element):
|
103
|
+
return False # One non-matching element means failure
|
104
|
+
return True # All elements matched
|
105
|
+
except TypeError: # not an iterator
|
106
|
+
pass
|
107
|
+
return False
|
108
|
+
|
109
|
+
@override
|
110
|
+
def describe_to(self, description: Description) -> None:
|
111
|
+
description.append_text("a stream where every item is ") \
|
112
|
+
.append_description_of(self.element_matcher)
|
113
|
+
|
114
|
+
|
115
|
+
@final
|
116
|
+
class IsIteratorYieldingAll[T](BaseMatcher[Iterator[T]]):
|
117
|
+
"""
|
118
|
+
Matcher to ensure that the iterator yields at least one instance of each specified matcher.
|
119
|
+
"""
|
120
|
+
|
121
|
+
def __init__(self, element_matchers: List[Matcher[T]]) -> None:
|
122
|
+
self.element_matchers = element_matchers
|
123
|
+
|
124
|
+
@override
|
125
|
+
def _matches(self, item: Iterable[T]) -> bool:
|
126
|
+
unmatched_matchers = set(self.element_matchers)
|
127
|
+
try:
|
128
|
+
for element in item:
|
129
|
+
unmatched_matchers = {
|
130
|
+
m for m in unmatched_matchers if not m.matches(element)}
|
131
|
+
if not unmatched_matchers: # All matchers have been satisfied
|
132
|
+
return True
|
133
|
+
except TypeError: # not an iterator
|
134
|
+
pass
|
135
|
+
return False
|
136
|
+
|
137
|
+
@override
|
138
|
+
def describe_to(self, description: Description) -> None:
|
139
|
+
description.append_text("a stream containing each of: ")
|
140
|
+
for index, matcher in enumerate(self.element_matchers):
|
141
|
+
if index > 0:
|
142
|
+
description.append_text(", ")
|
143
|
+
description.append_description_of(matcher)
|
144
|
+
|
145
|
+
|
146
|
+
# Accepted by the date-range matchers: a plain date or a full datetime.
DateOrDateTime = Union[date, datetime]
|
147
|
+
|
148
|
+
|
149
|
+
class IsWithinDates(BaseMatcher[DateOrDateTime]):
    """
    Matches a date/datetime falling within [start_date, end_date];
    either bound may be None for an open-ended range.
    """

    def __init__(
            self, start_date: Optional[DateOrDateTime],
            end_date: Optional[DateOrDateTime]) -> None:
        self.start_date = start_date
        self.end_date = end_date

    def _matches(self, item: Optional[DateOrDateTime]) -> bool:
        if not isinstance(item, (date, datetime)):
            return False

        # Convert item to a consistent type for comparison
        # NOTE(review): datetime is a subclass of date, so whenever any
        # bound is set, isinstance(bound, date) is True even for datetime
        # bounds, and a datetime item is truncated to a plain date here --
        # time-of-day precision is lost even for datetime-vs-datetime
        # ranges; confirm intended.
        if isinstance(item, datetime):
            item = item.date() if isinstance(
                self.start_date, date) or isinstance(
                self.end_date, date) else item
        elif isinstance(item, date) and (isinstance(self.start_date, datetime) or isinstance(self.end_date, datetime)):
            item = datetime.combine(item, datetime.min.time())

        # Convert start_date and end_date to compatible types if they are not None
        start = self.start_date
        if start is not None:
            start = start.date() if isinstance(
                start, datetime) and isinstance(item, date) else start

        end = self.end_date
        if end is not None:
            end = end.date() if isinstance(
                end, datetime) and isinstance(
                item, date) else end

        # Perform the comparison, handling open-ended ranges
        if start is None and end is not None:
            return item <= end
        elif start is not None and end is None:
            return item >= start
        elif start is not None and end is not None:
            return start <= item <= end

        # If both start_date and end_date are None, return False (no valid range)
        return False

    def describe_to(self, description: Description) -> None:
        if self.start_date is None:
            description.append_text(f"a date before {self.end_date}")
        elif self.end_date is None:
            description.append_text(f"a date after {self.start_date}")
        else:
            description.append_text(
                f"a date within {self.start_date} and {self.end_date}")
|
199
|
+
|
200
|
+
|
201
|
+
def within_dates(
        start_date: Optional[DateOrDateTime],
        end_date: Optional[DateOrDateTime]) -> IsWithinDates:
    """Factory for IsWithinDates; either bound may be None (open-ended)."""
    return IsWithinDates(start_date, end_date)
|
205
|
+
|
206
|
+
|
207
|
+
def yields_item[T](match: Union[Matcher[T], T]) -> Matcher[Iterator[T]]:
|
208
|
+
"""
|
209
|
+
Matches if any element of yielded by iterator matches a given matcher.
|
210
|
+
|
211
|
+
:param match: The matcher to satisfy, or an expected value for
|
212
|
+
:py:func:`~hamcrest.core.core.isequal.equal_to` matching.
|
213
|
+
|
214
|
+
This matcher iterates the evaluated iterator, searching for any element
|
215
|
+
that satisfies a given matcher. If a matching element is found,
|
216
|
+
``has_item`` is satisfied.
|
217
|
+
|
218
|
+
If the ``match`` argument is not a matcher, it is implicitly wrapped in an
|
219
|
+
:py:func:`~hamcrest.core.core.isequal.equal_to` matcher to check for
|
220
|
+
equality.
|
221
|
+
"""
|
222
|
+
return IsIteratorYielding(wrap_matcher(match))
|
223
|
+
|
224
|
+
|
225
|
+
def yields_every[T](match: Union[Matcher[T], T]) -> Matcher[Iterator[T]]:
|
226
|
+
"""
|
227
|
+
Matches if every element yielded by the iterator matches a given matcher.
|
228
|
+
|
229
|
+
:param match: The matcher to satisfy, or an expected value for equality matching.
|
230
|
+
|
231
|
+
This matcher iterates through the evaluated iterator, checking that every
|
232
|
+
element satisfies the given matcher. If any element does not match, the matcher fails.
|
233
|
+
|
234
|
+
If the `match` argument is not a matcher, it is implicitly wrapped in an
|
235
|
+
equality matcher.
|
236
|
+
"""
|
237
|
+
return IsStreamContainingEvery(wrap_matcher(match))
|
238
|
+
|
239
|
+
|
240
|
+
def yields_items[T](matches: Iterable[Union[Matcher[T],
|
241
|
+
T]]) -> Matcher[Iterator[T]]:
|
242
|
+
"""
|
243
|
+
Matches if each specified item is yielded at least once by the iterator.
|
244
|
+
|
245
|
+
:param matches: An iterable of matchers or values, each of which should be yielded
|
246
|
+
at least once in the iterator for this matcher to succeed.
|
247
|
+
|
248
|
+
This matcher will iterate through the evaluated iterator and check if it yields
|
249
|
+
at least one instance of each specified matcher or value.
|
250
|
+
"""
|
251
|
+
wrapped_matchers = [wrap_matcher(match) for match in matches]
|
252
|
+
return IsIteratorYieldingAll(wrapped_matchers)
|
253
|
+
|
254
|
+
|
255
|
+
def adapted_object[T, R](
|
256
|
+
converter: Callable[[T], R],
|
257
|
+
matcher: Matcher[R]) -> Matcher[T]:
|
258
|
+
"""
|
259
|
+
Hamcrest matcher adapting an object of type T by specified converter and
|
260
|
+
applying specified matcher. For example::
|
261
|
+
|
262
|
+
adapt_object( lambda message: message.id,
|
263
|
+
is_greater_than(0) )
|
264
|
+
|
265
|
+
where id being a number, and is_greater_than being a matcher that can be
|
266
|
+
applied on numbers.
|
267
|
+
|
268
|
+
See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`
|
269
|
+
|
270
|
+
Args:
|
271
|
+
converter (Callable[[T], R]): function converting T into R
|
272
|
+
matcher (Matcher[R]): matcher for adapted type R
|
273
|
+
|
274
|
+
Returns:
|
275
|
+
Matcher[T]: matcher for target type T
|
276
|
+
"""
|
277
|
+
@final
|
278
|
+
class AdaptedMatcher(BaseMatcher[T]):
|
279
|
+
@override
|
280
|
+
def _matches(self, item: T) -> bool:
|
281
|
+
return False if item is None \
|
282
|
+
else matcher.matches(converter(item))
|
283
|
+
|
284
|
+
@override
|
285
|
+
def describe_to(self, description: Description) -> None:
|
286
|
+
description.append_description_of(matcher)
|
287
|
+
|
288
|
+
return AdaptedMatcher()
|
289
|
+
|
290
|
+
|
291
|
+
def adapted_sequence[T, R](
|
292
|
+
converter: Callable[[T], R],
|
293
|
+
matcher: Matcher[Sequence[R]]) -> Matcher[Sequence[T]]:
|
294
|
+
"""
|
295
|
+
Hamcrest matcher adapting a Sequence of type T by specified converter and
|
296
|
+
applying specified matcher. For example::
|
297
|
+
|
298
|
+
adapt_sequence( lambda message: message.id,
|
299
|
+
has_item(is_greater_than(0)) )
|
300
|
+
|
301
|
+
where id being a number, and is_greater_than being a matcher that can be
|
302
|
+
applied on numbers.
|
303
|
+
|
304
|
+
See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`
|
305
|
+
|
306
|
+
Args:
|
307
|
+
converter (Callable[[T], R]): function converting T into R
|
308
|
+
matcher (Matcher[Sequence[R]): matcher for adapted Sequence of R
|
309
|
+
|
310
|
+
Returns:
|
311
|
+
Matcher[Sequence[T]]: matcher for target Sequence of type T
|
312
|
+
"""
|
313
|
+
@final
|
314
|
+
class AdaptedMatcher(BaseMatcher[Sequence[T]]):
|
315
|
+
@override
|
316
|
+
def _matches(self, item: Sequence[T]) -> bool:
|
317
|
+
return matcher.matches([converter(x) for x in item])
|
318
|
+
|
319
|
+
@override
|
320
|
+
def describe_to(self, description: Description) -> None:
|
321
|
+
description.append_description_of(matcher)
|
322
|
+
|
323
|
+
return AdaptedMatcher()
|
324
|
+
|
325
|
+
|
326
|
+
def adapted_iterator[T, R](
|
327
|
+
converter: Callable[[T], R],
|
328
|
+
matcher: Matcher[Iterator[R]]) -> Matcher[Iterator[T]]:
|
329
|
+
"""
|
330
|
+
Hamcrest matcher adapting an Iterator of type T by specified converter and
|
331
|
+
applying specified matcher. For example::
|
332
|
+
|
333
|
+
adapt_iterator( lambda message: message.id,
|
334
|
+
yields_item(is_greater_than(0)) )
|
335
|
+
|
336
|
+
where id being a number, and is_greater_than being a matcher that can be
|
337
|
+
applied on numbers.
|
338
|
+
|
339
|
+
See more on `PyHamcrest <https://github.com/hamcrest/PyHamcrest>`
|
340
|
+
|
341
|
+
Args:
|
342
|
+
converter (Callable[[T], R]): function converting T into R
|
343
|
+
matcher (Matcher[Iterator[R]): matcher for adapted Iterator of R
|
344
|
+
|
345
|
+
Returns:
|
346
|
+
Matcher[Iterator[T]]: matcher for target Iterator of type T
|
347
|
+
"""
|
348
|
+
@final
|
349
|
+
class AdaptedMatcher(BaseMatcher[Iterator[T]]):
|
350
|
+
@override
|
351
|
+
def _matches(self, item: Iterable[T]) -> bool:
|
352
|
+
return matcher.matches(map(converter, item))
|
353
|
+
|
354
|
+
@override
|
355
|
+
def describe_to(self, description: Description) -> None:
|
356
|
+
description.append_description_of(matcher)
|
357
|
+
|
358
|
+
return AdaptedMatcher()
|
359
|
+
|
360
|
+
|
361
|
+
def match_as[T](matcher: Matcher[object]) -> Matcher[T]: # type: ignore
|
362
|
+
"""
|
363
|
+
Utility function to cast a generic matcher to the specific type Matcher[T].
|
364
|
+
|
365
|
+
Args:
|
366
|
+
matcher: The original matcher that needs to be cast.
|
367
|
+
|
368
|
+
Returns:
|
369
|
+
A matcher cast to Matcher[T].
|
370
|
+
"""
|
371
|
+
return cast(Matcher[T], matcher)
|
utils/object_utils.py
ADDED
@@ -0,0 +1,185 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
import threading
|
6
|
+
from dataclasses import asdict, fields, is_dataclass, replace
|
7
|
+
from enum import Enum
|
8
|
+
from typing import (Any, Callable, Dict, Protocol, Type, final)
|
9
|
+
|
10
|
+
# TODO: move to stream_utils module
# Zero-argument callable producing a T.
type Supplier[T] = Callable[[], T]
# One-argument callable returning True/False for a T.
type Predicate[T] = Callable[[T], bool]
|
13
|
+
|
14
|
+
|
15
|
+
class Valid(Protocol):
    """
    Structural interface for objects that can validate themselves.
    """

    def is_valid(self) -> bool:
        """
        Should be implemented by objects that need validation.

        Returns:
            bool: true, if the object is valid
        """
        ...
|
28
|
+
|
29
|
+
|
30
|
+
class ImmutableMixin:
    """
    Makes instances effectively write-once: every attribute may be set
    exactly once (typically during __init__); any later assignment raises
    AttributeError.

    Caveats:
    - does not combine with WithMixin when attributes carry default values;
    - does not work when applied to a super type whose __init__ is
      overridden.

    Intended for non-dataclass classes.
    """

    def __setattr__(self, key: str, value: Any) -> None:
        # An attribute that already exists must never be rebound.
        if hasattr(self, key):
            raise AttributeError(
                f"Can't modify attribute '{key}' after initialization")
        # First assignment: fall through to the normal attribute machinery.
        super().__setattr__(key, value)
|
48
|
+
|
49
|
+
|
50
|
+
class WithMixin:
|
51
|
+
'''
|
52
|
+
Supports immutability by copying on change.
|
53
|
+
|
54
|
+
For example, instead of mutating like this::
|
55
|
+
|
56
|
+
obj.field = a_new_value
|
57
|
+
|
58
|
+
use::
|
59
|
+
|
60
|
+
dup_object_with_changes = obj.with_(field=a_new_value)
|
61
|
+
|
62
|
+
This will ensure that the changes are applied on a duplicate of `obj`.
|
63
|
+
|
64
|
+
Can be applied on plain Python classes, and on `dataclases` too.
|
65
|
+
'''
|
66
|
+
@final
|
67
|
+
def with_[T:WithMixin](self: T, **changes: Any) -> T:
|
68
|
+
if is_dataclass(self):
|
69
|
+
# Directly use replace for dataclasses; it will raise an error for invalid fields
|
70
|
+
return replace(self, **changes)
|
71
|
+
|
72
|
+
duplicated_object = self.__class__(**self.__dict__)
|
73
|
+
for key, value in changes.items():
|
74
|
+
# Get the current attribute to determine its type
|
75
|
+
current_attr = getattr(self, key, None)
|
76
|
+
if isinstance(current_attr, Enum):
|
77
|
+
# If the current attribute is an enum,
|
78
|
+
# convert the value to the corresponding enum
|
79
|
+
value = current_attr.__class__(value)
|
80
|
+
setattr(duplicated_object, key, value)
|
81
|
+
return duplicated_object
|
82
|
+
|
83
|
+
|
84
|
+
class ToDictMixin:
    """
    Dictionary-serialization helpers for dataclass instances.

    Both methods raise TypeError when the receiver is not a dataclass.
    """

    def to_dict(self) -> Dict[str, Any]:
        """
        Converts a dataclass instance (with nested dataclasses) to a dictionary.
        """
        if not is_dataclass(self):
            raise TypeError("not a dataclass instance")

        def _convert(item: Any) -> Any:
            if isinstance(item, ToDictMixin):
                return item.to_dict()
            if isinstance(item, list):
                return [_convert(element) for element in item]
            if isinstance(item, dict):
                return {name: _convert(element)
                        for name, element in item.items()}
            return item

        return {name: _convert(item)
                for name, item in asdict(self).items()}

    def flatten(self, prefix: str = "") -> Dict[str, Any]:
        """
        Flattens the nested structure into a flat dictionary for CSV serialization.

        Args:
            prefix (str, optional): prepended to every key; used internally
                when recursing into nested mixins. Defaults to "".
        """
        if not is_dataclass(self):
            raise TypeError("not a dataclass instance")

        result: Dict[str, Any] = {}

        def _emit(column: str, item: Any) -> None:
            if isinstance(item, ToDictMixin):
                # Nested mixin: recurse, namespacing its keys.
                result.update(item.flatten(prefix=f"{column}_"))
            elif isinstance(item, list):
                # Lists expand into one indexed column per element.
                for position, element in enumerate(item):
                    result[f"{column}[{position}]"] = element
            elif isinstance(item, dict):
                # Dicts expand into one column per key.
                for sub_name, sub_value in item.items():
                    result[f"{column}_{sub_name}"] = sub_value
            else:
                # Scalar field: emit directly.
                result[column] = item

        for field in fields(self):
            _emit(f"{prefix}{field.name}", getattr(self, field.name))

        return result
|
135
|
+
|
136
|
+
|
137
|
+
class SingletonMeta(type):
    """
    Metaclass guaranteeing at most one instance per class, safely across
    threads.
    """
    _instances: Dict[Type['SingletonBase'], 'SingletonBase'] = {}
    _lock: threading.Lock = threading.Lock()  # guards first construction

    def __call__(cls, *args: Any, **kwargs: Any) -> 'SingletonBase':
        # Serialize creation so two threads racing on the first call
        # cannot both construct an instance.
        with SingletonMeta._lock:
            if cls not in SingletonMeta._instances:
                SingletonMeta._instances[cls] = super().__call__(
                    *args, **kwargs)
            return SingletonMeta._instances[cls]
|
150
|
+
|
151
|
+
|
152
|
+
class SingletonBase(metaclass=SingletonMeta):
    """
    Base class for singletons using SingletonMeta.

    Constructing a subclass more than once returns the instance created
    by the first construction.
    """
    pass
|
157
|
+
|
158
|
+
|
159
|
+
class InvalidValueException(ValueError):
    """
    Raised by `valid` when an object reports itself invalid via the
    Valid protocol.
    """
    pass
|
161
|
+
|
162
|
+
|
163
|
+
def valid[T:Valid](value: T) -> T:
|
164
|
+
"""
|
165
|
+
Validates specified object, assuming that it supports the Valid protocol.
|
166
|
+
|
167
|
+
Args:
|
168
|
+
value (T:Valid): the object
|
169
|
+
|
170
|
+
Raises:
|
171
|
+
TypeError: if the object does not support the Valid protocol
|
172
|
+
InvalidValueException: if the object is invalid
|
173
|
+
|
174
|
+
Returns:
|
175
|
+
T:Valid: the validated object
|
176
|
+
"""
|
177
|
+
if not (hasattr(value, 'is_valid') and callable(
|
178
|
+
getattr(value, 'is_valid'))):
|
179
|
+
raise TypeError(
|
180
|
+
f"{value.__class__.__name__} does not conform to the Valid protocol")
|
181
|
+
|
182
|
+
if value.is_valid():
|
183
|
+
return value
|
184
|
+
else:
|
185
|
+
raise InvalidValueException(value)
|
utils/stream_utils.py
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
from typing import Iterator
|
6
|
+
|
7
|
+
from qa.testing.utils.object_utils import Predicate
|
8
|
+
|
9
|
+
|
10
|
+
def process_next[T](i: Iterator[T], p: Predicate[T]) -> Iterator[T]:
|
11
|
+
# DELETEME -- not needed so far
|
12
|
+
"""
|
13
|
+
Processes next items per specified predicate. Useful, for cases in which
|
14
|
+
first item in a stream decides the meaning of rest of items.
|
15
|
+
|
16
|
+
Args:
|
17
|
+
i (Iterator[T]): the iterator to process
|
18
|
+
p (Predicate[T]): the predicate to be applied on `next(i)`
|
19
|
+
|
20
|
+
Returns:
|
21
|
+
Iterator[T]: the original iterator if the predicate evaluated true, \
|
22
|
+
otherwise empty iterator
|
23
|
+
"""
|
24
|
+
return i if p(next(i)) else iter([])
|
utils/string_utils.py
ADDED
@@ -0,0 +1,69 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
from typing import Callable, Type
|
6
|
+
|
7
|
+
from ppretty import ppretty # type: ignore
|
8
|
+
|
9
|
+
# Common string and byte constants, named so call sites read declaratively.
EMPTY = ""
SPACE = " "
DOT = "."
LF = "\n"  # line feed
UTF_8 = "utf-8"  # canonical encoding name for text I/O
EMPTY_BYTE_STRING = b''
|
15
|
+
|
16
|
+
|
17
|
+
def to_string[T](indent: str = ' ',
|
18
|
+
depth: int = 1,
|
19
|
+
width: int = 72,
|
20
|
+
seq_length: int = 15,
|
21
|
+
show_protected: bool = False,
|
22
|
+
show_private: bool = False,
|
23
|
+
show_static: bool = False,
|
24
|
+
show_properties: bool = True,
|
25
|
+
show_address: bool = False,
|
26
|
+
str_length: int = 50) -> Callable[[Type[T]], Type[T]]:
|
27
|
+
"""
|
28
|
+
Class decorator providing a readable __str__ implementation.
|
29
|
+
|
30
|
+
The default Python __str__ implementation, returns the type and the memory
|
31
|
+
address of instance.
|
32
|
+
|
33
|
+
Important for diagnostics, actually every object that is logged, must
|
34
|
+
provide such readable __str__.
|
35
|
+
|
36
|
+
Args:
|
37
|
+
indent (str, optional): indentation; Defaults to ' '.
|
38
|
+
depth (int, optional): depth in object hierarchy; defaults to 1.
|
39
|
+
width (int, optional): width of line before line-feed; defaults to 72.
|
40
|
+
seq_length (int, optional): how many items to include; defaults to 15.
|
41
|
+
show_protected (bool, optional): include protected; Defaults to False.
|
42
|
+
show_private (bool, optional): include private; defaults to False.
|
43
|
+
show_static (bool, optional): include static; defaults to False.
|
44
|
+
show_properties (bool, optional): include properties; defaults to True.
|
45
|
+
show_address (bool, optional): include object's memory address; defaults to False.
|
46
|
+
str_length (int, optional): maximum string length per item; defaults to 50.
|
47
|
+
|
48
|
+
Returns:
|
49
|
+
Callable[[Type[T]], Type[T]]: _description_
|
50
|
+
"""
|
51
|
+
def decorator(cls: Type[T]) -> Type[T]:
|
52
|
+
def __str__(self: T) -> str:
|
53
|
+
# IMPORTANT: must not use something that calls __str__
|
54
|
+
return ppretty(self,
|
55
|
+
indent=indent,
|
56
|
+
depth=depth,
|
57
|
+
width=width,
|
58
|
+
seq_length=seq_length,
|
59
|
+
show_protected=show_protected,
|
60
|
+
show_private=show_private,
|
61
|
+
show_static=show_static,
|
62
|
+
show_properties=show_properties,
|
63
|
+
show_address=show_address,
|
64
|
+
str_length=str_length) # type: ignore
|
65
|
+
|
66
|
+
cls.__str__ = __str__
|
67
|
+
return cls
|
68
|
+
|
69
|
+
return decorator
|
utils/thread_utils.py
ADDED
@@ -0,0 +1,13 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
import concurrent.futures
|
6
|
+
import time
|
7
|
+
from datetime import timedelta
|
8
|
+
|
9
|
+
# Shared, process-wide thread pool using the library-default worker count.
# Never shut down explicitly -- it lives for the lifetime of the interpreter.
COMMON_EXECUTOR = concurrent.futures.ThreadPoolExecutor()
|
10
|
+
|
11
|
+
|
12
|
+
def sleep_for(duration: timedelta):
    """
    Blocks the current thread for the given duration.

    Args:
        duration (timedelta): how long to sleep
    """
    seconds = duration.total_seconds()
    time.sleep(seconds)
|
utils/tuple_utils.py
ADDED
@@ -0,0 +1,49 @@
|
|
1
|
+
# SPDX-FileCopyrightText: 2025 Adrian Herscu
|
2
|
+
#
|
3
|
+
# SPDX-License-Identifier: Apache-2.0
|
4
|
+
|
5
|
+
from dataclasses import is_dataclass, replace, fields
|
6
|
+
from typing import Any, Self, Tuple, Type
|
7
|
+
|
8
|
+
|
9
|
+
class FromTupleMixin:
    """
    Class decorator adding a `from_tuple` method allowing instantiation from
    a tuple matching the order of decorated class fields.

    Works with frozen dataclasses too.
    """
    @classmethod
    def from_tuple(cls: Type[Self], data: Tuple[Any, ...]) -> Self:
        """
        Builds an instance of `cls` from positional values.

        Args:
            data (Tuple[Any, ...]): values in field-declaration order.

        Returns:
            Self: the populated instance.
        """
        if is_dataclass(cls):
            # Retrieve all fields, including inherited ones
            cls_fields = [f.name for f in fields(cls)]

            # Create a dictionary of field names to values from the tuple.
            # NOTE(review): zip truncates silently -- this assumes `data`
            # supplies at least as many items as there are fields; confirm
            # against callers.
            field_values = {name: value for name,
                            value in zip(cls_fields, data)}

            # Create a new instance using `__new__` (bypasses __init__)
            instance = cls.__new__(cls)

            # If the dataclass is frozen, use `replace` to set the attributes
            if getattr(cls, '__dataclass_params__').frozen:
                # NOTE(review): replace() re-invokes __init__ with
                # `field_values`; this works only while every field is
                # supplied -- verify when fields gain defaults.
                return replace(instance, **field_values)
            else:
                # If the dataclass is not frozen, use setattr to set attributes
                for key, value in field_values.items():
                    setattr(instance, key, value)

                # Call __init__ if defined.
                # NOTE(review): this re-runs __init__ positionally after the
                # setattr loop above, so attributes are written twice and any
                # __init__ side effects fire -- confirm this is intended.
                instance.__init__(*data)
                return instance
        else:
            # For vanilla classes, assume fields are defined in __init__
            # Using `__init__` directly as the custom initializer
            instance = cls.__new__(cls)
            for attr, value in zip(cls.__annotations__.keys(), data):
                setattr(instance, attr, value)

            # Call __init__ if it expects parameters
            instance.__init__(*data)
            return instance