qa-testing-utils 0.0.7__py3-none-any.whl → 0.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- qa_testing_utils/__init__.py +1 -1
- qa_testing_utils/conftest_helpers.py +47 -0
- qa_testing_utils/exception_utils.py +1 -1
- qa_testing_utils/file_utils.py +8 -3
- qa_testing_utils/logger.py +20 -5
- qa_testing_utils/logging.ini +35 -0
- qa_testing_utils/matchers.py +53 -46
- qa_testing_utils/object_utils.py +94 -41
- qa_testing_utils/stream_utils.py +32 -5
- qa_testing_utils/string_utils.py +2 -2
- qa_testing_utils/thread_utils.py +5 -0
- {qa_testing_utils-0.0.7.dist-info → qa_testing_utils-0.0.9.dist-info}/METADATA +1 -1
- qa_testing_utils-0.0.9.dist-info/RECORD +17 -0
- qa_testing_utils-0.0.7.dist-info/RECORD +0 -15
- {qa_testing_utils-0.0.7.dist-info → qa_testing_utils-0.0.9.dist-info}/WHEEL +0 -0
- {qa_testing_utils-0.0.7.dist-info → qa_testing_utils-0.0.9.dist-info}/entry_points.txt +0 -0
qa_testing_utils/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = '0.0.7'
+__version__ = '0.0.9'
qa_testing_utils/conftest_helpers.py
ADDED
@@ -0,0 +1,47 @@
+# SPDX-FileCopyrightText: 2025 Adrian Herscu
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import inspect
+import logging.config
+from pathlib import Path
+import sys
+from typing import Callable, Optional
+
+import pytest
+
+
+def configure(config: pytest.Config,
+              path: Path = Path(__file__).parent / "logging.ini") -> None:
+    """
+    Configures logging for pytest using a specified INI file, or defaults to internal logging.ini.
+    """
+    caller_module = inspect.getmodule(inspect.stack()[1][0])
+    module_name = caller_module.__name__ if caller_module else "unknown"
+
+    if path.is_file():
+        logging.config.fileConfig(path)
+        logging.info(f"{module_name} loaded logs config from: {path}")
+    else:
+        sys.stderr.write(f"{module_name} couldn't find logs config file {path}")
+
+
+def makereport(
+        item: pytest.Item, call: pytest.CallInfo[None]) -> pytest.TestReport:
+    report = pytest.TestReport.from_item_and_call(item, call)
+
+    if call.when == "call":
+        report.sections.append(('body', get_test_body(item)))
+
+    return report
+
+
+def get_test_body(item: pytest.Item) -> str:
+    function: Optional[Callable[..., None]] = getattr(item, 'function', None)
+    if function is None:
+        return "No function found for this test item."
+
+    try:
+        return inspect.getsource(function)
+    except Exception as e:
+        return f"Could not get source code: {str(e)}"
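For orientation, a minimal sketch of how a project's conftest.py might delegate to these new helpers; the hook wiring below is an assumption based on pytest's standard hook names, not something the package prescribes.

# conftest.py -- hypothetical wiring, for illustration only
import pytest

from qa_testing_utils.conftest_helpers import configure, makereport


def pytest_configure(config: pytest.Config) -> None:
    # Loads the packaged logging.ini by default; see configure() above.
    configure(config)


def pytest_runtest_makereport(
        item: pytest.Item, call: pytest.CallInfo[None]) -> pytest.TestReport:
    # Delegates report creation so the test body gets attached on the "call" phase.
    return makereport(item, call)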
qa_testing_utils/exception_utils.py
CHANGED
@@ -7,7 +7,7 @@ import logging
 from typing import Any, Callable

 from returns.maybe import Maybe, Nothing, Some
-from qa_testing_utils.
+from qa_testing_utils.stream_utils import Supplier


 def safely[T](supplier: Supplier[T]) -> Maybe[T]:
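Based only on the signature visible here, safely runs a Supplier and wraps the outcome in a Maybe; the sketch below assumes (the body is not shown in this diff) that a raised exception maps to Nothing and a successful call to Some.

from returns.maybe import Maybe

from qa_testing_utils.exception_utils import safely


def flaky() -> int:
    # Raises ValueError; safely() presumably absorbs it into the Maybe.
    return int("not-a-number")


broken: Maybe[int] = safely(flaky)        # assumed: Nothing
healthy: Maybe[int] = safely(lambda: 42)  # assumed: Some(42)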
qa_testing_utils/file_utils.py
CHANGED
@@ -88,7 +88,7 @@ def stream_file(
         Iterator[bytes]: the binary chunks stream
     """
     with file_path.open('rb') as f:
-        yield from iter(lambda: f.read(chunk_size),
+        yield from iter(lambda: f.read(chunk_size), EMPTY_BYTES)


 def read_lines(
@@ -180,7 +180,7 @@ def crc32_of(file: BinaryIO, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
     return crc_value & 0xFFFFFFFF  # ensure 32-bit unsigned


-def write_csv(file_path: Path, data_stream: Iterable[dict]):
+def write_csv(file_path: Path, data_stream: Iterable[dict[str, object]]):
     """
     Writes a stream of flattened telemetry packets to a CSV file.

@@ -189,8 +189,13 @@ def write_csv(file_path: Path, data_stream: Iterable[dict]):
         data_stream: Iterable of dictionaries representing the rows to be written.
     """
     stream = peekable(data_stream)  # Allow peeking to extract headers
+    try:
+        first_row: dict[str, object] = stream.peek()
+    except StopIteration:
+        # No data to write
+        return
     with file_path.open(mode="w", newline="") as csv_file:
         writer = csv.DictWriter(
-            csv_file, fieldnames=list(
+            csv_file, fieldnames=list(first_row.keys()))
         writer.writeheader()
         writer.writerows(stream)
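A short usage sketch of the updated write_csv; the telemetry rows below are invented illustration data, and the import assumes the function is used directly from qa_testing_utils.file_utils.

from pathlib import Path

from qa_testing_utils.file_utils import write_csv

rows = ({"packet_id": i, "status": "ok", "reading": i * 0.5} for i in range(3))

# Column headers come from the first row's keys; with this change an empty
# stream simply writes nothing instead of failing while peeking for headers.
write_csv(Path("telemetry.csv"), rows)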
qa_testing_utils/logger.py
CHANGED
@@ -8,7 +8,7 @@ from functools import cached_property, wraps
 from typing import Callable, ParamSpec, TypeVar, cast, final

 import allure
-from qa_testing_utils.string_utils import
+from qa_testing_utils.string_utils import EMPTY_STRING, LF


 def trace[T](value: T) -> T:
@@ -55,7 +55,15 @@ def logger[T:type](cls: T) -> T:

 class LoggerMixin:
     """
-
+    Mixin that provides a `log` property for convenient class-based logging.
+
+    Inherit from this mixin to get a `self.log` logger named after the class.
+    Useful for adding debug/info/error logging to any class without boilerplate.
+
+    Example:
+        class MyClass(LoggerMixin):
+            def do_something(self):
+                self.log.info("Doing something")
     """
     @final
     @cached_property
@@ -91,8 +99,15 @@ R = TypeVar('R')

 def traced(func: Callable[P, R]) -> Callable[P, R]:
     """
-
-
+    Decorator to log function entry, arguments, and return value at DEBUG level.
+
+    Also adds an Allure step for reporting. Use on methods where tracing is useful
+    for debugging or reporting.
+
+    Example:
+        @traced
+        def my_method(self, x):
+            ...

     Args:
         func (Callable[P, R]): The function to be decorated.
@@ -117,7 +132,7 @@ def traced(func: Callable[P, R]) -> Callable[P, R]:
             f"{", ".join([str(arg) for arg in args[1:]])} "
            f"{LF.join(
                f"{key}={str(value)}"
-               for key, value in kwargs.items()) if kwargs else
+               for key, value in kwargs.items()) if kwargs else EMPTY_STRING}")

     with allure.step(  # type: ignore
         f"{func.__name__} "
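A small, hypothetical example combining the two documented pieces above: LoggerMixin supplies a class-named self.log, and @traced logs the call and adds an Allure step. The service class and method names are made up for illustration.

from qa_testing_utils.logger import LoggerMixin, traced


class CheckoutFlow(LoggerMixin):  # hypothetical class, for illustration only
    @traced
    def pay(self, amount: float) -> bool:
        # self.log is the logger provided by LoggerMixin, named after the class.
        self.log.debug(f"paying {amount}")
        return amount > 0


CheckoutFlow().pay(10.0)  # entry, arguments and result are logged at DEBUG level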
qa_testing_utils/logging.ini
ADDED
@@ -0,0 +1,35 @@
+; SPDX-FileCopyrightText: 2025 Adrian Herscu
+;
+; SPDX-License-Identifier: Apache-2.0
+
+[handlers]
+; NOTE: each key here must have a handler_xxx section below
+keys=console,overwrite_file
+
+[handler_console]
+class=StreamHandler
+level=DEBUG
+formatter=time_level_thread_name_message
+args=(sys.stdout,)
+
+[handler_overwrite_file]
+class=FileHandler
+level=DEBUG
+formatter=time_level_thread_name_message
+args=('pytest.log', 'w')
+
+[formatters]
+; NOTE: each key here must have a formatter_xxx section below
+keys=time_level_thread_name_message
+
+[formatter_time_level_thread_name_message]
+format=%(asctime)s [%(levelname)-1.1s] [%(threadName)-10.10s]: %(name)-14.14s - %(message)s
+; NOTE don't need milliseconds meanwhile, otherwise append .%03d below
+datefmt=%H:%M:%S
+
+[loggers]
+keys=root
+
+[logger_root]
+level=DEBUG
+handlers=console,overwrite_file
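If a project prefers its own configuration over this packaged one, configure() accepts an explicit path, as sketched here; the project-local file location is an arbitrary example.

from pathlib import Path

import pytest

from qa_testing_utils.conftest_helpers import configure


def pytest_configure(config: pytest.Config) -> None:
    # Points at a project-local INI instead of the packaged logging.ini;
    # configure() falls back to a stderr warning if the file is missing.
    configure(config, path=Path(__file__).parent / "logging.ini")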
qa_testing_utils/matchers.py
CHANGED
@@ -56,6 +56,15 @@ class ContainsStringIgnoringCase(BaseMatcher[str]):


 def contains_string_ignoring_case(substring: str) -> ContainsStringIgnoringCase:
+    """
+    Creates a matcher that checks if a given string contains the specified substring, ignoring case.
+
+    Args:
+        substring (str): The substring to search for within the target string, case-insensitively.
+
+    Returns:
+        ContainsStringIgnoringCase: A matcher object that evaluates whether the target string contains the specified substring, ignoring case.
+    """
     return ContainsStringIgnoringCase(substring)


@@ -70,12 +79,11 @@ class IsIteratorYielding[T](BaseMatcher[Iterator[T]]):

     @override
     def _matches(self, item: Iterable[T]) -> bool:
-
-
-
-
-
-        pass
+        for element in item:
+            if self.element_matcher.matches(element):
+                return True
+
+        # No matching element found
         return False

     @override
@@ -97,14 +105,12 @@ class IsStreamContainingEvery[T](BaseMatcher[Iterator[T]]):

     @override
     def _matches(self, item: Iterable[T]) -> bool:
-
-
-
-
-
-
-        pass
-        return False
+        for element in item:
+            if not self.element_matcher.matches(element):
+                return False  # One non-matching element means failure
+
+        # All elements matched
+        return True

     @override
     def describe_to(self, description: Description) -> None:
@@ -124,14 +130,12 @@ class IsIteratorYieldingAll[T](BaseMatcher[Iterator[T]]):
     @override
     def _matches(self, item: Iterable[T]) -> bool:
         unmatched_matchers = set(self.element_matchers)
-
-
-            unmatched_matchers
-
-
-
-        except TypeError:  # not an iterator
-            pass
+        for element in item:
+            unmatched_matchers = {
+                m for m in unmatched_matchers if not m.matches(element)}
+            if not unmatched_matchers:  # All matchers have been satisfied
+                return True
+
         return False

     @override
@@ -154,38 +158,31 @@ class IsWithinDates(BaseMatcher[DateOrDateTime]):
         self.end_date = end_date

     def _matches(self, item: Optional[DateOrDateTime]) -> bool:
-        if
+        if item is None:
             return False

-        #
-        if isinstance(item, datetime):
-            item = item.date() if isinstance(
-                self.start_date, date) or isinstance(
-                self.end_date, date) else item
-        elif isinstance(item, date) and (isinstance(self.start_date, datetime) or isinstance(self.end_date, datetime)):
+        # Normalize item to datetime
+        if not isinstance(item, datetime):
             item = datetime.combine(item, datetime.min.time())

-        #
-
-
-
-
+        # Normalize start_date and end_date to datetime
+        def to_datetime(value: Optional[DateOrDateTime]) -> Optional[datetime]:
+            if value is None:
+                return None
+            return value if isinstance(
+                value, datetime) else datetime.combine(
+                value, datetime.min.time())

-
-
-        end = end.date() if isinstance(
-            end, datetime) and isinstance(
-            item, date) else end
+        start = to_datetime(self.start_date)
+        end = to_datetime(self.end_date)

-
-        if start is None and end is not None:
-            return item <= end
-        elif start is not None and end is None:
-            return item >= start
-        elif start is not None and end is not None:
+        if start and end:
             return start <= item <= end
+        if start:
+            return item >= start
+        if end:
+            return item <= end

-        # If both start_date and end_date are None, return False (no valid range)
         return False

     def describe_to(self, description: Description) -> None:
@@ -201,6 +198,16 @@ class IsWithinDates(BaseMatcher[DateOrDateTime]):
 def within_dates(
         start_date: Optional[DateOrDateTime],
         end_date: Optional[DateOrDateTime]) -> IsWithinDates:
+    """
+    Creates an instance of IsWithinDates to check if a date or datetime value falls within the specified start and end dates.
+
+    Args:
+        start_date (Optional[DateOrDateTime]): The start of the date range. Can be None to indicate no lower bound.
+        end_date (Optional[DateOrDateTime]): The end of the date range. Can be None to indicate no upper bound.
+
+    Returns:
+        IsWithinDates: An instance configured with the provided start and end dates.
+    """
     return IsWithinDates(start_date, end_date)

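An illustrative check using the two factory functions documented above together with PyHamcrest's assert_that (these matchers extend hamcrest's BaseMatcher); the values are arbitrary.

from datetime import date, datetime

from hamcrest import assert_that

from qa_testing_utils.matchers import (
    contains_string_ignoring_case, within_dates)

# Case-insensitive substring check.
assert_that("Hello World", contains_string_ignoring_case("hello"))

# Date/datetime range checks; either bound may be None for an open-ended range.
assert_that(datetime(2025, 6, 1, 12, 0),
            within_dates(date(2025, 1, 1), date(2025, 12, 31)))
assert_that(date(2025, 6, 1), within_dates(None, date(2025, 12, 31)))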
qa_testing_utils/object_utils.py
CHANGED
@@ -5,13 +5,11 @@
 import threading
 from dataclasses import asdict, fields, is_dataclass, replace
 from enum import Enum
-from typing import (Any,
-
-# TODO: move to stream_utils module
-type Supplier[T] = Callable[[], T]
-type Predicate[T] = Callable[[T], bool]
+from typing import (Any, Dict, Optional, Protocol,
+                    final, runtime_checkable, ClassVar)


+@runtime_checkable
 class Valid(Protocol):
     """
     Specifies a method for validating objects.
@@ -29,15 +27,20 @@ class Valid(Protocol):

 class ImmutableMixin:
     """
-
+    Mixin to enforce immutability after initialization.

-
-
+    Overrides __setattr__ to raise AttributeError if an attribute is modified after being set.
+    Intended for use with non-dataclasses. For dataclasses, use `@dataclass(frozen=True)`.

-
-
+    Limitations:
+    - Does not work with WithMixin if attributes have default values.
+    - Does not work if applied to a superclass with a custom __init__.

-
+    Example:
+        class MyImmutable(ImmutableMixin):
+            foo: int = 1
+        obj = MyImmutable()
+        obj.foo = 2  # Raises AttributeError
     """

     def __setattr__(self, key: str, value: Any) -> None:
@@ -49,19 +52,21 @@ class ImmutableMixin:

 class WithMixin:
     '''
-
-
-    For example, instead of mutating like this::
+    Mixin to support copy-on-change (functional update) for objects.

-
+    Instead of mutating an object, use `with_()` to create a copy with updated fields:
+        obj2 = obj.with_(field=new_value)

-
+    Works with both plain Python classes and dataclasses.

-
+    Example:
+        @dataclass(frozen=True)
+        class Point(WithMixin):
+            x: int
+            y: int

-
-
-    Can be applied on plain Python classes, and on `dataclases` too.
+        p1 = Point(1, 2)
+        p2 = p1.with_(x=3)  # p2 is Point(3, 2)
     '''
     @final
     def with_[T:WithMixin](self: T, **changes: Any) -> T:
@@ -82,18 +87,37 @@ class WithMixin:


 class ToDictMixin:
+    """
+    Mixin to add serialization methods to dataclasses.
+
+    Provides:
+    - to_dict(): Recursively converts a dataclass (and nested dataclasses) to a dictionary.
+    - flatten(): Flattens nested structure for CSV or flat serialization.
+
+    Example:
+        @dataclass
+        class User(ToDictMixin):
+            name: str
+            age: int
+
+        user = User("Alice", 30)
+        user.to_dict()  # {'name': 'Alice', 'age': 30}
+    """

     def to_dict(self) -> Dict[str, Any]:
         """
         Converts a dataclass instance (with nested dataclasses) to a dictionary.
         """
-
+        from typing import cast
+
+        def convert(value: Any) -> Any:
             if isinstance(value, ToDictMixin):
                 return value.to_dict()
             elif isinstance(value, list):
-
+                # Provide a type hint for v
+                return [convert(v) for v in cast(list[Any], value)]
             elif isinstance(value, dict):
-                return {k: convert(v) for k, v in value.items()}
+                return {k: convert(v) for k, v in value.items()}  # type: ignore
             return value

         if not is_dataclass(self):
@@ -105,20 +129,20 @@ class ToDictMixin:
         """
         Flattens the nested structure into a flat dictionary for CSV serialization.
         """
-        flat_dict = {}
+        flat_dict: Dict[str, Any] = {}

-        def flatten_value(key: str, value: Any):
+        def flatten_value(key: str, value: Any) -> None:
             if isinstance(value, ToDictMixin):
                 # Flatten nested ToDictMixin dataclasses
                 nested_flat = value.flatten(prefix=f"{key}_")
                 flat_dict.update(nested_flat)
             elif isinstance(value, list):
                 # Serialize lists as JSON strings or expand into multiple columns
-                for idx, item in enumerate(value):
+                for idx, item in enumerate(value):  # type: ignore
                     flat_dict[f"{key}[{idx}]"] = item
             elif isinstance(value, dict):
                 # Serialize dicts as JSON strings or expand into multiple columns
-                for sub_key, sub_val in value.items():
+                for sub_key, sub_val in value.items():  # type: ignore
                     flat_dict[f"{key}_{sub_key}"] = sub_val
             else:
                 # Directly add non-nested fields
@@ -136,27 +160,41 @@ class ToDictMixin:

 class SingletonMeta(type):
     """
-
+    Thread-safe singleton metaclass.
+
+    Ensures only one instance of a class exists per process.
+    Use by setting `metaclass=SingletonMeta` on your class.
     """
-    _instances: Dict[
-    _lock: threading.Lock = threading.Lock()  # Ensure thread-safety
+    _instances: ClassVar[Dict[type, object]] = {}
+    _lock: ClassVar[threading.Lock] = threading.Lock()  # Ensure thread-safety

-    def __call__(
+    def __call__(
+            cls: type,
+            *args: Any, **kwargs: Any) -> "SingletonBase":
         with SingletonMeta._lock:
             if cls not in SingletonMeta._instances:
-                instance = super().__call__(*args, **kwargs)
+                instance = super().__call__(*args, **kwargs)  # type: ignore
                 SingletonMeta._instances[cls] = instance
-        return SingletonMeta._instances[cls]
+        return SingletonMeta._instances[cls]  # type: ignore[return-value]


 class SingletonBase(metaclass=SingletonMeta):
     """
     Base class for singletons using SingletonMeta.
+
+    Inherit from this class to make your class a singleton.
     """
     pass


 class InvalidValueException(ValueError):
+    """
+    Raised when an object fails validation via the Valid protocol.
+
+    Example:
+        if not obj.is_valid():
+            raise InvalidValueException(obj)
+    """
     pass


@@ -168,18 +206,33 @@ def valid[T:Valid](value: T) -> T:
         value (T:Valid): the object

     Raises:
-        TypeError: if the object does not support the Valid protocol
         InvalidValueException: if the object is invalid

     Returns:
         T:Valid: the validated object
     """
-    if not (hasattr(value, 'is_valid') and callable(
-            getattr(value, 'is_valid'))):
-        raise TypeError(
-            f"{value.__class__.__name__} does not conform to the Valid protocol")
-
     if value.is_valid():
         return value
-
-
+
+    raise InvalidValueException(value)
+
+
+def require_not_none[T](
+        value: Optional[T],
+        message: str = "Value must not be None") -> T:
+    """
+    Ensures that the provided value is not None.
+
+    Args:
+        value (Optional[T]): The value to check for None.
+        message (str, optional): The error message to use if value is None. Defaults to "Value must not be None".
+
+    Returns:
+        T: The value, guaranteed to be not None.
+
+    Raises:
+        ValueError: If value is None.
+    """
+    if value is None:
+        raise ValueError(message)
+    return value
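A compact, illustrative sketch tying the documented helpers together; the Point class mirrors the docstring example above and is not part of the package.

from dataclasses import dataclass

from qa_testing_utils.object_utils import (
    ToDictMixin, WithMixin, require_not_none)


@dataclass(frozen=True)
class Point(WithMixin, ToDictMixin):  # illustrative dataclass
    x: int
    y: int


p1 = Point(1, 2)
p2 = p1.with_(x=3)              # copy-on-change: Point(x=3, y=2)
as_dict = p2.to_dict()          # {'x': 3, 'y': 2}, per the ToDictMixin docstring example
checked = require_not_none(p2)  # passes non-None values through, raises ValueError on None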
qa_testing_utils/stream_utils.py
CHANGED
@@ -2,16 +2,43 @@
 #
 # SPDX-License-Identifier: Apache-2.0

-from typing import Iterator
+from typing import Callable, Iterator

-
+
+"""
+A generic callable type alias representing a supplier of values of type T.
+
+A Supplier is a function that takes no arguments and returns a value of type T.
+This is useful for lazy evaluation, deferred computation, or providing values on demand.
+
+Example:
+    def random_int_supplier() -> int:
+        import random
+        return random.randint(1, 100)
+
+    supplier: Supplier[int] = random_int_supplier
+"""
+type Supplier[T] = Callable[[], T]
+
+"""
+A generic callable type alias representing a predicate (boolean-valued function) over values of type T.
+
+A Predicate is a function that takes a single argument of type T and returns a boolean.
+It is commonly used for filtering, validation, or conditional logic.
+
+Example:
+    def is_even(n: int) -> bool:
+        return n % 2 == 0
+
+    even_predicate: Predicate[int] = is_even
+"""
+type Predicate[T] = Callable[[T], bool]


 def process_next[T](i: Iterator[T], p: Predicate[T]) -> Iterator[T]:
-    # DELETEME -- not needed so far
     """
-    Processes next items per specified predicate. Useful
-    first item in a stream decides the meaning of rest of items.
+    Processes next items per specified predicate. Useful for cases in which
+    the first item in a stream decides the meaning of the rest of the items.

     Args:
         i (Iterator[T]): the iterator to process
qa_testing_utils/string_utils.py
CHANGED
@@ -6,12 +6,12 @@ from typing import Callable, Type

 from ppretty import ppretty  # type: ignore

-
+EMPTY_STRING = ""
 SPACE = " "
 DOT = "."
 LF = "\n"
 UTF_8 = "utf-8"
-
+EMPTY_BYTES = b''


 def to_string[T](indent: str = ' ',
qa_testing_utils/thread_utils.py
CHANGED
qa_testing_utils-0.0.9.dist-info/RECORD
ADDED
@@ -0,0 +1,17 @@
+qa_testing_utils-0.0.9.dist-info/METADATA,sha256=FEqqeR5_qM1chBwuVr-RtgDyGg3TptUXtK5OLWpqFpU,479
+qa_testing_utils-0.0.9.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+qa_testing_utils-0.0.9.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+qa_testing_utils/__init__.py,sha256=lZoBoCrPmDHiztV57wRru9CSJWCzBJ4qzAHBIoqWxZQ,21
+qa_testing_utils/conftest_helpers.py,sha256=Hcpbc1CFNANFfVDyu9Elf5TB5tX4zSHzHSrRhlsHGsM,1408
+qa_testing_utils/exception_utils.py,sha256=iPa-EE1gvKLxzEB3KzNMWGHaS7xEH_B3Yxd8KWiMROA,1340
+qa_testing_utils/exceptions.py,sha256=_s7es20G9-ET2HeLqU0yhuDAXpnQQs_ecjBmztz94Pk,441
+qa_testing_utils/file_utils.py,sha256=a6VPIbSZQVuOEnrprzlj-YTtfMskNzuClnhECRN2CPw,6444
+qa_testing_utils/logger.py,sha256=840RhoO5AHewqEBNa6A2WbWb8DF-AFrasMPDSPH6JNM,4625
+qa_testing_utils/logging.ini,sha256=ZcCKCnUiRl3IVB0ZK9fe79PsAXEMag7QwRpxmO4yBHE,834
+qa_testing_utils/matchers.py,sha256=WOPqtCPt5tFdn6JpyDDqPfPSHQvRMpv0uXCv6-2IqNE,12558
+qa_testing_utils/object_utils.py,sha256=CNbRVB3RAmjeGDRQe9cDsSF_iuZY6vENvZj2pwB_mS0,7393
+qa_testing_utils/stream_utils.py,sha256=zYa2UDfrWsXwRP6nrG87uGkkUZPBLATHXBgu7GUk3Aw,1536
+qa_testing_utils/string_utils.py,sha256=tU1VfmzcS_Zqj4hqzdr6kWdM5dpdlRhiBNiwJAbBDVg,2617
+qa_testing_utils/thread_utils.py,sha256=3Ecyg-bnkcPyT_r9xVWS79uv1BTgXxeIJJZhT3tXorM,416
+qa_testing_utils/tuple_utils.py,sha256=pIcJntr-PNvaOIP0Pv4sBwO7oIbTVFmGwr9Ic5nJDA0,1851
+qa_testing_utils-0.0.9.dist-info/RECORD,,
qa_testing_utils-0.0.7.dist-info/RECORD
REMOVED
@@ -1,15 +0,0 @@
-qa_testing_utils-0.0.7.dist-info/METADATA,sha256=bxP3fCjJCJTkV3YtRfd6Mbwp_v3UlsFekYcqBo3a-cQ,479
-qa_testing_utils-0.0.7.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-qa_testing_utils-0.0.7.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
-qa_testing_utils/__init__.py,sha256=7sNbee72r3qCAVloFm2RRWCTa9gVjMS7nCJcJ-URzl4,21
-qa_testing_utils/exception_utils.py,sha256=fQ1UtFJg5SHEc2XhH8NHHX6aY7btkauKHQKhfr_1jQI,1340
-qa_testing_utils/exceptions.py,sha256=_s7es20G9-ET2HeLqU0yhuDAXpnQQs_ecjBmztz94Pk,441
-qa_testing_utils/file_utils.py,sha256=LGZBB6xqadpEpqY4HXKcrxCj2D3OCzyvGvZsCk6NQus,6311
-qa_testing_utils/logger.py,sha256=bWmJrPyHmg6sA_Qmjk-Guxwoa4g2VfulcIwRAWFZ5d8,4123
-qa_testing_utils/matchers.py,sha256=qd-wczEsjvdn1mYfBq7-lvYiW6sUOnQlKJgFiK-6F1I,12510
-qa_testing_utils/object_utils.py,sha256=0CtfGPrR-s9d2OHlOwfzUfWH-NLvS7rwelSvM2mS6Rg,5807
-qa_testing_utils/stream_utils.py,sha256=vIhXY2nO2NjfyLV0Uirw9XJcDlh2DAFJp2RCfQXcHrk,729
-qa_testing_utils/string_utils.py,sha256=L2hRnwnRciaW5rwY_kmuBRb9zC65VPyAGcYt-HnXt18,2616
-qa_testing_utils/thread_utils.py,sha256=73oW55OAJNqoZ-6y7B7te07CLLT4y-9sQJ831fcWpUk,293
-qa_testing_utils/tuple_utils.py,sha256=pIcJntr-PNvaOIP0Pv4sBwO7oIbTVFmGwr9Ic5nJDA0,1851
-qa_testing_utils-0.0.7.dist-info/RECORD,,
{qa_testing_utils-0.0.7.dist-info → qa_testing_utils-0.0.9.dist-info}/WHEEL
File without changes
{qa_testing_utils-0.0.7.dist-info → qa_testing_utils-0.0.9.dist-info}/entry_points.txt
File without changes