qa-testing-utils 0.0.7__tar.gz → 0.0.8__tar.gz

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (32)
  1. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/PKG-INFO +1 -1
  2. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/pyproject.toml +1 -1
  3. qa_testing_utils-0.0.8/src/qa_testing_utils/__init__.py +1 -0
  4. qa_testing_utils-0.0.8/src/qa_testing_utils/conftest_helpers.py +47 -0
  5. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/exception_utils.py +1 -1
  6. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/file_utils.py +8 -3
  7. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/logger.py +20 -5
  8. qa_testing_utils-0.0.8/src/qa_testing_utils/logging.ini +35 -0
  9. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/matchers.py +53 -46
  10. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/object_utils.py +94 -41
  11. qa_testing_utils-0.0.8/src/qa_testing_utils/stream_utils.py +51 -0
  12. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/string_utils.py +2 -2
  13. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/thread_utils.py +5 -0
  14. qa_testing_utils-0.0.8/tests/exception_utils_tests.py +34 -0
  15. qa_testing_utils-0.0.8/tests/file_utils_tests.py +92 -0
  16. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/logger_tests.py +46 -0
  17. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/matchers_tests.py +38 -3
  18. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/object_utils_tests.py +39 -0
  19. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/self_tests.py +3 -0
  20. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/stream_utils_tests.py +7 -5
  21. qa_testing_utils-0.0.8/tests/thread_utils_tests.py +16 -0
  22. qa_testing_utils-0.0.8/tests/tuple_utils_tests.py +44 -0
  23. qa_testing_utils-0.0.7/src/qa_testing_utils/__init__.py +0 -1
  24. qa_testing_utils-0.0.7/src/qa_testing_utils/stream_utils.py +0 -24
  25. qa_testing_utils-0.0.7/tests/exception_utils_tests.py +0 -14
  26. qa_testing_utils-0.0.7/tests/tuple_utils_tests.py +0 -17
  27. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/README.md +0 -0
  28. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/exceptions.py +0 -0
  29. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/src/qa_testing_utils/tuple_utils.py +0 -0
  30. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/__init__.py +0 -0
  31. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/assertion_tests.py +0 -0
  32. {qa_testing_utils-0.0.7 → qa_testing_utils-0.0.8}/tests/string_utils_tests.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qa-testing-utils
- Version: 0.0.7
+ Version: 0.0.8
  Summary: QA testing utilities
  Author-Email: Adrian Herscu <adrian.herscu@gmail.com>
  License: Apache-2.0
@@ -31,7 +31,7 @@ dependencies = [
      "more-itertools==10.7.0",
      "returns==0.25.0",
  ]
- version = "0.0.7"
+ version = "0.0.8"
  
  [project.license]
  text = "Apache-2.0"
@@ -0,0 +1 @@
+ __version__ = '0.0.8'
@@ -0,0 +1,47 @@
+ # SPDX-FileCopyrightText: 2025 Adrian Herscu
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ 
+ import inspect
+ import logging.config
+ from pathlib import Path
+ import sys
+ from typing import Callable, Optional
+ 
+ import pytest
+ 
+ 
+ def configure(config: pytest.Config,
+               path: Path = Path(__file__).parent / "logging.ini") -> None:
+     """
+     Configures logging for pytest using a specified INI file, or defaults to the internal logging.ini.
+     """
+     caller_module = inspect.getmodule(inspect.stack()[1][0])
+     module_name = caller_module.__name__ if caller_module else "unknown"
+ 
+     if path.is_file():
+         logging.config.fileConfig(path)
+         logging.info(f"{module_name} loaded logs config from: {path}")
+     else:
+         sys.stderr.write(f"{module_name} couldn't find logs config file {path}")
+ 
+ 
+ def makereport(
+         item: pytest.Item, call: pytest.CallInfo[None]) -> pytest.TestReport:
+     report = pytest.TestReport.from_item_and_call(item, call)
+ 
+     if call.when == "call":
+         report.sections.append(('body', get_test_body(item)))
+ 
+     return report
+ 
+ 
+ def get_test_body(item: pytest.Item) -> str:
+     function: Optional[Callable[..., None]] = getattr(item, 'function', None)
+     if function is None:
+         return "No function found for this test item."
+ 
+     try:
+         return inspect.getsource(function)
+     except Exception as e:
+         return f"Could not get source code: {str(e)}"
@@ -7,7 +7,7 @@ import logging
  from typing import Any, Callable
  
  from returns.maybe import Maybe, Nothing, Some
- from qa_testing_utils.object_utils import Supplier
+ from qa_testing_utils.stream_utils import Supplier
  
  
  def safely[T](supplier: Supplier[T]) -> Maybe[T]:
@@ -88,7 +88,7 @@ def stream_file(
          Iterator[bytes]: the binary chunks stream
      """
      with file_path.open('rb') as f:
-         yield from iter(lambda: f.read(chunk_size), EMPTY_BYTE_STRING)
+         yield from iter(lambda: f.read(chunk_size), EMPTY_BYTES)
  
  
  def read_lines(
@@ -180,7 +180,7 @@ def crc32_of(file: BinaryIO, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
      return crc_value & 0xFFFFFFFF  # ensure 32-bit unsigned
  
  
- def write_csv(file_path: Path, data_stream: Iterable[dict]):
+ def write_csv(file_path: Path, data_stream: Iterable[dict[str, object]]):
      """
      Writes a stream of flattened telemetry packets to a CSV file.
  
@@ -189,8 +189,13 @@ def write_csv(file_path: Path, data_stream: Iterable[dict]):
          data_stream: Iterable of dictionaries representing the rows to be written.
      """
      stream = peekable(data_stream)  # Allow peeking to extract headers
+     try:
+         first_row: dict[str, object] = stream.peek()
+     except StopIteration:
+         # No data to write
+         return
      with file_path.open(mode="w", newline="") as csv_file:
          writer = csv.DictWriter(
-             csv_file, fieldnames=list(stream.peek().keys()))
+             csv_file, fieldnames=list(first_row.keys()))
          writer.writeheader()
          writer.writerows(stream)
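The 0.0.8 guard makes an empty stream a no-op instead of letting peek() raise StopIteration. A minimal sketch of the resulting behavior (file names are illustrative):

from pathlib import Path
from qa_testing_utils.file_utils import write_csv

rows = [{"sensor": "t1", "value": 21.5}, {"sensor": "t2", "value": 19.0}]
write_csv(Path("telemetry.csv"), rows)  # header derived from the first row, then both rows
write_csv(Path("empty.csv"), [])        # returns before opening the file; nothing is written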
@@ -8,7 +8,7 @@ from functools import cached_property, wraps
  from typing import Callable, ParamSpec, TypeVar, cast, final
  
  import allure
- from qa_testing_utils.string_utils import EMPTY, LF
+ from qa_testing_utils.string_utils import EMPTY_STRING, LF
  
  
  def trace[T](value: T) -> T:
@@ -55,7 +55,15 @@ def logger[T:type](cls: T) -> T:
  
  class LoggerMixin:
      """
-     Yet another way of adding logging by deriving from this one.
+     Mixin that provides a `log` property for convenient class-based logging.
+ 
+     Inherit from this mixin to get a `self.log` logger named after the class.
+     Useful for adding debug/info/error logging to any class without boilerplate.
+ 
+     Example:
+         class MyClass(LoggerMixin):
+             def do_something(self):
+                 self.log.info("Doing something")
      """
      @final
      @cached_property
@@ -91,8 +99,15 @@ R = TypeVar('R')
  
  def traced(func: Callable[P, R]) -> Callable[P, R]:
      """
-     Method decorator that logs the function call with its arguments and the
-     return value.
+     Decorator to log function entry, arguments, and return value at DEBUG level.
+ 
+     Also adds an Allure step for reporting. Use on methods where tracing is useful
+     for debugging or reporting.
+ 
+     Example:
+         @traced
+         def my_method(self, x):
+             ...
  
      Args:
          func (Callable[P, R]): The function to be decorated.
@@ -117,7 +132,7 @@ def traced(func: Callable[P, R]) -> Callable[P, R]:
              f"{", ".join([str(arg) for arg in args[1:]])} "
              f"{LF.join(
                  f"{key}={str(value)}"
-                 for key, value in kwargs.items()) if kwargs else EMPTY}")
+                 for key, value in kwargs.items()) if kwargs else EMPTY_STRING}")
  
          with allure.step(  # type: ignore
              f"{func.__name__} "
@@ -0,0 +1,35 @@
+ ; SPDX-FileCopyrightText: 2025 Adrian Herscu
+ ;
+ ; SPDX-License-Identifier: Apache-2.0
+ 
+ [handlers]
+ ; NOTE: each key here must have a handler_xxx section below
+ keys=console,overwrite_file
+ 
+ [handler_console]
+ class=StreamHandler
+ level=DEBUG
+ formatter=time_level_thread_name_message
+ args=(sys.stdout,)
+ 
+ [handler_overwrite_file]
+ class=FileHandler
+ level=DEBUG
+ formatter=time_level_thread_name_message
+ args=('pytest.log', 'w')
+ 
+ [formatters]
+ ; NOTE: each key here must have a formatter_xxx section below
+ keys=time_level_thread_name_message
+ 
+ [formatter_time_level_thread_name_message]
+ format=%(asctime)s [%(levelname)-1.1s] [%(threadName)-10.10s]: %(name)-14.14s - %(message)s
+ ; NOTE: milliseconds aren't needed for now; otherwise append .%03d below
+ datefmt=%H:%M:%S
+ 
+ [loggers]
+ keys=root
+ 
+ [logger_root]
+ level=DEBUG
+ handlers=console,overwrite_file
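For orientation, loading this file directly produces lines like the sample in the comment below (a sketch; the logger name and message are illustrative):

import logging
import logging.config

logging.config.fileConfig("logging.ini")  # assumes the file sits in the working directory
logging.getLogger("MyTest").debug("starting")
# 12:34:56 [D] [MainThread]: MyTest         - starting
# the same line also goes to pytest.log, which mode 'w' overwrites on each run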
@@ -56,6 +56,15 @@ class ContainsStringIgnoringCase(BaseMatcher[str]):
  
  
  def contains_string_ignoring_case(substring: str) -> ContainsStringIgnoringCase:
+     """
+     Creates a matcher that checks if a given string contains the specified substring, ignoring case.
+ 
+     Args:
+         substring (str): The substring to search for within the target string, case-insensitively.
+ 
+     Returns:
+         ContainsStringIgnoringCase: A matcher object that evaluates whether the target string contains the specified substring, ignoring case.
+     """
      return ContainsStringIgnoringCase(substring)
  
  
@@ -70,12 +79,11 @@ class IsIteratorYielding[T](BaseMatcher[Iterator[T]]):
  
      @override
      def _matches(self, item: Iterable[T]) -> bool:
-         try:
-             for element in item:
-                 if self.element_matcher.matches(element):
-                     return True
-         except TypeError:  # not an iterator
-             pass
+         for element in item:
+             if self.element_matcher.matches(element):
+                 return True
+ 
+         # No matching element found
          return False
  
      @override
@@ -97,14 +105,12 @@ class IsStreamContainingEvery[T](BaseMatcher[Iterator[T]]):
  
      @override
      def _matches(self, item: Iterable[T]) -> bool:
-         try:
-             for element in item:
-                 if not self.element_matcher.matches(element):
-                     return False  # One non-matching element means failure
-             return True  # All elements matched
-         except TypeError:  # not an iterator
-             pass
-         return False
+         for element in item:
+             if not self.element_matcher.matches(element):
+                 return False  # One non-matching element means failure
+ 
+         # All elements matched
+         return True
  
      @override
      def describe_to(self, description: Description) -> None:
@@ -124,14 +130,12 @@ class IsIteratorYieldingAll[T](BaseMatcher[Iterator[T]]):
      @override
      def _matches(self, item: Iterable[T]) -> bool:
          unmatched_matchers = set(self.element_matchers)
-         try:
-             for element in item:
-                 unmatched_matchers = {
-                     m for m in unmatched_matchers if not m.matches(element)}
-                 if not unmatched_matchers:  # All matchers have been satisfied
-                     return True
-         except TypeError:  # not an iterator
-             pass
+         for element in item:
+             unmatched_matchers = {
+                 m for m in unmatched_matchers if not m.matches(element)}
+             if not unmatched_matchers:  # All matchers have been satisfied
+                 return True
+ 
          return False
  
      @override
@@ -154,38 +158,31 @@ class IsWithinDates(BaseMatcher[DateOrDateTime]):
          self.end_date = end_date
  
      def _matches(self, item: Optional[DateOrDateTime]) -> bool:
-         if not isinstance(item, (date, datetime)):
+         if item is None:
              return False
  
-         # Convert item to a consistent type for comparison
-         if isinstance(item, datetime):
-             item = item.date() if isinstance(
-                 self.start_date, date) or isinstance(
-                 self.end_date, date) else item
-         elif isinstance(item, date) and (isinstance(self.start_date, datetime) or isinstance(self.end_date, datetime)):
+         # Normalize item to datetime
+         if not isinstance(item, datetime):
              item = datetime.combine(item, datetime.min.time())
  
-         # Convert start_date and end_date to compatible types if they are not None
-         start = self.start_date
-         if start is not None:
-             start = start.date() if isinstance(
-                 start, datetime) and isinstance(item, date) else start
+         # Normalize start_date and end_date to datetime
+         def to_datetime(value: Optional[DateOrDateTime]) -> Optional[datetime]:
+             if value is None:
+                 return None
+             return value if isinstance(
+                 value, datetime) else datetime.combine(
+                 value, datetime.min.time())
  
-         end = self.end_date
-         if end is not None:
-             end = end.date() if isinstance(
-                 end, datetime) and isinstance(
-                 item, date) else end
+         start = to_datetime(self.start_date)
+         end = to_datetime(self.end_date)
  
-         # Perform the comparison, handling open-ended ranges
-         if start is None and end is not None:
-             return item <= end
-         elif start is not None and end is None:
-             return item >= start
-         elif start is not None and end is not None:
+         if start and end:
              return start <= item <= end
+         if start:
+             return item >= start
+         if end:
+             return item <= end
  
-         # If both start_date and end_date are None, return False (no valid range)
          return False
  
      def describe_to(self, description: Description) -> None:
@@ -201,6 +198,16 @@ class IsWithinDates(BaseMatcher[DateOrDateTime]):
  def within_dates(
          start_date: Optional[DateOrDateTime],
          end_date: Optional[DateOrDateTime]) -> IsWithinDates:
+     """
+     Creates an instance of IsWithinDates to check if a date or datetime value falls within the specified start and end dates.
+ 
+     Args:
+         start_date (Optional[DateOrDateTime]): The start of the date range. Can be None to indicate no lower bound.
+         end_date (Optional[DateOrDateTime]): The end of the date range. Can be None to indicate no upper bound.
+ 
+     Returns:
+         IsWithinDates: An instance configured with the provided start and end dates.
+     """
      return IsWithinDates(start_date, end_date)
  
  
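Since 0.0.8 the matcher normalizes date and datetime operands to datetime before comparing, so mixed bounds behave uniformly; a minimal sketch (values are illustrative):

from datetime import date, datetime
from hamcrest import assert_that
from qa_testing_utils.matchers import within_dates

assert_that(datetime(2023, 6, 15), within_dates(
    datetime(2023, 1, 1), datetime(2023, 12, 31)))     # closed range
assert_that(date(2023, 6, 15), within_dates(date(2023, 1, 1), None))  # open upper bound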
@@ -5,13 +5,11 @@
  import threading
  from dataclasses import asdict, fields, is_dataclass, replace
  from enum import Enum
- from typing import (Any, Callable, Dict, Protocol, Type, final)
- 
- # TODO: move to stream_utils module
- type Supplier[T] = Callable[[], T]
- type Predicate[T] = Callable[[T], bool]
+ from typing import (Any, Dict, Optional, Protocol,
+                     final, runtime_checkable, ClassVar)
  
  
+ @runtime_checkable
  class Valid(Protocol):
      """
      Specifies a method for validating objects.
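Marking the protocol @runtime_checkable permits isinstance checks against Valid; a minimal sketch (the Order class is illustrative):

from qa_testing_utils.object_utils import Valid

class Order:
    def is_valid(self) -> bool:
        return True

assert isinstance(Order(), Valid)  # structural check on the is_valid method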
@@ -29,15 +27,20 @@ class Valid(Protocol):
  
  class ImmutableMixin:
      """
-     Enforces immutability by overriding __setattr__ to raise AttributeError.
+     Mixin to enforce immutability after initialization.
  
-     This implementation does not work with the WithMixin if the attributes are
-     initialized with default values.
+     Overrides __setattr__ to raise AttributeError if an attribute is modified after being set.
+     Intended for use with non-dataclasses. For dataclasses, use `@dataclass(frozen=True)`.
  
-     It also does not work when applied to a super type for which the __init__
-     is overridden.
+     Limitations:
+     - Does not work with WithMixin if attributes have default values.
+     - Does not work if applied to a superclass with a custom __init__.
  
-     Use it with non-dataclasses.
+     Example:
+         class MyImmutable(ImmutableMixin):
+             foo: int = 1
+         obj = MyImmutable()
+         obj.foo = 2  # Raises AttributeError
      """
  
      def __setattr__(self, key: str, value: Any) -> None:
@@ -49,19 +52,21 @@ class ImmutableMixin:
  
  class WithMixin:
      '''
-     Supports immutability by copying on change.
- 
-     For example, instead of mutating like this::
+     Mixin to support copy-on-change (functional update) for objects.
  
-         obj.field = a_new_value
+     Instead of mutating an object, use `with_()` to create a copy with updated fields:
+         obj2 = obj.with_(field=new_value)
  
-     use::
+     Works with both plain Python classes and dataclasses.
  
-         dup_object_with_changes = obj.with_(field=a_new_value)
+     Example:
+         @dataclass(frozen=True)
+         class Point(WithMixin):
+             x: int
+             y: int
  
-     This will ensure that the changes are applied on a duplicate of `obj`.
- 
-     Can be applied on plain Python classes, and on `dataclases` too.
+         p1 = Point(1, 2)
+         p2 = p1.with_(x=3)  # p2 is Point(3, 2)
      '''
      @final
      def with_[T:WithMixin](self: T, **changes: Any) -> T:
@@ -82,18 +87,37 @@ class WithMixin:
  
  
  class ToDictMixin:
+     """
+     Mixin to add serialization methods to dataclasses.
+ 
+     Provides:
+     - to_dict(): Recursively converts a dataclass (and nested dataclasses) to a dictionary.
+     - flatten(): Flattens nested structure for CSV or flat serialization.
+ 
+     Example:
+         @dataclass
+         class User(ToDictMixin):
+             name: str
+             age: int
+ 
+         user = User("Alice", 30)
+         user.to_dict()  # {'name': 'Alice', 'age': 30}
+     """
  
      def to_dict(self) -> Dict[str, Any]:
          """
          Converts a dataclass instance (with nested dataclasses) to a dictionary.
          """
-         def convert(value):
+         from typing import cast
+ 
+         def convert(value: Any) -> Any:
              if isinstance(value, ToDictMixin):
                  return value.to_dict()
              elif isinstance(value, list):
-                 return [convert(v) for v in value]
+                 # Provide a type hint for v
+                 return [convert(v) for v in cast(list[Any], value)]
              elif isinstance(value, dict):
-                 return {k: convert(v) for k, v in value.items()}
+                 return {k: convert(v) for k, v in value.items()}  # type: ignore
              return value
  
          if not is_dataclass(self):
@@ -105,20 +129,20 @@ class ToDictMixin:
          """
          Flattens the nested structure into a flat dictionary for CSV serialization.
          """
-         flat_dict = {}
+         flat_dict: Dict[str, Any] = {}
  
-         def flatten_value(key: str, value: Any):
+         def flatten_value(key: str, value: Any) -> None:
              if isinstance(value, ToDictMixin):
                  # Flatten nested ToDictMixin dataclasses
                  nested_flat = value.flatten(prefix=f"{key}_")
                  flat_dict.update(nested_flat)
              elif isinstance(value, list):
                  # Serialize lists as JSON strings or expand into multiple columns
-                 for idx, item in enumerate(value):
+                 for idx, item in enumerate(value):  # type: ignore
                      flat_dict[f"{key}[{idx}]"] = item
              elif isinstance(value, dict):
                  # Serialize dicts as JSON strings or expand into multiple columns
-                 for sub_key, sub_val in value.items():
+                 for sub_key, sub_val in value.items():  # type: ignore
                      flat_dict[f"{key}_{sub_key}"] = sub_val
              else:
                  # Directly add non-nested fields
136
160
 
137
161
  class SingletonMeta(type):
138
162
  """
139
- A thread-safe implementation of a Singleton metaclass.
163
+ Thread-safe singleton metaclass.
164
+
165
+ Ensures only one instance of a class exists per process.
166
+ Use by setting `metaclass=SingletonMeta` on your class.
140
167
  """
141
- _instances: Dict[Type['SingletonBase'], 'SingletonBase'] = {}
142
- _lock: threading.Lock = threading.Lock() # Ensure thread-safety
168
+ _instances: ClassVar[Dict[type, object]] = {}
169
+ _lock: ClassVar[threading.Lock] = threading.Lock() # Ensure thread-safety
143
170
 
144
- def __call__(cls, *args: Any, **kwargs: Any) -> 'SingletonBase':
171
+ def __call__(
172
+ cls: type,
173
+ *args: Any, **kwargs: Any) -> "SingletonBase":
145
174
  with SingletonMeta._lock:
146
175
  if cls not in SingletonMeta._instances:
147
- instance = super().__call__(*args, **kwargs)
176
+ instance = super().__call__(*args, **kwargs) # type: ignore
148
177
  SingletonMeta._instances[cls] = instance
149
- return SingletonMeta._instances[cls]
178
+ return SingletonMeta._instances[cls] # type: ignore[return-value]
150
179
 
151
180
 
152
181
  class SingletonBase(metaclass=SingletonMeta):
153
182
  """
154
183
  Base class for singletons using SingletonMeta.
184
+
185
+ Inherit from this class to make your class a singleton.
155
186
  """
156
187
  pass
157
188
 
158
189
 
159
190
  class InvalidValueException(ValueError):
191
+ """
192
+ Raised when an object fails validation via the Valid protocol.
193
+
194
+ Example:
195
+ if not obj.is_valid():
196
+ raise InvalidValueException(obj)
197
+ """
160
198
  pass
161
199
 
162
200
 
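A minimal sketch of the singleton behavior, consistent with the tests further below (the Config class is illustrative):

from qa_testing_utils.object_utils import SingletonBase

class Config(SingletonBase):
    def __init__(self, env: str = "dev"):
        self.env = env

assert Config("prod") is Config("qa")  # one instance per process; later args are ignored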
@@ -168,18 +206,33 @@ def valid[T:Valid](value: T) -> T:
          value (T:Valid): the object
  
      Raises:
-         TypeError: if the object does not support the Valid protocol
          InvalidValueException: if the object is invalid
  
      Returns:
          T:Valid: the validated object
      """
-     if not (hasattr(value, 'is_valid') and callable(
-             getattr(value, 'is_valid'))):
-         raise TypeError(
-             f"{value.__class__.__name__} does not conform to the Valid protocol")
- 
      if value.is_valid():
          return value
-     else:
-         raise InvalidValueException(value)
+ 
+     raise InvalidValueException(value)
+ 
+ 
+ def require_not_none[T](
+         value: Optional[T],
+         message: str = "Value must not be None") -> T:
+     """
+     Ensures that the provided value is not None.
+ 
+     Args:
+         value (Optional[T]): The value to check for None.
+         message (str, optional): The error message to use if value is None. Defaults to "Value must not be None".
+ 
+     Returns:
+         T: The value, guaranteed to be not None.
+ 
+     Raises:
+         ValueError: If value is None.
+     """
+     if value is None:
+         raise ValueError(message)
+     return value
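A minimal usage sketch (the lookup function is illustrative):

from typing import Optional
from qa_testing_utils.object_utils import require_not_none

def find_user(user_id: int) -> Optional[str]:
    return "alice" if user_id == 1 else None

name = require_not_none(find_user(1), "user not found")  # "alice", typed as str
require_not_none(find_user(2), "user not found")         # raises ValueError("user not found")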
@@ -0,0 +1,51 @@
+ # SPDX-FileCopyrightText: 2025 Adrian Herscu
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ 
+ from typing import Callable, Iterator
+ 
+ 
+ """
+ A generic callable type alias representing a supplier of values of type T.
+ 
+ A Supplier is a function that takes no arguments and returns a value of type T.
+ This is useful for lazy evaluation, deferred computation, or providing values on demand.
+ 
+ Example:
+     def random_int_supplier() -> int:
+         import random
+         return random.randint(1, 100)
+ 
+     supplier: Supplier[int] = random_int_supplier
+ """
+ type Supplier[T] = Callable[[], T]
+ 
+ """
+ A generic callable type alias representing a predicate (boolean-valued function) over values of type T.
+ 
+ A Predicate is a function that takes a single argument of type T and returns a boolean.
+ It is commonly used for filtering, validation, or conditional logic.
+ 
+ Example:
+     def is_even(n: int) -> bool:
+         return n % 2 == 0
+ 
+     even_predicate: Predicate[int] = is_even
+ """
+ type Predicate[T] = Callable[[T], bool]
+ 
+ 
+ def process_next[T](i: Iterator[T], p: Predicate[T]) -> Iterator[T]:
+     """
+     Processes next items per the specified predicate. Useful for cases in which
+     the first item in a stream decides the meaning of the rest of the items.
+ 
+     Args:
+         i (Iterator[T]): the iterator to process
+         p (Predicate[T]): the predicate to be applied on `next(i)`
+ 
+     Returns:
+         Iterator[T]: the original iterator if the predicate evaluated true, \
+             otherwise an empty iterator
+     """
+     return i if p(next(i)) else iter([])
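A minimal sketch of process_next, where the first item acts as a header that decides whether the remainder is consumed (the magic-byte framing is illustrative):

from qa_testing_utils.stream_utils import process_next

packets = iter([0xA5, 1, 2, 3])                       # first item is a magic byte
payload = process_next(packets, lambda b: b == 0xA5)  # predicate inspects next(packets)
assert list(payload) == [1, 2, 3]                     # rest flows through on a match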
@@ -6,12 +6,12 @@ from typing import Callable, Type
  
  from ppretty import ppretty  # type: ignore
  
- EMPTY = ""
+ EMPTY_STRING = ""
  SPACE = " "
  DOT = "."
  LF = "\n"
  UTF_8 = "utf-8"
- EMPTY_BYTE_STRING = b''
+ EMPTY_BYTES = b''
  
  
  def to_string[T](indent: str = ' ',
@@ -10,4 +10,9 @@ COMMON_EXECUTOR = concurrent.futures.ThreadPoolExecutor()
  
  
  def sleep_for(duration: timedelta):
+     """
+     Sleep for the specified duration.
+     Args:
+         duration (timedelta): The amount of time to sleep.
+     """
      time.sleep(duration.total_seconds())
@@ -0,0 +1,34 @@
+ # SPDX-FileCopyrightText: 2025 Adrian Herscu
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ 
+ from typing import Any
+ 
+ from qa_testing_utils.exception_utils import *
+ 
+ 
+ def should_return_nothing_when_exception_is_raised():
+     def trouble(p: Any) -> str:
+         raise Exception("trouble")
+ 
+     assert safely(lambda: trouble(7)).value_or("nada") == "nada"
+ 
+ 
+ def should_return_value_when_no_exception_occurs():
+     def ok() -> int:
+         return 42
+     assert safely(ok).value_or(-1) == 42
+ 
+ 
+ def should_return_result_when_swallow_decorator_and_no_exception():
+     @swallow
+     def ok(x: int) -> int:
+         return x * 2
+     assert ok(3) == 6
+ 
+ 
+ def should_return_none_when_swallow_decorator_and_exception():
+     @swallow
+     def fail() -> None:
+         raise RuntimeError("fail!")
+     assert fail() is None
@@ -0,0 +1,92 @@
+ import csv
+ from pathlib import Path
+ from qa_testing_utils.file_utils import *
+ 
+ 
+ def should_create_valid_csv(tmp_path: Path) -> None:
+     # Prepare test data
+     data: list[dict[str, object]] = [
+         {"a": 1, "b": "x"},
+         {"a": 2, "b": "y"},
+         {"a": 3, "b": "z"},
+     ]
+     csv_path = tmp_path / "test.csv"
+ 
+     # Call the function
+     write_csv(csv_path, data)
+ 
+     # Read back and check
+     with csv_path.open() as f:
+         reader = csv.DictReader(f)
+         rows = list(reader)
+         assert rows == [
+             {"a": "1", "b": "x"},
+             {"a": "2", "b": "y"},
+             {"a": "3", "b": "z"},
+         ]
+ 
+ 
+ def should_write_empty_csv(tmp_path: Path) -> None:
+     csv_path = tmp_path / "empty.csv"
+     write_csv(csv_path, [])
+     assert not csv_path.exists() or csv_path.read_text() == ""
+ 
+ 
+ def should_iterable_reader_reads_chunks():
+     data = [b'abc', b'def', b'ghi']
+     reader = IterableReader.from_(data)
+     assert reader.read() == b'abcdefghi'
+     reader.close()
+ 
+ 
+ def should_stream_file_reads_file(tmp_path: Path):
+     file_path = tmp_path / "test.bin"
+     file_path.write_bytes(b"1234567890")
+     chunks = list(stream_file(file_path, chunk_size=4))
+     assert chunks == [b"1234", b"5678", b"90"]
+ 
+ 
+ def should_read_lines_handles_split_lines():
+     chunks = [b"hello ", b"world\nthis is", b" a test\nend"]
+     lines = list(read_lines(chunks, encoding="utf-8", eol="\n"))
+     assert lines == ["hello world", "this is a test", "end"]
+ 
+ 
+ def should_crc32_of_file(tmp_path: Path):
+     file_path = tmp_path / "crc.bin"
+     file_path.write_bytes(b"abc123")
+     with file_path.open("rb") as f:
+         crc = crc32_of(f)
+     import zlib
+     assert crc == zlib.crc32(b"abc123") & 0xFFFFFFFF
+ 
+ 
+ def should_decompress_xz_stream():
+     import lzma
+     original = [b"hello world"]
+     compressed = [lzma.compress(original[0])]
+     decompressed = list(decompress_xz_stream(compressed))
+     assert b"".join(decompressed) == b"hello world"
+ 
+ 
+ def should_extract_files_from_tar():
+     import tarfile
+     import io
+     # Create a tar archive in memory
+     file_content = b"testdata"
+     tar_bytes = io.BytesIO()
+     with tarfile.open(fileobj=tar_bytes, mode="w") as tar:
+         info = tarfile.TarInfo(name="file.txt")
+         info.size = len(file_content)
+         tar.addfile(info, io.BytesIO(file_content))
+     tar_bytes.seek(0)
+     # Split tar_bytes into chunks to simulate streaming
+     tar_chunks = list(iter(lambda: tar_bytes.read(4), b""))
+     # Extract files from tar stream
+     files = list(extract_files_from_tar(tar_chunks))
+     assert len(files) == 1
+     tarinfo, data = files[0]
+     assert tarinfo.name == "file.txt"
+     assert data == file_content
+ 
+ # TODO: decompress_xz_stream and extract_files_from_tar require binary test data.
@@ -46,3 +46,49 @@ def should_preserve_signature():
  
      print(my_func.__name__)  # Prints: my_func
      print(my_func.__doc__)  # Prints: my documentation
+ 
+ 
+ def should_return_value_and_log_with_trace():
+     value = trace(123)
+     assert value == 123
+ 
+ 
+ def should_inject_logger_with_logger_decorator():
+     @logger
+     class Foo:
+         pass
+     f = Foo()
+     # The logger is injected as a property, so we access it via the property
+     log = getattr(f, 'log', None)
+     assert log is not None
+     assert isinstance(log, logging.Logger)
+     assert log.name == 'Foo'
+ 
+ 
+ def should_provide_log_property_with_logger_mixin():
+     class Bar(LoggerMixin):
+         pass
+     b = Bar()
+     assert hasattr(b, 'log')
+     assert isinstance(b.log, logging.Logger)
+     assert b.log.name == 'Bar'
+ 
+ 
+ def should_return_value_and_log_with_logger_mixin_trace():
+     class Baz(LoggerMixin):
+         pass
+     b = Baz()
+     value = b.trace('abc')
+     assert value == 'abc'
+ 
+ 
+ def should_log_entry_and_exit_with_traced_decorator():
+     calls: list[tuple[int, int]] = []
+ 
+     @traced
+     def foo(x: int, y: int) -> int:
+         calls.append((x, y))
+         return x + y
+     result: int = foo(2, 3)
+     assert result == 5
+     assert calls == [(2, 3)]
@@ -3,7 +3,7 @@
  # SPDX-License-Identifier: Apache-2.0
  
  from datetime import datetime
- from typing import Callable
+ from typing import Callable, Union
  
  import attr
  import pytest
@@ -118,7 +118,8 @@ def should_adapt_stream():
      ]
  )
  def should_match_within_dates(
-         test_date: datetime, start_date: Union[datetime, None],
+         test_date: datetime,
+         start_date: Union[datetime, None],
          end_date: Union[datetime, None]):
      assert_that(test_date, within_dates(start_date, end_date))
  
@@ -126,12 +127,46 @@ def should_match_within_dates(
  @pytest.mark.parametrize(
      "test_date, start_date, end_date",
      [
+         (None, datetime(2023, 1, 1), datetime(2023, 12, 31)),
          (datetime(2022, 12, 31), datetime(2023, 1, 1), datetime(2023, 12, 31)),
          (datetime(2024, 1, 1), datetime(2023, 1, 1), datetime(2023, 12, 31)),
      ]
  )
  def should_fail_not_within_dates(
-         test_date: datetime, start_date: Union[datetime, None],
+         test_date: datetime,
+         start_date: Union[datetime, None],
          end_date: Union[datetime, None]):
      with pytest.raises(AssertionError):
          assert_that(test_date, within_dates(start_date, end_date))
+ 
+ 
+ def should_match_contains_string_ignoring_case():
+     assert_that("Hello World", contains_string_ignoring_case("hello"))
+     assert_that("Hello World", contains_string_ignoring_case("WORLD"))
+     # Should fail if not present
+     with pytest.raises(AssertionError):
+         assert_that("Hello World", contains_string_ignoring_case("bye"))
+ 
+ 
+ def should_match_iterator_yielding():
+     # Should match if any element matches
+     assert_that(iter([1, 2, 3]), yields_item(2))
+     # Should fail if none match
+     with pytest.raises(AssertionError):
+         assert_that(iter([1, 2, 3]), yields_item(5))
+ 
+ 
+ def should_match_stream_containing_every():
+     # Should match if all elements match
+     assert_that(iter([2, 2, 2]), yields_every(2))
+     # Should fail if any element does not match
+     with pytest.raises(AssertionError):
+         assert_that(iter([2, 2, 3]), yields_every(2))
+ 
+ 
+ def should_match_iterator_yielding_all():
+     # Should match if all specified items are yielded at least once
+     assert_that(iter([1, 2, 3]), yields_items([1, 2]))
+     # Should fail if any specified item is not yielded
+     with pytest.raises(AssertionError):
+         assert_that(iter([1, 2, 3]), yields_items([1, 4]))
@@ -111,3 +111,42 @@ def should_be_singleton():
  
      assert BarSingleton(1) == BarSingleton(2)
      assert BarSingleton(3).i == 1  # type: ignore
+ 
+ 
+ def should_convert_to_dict_and_flatten():
+     from dataclasses import dataclass
+ 
+     @dataclass
+     class Address(ToDictMixin):
+         city: str
+         zip: int
+ 
+     @dataclass
+     class User(ToDictMixin):
+         name: str
+         age: int
+         address: Address
+         tags: list[str]
+         meta: dict[str, int]
+     user = User(
+         "Alice", 30, Address("London", 12345),
+         ["a", "b"],
+         {"score": 10})
+     # to_dict
+     d = user.to_dict()
+     assert d == {
+         "name": "Alice",
+         "age": 30,
+         "address": {"city": "London", "zip": 12345},
+         "tags": ["a", "b"],
+         "meta": {"score": 10}
+     }
+     # flatten
+     flat = user.flatten()
+     assert flat["name"] == "Alice"
+     assert flat["age"] == 30
+     assert flat["address_city"] == "London"
+     assert flat["address_zip"] == 12345
+     assert flat["tags[0]"] == "a"
+     assert flat["tags[1]"] == "b"
+     assert flat["meta_score"] == 10
@@ -25,10 +25,12 @@ class SelfTests(LoggerMixin):
          self.log.debug(f"after {method}")
  
      def should_print(self):
+         """Test that print statement works (placeholder/self-test)."""
          print("hello")
  
      @traced
      def should_assert_true(self):
+         """Test that a traced assertion passes (decorator coverage)."""
          assert True
  
      # @pytest.mark.skip
@@ -65,6 +67,7 @@ class SelfTests(LoggerMixin):
              is_("Foo(id=1, mandatory='present', name='kuku')"))
  
      def should_retry(self):
+         """Test retry logic for a function that may fail multiple times."""
          retry_policy = retry(
              stop=stop_after_attempt(1000),
              wait=wait_fixed(timedelta(milliseconds=1)),
@@ -2,17 +2,19 @@
  #
  # SPDX-License-Identifier: Apache-2.0
  
+ # NOTE: there is another streaming library named pystreamapi.
+ # In contrast with pyfunctional it is type annotated.
+ # However, it eagerly consumes its underlying stream, defeating its purpose...
+ # ISSUE see https://github.com/PickwickSoft/pystreamapi/issues/94
+ # hence, we'll ignore type warnings:
+ # type: ignore
+ 
  from typing import Iterator
  
  from functional import seq
  from more_itertools import peekable
  from qa_testing_utils.stream_utils import *
  
- # NOTE: there is another streaming library named pystreamapi.
- # In contrast with pyfunctional it is type annotated.
- # However, it eagerly consumes its underlying stream, defeating its purpose...
- # ISSUE see https://github.com/PickwickSoft/pystreamapi/issues/94
- 
  
  def gen() -> Iterator[int]:
      yield -1
@@ -0,0 +1,16 @@
+ # SPDX-FileCopyrightText: 2025 Adrian Herscu
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ 
+ from datetime import timedelta
+ from qa_testing_utils.thread_utils import sleep_for
+ import time
+ 
+ 
+ def should_sleep_for_specified_duration():
+     start = time.monotonic()
+     sleep_for(timedelta(milliseconds=120))
+     elapsed = time.monotonic() - start
+     # Allow some tolerance for timing imprecision
+     assert elapsed >= 0.11
+     assert elapsed < 0.5
@@ -0,0 +1,44 @@
+ # SPDX-FileCopyrightText: 2025 Adrian Herscu
+ #
+ # SPDX-License-Identifier: Apache-2.0
+ 
+ import attr
+ from hamcrest import assert_that, is_  # type: ignore
+ from qa_testing_utils.tuple_utils import *
+ 
+ 
+ def should_assert_from_tuple():
+     @attr.define
+     class Foo(FromTupleMixin):
+         id: int
+         name: str
+ 
+     assert_that(str(Foo.from_tuple((1, "kuku"))),
+                 is_("Foo(id=1, name='kuku')"))
+ 
+ 
+ def should_assert_from_tuple_with_frozen_dataclass():
+     import dataclasses
+ 
+     @dataclasses.dataclass(frozen=True)
+     class Bar(FromTupleMixin):
+         x: int
+         y: str
+     bar = Bar.from_tuple((42, "baz"))
+     assert bar.x == 42
+     assert bar.y == "baz"
+     assert isinstance(bar, Bar)
+ 
+ 
+ def should_assert_from_tuple_with_vanilla_class():
+     class Baz(FromTupleMixin):
+         a: int
+         b: str
+ 
+         def __init__(self, a: int, b: str):
+             self.a = a
+             self.b = b
+     baz = Baz.from_tuple((7, "qux"))
+     assert baz.a == 7
+     assert baz.b == "qux"
+     assert isinstance(baz, Baz)
@@ -1 +0,0 @@
- __version__ = '0.0.7'
@@ -1,24 +0,0 @@
- # SPDX-FileCopyrightText: 2025 Adrian Herscu
- #
- # SPDX-License-Identifier: Apache-2.0
- 
- from typing import Iterator
- 
- from qa_testing_utils.object_utils import Predicate
- 
- 
- def process_next[T](i: Iterator[T], p: Predicate[T]) -> Iterator[T]:
-     # DELETEME -- not needed so far
-     """
-     Processes next items per specified predicate. Useful, for cases in which
-     first item in a stream decides the meaning of rest of items.
- 
-     Args:
-         i (Iterator[T]): the iterator to process
-         p (Predicate[T]): the predicate to be applied on `next(i)`
- 
-     Returns:
-         Iterator[T]: the original iterator if the predicate evaluated true, \
-             otherwise empty iterator
-     """
-     return i if p(next(i)) else iter([])
@@ -1,14 +0,0 @@
- # SPDX-FileCopyrightText: 2025 Adrian Herscu
- #
- # SPDX-License-Identifier: Apache-2.0
- 
- from typing import Any
- 
- from qa_testing_utils.exception_utils import *
- 
- 
- def should_swallow_exception():
-     def trouble(p: Any) -> str:
-         raise Exception("trouble")
- 
-     assert safely(lambda: trouble(7)).value_or("nada") == "nada"
@@ -1,17 +0,0 @@
- # SPDX-FileCopyrightText: 2025 Adrian Herscu
- #
- # SPDX-License-Identifier: Apache-2.0
- 
- import attr
- from hamcrest import assert_that, is_  # type: ignore
- from qa_testing_utils.tuple_utils import *
- 
- 
- def should_assert_from_tuple():
-     @attr.define
-     class Foo(FromTupleMixin):
-         id: int
-         name: str
- 
-     assert_that(str(Foo.from_tuple((1, "kuku"))),
-                 is_("Foo(id=1, name='kuku')"))