asyncstdlib 3.12.5__tar.gz → 3.13.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/LICENSE +1 -1
  2. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/PKG-INFO +4 -3
  3. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/__init__.py +1 -1
  4. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/_lrucache.pyi +7 -0
  5. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/asynctools.py +1 -1
  6. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/builtins.py +2 -2
  7. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/builtins.pyi +1 -1
  8. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/contextlib.py +9 -12
  9. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/contextlib.pyi +2 -2
  10. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/functools.py +20 -24
  11. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/itertools.py +18 -5
  12. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/itertools.pyi +13 -7
  13. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/pyproject.toml +1 -1
  14. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_builtins.py +2 -3
  15. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_functools.py +5 -5
  16. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_itertools.py +18 -7
  17. asyncstdlib-3.13.1/unittests/utility.py +150 -0
  18. asyncstdlib-3.12.5/unittests/utility.py +0 -142
  19. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/README.rst +0 -0
  20. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/_core.py +0 -0
  21. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/_lrucache.py +0 -0
  22. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/_typing.py +0 -0
  23. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/_utility.py +0 -0
  24. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/functools.pyi +0 -0
  25. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/heapq.py +0 -0
  26. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/heapq.pyi +0 -0
  27. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/asyncstdlib/py.typed +0 -0
  28. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/__init__.py +0 -0
  29. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_asynctools.py +0 -0
  30. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_contextlib.py +0 -0
  31. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_functools_lru.py +0 -0
  32. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_heapq.py +0 -0
  33. {asyncstdlib-3.12.5 → asyncstdlib-3.13.1}/unittests/test_helpers.py +0 -0
@@ -1,6 +1,6 @@
1
1
  MIT License
2
2
 
3
- Copyright (c) 2019 - 2020 Max Fischer
3
+ Copyright (c) 2019 - 2024 Max Kühn
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
@@ -1,9 +1,9 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.4
2
2
  Name: asyncstdlib
3
- Version: 3.12.5
3
+ Version: 3.13.1
4
4
  Summary: The missing async toolbox
5
5
  Keywords: async,enumerate,itertools,builtins,functools,contextlib
6
- Author-email: Max Fischer <maxfischer2781@gmail.com>
6
+ Author-email: Max Kühn <maxfischer2781@gmail.com>
7
7
  Requires-Python: ~=3.8
8
8
  Description-Content-Type: text/x-rst
9
9
  Classifier: Development Status :: 5 - Production/Stable
@@ -17,6 +17,7 @@ Classifier: Programming Language :: Python :: 3.10
17
17
  Classifier: Programming Language :: Python :: 3.11
18
18
  Classifier: Programming Language :: Python :: 3.12
19
19
  Classifier: Programming Language :: Python :: 3.13
20
+ License-File: LICENSE
20
21
  Requires-Dist: sphinx ; extra == "doc"
21
22
  Requires-Dist: sphinxcontrib-trio ; extra == "doc"
22
23
  Requires-Dist: pytest ; extra == "test"
@@ -45,7 +45,7 @@ from .itertools import (
45
45
  from .asynctools import borrow, scoped_iter, await_each, any_iter, apply, sync
46
46
  from .heapq import merge, nlargest, nsmallest
47
47
 
48
- __version__ = "3.12.5"
48
+ __version__ = "3.13.1"
49
49
 
50
50
  __all__ = [
51
51
  "anext",
@@ -9,6 +9,7 @@ from typing import (
9
9
  overload,
10
10
  Protocol,
11
11
  )
12
+ from types import CoroutineType
12
13
  from typing_extensions import ParamSpec, Concatenate
13
14
 
14
15
  from ._typing import AC, TypedDict
@@ -42,6 +43,12 @@ class LRUAsyncCallable(Protocol[AC]):
42
43
  owner: type | None = ...,
43
44
  ) -> LRUAsyncBoundCallable[S, P, R]: ...
44
45
  @overload
46
+ def __get__(
47
+ self: LRUAsyncCallable[Callable[Concatenate[S, P], CoroutineType[Any, Any, R]]],
48
+ instance: S,
49
+ owner: type | None = ...,
50
+ ) -> LRUAsyncBoundCallable[S, P, R]: ...
51
+ @overload
45
52
  def __get__(
46
53
  self: LRUAsyncCallable[Callable[Concatenate[S, P], Awaitable[R]]],
47
54
  instance: S,
@@ -35,7 +35,7 @@ class _BorrowedAsyncIterator(AsyncGenerator[T, S]):
35
35
  __slots__ = "__wrapped__", "__anext__", "asend", "athrow", "_wrapper"
36
36
 
37
37
  # Type checker does not understand `__slot__` definitions
38
- __anext__: Callable[[Any], Awaitable[T]]
38
+ __anext__: Callable[[Any], Coroutine[Any, Any, T]]
39
39
  asend: Any
40
40
  athrow: Any
41
41
 
@@ -164,7 +164,7 @@ async def zip(
164
164
 
165
165
 
166
166
  async def _zip_inner(
167
- aiters: Tuple[AsyncIterator[T], ...]
167
+ aiters: Tuple[AsyncIterator[T], ...],
168
168
  ) -> AsyncIterator[Tuple[T, ...]]:
169
169
  """Direct zip transposing tuple-of-iterators to iterator-of-tuples"""
170
170
  try:
@@ -175,7 +175,7 @@ async def _zip_inner(
175
175
 
176
176
 
177
177
  async def _zip_inner_strict(
178
- aiters: Tuple[AsyncIterator[T], ...]
178
+ aiters: Tuple[AsyncIterator[T], ...],
179
179
  ) -> AsyncIterator[Tuple[T, ...]]:
180
180
  """Length aware zip checking that all iterators are equal length"""
181
181
  # track index of the last iterator we tried to anext
@@ -231,7 +231,7 @@ async def tuple(iterable: AnyIterable[T]) -> builtins.tuple[T, ...]: ...
231
231
  async def dict() -> builtins.dict[Any, Any]: ...
232
232
  @overload
233
233
  async def dict(
234
- iterable: AnyIterable[builtins.tuple[HK, T]]
234
+ iterable: AnyIterable[builtins.tuple[HK, T]],
235
235
  ) -> builtins.dict[HK, T]: ...
236
236
  @overload
237
237
  async def dict(
@@ -28,7 +28,7 @@ AbstractContextManager = AsyncContextManager
28
28
 
29
29
 
30
30
  def contextmanager(
31
- func: Callable[..., AsyncGenerator[T, None]]
31
+ func: Callable[..., AsyncGenerator[T, None]],
32
32
  ) -> Callable[..., AsyncContextManager[T]]:
33
33
  r"""
34
34
  Create an asynchronous context manager out of an asynchronous generator function
@@ -328,18 +328,15 @@ class ExitStack:
328
328
  If a context manager must also be entered, use :py:meth:`~.enter_context`
329
329
  instead.
330
330
  """
331
- try:
331
+ aexit: Callable[..., Awaitable[Union[None, bool]]]
332
+ if hasattr(exit, "__aexit__"):
332
333
  aexit = exit.__aexit__ # type: ignore
333
- except AttributeError:
334
- try:
335
- aexit = awaitify(
336
- exit.__exit__, # type: ignore
337
- )
338
- except AttributeError:
339
- assert callable(
340
- exit
341
- ), f"Expected (async) context manager or callable, got {exit}"
342
- aexit = awaitify(exit)
334
+ elif hasattr(exit, "__exit__"):
335
+ aexit = awaitify(exit.__exit__) # type: ignore
336
+ elif callable(exit):
337
+ aexit = awaitify(exit) # type: ignore
338
+ else:
339
+ raise TypeError(f"Expected (async) context manager or callable, got {exit}")
343
340
  self._exit_callbacks.append(aexit) # pyright: ignore[reportUnknownArgumentType]
344
341
  return exit
345
342
 
@@ -55,12 +55,12 @@ class ContextDecorator(AsyncContextManager[T], metaclass=ABCMeta):
55
55
  P = ParamSpec("P")
56
56
 
57
57
  def contextmanager(
58
- func: Callable[P, AsyncGenerator[T, None]]
58
+ func: Callable[P, AsyncGenerator[T, None]],
59
59
  ) -> Callable[P, ContextDecorator[T]]: ...
60
60
 
61
61
  class closing(Generic[AClose]):
62
62
  def __init__(self, thing: AClose) -> None: ...
63
- async def __aenter__(self: Self) -> Self: ...
63
+ async def __aenter__(self: Self) -> AClose: ...
64
64
  async def __aexit__(
65
65
  self,
66
66
  exc_type: type[BaseException] | None,
@@ -7,7 +7,6 @@ from typing import (
7
7
  Generic,
8
8
  Generator,
9
9
  Optional,
10
- Coroutine,
11
10
  AsyncContextManager,
12
11
  Type,
13
12
  cast,
@@ -66,25 +65,25 @@ class AwaitableValue(Generic[R]):
66
65
  return f"{self.__class__.__name__}({self.value!r})"
67
66
 
68
67
 
69
- class _FutureCachedValue(Generic[R, T]):
70
- """A placeholder object to control concurrent access to a cached awaitable value.
68
+ class _FutureCachedPropertyValue(Generic[R, T]):
69
+ """
70
+ A placeholder object to control concurrent access to a cached awaitable value
71
71
 
72
72
  When given a lock to coordinate access, only the first task to await on a
73
73
  cached property triggers the underlying coroutine. Once a value has been
74
74
  produced, all tasks are unblocked and given the same, single value.
75
-
76
75
  """
77
76
 
78
- __slots__ = ("_get_attribute", "_instance", "_name", "_lock")
77
+ __slots__ = ("_func", "_instance", "_name", "_lock")
79
78
 
80
79
  def __init__(
81
80
  self,
82
- get_attribute: Callable[[T], Coroutine[Any, Any, R]],
81
+ func: Callable[[T], Awaitable[R]],
83
82
  instance: T,
84
83
  name: str,
85
84
  lock: AsyncContextManager[Any],
86
85
  ):
87
- self._get_attribute = get_attribute
86
+ self._func = func
88
87
  self._instance = instance
89
88
  self._name = name
90
89
  self._lock = lock
@@ -98,7 +97,6 @@ class _FutureCachedValue(Generic[R, T]):
98
97
 
99
98
  If the instance (no longer) has this attribute, it was deleted and the
100
99
  process is restarted by delegating to the descriptor.
101
-
102
100
  """
103
101
  try:
104
102
  return self._instance.__dict__[self._name]
@@ -116,12 +114,17 @@ class _FutureCachedValue(Generic[R, T]):
116
114
  # the instance attribute is still this placeholder, and we
117
115
  # hold the lock. Start the getter to store the value on the
118
116
  # instance and return the value.
119
- return await self._get_attribute(self._instance)
117
+ return await self._get_attribute()
120
118
 
121
119
  # another task produced a value, or the instance.__dict__ object was
122
120
  # deleted in the interim.
123
121
  return await stored
124
122
 
123
+ async def _get_attribute(self) -> R:
124
+ value = await self._func(self._instance)
125
+ self._instance.__dict__[self._name] = AwaitableValue(value)
126
+ return value
127
+
125
128
  def __repr__(self) -> str:
126
129
  return (
127
130
  f"<{type(self).__name__} for '{type(self._instance).__name__}."
@@ -135,9 +138,10 @@ class CachedProperty(Generic[T, R]):
135
138
  getter: Callable[[T], Awaitable[R]],
136
139
  asynccontextmanager_type: Type[AsyncContextManager[Any]] = nullcontext,
137
140
  ):
138
- self.func = getter
141
+ self.func = self.__wrapped__ = getter
139
142
  self.attrname = None
140
143
  self.__doc__ = getter.__doc__
144
+ self.__module__ = getter.__module__
141
145
  self._asynccontextmanager_type = asynccontextmanager_type
142
146
 
143
147
  def __set_name__(self, owner: Any, name: str) -> None:
@@ -175,19 +179,12 @@ class CachedProperty(Generic[T, R]):
175
179
  # on this instance. It takes care of coordinating between different
176
180
  # tasks awaiting on the placeholder until the cached value has been
177
181
  # produced.
178
- wrapper = _FutureCachedValue(
179
- self._get_attribute, instance, name, self._asynccontextmanager_type()
182
+ wrapper = _FutureCachedPropertyValue(
183
+ self.func, instance, name, self._asynccontextmanager_type()
180
184
  )
181
185
  cache[name] = wrapper
182
186
  return wrapper
183
187
 
184
- async def _get_attribute(self, instance: T) -> R:
185
- value = await self.func(instance)
186
- name = self.attrname
187
- assert name is not None # enforced in __get__
188
- instance.__dict__[name] = AwaitableValue(value)
189
- return value
190
-
191
188
 
192
189
  def cached_property(
193
190
  type_or_getter: Union[Type[AsyncContextManager[Any]], Callable[[T], Awaitable[R]]],
@@ -257,7 +254,9 @@ def cached_property(
257
254
  Instances on which a value is to be cached must have a
258
255
  ``__dict__`` attribute that is a mutable mapping.
259
256
  """
260
- if isinstance(type_or_getter, type) and issubclass(
257
+ if iscoroutinefunction(type_or_getter):
258
+ return CachedProperty(type_or_getter)
259
+ elif isinstance(type_or_getter, type) and issubclass(
261
260
  type_or_getter, AsyncContextManager
262
261
  ):
263
262
 
@@ -272,12 +271,9 @@ def cached_property(
272
271
  )
273
272
 
274
273
  return decorator
275
-
276
- if not iscoroutinefunction(type_or_getter):
274
+ else:
277
275
  raise ValueError("cached_property can only be used with a coroutine function")
278
276
 
279
- return CachedProperty(type_or_getter)
280
-
281
277
 
282
278
  __REDUCE_SENTINEL = Sentinel("<no default>")
283
279
 
@@ -32,7 +32,6 @@ from .builtins import (
32
32
  zip,
33
33
  enumerate as aenumerate,
34
34
  iter as aiter,
35
- tuple as atuple,
36
35
  )
37
36
 
38
37
  S = TypeVar("S")
@@ -122,17 +121,31 @@ async def accumulate(
122
121
  yield value
123
122
 
124
123
 
125
- async def batched(iterable: AnyIterable[T], n: int) -> AsyncIterator[Tuple[T, ...]]:
124
+ async def batched(
125
+ iterable: AnyIterable[T], n: int, strict: bool = False
126
+ ) -> AsyncIterator[Tuple[T, ...]]:
126
127
  """
127
128
  Batch the ``iterable`` to tuples of the length ``n``.
128
129
 
129
- This lazily exhausts ``iterable`` and returns each batch as soon as it's ready.
130
+ This lazily exhausts ``iterable`` and returns each batch as soon as it is ready.
131
+ If ``strict`` is :py:data:`True` and the last batch is smaller than ``n``,
132
+ :py:exc:`ValueError` is raised.
130
133
  """
131
134
  if n < 1:
132
135
  raise ValueError("n must be at least one")
133
136
  async with ScopedIter(iterable) as item_iter:
134
- while batch := await atuple(islice(_borrow(item_iter), n)):
135
- yield batch
137
+ batch: list[T] = []
138
+ try:
139
+ while True:
140
+ batch.clear()
141
+ for _ in range(n):
142
+ batch.append(await anext(item_iter))
143
+ yield tuple(batch)
144
+ except StopAsyncIteration:
145
+ if batch:
146
+ if strict and len(batch) < n:
147
+ raise ValueError("batched(): incomplete batch") from None
148
+ yield tuple(batch)
136
149
 
137
150
 
138
151
  class chain(AsyncIterator[T]):
@@ -32,27 +32,33 @@ def accumulate(
32
32
  initial: T1,
33
33
  ) -> AsyncIterator[T1]: ...
34
34
  @overload
35
- def batched(iterable: AnyIterable[T], n: Literal[1]) -> AsyncIterator[tuple[T]]: ...
35
+ def batched(
36
+ iterable: AnyIterable[T], n: Literal[1], strict: bool = ...
37
+ ) -> AsyncIterator[tuple[T]]: ...
36
38
  @overload
37
- def batched(iterable: AnyIterable[T], n: Literal[2]) -> AsyncIterator[tuple[T, T]]: ...
39
+ def batched(
40
+ iterable: AnyIterable[T], n: Literal[2], strict: bool = ...
41
+ ) -> AsyncIterator[tuple[T, T]]: ...
38
42
  @overload
39
43
  def batched(
40
- iterable: AnyIterable[T], n: Literal[3]
44
+ iterable: AnyIterable[T], n: Literal[3], strict: bool = ...
41
45
  ) -> AsyncIterator[tuple[T, T, T]]: ...
42
46
  @overload
43
47
  def batched(
44
- iterable: AnyIterable[T], n: Literal[4]
48
+ iterable: AnyIterable[T], n: Literal[4], strict: bool = ...
45
49
  ) -> AsyncIterator[tuple[T, T, T, T]]: ...
46
50
  @overload
47
51
  def batched(
48
- iterable: AnyIterable[T], n: Literal[5]
52
+ iterable: AnyIterable[T], n: Literal[5], strict: bool = ...
49
53
  ) -> AsyncIterator[tuple[T, T, T, T, T]]: ...
50
54
  @overload
51
55
  def batched(
52
- iterable: AnyIterable[T], n: Literal[6]
56
+ iterable: AnyIterable[T], n: Literal[6], strict: bool = ...
53
57
  ) -> AsyncIterator[tuple[T, T, T, T, T, T]]: ...
54
58
  @overload
55
- def batched(iterable: AnyIterable[T], n: int) -> AsyncIterator[tuple[T, ...]]: ...
59
+ def batched(
60
+ iterable: AnyIterable[T], n: int, strict: bool = ...
61
+ ) -> AsyncIterator[tuple[T, ...]]: ...
56
62
 
57
63
  class chain(AsyncIterator[T]):
58
64
  __slots__: tuple[str, ...]
@@ -6,7 +6,7 @@ build-backend = "flit_core.buildapi"
6
6
  dynamic = ["version", "description"]
7
7
  name = "asyncstdlib"
8
8
  authors = [
9
- {name = "Max Fischer", email = "maxfischer2781@gmail.com"},
9
+ {name = "Max Kühn", email = "maxfischer2781@gmail.com"},
10
10
  ]
11
11
  readme = "README.rst"
12
12
  classifiers = [
@@ -4,7 +4,7 @@ import pytest
4
4
 
5
5
  import asyncstdlib as a
6
6
 
7
- from .utility import sync, asyncify, awaitify, inside_loop
7
+ from .utility import sync, asyncify, awaitify
8
8
 
9
9
 
10
10
  def hide_coroutine(corofunc):
@@ -83,8 +83,7 @@ async def test_zip_close_immediately():
83
83
  yield 1
84
84
  finally:
85
85
  nonlocal closed
86
- if await inside_loop():
87
- closed = True
86
+ closed = True
88
87
 
89
88
  zip_iter = a.zip(asyncify(range(-5, 0)), SomeIterable())
90
89
  async for va, vb in zip_iter:
@@ -5,7 +5,7 @@ import pytest
5
5
  import asyncstdlib as a
6
6
  from asyncstdlib.functools import CachedProperty
7
7
 
8
- from .utility import Lock, Schedule, Switch, asyncify, multi_sync, sync
8
+ from .utility import Lock, Schedule, Switch, asyncify, sync
9
9
 
10
10
 
11
11
  @sync
@@ -44,7 +44,7 @@ async def test_cache_property_nodict():
44
44
  Foo().bar
45
45
 
46
46
 
47
- @multi_sync
47
+ @sync
48
48
  async def test_cache_property_order():
49
49
  class Value:
50
50
  def __init__(self, value):
@@ -66,7 +66,7 @@ async def test_cache_property_order():
66
66
  assert (await val.cached) == 1337 # last value fetched
67
67
 
68
68
 
69
- @multi_sync
69
+ @sync
70
70
  async def test_cache_property_lock_order():
71
71
  class Value:
72
72
  def __init__(self, value):
@@ -87,7 +87,7 @@ async def test_cache_property_lock_order():
87
87
  assert (await val.cached) == 5 # first value fetched
88
88
 
89
89
 
90
- @multi_sync
90
+ @sync
91
91
  async def test_cache_property_lock_deletion():
92
92
  class Value:
93
93
  def __init__(self, value):
@@ -300,7 +300,7 @@ async def test_lru_cache_misuse():
300
300
 
301
301
 
302
302
  @pytest.mark.parametrize("size", [16, None])
303
- @multi_sync
303
+ @sync
304
304
  async def test_lru_cache_concurrent(size):
305
305
  current = 0
306
306
 
@@ -6,7 +6,7 @@ import pytest
6
6
 
7
7
  import asyncstdlib as a
8
8
 
9
- from .utility import sync, asyncify, awaitify, multi_sync, Schedule, Switch, Lock
9
+ from .utility import sync, asyncify, awaitify, Schedule, Switch, Lock
10
10
 
11
11
 
12
12
  @sync
@@ -79,6 +79,19 @@ async def test_batched_invalid(length):
79
79
  await a.list(a.batched(range(10), length))
80
80
 
81
81
 
82
+ @sync
83
+ @pytest.mark.parametrize("values", ([1, 2, 3, 4], [1, 2, 3, 4, 5], [1]))
84
+ async def test_batched_strict(values: "list[int]"):
85
+ for n in range(1, len(values) + 1):
86
+ batches = a.batched(values, n, strict=True)
87
+ if len(values) % n == 0:
88
+ assert values == list(await a.reduce(lambda a, b: a + b, batches))
89
+ else:
90
+ assert await a.anext(batches)
91
+ with pytest.raises(ValueError):
92
+ await a.list(batches)
93
+
94
+
82
95
  @sync
83
96
  async def test_cycle():
84
97
  async for _ in a.cycle([]):
@@ -314,7 +327,7 @@ async def test_tee():
314
327
  assert await a.list(iterator) == iterable
315
328
 
316
329
 
317
- @multi_sync
330
+ @sync
318
331
  async def test_tee_concurrent_locked():
319
332
  """Test that properly uses a lock for synchronisation"""
320
333
  items = [1, 2, 3, -5, 12, 78, -1, 111]
@@ -322,8 +335,7 @@ async def test_tee_concurrent_locked():
322
335
  async def iter_values():
323
336
  for item in items:
324
337
  # switch to other tasks a few times to guarantees another runs
325
- for _ in range(5):
326
- await Switch()
338
+ await Switch(5)
327
339
  yield item
328
340
 
329
341
  async def test_peer(peer_tee):
@@ -345,7 +357,7 @@ async def test_tee_concurrent_locked():
345
357
  platform.python_implementation() != "CPython",
346
358
  reason="async generators only protect against concurrent access on CPython",
347
359
  )
348
- @multi_sync
360
+ @sync
349
361
  async def test_tee_concurrent_unlocked():
350
362
  """Test that tee does not prevent concurrency without a lock"""
351
363
  items = list(range(12))
@@ -354,8 +366,7 @@ async def test_tee_concurrent_unlocked():
354
366
  async def iter_values():
355
367
  for item in items:
356
368
  # switch to other tasks a few times to guarantee another runs
357
- for _ in range(5):
358
- await Switch()
369
+ await Switch(5)
359
370
  yield item
360
371
 
361
372
  async def test_peer(peer_tee):
@@ -0,0 +1,150 @@
1
+ from typing import (
2
+ Callable,
3
+ Coroutine,
4
+ Iterable,
5
+ AsyncIterator,
6
+ TypeVar,
7
+ Awaitable,
8
+ Deque,
9
+ Tuple,
10
+ Any,
11
+ )
12
+ from functools import wraps
13
+ from collections import deque
14
+ from random import randint
15
+
16
+
17
+ T = TypeVar("T")
18
+
19
+
20
+ async def asyncify(iterable: Iterable[T]) -> AsyncIterator[T]:
21
+ """
22
+ Convert an iterable into an async iterable
23
+
24
+ This is intended to sequence literals like lists to `async` iterators
25
+ in order to force usage of `async` code paths. There is no functional
26
+ or other advantage otherwise.
27
+ """
28
+ for value in iterable:
29
+ yield value
30
+
31
+
32
+ def awaitify(call: Callable[..., T]) -> Callable[..., Awaitable[T]]:
33
+ """
34
+ Convert a callable (`foo()`) into an async callable (`await foo()`)
35
+
36
+ This is intended to convert `lambda` expressions to `async` functions
37
+ in order to force usage of `async` code paths. There is no functional
38
+ or other advantage otherwise.
39
+ """
40
+
41
+ async def await_wrapper(*args: Any, **kwargs: Any) -> T:
42
+ return call(*args, **kwargs)
43
+
44
+ return await_wrapper
45
+
46
+
47
+ class Schedule:
48
+ r"""
49
+ Signal to the event loop to adopt and run new coroutines
50
+
51
+ :param coros: The coroutines to start running
52
+
53
+ In order to communicate with the event loop and start the coroutines,
54
+ the :py:class:`Schedule` must be `await`\ ed.
55
+ """
56
+
57
+ def __init__(self, *coros: Coroutine[Any, Any, Any]):
58
+ self.coros = coros
59
+
60
+ def __await__(self):
61
+ yield self
62
+
63
+
64
+ class Switch:
65
+ """
66
+ Signal to the event loop to run another coroutine
67
+
68
+ Pauses the coroutine but immediately continues after
69
+ all other runnable coroutines of the event loop.
70
+ This is similar to the common ``sleep(0)`` function
71
+ of regular event loop frameworks.
72
+
73
+ If a single argument is given, this specifies how many
74
+ turns should be skipped. The default corresponds to `0`.
75
+ If two arguments are given, this is interpreted as an
76
+ inclusive interval to randomly select the skip count.
77
+ """
78
+
79
+ def __init__(self, skip: int = 0, limit: int = 0, /) -> None:
80
+ if limit <= 0:
81
+ self._idle_count = skip
82
+ else:
83
+ self._idle_count = randint(skip, limit)
84
+
85
+ def __await__(self):
86
+ yield self
87
+ for _ in range(self._idle_count):
88
+ yield self
89
+
90
+
91
+ class Lock:
92
+ """Simple lock for exclusive access"""
93
+
94
+ def __init__(self):
95
+ self._owned = False
96
+ self._waiting: list[object] = []
97
+
98
+ async def __aenter__(self):
99
+ if self._owned:
100
+ # wait until it is our turn to take the lock
101
+ token = object()
102
+ self._waiting.append(token)
103
+ # a spin-lock should be fine since tests are short anyways
104
+ while self._owned or self._waiting[0] is not token:
105
+ await Switch()
106
+ # we will take the lock now, remove our wait claim
107
+ self._waiting.pop(0)
108
+ self._owned = True
109
+
110
+ async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):
111
+ self._owned = False
112
+
113
+
114
+ def sync(test_case: Callable[..., Coroutine[None, Any, Any]], /) -> Callable[..., None]:
115
+ """
116
+ Mark an ``async def`` test case to be run synchronously with children
117
+
118
+ This provides a primitive "event loop" which only responds
119
+ to :py:class:`Schedule`, :py:class:`Switch` and :py:class:`Lock`.
120
+
121
+ It should be applied as a decorator on an ``async def`` function, which
122
+ is then turned into a synchronous callable that will run the ``async def``
123
+ function and all tasks it spawns.
124
+ Other decorators, most prominently :py:func:`pytest.mark.parametrize`,
125
+ can be applied around it.
126
+ """
127
+
128
+ @wraps(test_case)
129
+ def run_sync(*args: Any, **kwargs: Any):
130
+ run_queue: Deque[Tuple[Coroutine[Any, Any, Any], Any]] = deque()
131
+ run_queue.append((test_case(*args, **kwargs), None))
132
+ while run_queue:
133
+ coro, event = run_queue.popleft()
134
+ try:
135
+ event = coro.send(event)
136
+ except StopIteration as e:
137
+ result = e.args[0] if e.args else None
138
+ assert result is None, f"got '{result!r}' expected 'None'"
139
+ else:
140
+ if isinstance(event, Schedule):
141
+ run_queue.extend((new_coro, None) for new_coro in event.coros)
142
+ run_queue.append((coro, event))
143
+ elif isinstance(event, Switch):
144
+ run_queue.append((coro, event))
145
+ else: # pragma: no cover
146
+ raise RuntimeError(
147
+ f"test case {test_case} yielded an unexpected event {event}"
148
+ )
149
+
150
+ return run_sync
@@ -1,142 +0,0 @@
1
- from typing import (
2
- Callable,
3
- Coroutine,
4
- Iterable,
5
- AsyncIterator,
6
- TypeVar,
7
- Awaitable,
8
- Deque,
9
- Tuple,
10
- Any,
11
- )
12
- from functools import wraps
13
- from collections import deque
14
-
15
-
16
- T = TypeVar("T")
17
-
18
-
19
- async def asyncify(iterable: Iterable[T]) -> AsyncIterator[T]:
20
- """Convert an iterable to async iterable"""
21
- for value in iterable:
22
- yield value
23
-
24
-
25
- def awaitify(call: Callable[..., T]) -> Callable[..., Awaitable[T]]:
26
- async def await_wrapper(*args, **kwargs):
27
- return call(*args, **kwargs)
28
-
29
- return await_wrapper
30
-
31
-
32
- class PingPong:
33
- """Signal to the event loop which gets returned unchanged"""
34
-
35
- def __await__(self):
36
- return (yield self)
37
-
38
-
39
- async def inside_loop():
40
- """Test whether there is an active event loop available"""
41
- signal = PingPong()
42
- return await signal is signal
43
-
44
-
45
- def sync(test_case: Callable[..., Coroutine[T, Any, Any]]) -> Callable[..., T]:
46
- """
47
- Mark an ``async def`` test case to be run synchronously
48
-
49
- This emulates a primitive "event loop" which only responds
50
- to the :py:class:`PingPong` by sending it back.
51
- """
52
-
53
- @wraps(test_case)
54
- def run_sync(*args: Any, **kwargs: Any) -> T:
55
- coro = test_case(*args, **kwargs)
56
- try:
57
- event = None
58
- while True:
59
- event = coro.send(event)
60
- if not isinstance(event, PingPong): # pragma: no cover
61
- raise RuntimeError(
62
- f"test case {test_case} yielded an unexpected event {event}"
63
- )
64
- except StopIteration as e:
65
- result = e.args[0] if e.args else None
66
- return result
67
-
68
- return run_sync
69
-
70
-
71
- class Schedule:
72
- """Signal to the event loop to adopt and run a new coroutine"""
73
-
74
- def __init__(self, *coros: Coroutine[Any, Any, Any]):
75
- self.coros = coros
76
-
77
- def __await__(self):
78
- yield self
79
-
80
-
81
- class Switch:
82
- """Signal to the event loop to run another coroutine"""
83
-
84
- def __await__(self):
85
- yield self
86
-
87
-
88
- class Lock:
89
- def __init__(self):
90
- self._owned = False
91
- self._waiting: list[object] = []
92
-
93
- async def __aenter__(self):
94
- if self._owned:
95
- # wait until it is our turn to take the lock
96
- token = object()
97
- self._waiting.append(token)
98
- while self._owned or self._waiting[0] is not token:
99
- await Switch()
100
- # take the lock and remove our wait claim
101
- self._owned = True
102
- self._waiting.pop(0)
103
- self._owned = True
104
-
105
- async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):
106
- self._owned = False
107
-
108
-
109
- def multi_sync(test_case: Callable[..., Coroutine[T, Any, Any]]) -> Callable[..., T]:
110
- """
111
- Mark an ``async def`` test case to be run synchronously with children
112
-
113
- This emulates a primitive "event loop" which only responds
114
- to the :py:class:`PingPong`, :py:class:`Schedule`, :py:class:`Switch`
115
- and :py:class:`Lock`.
116
- """
117
-
118
- @wraps(test_case)
119
- def run_sync(*args: Any, **kwargs: Any):
120
- run_queue: Deque[Tuple[Coroutine[Any, Any, Any], Any]] = deque()
121
- run_queue.append((test_case(*args, **kwargs), None))
122
- while run_queue:
123
- coro, event = run_queue.popleft()
124
- try:
125
- event = coro.send(event)
126
- except StopIteration as e:
127
- result = e.args[0] if e.args else None
128
- assert result is None, f"got '{result!r}' expected 'None'"
129
- else:
130
- if isinstance(event, PingPong):
131
- run_queue.appendleft((coro, event))
132
- elif isinstance(event, Schedule):
133
- run_queue.extend((new_coro, None) for new_coro in event.coros)
134
- run_queue.append((coro, event))
135
- elif isinstance(event, Switch):
136
- run_queue.append((coro, event))
137
- else: # pragma: no cover
138
- raise RuntimeError(
139
- f"test case {test_case} yielded an unexpected event {event}"
140
- )
141
-
142
- return run_sync
File without changes