queutils 0.9.4__tar.gz → 0.10.0__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (32)
  1. {queutils-0.9.4 → queutils-0.10.0}/PKG-INFO +1 -3
  2. {queutils-0.9.4 → queutils-0.10.0}/README.md +0 -2
  3. {queutils-0.9.4 → queutils-0.10.0}/docs/iterablequeue.md +0 -2
  4. {queutils-0.9.4 → queutils-0.10.0}/pypi.md +0 -2
  5. {queutils-0.9.4 → queutils-0.10.0}/pyproject.toml +6 -1
  6. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/__init__.py +5 -0
  7. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/asyncqueue.py +0 -8
  8. queutils-0.10.0/src/queutils/awrap.py +45 -0
  9. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/eventcounterqueue.py +0 -7
  10. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/filequeue.py +7 -11
  11. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/iterablequeue.py +62 -62
  12. queutils-0.10.0/tests/test_awrap.py +100 -0
  13. {queutils-0.9.4 → queutils-0.10.0}/tests/test_eventcounterqueue.py +1 -1
  14. {queutils-0.9.4 → queutils-0.10.0}/tests/test_iterablequeue.py +67 -51
  15. {queutils-0.9.4 → queutils-0.10.0}/.github/workflows/codeql.yml +0 -0
  16. {queutils-0.9.4 → queutils-0.10.0}/.github/workflows/dependency-review.yml +0 -0
  17. {queutils-0.9.4 → queutils-0.10.0}/.github/workflows/python-package.yml +0 -0
  18. {queutils-0.9.4 → queutils-0.10.0}/.github/workflows/python-publish.yml +0 -0
  19. {queutils-0.9.4 → queutils-0.10.0}/.gitignore +0 -0
  20. {queutils-0.9.4 → queutils-0.10.0}/LICENSE +0 -0
  21. {queutils-0.9.4 → queutils-0.10.0}/codecov.yml +0 -0
  22. {queutils-0.9.4 → queutils-0.10.0}/demos/asyncqueue_demo.py +0 -0
  23. {queutils-0.9.4 → queutils-0.10.0}/demos/filequeue_demo.py +0 -0
  24. {queutils-0.9.4 → queutils-0.10.0}/demos/iterablequeue_demo.py +0 -0
  25. {queutils-0.9.4 → queutils-0.10.0}/docs/asyncqueue.md +0 -0
  26. {queutils-0.9.4 → queutils-0.10.0}/docs/filequeue.md +0 -0
  27. {queutils-0.9.4 → queutils-0.10.0}/docs/rm_links +0 -0
  28. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/countable.py +0 -0
  29. {queutils-0.9.4 → queutils-0.10.0}/src/queutils/py.typed +0 -0
  30. {queutils-0.9.4 → queutils-0.10.0}/tests/test_asyncqueue.py +0 -0
  31. {queutils-0.9.4 → queutils-0.10.0}/tests/test_demos.py +0 -0
  32. {queutils-0.9.4 → queutils-0.10.0}/tests/test_filequeue.py +0 -0
{queutils-0.9.4 → queutils-0.10.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: queutils
- Version: 0.9.4
+ Version: 0.10.0
  Summary: Handy Python Queue utilies
  Project-URL: Homepage, https://github.com/Jylpah/queutils
  Project-URL: Bug Tracker, https://github.com/Jylpah/queutils/issues
@@ -72,8 +72,6 @@ producers are "finished", the queue enters into "filled" state and no new items
  - Producers must be registered with `add_producer()` and they must notify the queue
  with `finish()` once they have finished adding items
  - Countable interface to count number of items task_done() through `count` property
- - Countable property can be disabled with count_items=False. This is useful when you
- want to sum the count of multiple IterableQueues

  # EventCounterQueue

{queutils-0.9.4 → queutils-0.10.0}/README.md
@@ -41,8 +41,6 @@ producers are "finished", the queue enters into "filled" state and no new items
  - Producers must be registered with `add_producer()` and they must notify the queue
  with `finish()` once they have finished adding items
  - Countable interface to count number of items task_done() through `count` property
- - Countable property can be disabled with count_items=False. This is useful when you
- want to sum the count of multiple IterableQueues

  # EventCounterQueue

{queutils-0.9.4 → queutils-0.10.0}/docs/iterablequeue.md
@@ -11,8 +11,6 @@ iterated in `async for` loop. The great benefit of `IterableQueue` is that it te
  - Producers must be registered with `add_producer()` and they must notify the queue
  with `finish()` once they have finished adding items
  - Countable interface to count number of items task_done() through `count` property
- - Countable property can be disabled with count_items=False. This is useful when you
- want to sum the count of multiple IterableQueues

  ### Experimental

{queutils-0.9.4 → queutils-0.10.0}/pypi.md
@@ -41,8 +41,6 @@ producers are "finished", the queue enters into "filled" state and no new items
  - Producers must be registered with `add_producer()` and they must notify the queue
  with `finish()` once they have finished adding items
  - Countable interface to count number of items task_done() through `count` property
- - Countable property can be disabled with count_items=False. This is useful when you
- want to sum the count of multiple IterableQueues

  # EventCounterQueue

{queutils-0.9.4 → queutils-0.10.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "queutils"
- version = "0.9.4"
+ version = "0.10.0"
  authors = [{ name = "Jylpah", email = "jylpah@gmail.com" }]
  description = "Handy Python Queue utilies"
  readme = { file = "pypi.md", content-type = "text/markdown" }
@@ -67,3 +67,8 @@ asyncio_default_fixture_loop_scope = "function"
  reportGeneralTypeIssues = false
  reportInvalidStringEscapeSequence = false
  typeCheckingMode = "off"
+ include = ["src", "tests"]
+ extraPaths = ["src"]
+
+ [tool.coverage.run]
+ omit = ['tests/*']
{queutils-0.9.4 → queutils-0.10.0}/src/queutils/__init__.py
@@ -6,6 +6,10 @@ from .eventcounterqueue import (
  QCounter as QCounter,
  EventCounterQueue as EventCounterQueue,
  )
+ from .awrap import (
+ awrap as awrap,
+ abatch as abatch,
+ )

  __all__ = [
  "asyncqueue",
@@ -13,4 +17,5 @@ __all__ = [
  "eventcounterqueue",
  "filequeue",
  "iterablequeue",
+ "awrap",
  ]
{queutils-0.9.4 → queutils-0.10.0}/src/queutils/asyncqueue.py
@@ -19,18 +19,10 @@ from queue import Full, Empty, Queue
  from asyncio.queues import QueueEmpty, QueueFull
  import asyncio
  from typing import Generic, TypeVar
- import logging
  from asyncio import sleep

  T = TypeVar("T")

- logger = logging.getLogger(__name__)
-
- debug = logger.debug
- message = logger.warning
- verbose = logger.info
- error = logger.error
-

  class AsyncQueue(asyncio.Queue, Generic[T]):
  """
queutils-0.10.0/src/queutils/awrap.py (new file)
@@ -0,0 +1,45 @@
+ """awrap() is a async wrapper for Iterables
+
+ It converts an Iterable[T] to AsyncGenerator[T].
+ AsyncGenerator[T] is also AsyncIterable[T] allowing it to be used in async for
+ """
+
+ from typing import Iterable, Sequence, TypeVar, AsyncGenerator, AsyncIterable
+
+ T = TypeVar("T")
+
+
+ async def awrap(iterable: Iterable[T]) -> AsyncGenerator[T, None]:
+     """
+     Async wrapper for Iterable[T] so it can be used in async for
+     Can be used in async for loop
+     """
+     for item in iter(iterable):
+         yield item
+
+
+ async def abatch(
+     iterable: Iterable[T] | AsyncIterable[T], size: int
+ ) -> AsyncGenerator[Sequence[T], None]:
+     """
+     Async wrapper reads batches from a AsyncIterable[T] or Iterable[T]
+     Can be used in async for loop
+     """
+     batch: list[T] = []
+     if isinstance(iterable, AsyncIterable):
+         async for item in iterable:
+             batch.append(item)
+             if len(batch) == size:
+                 yield batch
+                 batch = []
+     elif isinstance(iterable, Iterable):
+         for item in iterable:
+             batch.append(item)
+             if len(batch) == size:
+                 yield batch
+                 batch = []
+     else:
+         raise TypeError(f"Expected Iterable or AsyncIterable, got {type(iterable)}")
+
+     if batch:
+         yield batch
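Usage sketch for the new module (illustrative only, not part of the diff; it assumes queutils 0.10.0 is installed and relies on the package-level re-exports shown in the `__init__.py` hunk above):

```python
# Minimal sketch of awrap()/abatch(), assuming queutils >= 0.10.0.
import asyncio

from queutils import abatch, awrap


async def main() -> None:
    # awrap(): drive a plain Iterable with `async for`
    async for item in awrap(range(5)):
        print(item)  # 0, 1, 2, 3, 4

    # abatch(): read fixed-size batches from an Iterable (or AsyncIterable);
    # the final batch may be shorter than `size`
    async for batch in abatch(range(10), size=4):
        print(batch)  # [0, 1, 2, 3], [4, 5, 6, 7], [8, 9]


asyncio.run(main())
```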
{queutils-0.9.4 → queutils-0.10.0}/src/queutils/eventcounterqueue.py
@@ -4,13 +4,6 @@ from deprecated import deprecated
  from .countable import Countable
  from .iterablequeue import IterableQueue, QueueDone
  from collections import defaultdict
- import logging
-
- logger = logging.getLogger()
- error = logger.error
- message = logger.warning
- verbose = logger.info
- debug = logger.debug

  ###########################################
  #
{queutils-0.9.4 → queutils-0.10.0}/src/queutils/filequeue.py
@@ -29,8 +29,6 @@ from .iterablequeue import IterableQueue

  logger = logging.getLogger(__name__)
  error = logger.error
- message = logger.warning
- verbose = logger.info
  debug = logger.debug


@@ -47,8 +45,8 @@ def str2path(filename: str | Path, suffix: str | None = None) -> Path:

  class FileQueue(IterableQueue[Path]):
  """
- Class to create a IterableQueue(asyncio.Queue) of filenames based on
- given directories and files as arguments.
+ Class to create a IterableQueue(asyncio.Queue) of filenames based on
+ given directories and files as arguments.
  Supports include/exclude filters based on filenames.
  """

@@ -58,21 +56,20 @@ class FileQueue(IterableQueue[Path]):
  filter: str = "*",
  exclude: bool = False,
  case_sensitive: bool = True,
- follow_symlinks: bool = False,
+ follow_symlinks: bool = False,
  **kwargs,
  ):
  assert base is None or isinstance(base, Path), "base has to be Path or None"
  assert isinstance(filter, str), "filter has to be string"
  assert isinstance(case_sensitive, bool), "case_sensitive has to be bool"
  assert isinstance(follow_symlinks, bool), "follow_symlinks has to be bool"
-
+
  # debug(f"maxsize={str(maxsize)}, filter='{filter}'")
- super().__init__(count_items=True, **kwargs)
+ super().__init__(**kwargs)
  self._base: Optional[Path] = base
- # self._done: bool = False
  self._case_sensitive: bool = False
  self._exclude: bool = False
- self._follow_symlinks : bool = follow_symlinks
+ self._follow_symlinks: bool = follow_symlinks
  self.set_filter(filter=filter, exclude=exclude, case_sensitive=case_sensitive)

  def set_filter(
@@ -119,8 +116,7 @@ class FileQueue(IterableQueue[Path]):
  await self.put(path)
  except Exception as err:
  error(f"{err}")
- return await self.finish()
-
+ return await self.finish_producer()

  async def put(self, path: Path) -> None:
  """Recursive function to build process queue. Sanitize filename"""
{queutils-0.9.4 → queutils-0.10.0}/src/queutils/iterablequeue.py
@@ -17,16 +17,14 @@ __email__ = "Jylpah@gmail.com"
  __status__ = "Production"


- from asyncio import Queue, QueueFull, QueueEmpty, Event, Lock
+ from asyncio import Queue, QueueFull, Event, Lock
  from typing import AsyncIterable, TypeVar, Optional
+ from deprecated import deprecated
  from .countable import Countable
  import logging

  # Setup logging
  logger = logging.getLogger(__name__)
- error = logger.error
- message = logger.warning
- verbose = logger.info
  debug = logger.debug

  T = TypeVar("T")
@@ -45,34 +43,44 @@ class IterableQueue(Queue[T], AsyncIterable[T], Countable):
  IterableQueue is asyncio.Queue subclass that can be iterated asynchronusly.

  IterableQueue terminates automatically when the queue has been
- filled and emptied. Supports:
+ filled and emptied.
+
+ Supports:
  - asyncio.Queue() interface, _nowait() methods are experimental
  - AsyncIterable(): async for item in queue:
  - Automatic termination of the consumers when the queue has been emptied with QueueDone exception
  - Producers must be registered with add_producer() and they must notify the queue
  with finish() once they have finished adding items
  - Countable interface to count number of items task_done() through 'count' property
- - Countable property can be disabled with count_items=False. This is useful when you
- want to sum the count of multiple IterableQueues
+
+ IterableQueue stages:
+
+ 1) Initialized: Queue has been created and it is empty
+ 2) is_illed: All producers have finished adding items to the queue
+ 3) empty/has_wip: Queue has been emptied
+ 4) is_done: All items have been marked with task_done()
  """

- def __init__(self, count_items: bool = True, **kwargs):
+ def __init__(self, **kwargs) -> None:
  # _Q is required instead of inheriting from Queue()
  # using super() since Queue is Optional[T], not [T]
  self._Q: Queue[Optional[T]] = Queue(**kwargs)
+ self._maxsize: int = self._Q.maxsize # Asyncio.Queue has _maxsize
  self._producers: int = 0
- self._count_items: bool = count_items
  self._count: int = 0
  self._wip: int = 0

  self._modify: Lock = Lock()
  self._put_lock: Lock = Lock()

+ # the last producer has finished
  self._filled: Event = Event()
+ # the last producer has finished and the queue is empty
  self._empty: Event = Event()
+ # the queue is done, all items have been marked with task_done()
  self._done: Event = Event()

- self._empty.set()
+ self._empty.clear() # this will be tested only after queue is filled

  @property
  def is_filled(self) -> bool:
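A short, illustrative sketch of the queue life cycle described in the new class docstring above (not part of the diff; assumes queutils 0.10.0, names and sizes are arbitrary):

```python
# Sketch of the IterableQueue life cycle (queutils >= 0.10.0 assumed).
import asyncio

from queutils import IterableQueue


async def producer(q: IterableQueue[int], n: int) -> None:
    await q.add_producer()        # register before putting (stage 1: initialized)
    for i in range(n):
        await q.put(i)
    await q.finish_producer()     # last producer done -> stage 2: is_filled


async def main() -> None:
    q: IterableQueue[int] = IterableQueue(maxsize=8)
    task = asyncio.create_task(producer(q, 20))

    async for item in q:          # iteration marks task_done() between items
        pass                      # loop ends once the queue is filled and emptied (stage 3)

    await q.join()                # returns when all items are task_done() (stage 4: is_done)
    assert q.is_filled and q.empty() and q.is_done
    await task


asyncio.run(main())
```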
@@ -99,12 +107,6 @@
  """
  return self._Q.full()

- def check_done(self) -> bool:
- if self.is_filled and self.empty() and not self.has_wip:
- self._done.set()
- return True
- return False
-
  def empty(self) -> bool:
  """
  Queue has no items except None as a sentinel
@@ -137,101 +139,95 @@

  @property
  def count(self) -> int:
- if self._count_items:
- return self._count
- else:
- return 0
+ return self._count

  async def add_producer(self, N: int = 1) -> int:
  """
  Add producer(s) to the queue
  """
- assert N > 0, "N has to be positive"
+ if N <= 0:
+ raise ValueError("N has to be positive")
  async with self._modify:
  if self.is_filled:
  raise QueueDone
  self._producers += N
  return self._producers

- async def finish(self, all: bool = False, empty: bool = False) -> bool:
+ @deprecated(version="0.10.0", reason="Use finish_producer() instead for clarity")
+ async def finish(self, all: bool = False) -> bool:
+ """
+ Finish producer
+
+ Depreciated function, use finish_producer() instead
+ """
+ return await self.finish_producer(all=all)
+
+ async def finish_producer(self, all: bool = False) -> bool:
  """
  Producer has finished adding items to the queue.
  Once the last producers has finished, the queue is_filled.
  - all: finish() queue for all producers at once
+
+ Return True if the last producer is 'finished'
  """
  async with self._modify:
- if self._producers <= 0 or self.is_filled:
- # raise ValueError("finish() called more than the is producers")
- self._producers = 0
- return False
+ if self.is_filled:
+ return True
+
  self._producers -= 1

- if all or self._producers <= 0:
+ if self._producers < 0:
+ raise ValueError("Too many finish() calls")
+ elif all or self._producers == 0:
  self._filled.set()
  self._producers = 0

  if self._producers == 0:
- if empty:
- try:
- while True:
- _ = self.get_nowait()
- self.task_done()
- except (QueueDone, QueueEmpty):
- pass
-
  async with self._put_lock:
- if empty:
- try:
- _ = self.get_nowait()
- self.task_done()
- except (QueueDone, QueueEmpty):
- pass
- self.check_done()
+ if self._Q.qsize() == 0:
+ self._empty.set()
+ if not self.has_wip:
+ self._done.set()
  await self._Q.put(None)
  return True
  return False

  async def put(self, item: T) -> None:
+ if item is None:
+ raise ValueError("Cannot add None to IterableQueue")
  async with self._put_lock:
  if self.is_filled: # should this be inside put_lock?
  raise QueueDone
  if self._producers <= 0:
  raise ValueError("No registered producers")
- elif item is None:
- raise ValueError("Cannot add None to IterableQueue")
  await self._Q.put(item=item)
- self._empty.clear()
  return None

  def put_nowait(self, item: T) -> None:
  """
  Experimental asyncio.Queue.put_nowait() implementation
  """
- # raise NotImplementedError
  if self.is_filled:
  raise QueueDone
  if self._producers <= 0:
  raise ValueError("No registered producers")
- elif item is None:
+ if item is None:
  raise ValueError("Cannot add None to IterableQueue")
  self._Q.put_nowait(item=item)
- self._empty.clear()
  return None

  async def get(self) -> T:
  item = await self._Q.get()
  if item is None:
  self._empty.set()
+ if not self.has_wip:
+ self._done.set()
  self._Q.task_done()
- self.check_done()
  async with self._put_lock:
  await self._Q.put(None)
  raise QueueDone
  else:
- if self._Q.qsize() == 0:
- self._empty.set()
- async with self._modify:
- self._wip += 1
+ self._wip += 1
  return item

  def get_nowait(self) -> T:
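A hedged sketch of the reworked finish_producer() semantics in the hunk above (illustrative, not part of the diff; the expected return values and exceptions follow the diffed logic, assuming queutils 0.10.0):

```python
# Illustrative walk-through of finish_producer()/put() behavior per the hunk above.
import asyncio

from queutils import IterableQueue, QueueDone


async def main() -> None:
    q: IterableQueue[str] = IterableQueue()
    await q.add_producer(N=2)

    assert await q.finish_producer() is False   # one producer still open -> not filled yet
    assert await q.finish_producer() is True    # last producer -> queue is_filled

    try:
        await q.put("late")                     # queue already filled
    except QueueDone:
        print("put() after fill raises QueueDone")

    # calling finish_producer() on an already-filled queue simply returns True
    assert await q.finish_producer() is True


asyncio.run(main())
```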
@@ -241,16 +237,15 @@
  item: T | None = self._Q.get_nowait()
  if item is None:
  self._empty.set()
+ if not self.has_wip:
+ self._done.set()
  self._Q.task_done()
- self.check_done()
  try:
  self._Q.put_nowait(None)
  except QueueFull:
  pass
  raise QueueDone
  else:
- if self._Q.qsize() == 0:
- self._empty.set()
  self._wip += 1
  return item

@@ -260,7 +255,8 @@
  self._wip -= 1
  if self._wip < 0:
  raise ValueError("task_done() called more than tasks open")
- self.check_done()
+ if self.is_filled and self._empty.is_set() and not self.has_wip:
+ self._done.set()

  async def join(self) -> None:
  debug("Waiting queue to be filled")
@@ -271,14 +267,18 @@
  return None

  def __aiter__(self):
+ """
+ Return ASyncIterator to be able iterate the queue using async for
+ """
  return self

  async def __anext__(self) -> T:
- async with self._modify:
- if self._wip > 0: # do not mark task_done() at first call
- self.task_done()
+ """
+ Async iterator for IterableQueue
+ """
+ if self._wip > 0: # do not mark task_done() at first call
+ self.task_done()
  try:
- item = await self.get()
- return item
+ return await self.get()
  except QueueDone:
  raise StopAsyncIteration
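Migration sketch for the renamed API (illustrative, not part of the diff): `finish()` remains as a deprecated alias of `finish_producer()`, and the warning category is assumed to be the `deprecated` package's default `DeprecationWarning`.

```python
# Hedged migration sketch: finish() still works in 0.10.0 but is deprecated;
# new code should call finish_producer(). The DeprecationWarning category is
# an assumption based on the `deprecated` package defaults.
import asyncio
import warnings

from queutils import IterableQueue


async def main() -> None:
    q: IterableQueue[int] = IterableQueue()
    await q.add_producer()
    await q.put(1)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        await q.finish()                          # deprecated alias of finish_producer()
    print([w.category.__name__ for w in caught])  # expected to include 'DeprecationWarning'

    async for item in q:                          # drains the single item, then stops
        print(item)


asyncio.run(main())
```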
queutils-0.10.0/tests/test_awrap.py (new file)
@@ -0,0 +1,100 @@
+ import pytest # type: ignore
+ from asyncio import (
+     Task,
+     create_task,
+ )
+
+ from queutils import IterableQueue, QueueDone, abatch, awrap
+
+
+ async def _producer_int(
+     Q: IterableQueue[int], n: int, finish: bool = False, wait: float = 0
+ ) -> None:
+     await Q.add_producer(N=1)
+     try:
+         for i in range(n):
+             await Q.put(i)
+     except QueueDone:
+         raise ValueError("Queue is done even no one closed it")
+     await Q.finish_producer()
+     return None
+
+
+ @pytest.mark.timeout(5)
+ @pytest.mark.parametrize(
+     "N",
+     [
+         (int(10e4)),
+         (int(10e4)),
+     ],
+ )
+ @pytest.mark.asyncio
+ async def test_1_awrap_iterable(N: int):
+     """Test for awrap() with AsyncIterable"""
+
+     iterable: list[int] = [i for i in range(N)]
+
+     max_elem: int = -1
+
+     async for elem in awrap(iterable):
+         assert elem > max_elem, "outcome"
+         max_elem = elem
+     assert max_elem == N - 1, f"last element is incorrect: {max_elem} != {N - 1}"
+
+
+ @pytest.mark.timeout(5)
+ @pytest.mark.parametrize(
+     "bsize, N",
+     [
+         (17, int(10e4)),
+         (89, int(10e4)),
+     ],
+ )
+ @pytest.mark.asyncio
+ async def test_2_abatch_iterable(bsize: int, N: int):
+     """Test for abatch() with AsyncIterable"""
+
+     iterable: list[int] = [i for i in range(N)]
+
+     max_elem: int = -1
+     last: bool = False
+
+     async for batch in abatch(iterable, bsize):
+         assert not last, "abatch() returned mismatched batch that was not the last"
+         if len(batch) < bsize:
+             last = True
+         assert last or len(batch) == bsize, f"Batch size is {len(batch)} != {bsize}"
+         for elem in batch:
+             assert elem > max_elem, f"batch is not sorted: {batch}"
+             max_elem = elem
+
+
+ @pytest.mark.timeout(10)
+ @pytest.mark.parametrize(
+     "Qsize, bsize, N",
+     [
+         (101, 17, int(10e4)),
+         (11, 89, int(10e4)),
+     ],
+ )
+ @pytest.mark.asyncio
+ async def test_3_abatch_asynciterable(Qsize: int, bsize: int, N: int):
+     """Test for abatch() with AsyncIterable"""
+
+     Q = IterableQueue[int](maxsize=Qsize)
+     producer: Task = create_task(_producer_int(Q, N))
+
+     max_elem: int = -1
+     last: bool = False
+
+     async for batch in abatch(Q, bsize):
+         assert not last, "abatch() returned mismatched batch that was not the last"
+         if len(batch) < bsize:
+             last = True
+         assert last or len(batch) == bsize, f"Batch size is {len(batch)} != {bsize}"
+         for elem in batch:
+             assert elem > max_elem, f"batch is not sorted: {batch}"
+             max_elem = elem
+     assert Q.empty(), f"Queue is not empty after async for: Q.size={Q.qsize()}"
+     assert Q.is_done, "Queue is not done"
+     producer.cancel()
{queutils-0.9.4 → queutils-0.10.0}/tests/test_eventcounterqueue.py
@@ -54,7 +54,7 @@ async def test_1_category_counter_queue(
  count: int = randint(1, 10)
  await Q.send(cat, count)
  _counter[cat] += count
- await Q.finish()
+ await Q.finish_producer()
  return _counter

  senders: list[Task] = list()
{queutils-0.9.4 → queutils-0.10.0}/tests/test_iterablequeue.py
@@ -13,9 +13,10 @@ from random import random

  from queutils import IterableQueue, QueueDone

- QSIZE: int = 10
- N: int = 100 # N >> QSIZE
- THREADS: int = 4
+ QSIZE: int = 37
+ N: int = 500 # N >> QSIZE
+ THREADS: int = 17
+ TIMEOUT: int = 30
  # N : int = int(1e10)


@@ -36,7 +37,7 @@ async def _producer_int(
  except QueueDone:
  pass
  if finish:
- await Q.finish()
+ await Q.finish_producer()
  return None


@@ -54,17 +55,19 @@ async def _consumer_int(Q: IterableQueue[int], n: int = -1, wait: float = 0) ->
  return True


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_1_put_get_async(test_interablequeue_int: IterableQueue[int]):
  """Test: put(), get(), join(), qsize(), empty() == True"""
  Q = test_interablequeue_int
  try:
- async with timeout(5):
+ async with timeout(TIMEOUT / 2):
  await _producer_int(Q, QSIZE - 1, finish=True)
  except TimeoutError:
  assert False, "IterableQueue got stuck"
- assert Q.qsize() == QSIZE - 1, f"qsize() returned {Q.qsize()}, should be {QSIZE-1}"
+ assert Q.qsize() == QSIZE - 1, (
+ f"qsize() returned {Q.qsize()}, should be {QSIZE - 1}"
+ )
  try:
  await Q.put(1)
  assert False, "Queue is filled and put() should raise an exception"
@@ -73,10 +76,10 @@ async def test_1_put_get_async(test_interablequeue_int: IterableQueue[int]):
  assert not Q.is_done, "is_done returned True even queue is not finished"
  consumer: Task = create_task(_consumer_int(Q))
  try:
- async with timeout(5):
+ async with timeout(TIMEOUT / 2):
  await Q.join()
  await Q.get()
- assert False, "Queue is done and put() should raise an exception"
+ assert False, "Queue is done and get() should raise an exception"
  except TimeoutError:
  assert False, "IterableQueue.join() took too long"
  except QueueDone:
@@ -86,13 +89,13 @@ async def test_1_put_get_async(test_interablequeue_int: IterableQueue[int]):
  consumer.cancel()


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_2_put_get_nowait(test_interablequeue_int: IterableQueue[int]):
  """Test put_nowait() and get_nowait() methods"""
  Q = test_interablequeue_int
  producer: Task = create_task(_producer_int(Q, N))
- await sleep(1)
+ await sleep(N / 500)
  # In theory this could fail without a real error
  # if QSIZE is huge and/or system is slow
  assert Q.qsize() == Q.maxsize, "Queue was supposed to be at maxsize"
@@ -105,35 +108,44 @@ async def test_2_put_get_nowait(test_interablequeue_int: IterableQueue[int]):
  except QueueFull:
  pass # OK, Queue was supposed to be full

+ finisher = create_task(Q.finish_producer(all=True))
+
  try:
  while True:
- _ = Q.get_nowait()
- Q.task_done()
- await sleep(0.01)
- except QueueEmpty:
- assert Q.qsize() == 0, "Queue size should be zero"
+ try:
+ _ = Q.get_nowait()
+ Q.task_done()
+ except QueueEmpty:
+ assert Q.qsize() == 0, "Queue size should be zero"
+ await sleep(0.01)
+ except QueueDone:
+ pass

  try:
- async with timeout(5):
- await Q.finish()
+ async with timeout(3):
+ await Q.finish_producer()
  await Q.join()
  except TimeoutError:
  assert False, "Queue.join() took longer than it should"
- assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
+ assert Q.qsize() == 0, f"queue size is {Q.qsize()} > 0 even it should be empty"
  assert Q.empty(), "queue not empty()"
  producer.cancel()
+ finisher.cancel()
+ await sleep(0.1)
+ assert producer.done(), "producer has not finished"
+ assert finisher.done(), "finisher has not finished"


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_3_multiple_producers(test_interablequeue_int: IterableQueue[int]):
  Q = test_interablequeue_int
  workers: list[Task] = list()
  for _ in range(THREADS):
- workers.append(create_task(_producer_int(Q, N, finish=True, wait=0.05)))
+ workers.append(create_task(_producer_int(Q, N, finish=True, wait=0.01)))
  try:
  assert not Q.is_done, "is_done returned True even queue is not finished"
- async with timeout(10):
+ async with timeout(TIMEOUT):
  async for _ in Q:
  pass
  except TimeoutError:
@@ -147,7 +159,7 @@ async def test_3_multiple_producers(test_interablequeue_int: IterableQueue[int])
  w.cancel()


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_4_multiple_producers_consumers(
  test_interablequeue_int: IterableQueue[int],
@@ -157,26 +169,26 @@ async def test_4_multiple_producers_consumers(
  consumers: list[Task] = list()

  for _ in range(THREADS):
- producers.append(create_task(_producer_int(Q, N, finish=False, wait=0.05)))
- consumers.append(create_task(_consumer_int(Q, 2 * N, wait=0.06)))
+ producers.append(create_task(_producer_int(Q, N, finish=False, wait=0.01)))
+ consumers.append(create_task(_consumer_int(Q, 2 * N, wait=0.01)))
  try:
- async with timeout(10):
+ async with timeout(TIMEOUT):
  await gather(*producers)
- await Q.finish(all=True)
+ await Q.finish_producer(all=True)
  await Q.join()
  assert not Q.has_wip, "Queue should not have any items WIP"
  except TimeoutError:
  assert False, "IterableQueue.join() took too long"
- assert (
- Q.count == THREADS * N
- ), f"count returned wrong value {Q.count}, should be {THREADS*N}"
+ assert Q.count == THREADS * N, (
+ f"count returned wrong value {Q.count}, should be {THREADS * N}"
+ )
  assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
  assert Q.empty(), "queue not empty"
  for p in consumers:
  p.cancel()


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_5_empty_join(test_interablequeue_int: IterableQueue[int]):
  """Test for await join when an empty queue is finished"""
@@ -185,48 +197,52 @@ async def test_5_empty_join(test_interablequeue_int: IterableQueue[int]):
  assert not Q.is_done, "is_done returned True even queue is not finished"
  consumer: Task = create_task(_consumer_int(Q))
  try:
-
  async with timeout(3):
  await Q.join()
- assert (
- Q.empty()
- ), "Queue is done after 3 secs and the join() should finish before timeout(5)"
+ assert Q.empty(), (
+ "Queue is done after 3 secs and the join() should finish before timeout(5)"
+ )
  except TimeoutError:
  assert False, "await IterableQueue.join() failed with an empty queue finished"
  await sleep(0.1)

  try:
  consumer.cancel()
- assert (
- not consumer.cancelled()
- ), "consumer task was cancelled and did not complete even it should have"
+ assert not consumer.cancelled(), (
+ "consumer task was cancelled and did not complete even it should have"
+ )
  except Exception as err:
  assert False, f"Unknown Exception caught: {err}"
  assert producer.done(), "producer has not finished"


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_6_finish_full_queue(test_interablequeue_int: IterableQueue[int]):
  """Test for await join when an empty queue is finished"""
  Q = test_interablequeue_int
  producer: Task = create_task(_producer_int(Q, n=QSIZE * 2))
  try:
- await sleep(0.5)
- async with timeout(3):
- await Q.finish(all=True, empty=True)
- assert (
- Q.empty()
- ), f"Queue should be empty: qsize={Q._Q.qsize()}: {Q._Q.get_nowait()}, {Q._Q.get_nowait()}"
+ await sleep(1)
+ finisher: Task = create_task(Q.finish_producer(all=True))
+ async for _ in Q:
+ pass
+ assert Q.empty(), (
+ f"Queue should be empty: qsize={Q.qsize()}: {Q._Q.get_nowait()}, {Q._Q.get_nowait()}"
+ )
  assert Q.is_done, "Queue is not done"
  except TimeoutError:
- assert False, "await IterableQueue.join() failed with an empty queue finished"
+ assert False, f"await IterableQueue.finish() failed. qsize={Q.qsize()}"
  await sleep(0.1)
  assert Q.is_done, "Queue is not done"
  producer.cancel()
+ finisher.cancel()
+ await sleep(0.1)
+ assert producer.done(), "producer has not finished"
+ assert finisher.done(), "finisher has not finished"


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_7_aiter(test_interablequeue_int: IterableQueue[int]):
  """Test for await join when an empty queue is finished"""
@@ -245,7 +261,7 @@ async def test_7_aiter(test_interablequeue_int: IterableQueue[int]):
  assert False, "await IterableQueue.join() failed with an empty queue finished"


- @pytest.mark.timeout(10)
+ @pytest.mark.timeout(TIMEOUT)
  @pytest.mark.asyncio
  async def test_8_aiter_1_item(test_interablequeue_int: IterableQueue[int]):
  """Test for await join when an empty queue is finished"""
@@ -260,8 +276,8 @@ async def test_8_aiter_1_item(test_interablequeue_int: IterableQueue[int]):
  count += 1
  assert i >= 0, "Did not receive an int"
  assert count == 1, f"Did not receive correct number of elements {count} != 1"
- assert (
- True
- ), "Queue is done after 3 secs and the join() should finish before timeout(5)"
+ assert True, (
+ "Queue is done after 3 secs and the join() should finish before timeout(5)"
+ )
  except TimeoutError:
  assert False, "await IterableQueue.join() failed with an empty queue finished"
Files listed above with +0 -0 are unchanged between the two versions.