queutils 0.9.1__tar.gz → 0.9.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. {queutils-0.9.1 → queutils-0.9.4}/.github/workflows/python-publish.yml +17 -20
  2. {queutils-0.9.1 → queutils-0.9.4}/PKG-INFO +20 -9
  3. {queutils-0.9.1 → queutils-0.9.4}/README.md +14 -3
  4. {queutils-0.9.1 → queutils-0.9.4}/docs/filequeue.md +14 -0
  5. queutils-0.9.4/docs/rm_links +14 -0
  6. queutils-0.9.4/pypi.md +69 -0
  7. {queutils-0.9.1 → queutils-0.9.4}/pyproject.toml +2 -2
  8. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/__init__.py +3 -3
  9. queutils-0.9.1/src/queutils/categorycounterqueue.py → queutils-0.9.4/src/queutils/eventcounterqueue.py +16 -16
  10. queutils-0.9.4/tests/test_eventcounterqueue.py +80 -0
  11. queutils-0.9.1/tests/test_ccategorycounterqueue.py +0 -333
  12. {queutils-0.9.1 → queutils-0.9.4}/.github/workflows/codeql.yml +0 -0
  13. {queutils-0.9.1 → queutils-0.9.4}/.github/workflows/dependency-review.yml +0 -0
  14. {queutils-0.9.1 → queutils-0.9.4}/.github/workflows/python-package.yml +0 -0
  15. {queutils-0.9.1 → queutils-0.9.4}/.gitignore +0 -0
  16. {queutils-0.9.1 → queutils-0.9.4}/LICENSE +0 -0
  17. {queutils-0.9.1 → queutils-0.9.4}/codecov.yml +0 -0
  18. {queutils-0.9.1 → queutils-0.9.4}/demos/asyncqueue_demo.py +0 -0
  19. {queutils-0.9.1 → queutils-0.9.4}/demos/filequeue_demo.py +0 -0
  20. {queutils-0.9.1 → queutils-0.9.4}/demos/iterablequeue_demo.py +0 -0
  21. {queutils-0.9.1 → queutils-0.9.4}/docs/asyncqueue.md +0 -0
  22. {queutils-0.9.1 → queutils-0.9.4}/docs/iterablequeue.md +0 -0
  23. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/asyncqueue.py +0 -0
  24. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/countable.py +0 -0
  25. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/filequeue.py +0 -0
  26. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/iterablequeue.py +0 -0
  27. {queutils-0.9.1 → queutils-0.9.4}/src/queutils/py.typed +0 -0
  28. {queutils-0.9.1 → queutils-0.9.4}/tests/test_asyncqueue.py +0 -0
  29. {queutils-0.9.1 → queutils-0.9.4}/tests/test_demos.py +0 -0
  30. {queutils-0.9.1 → queutils-0.9.4}/tests/test_filequeue.py +0 -0
  31. {queutils-0.9.1 → queutils-0.9.4}/tests/test_iterablequeue.py +0 -0
@@ -12,28 +12,25 @@ on:
12
12
  release:
13
13
  types: [published]
14
14
 
15
- permissions:
16
- contents: read
17
-
18
15
  jobs:
19
16
  deploy:
20
-
21
17
  runs-on: ubuntu-latest
18
+ environment: production
19
+ permissions:
20
+ id-token: write
21
+ contents: read
22
22
 
23
23
  steps:
24
- - uses: actions/checkout@v4
25
- - name: Set up Python
26
- uses: actions/setup-python@v5
27
- with:
28
- python-version: '3.11'
29
- - name: Install dependencies
30
- run: |
31
- python -m pip install --upgrade pip
32
- pip install build
33
- - name: Build package
34
- run: python -m build
35
- - name: Publish package
36
- uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
37
- with:
38
- user: __token__
39
- password: ${{ secrets.PYPI_API_TOKEN }}
24
+ - uses: actions/checkout@v4
25
+ - name: Set up Python
26
+ uses: actions/setup-python@v5
27
+ with:
28
+ python-version: "3.12"
29
+ - name: Install dependencies
30
+ run: |
31
+ python -m pip install --upgrade pip
32
+ pip install build
33
+ - name: Build package
34
+ run: python -m build
35
+ - name: Publish package
36
+ uses: pypa/gh-action-pypi-publish@v1.12.4
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: queutils
3
- Version: 0.9.1
3
+ Version: 0.9.4
4
4
  Summary: Handy Python Queue utilies
5
5
  Project-URL: Homepage, https://github.com/Jylpah/queutils
6
6
  Project-URL: Bug Tracker, https://github.com/Jylpah/queutils/issues
@@ -35,15 +35,16 @@ Description-Content-Type: text/markdown
35
35
 
36
36
  Queutils *[Queue Utils]* is a package of handy Python queue classes:
37
37
 
38
- - **[AsyncQueue](docs/asyncqueue.md)** - An `async` wrapper for non-async `queue.Queue`
39
- - **[IterableQueue](docs/iterablequeue.md)** - An `AsyncIterable` queue that terminates when finished
40
- - **[FileQueue](docs/filequeue.md)** - Builds an iterable queue of filenames from files/dirs given as input
38
+ - **AsyncQueue** - An `async` wrapper for non-async `queue.Queue`
39
+ - **IterableQueue** - An `AsyncIterable` queue that terminates when finished
40
+ - **EventCounterQueue** - An `IterableQueue` for counting events in `async` threads
41
+ - **FileQueue** - Builds an `IterableQueue[pathlib.Path]` of filenames from files/dirs given as input
41
42
 
42
43
 
43
44
  # AsyncQueue
44
45
 
45
- [`AsyncQueue`](docs/asyncqueue.md) is a async wrapper for non-async `queue.Queue`. It can be used to create
46
- an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessing.Queue` and thus enable `async` code in parent/child processes to communicate over `multiprocessing.Queue` as it were an `asyncio.Queue`.
46
+ `AsyncQueue` is an async wrapper for non-async `queue.Queue`. It can be used to create
47
+ an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessing.Queue` and thus enable `async` code in parent/child processes to communicate over `multiprocessing.Queue` as if it were an `asyncio.Queue`. Uses `sleep()` for `get()`/`put()` if the queue is empty/full.
47
48
 
48
49
  ## Features
49
50
 
@@ -54,10 +55,10 @@ an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessin
54
55
 
55
56
  # IterableQueue
56
57
 
57
- [`IterableQueue`](docs/iterablequeue.md) is an `asyncio.Queue` subclass that is `AsyncIterable[T]` i.e. it can be
58
+ `IterableQueue` is an `asyncio.Queue` subclass that is `AsyncIterable[T]` i.e. it can be
58
59
  iterated in `async for` loop. `IterableQueue` terminates automatically when the queue has been filled and emptied.
59
60
 
60
- The `IterableQueue` requires "producers" (functions adding items to the queue) to register themselves and it
61
+ The `IterableQueue` requires "producers" (functions adding items to the queue) to register themselves with `add_producer()` call and it
61
62
  keeps count of registered producers which are "finished" adding items to the queue. Once all the registered
62
63
  producers are "finished", the queue enters into "filled" state and no new items can be added. Once an
63
64
  "filled" queue is emptied, the queue becomes "done" and all new `get()` calls to the queue will
@@ -74,9 +75,19 @@ producers are "finished", the queue enters into "filled" state and no new items
74
75
  - Countable property can be disabled with count_items=False. This is useful when you
75
76
  want to sum the count of multiple IterableQueues
76
77
 
78
+ # EventCounterQueue
79
+
80
+ `EventCounterQueue` can be used to count named events (default event is `count`) between `async` threads. `async` worker threads call `queue.send(event="event_name", N=amount)`. The receiving end can either `receive()` a single event or `listen()` all events and return `collections.defaultdict[str, int]` as a result.
81
+
82
+ ## Features
83
+
84
+ - Supports multiple producers and a single listener
85
+ - Default event is `count`
86
+
87
+
77
88
  # FileQueue
78
89
 
79
- [`FileQueue`](docs/filequeue.md) builds a queue (`IterableQueue[pathlib.Path]`) of the matching
90
+ `FileQueue` builds a queue (`IterableQueue[pathlib.Path]`) of the matching
80
91
  files found based on search parameters given. It can search both list of files or directories or
81
92
  mixed. Async method `FileQueue.mk_queue()` searches subdirectories of given directories.
82
93
 
@@ -6,13 +6,14 @@ Queutils *[Queue Utils]* is a package of handy Python queue classes:
6
6
 
7
7
  - **[AsyncQueue](docs/asyncqueue.md)** - An `async` wrapper for non-async `queue.Queue`
8
8
  - **[IterableQueue](docs/iterablequeue.md)** - An `AsyncIterable` queue that terminates when finished
9
- - **[FileQueue](docs/filequeue.md)** - Builds an iterable queue of filenames from files/dirs given as input
9
+ - **EventCounterQueue** - An `IterableQueue` for counting events in `async` threads
10
+ - **[FileQueue](docs/filequeue.md)** - Builds an `IterableQueue[pathlib.Path]` of filenames from files/dirs given as input
10
11
 
11
12
 
12
13
  # AsyncQueue
13
14
 
14
15
  [`AsyncQueue`](docs/asyncqueue.md) is an async wrapper for non-async `queue.Queue`. It can be used to create
15
- an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessing.Queue` and thus enable `async` code in parent/child processes to communicate over `multiprocessing.Queue` as it were an `asyncio.Queue`.
16
+ an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessing.Queue` and thus enable `async` code in parent/child processes to communicate over `multiprocessing.Queue` as if it were an `asyncio.Queue`. Uses `sleep()` for `get()`/`put()` if the queue is empty/full.
16
17
 
17
18
  ## Features
18
19
 
@@ -26,7 +27,7 @@ an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessin
26
27
  [`IterableQueue`](docs/iterablequeue.md) is an `asyncio.Queue` subclass that is `AsyncIterable[T]` i.e. it can be
27
28
  iterated in `async for` loop. `IterableQueue` terminates automatically when the queue has been filled and emptied.
28
29
 
29
- The `IterableQueue` requires "producers" (functions adding items to the queue) to register themselves and it
30
+ The `IterableQueue` requires "producers" (functions adding items to the queue) to register themselves with `add_producer()` call and it
30
31
  keeps count of registered producers which are "finished" adding items to the queue. Once all the registered
31
32
  producers are "finished", the queue enters into "filled" state and no new items can be added. Once an
32
33
  "filled" queue is emptied, the queue becomes "done" and all new `get()` calls to the queue will
@@ -43,6 +44,16 @@ producers are "finished", the queue enters into "filled" state and no new items
43
44
  - Countable property can be disabled with count_items=False. This is useful when you
44
45
  want to sum the count of multiple IterableQueues
45
46
 
47
+ # EventCounterQueue
48
+
49
+ `EventCounterQueue` can be used to count named events (default event is `count`) between `async` threads. `async` worker threads call `queue.send(event="event_name", N=amount)`. The receiving end can either `receive()` a single event or `listen()` all events and return `collections.defaultdict[str, int]` as a result.
50
+
51
+ ## Features
52
+
53
+ - Supports multiple producers and a single listener
54
+ - Default event is `count`
55
+
56
+
46
57
  # FileQueue
47
58
 
48
59
  [`FileQueue`](docs/filequeue.md) builds a queue (`IterableQueue[pathlib.Path]`) of the matching
@@ -35,4 +35,18 @@ async def main() -> None:
35
35
 
36
36
  if __name__ == "__main__":
37
37
  run(main())
38
+ ```
39
+
40
+ ### Run
41
+
42
+ ```bash
43
+ cd demos
44
+ python -m filequeue_demo
45
+ ```
46
+ Output
47
+ ```text
48
+ found asyncqueue_demo.py
49
+ found iterablequeue_demo.py
50
+ found filequeue_demo.py
51
+ finished, no need to use fileQ.join()
38
52
  ```
@@ -0,0 +1,14 @@
1
+ #!/bin/bash
2
+
3
+ # Usage: ./rmlinks.sh input.md > output.md
4
+
5
+ if [ "$#" -ne 1 ]; then
6
+ echo "Usage: $0 <markdown-file>"
7
+ exit 1
8
+ fi
9
+
10
+ INPUT_FILE="$1"
11
+
12
+ # Use sed to remove markdown links to /docs but keep the link text
13
+ # Matches: [link text](/docs/whatever.md) and turns into: link text
14
+ perl -pe 's/\[([^\]]+)\]\((\/?docs\/[^)]+)\)/\1/g' "$INPUT_FILE"
queutils-0.9.4/pypi.md ADDED
@@ -0,0 +1,69 @@
1
+ [![Python package](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml/badge.svg)](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml) [![codecov](https://codecov.io/gh/Jylpah/queutils/graph/badge.svg?token=rMKdbfHOFs)](https://codecov.io/gh/Jylpah/queutils)
2
+
3
+ # Queutils
4
+
5
+ Queutils *[Queue Utils]* is a package of handy Python queue classes:
6
+
7
+ - **AsyncQueue** - An `async` wrapper for non-async `queue.Queue`
8
+ - **IterableQueue** - An `AsyncIterable` queue that terminates when finished
9
+ - **EventCounterQueue** - An `IterableQueue` for counting events in `async` threads
10
+ - **FileQueue** - Builds an `IterableQueue[pathlib.Path]` of filenames from files/dirs given as input
11
+
12
+
13
+ # AsyncQueue
14
+
15
+ `AsyncQueue` is an async wrapper for non-async `queue.Queue`. It can be used to create
16
+ an `asyncio.Queue` compatible interface to a (non-async) managed `multiprocessing.Queue` and thus enable `async` code in parent/child processes to communicate over `multiprocessing.Queue` as if it were an `asyncio.Queue`. Uses `sleep()` for `get()`/`put()` if the queue is empty/full.
17
+
18
+ ## Features
19
+
20
+ - `asyncio.Queue` compatible
21
+ - `queue.Queue` support
22
+ - `multiprocessing.Queue` support
23
+
24
+
25
+ # IterableQueue
26
+
27
+ `IterableQueue` is an `asyncio.Queue` subclass that is `AsyncIterable[T]` i.e. it can be
28
+ iterated in `async for` loop. `IterableQueue` terminates automatically when the queue has been filled and emptied.
29
+
30
+ The `IterableQueue` requires "producers" (functions adding items to the queue) to register themselves with `add_producer()` call and it
31
+ keeps count of registered producers which are "finished" adding items to the queue. Once all the registered
32
+ producers are "finished", the queue enters into "filled" state and no new items can be added. Once an
33
+ "filled" queue is emptied, the queue becomes "done" and all new `get()` calls to the queue will
34
+ `raise QueueDone` exception.
35
+
36
+ ## Features
37
+
38
+ - `asyncio.Queue` interface, `_nowait()` methods are experimental
39
+ - `AsyncIterable` support: `async for item in queue:`
40
+ - Automatic termination of the consumers with `QueueDone` exception when the queue has been emptied
41
+ - Producers must be registered with `add_producer()` and they must notify the queue
42
+ with `finish()` once they have finished adding items
43
+ - Countable interface to count number of items task_done() through `count` property
44
+ - Countable property can be disabled with count_items=False. This is useful when you
45
+ want to sum the count of multiple IterableQueues
46
+
47
+ # EventCounterQueue
48
+
49
+ `EventCounterQueue` can be used to count named events (default event is `count`) between `async` threads. `async` worker threads call `queue.send(event="event_name", N=amount)`. The receiving end can either `receive()` a single event or `listen()` all events and return `collections.defaultdict[str, int]` as a result.
50
+
51
+ ## Features
52
+
53
+ - Supports multiple producers and a single listener
54
+ - Default event is `count`
55
+
56
+
57
+ # FileQueue
58
+
59
+ `FileQueue` builds a queue (`IterableQueue[pathlib.Path]`) of the matching
60
+ files found based on search parameters given. It can search both list of files or directories or
61
+ mixed. Async method `FileQueue.mk_queue()` searches subdirectories of given directories.
62
+
63
+ ## Features
64
+
65
+ - Input can be given both as `str` and `pathlib.Path`
66
+ - `exclude: bool` exclusive or inclusive filtering. Default is `False`.
67
+ - `case_sensitive: bool` case sensitive filtering (use of `fnmatch` or `fnmatchcase`). Default is `True`.
68
+ - `follow_symlinks: bool` whether to follow symlinks. Default is `False`.
69
+
@@ -1,9 +1,9 @@
1
1
  [project]
2
2
  name = "queutils"
3
- version = "0.9.1"
3
+ version = "0.9.4"
4
4
  authors = [{ name = "Jylpah", email = "jylpah@gmail.com" }]
5
5
  description = "Handy Python Queue utilies"
6
- readme = "README.md"
6
+ readme = { file = "pypi.md", content-type = "text/markdown" }
7
7
  requires-python = ">=3.11"
8
8
  classifiers = [
9
9
  "Programming Language :: Python :: 3",
@@ -2,15 +2,15 @@ from .countable import Countable as Countable
2
2
  from .asyncqueue import AsyncQueue as AsyncQueue
3
3
  from .iterablequeue import IterableQueue as IterableQueue, QueueDone as QueueDone
4
4
  from .filequeue import FileQueue as FileQueue
5
- from .categorycounterqueue import (
5
+ from .eventcounterqueue import (
6
6
  QCounter as QCounter,
7
- CategoryCounterQueue as CategoryCounterQueue,
7
+ EventCounterQueue as EventCounterQueue,
8
8
  )
9
9
 
10
10
  __all__ = [
11
11
  "asyncqueue",
12
12
  "countable",
13
- "categorycounterqueue",
13
+ "eventcounterqueue",
14
14
  "filequeue",
15
15
  "iterablequeue",
16
16
  ]
@@ -20,7 +20,7 @@ debug = logger.debug
20
20
  T = TypeVar("T")
21
21
 
22
22
 
23
- @deprecated(version="0.9.1", reason="Use CategoryCounterQueue instead")
23
+ @deprecated(version="0.9.1", reason="Use EventCounterQueue instead")
24
24
  class CounterQueue(Queue[T], Countable):
25
25
  """
26
26
  CounterQueue is a asyncio.Queue for counting items
@@ -55,9 +55,9 @@ class CounterQueue(Queue[T], Countable):
55
55
  return self._count_items
56
56
 
57
57
 
58
- class CategoryCounterQueue(IterableQueue[tuple[str, int]]):
58
+ class EventCounterQueue(IterableQueue[tuple[str, int]]):
59
59
  """
60
- CategorySummerQueue is a asyncio.Queue for summing up values by category
60
+ EventCounterQueue is a asyncio.Queue for counting events by name
61
61
  """
62
62
 
63
63
  _counter: defaultdict[str, int]
@@ -69,29 +69,29 @@ class CategoryCounterQueue(IterableQueue[tuple[str, int]]):
69
69
  self._counter = defaultdict(int)
70
70
 
71
71
  async def receive(self) -> tuple[str, int]:
72
- """Receive a category value from the queue and sum it"""
73
- category: str
72
+ """Receive an event value from the queue and sum it"""
73
+ event: str
74
74
  value: int
75
- category, value = await super().get()
76
- self._counter[category] += value
75
+ event, value = await super().get()
76
+ self._counter[event] += value
77
77
  super().task_done()
78
- return (category, value)
78
+ return (event, value)
79
79
 
80
- async def send(self, category: str = "count", value: int = 1) -> None:
81
- """Send count of a category"""
82
- await super().put((category, value))
80
+ async def send(self, event: str = "count", value: int = 1) -> None:
81
+ """Send count of an event"""
82
+ await super().put((event, value))
83
83
  return None
84
84
 
85
- def get_count(self, category: str = "count") -> int:
86
- """Return count of a category"""
87
- return self._counter[category]
85
+ def get_count(self, event: str = "count") -> int:
86
+ """Return count for an event"""
87
+ return self._counter[event]
88
88
 
89
89
  def get_counts(self) -> defaultdict[str, int]:
90
- """Return counts of all categories"""
90
+ """Return counts of all events"""
91
91
  return self._counter
92
92
 
93
93
  async def listen(self) -> defaultdict[str, int]:
94
- """Listen for category values"""
94
+ """Listen for event values"""
95
95
  try:
96
96
  while True:
97
97
  await self.receive()
@@ -0,0 +1,80 @@
1
+ import pytest # type: ignore
2
+ from asyncio import (
3
+ Task,
4
+ create_task,
5
+ gather,
6
+ TimeoutError,
7
+ )
8
+ from random import choice, randint
9
+ import string
10
+ from collections import defaultdict
11
+
12
+ from queutils import EventCounterQueue
13
+
14
+
15
+ def randomword(length: int) -> str:
16
+ """Generate a random word of fixed length"""
17
+ # https://stackoverflow.com/a/2030081/12946084
18
+ letters: str = string.ascii_lowercase
19
+ return "".join(choice(letters) for i in range(length))
20
+
21
+
22
+ QSIZE: int = 10
23
+ N: int = 100 # N >> QSIZE
24
+ THREADS: int = 4
25
+ # N : int = int(1e10)
26
+
27
+
28
+ @pytest.mark.parametrize(
29
+ "events,N, producers",
30
+ [
31
+ ([randomword(5) for _ in range(10)], 1000, 1),
32
+ ([randomword(5) for _ in range(20)], 10000, 1),
33
+ ([randomword(5) for _ in range(5)], 1000, 3),
34
+ ],
35
+ )
36
+ @pytest.mark.timeout(10)
37
+ @pytest.mark.asyncio
38
+ async def test_1_category_counter_queue(
39
+ events: list[str], N: int, producers: int
40
+ ) -> None:
41
+ """Test EventCounterQueue"""
42
+ Q = EventCounterQueue(maxsize=QSIZE)
43
+
44
+ async def producer(
45
+ Q: EventCounterQueue, events: list[str], N: int = 100
46
+ ) -> defaultdict[str, int]:
47
+ """
48
+ Test Producer for EventCounterQueue
49
+ """
50
+ _counter: defaultdict[str, int] = defaultdict(int)
51
+ await Q.add_producer()
52
+ for _ in range(N):
53
+ cat: str = choice(events)
54
+ count: int = randint(1, 10)
55
+ await Q.send(cat, count)
56
+ _counter[cat] += count
57
+ await Q.finish()
58
+ return _counter
59
+
60
+ senders: list[Task] = list()
61
+
62
+ for _ in range(producers):
63
+ senders.append(create_task(producer(Q, events, N)))
64
+
65
+ try:
66
+ res_in: defaultdict[str, int] = await Q.listen()
67
+ res_out: defaultdict[str, int] = defaultdict(int)
68
+ for res in await gather(*senders):
69
+ for event, count in res.items():
70
+ res_out[event] += count
71
+
72
+ assert res_in == res_out, f"EventCounterQueue: {res_in} != {res_out}"
73
+ assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
74
+ assert Q.empty(), "queue not empty"
75
+ assert Q.count == N * producers, (
76
+ f"count returned wrong value {Q.count}, should be {N * producers}"
77
+ )
78
+
79
+ except TimeoutError:
80
+ assert False, "await IterableQueue.join() failed with an empty queue finished"
@@ -1,333 +0,0 @@
1
- import pytest # type: ignore
2
- from asyncio.queues import QueueEmpty, QueueFull
3
- from asyncio import (
4
- Task,
5
- create_task,
6
- sleep,
7
- gather,
8
- timeout,
9
- TimeoutError,
10
- CancelledError,
11
- )
12
- from random import random, choice, randint
13
- import string
14
- from collections import defaultdict
15
-
16
- from queutils import IterableQueue, CategoryCounterQueue, QueueDone
17
-
18
-
19
- def randomword(length: int) -> str:
20
- """Generate a random word of fixed length"""
21
- # https://stackoverflow.com/a/2030081/12946084
22
- letters: str = string.ascii_lowercase
23
- return "".join(choice(letters) for i in range(length))
24
-
25
-
26
- QSIZE: int = 10
27
- N: int = 100 # N >> QSIZE
28
- THREADS: int = 4
29
- # N : int = int(1e10)
30
-
31
-
32
- @pytest.fixture
33
- def test_interablequeue_int() -> IterableQueue[int]:
34
- return IterableQueue[int](maxsize=QSIZE)
35
-
36
-
37
- async def _producer_int(
38
- Q: IterableQueue[int], n: int, finish: bool = False, wait: float = 0
39
- ) -> None:
40
- await Q.add_producer(N=1)
41
- await sleep(wait)
42
- try:
43
- for i in range(n):
44
- await sleep(wait * random())
45
- await Q.put(i)
46
- except QueueDone:
47
- pass
48
- if finish:
49
- await Q.finish()
50
- return None
51
-
52
-
53
- async def _consumer_int(Q: IterableQueue[int], n: int = -1, wait: float = 0) -> bool:
54
- try:
55
- while n != 0:
56
- _ = await Q.get()
57
- await sleep(wait * random())
58
- Q.task_done()
59
- n -= 1
60
- except QueueDone:
61
- pass
62
- except CancelledError:
63
- raise
64
- return True
65
-
66
-
67
- @pytest.mark.timeout(10)
68
- @pytest.mark.asyncio
69
- async def test_1_put_get_async(test_interablequeue_int: IterableQueue[int]):
70
- """Test: put(), get(), join(), qsize(), empty() == True"""
71
- Q = test_interablequeue_int
72
- try:
73
- async with timeout(5):
74
- await _producer_int(Q, QSIZE - 1, finish=True)
75
- except TimeoutError:
76
- assert False, "IterableQueue got stuck"
77
- assert Q.qsize() == QSIZE - 1, (
78
- f"qsize() returned {Q.qsize()}, should be {QSIZE - 1}"
79
- )
80
- try:
81
- await Q.put(1)
82
- assert False, "Queue is filled and put() should raise an exception"
83
- except QueueDone:
84
- pass # Queue is done and put() should raise an exception
85
- assert not Q.is_done, "is_done returned True even queue is not finished"
86
- consumer: Task = create_task(_consumer_int(Q))
87
- try:
88
- async with timeout(5):
89
- await Q.join()
90
- await Q.get()
91
- assert False, "Queue is done and put() should raise an exception"
92
- except TimeoutError:
93
- assert False, "IterableQueue.join() took too long"
94
- except QueueDone:
95
- pass # should be raised
96
- assert Q.qsize() == 0, "queue not empty"
97
- assert Q.empty(), "queue not empty"
98
- consumer.cancel()
99
-
100
-
101
- @pytest.mark.timeout(10)
102
- @pytest.mark.asyncio
103
- async def test_2_put_get_nowait(test_interablequeue_int: IterableQueue[int]):
104
- """Test put_nowait() and get_nowait() methods"""
105
- Q = test_interablequeue_int
106
- producer: Task = create_task(_producer_int(Q, N))
107
- await sleep(1)
108
- # In theory this could fail without a real error
109
- # if QSIZE is huge and/or system is slow
110
- assert Q.qsize() == Q.maxsize, "Queue was supposed to be at maxsize"
111
- assert Q.full(), "Queue should be full"
112
- assert not Q.empty(), "Queue should not be empty"
113
-
114
- try:
115
- Q.put_nowait(1)
116
- assert False, "Queue was supposed to be full, but was not"
117
- except QueueFull:
118
- pass # OK, Queue was supposed to be full
119
-
120
- try:
121
- while True:
122
- _ = Q.get_nowait()
123
- Q.task_done()
124
- await sleep(0.01)
125
- except QueueEmpty:
126
- assert Q.qsize() == 0, "Queue size should be zero"
127
-
128
- try:
129
- async with timeout(5):
130
- await Q.finish()
131
- await Q.join()
132
- except TimeoutError:
133
- assert False, "Queue.join() took longer than it should"
134
- assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
135
- assert Q.empty(), "queue not empty()"
136
- producer.cancel()
137
-
138
-
139
- @pytest.mark.timeout(10)
140
- @pytest.mark.asyncio
141
- async def test_3_multiple_producers(test_interablequeue_int: IterableQueue[int]):
142
- Q = test_interablequeue_int
143
- workers: list[Task] = list()
144
- for _ in range(THREADS):
145
- workers.append(create_task(_producer_int(Q, N, finish=True, wait=0.05)))
146
- try:
147
- assert not Q.is_done, "is_done returned True even queue is not finished"
148
- async with timeout(10):
149
- async for _ in Q:
150
- pass
151
- except TimeoutError:
152
- assert False, "IterableQueue.join() took too long"
153
- except QueueDone:
154
- pass # Queue is done
155
-
156
- assert Q.qsize() == 0, f"queue size is {Q.qsize()} even it should be empty"
157
- assert Q.empty(), "queue not empty"
158
- for w in workers:
159
- w.cancel()
160
-
161
-
162
- @pytest.mark.timeout(10)
163
- @pytest.mark.asyncio
164
- async def test_4_multiple_producers_consumers(
165
- test_interablequeue_int: IterableQueue[int],
166
- ):
167
- Q = test_interablequeue_int
168
- producers: list[Task] = list()
169
- consumers: list[Task] = list()
170
-
171
- for _ in range(THREADS):
172
- producers.append(create_task(_producer_int(Q, N, finish=False, wait=0.05)))
173
- consumers.append(create_task(_consumer_int(Q, 2 * N, wait=0.06)))
174
- try:
175
- async with timeout(10):
176
- await gather(*producers)
177
- await Q.finish(all=True)
178
- await Q.join()
179
- assert not Q.has_wip, "Queue should not have any items WIP"
180
- except TimeoutError:
181
- assert False, "IterableQueue.join() took too long"
182
- assert Q.count == THREADS * N, (
183
- f"count returned wrong value {Q.count}, should be {THREADS * N}"
184
- )
185
- assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
186
- assert Q.empty(), "queue not empty"
187
- for p in consumers:
188
- p.cancel()
189
-
190
-
191
- @pytest.mark.timeout(10)
192
- @pytest.mark.asyncio
193
- async def test_5_empty_join(test_interablequeue_int: IterableQueue[int]):
194
- """Test for await join when an empty queue is finished"""
195
- Q = test_interablequeue_int
196
- producer: Task = create_task(_producer_int(Q, n=0, finish=True, wait=2))
197
- assert not Q.is_done, "is_done returned True even queue is not finished"
198
- consumer: Task = create_task(_consumer_int(Q))
199
- try:
200
- async with timeout(3):
201
- await Q.join()
202
- assert Q.empty(), (
203
- "Queue is done after 3 secs and the join() should finish before timeout(5)"
204
- )
205
- except TimeoutError:
206
- assert False, "await IterableQueue.join() failed with an empty queue finished"
207
- await sleep(0.1)
208
-
209
- try:
210
- consumer.cancel()
211
- assert not consumer.cancelled(), (
212
- "consumer task was cancelled and did not complete even it should have"
213
- )
214
- except Exception as err:
215
- assert False, f"Unknown Exception caught: {err}"
216
- assert producer.done(), "producer has not finished"
217
-
218
-
219
- @pytest.mark.timeout(10)
220
- @pytest.mark.asyncio
221
- async def test_6_finish_full_queue(test_interablequeue_int: IterableQueue[int]):
222
- """Test for await join when an empty queue is finished"""
223
- Q = test_interablequeue_int
224
- producer: Task = create_task(_producer_int(Q, n=QSIZE * 2))
225
- try:
226
- await sleep(0.5)
227
- async with timeout(3):
228
- await Q.finish(all=True, empty=True)
229
- assert Q.empty(), (
230
- f"Queue should be empty: qsize={Q._Q.qsize()}: {Q._Q.get_nowait()}, {Q._Q.get_nowait()}"
231
- )
232
- assert Q.is_done, "Queue is not done"
233
- except TimeoutError:
234
- assert False, "await IterableQueue.join() failed with an empty queue finished"
235
- await sleep(0.1)
236
- assert Q.is_done, "Queue is not done"
237
- producer.cancel()
238
-
239
-
240
- @pytest.mark.timeout(10)
241
- @pytest.mark.asyncio
242
- async def test_7_aiter(test_interablequeue_int: IterableQueue[int]):
243
- """Test for await join when an empty queue is finished"""
244
- Q = test_interablequeue_int
245
- await _producer_int(Q, n=QSIZE - 1, finish=True)
246
-
247
- try:
248
- await sleep(0.5)
249
- async for i in Q:
250
- assert i >= 0, "Did not receive an int"
251
- assert Q.is_done, "Queue is not done"
252
- # assert (
253
- # True
254
- # ), "Queue is done after 3 secs and the join() should finish before timeout(5)"
255
- except TimeoutError:
256
- assert False, "await IterableQueue.join() failed with an empty queue finished"
257
-
258
-
259
- @pytest.mark.timeout(10)
260
- @pytest.mark.asyncio
261
- async def test_8_aiter_1_item(test_interablequeue_int: IterableQueue[int]):
262
- """Test for await join when an empty queue is finished"""
263
- Q = test_interablequeue_int
264
- await _producer_int(Q, n=1, finish=True)
265
-
266
- try:
267
- assert Q.qsize() == 1, f"incorrect queue length {Q.qsize()} != 1"
268
- await sleep(0.5)
269
- count: int = 0
270
- async for i in Q:
271
- count += 1
272
- assert i >= 0, "Did not receive an int"
273
- assert count == 1, f"Did not receive correct number of elements {count} != 1"
274
- assert True, (
275
- "Queue is done after 3 secs and the join() should finish before timeout(5)"
276
- )
277
- except TimeoutError:
278
- assert False, "await IterableQueue.join() failed with an empty queue finished"
279
-
280
-
281
- @pytest.mark.parametrize(
282
- "cats,N, producers",
283
- [
284
- ([randomword(5) for _ in range(10)], 1000, 1),
285
- ([randomword(5) for _ in range(20)], 10000, 1),
286
- ([randomword(5) for _ in range(5)], 1000, 3),
287
- ],
288
- )
289
- @pytest.mark.timeout(10)
290
- @pytest.mark.asyncio
291
- async def test_9_category_counter_queue(
292
- cats: list[str], N: int, producers: int
293
- ) -> None:
294
- """Test CategoryCounterQueue"""
295
- Q = CategoryCounterQueue(maxsize=QSIZE)
296
-
297
- async def producer(
298
- Q: CategoryCounterQueue, cats: list[str], N: int = 100
299
- ) -> defaultdict[str, int]:
300
- """
301
- Test Producer for CategoryCounterQueue
302
- """
303
- _counter: defaultdict[str, int] = defaultdict(int)
304
- await Q.add_producer()
305
- for _ in range(N):
306
- cat: str = choice(cats)
307
- count: int = randint(1, 10)
308
- await Q.send(cat, count)
309
- _counter[cat] += count
310
- await Q.finish()
311
- return _counter
312
-
313
- senders: list[Task] = list()
314
-
315
- for _ in range(producers):
316
- senders.append(create_task(producer(Q, cats, N)))
317
-
318
- try:
319
- res_in: defaultdict[str, int] = await Q.listen()
320
- res_out: defaultdict[str, int] = defaultdict(int)
321
- for res in await gather(*senders):
322
- for cat, count in res.items():
323
- res_out[cat] += count
324
-
325
- assert res_in == res_out, f"CategoryCounterQueue: {res_in} != {res_out}"
326
- assert Q.qsize() == 0, "queue size is > 0 even it should be empty"
327
- assert Q.empty(), "queue not empty"
328
- assert Q.count == N * producers, (
329
- f"count returned wrong value {Q.count}, should be {N * producers}"
330
- )
331
-
332
- except TimeoutError:
333
- assert False, "await IterableQueue.join() failed with an empty queue finished"
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes