queutils 0.8.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- queutils-0.8.1/.github/workflows/python-package.yml +45 -0
- queutils-0.8.1/.gitignore +138 -0
- queutils-0.8.1/LICENSE +21 -0
- queutils-0.8.1/PKG-INFO +78 -0
- queutils-0.8.1/README.md +49 -0
- queutils-0.8.1/codecov.yml +9 -0
- queutils-0.8.1/demos/asyncqueue_demo.py +90 -0
- queutils-0.8.1/demos/filequeue_demo.py +22 -0
- queutils-0.8.1/demos/iterablequeue_demo.py +53 -0
- queutils-0.8.1/docs/asyncqueue.md +183 -0
- queutils-0.8.1/docs/filequeue.md +38 -0
- queutils-0.8.1/docs/iterablequeue.md +124 -0
- queutils-0.8.1/pyproject.toml +65 -0
- queutils-0.8.1/src/queutils/__init__.py +11 -0
- queutils-0.8.1/src/queutils/asyncqueue.py +112 -0
- queutils-0.8.1/src/queutils/countable.py +11 -0
- queutils-0.8.1/src/queutils/filequeue.py +156 -0
- queutils-0.8.1/src/queutils/iterablequeue.py +283 -0
- queutils-0.8.1/tests/test_asyncqueue.py +121 -0
- queutils-0.8.1/tests/test_demos.py +9 -0
- queutils-0.8.1/tests/test_filequeue.py +106 -0
- queutils-0.8.1/tests/test_iterablequeue.py +267 -0
queutils-0.8.1/.github/workflows/python-package.yml
ADDED
@@ -0,0 +1,45 @@
```yaml
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python

name: Python package

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.11", "3.12"]
        os: [ ubuntu-latest, windows-latest, macos-latest ]
    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install .
          python -m pip install ".[dev]"
      - name: Lint with ruff
        run: |
          ruff check src tests
      - name: Test with mypy
        run: |
          mypy src
      - name: Test with pytest
        run: |
          pytest --cov=src tests
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
```

queutils-0.8.1/.gitignore
ADDED
@@ -0,0 +1,138 @@
```text
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.pypirc

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# VS Code
*.code-workspace
.vscode/

# Tmp
tmp/
*.swp
```

queutils-0.8.1/LICENSE
ADDED
@@ -0,0 +1,21 @@
```text
MIT License

Copyright (c) 2024 Jylpah

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
```

queutils-0.8.1/PKG-INFO
ADDED
@@ -0,0 +1,78 @@
```text
Metadata-Version: 2.3
Name: queutils
Version: 0.8.1
Summary: Handy Python Queue utilities
Project-URL: Homepage, https://github.com/Jylpah/queutils
Project-URL: Bug Tracker, https://github.com/Jylpah/queutils/issues
Author-email: Jylpah <jylpah@gmail.com>
License-File: LICENSE
Classifier: Development Status :: 4 - Beta
Classifier: Framework :: AsyncIO
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Software Development :: Libraries
Requires-Python: >=3.11
Requires-Dist: aioconsole>=0.6
Provides-Extra: dev
Requires-Dist: build>=0.10; extra == 'dev'
Requires-Dist: hatchling>=1.22.4; extra == 'dev'
Requires-Dist: mypy>=1.8; extra == 'dev'
Requires-Dist: pip-chill>=1.0; extra == 'dev'
Requires-Dist: pytest-asyncio>=0.23; extra == 'dev'
Requires-Dist: pytest-cov>=4.1; extra == 'dev'
Requires-Dist: pytest-datafiles>=3.0; extra == 'dev'
Requires-Dist: pytest-timeout>=2.2; extra == 'dev'
Requires-Dist: pytest>=8.0; extra == 'dev'
Requires-Dist: ruff>=0.1.9; extra == 'dev'
Description-Content-Type: text/markdown

[![CI](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml/badge.svg)](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml) [![codecov](https://codecov.io/gh/Jylpah/queutils/graph/badge.svg?token=rMKdbfHOFd)](https://codecov.io/gh/Jylpah/queutils)

# Queutils

Queutils *[Queue Utils]* is a package of handy Python queue classes:
- **[AsyncQueue](docs/asyncqueue.md)** - `async` wrapper for `queue.Queue`
- **[IterableQueue](docs/iterablequeue.md)** - `AsyncIterable` queue
- **[FileQueue](docs/filequeue.md)** - builds a queue of filenames from input


# AsyncQueue

[`AsyncQueue`](docs/asyncqueue.md) is an async wrapper for the non-async `queue.Queue`. It can be used to create
an `asyncio.Queue`-compatible queue out of a (non-async) `multiprocessing.Queue`. This is handy when you have `async` code running in `multiprocessing` processes that needs to communicate with the parent process via a (non-async) managed `multiprocessing.Queue`.

## Features

- `asyncio.Queue` compatible
- `queue.Queue` support
- `multiprocessing.Queue` support


# IterableQueue

[`IterableQueue`](docs/iterablequeue.md) is an `asyncio.Queue` subclass that is `AsyncIterable[T]`, i.e. it can be
iterated in an `async for` loop. `IterableQueue` terminates automatically when the queue has been filled and emptied.

## Features

- `asyncio.Queue` interface; the `_nowait()` methods are experimental
- `AsyncIterable` support: `async for item in queue:`
- Automatic termination of the consumers with a `QueueDone` exception when the queue has been emptied
- Producers must be registered with `add_producer()`, and they must notify the queue
  with `finish()` once they have finished adding items
- `Countable` interface to count the number of items `task_done()` via the `count` property
- The count can be disabled with `count_items=False`. This is useful when you
  want to sum the counts of multiple `IterableQueue`s

# FileQueue

[`FileQueue`](docs/filequeue.md) builds a queue (`IterableQueue[pathlib.Path]`) of the files matching the given search parameters. It can search lists of files, directories, or a mix of both. The async method `FileQueue.mk_queue()` also searches the subdirectories of the given directories.

## Features

- Input can be given both as `str` and `pathlib.Path`
- `exclude: bool` selects exclusive or inclusive filtering. Default is `False`.
- `case_sensitive: bool` enables case-sensitive filtering (uses `fnmatch` or `fnmatchcase`). Default is `True`.
- `follow_symlinks: bool` controls whether symlinks are followed. Default is `False`.
```

queutils-0.8.1/README.md
ADDED
@@ -0,0 +1,49 @@
[![CI](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml/badge.svg)](https://github.com/Jylpah/queutils/actions/workflows/python-package.yml) [![codecov](https://codecov.io/gh/Jylpah/queutils/graph/badge.svg?token=rMKdbfHOFd)](https://codecov.io/gh/Jylpah/queutils)

# Queutils

Queutils *[Queue Utils]* is a package of handy Python queue classes:
- **[AsyncQueue](docs/asyncqueue.md)** - `async` wrapper for `queue.Queue`
- **[IterableQueue](docs/iterablequeue.md)** - `AsyncIterable` queue
- **[FileQueue](docs/filequeue.md)** - builds a queue of filenames from input

# AsyncQueue

[`AsyncQueue`](docs/asyncqueue.md) is an async wrapper for the non-async `queue.Queue`. It can be used to create
an `asyncio.Queue`-compatible queue out of a (non-async) `multiprocessing.Queue`. This is handy when you have `async` code running in `multiprocessing` processes that needs to communicate with the parent process via a (non-async) managed `multiprocessing.Queue`. A minimal sketch is shown below.

## Features

- `asyncio.Queue` compatible
- `queue.Queue` support
- `multiprocessing.Queue` support

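For illustration, a minimal sketch of wrapping a plain `queue.Queue` — this is an editor's example, not from the package sources; it assumes only the constructor shown in [docs/asyncqueue.md](docs/asyncqueue.md) plus the standard `asyncio.Queue`-style `put()`/`get()` the wrapper advertises:

```python
import queue
from asyncio import run

from queutils import AsyncQueue

async def main() -> None:
    syncQ: queue.Queue[int] = queue.Queue(maxsize=5)
    asyncQ: AsyncQueue[int] = AsyncQueue(syncQ)  # async view of syncQ
    await asyncQ.put(1)        # awaitable, like asyncio.Queue.put()
    print(await asyncQ.get())  # -> 1

if __name__ == "__main__":
    run(main())
```
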
# IterableQueue

[`IterableQueue`](docs/iterablequeue.md) is an `asyncio.Queue` subclass that is `AsyncIterable[T]`, i.e. it can be
iterated in an `async for` loop. `IterableQueue` terminates automatically when the queue has been filled and emptied.

## Features

- `asyncio.Queue` interface; the `_nowait()` methods are experimental
- `AsyncIterable` support: `async for item in queue:`
- Automatic termination of the consumers with a `QueueDone` exception when the queue has been emptied
- Producers must be registered with `add_producer()`, and they must notify the queue
  with `finish()` once they have finished adding items (see the sketch after this list)
- `Countable` interface to count the number of items `task_done()` via the `count` property
- The count can be disabled with `count_items=False`. This is useful when you
  want to sum the counts of multiple `IterableQueue`s

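A condensed sketch of the producer protocol just described — an editor's example reusing only the `add_producer()`, `put()`, `finish()` and `async for` calls demonstrated in `demos/iterablequeue_demo.py` later in this diff:

```python
from asyncio import run, TaskGroup

from queutils import IterableQueue

async def fill(Q: IterableQueue[int], N: int) -> None:
    await Q.add_producer()  # register this coroutine as a producer
    for i in range(N):
        await Q.put(i)
    await Q.finish()        # notify the queue: this producer is done

async def main() -> None:
    Q: IterableQueue[int] = IterableQueue(maxsize=5)
    async with TaskGroup() as tg:
        tg.create_task(fill(Q, 10))
        # iteration ends automatically once the queue is filled and emptied
        async for item in Q:
            print(item)

if __name__ == "__main__":
    run(main())
```
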
# FileQueue

[`FileQueue`](docs/filequeue.md) builds a queue (`IterableQueue[pathlib.Path]`) of the files matching the given search parameters. It can search lists of files, directories, or a mix of both. The async method `FileQueue.mk_queue()` also searches the subdirectories of the given directories. A short usage sketch follows below.

## Features

- Input can be given both as `str` and `pathlib.Path`
- `exclude: bool` selects exclusive or inclusive filtering. Default is `False`.
- `case_sensitive: bool` enables case-sensitive filtering (uses `fnmatch` or `fnmatchcase`). Default is `True`.
- `follow_symlinks: bool` controls whether symlinks are followed. Default is `False`.

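A short usage sketch — an editor's condensation of `demos/filequeue_demo.py` below; the constructor arguments and `mk_queue()` call mirror that demo, while the `"*.md"` filter and `"."` search root are just example values:

```python
from asyncio import Task, create_task, run

from queutils import FileQueue

async def main() -> None:
    fileQ = FileQueue(filter="*.md", case_sensitive=False)
    # spider the current directory tree in the background
    spider: Task = create_task(fileQ.mk_queue(files=["."]))
    async for path in fileQ:  # ends when the search is done and the queue drained
        print(path)

if __name__ == "__main__":
    run(main())
```
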
queutils-0.8.1/demos/asyncqueue_demo.py
ADDED
@@ -0,0 +1,90 @@
```python
from multiprocessing import Manager
from multiprocessing.pool import Pool, AsyncResult
from queutils import AsyncQueue
import queue
from asyncio import run, sleep, CancelledError, create_task, Task, gather
import asyncio
from random import random, sample
from typing import List
from time import time

start: float = time()

def since() -> float:
    return time() - start

asyncQ: AsyncQueue

async def main() -> None:
    with Manager() as manager:
        inQ: queue.Queue[int] = manager.Queue(maxsize=5)
        ainQ: AsyncQueue[int] = AsyncQueue(inQ)
        reader: Task[int] = create_task(consumer(ainQ))
        N_process: int = 3
        count: int = 0
        work: List[int] = list(sample(range(3, 7), N_process))
        print(f"{N_process} producers will put {', '.join([str(i) for i in work])} items to the queue")
        with Pool(
            processes=N_process,
            initializer=producer_init,
            initargs=[inQ],
        ) as pool:
            results: AsyncResult = pool.map_async(producer, work)
            pool.close()

            while not results.ready():
                await sleep(0.2)
                print(f"{since():.3f} waiting")
            await ainQ.join()
            reader.cancel()

            for res in results.get():
                count += res

        print(f"{since():.3f} :: {N_process} producers added {count} items to the queue")
        reader_count: int = 0
        for res in await gather(*[reader]):
            reader_count += res
        print(f"{since():.3f} :: reader read {reader_count} items from the queue")


def producer_init(outQ: queue.Queue[int]):
    global asyncQ
    asyncQ = AsyncQueue(outQ)


def producer(N: int) -> int:
    return run(producer_async(N), debug=True)

async def producer_async(N: int) -> int:
    """
    async function to add N items to the shared queue
    """
    global asyncQ
    count: int = 0
    for i in range(N):
        await sleep(0.1 * random())
        await asyncQ.put(i)
        print(f"{since():.3f} put {i} to the queue")
        count += 1
    return count

async def consumer(inQ: asyncio.queues.Queue[int]) -> int:
    """
    Async consumer of an async queue
    """
    try:
        count: int = 0
        while True:
            await sleep(0.1 * random())
            print(f"{since():.3f} awaiting to read the queue")
            i: int = await inQ.get()
            print(f"{since():.3f} read {i} from the queue")
            count += 1
    except CancelledError:
        pass
    return count

if __name__ == "__main__":
    run(main())
```

queutils-0.8.1/demos/filequeue_demo.py
ADDED
@@ -0,0 +1,22 @@
```python
from queutils import FileQueue
from pathlib import Path
from asyncio import Task, create_task, run

async def main() -> None:
    fileQ = FileQueue(filter="*.py", case_sensitive=False)
    current_dir = Path(__file__).parent
    spider: Task = create_task(fileQ.mk_queue(files=[current_dir]))
    async for filename in fileQ:
        try:
            rel_path: Path = filename.relative_to(current_dir)
            print(f"found {rel_path}")
        except ValueError as err:
            print(f"{err}")

    if spider.done():
        print("finished, no need to use fileQ.join()")
    else:
        print("Oops, it did not work as promised")

if __name__ == "__main__":
    run(main())
```

queutils-0.8.1/demos/iterablequeue_demo.py
ADDED
@@ -0,0 +1,53 @@
```python
from asyncio import sleep, run, TaskGroup
from random import random
from queutils import IterableQueue, QueueDone
from time import time

start: float = time()

def since() -> float:
    return time() - start

async def producer(
    Q: IterableQueue[int], N: int, id: int
) -> None:
    """
    Fill the queue with N items
    """
    await Q.add_producer()
    try:
        for i in range(N):
            await sleep(0.5 * random())
            print(f"{since():.2f} producer {id}: awaiting to put {i} to queue")
            await Q.put(i)
            print(f"{since():.2f} producer {id}: put {i} to queue")
        await Q.finish()
    except QueueDone:
        print(f"ERROR: producer {id}, this should not happen")
    return None

async def consumer(Q: IterableQueue[int], id: int = 1):
    """
    Consume the queue
    """
    async for i in Q:
        print(f"{since():.2f} consumer {id}: got {i} from queue")
        await sleep(0.5 * random())
    print(f"{since():.2f} consumer {id}: queue is done")

async def main() -> None:
    """
    Create a queue with maxsize and have multiple producers to fill it and
    multiple consumers to consume it over async for loop
    """
    queue: IterableQueue[int] = IterableQueue(maxsize=5)

    async with TaskGroup() as tg:
        for i in range(1, 3):
            tg.create_task(producer(Q=queue, N=5, id=i))
        await sleep(2)
        for i in range(1, 4):
            tg.create_task(consumer(Q=queue, id=i))

if __name__ == "__main__":
    run(main())
```

queutils-0.8.1/docs/asyncqueue.md
ADDED
@@ -0,0 +1,183 @@
# AsyncQueue

`AsyncQueue` is an async wrapper for the non-async `queue.Queue`. It can be used to create
an `asyncio.Queue`-compatible queue out of a (non-async) `multiprocessing.Queue`. This is handy when you have `async` code running in `multiprocessing` processes that needs to communicate with the parent process via a (non-async) managed `multiprocessing.Queue`.


## Features

- `asyncio.Queue` compatible
- `queue.Queue` support
- `multiprocessing.Queue` support

## Example

### Create async wrapper

```python
import queue
from queutils import AsyncQueue

syncQ: queue.Queue[int] = queue.Queue(maxsize=5)
asyncQ: AsyncQueue[int] = AsyncQueue(syncQ)

# asyncQ can now be used like any asyncio.Queue
```

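To make the compatibility claim above concrete, here is a short illustrative snippet — an editor's sketch, not part of the packaged docs; it assumes the wrapper implements the full `asyncio.Queue` contract it advertises, including `empty()`, `task_done()` and `join()`:

```python
import queue
from asyncio import run

from queutils import AsyncQueue

async def demo() -> None:
    syncQ: queue.Queue[int] = queue.Queue(maxsize=5)
    asyncQ: AsyncQueue[int] = AsyncQueue(syncQ)
    for i in range(3):
        await asyncQ.put(i)    # suspends instead of blocking when the queue is full
    while not asyncQ.empty():
        print(await asyncQ.get())
        asyncQ.task_done()     # mark the item processed so join() can return
    await asyncQ.join()        # returns once every item has been task_done()

if __name__ == "__main__":
    run(demo())
```
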
### Full example

Below is example code where a `multiprocessing.Manager.Queue` is used to communicate between three child producer processes and a parent process that reads the queue. The code in both the parent and the child processes is `async`.

```python
from multiprocessing import Manager
from multiprocessing.pool import Pool, AsyncResult
from queutils import AsyncQueue
import queue
from asyncio import run, sleep, CancelledError, create_task, Task, gather
import asyncio
from random import random, sample
from typing import List
from time import time

start: float = time()

def since() -> float:
    return time() - start

asyncQ: AsyncQueue

async def main() -> None:
    """
    Creates a multiprocessing.pool.Pool inside a multiprocessing.Manager() context manager and
    uses multiprocessing.Manager.Queue() to communicate between the parent process (consumer)
    and three child processes (producers).
    """

    with Manager() as manager:
        inQ: queue.Queue[int] = manager.Queue(maxsize=5)
        ainQ: AsyncQueue[int] = AsyncQueue(inQ)
        reader: Task[int] = create_task(consumer(ainQ))
        N_process: int = 3
        count: int = 0
        work: List[int] = list(sample(range(3, 7), N_process))
        print(f"work to do {work}")
        with Pool(
            processes=N_process,
            initializer=producer_init,
            initargs=[inQ],
        ) as pool:
            results: AsyncResult = pool.map_async(producer, work)
            pool.close()

            # wait for the child processes to finish
            while not results.ready():
                await sleep(0.2)
                print(f"{since():.3f} waiting")
            await ainQ.join()
            reader.cancel()

            for res in results.get():
                count += res

        print(f"{since():.3f} :: {N_process} producers added {count} items to the queue")
        reader_count: int = 0
        for res in await gather(*[reader]):
            reader_count += res
        print(f"{since():.3f} :: reader read {reader_count} items from the queue")


def producer_init(outQ: queue.Queue[int]):
    """
    multiprocessing init function to assign the created AsyncQueue to a global variable that is
    then available in the forked child process
    """
    global asyncQ
    asyncQ = AsyncQueue(outQ)


def producer(N: int) -> int:
    return run(producer_async(N), debug=True)

async def producer_async(N: int) -> int:
    """
    async function to add N items to the shared queue
    """
    global asyncQ
    count: int = 0
    for i in range(N):
        await sleep(0.1 * random())
        await asyncQ.put(i)
        print(f"{since():.3f} put {i} to the queue")
        count += 1
    return count

async def consumer(inQ: asyncio.queues.Queue[int]) -> int:
    """
    Async consumer of an async queue
    """
    try:
        count: int = 0
        while True:
            await sleep(0.1 * random())
            print(f"{since():.3f} awaiting to read the queue")
            i: int = await inQ.get()
            print(f"{since():.3f} read {i} from the queue")
            count += 1
    except CancelledError:
        pass
    return count

if __name__ == "__main__":
    run(main())
```

## Results

```bash
% python asyncqueue_demo.py queutils/demos
3 producers will put 3, 6, 4 items to the queue
0.039 awaiting to read the queue
0.089 put 0 to the queue
0.092 read 0 from the queue
0.100 put 0 to the queue
0.108 put 0 to the queue
0.108 put 1 to the queue
0.155 put 1 to the queue
0.158 put 2 to the queue
0.192 awaiting to read the queue
0.193 read 0 from the queue
0.196 put 3 to the queue
0.203 awaiting to read the queue
0.203 read 0 from the queue
0.210 waiting
0.210 put 1 to the queue
0.285 awaiting to read the queue
0.286 read 1 from the queue
0.293 put 2 to the queue
0.348 awaiting to read the queue
0.349 read 1 from the queue
0.350 put 3 to the queue
0.411 waiting
0.425 awaiting to read the queue
0.426 read 2 from the queue
0.427 put 2 to the queue
0.466 awaiting to read the queue
0.466 read 3 from the queue
0.475 put 4 to the queue
0.486 awaiting to read the queue
0.486 read 1 from the queue
0.521 awaiting to read the queue
0.522 read 2 from the queue
0.539 awaiting to read the queue
0.540 read 3 from the queue
0.567 put 5 to the queue
0.613 waiting
0.634 awaiting to read the queue
0.634 read 2 from the queue
0.655 awaiting to read the queue
0.655 read 4 from the queue
0.690 awaiting to read the queue
0.691 read 5 from the queue
0.697 :: 3 producers added 13 items to the queue
0.697 :: reader read 13 items from the queue
```