funboost 18.7__py3-none-any.whl → 18.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of funboost might be problematic.
- funboost/__init__.py +3 -3
- funboost/concurrent_pool/async_pool_executor.py +37 -109
- funboost/concurrent_pool/backup/__init__.py +0 -0
- funboost/concurrent_pool/backup/async_pool_executor0223.py +268 -0
- funboost/concurrent_pool/backup/async_pool_executor_back.py +268 -0
- funboost/concurrent_pool/backup/async_pool_executor_janus.py +166 -0
- funboost/publishers/base_publisher.py +4 -76
- funboost/publishers/msg_result_getter.py +168 -0
- funboost/utils/monkey_patches.py +45 -0
- funboost/utils/redis_manager.py +14 -1
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/METADATA +424 -424
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/RECORD +15 -9
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/WHEEL +1 -1
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/LICENSE +0 -0
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/top_level.txt +0 -0
funboost/__init__.py
CHANGED

@@ -1,3 +1,4 @@
+from funboost.utils import monkey_patches
 from funboost.utils import show_funboost_flag
 import typing
 # noinspection PyUnresolvedReferences
@@ -18,7 +19,7 @@ from funboost.consumers.base_consumer import (ExceptionForRequeue, ExceptionForR
     wait_for_possible_has_finish_all_tasks_by_conusmer_list,
     ActiveCousumerProcessInfoGetter, FunctionResultStatus)
 from funboost.publishers.base_publisher import (PriorityConsumingControlConfig,
-    AbstractPublisher, AsyncResult, HasNotAsyncResult)
+    AbstractPublisher, AsyncResult, HasNotAsyncResult, AioAsyncResult)
 from funboost.factories.publisher_factotry import get_publisher
 from funboost.factories.consumer_factory import get_consumer
@@ -27,12 +28,11 @@ from funboost.utils import nb_print, patch_print, LogManager, get_logger, Logger
 from funboost.timing_job import fsdf_background_scheduler, timing_publish_deco
 from funboost.constant import BrokerEnum, ConcurrentModeEnum

+
 # 有的包默认没加handlers,原始的日志不漂亮且不可跳转不知道哪里发生的。这里把warnning级别以上的日志默认加上handlers。
 # nb_log.get_logger(name='', log_level_int=30, log_filename='pywarning.log')


-
-
 class IdeAutoCompleteHelper(LoggerMixin):
     """
     为了被装饰的消费函数的敲代码时候的被pycharm自动补全而写的类。
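The __init__.py changes are small: a new top-level import of funboost.utils.monkey_patches and a wider re-export that adds AioAsyncResult beside AsyncResult. A minimal sketch of the import surface this gives callers, using only the names visible in the hunks above:

# Sketch: top-level names per the 18.9 __init__ diff; AioAsyncResult is the addition.
from funboost import AbstractPublisher, AsyncResult, HasNotAsyncResult, AioAsyncResult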

funboost/concurrent_pool/async_pool_executor.py
CHANGED

@@ -1,3 +1,5 @@
+import sys
+
 import atexit
 import asyncio
 import threading
@@ -41,33 +43,7 @@ if __name__ == '__main__':
 """


-class AsyncPoolExecutor2:
-    def __init__(self, size, loop=None):
-        self._size = size
-        self.loop = loop or asyncio.new_event_loop()
-        self._sem = asyncio.Semaphore(self._size, loop=self.loop)
-        # atexit.register(self.shutdown)
-        Thread(target=self._start_loop_in_new_thread).start()
-
-    def submit(self, func, *args, **kwargs):
-        while self._sem.locked():
-            time.sleep(0.001)
-        asyncio.run_coroutine_threadsafe(self._run_func(func, *args, **kwargs), self.loop)
-
-    async def _run_func(self, func, *args, **kwargs):
-        async with self._sem:
-            result = await func(*args, **kwargs)
-            return result
-
-    def _start_loop_in_new_thread(self, ):
-        self.loop.run_forever()
-
-    def shutdown(self):
-        self.loop.stop()
-        self.loop.close()
-
-
-class AsyncPoolExecutor(nb_log.LoggerMixin):
+class AsyncPoolExecutorLtPy310(nb_log.LoggerMixin):
     """
     使api和线程池一样,最好的性能做法是submit也弄成 async def,生产和消费在同一个线程同一个loop一起运行,但会对调用链路的兼容性产生破坏,从而调用方式不兼容线程池。
     """
@@ -83,8 +59,8 @@ class AsyncPoolExecutor(nb_log.LoggerMixin):
         self._sem = asyncio.Semaphore(self._size, loop=self.loop)
         self._queue = asyncio.Queue(maxsize=size, loop=self.loop)
         self._lock = threading.Lock()
-        t = Thread(target=self._start_loop_in_new_thread)
-        t.setDaemon(True) # 设置守护线程是为了有机会触发atexit,使程序自动结束,不用手动调用shutdown
+        t = Thread(target=self._start_loop_in_new_thread, daemon=True)
+        # t.setDaemon(True) # 设置守护线程是为了有机会触发atexit,使程序自动结束,不用手动调用shutdown
         t.start()
         self._can_be_closed_flag = False
         atexit.register(self.shutdown)
@@ -146,79 +122,39 @@ class AsyncPoolExecutor(nb_log.LoggerMixin):
             print('关闭循环')


-class AsyncProducerConsumer:
-    """
-    参考 https://asyncio.readthedocs.io/en/latest/producer_consumer.html 官方文档。
-    A simple producer/consumer example, using an asyncio.Queue:
-    """
-
-    """
-    边生产边消费。此框架没用到这个类,这个要求生产和消费在同一个线程里面,对原有同步方式的框架代码改造不方便。
-    """
-
-    def __init__(self, items, concurrent_num=200, consume_fun_specify=None):
+class AsyncPoolExecutorGtPy310(AsyncPoolExecutorLtPy310):
+    # noinspection PyMissingConstructor
+    def __init__(self, size, loop=None):
         """

-        :param items: 要消费的参数列表
-        :param concurrent_num: 并发数量
-        :param consume_fun_specify: 指定的异步消费函数对象,如果不指定就要继承并重写consume_fun函数。
+        :param size: 同时并发运行的协程任务数量。
+        :param loop:
         """
-        self.queue = asyncio.Queue()
-        self.items = items
-        self._concurrent_num = concurrent_num
-        self.consume_fun_specify = consume_fun_specify
-
-    async def produce(self):
-        for item in self.items:
-            await self.queue.put(item)
+        self._size = size
+        self.loop = loop or asyncio.new_event_loop()
+        self._sem = asyncio.Semaphore(self._size, )
+        self._queue = asyncio.Queue(maxsize=size, )
+        self._lock = threading.Lock()
+        t = Thread(target=self._start_loop_in_new_thread, daemon=True)
+        # t.setDaemon(True) # 设置守护线程是为了有机会触发atexit,使程序自动结束,不用手动调用shutdown
+        t.start()
+        self._can_be_closed_flag = False
+        atexit.register(self.shutdown)

-    async def consume(self):
-        while True:
-            # wait for an item from the producer
-            item = await self.queue.get()
-            # process the item
-            # print('consuming {}...'.format(item))
-            # simulate i/o operation using sleep
-            try:
-                if self.consume_fun_specify:
-                    await self.consume_fun_specify(item)
-                else:
-                    await self.consume_fun(item)
-            except Exception as e:
-                print(e)
+        self._event = threading.Event()
+        # print(self._event.is_set())
+        self._event.set()

-            # Notify the queue that the item has been processed
-            self.queue.task_done()
+    def _start_loop_in_new_thread(self, ):
+        # self._loop.run_until_complete(self.__run()) # 这种也可以。
+        # self._loop.run_forever()

-    @staticmethod
-    async def consume_fun(item):
-        """
-        要么继承此类重写此方法,要么在类的初始化时候指定consume_fun_specify为一个异步函数。
-        :param item:
-        :return:
-        """
-        print(item, '请重写 consume_fun 方法')
-        await asyncio.sleep(1)
+        # asyncio.set_event_loop(self.loop)
+        self.loop.run_until_complete(asyncio.wait([self.loop.create_task(self._consume()) for _ in range(self._size)], ))
+        self._can_be_closed_flag = True

-    async def __run(self):
-        # schedule the consumer
-        tasks = []
-        for _ in range(self._concurrent_num):
-            task = asyncio.ensure_future(self.consume())
-            tasks.append(task)
-        # run the producer and wait for completion
-        await self.produce()
-        # wait until the consumer has processed all items
-        await self.queue.join()
-        # the consumer is still awaiting for an item, cancel it
-        for task in tasks:
-            task.cancel()
-
-    def start_run(self):
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(self.__run())
-        # loop.close()

+AsyncPoolExecutor = AsyncPoolExecutorLtPy310 if sys.version_info.minor < 10 else AsyncPoolExecutorGtPy310

 if __name__ == '__main__':
     def test_async_pool_executor():
@@ -226,7 +162,7 @@ if __name__ == '__main__':
         # from concurrent.futures.thread import ThreadPoolExecutor
         # noinspection PyUnusedLocal
         async def f(x):
-
+            await asyncio.sleep(1)
             pass
             print('打印', x)
             # await asyncio.sleep(1)
@@ -240,11 +176,11 @@ if __name__ == '__main__':
         print(1111)

         t1 = time.time()
-
-        pool = ThreadPoolExecutor(200) # 协程不能用线程池运行,否则压根不会执行print打印,对于一部函数 f(x)得到的是一个协程,必须进一步把协程编排成任务放在loop循环里面运行。
-        for i in range(1,
+        pool = AsyncPoolExecutor(20)
+        # pool = ThreadPoolExecutor(200) # 协程不能用线程池运行,否则压根不会执行print打印,对于一部函数 f(x)得到的是一个协程,必须进一步把协程编排成任务放在loop循环里面运行。
+        for i in range(1, 501):
             print('放入', i)
-            pool.submit(
+            pool.submit(f, i)
         # time.sleep(5)
         # pool.submit(f, 'hi')
         # pool.submit(f, 'hi2')
@@ -254,15 +190,7 @@ if __name__ == '__main__':
         print(time.time() - t1)


-    async def _my_fun(item):
-        print('嘻嘻', item)
-        # await asyncio.sleep(1)
-
-
-    def test_async_producer_consumer():
-        AsyncProducerConsumer([i for i in range(100000)], concurrent_num=200, consume_fun_specify=_my_fun).start_run()
-        print('over')
-
-
     test_async_pool_executor()
     # test_async_producer_consumer()
+
+    print(sys.version_info)
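The executor rewrite above splits AsyncPoolExecutor into AsyncPoolExecutorLtPy310 and AsyncPoolExecutorGtPy310 and picks one via sys.version_info because the loop= keyword the older constructor passes to asyncio.Semaphore and asyncio.Queue was removed in Python 3.10. A standalone sketch of the same gating pattern, not funboost code (the helper name is invented for illustration):

import asyncio
import sys

def make_semaphore(size: int, loop: asyncio.AbstractEventLoop) -> asyncio.Semaphore:
    # Before Python 3.10 the primitive can be bound to an explicit loop; from 3.10 on
    # the loop argument no longer exists and the object binds to the running loop lazily.
    if sys.version_info < (3, 10):
        return asyncio.Semaphore(size, loop=loop)
    return asyncio.Semaphore(size)

The diff itself gates on sys.version_info.minor < 10, which assumes a 3.x interpreter; comparing the full tuple as in the sketch avoids relying on that.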
funboost/concurrent_pool/backup/__init__.py
File without changes

funboost/concurrent_pool/backup/async_pool_executor0223.py
ADDED

@@ -0,0 +1,268 @@
+import atexit
+import asyncio
+import threading
+import time
+import traceback
+from threading import Thread
+import nb_log # noqa
+
+# if os.name == 'posix':
+#     import uvloop
+#
+#     asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) # 打猴子补丁最好放在代码顶层,否则很大机会出问题。
+
+"""
+# 也可以采用 janus 的 线程安全的queue方式来实现异步池,此queue性能和本模块实现的生产 消费相比,性能并没有提高,所以就不重新用这这个包来实现一次了。
+import janus
+import asyncio
+import time
+import threading
+import nb_log
+queue = janus.Queue(maxsize=6000)
+
+async def consume():
+    while 1:
+        # time.sleep(1)
+        val = await queue.async_q.get() # 这是async,不要看错了
+        print(val)
+
+def push():
+    for i in range(50000):
+        # time.sleep(0.2)
+        # print(i)
+        queue.sync_q.put(i) # 这是sync。不要看错了。
+
+
+if __name__ == '__main__':
+    threading.Thread(target=push).start()
+    loop = asyncio.get_event_loop()
+    loop.create_task(consume())
+    loop.run_forever()
+"""
+
+
+class AsyncPoolExecutor2:
+    def __init__(self, size, loop=None):
+        self._size = size
+        self.loop = loop or asyncio.new_event_loop()
+        self._sem = asyncio.Semaphore(self._size, loop=self.loop)
+        # atexit.register(self.shutdown)
+        Thread(target=self._start_loop_in_new_thread).start()
+
+    def submit(self, func, *args, **kwargs):
+        while self._sem.locked():
+            time.sleep(0.001)
+        asyncio.run_coroutine_threadsafe(self._run_func(func, *args, **kwargs), self.loop)
+
+    async def _run_func(self, func, *args, **kwargs):
+        async with self._sem:
+            result = await func(*args, **kwargs)
+            return result
+
+    def _start_loop_in_new_thread(self, ):
+        self.loop.run_forever()
+
+    def shutdown(self):
+        self.loop.stop()
+        self.loop.close()
+
+
+class AsyncPoolExecutor(nb_log.LoggerMixin):
+    """
+    使api和线程池一样,最好的性能做法是submit也弄成 async def,生产和消费在同一个线程同一个loop一起运行,但会对调用链路的兼容性产生破坏,从而调用方式不兼容线程池。
+    """
+
+    def __init__(self, size, loop=None):
+        """
+
+        :param size: 同时并发运行的协程任务数量。
+        :param loop:
+        """
+        self._size = size
+        self.loop = loop or asyncio.new_event_loop()
+        self._sem = asyncio.Semaphore(self._size, )
+        self._queue = asyncio.Queue(maxsize=size, )
+        self._lock = threading.Lock()
+        t = Thread(target=self._start_loop_in_new_thread, daemon=True)
+        # t.setDaemon(True) # 设置守护线程是为了有机会触发atexit,使程序自动结束,不用手动调用shutdown
+        t.start()
+        self._can_be_closed_flag = False
+        atexit.register(self.shutdown)
+
+        self._event = threading.Event()
+        # print(self._event.is_set())
+        self._event.set()
+
+    def submit000(self, func, *args, **kwargs):
+        # 这个性能比下面的采用 run_coroutine_threadsafe + result返回快了3倍多。
+        with self._lock:
+            while 1:
+                if not self._queue.full():
+                    self.loop.call_soon_threadsafe(self._queue.put_nowait, (func, args, kwargs))
+                    break
+                else:
+                    time.sleep(0.01)
+
+    def submit(self, func, *args, **kwargs):
+        future = asyncio.run_coroutine_threadsafe(self._produce(func, *args, **kwargs), self.loop) # 这个 run_coroutine_threadsafe 方法也有缺点,消耗的性能巨大。
+        future.result() # 阻止过快放入,放入超过队列大小后,使submit阻塞。
+
+    async def _produce(self, func, *args, **kwargs):
+        await self._queue.put((func, args, kwargs))
+
+    async def _consume(self):
+        while True:
+            func, args, kwargs = await self._queue.get()
+            if isinstance(func, str) and func.startswith('stop'):
+                # self.logger.debug(func)
+                break
+            # noinspection PyBroadException,PyUnusedLocal
+            try:
+                await func(*args, **kwargs)
+            except Exception as e:
+                traceback.print_exc()
+            # self._queue.task_done()
+
+    async def __run(self):
+        for _ in range(self._size):
+            asyncio.ensure_future(self._consume())
+
+    def _start_loop_in_new_thread(self, ):
+        # self._loop.run_until_complete(self.__run()) # 这种也可以。
+        # self._loop.run_forever()
+
+        # asyncio.set_event_loop(self.loop)
+        self.loop.run_until_complete(asyncio.wait([self.loop.create_task(self._consume()) for _ in range(self._size)], ))
+        self._can_be_closed_flag = True
+
+    def shutdown(self):
+        if self.loop.is_running(): # 这个可能是atregster触发,也可能是用户手动调用,需要判断一下,不能关闭两次。
+            for i in range(self._size):
+                self.submit(f'stop{i}', )
+            while not self._can_be_closed_flag:
+                time.sleep(0.1)
+            self.loop.stop()
+            self.loop.close()
+            print('关闭循环')
+
+
+class AsyncProducerConsumer:
+    """
+    参考 https://asyncio.readthedocs.io/en/latest/producer_consumer.html 官方文档。
+    A simple producer/consumer example, using an asyncio.Queue:
+    """
+
+    """
+    边生产边消费。此框架没用到这个类,这个要求生产和消费在同一个线程里面,对原有同步方式的框架代码改造不方便。
+    """
+
+    def __init__(self, items, concurrent_num=200, consume_fun_specify=None):
+        """
+
+        :param items: 要消费的参数列表
+        :param concurrent_num: 并发数量
+        :param consume_fun_specify: 指定的异步消费函数对象,如果不指定就要继承并重写consume_fun函数。
+        """
+        self.queue = asyncio.Queue()
+        self.items = items
+        self._concurrent_num = concurrent_num
+        self.consume_fun_specify = consume_fun_specify
+
+    async def produce(self):
+        for item in self.items:
+            await self.queue.put(item)
+
+    async def consume(self):
+        while True:
+            # wait for an item from the producer
+            item = await self.queue.get()
+            # process the item
+            # print('consuming {}...'.format(item))
+            # simulate i/o operation using sleep
+            try:
+                if self.consume_fun_specify:
+                    await self.consume_fun_specify(item)
+                else:
+                    await self.consume_fun(item)
+            except Exception as e:
+                print(e)
+
+            # Notify the queue that the item has been processed
+            self.queue.task_done()
+
+    @staticmethod
+    async def consume_fun(item):
+        """
+        要么继承此类重写此方法,要么在类的初始化时候指定consume_fun_specify为一个异步函数。
+        :param item:
+        :return:
+        """
+        print(item, '请重写 consume_fun 方法')
+        await asyncio.sleep(1)
+
+    async def __run(self):
+        # schedule the consumer
+        tasks = []
+        for _ in range(self._concurrent_num):
+            task = asyncio.ensure_future(self.consume())
+            tasks.append(task)
+        # run the producer and wait for completion
+        await self.produce()
+        # wait until the consumer has processed all items
+        await self.queue.join()
+        # the consumer is still awaiting for an item, cancel it
+        for task in tasks:
+            task.cancel()
+
+    def start_run(self):
+        loop = asyncio.get_event_loop()
+        loop.run_until_complete(self.__run())
+        # loop.close()
+
+
+if __name__ == '__main__':
+    def test_async_pool_executor():
+        from funboost.concurrent_pool import CustomThreadPoolExecutor as ThreadPoolExecutor
+        # from concurrent.futures.thread import ThreadPoolExecutor
+        # noinspection PyUnusedLocal
+        async def f(x):
+            # await asyncio.sleep(0.1)
+            pass
+            print('打印', x)
+            # await asyncio.sleep(1)
+            # raise Exception('aaa')
+
+        def f2(x):
+            pass
+            # time.sleep(0.001)
+            print('打印', x)
+
+        print(1111)
+
+        t1 = time.time()
+        pool = AsyncPoolExecutor(20)
+        # pool = ThreadPoolExecutor(200) # 协程不能用线程池运行,否则压根不会执行print打印,对于一部函数 f(x)得到的是一个协程,必须进一步把协程编排成任务放在loop循环里面运行。
+        for i in range(1, 501):
+            print('放入', i)
+            pool.submit(f, i)
+        # time.sleep(5)
+        # pool.submit(f, 'hi')
+        # pool.submit(f, 'hi2')
+        # pool.submit(f, 'hi3')
+        # print(2222)
+        pool.shutdown()
+        print(time.time() - t1)
+
+
+    async def _my_fun(item):
+        print('嘻嘻', item)
+        # await asyncio.sleep(1)
+
+
+    def test_async_producer_consumer():
+        AsyncProducerConsumer([i for i in range(100000)], concurrent_num=200, consume_fun_specify=_my_fun).start_run()
+        print('over')
+
+
+    test_async_pool_executor()
+    # test_async_producer_consumer()
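Both executor variants kept in the new module (and this backup copy of the old one) rely on the same bridge: the event loop is driven inside a daemon thread while synchronous callers hand work over with asyncio.run_coroutine_threadsafe. A self-contained sketch of that bridge with a toy coroutine, not funboost code:

import asyncio
import threading

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

async def work(x):
    await asyncio.sleep(0.1)
    return x * 2

# run_coroutine_threadsafe returns a concurrent.futures.Future; result() blocks the calling
# thread until the coroutine finishes on the loop thread. AsyncPoolExecutor.submit uses the
# same call on its enqueue coroutine, which is how it applies back-pressure when the queue is full.
future = asyncio.run_coroutine_threadsafe(work(21), loop)
print(future.result())  # prints 42
loop.call_soon_threadsafe(loop.stop)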