funboost 18.7__py3-none-any.whl → 18.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of funboost might be problematic. Click here for more details.
- funboost/__init__.py +3 -3
- funboost/concurrent_pool/async_pool_executor.py +37 -109
- funboost/concurrent_pool/backup/__init__.py +0 -0
- funboost/concurrent_pool/backup/async_pool_executor0223.py +268 -0
- funboost/concurrent_pool/backup/async_pool_executor_back.py +268 -0
- funboost/concurrent_pool/backup/async_pool_executor_janus.py +166 -0
- funboost/publishers/base_publisher.py +4 -76
- funboost/publishers/msg_result_getter.py +168 -0
- funboost/utils/monkey_patches.py +45 -0
- funboost/utils/redis_manager.py +14 -1
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/METADATA +424 -424
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/RECORD +15 -9
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/WHEEL +1 -1
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/LICENSE +0 -0
- {funboost-18.7.dist-info → funboost-18.9.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
import atexit
|
|
2
|
+
import asyncio
|
|
3
|
+
import threading
|
|
4
|
+
import time
|
|
5
|
+
import traceback
|
|
6
|
+
from threading import Thread
|
|
7
|
+
import nb_log # noqa
|
|
8
|
+
|
|
9
|
+
# if os.name == 'posix':
|
|
10
|
+
# import uvloop
|
|
11
|
+
#
|
|
12
|
+
# asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) # 打猴子补丁最好放在代码顶层,否则很大机会出问题。
|
|
13
|
+
|
|
14
|
+
"""
|
|
15
|
+
# 也可以采用 janus 的 线程安全的queue方式来实现异步池,此queue性能和本模块实现的生产 消费相比,性能并没有提高,所以就不重新用这这个包来实现一次了。
|
|
16
|
+
import janus
|
|
17
|
+
import asyncio
|
|
18
|
+
import time
|
|
19
|
+
import threading
|
|
20
|
+
import nb_log
|
|
21
|
+
queue = janus.Queue(maxsize=6000)
|
|
22
|
+
|
|
23
|
+
async def consume():
|
|
24
|
+
while 1:
|
|
25
|
+
# time.sleep(1)
|
|
26
|
+
val = await queue.async_q.get() # 这是async,不要看错了
|
|
27
|
+
print(val)
|
|
28
|
+
|
|
29
|
+
def push():
|
|
30
|
+
for i in range(50000):
|
|
31
|
+
# time.sleep(0.2)
|
|
32
|
+
# print(i)
|
|
33
|
+
queue.sync_q.put(i) # 这是sync。不要看错了。
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
if __name__ == '__main__':
|
|
37
|
+
threading.Thread(target=push).start()
|
|
38
|
+
loop = asyncio.get_event_loop()
|
|
39
|
+
loop.create_task(consume())
|
|
40
|
+
loop.run_forever()
|
|
41
|
+
"""
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class AsyncPoolExecutor2:
    """Minimal coroutine pool: an event loop runs in a background thread and
    submitted coroutine functions are scheduled onto it, with concurrency
    capped by a semaphore.
    """

    def __init__(self, size, loop=None):
        """
        :param size: maximum number of coroutine tasks running concurrently.
        :param loop: optional event loop; a new private one is created when omitted.
        """
        self._size = size
        self.loop = loop or asyncio.new_event_loop()
        # The semaphore is created inside the loop thread (see below): since
        # Python 3.10 asyncio primitives no longer accept a loop= argument and
        # bind lazily to the loop that first awaits them.
        self._sem = None
        self._sem_ready = threading.Event()
        # atexit.register(self.shutdown)
        # daemon=True so a forgotten shutdown() cannot keep the process alive.
        Thread(target=self._start_loop_in_new_thread, daemon=True).start()
        self._sem_ready.wait()

    def submit(self, func, *args, **kwargs):
        """Schedule coroutine function *func* on the background loop.

        Spins briefly while the semaphore is exhausted, throttling producers.
        """
        while self._sem.locked():
            time.sleep(0.001)
        asyncio.run_coroutine_threadsafe(self._run_func(func, *args, **kwargs), self.loop)

    async def _run_func(self, func, *args, **kwargs):
        # The semaphore caps how many coroutines run concurrently.
        async with self._sem:
            result = await func(*args, **kwargs)
            return result

    def _start_loop_in_new_thread(self, ):
        asyncio.set_event_loop(self.loop)
        self._sem = asyncio.Semaphore(self._size)
        self._sem_ready.set()
        self.loop.run_forever()

    def shutdown(self):
        """Stop and close the background loop.

        loop.stop() is not thread-safe from a foreign thread, and close() on a
        running loop raises RuntimeError — so stop via call_soon_threadsafe and
        wait for the loop to actually halt before closing it.
        """
        self.loop.call_soon_threadsafe(self.loop.stop)
        while self.loop.is_running():
            time.sleep(0.01)
        self.loop.close()
70
|
+
class AsyncPoolExecutor(nb_log.LoggerMixin):
    """Coroutine pool with a thread-pool-like API.

    An event loop runs in a background thread; ``submit`` feeds coroutine
    functions to ``size`` consumer tasks through a bounded asyncio queue.
    (Making ``submit`` itself ``async`` would perform best, with production
    and consumption sharing one loop, but it would break API compatibility
    with thread pools for existing callers.)
    """

    def __init__(self, size, loop=None):
        """
        :param size: number of coroutine tasks that run concurrently.
        :param loop: optional event loop; a new private one is created when omitted.
        """
        self._size = size
        self.loop = loop or asyncio.new_event_loop()
        # Since Python 3.10, asyncio.Semaphore/Queue no longer accept loop=;
        # they bind lazily to the loop that first awaits them (the background
        # thread's loop below).
        self._sem = asyncio.Semaphore(self._size)
        self._queue = asyncio.Queue(maxsize=size)
        self._lock = threading.Lock()
        # Daemon thread gives atexit a chance to fire, so the program can end
        # without an explicit shutdown() call.
        t = Thread(target=self._start_loop_in_new_thread, daemon=True)
        t.start()
        self._can_be_closed_flag = False
        atexit.register(self.shutdown)

        self._event = threading.Event()
        self._event.set()

    def submit000(self, func, *args, **kwargs):
        # Roughly 3x faster than the run_coroutine_threadsafe + result() path below.
        with self._lock:
            while 1:
                if not self._queue.full():
                    self.loop.call_soon_threadsafe(self._queue.put_nowait, (func, args, kwargs))
                    break
                else:
                    time.sleep(0.01)

    def submit(self, func, *args, **kwargs):
        """Enqueue (func, args, kwargs); blocks when the queue is full.

        run_coroutine_threadsafe is comparatively expensive, but waiting on
        its result is what provides back-pressure against fast producers.
        """
        future = asyncio.run_coroutine_threadsafe(self._produce(func, *args, **kwargs), self.loop)
        future.result()  # Blocks once the queue is full, throttling submit().

    async def _produce(self, func, *args, **kwargs):
        await self._queue.put((func, args, kwargs))

    async def _consume(self):
        # Each consumer task exits when it dequeues a 'stop...' sentinel string.
        while True:
            func, args, kwargs = await self._queue.get()
            if isinstance(func, str) and func.startswith('stop'):
                break
            # noinspection PyBroadException,PyUnusedLocal
            try:
                await func(*args, **kwargs)
            except Exception as e:
                traceback.print_exc()

    async def _run_consumers(self):
        # Explicit tasks + gather: passing bare coroutines to asyncio.wait was
        # deprecated in 3.8 and removed in 3.11, and wait() lost its loop= arg
        # in 3.10.
        consumers = [asyncio.ensure_future(self._consume()) for _ in range(self._size)]
        await asyncio.gather(*consumers)

    def _start_loop_in_new_thread(self, ):
        asyncio.set_event_loop(self.loop)
        self.loop.run_until_complete(self._run_consumers())
        self._can_be_closed_flag = True

    def shutdown(self):
        """Drain the pool and close the loop.

        May be triggered both by atexit and by an explicit user call, so the
        is_running() guard prevents a double close.
        """
        if self.loop.is_running():
            for i in range(self._size):
                self.submit(f'stop{i}', )
            while not self._can_be_closed_flag:
                time.sleep(0.1)
            self.loop.stop()
            self.loop.close()
            print('关闭循环')
149
|
+
class AsyncProducerConsumer:
|
|
150
|
+
"""
|
|
151
|
+
参考 https://asyncio.readthedocs.io/en/latest/producer_consumer.html 官方文档。
|
|
152
|
+
A simple producer/consumer example, using an asyncio.Queue:
|
|
153
|
+
"""
|
|
154
|
+
|
|
155
|
+
"""
|
|
156
|
+
边生产边消费。此框架没用到这个类,这个要求生产和消费在同一个线程里面,对原有同步方式的框架代码改造不方便。
|
|
157
|
+
"""
|
|
158
|
+
|
|
159
|
+
def __init__(self, items, concurrent_num=200, consume_fun_specify=None):
|
|
160
|
+
"""
|
|
161
|
+
|
|
162
|
+
:param items: 要消费的参数列表
|
|
163
|
+
:param concurrent_num: 并发数量
|
|
164
|
+
:param consume_fun_specify: 指定的异步消费函数对象,如果不指定就要继承并重写consume_fun函数。
|
|
165
|
+
"""
|
|
166
|
+
self.queue = asyncio.Queue()
|
|
167
|
+
self.items = items
|
|
168
|
+
self._concurrent_num = concurrent_num
|
|
169
|
+
self.consume_fun_specify = consume_fun_specify
|
|
170
|
+
|
|
171
|
+
async def produce(self):
|
|
172
|
+
for item in self.items:
|
|
173
|
+
await self.queue.put(item)
|
|
174
|
+
|
|
175
|
+
async def consume(self):
|
|
176
|
+
while True:
|
|
177
|
+
# wait for an item from the producer
|
|
178
|
+
item = await self.queue.get()
|
|
179
|
+
# process the item
|
|
180
|
+
# print('consuming {}...'.format(item))
|
|
181
|
+
# simulate i/o operation using sleep
|
|
182
|
+
try:
|
|
183
|
+
if self.consume_fun_specify:
|
|
184
|
+
await self.consume_fun_specify(item)
|
|
185
|
+
else:
|
|
186
|
+
await self.consume_fun(item)
|
|
187
|
+
except Exception as e:
|
|
188
|
+
print(e)
|
|
189
|
+
|
|
190
|
+
# Notify the queue that the item has been processed
|
|
191
|
+
self.queue.task_done()
|
|
192
|
+
|
|
193
|
+
@staticmethod
|
|
194
|
+
async def consume_fun(item):
|
|
195
|
+
"""
|
|
196
|
+
要么继承此类重写此方法,要么在类的初始化时候指定consume_fun_specify为一个异步函数。
|
|
197
|
+
:param item:
|
|
198
|
+
:return:
|
|
199
|
+
"""
|
|
200
|
+
print(item, '请重写 consume_fun 方法')
|
|
201
|
+
await asyncio.sleep(1)
|
|
202
|
+
|
|
203
|
+
async def __run(self):
|
|
204
|
+
# schedule the consumer
|
|
205
|
+
tasks = []
|
|
206
|
+
for _ in range(self._concurrent_num):
|
|
207
|
+
task = asyncio.ensure_future(self.consume())
|
|
208
|
+
tasks.append(task)
|
|
209
|
+
# run the producer and wait for completion
|
|
210
|
+
await self.produce()
|
|
211
|
+
# wait until the consumer has processed all items
|
|
212
|
+
await self.queue.join()
|
|
213
|
+
# the consumer is still awaiting for an item, cancel it
|
|
214
|
+
for task in tasks:
|
|
215
|
+
task.cancel()
|
|
216
|
+
|
|
217
|
+
def start_run(self):
|
|
218
|
+
loop = asyncio.get_event_loop()
|
|
219
|
+
loop.run_until_complete(self.__run())
|
|
220
|
+
# loop.close()
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
if __name__ == '__main__':
    def test_async_pool_executor():
        """Demo: push 500 coroutines through AsyncPoolExecutor and time it."""
        from funboost.concurrent_pool import CustomThreadPoolExecutor as ThreadPoolExecutor  # noqa

        # noinspection PyUnusedLocal
        async def f(x):
            pass
            print('打印', x)

        def f2(x):
            pass
            print('打印', x)

        print(1111)

        start = time.time()
        executor = AsyncPoolExecutor(20)
        # A thread pool cannot run coroutines: calling f(x) merely builds a
        # coroutine object, which must be scheduled as a task on an event loop.
        for n in range(1, 501):
            print('放入', n)
            executor.submit(f, n)
        executor.shutdown()
        print(time.time() - start)

    async def _my_fun(item):
        print('嘻嘻', item)

    def test_async_producer_consumer():
        """Demo: drain 100k items through AsyncProducerConsumer."""
        AsyncProducerConsumer([i for i in range(100000)], concurrent_num=200, consume_fun_specify=_my_fun).start_run()
        print('over')

    test_async_pool_executor()
    # test_async_producer_consumer()
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import atexit
|
|
2
|
+
import asyncio
|
|
3
|
+
import threading
|
|
4
|
+
import time
|
|
5
|
+
import traceback
|
|
6
|
+
from threading import Thread
|
|
7
|
+
import nb_log # noqa
|
|
8
|
+
|
|
9
|
+
# if os.name == 'posix':
|
|
10
|
+
# import uvloop
|
|
11
|
+
#
|
|
12
|
+
# asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) # 打猴子补丁最好放在代码顶层,否则很大机会出问题。
|
|
13
|
+
|
|
14
|
+
"""
|
|
15
|
+
# 也可以采用 janus 的 线程安全的queue方式来实现异步池,此queue性能和本模块实现的生产 消费相比,性能并没有提高,所以就不重新用这这个包来实现一次了。
|
|
16
|
+
import janus
|
|
17
|
+
import asyncio
|
|
18
|
+
import time
|
|
19
|
+
import threading
|
|
20
|
+
import nb_log
|
|
21
|
+
queue = janus.Queue(maxsize=6000)
|
|
22
|
+
|
|
23
|
+
async def consume():
|
|
24
|
+
while 1:
|
|
25
|
+
# time.sleep(1)
|
|
26
|
+
val = await queue.async_q.get() # 这是async,不要看错了
|
|
27
|
+
print(val)
|
|
28
|
+
|
|
29
|
+
def push():
|
|
30
|
+
for i in range(50000):
|
|
31
|
+
# time.sleep(0.2)
|
|
32
|
+
# print(i)
|
|
33
|
+
queue.sync_q.put(i) # 这是sync。不要看错了。
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
if __name__ == '__main__':
|
|
37
|
+
threading.Thread(target=push).start()
|
|
38
|
+
loop = asyncio.get_event_loop()
|
|
39
|
+
loop.create_task(consume())
|
|
40
|
+
loop.run_forever()
|
|
41
|
+
"""
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class AsyncPoolExecutor(nb_log.LoggerMixin):
    """Coroutine pool with a thread-pool-like API (janus-experiment backup).

    BUGFIX: the original __init__ set ``self.queue = janus.Queue(maxsize=6000)``
    while every method referenced ``self._queue`` (AttributeError at runtime),
    and ``janus`` was never imported in this module (NameError at __init__).
    All methods use the asyncio.Queue await-style API, so a plain bounded
    ``asyncio.Queue`` named ``_queue`` is what they expect.
    """

    def __init__(self, size, loop=None):
        """
        :param size: number of coroutine tasks that run concurrently.
        :param loop: optional event loop; a new private one is created when omitted.
        """
        self._size = size
        self.loop = loop or asyncio.new_event_loop()
        # asyncio.Queue no longer accepts loop= (Python 3.10+); it binds
        # lazily to the loop that first awaits it (the thread below).
        self._queue = asyncio.Queue(maxsize=size)
        self._lock = threading.Lock()
        # Daemon thread gives atexit a chance to fire, so the program can end
        # without an explicit shutdown() call.
        t = Thread(target=self._start_loop_in_new_thread, daemon=True)
        t.start()
        self._can_be_closed_flag = False
        atexit.register(self.shutdown)

        self._event = threading.Event()
        self._event.set()

    def submit000(self, func, *args, **kwargs):
        # Roughly 3x faster than the run_coroutine_threadsafe + result() path below.
        with self._lock:
            while 1:
                if not self._queue.full():
                    self.loop.call_soon_threadsafe(self._queue.put_nowait, (func, args, kwargs))
                    break
                else:
                    time.sleep(0.01)

    def submit(self, func, *args, **kwargs):
        """Enqueue (func, args, kwargs); blocks when the queue is full.

        run_coroutine_threadsafe is comparatively expensive, but waiting on
        its result is what provides back-pressure against fast producers.
        """
        future = asyncio.run_coroutine_threadsafe(self._produce(func, *args, **kwargs), self.loop)
        future.result()

    async def _produce(self, func, *args, **kwargs):
        await self._queue.put((func, args, kwargs))

    async def _consume(self):
        # Each consumer task exits when it dequeues a 'stop...' sentinel string.
        while True:
            func, args, kwargs = await self._queue.get()
            if isinstance(func, str) and func.startswith('stop'):
                break
            # noinspection PyBroadException,PyUnusedLocal
            try:
                await func(*args, **kwargs)
            except Exception as e:
                traceback.print_exc()

    async def _run_consumers(self):
        # Explicit tasks + gather: passing bare coroutines to asyncio.wait was
        # deprecated in 3.8 and removed in 3.11, and wait() lost loop= in 3.10.
        consumers = [asyncio.ensure_future(self._consume()) for _ in range(self._size)]
        await asyncio.gather(*consumers)

    def _start_loop_in_new_thread(self, ):
        asyncio.set_event_loop(self.loop)
        self.loop.run_until_complete(self._run_consumers())
        self._can_be_closed_flag = True

    def shutdown(self):
        """Drain the pool and close the loop.

        May be triggered both by atexit and by an explicit user call, so the
        is_running() guard prevents a double close.
        """
        if self.loop.is_running():
            for i in range(self._size):
                self.submit(f'stop{i}', )
            while not self._can_be_closed_flag:
                time.sleep(0.1)
            self.loop.stop()
            self.loop.close()
            print('关闭循环')
128
|
+
if __name__ == '__main__':
    def test_async_pool_executor():
        """Demo: push 500 coroutines through AsyncPoolExecutor and time it."""
        from funboost.concurrent_pool import CustomThreadPoolExecutor as ThreadPoolExecutor  # noqa

        # noinspection PyUnusedLocal
        async def f(x):
            pass
            print('打印', x)

        def f2(x):
            pass
            print('打印', x)

        print(1111)

        start = time.time()
        executor = AsyncPoolExecutor(20)
        # A thread pool cannot run coroutines: calling f(x) merely builds a
        # coroutine object, which must be scheduled as a task on an event loop.
        for n in range(1, 501):
            print('放入', n)
            executor.submit(f, n)
        executor.shutdown()
        print(time.time() - start)

    test_async_pool_executor()
    # test_async_producer_consumer()
|
@@ -17,85 +17,13 @@ from kombu.exceptions import KombuError
|
|
|
17
17
|
from pikav1.exceptions import AMQPError as PikaAMQPError
|
|
18
18
|
|
|
19
19
|
from nb_log import LoggerLevelSetterMixin, LogManager, LoggerMixin
|
|
20
|
-
from funboost.utils import decorators, RedisMixin, time_util
|
|
21
|
-
from funboost import funboost_config_deafult
|
|
22
|
-
from funboost.concurrent_pool import CustomThreadPoolExecutor
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
class HasNotAsyncResult(Exception):
|
|
26
|
-
pass
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
class AsyncResult(RedisMixin):
|
|
30
|
-
callback_run_executor = CustomThreadPoolExecutor(200)
|
|
31
|
-
|
|
32
|
-
def __init__(self, task_id, timeout=120):
|
|
33
|
-
self.task_id = task_id
|
|
34
|
-
self.timeout = timeout
|
|
35
|
-
self._has_pop = False
|
|
36
|
-
self._status_and_result = None
|
|
37
|
-
|
|
38
|
-
def set_timeout(self, timeout=60):
|
|
39
|
-
self.timeout = timeout
|
|
40
|
-
return self
|
|
41
|
-
|
|
42
|
-
def is_pending(self):
|
|
43
|
-
return not self.redis_db_filter_and_rpc_result.exists(self.task_id)
|
|
44
|
-
|
|
45
|
-
@property
|
|
46
|
-
def status_and_result(self):
|
|
47
|
-
if not self._has_pop:
|
|
48
|
-
redis_value = self.redis_db_filter_and_rpc_result.blpop(self.task_id, self.timeout)
|
|
49
|
-
self._has_pop = True
|
|
50
|
-
if redis_value is not None:
|
|
51
|
-
status_and_result_str = redis_value[1]
|
|
52
|
-
self._status_and_result = json.loads(status_and_result_str)
|
|
53
|
-
self.redis_db_filter_and_rpc_result.lpush(self.task_id, status_and_result_str)
|
|
54
|
-
self.redis_db_filter_and_rpc_result.expire(self.task_id, 600)
|
|
55
|
-
return self._status_and_result
|
|
56
|
-
return None
|
|
57
|
-
return self._status_and_result
|
|
58
|
-
|
|
59
|
-
def get(self):
|
|
60
|
-
# print(self.status_and_result)
|
|
61
|
-
if self.status_and_result is not None:
|
|
62
|
-
return self.status_and_result['result']
|
|
63
|
-
else:
|
|
64
|
-
raise HasNotAsyncResult
|
|
65
|
-
|
|
66
|
-
@property
|
|
67
|
-
def result(self):
|
|
68
|
-
return self.get()
|
|
69
|
-
|
|
70
|
-
def is_success(self):
|
|
71
|
-
return self.status_and_result['success']
|
|
72
|
-
|
|
73
|
-
def _run_callback_func(self, callback_func):
|
|
74
|
-
callback_func(self.status_and_result)
|
|
75
|
-
|
|
76
|
-
def set_callback(self, callback_func: typing.Callable):
|
|
77
|
-
"""
|
|
78
|
-
:param callback_func: 函数结果回调函数,使回调函数自动在线程池中并发运行。
|
|
79
|
-
:return:
|
|
80
|
-
"""
|
|
81
|
-
|
|
82
|
-
''' 用法例如
|
|
83
|
-
from test_frame.test_rpc.test_consume import add
|
|
84
|
-
def show_result(status_and_result: dict):
|
|
85
|
-
"""
|
|
86
|
-
:param status_and_result: 一个字典包括了函数入参、函数结果、函数是否运行成功、函数运行异常类型
|
|
87
|
-
"""
|
|
88
|
-
print(status_and_result)
|
|
89
|
-
|
|
90
|
-
for i in range(100):
|
|
91
|
-
async_result = add.push(i, i * 2)
|
|
92
|
-
# print(async_result.result) # 执行 .result是获取函数的运行结果,会阻塞当前发布消息的线程直到函数运行完成。
|
|
93
|
-
async_result.set_callback(show_result) # 使用回调函数在线程池中并发的运行函数结果
|
|
94
|
-
'''
|
|
95
|
-
self.callback_run_executor.submit(self._run_callback_func, callback_func)
|
|
96
20
|
|
|
21
|
+
from funboost.publishers.msg_result_getter import AsyncResult, AioAsyncResult, HasNotAsyncResult
|
|
22
|
+
from funboost.utils import decorators, time_util
|
|
23
|
+
from funboost import funboost_config_deafult
|
|
97
24
|
|
|
98
25
|
RedisAsyncResult = AsyncResult # 别名
|
|
26
|
+
RedisAioAsyncResult = AioAsyncResult # 别名
|
|
99
27
|
|
|
100
28
|
|
|
101
29
|
class PriorityConsumingControlConfig:
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
|
|
3
|
+
import typing
|
|
4
|
+
import json
|
|
5
|
+
from funboost.concurrent_pool import CustomThreadPoolExecutor
|
|
6
|
+
from funboost.utils import RedisMixin
|
|
7
|
+
from funboost.utils.redis_manager import AioRedisMixin
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class HasNotAsyncResult(Exception):
    """Raised when a task's RPC result is not available (not yet produced or timed out)."""
|
+
|
|
13
|
+
|
|
14
|
+
class AsyncResult(RedisMixin):
    """Blocking handle for fetching a task's RPC result out of redis."""

    # Shared pool that runs user callbacks registered via set_callback().
    callback_run_executor = CustomThreadPoolExecutor(200)

    def __init__(self, task_id, timeout=120):
        self.task_id = task_id
        self.timeout = timeout
        self._has_pop = False
        self._status_and_result = None

    def set_timeout(self, timeout=60):
        """Change the blpop timeout; returns self for chaining."""
        self.timeout = timeout
        return self

    def is_pending(self):
        """True while no result key exists in redis yet."""
        return not self.redis_db_filter_and_rpc_result.exists(self.task_id)

    @property
    def status_and_result(self):
        """Block (up to self.timeout) for the status/result dict.

        Cached after the first fetch; returns None when the wait timed out.
        """
        if self._has_pop:
            return self._status_and_result
        popped = self.redis_db_filter_and_rpc_result.blpop(self.task_id, self.timeout)
        self._has_pop = True
        if popped is None:
            return None
        raw = popped[1]
        self._status_and_result = json.loads(raw)
        # Push the value back with a fresh TTL so other readers can see it too.
        self.redis_db_filter_and_rpc_result.lpush(self.task_id, raw)
        self.redis_db_filter_and_rpc_result.expire(self.task_id, 600)
        return self._status_and_result

    def get(self):
        """Return the remote function's result, or raise HasNotAsyncResult."""
        state = self.status_and_result
        if state is None:
            raise HasNotAsyncResult
        return state['result']

    @property
    def result(self):
        return self.get()

    def is_success(self):
        """Whether the remote function ran successfully; raises HasNotAsyncResult
        when no result is available."""
        state = self.status_and_result
        if state is None:
            raise HasNotAsyncResult
        return state['success']

    def _run_callback_func(self, callback_func):
        callback_func(self.status_and_result)

    def set_callback(self, callback_func: typing.Callable):
        """Run *callback_func* against the status/result dict on a thread pool.

        Example::

            from test_frame.test_rpc.test_consume import add

            def show_result(status_and_result: dict):
                # dict with the call args, the result, success flag and
                # exception type of the remote function run
                print(status_and_result)

            for i in range(100):
                async_result = add.push(i, i * 2)
                # .result would block this publisher thread until completion;
                # a callback lets results be handled concurrently instead.
                async_result.set_callback(show_result)
        """
        self.callback_run_executor.submit(self._run_callback_func, callback_func)
|
+
|
|
86
|
+
class AioAsyncResult(AioRedisMixin):
    """asyncio flavour of AsyncResult, for use inside a running event loop.

    Example::

        import asyncio

        from funboost import AioAsyncResult
        from test_frame.test_rpc.test_consume import add


        async def process_result(status_and_result: dict):
            # dict with the call args, the result, success flag and
            # exception type of the remote function run
            await asyncio.sleep(1)
            print(status_and_result)


        async def test_get_result(i):
            async_result = add.push(i, i * 2)
            # the asyncio-native class composes better with async code
            aio_async_result = AioAsyncResult(task_id=async_result.task_id)
            print(await aio_async_result.result)  # must be awaited, otherwise you get a coroutine object
            print(await aio_async_result.status_and_result)
            await aio_async_result.set_callback(process_result)  # or schedule a callback onto the loop


        if __name__ == '__main__':
            loop = asyncio.get_event_loop()
            for j in range(100):
                loop.create_task(test_get_result(j))
            loop.run_forever()
    """

    def __init__(self, task_id, timeout=120):
        self.task_id = task_id
        self.timeout = timeout
        self._has_pop = False
        self._status_and_result = None
        # Strong references so callback tasks are not garbage-collected
        # before they run (asyncio only keeps weak refs to tasks).
        self._callback_tasks = set()

    def set_timeout(self, timeout=60):
        """Change the blpop timeout; returns self for chaining."""
        self.timeout = timeout
        return self

    async def is_pending(self):
        """True while no result key exists in redis yet."""
        is_exists = await self.aioredis_db_filter_and_rpc_result.exists(self.task_id)
        return not is_exists

    @property
    async def status_and_result(self):
        """Await (up to self.timeout) the status/result dict.

        Cached after the first fetch; resolves to None when the wait timed out.
        """
        if not self._has_pop:
            redis_value = await self.aioredis_db_filter_and_rpc_result.blpop(self.task_id, self.timeout)
            self._has_pop = True
            if redis_value is not None:
                status_and_result_str = redis_value[1]
                self._status_and_result = json.loads(status_and_result_str)
                # Push the value back with a fresh TTL so other readers can see it too.
                await self.aioredis_db_filter_and_rpc_result.lpush(self.task_id, status_and_result_str)
                await self.aioredis_db_filter_and_rpc_result.expire(self.task_id, 600)
                return self._status_and_result
            return None
        return self._status_and_result

    async def get(self):
        """Return the remote function's result, or raise HasNotAsyncResult."""
        # Await the property once instead of twice: the old code re-entered
        # the coroutine for the same cached value on every call.
        state = await self.status_and_result
        if state is None:
            raise HasNotAsyncResult
        return state['result']

    @property
    async def result(self):
        return await self.get()

    async def is_success(self):
        """Whether the remote function ran successfully; raises HasNotAsyncResult
        when no result is available."""
        state = await self.status_and_result
        if state is None:
            raise HasNotAsyncResult
        return state['success']

    async def _run_callback_func(self, callback_func):
        await callback_func(await self.status_and_result)

    async def set_callback(self, aio_callback_func: typing.Callable):
        """Schedule *aio_callback_func* to run with the status/result dict.

        Keeps a reference to the created task: a bare asyncio.create_task
        result can be garbage-collected before it runs (see asyncio docs).
        Returns the task so callers may optionally await it.
        """
        task = asyncio.create_task(self._run_callback_func(callback_func=aio_callback_func))
        self._callback_tasks.add(task)
        task.add_done_callback(self._callback_tasks.discard)
        return task