funboost-44.3-py3-none-any.whl → funboost-44.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry has flagged this version of funboost as possibly problematic.
- funboost/__init__.py +1 -1
- funboost/concurrent_pool/async_helper.py +3 -1
- funboost/constant.py +9 -10
- funboost/consumers/base_consumer.py +21 -22
- funboost/consumers/redis_consumer_ack_using_timeout.py +77 -0
- funboost/consumers/redis_consumer_simple.py +0 -1
- funboost/contrib/api_publish_msg.py +4 -3
- funboost/core/booster.py +17 -2
- funboost/core/funboost_time.py +26 -9
- funboost/core/helper_funs.py +16 -2
- funboost/core/lazy_impoter.py +1 -1
- funboost/core/loggers.py +3 -3
- funboost/factories/broker_kind__publsiher_consumer_type_map.py +3 -2
- funboost/funboost_config_deafult.py +1 -1
- funboost/function_result_web/__pycache__/functions.cpython-39.pyc +0 -0
- funboost/utils/decorators.py +1 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/__init__.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/add_to_pythonpath.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__init__.py +59 -59
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/__init__.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/client.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/compat.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/connection.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/exceptions.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/lock.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/utils.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/client.py +4804 -4804
- funboost/utils/dependency_packages_in_pythonpath/aioredis/compat.py +8 -8
- funboost/utils/dependency_packages_in_pythonpath/aioredis/connection.py +1668 -1668
- funboost/utils/dependency_packages_in_pythonpath/aioredis/exceptions.py +96 -96
- funboost/utils/dependency_packages_in_pythonpath/aioredis/lock.py +306 -306
- funboost/utils/dependency_packages_in_pythonpath/aioredis/log.py +15 -15
- funboost/utils/dependency_packages_in_pythonpath/aioredis/sentinel.py +329 -329
- funboost/utils/dependency_packages_in_pythonpath/aioredis/utils.py +61 -61
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/StoppableThread.py +133 -133
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__init__.py +16 -16
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/StoppableThread.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/__init__.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/exceptions.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/py3_raise.cpython-39.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/dafunc.py +244 -244
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/exceptions.py +98 -98
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/py2_raise.py +7 -7
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/py3_raise.py +7 -7
- funboost/utils/json_helper.py +14 -1
- funboost/utils/times/__init__.py +85 -85
- funboost/utils/times/version.py +1 -1
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/METADATA +9 -4
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/RECORD +54 -82
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/entry_points.txt +0 -1
- funboost/function_result_web/__pycache__/app.cpython-37.pyc +0 -0
- funboost/function_result_web/__pycache__/functions.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/__init__.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/__init__.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/add_to_pythonpath.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/__pycache__/add_to_pythonpath.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/__init__.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/__init__.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/client.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/client.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/compat.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/compat.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/connection.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/connection.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/exceptions.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/exceptions.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/lock.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/lock.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/utils.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/utils.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/StoppableThread.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/StoppableThread.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/__init__.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/__init__.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/exceptions.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/exceptions.cpython-37.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/py3_raise.cpython-311.pyc +0 -0
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/py3_raise.cpython-37.pyc +0 -0
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/LICENSE +0 -0
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/WHEEL +0 -0
- {funboost-44.3.dist-info → funboost-44.5.dist-info}/top_level.txt +0 -0
funboost/__init__.py
CHANGED
funboost/concurrent_pool/async_helper.py
CHANGED

@@ -2,9 +2,11 @@ from functools import partial
 import asyncio
 from concurrent.futures import Executor
 from funboost.concurrent_pool.custom_threadpool_executor import ThreadPoolExecutorShrinkAble
+# from funboost.concurrent_pool.flexible_thread_pool import FlexibleThreadPool

 # The built-in concurrent.futures.ThreadPoolExecutor is not used; the smart auto-shrinking thread pool is used instead.
-async_executor_default = ThreadPoolExecutorShrinkAble()
+async_executor_default = ThreadPoolExecutorShrinkAble(500)
+# async_executor_default = FlexibleThreadPool(50)  # this pool does not support the future feature


 async def simple_run_in_executor(f, *args, async_executor: Executor = None, async_loop=None, **kwargs):
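The hunk above only changes the default executor that simple_run_in_executor falls back to. The following is a minimal sketch, not part of the diff, of how that helper is typically awaited from asyncio code; it assumes only the signature shown above and the import path used elsewhere in this release (funboost.concurrent_pool.async_helper), and blocking_job is an illustrative function.

import asyncio
import time

from funboost.concurrent_pool.async_helper import simple_run_in_executor

def blocking_job(x):
    time.sleep(0.1)   # stands in for blocking I/O or CPU-bound work
    return x * 2

async def main():
    # With async_executor left as None, the call should run in the module-level
    # async_executor_default, now a ThreadPoolExecutorShrinkAble(500).
    result = await simple_run_in_executor(blocking_job, 21)
    print(result)

asyncio.run(main())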
funboost/constant.py
CHANGED
@@ -3,14 +3,20 @@ class BrokerEnum:
     RABBITMQ_AMQPSTORM = 'RABBITMQ_AMQPSTORM'  # Uses the amqpstorm package to drive rabbitmq as the distributed message queue; supports consume acknowledgement. Strongly recommended as the funboost broker.
     RABBITMQ = RABBITMQ_AMQPSTORM

-    RABBITMQ_RABBITPY = 'RABBITMQ_RABBITPY'  # Uses the rabbitpy package to drive rabbitmq as
+    RABBITMQ_RABBITPY = 'RABBITMQ_RABBITPY'  # Uses the rabbitpy package to drive rabbitmq as the distributed message queue; supports consume acknowledgement; not recommended.

     REDIS = 'REDIS'  # Uses a redis list with brpop as the distributed message queue. Restarting or killing consumers at will loses many messages and there is no consume acknowledgement. Choose this redis scheme if you care about performance and do not mind losing messages.
+    REDIS_ACK_ABLE = 'REDIS_ACK_ABLE'  # Redis list plus a temporary unacked set; a lua script makes popping a task and adding it to pending atomic; whether a message belonged to a dropped consumer is judged by the disappearance of the process heartbeat, so arbitrary restarts and disconnects do not lose tasks.
+    REIDS_ACK_USING_TIMEOUT = 'reids_ack_using_timeout'  # Redis list plus a temporary unacked set; a message not acknowledged within the timeout is automatically put back into the queue. Mind the relation between the ack_timeout value and the function run time, otherwise messages are requeued again and again; the ack timeout can be set on boost via broker_exclusive_config={'ack_timeout': 1800}.
+    REDIS_PRIORITY = 'REDIS_PRIORITY'  # # Multiple redis lists plus a temporary unacked set, with blpop listening on several keys. Like rabbitmq's x-max-priority, it supports task priority. See section 4.29 of the docs on priority queues.
+    REDIS_STREAM = 'REDIS_STREAM'  # For redis 5.0 and later, uses the stream data structure as the distributed message queue; supports consume acknowledgement, persistence and consumer groups. It is the message-queue form recommended by redis itself and fits better than the list structure.
+    RedisBrpopLpush = 'RedisBrpopLpush'  # Redis list but using brpoplpush with two queues; roughly the same as redis_ack_able, except a native command is used so no lua script is needed to pop and add to unack.
+    REDIS_PUBSUB = 'REDIS_PUBSUB'  # Based on redis publish/subscribe; one published message is received by every consumer, but persistence is not supported.

     MEMORY_QUEUE = 'MEMORY_QUEUE'  # A message queue implemented with python queue.Queue inside the current python process; tasks cannot be shared across processes, scripts or machines, there is no persistence; suitable for one-off, short-lived, simple tasks.
     LOCAL_PYTHON_QUEUE = MEMORY_QUEUE  # Alias. The local python queue is just the language's built-in queue.Queue; messages live in the python process memory and cannot be resumed after a restart.

-    RABBITMQ_PIKA = 'RABBITMQ_PIKA'  # Uses the pika package to drive rabbitmq as
+    RABBITMQ_PIKA = 'RABBITMQ_PIKA'  # Uses the pika package to drive rabbitmq as the distributed message queue; not recommended.

     MONGOMQ = 'MONGOMQ'  # Uses rows of a mongo collection to simulate a distributed message queue; supports consume acknowledgement.

@@ -27,19 +33,13 @@

     KAFKA_CONFLUENT_SASlPlAIN = 'KAFKA_CONFLUENT_SASlPlAIN'  # kafka with a username and password

-    REDIS_ACK_ABLE = 'REDIS_ACK_ABLE'  # Redis list plus a temporary unacked set; a lua script makes popping a task and adding it to pending atomic; arbitrary restarts and disconnects do not lose tasks.
-
-    REDIS_PRIORITY = 'REDIS_PRIORITY'  # # Multiple redis lists plus a temporary unacked set, with blpop listening on several keys. Like rabbitmq's x-max-priority, it supports task priority. See section 4.29 of the docs on priority queues.
-
     SQLACHEMY = 'SQLACHEMY'  # Uses an SQLACHEMY connection as the distributed broker; supports persistence and consume acknowledgement. Supports 5 databases such as mysql, oracle and sqlserver.

     ROCKETMQ = 'ROCKETMQ'  # Uses rocketmq as the distributed message queue; this broker must run on linux, win is not supported.

-    REDIS_STREAM = 'REDIS_STREAM'  # For redis 5.0 and later, uses the stream data structure as the distributed message queue; supports consume acknowledgement, persistence and consumer groups. It is the message-queue form recommended by redis itself and fits better than the list structure.
-
     ZEROMQ = 'ZEROMQ'  # Uses zeromq as the distributed message queue; no broker needs to be installed, it works across machines but there is no persistence.

-
+

     """
     Drives the kombu package, which is also the broker dependency of celery; it can drive 10 kinds of brokers (for example rabbitmq and redis), but not the kafka, nsq, zeromq and so on of this distributed function scheduling framework.

@@ -69,7 +69,6 @@

     PEEWEE = 'PEEWEE'  # The peewee package driving mysql, using a table to simulate a message queue

-    REDIS_PUBSUB = 'REDIS_PUBSUB'  # Based on redis publish/subscribe; one published message is received by every consumer, but persistence is not supported

     CELERY = 'CELERY'  # funboost can publish and consume tasks through the celery framework, letting celery schedule and run them, while the usage is far simpler than driving celery by hand,
     # users never need to care about or operate the Celery app instance, nor about celery's task_routes and include settings; funboost sets these celery configurations automatically.
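The enum entries above are selected through the broker_kind parameter of BoosterParams. A minimal sketch, not taken from the diff, following the @boost(BoosterParams(...)) form quoted later in this release's new consumer docstring; the queue name and function are illustrative.

from funboost import boost, BoosterParams, BrokerEnum

@boost(BoosterParams(queue_name='order_queue',              # illustrative queue name
                     broker_kind=BrokerEnum.REDIS_ACK_ABLE,  # pick any entry from BrokerEnum above
                     concurrent_num=5))
def handle_order(order_id: int):
    print(f'handling order {order_id}')

if __name__ == '__main__':
    handle_order.push(123)    # publish one message to the chosen broker
    handle_order.consume()    # start consuming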
funboost/consumers/base_consumer.py
CHANGED

@@ -30,13 +30,13 @@ from threading import Lock
 import asyncio

 import nb_log
-from funboost.core.current_task import funboost_current_task
+from funboost.core.current_task import funboost_current_task
 from funboost.core.loggers import develop_logger

 from funboost.core.func_params_model import BoosterParams, PublisherParams, BaseJsonAbleModel
 from funboost.core.task_id_logger import TaskIdLogger
-from
-
+from funboost.utils.json_helper import JsonUtils
+from nb_log import (get_logger, LoggerLevelSetterMixin, LogManager, is_main_process,
                     nb_log_config_default)
 from funboost.core.loggers import FunboostFileLoggerMixin, logger_prompt

@@ -332,7 +332,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         try:
             self._concurrent_mode_dispatcher.check_all_concurrent_mode()
             self._check_monkey_patch()
-        except BaseException:
+        except BaseException:  # noqa
             traceback.print_exc()
             os._exit(4444)  # noqa
         self.logger.info(f'Start consuming messages from {self._queue_name}')

@@ -384,10 +384,10 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         """
         raise NotImplementedError

-    def convert_msg_before_run(self, msg: typing.Union[str,dict]):
+    def convert_msg_before_run(self, msg: typing.Union[str, dict]) -> dict:
         """
         Convert the message when it was not published through funboost and has none of the extra-related fields.
-        Users can also, following section 4.21 of the docs, subclass any Consumer class and implement this convert_msg_before_run method
+        Users can also, following section 4.21 of the docs, subclass any Consumer class and implement this convert_msg_before_run method to normalize non-conforming messages first.
         """
         """ A message generally contains at least
         {

@@ -405,7 +405,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         extra_params = {'task_id': task_id, 'publish_time': round(time.time(), 4),
                         'publish_time_format': time.strftime('%Y-%m-%d %H:%M:%S')}
         """
-        if isinstance(msg,str):
+        if isinstance(msg, str):
             msg = json.loads(msg)
         # Clean up and fill in the missing fields below.
         if 'extra' not in msg:

@@ -429,9 +429,8 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
                     self._last_show_pause_log_time = time.time()
                 else:
                     break
-
-        self._print_message_get_from_broker(
-        kw['body'] = self.convert_msg_before_run(msg)
+        kw['body'] = self.convert_msg_before_run(kw['body'])
+        self._print_message_get_from_broker(kw['body'])
         if self._judge_is_daylight():
             self._requeue(kw)
             time.sleep(self.time_interval_for_check_do_not_run_time)

@@ -530,13 +529,11 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         if self._has_execute_times_in_recent_second >= qpsx:
             time.sleep((1 - (time.time() - self._last_start_count_qps_timestamp)) * 1)

-    def _print_message_get_from_broker(self, msg,broker_name=None):
+    def _print_message_get_from_broker(self, msg, broker_name=None):
         # print(999)
         if self.consumer_params.is_show_message_get_from_broker:
-            if isinstance(msg, (dict, list)):
-                msg = json.dumps(msg, ensure_ascii=False)
             # self.logger.debug(f'The message taken from the {self._queue_name} queue of the {broker_name} broker is {msg}')
-            self.logger.debug(f'The message taken from the {self._queue_name} queue of the {broker_name or self.consumer_params.broker_kind} broker is {msg}')
+            self.logger.debug(f'The message taken from the {self._queue_name} queue of the {broker_name or self.consumer_params.broker_kind} broker is {JsonUtils.to_json_str(msg)}')

     def _get_priority_conf(self, kw: dict, broker_task_config_key: str):
         broker_task_config = kw['body'].get('extra', {}).get(broker_task_config_key, None)

@@ -624,13 +621,15 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
             msg = f'The function [ {self.consuming_function.__name__} ] was executed {self._execute_task_times_every_unit_time} times in {self._unit_time_for_count} seconds, ' \
                   f'with an average run time of {avarage_function_spend_time} seconds. '
             self.logger.info(msg)
-            if
-
-
-
-
-
-
+            if time.time() - self._last_show_remaining_execution_time > self._show_remaining_execution_time_interval:
+                self._msg_num_in_broker = self.publisher_of_same_queue.get_message_count()
+                if self._msg_num_in_broker != -1:  # Some brokers cannot count, or do not implement counting, the number of remaining messages and uniformly return -1; do not show this sentence for them.
+                    # msg += f''' , an estimated {time_util.seconds_to_hour_minute_second(self._msg_num_in_broker * avarage_function_spend_time / active_consumer_num)} is still needed to finish the {self._msg_num_in_broker} remaining tasks'''
+                    need_time = time_util.seconds_to_hour_minute_second(self._msg_num_in_broker / (self._execute_task_times_every_unit_time / self._unit_time_for_count) /
+                                                                        self._distributed_consumer_statistics.active_consumer_num)
+                    msg += f''' An estimated {need_time} is still needed to finish the {self._msg_num_in_broker} remaining tasks in queue {self.queue_name}'''
+                    self.logger.info(msg)
+                self._last_show_remaining_execution_time = time.time()
             self._current_time_for_execute_task_times_every_unit_time = time.time()
             self._consuming_function_cost_time_total_every_unit_time = 0
             self._execute_task_times_every_unit_time = 0

@@ -830,7 +829,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
                 self.logger.critical(msg=log_msg)
                 # noinspection PyProtectedMember,PyUnresolvedReferences
                 os._exit(444)
-        if not self.consumer_params.function_timeout
+        if not self.consumer_params.function_timeout:
             rs = await corotinue_obj
             # rs = await asyncio.wait_for(corotinue_obj, timeout=4)
         else:
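The convert_msg_before_run hook changed above is the extension point for messages that were not published by funboost. Below is a hedged sketch of the kind of subclass the docstring points to (section 4.21 of the docs); MyNormalizingConsumer and the parameter name 'x' are hypothetical, the base class is the new consumer added later in this diff, and funboost is assumed to fill in the missing extra fields afterwards.

import json
import typing

from funboost.consumers.redis_consumer_ack_using_timeout import RedisConsumerAckUsingTimeout

class MyNormalizingConsumer(RedisConsumerAckUsingTimeout):
    def convert_msg_before_run(self, msg: typing.Union[str, dict]) -> dict:
        # Messages pushed by other systems may be a bare payload such as '{"x": 1}';
        # turn them into the keyword-argument dict the consuming function expects.
        if isinstance(msg, str):
            msg = json.loads(msg)
        if 'x' not in msg:          # 'x' is an assumed parameter name
            msg = {'x': msg}        # purely illustrative normalization
        return msg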
funboost/consumers/redis_consumer_ack_using_timeout.py
ADDED

@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+# @Author  : ydf
+# @Time    : 2024/8/8 0008 13:32
+import json
+import time
+from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.utils.decorators import RedisDistributedLockContextManager
+from funboost.utils.json_helper import JsonUtils
+from funboost.utils.redis_manager import RedisMixin
+
+
+class RedisConsumerAckUsingTimeout(AbstractConsumer, RedisMixin):
+    """
+    Implemented with redis as the broker.
+    A message that cannot be acked within the timeout is automatically put back into the queue. For example, after a message has been taken out, a sudden power cut, a restart or some other failure means it can never be acked; once the timeout has passed it is put back into the message queue.
+    """
+
+    BROKER_EXCLUSIVE_CONFIG_DEFAULT = {'ack_timeout': 3600}
+
+    # The ack timeout of RedisConsumerAckUsingTimeout means: if a message is still unacked this many seconds after being taken out, it is automatically requeued. This value must be larger than the function run time, otherwise messages are requeued endlessly.
+    '''Usage: to set ack_timeout, pass it through broker_exclusive_config, which overrides the 3600 here; users do not need to edit BROKER_EXCLUSIVE_CONFIG_DEFAULT in the source.
+    @boost(BoosterParams(queue_name='test_redis_ack__use_timeout', broker_kind=BrokerEnum.REIDS_ACK_USING_TIMEOUT,
+                         concurrent_num=5, log_level=20, broker_exclusive_config={'ack_timeout': 30}))
+    '''
+
+    def custom_init(self):
+        self._unack_zset_name = f'{self._queue_name}__unack_using_timeout'
+        self._ack_timeout = self.consumer_params.broker_exclusive_config['ack_timeout']
+        self._last_show_unack_ts = time.time()
+
+    def start_consuming_message(self):
+        self._is_send_consumer_hearbeat_to_redis = True
+        super().start_consuming_message()
+        self.keep_circulating(10, block=False)(self._requeue_tasks_which_unconfirmed)()
+
+    # def _add_task_str_to_unack_zset(self, task_str, ):
+    #     self.redis_db_frame.zadd(self._unack_zset_name, {task_str: time.time()})
+
+    def _confirm_consume(self, kw):
+        self.redis_db_frame.zrem(self._unack_zset_name, kw['task_str'])
+
+    def _requeue(self, kw):
+        self.redis_db_frame.rpush(self._queue_name, JsonUtils.to_json_str(kw['body']))
+
+    def _shedual_task(self):
+        lua = '''
+        local v = redis.call("lpop", KEYS[1])
+        if v then
+            redis.call('zadd',KEYS[2],ARGV[1],v)
+        end
+        return v
+        '''
+        script = self.redis_db_frame.register_script(lua)
+        while True:
+            return_v = script(keys=[self._queue_name, self._unack_zset_name], args=[time.time()])
+            if return_v:
+                task_str = return_v
+                kw = {'body': task_str, 'task_str': task_str}
+                self._submit_task(kw)
+            else:
+                time.sleep(0.1)
+
+    def _requeue_tasks_which_unconfirmed(self):
+        """Better not to use this kind of scheme; it does not suit functions that inherently need a long run time, it is rigid."""
+        # Use a distributed lock to prevent several processes or machines from scanning and requeueing unconfirmed tasks at the same time.
+        lock_key = f'funboost_lock__requeue_tasks_which_unconfirmed_timeout:{self._queue_name}'
+        with RedisDistributedLockContextManager(self.redis_db_frame, lock_key, ) as lock:
+            if lock.has_aquire_lock:
+                time_max = time.time() - self._ack_timeout
+                for value in self.redis_db_frame.zrangebyscore(self._unack_zset_name, 0, time_max):
+                    self.logger.warning(f'Not acknowledged within {self._ack_timeout} seconds; putting the unconfirmed task {value} back into the {self._queue_name} queue,')
+                    self._requeue({'body': value})
+                    self.redis_db_frame.zrem(self._unack_zset_name, value)
+                if time.time() - self._last_show_unack_ts > 600:  # Do not prompt too often.
+                    self.logger.info(f'The number of tasks awaiting consume acknowledgement in {self._unack_zset_name} is'
+                                     f' {self.redis_db_frame.zcard(self._unack_zset_name)}')
+                    self._last_show_unack_ts = time.time()
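For reference, the usage pattern from the class docstring above written out as a runnable module; the queue name comes from that docstring, the function body is illustrative, and an installed funboost plus a reachable redis are assumed.

from funboost import boost, BoosterParams, BrokerEnum

@boost(BoosterParams(queue_name='test_redis_ack__use_timeout',
                     broker_kind=BrokerEnum.REIDS_ACK_USING_TIMEOUT,
                     concurrent_num=5, log_level=20,
                     broker_exclusive_config={'ack_timeout': 30}))  # must exceed the function's run time
def slow_task(x):
    print(x)

if __name__ == '__main__':
    slow_task.push(1)       # message is lpop'ed into the unack zset when taken out
    slow_task.consume()     # unacked messages are requeued after ack_timeout seconds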
funboost/contrib/api_publish_msg.py
CHANGED

@@ -1,7 +1,7 @@
 import traceback
 import typing

-from funboost import AioAsyncResult, AsyncResult
+from funboost import AioAsyncResult, AsyncResult,PriorityConsumingControlConfig

 from funboost.core.cli.discovery_boosters import BoosterDiscovery
 from funboost import BoostersManager

@@ -39,11 +39,12 @@ async def publish_msg(msg_item: MsgItem):
         if msg_item.need_result:
             if booster.boost_params.is_using_rpc_mode is False:
                 raise ValueError(f' need_result is true, but the consumer of queue {booster.queue_name} must enable rpc mode in its @boost settings')
-            async_result = booster.
+            async_result = await booster.aio_publish(msg_item.msg_body,priority_control_config=PriorityConsumingControlConfig(is_using_rpc_mode=True))
             status_and_result = await AioAsyncResult(async_result.task_id, timeout=msg_item.timeout).status_and_result
+            print(status_and_result)
             # status_and_result = AsyncResult(async_result.task_id, timeout=msg_item.timeout).status_and_result
         else:
-            booster.
+            await booster.aio_publish(msg_item.msg_body)
         return PublishResponse(succ=True, msg=f'Queue {msg_item.queue_name}: message published successfully', status_and_result=status_and_result)
     except Exception as e:
         return PublishResponse(succ=False, msg=f'Queue {msg_item.queue_name}: message publishing failed {type(e)} {e} {traceback.format_exc()}',
funboost/core/booster.py
CHANGED
@@ -4,6 +4,8 @@ import os
 import types
 import typing

+from funboost.concurrent_pool import FlexibleThreadPool
+from funboost.concurrent_pool.async_helper import simple_run_in_executor
 from funboost.utils.ctrl_c_end import ctrl_c_recv
 from funboost.core.loggers import flogger, develop_logger, logger_prompt

@@ -15,8 +17,8 @@ from funboost.core.func_params_model import BoosterParams, FunctionResultStatusP
 from funboost.factories.consumer_factory import get_consumer
 from collections import defaultdict

-
-
+
+from funboost.core.msg_result_getter import AsyncResult, AioAsyncResult


 class Booster:

@@ -123,6 +125,19 @@ class Booster:
         consumer = BoostersManager.get_or_create_booster_by_queue_name(self.queue_name).consumer
         return consumer.publisher_of_same_queue.publish(msg=msg, task_id=task_id, priority_control_config=priority_control_config)

+    async def aio_push(self, *func_args, **func_kwargs) -> AioAsyncResult:
+        """Publish a message from the asyncio ecosystem. Because a synchronous push costs well under 1 millisecond, you can generally just call the synchronous push directly in asyncio code,
+        but to guard better against network jitter (for example, publishing to a message queue on an external network can take 10 milliseconds), aio_push can be used."""
+        async_result = await simple_run_in_executor(self.push, *func_args, **func_kwargs)
+        return AioAsyncResult(async_result.task_id, )
+
+    async def aio_publish(self, msg: typing.Union[str, dict], task_id=None,
+                          priority_control_config: PriorityConsumingControlConfig = None) -> AioAsyncResult:
+        """Publish a message from the asyncio ecosystem. Because a synchronous push costs well under 1 millisecond, you can generally just call the synchronous push directly in asyncio code,
+        but to guard better against network jitter (for example, publishing to a message queue on an external network can take 10 milliseconds), aio_push can be used."""
+        async_result = await simple_run_in_executor(self.publish,msg,task_id,priority_control_config)
+        return AioAsyncResult(async_result.task_id, )
+
     # noinspection PyMethodMayBeStatic
     def multi_process_consume(self, process_num=1):
         """Ultra-fast multi-process consuming"""
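A short sketch of the new aio_push from asyncio code; it is not part of the diff, the queue name, function and arguments are illustrative, and @boost is assumed to return a Booster as in normal funboost usage.

import asyncio

from funboost import boost, BoosterParams, BrokerEnum

@boost(BoosterParams(queue_name='aio_push_demo', broker_kind=BrokerEnum.REDIS_ACK_ABLE))
def add(a, b):
    return a + b

async def producer():
    # aio_push runs the synchronous push in the helper executor and wraps the
    # resulting task id in an AioAsyncResult.
    aio_result = await add.aio_push(1, 2)   # returns an AioAsyncResult
    print(aio_result)

# asyncio.run(producer())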
funboost/core/funboost_time.py
CHANGED
@@ -13,16 +13,33 @@ class FunboostTime(NbTime):
     def get_time_zone_str(self,time_zone: typing.Union[str, datetime.tzinfo,None] = None):
         return time_zone or self.default_time_zone or FunboostCommonConfig.TIMEZONE or self.get_localzone_name()

+    @staticmethod
+    def _get_tow_digist(num:int)->str:
+        if len(str(num)) ==1:
+            return f'0{num}'
+        return str(num)
+
+    def get_str(self, formatter=None):
+        return self.datetime_obj.strftime(formatter or self.datetime_formatter)
+
+    def get_str_fast(self):
+        t_str = f'{self.datetime_obj.year}-{self._get_tow_digist(self.datetime_obj.month)}-{self._get_tow_digist(self.datetime_obj.day)} {self._get_tow_digist(self.datetime_obj.hour)}:{self._get_tow_digist(self.datetime_obj.minute)}:{self._get_tow_digist(self.datetime_obj.second)}'
+        return t_str


 if __name__ == '__main__':
-    print(
-
-
-
-    #
-
-
-
-    datetime.datetime.now(tz=
+    print(FunboostTime().get_str())
+    tz=pytz.timezone(FunboostCommonConfig.TIMEZONE)
+    for i in range(1000000):
+        pass
+        # FunboostTime()#.get_str_fast()
+
+        # datetime.datetime.now().strftime(NbTime.FORMATTER_DATETIME_NO_ZONE)
+        tz = pytz.timezone(FunboostCommonConfig.TIMEZONE)
+        datetime.datetime.now(tz=tz)
+        # datetime.datetime.now(tz=pytz.timezone(FunboostCommonConfig.TIMEZONE))#.strftime(NbTime.FORMATTER_DATETIME_NO_ZONE)
+        # datetime.datetime.now(tz=pytz.timezone(FunboostCommonConfig.TIMEZONE)).timestamp()
+
+        # time.strftime(NbTime.FORMATTER_DATETIME_NO_ZONE)
+        # time.time()
     print(NbTime())
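A quick illustration, not from the diff, of the two new formatting methods; it assumes FunboostTime() with no arguments represents the current time in the configured funboost timezone.

from funboost.core.funboost_time import FunboostTime

t = FunboostTime()
print(t.get_str('%Y-%m-%d %H:%M:%S'))  # strftime-based formatting
print(t.get_str_fast())                # same 'YYYY-MM-DD HH:MM:SS' shape, built without strftime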
funboost/core/helper_funs.py
CHANGED
@@ -1,7 +1,8 @@
 import copy
+import pytz
 import time
 import uuid
-
+import datetime
 from funboost.core.funboost_time import FunboostTime


@@ -46,7 +47,7 @@ class MsgGenerater:

     @staticmethod
     def generate_publish_time() -> float:
-        return round(
+        return round(time.time(),4)

     @staticmethod
     def generate_publish_time_format() -> str:

@@ -59,3 +60,16 @@ class MsgGenerater:
         return extra_params


+
+if __name__ == '__main__':
+
+    from funboost import FunboostCommonConfig
+
+    print(FunboostTime())
+    for i in range(1000000):
+        # time.time()
+        # MsgGenerater.generate_publish_time_format()
+
+        datetime.datetime.now(tz=pytz.timezone(FunboostCommonConfig.TIMEZONE)).strftime(FunboostTime.FORMATTER_DATETIME_NO_ZONE)
+
+    print(FunboostTime())
funboost/core/lazy_impoter.py
CHANGED
funboost/core/loggers.py
CHANGED
@@ -40,11 +40,11 @@ class FunboostMetaTypeFileLogger(type):
         cls.logger: logging.Logger = get_funboost_file_logger(name)


-
+nb_log.LogManager('_KeepAliveTimeThread').preset_log_level(_try_get_user_funboost_common_config('KEEPALIVETIMETHREAD_LOG_LEVEL') or logging.DEBUG)

+flogger = get_funboost_file_logger('funboost', )
 # print(_try_get_user_funboost_common_config('FUNBOOST_PROMPT_LOG_LEVEL'))
-logger_prompt = get_funboost_file_logger('funboost.prompt', log_level_int=_try_get_user_funboost_common_config('FUNBOOST_PROMPT_LOG_LEVEL') or logging.DEBUG)
-nb_log.LogManager('_KeepAliveTimeThread').preset_log_level(_try_get_user_funboost_common_config('KEEPALIVETIMETHREAD_LOG_LEVEL') or logging.DEBUG)
+logger_prompt = get_funboost_file_logger('funboost.prompt', log_level_int=_try_get_user_funboost_common_config('FUNBOOST_PROMPT_LOG_LEVEL') or logging.DEBUG)

 # Debug log for development; more convenient than print because it can be silenced in one go by level.
 develop_logger = get_logger('funboost_develop', log_level_int=logging.WARNING, log_filename='funboost_develop.log')
funboost/factories/broker_kind__publsiher_consumer_type_map.py
CHANGED

@@ -1,5 +1,6 @@
 import typing

+
 from funboost.publishers.http_publisher import HTTPPublisher
 from funboost.publishers.nats_publisher import NatsPublisher
 from funboost.publishers.peewee_publisher import PeeweePublisher

@@ -48,6 +49,7 @@ from funboost.consumers.udp_consumer import UDPConsumer
 from funboost.consumers.zeromq_consumer import ZeroMqConsumer
 from funboost.consumers.mqtt_consumer import MqttConsumer
 from funboost.consumers.httpsqs_consumer import HttpsqsConsumer
+from funboost.consumers.redis_consumer_ack_using_timeout import RedisConsumerAckUsingTimeout

 from funboost.publishers.base_publisher import AbstractPublisher
 from funboost.consumers.base_consumer import AbstractConsumer

@@ -77,6 +79,7 @@ broker_kind__publsiher_consumer_type_map = {
     BrokerEnum.TXT_FILE: (TxtFilePublisher, TxtFileConsumer),
     BrokerEnum.PEEWEE: (PeeweePublisher, PeeweeConsumer),
     BrokerEnum.REDIS_PUBSUB: (RedisPubSubPublisher, RedisPbSubConsumer),
+    BrokerEnum.REIDS_ACK_USING_TIMEOUT: (RedisPublisher, RedisConsumerAckUsingTimeout),

 }

@@ -163,8 +166,6 @@ def regist_to_funboost(broker_kind: str):
         register_custom_broker(broker_kind, NsqPublisher, NsqConsumer)


-
-
 if __name__ == '__main__':
     import sys
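register_custom_broker, called above for the optional NSQ broker, is also how a user-defined publisher/consumer pair can be wired in. The sketch below is not from the diff: the import path for register_custom_broker is assumed to be this factories module, the two classes and the 'MY_QUEUE' name are hypothetical, and their abstract methods (the consumer side needs _shedual_task/_confirm_consume/_requeue, as seen in the redis consumer above) still have to be implemented before use.

from funboost.factories.broker_kind__publsiher_consumer_type_map import register_custom_broker
from funboost.publishers.base_publisher import AbstractPublisher
from funboost.consumers.base_consumer import AbstractConsumer

class MyQueuePublisher(AbstractPublisher):
    """Hypothetical publisher; implement the AbstractPublisher abstract methods here."""

class MyQueueConsumer(AbstractConsumer):
    """Hypothetical consumer; implement _shedual_task / _confirm_consume / _requeue here."""

# After registration, 'MY_QUEUE' can be passed as broker_kind in BoosterParams.
register_custom_broker('MY_QUEUE', MyQueuePublisher, MyQueueConsumer)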
funboost/funboost_config_deafult.py
CHANGED

@@ -87,7 +87,7 @@ class BrokerConnConfig(DataClassBase):
     KOMBU_URL = 'redis://127.0.0.1:6379/9'  # This is the message-queue url format used by kombu, the dependency package of celery, so funboost supports every kind of message queue that celery supports.
     # KOMBU_URL = 'sqla+sqlite:////dssf_kombu_sqlite.sqlite'  # Four slashes //// mean a file generated in the disk root directory; an absolute path is recommended. Three slashes /// mean a relative path.

-    CELERY_BROKER_URL = 'redis://127.0.0.1:6379/12'  # Use celery as the middleware. funboost newly supports celery
+    CELERY_BROKER_URL = 'redis://127.0.0.1:6379/12'  # Use celery as the middleware. funboost newly supports running functions through the celery framework; the url content is celery's broker form.
     CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/13'  # Where celery results are stored; may be None.

     DRAMATIQ_URL = RABBITMQ_URL
funboost/function_result_web/__pycache__/functions.cpython-39.pyc
CHANGED
Binary file
funboost/utils/decorators.py
CHANGED
|
@@ -332,6 +332,7 @@ class RedisDistributedLockContextManager(LoggerMixin, LoggerLevelSetterMixin):
         self._expire_seconds = expire_seconds
         self.identifier = str(uuid.uuid4())
         self.has_aquire_lock = False
+        self.logger.setLevel(logging.INFO)

     def __enter__(self):
         self._line = sys._getframe().f_back.f_lineno  # the line of the code that calls this method
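The context manager patched above is what the new timeout consumer uses as its distributed lock. A small sketch of the same pattern outside funboost's internals, assuming a redis-py style client object for the first argument (the consumer passes its own self.redis_db_frame); the key name is illustrative.

import redis

from funboost.utils.decorators import RedisDistributedLockContextManager

r = redis.Redis()                                 # assumes a local redis instance
lock_key = 'funboost_lock__demo:my_queue'         # illustrative lock key

with RedisDistributedLockContextManager(r, lock_key) as lock:
    if lock.has_aquire_lock:                      # attribute name as spelled in the source
        print('got the lock, do the periodic scan here')
    else:
        print('another process holds the lock, skip this round')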
funboost/utils/dependency_packages_in_pythonpath/__pycache__/__init__.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/__pycache__/add_to_pythonpath.cpython-39.pyc
CHANGED
Binary file
funboost/utils/dependency_packages_in_pythonpath/aioredis/__init__.py
CHANGED

@@ -1,59 +1,59 @@
(All 59 lines of this vendored aioredis __init__.py are shown removed and re-added with identical text, i.e. a whitespace/line-ending-only change. The file continues to re-export Redis and StrictRedis, the connection classes (BlockingConnectionPool, Connection, ConnectionPool, SSLConnection, UnixDomainSocketConnection), the aioredis exception classes, from_url and the int_or_str helper, sets __version__ = "2.0.1" and VERSION, and lists all of these names in __all__.)
funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/__init__.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/client.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/compat.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/connection.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/exceptions.cpython-39.pyc
CHANGED
Binary file

funboost/utils/dependency_packages_in_pythonpath/aioredis/__pycache__/lock.cpython-39.pyc
CHANGED
Binary file