funboost 18.9__py3-none-any.whl → 19.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of funboost might be problematic; see the registry's advisory page for more details.

@@ -106,6 +106,8 @@ class FunctionResultStatus(LoggerMixin, LoggerLevelSetterMixin):
106
106
  script_name_long = sys.argv[0]
107
107
  script_name = script_name_long.split('/')[-1].split('\\')[-1]
108
108
 
109
+ FUNC_RUN_ERROR = 'FUNC_RUN_ERROR'
110
+
109
111
  def __init__(self, queue_name: str, fucntion_name: str, msg_dict: dict):
110
112
  # print(params)
111
113
  self.queue_name = queue_name
@@ -172,8 +174,9 @@ class ResultPersistenceHelper(MongoMixin, LoggerMixin):
172
174
  self._bulk_list_lock = Lock()
173
175
  self._last_bulk_insert_time = 0
174
176
  self._has_start_bulk_insert_thread = False
177
+ self._queue_name = queue_name
175
178
  if self.function_result_status_persistance_conf.is_save_status:
176
- task_status_col = self.mongo_db_task_status.get_collection(queue_name)
179
+ task_status_col = self.get_mongo_collection('task_status', queue_name)
177
180
  try:
178
181
  # params_str 如果很长,必须使用TEXt或HASHED索引。
179
182
  task_status_col.create_indexes([IndexModel([("insert_time_str", -1)]), IndexModel([("insert_time", -1)]),
@@ -184,11 +187,11 @@ class ResultPersistenceHelper(MongoMixin, LoggerMixin):
184
187
  except pymongo.errors.OperationFailure as e: # 新的mongo服务端,每次启动重复创建已存在索引会报错,try一下。
185
188
  self.logger.warning(e)
186
189
  # self._mongo_bulk_write_helper = MongoBulkWriteHelper(task_status_col, 100, 2)
187
- self.task_status_col = task_status_col
188
190
  self.logger.info(f"函数运行状态结果将保存至mongo的 task_status 库的 {queue_name} 集合中,请确认 funboost.py文件中配置的 MONGO_CONNECT_URL")
189
191
 
190
192
  def save_function_result_to_mongo(self, function_result_status: FunctionResultStatus):
191
193
  if self.function_result_status_persistance_conf.is_save_status:
194
+ task_status_col = self.get_mongo_collection('task_status', self._queue_name)
192
195
  item = function_result_status.get_status_dict()
193
196
  item2 = copy.copy(item)
194
197
  if not self.function_result_status_persistance_conf.is_save_result:
@@ -211,12 +214,13 @@ class ResultPersistenceHelper(MongoMixin, LoggerMixin):
211
214
  daemon=False)(self._bulk_insert)()
212
215
  self.logger.warning(f'启动批量保存函数消费状态 结果到mongo的 线程')
213
216
  else:
214
- self.task_status_col.insert_one(item2) # 立即实时插入。
217
+ task_status_col.insert_one(item2) # 立即实时插入。
215
218
 
216
219
  def _bulk_insert(self):
217
220
  with self._bulk_list_lock:
218
221
  if time.time() - self._last_bulk_insert_time > 0.5 and self._bulk_list:
219
- self.task_status_col.bulk_write(self._bulk_list, ordered=False)
222
+ task_status_col = self.get_mongo_collection('task_status', self._queue_name)
223
+ task_status_col.bulk_write(self._bulk_list, ordered=False)
220
224
  self._bulk_list.clear()
221
225
  self._last_bulk_insert_time = time.time()
222
226
 
@@ -689,13 +693,14 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
689
693
  f'函数 {self.consuming_function.__name__} 达到最大重试次数 {self._get_priority_conf(kw, "max_retry_times")} 后,仍然失败, 入参是 {function_only_params} ')
690
694
  if self._get_priority_conf(kw, 'is_using_rpc_mode'):
691
695
  # print(function_result_status.get_status_dict(without_datetime_obj=
692
- with RedisMixin().redis_db_filter_and_rpc_result.pipeline() as p:
693
- # RedisMixin().redis_db_frame.lpush(kw['body']['extra']['task_id'], json.dumps(function_result_status.get_status_dict(without_datetime_obj=True)))
694
- # RedisMixin().redis_db_frame.expire(kw['body']['extra']['task_id'], 600)
695
- p.lpush(kw['body']['extra']['task_id'],
696
- json.dumps(current_function_result_status.get_status_dict(without_datetime_obj=True)))
697
- p.expire(kw['body']['extra']['task_id'], 600)
698
- p.execute()
696
+ if (current_function_result_status.success is False and current_retry_times == max_retry_times) or current_function_result_status.success is True:
697
+ with RedisMixin().redis_db_filter_and_rpc_result.pipeline() as p:
698
+ # RedisMixin().redis_db_frame.lpush(kw['body']['extra']['task_id'], json.dumps(function_result_status.get_status_dict(without_datetime_obj=True)))
699
+ # RedisMixin().redis_db_frame.expire(kw['body']['extra']['task_id'], 600)
700
+ p.lpush(kw['body']['extra']['task_id'],
701
+ json.dumps(current_function_result_status.get_status_dict(without_datetime_obj=True)))
702
+ p.expire(kw['body']['extra']['task_id'], 600)
703
+ p.execute()
699
704
 
700
705
  with self._lock_for_count_execute_task_times_every_unit_time:
701
706
  self._execute_task_times_every_unit_time += 1
@@ -753,6 +758,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
753
758
  exc_info=self._get_priority_conf(kw, 'is_print_detail_exception'))
754
759
  # traceback.print_exc()
755
760
  function_result_status.exception = f'{e.__class__.__name__} {str(e)}'
761
+ function_result_status.result = FunctionResultStatus.FUNC_RUN_ERROR
756
762
  return function_result_status
757
763
 
758
764
  async def _async_run(self, kw: dict, ):
@@ -791,7 +797,8 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
791
797
  p.expire(kw['body']['extra']['task_id'], 600)
792
798
  p.execute()
793
799
 
794
- await simple_run_in_executor(push_result)
800
+ if (current_function_result_status.success is False and current_retry_times == max_retry_times) or current_function_result_status.success is True:
801
+ await simple_run_in_executor(push_result)
795
802
 
796
803
  # 异步执行不存在线程并发,不需要加锁。
797
804
  self._execute_task_times_every_unit_time += 1
@@ -856,6 +863,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
856
863
  f'函数运行时间是 {round(time.time() - t_start, 4)} 秒,\n 入参是 {function_only_params} \n 原因是 {type(e)} {e} ',
857
864
  exc_info=self._get_priority_conf(kw, 'is_print_detail_exception'))
858
865
  function_result_status.exception = f'{e.__class__.__name__} {str(e)}'
866
+ function_result_status.result = FunctionResultStatus.FUNC_RUN_ERROR
859
867
  return function_result_status
860
868
 
861
869
  @abc.abstractmethod
@@ -15,7 +15,7 @@ from funboost.utils.mongo_util import MongoMixin
15
15
  #
16
16
  # do_patch_frame_config()
17
17
 
18
- db = MongoMixin().mongo_db_task_status
18
+
19
19
 
20
20
 
21
21
  # print(db)
@@ -23,6 +23,7 @@ db = MongoMixin().mongo_db_task_status
23
23
  # print(db.list_collection_names())
24
24
 
25
25
  def get_cols(col_name_search: str):
26
+ db = MongoMixin().mongo_db_task_status
26
27
  if not col_name_search:
27
28
  collection_name_list = db.list_collection_names()
28
29
  else:
@@ -35,6 +36,7 @@ def get_cols(col_name_search: str):
35
36
 
36
37
 
37
38
  def query_result(col_name, start_time, end_time, is_success, function_params: str, page, ):
39
+ db = MongoMixin().mongo_db_task_status
38
40
  condition = {
39
41
  'insert_time': {'$gt': time_util.DatetimeConverter(start_time).datetime_obj,
40
42
  '$lt': time_util.DatetimeConverter(end_time).datetime_obj},
@@ -59,6 +61,7 @@ def query_result(col_name, start_time, end_time, is_success, function_params: st
59
61
 
60
62
 
61
63
  def get_speed(col_name, start_time, end_time):
64
+ db = MongoMixin().mongo_db_task_status
62
65
  condition = {
63
66
  'insert_time': {'$gt': time_util.DatetimeConverter(start_time).datetime_obj,
64
67
  '$lt': time_util.DatetimeConverter(end_time).datetime_obj},
@@ -78,6 +81,7 @@ def get_speed(col_name, start_time, end_time):
78
81
 
79
82
  class Statistic(LoggerMixin):
80
83
  def __init__(self, col_name):
84
+ db = MongoMixin().mongo_db_task_status
81
85
  self.col = db.get_collection(col_name)
82
86
  self.result = {'recent_10_days': {'time_arr': [], 'count_arr': []},
83
87
  'recent_24_hours': {'time_arr': [], 'count_arr': []},
@@ -12,8 +12,13 @@ class MongoMqPublisher(AbstractPublisher, MongoMixin):
12
12
  # 使用mongo-queue包实现的基于mongodb的队列。 队列是一个col,自动存放在consume_queues库中。
13
13
  # noinspection PyAttributeOutsideInit
14
14
  def custom_init(self):
15
- self.queue = MongoQueue(
16
- self.mongo_client.get_database('consume_queues').get_collection(self._queue_name),
15
+ pass
16
+
17
+ @property
18
+ def queue(self):
19
+ return MongoQueue(
20
+ # self.mongo_client.get_database('consume_queues').get_collection(self._queue_name),
21
+ self.get_mongo_collection('consume_queues', self._queue_name),
17
22
  consumer_id=f"consumer-{time_util.DatetimeConverter().datetime_str}",
18
23
  timeout=600,
19
24
  max_attempts=3,
@@ -29,7 +34,7 @@ class MongoMqPublisher(AbstractPublisher, MongoMixin):
29
34
 
30
35
  def get_message_count(self):
31
36
  # return self.queue.size()
32
- return self.queue.collection.count_documents({'status':'queued'})
37
+ return self.queue.collection.count_documents({'status': 'queued'})
33
38
 
34
39
  def close(self):
35
40
  pass
@@ -0,0 +1,59 @@
1
+ from funboost.utils.dependency_packages.aioredis_adapt_py311.client import Redis, StrictRedis
2
+ from funboost.utils.dependency_packages.aioredis_adapt_py311.connection import (
3
+ BlockingConnectionPool,
4
+ Connection,
5
+ ConnectionPool,
6
+ SSLConnection,
7
+ UnixDomainSocketConnection,
8
+ )
9
+ from funboost.utils.dependency_packages.aioredis_adapt_py311.exceptions import (
10
+ AuthenticationError,
11
+ AuthenticationWrongNumberOfArgsError,
12
+ BusyLoadingError,
13
+ ChildDeadlockedError,
14
+ ConnectionError,
15
+ DataError,
16
+ InvalidResponse,
17
+ PubSubError,
18
+ ReadOnlyError,
19
+ RedisError,
20
+ ResponseError,
21
+ TimeoutError,
22
+ WatchError,
23
+ )
24
+ from funboost.utils.dependency_packages.aioredis_adapt_py311.utils import from_url
25
+
26
+
27
+ def int_or_str(value):
28
+ try:
29
+ return int(value)
30
+ except ValueError:
31
+ return value
32
+
33
+
34
+ __version__ = "2.0.1"
35
+ VERSION = tuple(map(int_or_str, __version__.split(".")))
36
+
37
+ __all__ = [
38
+ "AuthenticationError",
39
+ "AuthenticationWrongNumberOfArgsError",
40
+ "BlockingConnectionPool",
41
+ "BusyLoadingError",
42
+ "ChildDeadlockedError",
43
+ "Connection",
44
+ "ConnectionError",
45
+ "ConnectionPool",
46
+ "DataError",
47
+ "from_url",
48
+ "InvalidResponse",
49
+ "PubSubError",
50
+ "ReadOnlyError",
51
+ "Redis",
52
+ "RedisError",
53
+ "ResponseError",
54
+ "SSLConnection",
55
+ "StrictRedis",
56
+ "TimeoutError",
57
+ "UnixDomainSocketConnection",
58
+ "WatchError",
59
+ ]