funboost-48.8-py3-none-any.whl → funboost-49.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of funboost has been flagged as possibly problematic.

Files changed (33)
  1. funboost/__init__.py +1 -1
  2. funboost/concurrent_pool/custom_threadpool_executor.py +1 -1
  3. funboost/constant.py +16 -2
  4. funboost/consumers/base_consumer.py +42 -29
  5. funboost/consumers/rabbitmq_amqpstorm_consumer.py +5 -0
  6. funboost/consumers/redis_filter.py +47 -31
  7. funboost/core/active_cousumer_info_getter.py +47 -7
  8. funboost/core/booster.py +1 -0
  9. funboost/core/current_task.py +17 -0
  10. funboost/core/func_params_model.py +30 -17
  11. funboost/core/loggers.py +1 -0
  12. funboost/funboost_config_deafult.py +1 -1
  13. funboost/function_result_web/__pycache__/app.cpython-37.pyc +0 -0
  14. funboost/function_result_web/__pycache__/functions.cpython-37.pyc +0 -0
  15. funboost/function_result_web/__pycache__/functions.cpython-39.pyc +0 -0
  16. funboost/function_result_web/app_debug_start.py +1 -1
  17. funboost/function_result_web/functions.py +10 -1
  18. funboost/function_result_web/static/js/form-memory.js +92 -0
  19. funboost/function_result_web/static/js_cdn/chart.js +20 -0
  20. funboost/function_result_web/templates/index.html +31 -1
  21. funboost/function_result_web/templates/queue_op.html +418 -27
  22. funboost/function_result_web/templates/rpc_call.html +51 -37
  23. funboost/publishers/rabbitmq_amqpstorm_publisher.py +1 -1
  24. funboost/publishers/redis_publisher_priority.py +2 -2
  25. funboost/utils/dependency_packages_in_pythonpath/aioredis/readme.md +1 -1
  26. funboost/utils/dependency_packages_in_pythonpath/readme.md +1 -1
  27. {funboost-48.8.dist-info → funboost-49.0.dist-info}/METADATA +176 -82
  28. {funboost-48.8.dist-info → funboost-49.0.dist-info}/RECORD +32 -31
  29. {funboost-48.8.dist-info → funboost-49.0.dist-info}/WHEEL +1 -1
  30. funboost/function_result_web/templates/index_/321/204/342/225/225/320/235/321/205/320/237/320/277/321/206/320/232/320/250/321/205/320/237/320/260.html +0 -153
  31. {funboost-48.8.dist-info → funboost-49.0.dist-info}/LICENSE +0 -0
  32. {funboost-48.8.dist-info → funboost-49.0.dist-info}/entry_points.txt +0 -0
  33. {funboost-48.8.dist-info → funboost-49.0.dist-info}/top_level.txt +0 -0
funboost/__init__.py CHANGED
@@ -13,7 +13,7 @@ use_config_form_funboost_config_module() in the set_frame_config module is the core
      This note concerns framework developers only, not end users.
      '''
 
- __version__ = "48.8"
+ __version__ = "49.0"
 
  from funboost.set_frame_config import show_frame_config
 
funboost/concurrent_pool/custom_threadpool_executor.py CHANGED
@@ -98,7 +98,7 @@ class ThreadPoolExecutorShrinkAble(Executor, FunboostFileLoggerMixin, LoggerLeve
      # MIN_WORKERS = 5  # The minimum can be set to 0; it is how many threads the pool keeps on standby no matter how long it has been idle.
      # KEEP_ALIVE_TIME = 60  # If a thread gets nothing from queue.get(block=True, timeout=KEEP_ALIVE_TIME) for this long, the thread exits.
 
- MIN_WORKERS = 5
+ MIN_WORKERS = 1
  KEEP_ALIVE_TIME = 60
 
  def __init__(self, max_workers: int = None, thread_name_prefix='', work_queue_maxsize=10):
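With MIN_WORKERS dropping from 5 to 1, an idle ThreadPoolExecutorShrinkAble now shrinks to a single standby thread after KEEP_ALIVE_TIME. Since these are plain class attributes, the 48.8 behavior can be restored by overriding them in a subclass; a minimal sketch (import path taken from the file list above):

    from funboost.concurrent_pool.custom_threadpool_executor import ThreadPoolExecutorShrinkAble

    class KeepFiveIdlePool(ThreadPoolExecutorShrinkAble):
        MIN_WORKERS = 5       # keep at least 5 threads on standby, as in 48.8
        KEEP_ALIVE_TIME = 60  # an idle thread exits after 60s without a task

    pool = KeepFiveIdlePool(max_workers=20)
    pool.submit(print, 'hello')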
funboost/constant.py CHANGED
@@ -108,9 +108,23 @@ class ConstStrForClassMethod:
      OBJ_INIT_PARAMS = 'obj_init_params'
      CLS_MODULE = 'cls_module'
      CLS_FILE = 'cls_file'
-
  class RedisKeys:
      REDIS_KEY_PAUSE_FLAG = 'funboost_pause_flag'
      REDIS_KEY_STOP_FLAG = 'funboost_stop_flag'
      QUEUE__MSG_COUNT_MAP = 'funboost_queue__msg_count_map'
-     FUNBOOST_QUEUE__CONSUMER_PARAMS= 'funboost_queue__consumer_parmas'
+     FUNBOOST_QUEUE__CONSUMER_PARAMS= 'funboost_queue__consumer_parmas'
+     FUNBOOST_QUEUE__RUN_COUNT_MAP = 'funboost_queue__run_count_map'
+     FUNBOOST_QUEUE__RUN_FAIL_COUNT_MAP = 'funboost_queue__run_fail_count_map'
+     FUNBOOST_ALL_QUEUE_NAMES = 'funboost_all_queue_names'
+     FUNBOOST_ALL_IPS = 'funboost_all_ips'
+
+     FUNBOOST_HEARTBEAT_QUEUE__DICT_PREFIX = 'funboost_hearbeat_queue__dict:'
+     FUNBOOST_HEARTBEAT_SERVER__DICT_PREFIX = 'funboost_hearbeat_server__dict:'
+
+     @staticmethod
+     def gen_funboost_hearbeat_queue__dict_key_by_queue_name(queue_name):
+         return f'{RedisKeys.FUNBOOST_HEARTBEAT_QUEUE__DICT_PREFIX}{queue_name}'
+
+     @staticmethod
+     def gen_funboost_hearbeat_server__dict_key_by_ip(ip):
+         return f'{RedisKeys.FUNBOOST_HEARTBEAT_SERVER__DICT_PREFIX}{ip}'
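The new prefix constants and static helpers centralize how heartbeat redis keys are built (the upstream spelling "hearbeat" is preserved for key compatibility). A small usage sketch:

    from funboost.constant import RedisKeys

    queue_key = RedisKeys.gen_funboost_hearbeat_queue__dict_key_by_queue_name('task_queue_1')
    server_key = RedisKeys.gen_funboost_hearbeat_server__dict_key_by_ip('192.168.1.10')
    print(queue_key)   # funboost_hearbeat_queue__dict:task_queue_1
    print(server_key)  # funboost_hearbeat_server__dict:192.168.1.10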
funboost/consumers/base_consumer.py CHANGED
@@ -194,7 +194,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      self.consumer_params.broker_exclusive_config = broker_exclusive_config_merge
 
      self._stop_flag = None
-     self._pause_flag = None  # pause-consuming flag, read from redis
+     self._pause_flag = threading.Event()  # pause-consuming flag, read from redis
      self._last_show_pause_log_time = 0
      # self._redis_key_stop_flag = f'funboost_stop_flag'
      # self._redis_key_pause_flag = RedisKeys.REDIS_KEY_PAUSE_FLAG
@@ -243,12 +243,27 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      # print(self.publisher_params)
      if is_main_process:
          self.logger.info(f'consumer configuration of {self.queue_name}:\n {self.consumer_params.json_str_value()}')
+
      atexit.register(self.join_shedual_task_thread)
 
+     self._save_consumer_params()
+
      if self.consumer_params.is_auto_start_consuming_message:
          _ = self.publisher_of_same_queue
          self.start_consuming_message()
 
+ def _save_consumer_params(self):
+     """
+     Save the queue's consumer params so they can be viewed in the web UI.
+     :return:
+     """
+     if self.consumer_params.is_send_consumer_hearbeat_to_redis:
+         RedisMixin().redis_db_frame.sadd(RedisKeys.FUNBOOST_ALL_QUEUE_NAMES, self.queue_name)
+         RedisMixin().redis_db_frame.hmset(RedisKeys.FUNBOOST_QUEUE__CONSUMER_PARAMS,
+                                           {self.queue_name: self.consumer_params.json_str_value()})
+         RedisMixin().redis_db_frame.sadd(RedisKeys.FUNBOOST_ALL_IPS, nb_log_config_default.computer_ip)
+
+
  def _build_logger(self):
      logger_prefix = self.consumer_params.logger_prefix
      if logger_prefix != '':
@@ -456,15 +471,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      return msg
 
  def _submit_task(self, kw):
-     while 1:  # This block supported pausing consumption.
-         # print(self._pause_flag)
-         if self._pause_flag == 1:
-             time.sleep(5)
-             if time.time() - self._last_show_pause_log_time > 60:
-                 self.logger.warning(f'tasks in queue {self.queue_name} are set to pause consumption')
-                 self._last_show_pause_log_time = time.time()
-         else:
-             break
+
      kw['body'] = self.convert_msg_before_run(kw['body'])
      self._print_message_get_from_broker(kw['body'])
      if self._judge_is_daylight():
@@ -473,9 +480,9 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
          return
      function_only_params = delete_keys_and_return_new_dict(kw['body'], )
      if self._get_priority_conf(kw, 'do_task_filtering') and self._redis_filter.check_value_exists(
-             function_only_params):  # Check the function params and filter out tasks that already ran successfully.
+             function_only_params, self._get_priority_conf(kw, 'filter_str')):  # Check the function params and filter out tasks that already ran successfully.
          self.logger.warning(f'task {kw["body"]} was filtered by redis key [{self._redis_filter_key_name}]')
-         self._confirm_consume(kw)
+         self._confirm_consume(kw)  # A skipped task must still be acked; otherwise the broker would think the message was never consumed.
          return
      publish_time = get_publish_time(kw['body'])
      msg_expire_senconds_priority = self._get_priority_conf(kw, 'msg_expire_senconds')
@@ -529,6 +536,16 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      else:
          self._frequency_control(self.consumer_params.qps, self._msg_schedule_time_intercal)
 
+     while 1:  # This block supports pausing consumption.
+         # print(self._pause_flag)
+         if self._pause_flag.is_set():
+             if time.time() - self._last_show_pause_log_time > 60:
+                 self.logger.warning(f'tasks in queue {self.queue_name} are set to pause consumption')
+                 self._last_show_pause_log_time = time.time()
+             time.sleep(5)
+         else:
+             break
+
  def __delete_eta_countdown(self, msg_body: dict):
      self.__dict_pop(msg_body.get('extra', {}), 'eta')
      self.__dict_pop(msg_body.get('extra', {}), 'countdown')
@@ -661,7 +678,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      current_function_result_status.run_status = RunStatus.finish
      self._result_persistence_helper.save_function_result_to_mongo(current_function_result_status)
      if self._get_priority_conf(kw, 'do_task_filtering'):
-         self._redis_filter.add_a_value(function_only_params)  # After the function succeeds, add the sorted key-value string of its params to the set.
+         self._redis_filter.add_a_value(function_only_params, self._get_priority_conf(kw, 'filter_str'))  # After the function succeeds, add the sorted key-value string of its params to the set.
      if current_function_result_status.success is False and current_retry_times == max_retry_times:
          log_msg = f'function {self.consuming_function.__name__} still failed after reaching max retry times {self._get_priority_conf(kw, "max_retry_times")}, params were {function_only_params} '
          if self.consumer_params.is_push_to_dlx_queue_when_retry_max_times:
@@ -707,7 +724,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      fct_context = FctContext(function_params=function_only_params,
                               full_msg=kw['body'],
                               function_result_status=function_result_status,
-                              logger=self.logger, )
+                              logger=self.logger, queue_name=self.queue_name, )
 
      try:
          function_run = self.consuming_function
@@ -715,6 +732,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
          fct_context.asyncio_use_thread_concurrent_mode = True
          function_run = sync_or_async_fun_deco(function_run)
      else:
+         pass
          fct_context.asynco_use_thread_concurrent_mode = False
      fct.set_fct_context(fct_context)
      function_timeout = self._get_priority_conf(kw, 'function_timeout')
@@ -814,7 +832,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      await simple_run_in_executor(self._result_persistence_helper.save_function_result_to_mongo, current_function_result_status)
      if self._get_priority_conf(kw, 'do_task_filtering'):
          # self._redis_filter.add_a_value(function_only_params)  # After the function succeeds, add the sorted key-value string of its params to the set.
-         await simple_run_in_executor(self._redis_filter.add_a_value, function_only_params)
+         await simple_run_in_executor(self._redis_filter.add_a_value, function_only_params, self._get_priority_conf(kw, 'filter_str'))
      if current_function_result_status.success is False and current_retry_times == max_retry_times:
          log_msg = f'function {self.consuming_function.__name__} still failed after reaching max retry times {self._get_priority_conf(kw, "max_retry_times")}, params were {function_only_params} '
          if self.consumer_params.is_push_to_dlx_queue_when_retry_max_times:
@@ -866,7 +884,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
      fct_context = FctContext(function_params=function_only_params,
                               full_msg=kw['body'],
                               function_result_status=function_result_status,
-                              logger=self.logger, )
+                              logger=self.logger, queue_name=self.queue_name, )
      fct.set_fct_context(fct_context)
      try:
          corotinue_obj = self.consuming_function(**self._convert_real_function_only_params_by_conusuming_function_kind(function_only_params))
@@ -1177,6 +1195,10 @@ class MetricCalculation:
      msg += f''' estimated {need_time} more needed to finish the remaining {self.msg_num_in_broker} tasks in queue {self.consumer.queue_name}'''
      self.consumer.logger.info(msg)
      self.last_show_remaining_execution_time = time.time()
+     if self.consumer.consumer_params.is_send_consumer_hearbeat_to_redis is True:
+         RedisMixin().redis_db_frame.hincrby(RedisKeys.FUNBOOST_QUEUE__RUN_COUNT_MAP, self.consumer.queue_name, self.execute_task_times_every_unit_time_temp)
+         RedisMixin().redis_db_frame.hincrby(RedisKeys.FUNBOOST_QUEUE__RUN_FAIL_COUNT_MAP, self.consumer.queue_name, self.execute_task_times_every_unit_time_temp_fail)
+
      self.current_time_for_execute_task_times_every_unit_time = time.time()
      self.consuming_function_cost_time_total_every_unit_time_tmp = 0
      self.execute_task_times_every_unit_time_temp = 0
@@ -1233,22 +1255,13 @@ class DistributedConsumerStatistics(RedisMixin, FunboostFileLoggerMixin):
      self.active_consumer_num = 1
      self._last_show_consumer_num_timestamp = 0
 
-     self._queue__consumer_identification_map_key_name = f'funboost_hearbeat_queue__dict:{self._queue_name}'
-     self._server__consumer_identification_map_key_name = f'funboost_hearbeat_server__dict:{nb_log_config_default.computer_ip}'
+     self._queue__consumer_identification_map_key_name = RedisKeys.gen_funboost_hearbeat_queue__dict_key_by_queue_name(self._queue_name)
+     self._server__consumer_identification_map_key_name = RedisKeys.gen_funboost_hearbeat_server__dict_key_by_ip(nb_log_config_default.computer_ip)
 
  def run(self):
-     self._send_consumer_params()
      self.send_heartbeat()
      self._consumer.keep_circulating(self.SEND_HEARTBEAT_INTERVAL, block=False, daemon=False)(self.send_heartbeat)()
 
- def _send_consumer_params(self):
-     """
-     Save the queue's consumer params so they can be viewed in the web UI.
-     :return:
-     """
-     self.redis_db_frame.hmset('funboost_queue__consumer_parmas', {self._consumer.queue_name: self._consumer.consumer_params.json_str_value()})
-
-
  def _send_heartbeat_with_dict_value(self, redis_key, ):
      # Send the current consumer process heartbeat; the value is a dict of which processes run on a given machine or queue.
 
@@ -1315,8 +1328,8 @@ class DistributedConsumerStatistics(RedisMixin, FunboostFileLoggerMixin):
 
      pause_flag = self.redis_db_frame.hget(RedisKeys.REDIS_KEY_PAUSE_FLAG, self._consumer.queue_name)
      if pause_flag is not None and int(pause_flag) == 1:
-         self._consumer._pause_flag = 1
+         self._consumer._pause_flag.set()
      else:
-         self._consumer._pause_flag = 0
+         self._consumer._pause_flag.clear()
 
 
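Taken together, these hunks switch pausing from an int checked in _submit_task to a threading.Event checked after QPS control, with the heartbeat thread driving the event from a redis hash keyed by queue name. A minimal sketch of flipping that flag from outside, assuming a redis client pointed at the db funboost is configured to use (host, port, and db here are placeholders):

    import redis
    from funboost.constant import RedisKeys

    r = redis.Redis(host='127.0.0.1', port=6379, db=7)  # hypothetical connection params
    r.hset(RedisKeys.REDIS_KEY_PAUSE_FLAG, 'task_queue_1', 1)  # pause: heartbeat loop calls _pause_flag.set()
    r.hset(RedisKeys.REDIS_KEY_PAUSE_FLAG, 'task_queue_1', 0)  # resume: heartbeat loop calls _pause_flag.clear()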
funboost/consumers/rabbitmq_amqpstorm_consumer.py CHANGED
@@ -30,6 +30,7 @@ class RabbitmqConsumerAmqpStorm(AbstractConsumer):
      rp.channel_wrapper_by_ampqstormbaic.qos(self.consumer_params.concurrent_num)
      rp.channel_wrapper_by_ampqstormbaic.consume(callback=callback, queue=self.queue_name, no_ack=self.consumer_params.broker_exclusive_config['no_ack'],
                                                  )
+     self._rp = rp
      rp.channel.start_consuming(auto_decode=True)
 
  def _confirm_consume(self, kw):
@@ -44,3 +45,7 @@ class RabbitmqConsumerAmqpStorm(AbstractConsumer):
      # amqpstorm.Message.delivery_tag
      # print(kw['amqpstorm_message'].delivery_tag)
      kw['amqpstorm_message'].nack(requeue=True)
+     # kw['amqpstorm_message'].reject(requeue=True)
+     # kw['amqpstorm_message'].ack()
+     # self.publisher_of_same_queue.publish(kw['body'])
+
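The commented-out lines hint at the alternative requeue strategies the amqpstorm Message API offers for a failed delivery; a sketch of the two approaches for illustration only (message stands for kw['amqpstorm_message'], and this is not funboost's chosen behavior):

    def requeue_in_place(message):
        # What the code actually does: negatively acknowledge and let the broker requeue.
        message.nack(requeue=True)

    def requeue_at_tail(publisher, message, body):
        # The commented alternative: re-publish to the same queue, then ack the original delivery.
        publisher.publish(body)
        message.ack()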
funboost/consumers/redis_filter.py CHANGED
@@ -32,23 +32,36 @@ class RedisFilter(RedisMixin, FunboostFileLoggerMixin):
      self._redis_key_name = redis_key_name
      self._redis_filter_task_expire_seconds = redis_filter_task_expire_seconds
 
+ # @staticmethod
+ # def _get_ordered_str(value):
+ #     """To filter json params in redis, the key-value pairs must be sorted first, otherwise filtering is inaccurate, e.g. {"a":1,"b":2} vs {"b":2,"a":1}"""
+ #     value = Serialization.to_dict(value)
+ #     ordered_dict = OrderedDict()
+ #     for k in sorted(value):
+ #         ordered_dict[k] = value[k]
+ #     return json.dumps(ordered_dict)
+
  @staticmethod
- def _get_ordered_str(value):
+ def generate_filter_str(value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
      """To filter json params in redis, the key-value pairs must be sorted first, otherwise filtering is inaccurate, e.g. {"a":1,"b":2} vs {"b":2,"a":1}"""
+     if filter_str:  # If the user specified a filter string, use it; otherwise use the sorted key-value string.
+         return filter_str
      value = Serialization.to_dict(value)
      ordered_dict = OrderedDict()
      for k in sorted(value):
          ordered_dict[k] = value[k]
+     # print(ordered_dict, filter_str)
      return json.dumps(ordered_dict)
 
- def add_a_value(self, value: typing.Union[str, dict]):
-     self.redis_db_filter_and_rpc_result.sadd(self._redis_key_name, self._get_ordered_str(value))
 
- def manual_delete_a_value(self, value: typing.Union[str, dict]):
-     self.redis_db_filter_and_rpc_result.srem(self._redis_key_name, self._get_ordered_str(value))
+ def add_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     self.redis_db_filter_and_rpc_result.sadd(self._redis_key_name, self.generate_filter_str(value, filter_str))
 
- def check_value_exists(self, value):
-     return self.redis_db_filter_and_rpc_result.sismember(self._redis_key_name, self._get_ordered_str(value))
+ def manual_delete_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     self.redis_db_filter_and_rpc_result.srem(self._redis_key_name, self.generate_filter_str(value, filter_str))
+
+ def check_value_exists(self, value, filter_str: typing.Optional[str] = None):
+     return self.redis_db_filter_and_rpc_result.sismember(self._redis_key_name, self.generate_filter_str(value, filter_str))
 
  def delete_expire_filter_task_cycle(self):
      pass
@@ -61,15 +74,17 @@ class RedisImpermanencyFilter(RedisFilter):
      If this task was published within the last 30 minutes, skip steps 1 + 2; this logic is now built into the framework, typically used for API result caching.
      """
 
- def add_a_value(self, value: typing.Union[str, dict]):
-     self.redis_db_filter_and_rpc_result.zadd(self._redis_key_name, {self._get_ordered_str(value): time.time()})
+ def add_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     self.redis_db_filter_and_rpc_result.zadd(self._redis_key_name, {self.generate_filter_str(value, filter_str): time.time()})
 
- def manual_delete_a_value(self, value: typing.Union[str, dict]):
-     self.redis_db_filter_and_rpc_result.zrem(self._redis_key_name, self._get_ordered_str(value))
+ def manual_delete_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     self.redis_db_filter_and_rpc_result.zrem(self._redis_key_name, self.generate_filter_str(value, filter_str))
 
- def check_value_exists(self, value):
-     # print(self.redis_db_filter_and_rpc_result.zrank(self._redis_key_name, self._get_ordered_str(value)))
-     return False if self.redis_db_filter_and_rpc_result.zrank(self._redis_key_name, self._get_ordered_str(value)) is None else True
+ def check_value_exists(self, value, filter_str: typing.Optional[str] = None):
+     # print(self.redis_db_filter_and_rpc_result.zrank(self._redis_key_name, self.generate_filter_str(value, filter_str)))
+     is_exists = False if self.redis_db_filter_and_rpc_result.zscore(self._redis_key_name, self.generate_filter_str(value, filter_str)) is None else True
+     # print(is_exists, value, filter_str, self.generate_filter_str(value, filter_str))
+     return is_exists
 
  @decorators.keep_circulating(60, block=False)
  def delete_expire_filter_task_cycle000(self):
@@ -111,16 +126,16 @@ class RedisImpermanencyFilterUsingRedisKey(RedisFilter):
      """
      return f'{self._redis_key_name}:{value.replace(":", "：")}'  # Tasks are json; ascii colons would create deep key trees in redis GUIs, so they are replaced with full-width colons.
 
- def add_a_value(self, value: typing.Union[str, dict]):
-     redis_key = self.__add_dir_prefix(self._get_ordered_str(value))
+ def add_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     redis_key = self.__add_dir_prefix(self.generate_filter_str(value, filter_str))
      self.redis_db_filter_and_rpc_result.set(redis_key, 1)
      self.redis_db_filter_and_rpc_result.expire(redis_key, self._redis_filter_task_expire_seconds)
 
- def manual_delete_a_value(self, value: typing.Union[str, dict]):
-     self.redis_db_filter_and_rpc_result.delete(self.__add_dir_prefix(self._get_ordered_str(value)))
+ def manual_delete_a_value(self, value: typing.Union[str, dict], filter_str: typing.Optional[str] = None):
+     self.redis_db_filter_and_rpc_result.delete(self.__add_dir_prefix(self.generate_filter_str(value, filter_str)))
 
- def check_value_exists(self, value):
-     return True if self.redis_db_filter_and_rpc_result.exists(self.__add_dir_prefix(self._get_ordered_str(value))) else True
+ def check_value_exists(self, value, filter_str: typing.Optional[str] = None):
+     return True if self.redis_db_filter_and_rpc_result.exists(self.__add_dir_prefix(self.generate_filter_str(value, filter_str))) else True
 
  def delete_expire_filter_task_cycle(self):
      """
@@ -131,16 +146,17 @@ class RedisImpermanencyFilterUsingRedisKey(RedisFilter):
 
 
  if __name__ == '__main__':
-     # filter = RedisFilter('filter_set:abcdefgh', 120)
-     params_filter = RedisImpermanencyFilter('filter_zset:abcdef', 120)
+     # params_filter = RedisFilter('filter_set:abcdefgh2', 120)
+     params_filter = RedisImpermanencyFilter('filter_zset:abcdef2', 120)
      # params_filter = RedisImpermanencyFilterUsingRedisKey('filter_dir', 300)
      for i in range(10):
-         params_filter.add_a_value({'x': i, 'y': i * 2})
-
-     params_filter.manual_delete_a_value({'a': 1, 'b': 2})
-     print(params_filter.check_value_exists({'a': 1, 'b': 2}))
-     params_filter.delete_expire_filter_task_cycle()
-     params_filter.add_a_value({'a': 1, 'b': 5})
-     print(params_filter.check_value_exists({'a': 1, 'b': 2}))
-     time.sleep(130)
-     print(params_filter.check_value_exists({'a': 1, 'b': 2}))
+         # params_filter.add_a_value({'x': i, 'y': i * 2}, str(i))
+         params_filter.add_a_value({'x': i, 'y': i * 2}, None)
+
+     # params_filter.manual_delete_a_value({'a': 1, 'b': 2})
+     print(params_filter.check_value_exists({'x': 1, 'y': 2}))
+     # params_filter.delete_expire_filter_task_cycle()
+     # params_filter.add_a_value({'a': 1, 'b': 5})
+     # print(params_filter.check_value_exists({'a': 1, 'b': 2}))
+     # time.sleep(130)
+     # print(params_filter.check_value_exists({'a': 1, 'b': 2}))
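The net effect of this file's changes: every filter method now accepts an optional filter_str that overrides the default sorted-params json as the dedup key, which base_consumer wires through _get_priority_conf(kw, 'filter_str'). A minimal direct-use sketch based on the signatures above (key name and expiry are arbitrary):

    from funboost.consumers.redis_filter import RedisFilter

    params_filter = RedisFilter('filter_set:demo', 120)

    # Default: the dedup key is the sorted json of the params,
    # so {'a': 1, 'b': 2} and {'b': 2, 'a': 1} collide as intended.
    params_filter.add_a_value({'a': 1, 'b': 2})
    print(params_filter.check_value_exists({'b': 2, 'a': 1}))  # True

    # With filter_str: dedup on a caller-chosen string instead of the full params.
    params_filter.add_a_value({'url': 'https://example.com', 'retry': 3}, 'https://example.com')
    print(params_filter.check_value_exists({'url': 'https://example.com', 'retry': 0}, 'https://example.com'))  # True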
funboost/core/active_cousumer_info_getter.py CHANGED
@@ -52,7 +52,7 @@ class ActiveCousumerProcessInfoGetter(RedisMixin, FunboostFileLoggerMixin):
      "start_timestamp": 1640604084.0780013
      }, ...............]
      """
-     redis_key = f'funboost_hearbeat_queue__dict:{queue_name}'
+     redis_key = RedisKeys.gen_funboost_hearbeat_queue__dict_key_by_queue_name(queue_name)
      return self._get_all_hearbeat_info_by_redis_key_name(redis_key)
 
  def get_all_hearbeat_info_by_ip(self, ip=None) -> typing.List[typing.Dict]:
@@ -61,15 +61,38 @@ class ActiveCousumerProcessInfoGetter(RedisMixin, FunboostFileLoggerMixin):
      The result format is the same as the get_all_hearbeat_dict_by_queue_name method above.
      """
      ip = ip or nb_log_config_default.computer_ip
-     redis_key = f'funboost_hearbeat_server__dict:{ip}'
+     redis_key = RedisKeys.gen_funboost_hearbeat_server__dict_key_by_ip(ip)
      return self._get_all_hearbeat_info_by_redis_key_name(redis_key)
 
- def _get_all_hearbeat_info_partition_by_redis_key_prefix(self, redis_key_prefix):
-     keys = self.redis_db_frame.scan(0, f'{redis_key_prefix}*', count=10000)[1]
+ # def _get_all_hearbeat_info_partition_by_redis_key_prefix(self, redis_key_prefix):
+ #     keys = self.redis_db_frame.scan(0, f'{redis_key_prefix}*', count=10000)[1]
+ #     infos_map = {}
+ #     for key in keys:
+ #         infos = self.redis_db_frame.smembers(key)
+ #         dict_key = key.replace(redis_key_prefix, '')
+ #         infos_map[dict_key] = []
+ #         for info_str in infos:
+ #             info_dict = json.loads(info_str)
+ #             if self.timestamp() - info_dict['hearbeat_timestamp'] < 15:
+ #                 infos_map[dict_key].append(info_dict)
+ #             if self.timestamp() - info_dict['current_time_for_execute_task_times_every_unit_time'] > 30:
+ #                 info_dict['last_x_s_execute_count'] = 0
+ #                 info_dict['last_x_s_execute_count_fail'] = 0
+ #     return infos_map
+
+ def get_all_queue_names(self):
+     return self.redis_db_frame.smembers(RedisKeys.FUNBOOST_ALL_QUEUE_NAMES)
+
+ def get_all_ips(self):
+     return self.redis_db_frame.smembers(RedisKeys.FUNBOOST_ALL_IPS)
+
+ def _get_all_hearbeat_info_partition_by_redis_keys(self, keys):
+
+     # keys = [f'{redis_key_prefix}{queue_name}' for queue_name in queue_names]
      infos_map = {}
      for key in keys:
          infos = self.redis_db_frame.smembers(key)
-         dict_key = key.replace(redis_key_prefix, '')
+         dict_key = key.replace(RedisKeys.FUNBOOST_HEARTBEAT_QUEUE__DICT_PREFIX, '').replace(RedisKeys.FUNBOOST_HEARTBEAT_SERVER__DICT_PREFIX, '')
          infos_map[dict_key] = []
          for info_str in infos:
              info_dict = json.loads(info_str)
@@ -82,13 +105,15 @@ class ActiveCousumerProcessInfoGetter(RedisMixin, FunboostFileLoggerMixin):
 
  def get_all_hearbeat_info_partition_by_queue_name(self) -> typing.Dict[typing.AnyStr, typing.List[typing.Dict]]:
      """Get active consumer process info for all queues, partitioned by queue name; no queue name needs to be passed in, redis keys are scanned automatically. Do not put too many unrelated business cache keys in the redis db configured in funboost_config.py."""
-     infos_map = self._get_all_hearbeat_info_partition_by_redis_key_prefix('funboost_hearbeat_queue__dict:')
+     queue_names = self.get_all_queue_names()
+     infos_map = self._get_all_hearbeat_info_partition_by_redis_keys([RedisKeys.gen_funboost_hearbeat_queue__dict_key_by_queue_name(queue_name) for queue_name in queue_names])
      self.logger.info(f'active consumer process info for all queues, partitioned by queue name: {json.dumps(infos_map, indent=4)}')
      return infos_map
 
  def get_all_hearbeat_info_partition_by_ip(self) -> typing.Dict[typing.AnyStr, typing.List[typing.Dict]]:
      """Get active consumer process info for all machine ips, partitioned by ip; no ip needs to be passed in, redis keys are scanned automatically. Do not put too many unrelated business cache keys in the redis db configured in funboost_config.py."""
-     infos_map = self._get_all_hearbeat_info_partition_by_redis_key_prefix('funboost_hearbeat_server__dict:')
+     ips = self.get_all_ips()
+     infos_map = self._get_all_hearbeat_info_partition_by_redis_keys([RedisKeys.gen_funboost_hearbeat_server__dict_key_by_ip(ip) for ip in ips])
      self.logger.info(f'active consumer process info for all machine ips, partitioned by ip: {json.dumps(infos_map, indent=4)}')
      return infos_map
@@ -123,8 +148,19 @@ class QueueConusmerParamsGetter(RedisMixin, FunboostFileLoggerMixin):
          s += c[filed]
      return s
 
+ def get_queues_history_run_count(self, ):
+     return self.redis_db_frame.hgetall(RedisKeys.FUNBOOST_QUEUE__RUN_COUNT_MAP)
+
+ def get_queues_history_run_fail_count(self, ):
+     return self.redis_db_frame.hgetall(RedisKeys.FUNBOOST_QUEUE__RUN_FAIL_COUNT_MAP)
+
  def get_queue_params_and_active_consumers(self):
      queue__active_consumers_map = ActiveCousumerProcessInfoGetter().get_all_hearbeat_info_partition_by_queue_name()
+
+     queue_name_list = list(queue__active_consumers_map.keys())
+     queue__history_run_count_map = self.get_queues_history_run_count()
+     queue__history_run_fail_count_map = self.get_queues_history_run_fail_count()
+
      queue__consumer_params_map = self.get_queue_params()
      queue__pause_map = self.get_pause_flag()
      queue__msg_count_dict = self.get_msg_num(ignore_report_ts=True)
@@ -148,6 +184,10 @@ class QueueConusmerParamsGetter(RedisMixin, FunboostFileLoggerMixin):
          'active_consumers': active_consumers,
          'pause_flag': queue__pause_map.get(queue, -1),
          'msg_num_in_broker': queue__msg_count_dict.get(queue, None),
+
+         'history_run_count': queue__history_run_count_map.get(queue, None),
+         'history_run_fail_count': queue__history_run_fail_count_map.get(queue, None),
+
          'all_consumers_last_x_s_execute_count': all_consumers_last_x_s_execute_count,
          'all_consumers_last_x_s_execute_count_fail': all_consumers_last_x_s_execute_count_fail,
          'all_consumers_last_x_s_avarage_function_spend_time': all_consumers_last_x_s_avarage_function_spend_time,
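Because queues and ips are now registered in explicit sets (FUNBOOST_ALL_QUEUE_NAMES / FUNBOOST_ALL_IPS) rather than discovered by SCAN prefix matching, the monitoring getters can be queried directly; a small sketch using only methods shown in the hunks above:

    from funboost.core.active_cousumer_info_getter import ActiveCousumerProcessInfoGetter, QueueConusmerParamsGetter

    info_getter = ActiveCousumerProcessInfoGetter()
    print(info_getter.get_all_queue_names())  # members of the funboost_all_queue_names set
    print(info_getter.get_all_ips())          # members of the funboost_all_ips set

    params_getter = QueueConusmerParamsGetter()
    # Per-queue cumulative run counts (redis hashes written by MetricCalculation via hincrby):
    print(params_getter.get_queues_history_run_count())
    print(params_getter.get_queues_history_run_fail_count())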
funboost/core/booster.py CHANGED
@@ -100,6 +100,7 @@ class Booster:
      consuming_function = args[0]
      self.boost_params.consuming_function = consuming_function
      self.boost_params.consuming_function_raw = consuming_function
+     self.boost_params.consuming_function_name = consuming_function.__name__
      # print(consuming_function)
      # print(ClsHelper.get_method_kind(consuming_function))
      # print(inspect.getsourcelines(consuming_function))
funboost/core/current_task.py CHANGED
@@ -1,4 +1,5 @@
  import abc
+ import typing
  import contextvars
  from dataclasses import dataclass
  import logging
@@ -7,6 +8,8 @@ import asyncio
 
  from funboost.core.function_result_status_saver import FunctionResultStatus
 
+
+
  """ usage example
  '''
  fct = funboost_current_task()
@@ -60,7 +63,9 @@ class FctContext:
      full_msg: dict
      function_result_status: FunctionResultStatus
      logger: logging.Logger
+     queue_name: str
      asyncio_use_thread_concurrent_mode: bool = False
+
 
  # class FctContext:
  #     """
@@ -107,6 +112,11 @@ class _BaseCurrentTask(metaclass=abc.ABCMeta):
      @property
      def logger(self) -> logging.Logger:
          return self.get_fct_context().logger
+
+     @property
+     def queue_name(self) -> str:
+         return self.get_fct_context().queue_name
+
 
      def __str__(self):
          return f'<{self.__class__.__name__} [{self.function_result_status.get_status_dict()}]>'
@@ -184,6 +194,13 @@ class _FctProxy:
      @property
      def logger(self) -> logging.Logger:
          return self.fct_context.logger
+
+     @property
+     def queue_name(self) -> str:
+         return self.fct_context.queue_name
+
+
+
 
      def __str__(self):
          return f'<{self.__class__.__name__} [{self.function_result_status.get_status_dict()}]>'
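FctContext now carries queue_name, and both the abstract current-task class and the proxy expose it as a property, so a consuming function can learn which queue its current message came from. A minimal sketch, assuming the classic @boost decorator form and that funboost_current_task is importable from this module as its docstring example suggests (queue name and function are illustrative):

    from funboost import boost, BrokerEnum
    from funboost.core.current_task import funboost_current_task

    @boost('demo_queue', broker_kind=BrokerEnum.REDIS)  # hypothetical queue name and broker
    def add(x, y):
        fct = funboost_current_task()
        fct.logger.info(f'running a message from queue {fct.queue_name}')  # new in 49.0
        return x + y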