funboost-44.0-py3-none-any.whl → funboost-44.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of funboost might be problematic.

Files changed (37)
  1. funboost/__init__.py +2 -1
  2. funboost/consumers/base_consumer.py +11 -8
  3. funboost/consumers/http_consumer.py +9 -9
  4. funboost/consumers/kafka_consumer.py +12 -14
  5. funboost/consumers/kafka_consumer_manually_commit.py +10 -13
  6. funboost/consumers/mqtt_consumer.py +3 -2
  7. funboost/consumers/nats_consumer.py +5 -4
  8. funboost/consumers/nsq_consumer.py +6 -4
  9. funboost/consumers/zeromq_consumer.py +12 -11
  10. funboost/core/fabric_deploy_helper.py +3 -3
  11. funboost/core/funboost_config_getter.py +7 -0
  12. funboost/core/funboost_time.py +28 -0
  13. funboost/core/func_params_model.py +4 -3
  14. funboost/core/helper_funs.py +21 -9
  15. funboost/core/lazy_impoter.py +86 -4
  16. funboost/core/loggers.py +1 -1
  17. funboost/function_result_web/app.py +3 -0
  18. funboost/function_result_web/functions.py +0 -1
  19. funboost/publishers/base_publisher.py +4 -7
  20. funboost/publishers/confluent_kafka_publisher.py +9 -12
  21. funboost/publishers/kafka_publisher.py +5 -11
  22. funboost/publishers/mqtt_publisher.py +3 -2
  23. funboost/publishers/nats_publisher.py +2 -2
  24. funboost/publishers/nsq_publisher.py +4 -6
  25. funboost/publishers/zeromq_publisher.py +3 -3
  26. funboost/set_frame_config.py +1 -1
  27. funboost/utils/bulk_operation.py +3 -2
  28. funboost/utils/decorators.py +0 -2
  29. funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-39.pyc +0 -0
  30. funboost/utils/resource_monitoring.py +10 -9
  31. {funboost-44.0.dist-info → funboost-44.2.dist-info}/METADATA +573 -558
  32. {funboost-44.0.dist-info → funboost-44.2.dist-info}/RECORD +36 -35
  33. {funboost-44.0.dist-info → funboost-44.2.dist-info}/WHEEL +1 -1
  34. {funboost-44.0.dist-info → funboost-44.2.dist-info}/entry_points.txt +1 -0
  35. funboost/core/try_get_user_funboost_common_config.py +0 -0
  36. {funboost-44.0.dist-info → funboost-44.2.dist-info}/LICENSE +0 -0
  37. {funboost-44.0.dist-info → funboost-44.2.dist-info}/top_level.txt +0 -0
funboost/__init__.py CHANGED
@@ -13,7 +13,7 @@ The use_config_form_funboost_config_module() in the set_frame_config module is the core
 This explanatory comment is irrelevant to users of the framework; it only concerns framework developers.
 '''
 
-__version__ = "44.0"
+__version__ = "44.2"
 
 from funboost.set_frame_config import show_frame_config
 
@@ -65,3 +65,4 @@ from funboost.core.current_task import funboost_current_task
 
 # Some packages add no handlers by default; their raw logs are ugly, non-clickable, and give no clue where they originated. Here, handlers are added by default for logs at warning level and above.
 # nb_log.get_logger(name='', log_level_int=30, _log_filename='pywarning.log')
+
funboost/consumers/base_consumer.py CHANGED
@@ -49,7 +49,7 @@ from funboost.concurrent_pool.single_thread_executor import SoloExecutor
 
 from funboost.core.function_result_status_saver import ResultPersistenceHelper, FunctionResultStatus, RunStatus
 
-from funboost.core.helper_funs import delete_keys_and_return_new_dict, get_publish_time, generate_task_id
+from funboost.core.helper_funs import delete_keys_and_return_new_dict, get_publish_time, MsgGenerater
 
 from funboost.concurrent_pool.async_helper import simple_run_in_executor
 from funboost.concurrent_pool.async_pool_executor import AsyncPoolExecutor
@@ -383,8 +383,11 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         """
         raise NotImplementedError
 
-    def _auto_fill_msg(self, msg: dict):
-        """Fill in the message, for when it was not published via funboost and has no extra-related fields"""
+    def convert_msg_before_run(self, msg: dict):
+        """
+        Convert the message, for when it was not published via funboost and has no extra-related fields.
+        Per section 4.21 of the docs, users can also subclass any Consumer class and implement this convert_msg_before_run method to convert the message first.
+        """
         """ A typical message contains at least this
         {
         "a": 42,
@@ -405,11 +408,11 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
             msg['extra'] = {'is_auto_fill_extra': True}
         extra = msg['extra']
         if 'task_id' not in extra:
-            extra['task_id'] = generate_task_id(self._queue_name)
+            extra['task_id'] = MsgGenerater.generate_task_id(self._queue_name)
         if 'publish_time' not in extra:
-            extra['publish_time'] = round(time.time(), 4)
+            extra['publish_time'] = MsgGenerater.generate_publish_time()
         if 'publish_time_format':
-            extra['publish_time_format'] = time.strftime('%Y-%m-%d %H:%M:%S')
+            extra['publish_time_format'] = MsgGenerater.generate_publish_time_format()
 
     def _submit_task(self, kw):
         while 1:  # This block of code supports pausing consumption.
@@ -426,7 +429,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
                 self._requeue(kw)
                 time.sleep(self.time_interval_for_check_do_not_run_time)
                 return
-        self._auto_fill_msg(kw['body'])
+        self.convert_msg_before_run(kw['body'])
         function_only_params = delete_keys_and_return_new_dict(kw['body'], )
         if self._get_priority_conf(kw, 'do_task_filtering') and self._redis_filter.check_value_exists(
                 function_only_params):  # Check the function's params and filter out tasks that have already run and succeeded.
@@ -820,7 +823,7 @@ class AbstractConsumer(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
             self.logger.critical(msg=log_msg)
             # noinspection PyProtectedMember,PyUnresolvedReferences
             os._exit(444)
-        if self.consumer_params.function_timeout == 0:
+        if not self.consumer_params.function_timeout :
             rs = await corotinue_obj
             # rs = await asyncio.wait_for(corotinue_obj, timeout=4)
         else:
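
The move away from an exact `== 0` comparison is small but deliberate: any falsy function_timeout (0, and also None if the field is unset) now takes the no-timeout branch, whereas `== 0` is False for None. A one-line illustration:

    # 0 and None both skip asyncio.wait_for under the new check.
    for timeout in (0, None):
        assert not timeout
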
funboost/consumers/http_consumer.py CHANGED
@@ -4,11 +4,11 @@
 import asyncio
 import json
 
-from aiohttp import web
-from aiohttp.web_request import Request
+# from aiohttp import web
+# from aiohttp.web_request import Request
 
 from funboost.consumers.base_consumer import AbstractConsumer
-
+from funboost.core.lazy_impoter import AioHttpImporter
 
 class HTTPConsumer(AbstractConsumer, ):
     """
@@ -38,26 +38,26 @@ class HTTPConsumer(AbstractConsumer, ):
         #
         # flask_app.run('0.0.0.0', port=self._port,debug=False)
 
-        routes = web.RouteTableDef()
+        routes = AioHttpImporter().web.RouteTableDef()
 
         # noinspection PyUnusedLocal
         @routes.get('/')
         async def hello(request):
-            return web.Response(text="Hello, from funboost")
+            return AioHttpImporter().web.Response(text="Hello, from funboost")
 
         @routes.post('/queue')
-        async def recv_msg(request: Request):
+        async def recv_msg(request: AioHttpImporter().Request):
             data = await request.post()
             msg = data['msg']
             kw = {'body': json.loads(msg)}
             self._submit_task(kw)
-            return web.Response(text="finish")
+            return AioHttpImporter().web.Response(text="finish")
 
-        app = web.Application()
+        app = AioHttpImporter().web.Application()
         app.add_routes(routes)
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
-        web.run_app(app, host='0.0.0.0', port=self._port, )
+        AioHttpImporter().web.run_app(app, host='0.0.0.0', port=self._port, )
 
     def _confirm_consume(self, kw):
         pass  # No consume-acknowledgement feature.
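
Given the /queue route above, publishing from outside funboost only requires POSTing a form field named msg that holds the JSON body. A minimal client sketch (the host and port are illustrative; the consumer listens on the port it was configured with):

    import json
    import requests

    # The handler runs json.loads(data['msg']) and submits it as the task body.
    resp = requests.post('http://127.0.0.1:6110/queue',
                         data={'msg': json.dumps({'a': 1, 'b': 2})})
    print(resp.text)  # the route replies "finish"
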
funboost/consumers/kafka_consumer.py CHANGED
@@ -3,16 +3,14 @@
 # @Time : 2022/8/8 0008 13:32
 import json
 # noinspection PyPackageRequirements
-from kafka import KafkaConsumer as OfficialKafkaConsumer, KafkaProducer, KafkaAdminClient
-# noinspection PyPackageRequirements
-from kafka.admin import NewTopic
-# noinspection PyPackageRequirements
-from kafka.errors import TopicAlreadyExistsError
+
 from funboost.constant import BrokerEnum
 from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.core.lazy_impoter import KafkaPythonImporter
 from funboost.funboost_config_deafult import BrokerConnConfig
 # from nb_log import get_logger
 from funboost.core.loggers import get_funboost_file_logger
+
 # LogManager('kafka').get_logger_and_add_handlers(30)
 get_funboost_file_logger('kafka', log_level_int=30)
 
@@ -36,18 +34,18 @@ class KafkaConsumer(AbstractConsumer):
 
     def _shedual_task(self):
         try:
-            admin_client = KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
-            admin_client.create_topics([NewTopic(self._queue_name, 10, 1)])
+            admin_client = KafkaPythonImporter().KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+            admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 1)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
 
-        self._producer = KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
-        consumer = OfficialKafkaConsumer(self._queue_name, bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS,
-                                         group_id=self.consumer_params.broker_exclusive_config["group_id"],
-                                         enable_auto_commit=True,
-                                         auto_offset_reset=self.consumer_params.broker_exclusive_config["auto_offset_reset"],
-                                         )
+        self._producer = KafkaPythonImporter().KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+        consumer = KafkaPythonImporter().OfficialKafkaConsumer(self._queue_name, bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS,
+                                                               group_id=self.consumer_params.broker_exclusive_config["group_id"],
+                                                               enable_auto_commit=True,
+                                                               auto_offset_reset=self.consumer_params.broker_exclusive_config["auto_offset_reset"],
+                                                               )
         # auto_offset_reset (str): A policy for resetting offsets on
         # OffsetOutOfRange errors: 'earliest' will move to the oldest
         # available message, 'latest' will move to the most recent. Any
funboost/consumers/kafka_consumer_manually_commit.py CHANGED
@@ -10,13 +10,10 @@ from collections import defaultdict, OrderedDict
 import time
 
 # noinspection PyPackageRequirements
-from kafka import KafkaProducer, KafkaAdminClient
+# pip install kafka-python==2.0.2
 
-# noinspection PyPackageRequirements
-from kafka.admin import NewTopic
-# noinspection PyPackageRequirements
-from kafka.errors import TopicAlreadyExistsError
 from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.core.lazy_impoter import KafkaPythonImporter
 from funboost.funboost_config_deafult import BrokerConnConfig
 from confluent_kafka.cimpl import TopicPartition
 from confluent_kafka import Consumer as ConfluentConsumer  # This package is hard to install on Windows; users of this broker must sort out installation themselves. Windows users need a C++ 14.0+ build environment.
@@ -39,13 +36,13 @@ class KafkaConsumerManuallyCommit(AbstractConsumer):
     def _shedual_task(self):
 
         try:
-            admin_client = KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
-            admin_client.create_topics([NewTopic(self._queue_name, 10, 1)])
+            admin_client = KafkaPythonImporter().KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+            admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 1)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
 
-        self._producer = KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+        self._producer = KafkaPythonImporter().KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
         # consumer config: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
         self._confluent_consumer = ConfluentConsumer({
             'bootstrap.servers': ','.join(BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS),
@@ -130,14 +127,14 @@ class SaslPlainKafkaConsumer(KafkaConsumerManuallyCommit):
     def _shedual_task(self):

         try:
-            admin_client = KafkaAdminClient(
+            admin_client = KafkaPythonImporter().KafkaAdminClient(
                 **BrokerConnConfig.KFFKA_SASL_CONFIG)
-            admin_client.create_topics([NewTopic(self._queue_name, 10, 1)])
+            admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 1)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
 
-        self._producer = KafkaProducer(
+        self._producer = KafkaPythonImporter().KafkaProducer(
             **BrokerConnConfig.KFFKA_SASL_CONFIG)
         # consumer config: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
         self._confluent_consumer = ConfluentConsumer({
funboost/consumers/mqtt_consumer.py CHANGED
@@ -5,8 +5,9 @@ import json
 # import time
 from funboost.constant import BrokerEnum
 from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.core.lazy_impoter import PahoMqttImporter
 from funboost.funboost_config_deafult import BrokerConnConfig
-import paho.mqtt.client as mqtt
+# import paho.mqtt.client as mqtt
 
 
 class MqttConsumer(AbstractConsumer):
@@ -23,7 +24,7 @@ class MqttConsumer(AbstractConsumer):
 
     # noinspection DuplicatedCode
     def _shedual_task(self):
-        client = mqtt.Client()
+        client = PahoMqttImporter().mqtt.Client()
         # client.username_pw_set('admin', password='public')
         client.on_connect = self._on_connect
         client.on_message = self._on_message
funboost/consumers/nats_consumer.py CHANGED
@@ -1,7 +1,8 @@
 import json
-from pynats import NATSClient, NATSMessage  # noqa
-from funboost.constant import BrokerEnum
+# from pynats import NATSClient, NATSMessage # noqa
+
 from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.core.lazy_impoter import NatsImporter
 from funboost.funboost_config_deafult import BrokerConnConfig
 
 
@@ -13,10 +14,10 @@ class NatsConsumer(AbstractConsumer):
 
     def _shedual_task(self):
         # print(88888888888888)
-        nats_client = NATSClient(BrokerConnConfig.NATS_URL, socket_timeout=600, socket_keepalive=True)
+        nats_client = NatsImporter().NATSClient(BrokerConnConfig.NATS_URL, socket_timeout=600, socket_keepalive=True)
         nats_client.connect()
 
-        def callback(msg: NATSMessage):
+        def callback(msg: NatsImporter().NATSMessage):
             # print(type(msg))
             # print(msg.reply)
             # print(f"Received a message with subject {msg.subject}: {msg.payload}")
funboost/consumers/nsq_consumer.py CHANGED
@@ -2,8 +2,10 @@
 # @Author : ydf
 # @Time : 2022/8/8 0008 13:32
 import json
-from gnsq import Consumer, Message
-from funboost.constant import BrokerEnum
+
+from funboost.core.lazy_impoter import GnsqImporter
+# from gnsq import Consumer, Message
+
 from funboost.funboost_config_deafult import BrokerConnConfig
 from funboost.consumers.base_consumer import AbstractConsumer
 # from nb_log import LogManager
@@ -19,11 +21,11 @@ class NsqConsumer(AbstractConsumer):
 
 
     def _shedual_task(self):
-        consumer = Consumer(self._queue_name, 'frame_channel', BrokerConnConfig.NSQD_TCP_ADDRESSES,
+        consumer = GnsqImporter().Consumer(self._queue_name, 'frame_channel', BrokerConnConfig.NSQD_TCP_ADDRESSES,
                             max_in_flight=self.consumer_params.concurrent_num, heartbeat_interval=60, timeout=600, )  # heartbeat_interval cannot be set to 600
 
         @consumer.on_message.connect
-        def handler(consumerx: Consumer, message: Message):
+        def handler(consumerx: GnsqImporter().Consumer, message: GnsqImporter().Message):
             # The first message cannot run concurrently; messages after the first can.
             self._print_message_get_from_broker('nsq', message.body.decode())
             # self.logger.debug(f'Message taken from the nsq topic [{self._queue_name}]: {message.body.decode()}')
funboost/consumers/zeromq_consumer.py CHANGED
@@ -4,10 +4,11 @@ import os
 import socket
 import json
 # import time
-import zmq
+# import zmq
 import multiprocessing
 from funboost.constant import BrokerEnum
 from funboost.consumers.base_consumer import AbstractConsumer
+from funboost.core.lazy_impoter import ZmqImporter
 # from nb_log import get_logger
 from funboost.core.loggers import get_funboost_file_logger
 
@@ -32,17 +33,17 @@ logger_zeromq_broker = get_funboost_file_logger('zeromq_broker')
 # noinspection PyUnresolvedReferences
 def start_broker(port_router: int, port_dealer: int):
     try:
-        context = zmq.Context()
+        context = ZmqImporter().zmq.Context()
         # noinspection PyUnresolvedReferences
-        frontend = context.socket(zmq.ROUTER)
-        backend = context.socket(zmq.DEALER)
+        frontend = context.socket(ZmqImporter().zmq.ROUTER)
+        backend = context.socket(ZmqImporter().zmq.DEALER)
         frontend.bind(f"tcp://*:{port_router}")
         backend.bind(f"tcp://*:{port_dealer}")
 
         # Initialize poll set
-        poller = zmq.Poller()
-        poller.register(frontend, zmq.POLLIN)
-        poller.register(backend, zmq.POLLIN)
+        poller = ZmqImporter().zmq.Poller()
+        poller.register(frontend, ZmqImporter().zmq.POLLIN)
+        poller.register(backend, ZmqImporter().zmq.POLLIN)
         logger_zeromq_broker.info(f'broker bound ports {port_router} {port_dealer} successfully')
 
         # Switch messages between sockets
@@ -50,11 +51,11 @@ def start_broker(port_router: int, port_dealer: int):
         while True:
             socks = dict(poller.poll())  # poller, receiving in a loop
 
-            if socks.get(frontend) == zmq.POLLIN:
+            if socks.get(frontend) == ZmqImporter().zmq.POLLIN:
                 message = frontend.recv_multipart()
                 backend.send_multipart(message)
 
-            if socks.get(backend) == zmq.POLLIN:
+            if socks.get(backend) == ZmqImporter().zmq.POLLIN:
                 message = backend.recv_multipart()
                 frontend.send_multipart(message)
     except BaseException as e:
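
The poller loop above hand-rolls a ROUTER/DEALER forwarder. For comparison only, pyzmq ships a built-in device that performs the same blocking two-way shuttling; a sketch with illustrative ports:

    import zmq

    context = zmq.Context()
    frontend = context.socket(zmq.ROUTER)
    backend = context.socket(zmq.DEALER)
    frontend.bind("tcp://*:5559")
    backend.bind("tcp://*:5560")
    zmq.proxy(frontend, backend)  # blocks, forwarding messages in both directions
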
@@ -87,9 +88,9 @@ class ZeroMqConsumer(AbstractConsumer):
     # noinspection DuplicatedCode
     def _shedual_task(self):
         self.start_broker_queue_name_as_port()
-        context = zmq.Context()
+        context = ZmqImporter().zmq.Context()
         # noinspection PyUnresolvedReferences
-        zsocket = context.socket(zmq.REP)
+        zsocket = context.socket(ZmqImporter().zmq.REP)
         zsocket.connect(f"tcp://localhost:{int(self._queue_name) + 1}")
 
         while True:
funboost/core/fabric_deploy_helper.py CHANGED
@@ -103,7 +103,7 @@ def fabric_deploy(booster: Booster, host, port, user, password,
     process_mark = f'funboost_fabric_mark__{queue_name}__{func_name}'
     conn = Connection(host, port=port, user=user, connect_kwargs={"password": password}, )
     kill_shell = f'''ps -aux|grep {process_mark}|grep -v grep|awk '{{print $2}}' |xargs kill -9'''
-    logger.warning(f'the command {kill_shell} kills the processes marked {process_mark}')
+    logger.warning(f'using the linux command {kill_shell} to kill the processes marked {process_mark}')
     # uploader.ssh.exec_command(kill_shell)
     conn.run(kill_shell, encoding='utf-8', warn=True)  # Don't want the warning prompt, lest it bothers users into thinking something went wrong; hence the paramiko package's ssh.exec_command above.
@@ -113,7 +113,7 @@ def fabric_deploy(booster: Booster, host, port, user, password,
     if not extra_shell_str2.endswith(';') and extra_shell_str != '':
         extra_shell_str2 += ';'
     shell_str = extra_shell_str2 + shell_str
-    logger.warning(f'using the statement {shell_str} to start task consumption on the remote machine {host}')
+    logger.warning(f'using the linux command {shell_str} to start task consumption on the remote machine {host}')
     conn.run(shell_str, encoding='utf-8', **invoke_runner_kwargs)
     # uploader.ssh.exec_command(shell_str)
 
@@ -125,6 +125,6 @@ def kill_all_remote_tasks(host, port, user, password):
     uploader = ParamikoFolderUploader(host, port, user, password, '', '')
     funboost_fabric_mark_all = 'funboost_fabric_mark__'
     kill_shell = f'''ps -aux|grep {funboost_fabric_mark_all}|grep -v grep|awk '{{print $2}}' |xargs kill -9'''
-    logger.warning(f'the command {kill_shell} kills the processes marked {funboost_fabric_mark_all}')
+    logger.warning(f'using the linux command {kill_shell} to kill the processes marked {funboost_fabric_mark_all}')
     uploader.ssh.exec_command(kill_shell)
     logger.warning(f'killed all processes marked {funboost_fabric_mark_all} on machine {host}')
funboost/core/funboost_config_getter.py ADDED
@@ -0,0 +1,7 @@
+def _try_get_user_funboost_common_config(funboost_common_conf_field:str):
+    try:
+        import funboost_config  # Before funboost's first start this file does not exist yet, and the config may be needed before it has been initialized.
+        return getattr(funboost_config.FunboostCommonConfig,funboost_common_conf_field)
+    except Exception as e:
+        print(e)
+        return None
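
loggers.py below now imports this helper from the new module rather than from helper_funs.py. It reads a single field from the user's funboost_config.py, degrading to None instead of raising when that file has not been generated yet. For instance:

    # Returns the configured value, or prints the import error and returns None
    # on a first run where funboost_config.py does not exist yet.
    tz = _try_get_user_funboost_common_config('TIMEZONE')
    print(tz)  # e.g. 'Asia/Shanghai', or None
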
funboost/core/funboost_time.py ADDED
@@ -0,0 +1,28 @@
+import pytz
+import time
+
+import datetime
+
+import typing
+
+from nb_time import NbTime
+from funboost.funboost_config_deafult import FunboostCommonConfig
+
+class FunboostTime(NbTime):
+    default_formatter = NbTime.FORMATTER_DATETIME_NO_ZONE
+    def get_time_zone_str(self,time_zone: typing.Union[str, datetime.tzinfo,None] = None):
+        return time_zone or self.default_time_zone or FunboostCommonConfig.TIMEZONE or self.get_localzone_name()
+
+
+
+if __name__ == '__main__':
+    print(NbTime())
+    for i in range(100000):
+        # print(generate_publish_time())
+        # print(generate_publish_time_format())
+        # generate_publish_time()
+        # generate_publish_time_format()
+
+        datetime.datetime.now(tz=pytz.timezone(FunboostCommonConfig.TIMEZONE)).strftime(NbTime.FORMATTER_DATETIME_NO_ZONE)
+        datetime.datetime.now(tz=pytz.timezone(FunboostCommonConfig.TIMEZONE)).timestamp()
+    print(NbTime())
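
FunboostTime only changes how the timezone is resolved: an explicit argument wins, then the instance default, then the user's FunboostCommonConfig.TIMEZONE, then the machine's local zone; the __main__ block is a micro-benchmark of the underlying datetime calls. A usage sketch, assuming the NbTime API as it is used elsewhere in this diff:

    t = FunboostTime()
    print(t.timestamp)  # float epoch seconds; MsgGenerater.generate_publish_time() rounds this to 4 places
    print(t.get_str())  # e.g. '2024-06-10 12:13:20'; used by generate_publish_time_format()
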
funboost/core/func_params_model.py CHANGED
@@ -123,7 +123,7 @@ class BoosterParams(BaseJsonAbleModel):
     Thanks to the very handy qps control of run frequency and the intelligently growing/shrinking thread pool, this framework suggests not bothering with the concurrency count and only caring about qps; the framework's concurrency adapts its own size, which is very powerful and convenient."""
     concurrent_mode: str = ConcurrentModeEnum.THREADING  # Concurrency mode; supports THREADING, GEVENT, EVENTLET, ASYNC and SINGLE_THREAD concurrency, and multi_process_consume stacks coroutine/thread concurrency on top of multiple processes, with explosive performance.
     concurrent_num: int = 50  # Concurrency count; the kind of concurrency is determined by concurrent_mode
-    specify_concurrent_pool: FunboostBaseConcurrentPool = None  # Use a specified thread/coroutine pool; multiple consumers can share one pool to save threads. When not None, threads_num has no effect
+    specify_concurrent_pool: typing.Optional[FunboostBaseConcurrentPool] = None  # Use a specified thread/coroutine pool; multiple consumers can share one pool to save threads. When not None, threads_num has no effect
     specify_async_loop: asyncio.AbstractEventLoop = None  # A designated asyncio event loop; only takes effect when the concurrency mode is async. Some packages, e.g. aiohttp, cannot have the request and the http client instantiated in two different loops, so a loop can be passed in.
 
     """qps:
@@ -159,7 +159,7 @@ class BoosterParams(BaseJsonAbleModel):
     msg_expire_senconds: typing.Union[float, int] = None  # Message expiry: messages published longer ago than this are discarded and not run. None means never discard
 
     do_task_filtering: bool = False  # Whether to filter/deduplicate on the function's input params.
-    task_filtering_expire_seconds: int = 0  # Expiry period of task filtering; 0 filters tasks permanently. E.g. with a filter expiry of 1800 seconds, a 1 + 2 task published 30 minutes ago still runs now, but if this task was published within the last 30 minutes, 1 + 2 is not run
+    task_filtering_expire_seconds: int = 0  # Expiry period of task filtering; 0 filters tasks permanently. E.g. with a filter expiry of 1800 seconds, a 1 + 2 task published 30 minutes ago still runs now, but if this task was executed within the last 30 minutes, 1 + 2 is not run
 
     function_result_status_persistance_conf: FunctionResultStatusPersistanceConfig = FunctionResultStatusPersistanceConfig(
         is_save_result=False, is_save_status=False, expire_seconds=7 * 24 * 3600, is_use_bulk_insert=False)  # Whether to save the function's input params, run result and run status to mongodb, for later parameter tracing, task statistics and web display; requires mongo.
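
Enabling the filter, as a sketch (the @boost decorator taking a BoosterParams instance is assumed from funboost's documented usage; the queue name is illustrative):

    from funboost import boost, BoosterParams

    # Identical params published twice within 1800 seconds run only once.
    @boost(BoosterParams(queue_name='filter_demo_queue',
                         do_task_filtering=True,
                         task_filtering_expire_seconds=1800))
    def add(a, b):
        return a + b
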
@@ -183,7 +183,7 @@ class BoosterParams(BaseJsonAbleModel):
     broker_exclusive_config: dict = {}  # A config that is not shared across broker kinds: each middleware's own settings, not compatible with every other one, because the framework supports 30 kinds of message queues, and a message queue is far more than the simple concept of a FIFO queue.
     # E.g. kafka supports consumer groups; rabbitmq likewise has its own unique concepts such as various ack mechanisms and complex routing; some middlewares natively support message priority and some do not. Each message queue has config params with their own meaning, which can be passed through here. The key-value pairs each middleware accepts are listed in the consumer class's BROKER_EXCLUSIVE_CONFIG_DEFAULT
 
-    should_check_publish_func_params: bool = True  # Whether to validate the message content at publish time, e.g. someone publishes to a function that only accepts the two params a and b but passes 2 positional params or a param name that does not exist; if you insist on writing the consuming function with *args, **kwargs, you need to turn off this publish-time check of function input params
+    should_check_publish_func_params: bool = True  # Whether to validate the message content at publish time, e.g. someone publishes to a function that only accepts the two params a and b but passes 2 positional params or a param name that does not exist; if you insist on writing the consuming function with *args, **kwargs, you need to turn off this publish-time check of function input params
 
     auto_generate_info: dict = {}  # Auto-generated info; users do not need to pass this themselves.
@@ -277,3 +277,4 @@ if __name__ == '__main__':
     # print(PriorityConsumingControlConfig().get_str_dict())
 
     print(BoosterParams(queue_name='3213', specify_concurrent_pool=FlexibleThreadPool(100)).json_pre())
+    print(PublisherParams.schema_json())
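
The added line works because the params classes are pydantic models, so schema_json() emits a JSON Schema of every publisher parameter. A quick way to inspect it, assuming pydantic v1's schema API:

    import json

    schema = json.loads(PublisherParams.schema_json())
    print(sorted(schema['properties']))  # all publisher parameter names
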
funboost/core/helper_funs.py CHANGED
@@ -2,6 +2,8 @@ import copy
 import time
 import uuid
 
+from funboost.core.funboost_time import FunboostTime
+
 
 def get_publish_time(paramsx: dict):
     """
@@ -37,13 +39,23 @@ def block_python_main_thread_exit():
 run_forever = block_python_main_thread_exit
 
 
-def _try_get_user_funboost_common_config(funboost_common_conf_field:str):
-    try:
-        import funboost_config  # Before funboost's first start this file does not exist yet, and the config may be needed before it has been initialized.
-        return getattr(funboost_config.FunboostCommonConfig,funboost_common_conf_field)
-    except Exception as e:
-        print(e)
-        return None
+class MsgGenerater:
+    @staticmethod
+    def generate_task_id(queue_name:str) -> str:
+        return f'{queue_name}_result:{uuid.uuid4()}'
+
+    @staticmethod
+    def generate_publish_time() -> float:
+        return round(FunboostTime().timestamp,4)
+
+    @staticmethod
+    def generate_publish_time_format() -> str:
+        return FunboostTime().get_str()
+
+    @classmethod
+    def generate_pulish_time_and_task_id(cls,queue_name:str,task_id=None):
+        extra_params = {'task_id': task_id or cls.generate_task_id(queue_name), 'publish_time': cls.generate_publish_time(),
+                        'publish_time_format': cls.generate_publish_time_format()}
+        return extra_params
+
 
-def generate_task_id(queue_name:str):
-    return f'{queue_name}_result:{uuid.uuid4()}'
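
With this class, the consumer-side auto-fill in base_consumer.py and any publisher-side caller share one generator. A sketch of the dict it produces (values are illustrative):

    extra = MsgGenerater.generate_pulish_time_and_task_id('test_queue')
    # {'task_id': 'test_queue_result:0b6de5e0-...',   # generate_task_id()
    #  'publish_time': 1718000000.1234,               # generate_publish_time()
    #  'publish_time_format': '2024-06-10 12:13:20'}  # generate_publish_time_format()
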
funboost/core/lazy_impoter.py CHANGED
@@ -2,8 +2,11 @@ import abc
 
 from funboost.utils.decorators import cached_method_result, singleton, SingletonBaseNew, SingletonBaseCustomInit
 
+"""
+Lazy imports:
+pip-install a broker's package only when it is actually used
+"""
 
-# @singleton  # inconvenient for code completion
 
 class LazyImpoter(SingletonBaseNew):
     """
@@ -79,20 +82,99 @@ class EventletImporter:
         self.patcher = patcher
         self.Timeout = Timeout
 
+
 @singleton
 class PeeweeImporter:
     def __init__(self):
-        '''pip install peewee == 3.17'''
+        """pip install peewee == 3.17"""
         from peewee import ModelSelect, Model, BigAutoField, CharField, DateTimeField, MySQLDatabase
         from playhouse.shortcuts import model_to_dict, dict_to_model
         self.ModelSelect = ModelSelect
         self.Model = Model
         self.BigAutoField = BigAutoField
         self.CharField = CharField
-        self.DateTimeField =DateTimeField
+        self.DateTimeField = DateTimeField
         self.MySQLDatabase = MySQLDatabase
         self.model_to_dict = model_to_dict
-        self.dict_to_model =dict_to_model
+        self.dict_to_model = dict_to_model
+
+
+@singleton
+class AioHttpImporter:
+
+    def __init__(self):
+        """pip install aiohttp==3.8.3"""
+        from aiohttp import web
+        from aiohttp.web_request import Request
+        self.web = web
+        self.Request = Request
+
+
+@singleton
+class NatsImporter:
+    def __init__(self):
+        """pip install nats-python """
+        from pynats import NATSClient, NATSMessage
+        self.NATSClient = NATSClient
+        self.NATSMessage = NATSMessage
+
+@singleton
+class GnsqImporter:
+    def __init__(self):
+        """pip install gnsq==1.0.1"""
+        from gnsq import Consumer, Message
+        from gnsq import Producer, NsqdHTTPClient
+        from gnsq.errors import NSQHttpError
+        self.Consumer = Consumer
+        self.Message = Message
+        self.Producer = Producer
+        self.NsqdHTTPClient = NsqdHTTPClient
+        self.NSQHttpError = NSQHttpError
+
+@singleton
+class ElasticsearchImporter:
+    def __init__(self):
+        """pip install elasticsearch """
+        from elasticsearch import helpers
+        self.helpers = helpers
+
+
+@singleton
+class PsutilImporter:
+    def __init__(self):
+        """pip install psutil"""
+        import psutil
+        self.psutil = psutil
+
+@singleton
+class PahoMqttImporter:
+    def __init__(self):
+        """pip install paho-mqtt"""
+        import paho.mqtt.client as mqtt
+        self.mqtt = mqtt
+
+@singleton
+class ZmqImporter:
+    def __init__(self):
+        """pip install zmq pyzmq"""
+        import zmq
+        self.zmq = zmq
+
+@singleton
+class KafkaPythonImporter:
+    def __init__(self):
+        """pip install kafka-python==2.0.2"""
+
+        from kafka import KafkaConsumer as OfficialKafkaConsumer, KafkaProducer, KafkaAdminClient
+        from kafka.admin import NewTopic
+        from kafka.errors import TopicAlreadyExistsError
+
+        self.OfficialKafkaConsumer = OfficialKafkaConsumer
+        self.KafkaProducer = KafkaProducer
+        self.KafkaAdminClient = KafkaAdminClient
+        self.NewTopic = NewTopic
+        self.TopicAlreadyExistsError = TopicAlreadyExistsError
+
 
 if __name__ == '__main__':
     for i in range(10000):
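
All the new Importer classes follow one pattern: @singleton makes the heavy broker import run once, on first use, and every later XxxImporter() call returns the cached instance, so call sites such as ZmqImporter().zmq.Context() cost only a lookup after the first call. A minimal self-contained sketch of the pattern (the decorator below is a stand-in for funboost.utils.decorators.singleton, not its actual implementation):

    def singleton(cls):
        instances = {}
        def get_instance(*args, **kwargs):
            if cls not in instances:
                instances[cls] = cls(*args, **kwargs)  # the import inside __init__ runs once
            return instances[cls]
        return get_instance

    @singleton
    class JsonImporter:
        def __init__(self):
            import json  # heavy broker imports go here; json is just a demo
            self.json = json

    assert JsonImporter() is JsonImporter()     # same cached instance
    print(JsonImporter().json.dumps({'a': 1}))  # used like the Importer classes above
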
funboost/core/loggers.py CHANGED
@@ -1,5 +1,5 @@
 import nb_log
-from funboost.core.helper_funs import _try_get_user_funboost_common_config
+from funboost.core.funboost_config_getter import _try_get_user_funboost_common_config
 
 # noinspection PyUnresolvedReferences
 from nb_log import get_logger, LoggerLevelSetterMixin, nb_log_config_default
funboost/function_result_web/app.py CHANGED
@@ -5,6 +5,9 @@
 import datetime
 import json
 
+"""
+pip install Flask flask_bootstrap flask_wtf wtforms flask_login
+"""
 from flask import render_template, Flask, request, url_for, jsonify, flash, redirect
 from flask_bootstrap import Bootstrap
 from flask_wtf import FlaskForm
funboost/function_result_web/functions.py CHANGED
@@ -5,7 +5,6 @@ import datetime
 import json
 from pprint import pprint
 import time
-from flask import jsonify
 import copy
 from funboost import nb_print
 from funboost.utils import time_util, decorators, LoggerMixin