funboost 44.0-py3-none-any.whl → 44.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of funboost might be problematic.
- funboost/__init__.py +2 -1
- funboost/consumers/base_consumer.py +11 -8
- funboost/consumers/http_consumer.py +9 -9
- funboost/consumers/kafka_consumer.py +12 -14
- funboost/consumers/kafka_consumer_manually_commit.py +10 -13
- funboost/consumers/mqtt_consumer.py +3 -2
- funboost/consumers/nats_consumer.py +5 -4
- funboost/consumers/nsq_consumer.py +6 -4
- funboost/consumers/zeromq_consumer.py +12 -11
- funboost/core/fabric_deploy_helper.py +3 -3
- funboost/core/funboost_config_getter.py +7 -0
- funboost/core/funboost_time.py +28 -0
- funboost/core/func_params_model.py +4 -3
- funboost/core/helper_funs.py +21 -9
- funboost/core/lazy_impoter.py +86 -4
- funboost/core/loggers.py +1 -1
- funboost/function_result_web/app.py +3 -0
- funboost/function_result_web/functions.py +0 -1
- funboost/publishers/base_publisher.py +4 -7
- funboost/publishers/confluent_kafka_publisher.py +9 -12
- funboost/publishers/kafka_publisher.py +5 -11
- funboost/publishers/mqtt_publisher.py +3 -2
- funboost/publishers/nats_publisher.py +2 -2
- funboost/publishers/nsq_publisher.py +4 -6
- funboost/publishers/zeromq_publisher.py +3 -3
- funboost/set_frame_config.py +1 -1
- funboost/utils/bulk_operation.py +3 -2
- funboost/utils/decorators.py +0 -2
- funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-39.pyc +0 -0
- funboost/utils/resource_monitoring.py +10 -9
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/METADATA +573 -558
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/RECORD +36 -35
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/WHEEL +1 -1
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/entry_points.txt +1 -0
- funboost/core/try_get_user_funboost_common_config.py +0 -0
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/LICENSE +0 -0
- {funboost-44.0.dist-info → funboost-44.2.dist-info}/top_level.txt +0 -0
funboost/publishers/base_publisher.py
CHANGED

@@ -9,17 +9,15 @@ import json
 import logging
 import multiprocessing
 import threading
-import uuid
 import time
 import typing
 from functools import wraps
 from threading import Lock
-import datetime
 import amqpstorm

 import nb_log
 from funboost.core.func_params_model import PublisherParams, PriorityConsumingControlConfig
-from funboost.core.helper_funs import generate_task_id
+from funboost.core.helper_funs import MsgGenerater
 from funboost.core.loggers import develop_logger

 from pikav1.exceptions import AMQPError as PikaAMQPError
@@ -28,7 +26,7 @@ from pikav1.exceptions import AMQPError as PikaAMQPError
 from funboost.core.loggers import LoggerLevelSetterMixin, FunboostFileLoggerMixin, get_logger
 from funboost.core.msg_result_getter import AsyncResult, AioAsyncResult
 from funboost.core.task_id_logger import TaskIdLogger
-from funboost.utils import decorators
+from funboost.utils import decorators
 from funboost.funboost_config_deafult import BrokerConnConfig, FunboostCommonConfig

 RedisAsyncResult = AsyncResult  # 别名
@@ -191,9 +189,8 @@ class AbstractPublisher(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
         raw_extra = msg['extra']
         if self.publish_params_checker and self.publisher_params.should_check_publish_func_params:
             self.publish_params_checker.check_params(msg_function_kw)
-        task_id = task_id or generate_task_id(self._queue_name)
-        extra_params = …
-                        'publish_time_format': time.strftime('%Y-%m-%d %H:%M:%S')}
+        task_id = task_id or MsgGenerater.generate_task_id(self._queue_name)
+        extra_params = MsgGenerater.generate_pulish_time_and_task_id(self._queue_name, task_id=task_id)
         if priority_control_config:
             extra_params.update(priority_control_config.dict(exclude_none=True))
         extra_params.update(raw_extra)
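The hunk above is the core publisher-side change in this release: the free functions that stamped each message with a task id and publish time are consolidated into a MsgGenerater class in funboost/core/helper_funs.py (+21 -9 in the file list). The real implementation is not visible in this diff; the following is a minimal sketch of the interface the hunk relies on, in which the 'publish_time' key and the uuid-based id format are assumptions, while the method names, the queue_name/task_id parameters, and the 'publish_time_format' string come from the diff itself.

import time
import uuid


class MsgGenerater:
    """Sketch of a message-metadata helper; not the actual funboost code."""

    @staticmethod
    def generate_task_id(queue_name: str) -> str:
        # Assumption: prefix the uuid with the queue name so a task id
        # reveals which queue it belongs to.
        return f'{queue_name}_result:{uuid.uuid4()}'

    @classmethod
    def generate_pulish_time_and_task_id(cls, queue_name: str, task_id: str = None) -> dict:
        # Method name spelled as in the diff. Returns the extra-params dict
        # the publisher merges into each outgoing message.
        return {
            'task_id': task_id or cls.generate_task_id(queue_name),
            'publish_time': round(time.time(), 4),  # assumed key
            'publish_time_format': time.strftime('%Y-%m-%d %H:%M:%S'),
        }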
funboost/publishers/confluent_kafka_publisher.py
CHANGED

@@ -4,6 +4,8 @@

 import os

+from funboost.core.lazy_impoter import KafkaPythonImporter
+
 if os.name == 'nt':
     """
     为了保险起见,这样做一下,设置一下path,否则anaconda安装的python可能出现 ImportError: DLL load failed while importing cimpl: 找不到指定的模块。
@@ -24,12 +26,7 @@ if os.name == 'nt':

 import atexit
 import time
-
-from kafka import KafkaProducer, KafkaAdminClient
-# noinspection PyPackageRequirements
-from kafka.admin import NewTopic
-# noinspection PyPackageRequirements
-from kafka.errors import TopicAlreadyExistsError
+
 from confluent_kafka import Producer as ConfluentProducer
 from funboost.funboost_config_deafult import BrokerConnConfig
 from funboost.publishers.base_publisher import AbstractPublisher
@@ -45,10 +42,10 @@ class ConfluentKafkaPublisher(AbstractPublisher, ):

         # self._producer = KafkaProducer(bootstrap_servers=funboost_config_deafult.KAFKA_BOOTSTRAP_SERVERS)
         try:
-            admin_client = KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
-            admin_client.create_topics([NewTopic(self._queue_name, 10, 1)])
+            admin_client = KafkaPythonImporter().KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+            admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 1)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
         except BaseException as e:
             self.logger.exception(e)
@@ -92,10 +89,10 @@ class SaslPlainKafkaPublisher(ConfluentKafkaPublisher):
     def custom_init(self):
         # self._producer = KafkaProducer(bootstrap_servers=funboost_config_deafult.KAFKA_BOOTSTRAP_SERVERS)
         try:
-            admin_client = KafkaAdminClient(**BrokerConnConfig.KFFKA_SASL_CONFIG)
-            admin_client.create_topics([NewTopic(self._queue_name, 10, 1)])
+            admin_client = KafkaPythonImporter().KafkaAdminClient(**BrokerConnConfig.KFFKA_SASL_CONFIG)
+            admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 1)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
         except BaseException as e:
             self.logger.exception(e)
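Both admin snippets above pre-create the queue's topic with ten partitions and a replication factor of one, swallowing TopicAlreadyExistsError so restarts are idempotent. For reference, the same call with kafka-python outside funboost; the broker address and topic name below are placeholders:

from kafka import KafkaAdminClient
from kafka.admin import NewTopic
from kafka.errors import TopicAlreadyExistsError

admin = KafkaAdminClient(bootstrap_servers='localhost:9092')  # placeholder address
try:
    # Positional args match the diff: name, num_partitions=10, replication_factor=1.
    admin.create_topics([NewTopic('my_queue', 10, 1)])
except TopicAlreadyExistsError:
    pass  # topic survives restarts; creating it again is a no-op
finally:
    admin.close()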
funboost/publishers/kafka_publisher.py
CHANGED

@@ -5,13 +5,7 @@
 # noinspection PyPackageRequirements
 import atexit

-
-from kafka import KafkaProducer, KafkaAdminClient
-# noinspection PyPackageRequirements
-from kafka.admin import NewTopic
-# noinspection PyPackageRequirements
-from kafka.errors import TopicAlreadyExistsError
-
+from funboost.core.lazy_impoter import KafkaPythonImporter
 from funboost.funboost_config_deafult import BrokerConnConfig
 from funboost.publishers.base_publisher import AbstractPublisher

@@ -23,12 +17,12 @@ class KafkaPublisher(AbstractPublisher, ):

     # noinspection PyAttributeOutsideInit
     def custom_init(self):
-        self._producer = KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
-        self._admin_client = KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+        self._producer = KafkaPythonImporter().KafkaProducer(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
+        self._admin_client = KafkaPythonImporter().KafkaAdminClient(bootstrap_servers=BrokerConnConfig.KAFKA_BOOTSTRAP_SERVERS)
         try:
-            self._admin_client.create_topics([NewTopic(self._queue_name, 10, 2)])
+            self._admin_client.create_topics([KafkaPythonImporter().NewTopic(self._queue_name, 10, 2)])
             # admin_client.create_partitions({self._queue_name: NewPartitions(total_count=16)})
-        except TopicAlreadyExistsError:
+        except KafkaPythonImporter().TopicAlreadyExistsError:
             pass
         except BaseException as e:
             self.logger.exception(e)
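Every publisher hunk in this release follows the same mechanical rewrite: module-level third-party imports are deleted and each use goes through an importer object from funboost/core/lazy_impoter.py (which grows by 86 lines in the file list), so kafka-python, paho-mqtt, gnsq, pyzmq, elasticsearch and psutil only need to be installed when the corresponding broker or feature is actually used. The real lazy_impoter.py is not shown in this diff; below is a minimal sketch of one way such an importer can work, assuming a cached-singleton design (class names match the diff, everything else is illustrative):

import threading


class LazyImporterBase:
    """Sketch: subclasses import third-party modules on first instantiation
    and cache the resolved objects on a shared per-class instance."""
    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # One shared instance per importer class, so repeated calls like
        # KafkaPythonImporter().KafkaProducer stay cheap after the first import.
        with cls._lock:
            if cls._instance is None:
                inst = super().__new__(cls)
                inst._do_import()
                cls._instance = inst
        return cls._instance

    def _do_import(self):
        raise NotImplementedError


class KafkaPythonImporter(LazyImporterBase):
    def _do_import(self):
        # The heavy imports happen here, on first use, not at funboost import time.
        from kafka import KafkaProducer, KafkaAdminClient
        from kafka.admin import NewTopic
        from kafka.errors import TopicAlreadyExistsError
        self.KafkaProducer = KafkaProducer
        self.KafkaAdminClient = KafkaAdminClient
        self.NewTopic = NewTopic
        self.TopicAlreadyExistsError = TopicAlreadyExistsError

Under this design, KafkaPythonImporter().KafkaProducer(...) behaves exactly like the direct import did, at the cost of one attribute lookup per call.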
funboost/publishers/mqtt_publisher.py
CHANGED

@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # @Author : ydf
 # @Time : 2022/8/8 0008 12:12
+from funboost.core.lazy_impoter import PahoMqttImporter
 from funboost.publishers.base_publisher import AbstractPublisher
 from funboost.funboost_config_deafult import BrokerConnConfig

@@ -53,7 +54,7 @@ client.loop_forever() # 保持连接
 著作权归作者所有。商业转载请联系作者获得授权,非商业转载请注明出处。
 """

-import paho.mqtt.client as mqtt
+# import paho.mqtt.client as mqtt


 # def on_connect(client, userdata, flags, rc):
@@ -71,7 +72,7 @@ class MqttPublisher(AbstractPublisher, ):

     # noinspection PyAttributeOutsideInit
     def custom_init(self):
-        client = mqtt.Client()
+        client = PahoMqttImporter().mqtt.Client()
         # client.username_pw_set('admin', password='public')
         client.on_connect = self._on_connect
         client.on_socket_close = self._on_socket_close
funboost/publishers/nats_publisher.py
CHANGED

@@ -1,4 +1,4 @@
-from …
+from funboost.core.lazy_impoter import NatsImporter
 from funboost.publishers.base_publisher import AbstractPublisher
 from funboost.funboost_config_deafult import BrokerConnConfig

@@ -10,7 +10,7 @@ class NatsPublisher(AbstractPublisher, ):

     # noinspection PyAttributeOutsideInit
     def custom_init(self):
-        self.nats_client = NATSClient(BrokerConnConfig.NATS_URL)
+        self.nats_client = NatsImporter().NATSClient(BrokerConnConfig.NATS_URL)
         self.nats_client.connect()

     def concrete_realization_of_publish(self, msg):
funboost/publishers/nsq_publisher.py
CHANGED

@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
 # @Author : ydf
 # @Time : 2022/8/19 0008 12:12
-
-from gnsq import Producer, NsqdHTTPClient
-from gnsq.errors import NSQHttpError
+from funboost.core.lazy_impoter import GnsqImporter
 from funboost.publishers.base_publisher import AbstractPublisher
 from funboost.funboost_config_deafult import BrokerConnConfig

@@ -15,8 +13,8 @@ class NsqPublisher(AbstractPublisher, ):

     # noinspection PyAttributeOutsideInit
     def custom_init(self):
-        self._nsqd_cleint = NsqdHTTPClient(BrokerConnConfig.NSQD_HTTP_CLIENT_HOST, BrokerConnConfig.NSQD_HTTP_CLIENT_PORT)
-        self._producer = Producer(BrokerConnConfig.NSQD_TCP_ADDRESSES)
+        self._nsqd_cleint = GnsqImporter().NsqdHTTPClient(BrokerConnConfig.NSQD_HTTP_CLIENT_HOST, BrokerConnConfig.NSQD_HTTP_CLIENT_PORT)
+        self._producer = GnsqImporter().Producer(BrokerConnConfig.NSQD_TCP_ADDRESSES)
         self._producer.start()

     def concrete_realization_of_publish(self, msg):
@@ -26,7 +24,7 @@ class NsqPublisher(AbstractPublisher, ):
     def clear(self):
         try:
             self._nsqd_cleint.empty_topic(self._queue_name)
-        except NSQHttpError as e:
+        except GnsqImporter().NSQHttpError as e:
             self.logger.exception(e)  # 不能清除一个不存在的topoc会报错,和其他消息队列中间件不同。
         self.logger.warning(f'清除 {self._queue_name} topic中的消息成功')

funboost/publishers/zeromq_publisher.py
CHANGED

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # @Author : ydf
-import zmq
+from funboost.core.lazy_impoter import ZmqImporter
 from funboost.publishers.base_publisher import AbstractPublisher


@@ -10,8 +10,8 @@ class ZeroMqPublisher(AbstractPublisher):
     zeromq 中间件的发布者,zeromq基于socket代码,不会持久化,且不需要安装软件。
     """
     def custom_init(self):
-        context = zmq.Context()
-        socket = context.socket(zmq.REQ)
+        context = ZmqImporter().zmq.Context()
+        socket = context.socket(ZmqImporter().zmq.REQ)
         socket.connect(f"tcp://localhost:{int(self._queue_name)}")
         self.socket = socket
         self.logger.warning('框架使用 zeromq 中间件方式,必须先启动消费者(消费者会顺便启动broker) ,只有启动了服务端才能发布任务')
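The warning in the last line above says, roughly: with the zeromq middleware the consumer must be started first (the consumer also starts the broker side); tasks can only be published once that server side is running. The reason is visible in the hunk: the publisher connects a REQ socket to tcp://localhost:<queue_name>, and a REQ socket blocks until a REP peer bound to that port answers. A standalone pyzmq sketch of that handshake, with a placeholder port and payload:

import zmq

context = zmq.Context()
socket = context.socket(zmq.REQ)
# funboost derives the port from the queue name: tcp://localhost:{int(queue_name)}.
socket.connect('tcp://localhost:5555')  # placeholder port

# send/recv on a REQ socket is a strict request-reply cycle: recv blocks until
# the consumer's REP socket (the bound side) replies, so it must already be up.
socket.send_string('{"a": 1}')
print(socket.recv_string())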
funboost/set_frame_config.py
CHANGED
@@ -12,7 +12,7 @@ import json
 from pathlib import Path
 from shutil import copyfile

-from funboost.core.…
+from funboost.core.funboost_config_getter import _try_get_user_funboost_common_config
 from funboost.core.loggers import flogger, get_funboost_file_logger, logger_prompt
 from nb_log import nb_print, stderr_write, stdout_write
 from nb_log.monkey_print import is_main_process, only_print_on_main_process
funboost/utils/bulk_operation.py
CHANGED
@@ -9,7 +9,7 @@
 import atexit
 import re
 import os
-from elasticsearch import helpers
+# from elasticsearch import helpers
 from threading import Thread
 from typing import Union
 import abc
@@ -20,6 +20,7 @@ import unittest
 from pymongo import UpdateOne, InsertOne, UpdateMany, collection, MongoClient
 import redis

+from funboost.core.lazy_impoter import ElasticsearchImporter
 from funboost.utils.redis_manager import RedisMixin
 from funboost.utils.time_util import DatetimeConverter
 from funboost.utils import LoggerMixin, decorators
@@ -153,7 +154,7 @@ class ElasticBulkHelper(BaseBulkHelper):
                 break
         if request_list:
             # self.base_object.bulk_write(request_list, ordered=False)
-            helpers.bulk(self.base_object, request_list)
+            ElasticsearchImporter().helpers.bulk(self.base_object, request_list)
             if self._is_print_log:
                 self.logger.info(f'【{self.base_object}】 批量插入的任务数量是 {count} 消耗的时间是 {round(time.time() - t_start, 6)}')
             self._current_time = time.time()
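ElasticBulkHelper flushes its buffered requests through elasticsearch-py's helpers.bulk, now resolved via ElasticsearchImporter instead of a module-level import. For reference, the underlying call outside funboost, with placeholder host, index name, and documents:

from elasticsearch import Elasticsearch, helpers

es = Elasticsearch('http://localhost:9200')  # placeholder host

# Each action names its target index and carries the document body.
actions = [{'_index': 'demo', '_id': i, '_source': {'value': i}} for i in range(100)]

# Returns (number of successful actions, list of errors).
ok_count, errors = helpers.bulk(es, actions)
print(ok_count, errors)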
funboost/utils/decorators.py
CHANGED
funboost/utils/dependency_packages_in_pythonpath/func_timeout/__pycache__/dafunc.cpython-39.pyc
CHANGED
Binary file
|
@@ -7,7 +7,8 @@ import socket
|
|
|
7
7
|
import sys
|
|
8
8
|
import threading
|
|
9
9
|
import time
|
|
10
|
-
|
|
10
|
+
|
|
11
|
+
from funboost.core.lazy_impoter import PsutilImporter
|
|
11
12
|
from funboost.utils import LoggerLevelSetterMixin, LoggerMixin, decorators
|
|
12
13
|
from funboost.utils.mongo_util import MongoMixin
|
|
13
14
|
|
|
@@ -59,10 +60,10 @@ print(psutil.virtual_memory())

 class ResourceMonitor(LoggerMixin, LoggerLevelSetterMixin, MongoMixin):
     # ResourceMonitor(is_save_info_to_mongo=True).set_log_level(20).start_build_info_loop_on_daemon_thread(60)
-    cpu_count = psutil.cpu_count()
+    cpu_count = PsutilImporter().psutil.cpu_count()
     host_name = socket.gethostname()

-    def __init__(self, process=psutil.Process(), is_save_info_to_mongo=False, mongo_col='default'):
+    def __init__(self, process=PsutilImporter().psutil.Process(), is_save_info_to_mongo=False, mongo_col='default'):
         self.process = process
         self.logger.setLevel(20)
         self.all_info = {}
@@ -84,22 +85,22 @@ class ResourceMonitor(LoggerMixin, LoggerLevelSetterMixin, MongoMixin):
         return result

     def get_os_cpu_percpu(self):
-        result = psutil.cpu_percent(1, percpu=True)
+        result = PsutilImporter().psutil.cpu_percent(1, percpu=True)
         self.logger.debug(result)
         return result

     def get_os_cpu_totalcpu(self):
-        result = round(psutil.cpu_percent(1, percpu=False) * self.cpu_count, 2)
+        result = round(PsutilImporter().psutil.cpu_percent(1, percpu=False) * self.cpu_count, 2)
         self.logger.debug(result)
         return result

     def get_os_cpu_avaragecpu(self):
-        result = psutil.cpu_percent(1, percpu=False)
+        result = PsutilImporter().psutil.cpu_percent(1, percpu=False)
         self.logger.debug(result)
         return result

     def get_os_virtual_memory(self) -> dict:
-        memory_tuple = psutil.virtual_memory()
+        memory_tuple = PsutilImporter().psutil.virtual_memory()
         self.logger.debug(memory_tuple)
         return {
             'total': self.divide_1m(memory_tuple[0]),
@@ -108,9 +109,9 @@ class ResourceMonitor(LoggerMixin, LoggerLevelSetterMixin, MongoMixin):
         }

     def get_os_net_info(self):
-        result1 = psutil.net_io_counters(pernic=False)
+        result1 = PsutilImporter().psutil.net_io_counters(pernic=False)
         time.sleep(1)
-        result2 = psutil.net_io_counters(pernic=False)
+        result2 = PsutilImporter().psutil.net_io_counters(pernic=False)
         speed_dict = dict()
         speed_dict['up_speed'] = self.divide_1m(result2[0] - result1[0])
         speed_dict['down_speed'] = self.divide_1m(result2[1] - result1[1])
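get_os_net_info measures throughput by sampling psutil's cumulative counters twice, one second apart, and passing the byte deltas to divide_1m. The same measurement with plain psutil, assuming divide_1m converts bytes to megabytes (1024 * 1024):

import time

import psutil

# Two samples of the cumulative byte counters, one second apart.
before = psutil.net_io_counters(pernic=False)
time.sleep(1)
after = psutil.net_io_counters(pernic=False)

# bytes_sent and bytes_recv are indexes 0 and 1 in the diff above;
# the one-second deltas give MB/s.
up_speed_mb = (after.bytes_sent - before.bytes_sent) / (1024 * 1024)
down_speed_mb = (after.bytes_recv - before.bytes_recv) / (1024 * 1024)
print(f'up: {up_speed_mb:.3f} MB/s, down: {down_speed_mb:.3f} MB/s')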