funboost 49.5__py3-none-any.whl → 49.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of funboost might be problematic. Click here for more details.

Files changed (37)
  1. funboost/__init__.py +1 -1
  2. funboost/beggar_version_implementation/beggar_redis_consumer.py +3 -1
  3. funboost/constant.py +39 -3
  4. funboost/consumers/base_consumer.py +34 -7
  5. funboost/consumers/celery_consumer.py +1 -0
  6. funboost/consumers/empty_consumer.py +12 -1
  7. funboost/consumers/faststream_consumer.py +1 -1
  8. funboost/consumers/http_consumer.py +12 -7
  9. funboost/consumers/kafka_consumer_manually_commit.py +0 -2
  10. funboost/consumers/kombu_consumer.py +0 -50
  11. funboost/consumers/tcp_consumer.py +11 -10
  12. funboost/consumers/udp_consumer.py +9 -6
  13. funboost/consumers/zeromq_consumer.py +18 -11
  14. funboost/core/exceptions.py +7 -0
  15. funboost/core/func_params_model.py +16 -7
  16. funboost/core/function_result_status_saver.py +15 -0
  17. funboost/core/msg_result_getter.py +51 -1
  18. funboost/core/serialization.py +28 -1
  19. funboost/factories/consumer_factory.py +1 -1
  20. funboost/factories/publisher_factotry.py +1 -1
  21. funboost/funboost_config_deafult.py +3 -2
  22. funboost/function_result_web/__pycache__/app.cpython-39.pyc +0 -0
  23. funboost/publishers/base_publisher.py +16 -2
  24. funboost/publishers/http_publisher.py +7 -1
  25. funboost/publishers/tcp_publisher.py +10 -8
  26. funboost/publishers/udp_publisher.py +8 -6
  27. funboost/publishers/zeromq_publisher.py +5 -1
  28. funboost/timing_job/apscheduler_use_redis_store.py +18 -4
  29. funboost/timing_job/timing_push.py +3 -1
  30. funboost/utils/ctrl_c_end.py +1 -1
  31. funboost/utils/redis_manager.py +6 -4
  32. {funboost-49.5.dist-info → funboost-49.7.dist-info}/METADATA +168 -173
  33. {funboost-49.5.dist-info → funboost-49.7.dist-info}/RECORD +37 -37
  34. {funboost-49.5.dist-info → funboost-49.7.dist-info}/WHEEL +1 -1
  35. {funboost-49.5.dist-info → funboost-49.7.dist-info}/LICENSE +0 -0
  36. {funboost-49.5.dist-info → funboost-49.7.dist-info}/entry_points.txt +0 -0
  37. {funboost-49.5.dist-info → funboost-49.7.dist-info}/top_level.txt +0 -0
@@ -12,13 +12,29 @@ from funboost.utils.redis_manager import RedisMixin
12
12
  from funboost.utils.redis_manager import AioRedisMixin
13
13
  from funboost.core.serialization import Serialization
14
14
 
15
+ from funboost.core.function_result_status_saver import FunctionResultStatus
16
+
15
17
  class HasNotAsyncResult(Exception):
16
18
  pass
17
19
 
18
20
 
19
21
  NO_RESULT = 'no_result'
20
22
 
21
-
23
+ def _judge_rpc_function_result_status_obj(status_and_result_obj:FunctionResultStatus,raise_exception:bool):
24
+ if status_and_result_obj is None:
25
+ raise FunboostWaitRpcResultTimeout(f'等待 {status_and_result_obj.task_id} rpc结果超过了指定时间')
26
+ if status_and_result_obj.success is True:
27
+ return status_and_result_obj
28
+ else:
29
+ raw_erorr = status_and_result_obj.exception
30
+ if status_and_result_obj.exception_type == 'FunboostRpcResultError':
31
+ raw_erorr = json.loads(status_and_result_obj.exception_msg) # 使canvas链式报错json显示更美观
32
+ error_msg_dict = {'task_id':status_and_result_obj.task_id,'raw_error':raw_erorr}
33
+ if raise_exception:
34
+ raise FunboostRpcResultError(json.dumps(error_msg_dict,indent=4,ensure_ascii=False))
35
+ else:
36
+ status_and_result_obj.rpc_chain_error_msg_dict = error_msg_dict
37
+ return status_and_result_obj
22
38
  class AsyncResult(RedisMixin):
23
39
  default_callback_run_executor = FlexibleThreadPoolMinWorkers0(200,work_queue_maxsize=50)
24
40
 
@@ -63,6 +79,14 @@ class AsyncResult(RedisMixin):
63
79
  return self._status_and_result
64
80
  return None
65
81
  return self._status_and_result
82
+
83
+ @property
84
+ def status_and_result_obj(self) -> FunctionResultStatus:
85
+ """这个是为了比字典有更好的ide代码补全效果"""
86
+ if self.status_and_result is not None:
87
+ return FunctionResultStatus.parse_status_and_result_to_obj(self.status_and_result)
88
+
89
+ rpc_data =status_and_result_obj
66
90
 
67
91
  def get(self):
68
92
  # print(self.status_and_result)
@@ -104,6 +128,14 @@ class AsyncResult(RedisMixin):
104
128
  async_result.set_callback(show_result) # 使用回调函数在线程池中并发的运行函数结果
105
129
  '''
106
130
  self.callback_run_executor.submit(self._run_callback_func, callback_func)
131
+
132
+ def wait_rpc_data_or_raise(self,raise_exception:bool=True)->FunctionResultStatus:
133
+ return _judge_rpc_function_result_status_obj(self.status_and_result_obj,raise_exception)
134
+
135
+ @classmethod
136
+ def batch_wait_rpc_data_or_raise(cls,r_list:typing.List['AsyncResult'],raise_exception:bool=True)->typing.List[FunctionResultStatus]:
137
+ return [ _judge_rpc_function_result_status_obj(r.status_and_result_obj,raise_exception)
138
+ for r in r_list]
107
139
 
108
140
 
109
141
  class AioAsyncResult(AioRedisMixin):
@@ -169,6 +201,14 @@ if __name__ == '__main__':
169
201
  return None
170
202
  return self._status_and_result
171
203
 
204
+ @property
205
+ async def status_and_result_obj(self) -> FunctionResultStatus:
206
+ """这个是为了比字典有更好的ide代码补全效果"""
207
+ sr = await self.status_and_result
208
+ if sr is not None:
209
+ return FunctionResultStatus.parse_status_and_result_to_obj(sr)
210
+
211
+ rpc_data =status_and_result_obj
172
212
  async def get(self):
173
213
  # print(self.status_and_result)
174
214
  if (await self.status_and_result) is not None:
@@ -192,6 +232,16 @@ if __name__ == '__main__':
192
232
  async def set_callback(self, aio_callback_func: typing.Callable):
193
233
  asyncio.create_task(self._run_callback_func(callback_func=aio_callback_func))
194
234
 
235
+ async def wait_rpc_data_or_raise(self,raise_exception:bool=True)->FunctionResultStatus:
236
+ return _judge_rpc_function_result_status_obj(await self.status_and_result_obj,raise_exception)
237
+
238
+ @classmethod
239
+ async def batch_wait_rpc_data_or_raise(cls,r_list:typing.List['AioAsyncResult'],raise_exception:bool=True)->typing.List[FunctionResultStatus]:
240
+ return [ _judge_rpc_function_result_status_obj(await r.status_and_result_obj,raise_exception)
241
+ for r in r_list]
242
+
243
+
244
+
195
245
 
196
246
  class ResultFromMongo(MongoMixin):
197
247
  """
@@ -1,6 +1,9 @@
1
1
  import typing
2
-
2
+ import json
3
3
  import orjson
4
+ import pickle
5
+ import ast
6
+
4
7
  class Serialization:
5
8
  @staticmethod
6
9
  def to_json_str(dic:typing.Union[dict,str]):
@@ -14,3 +17,27 @@ class Serialization:
14
17
  if isinstance(strx,dict):
15
18
  return strx
16
19
  return orjson.loads(strx)
20
+
21
+ @staticmethod
22
+ def find_can_not_json_serializable_keys(dic:dict)->typing.List[str]:
23
+ can_not_json_serializable_keys = []
24
+ dic = Serialization.to_dict(dic)
25
+ for k,v in dic.items():
26
+ if not isinstance(v,str):
27
+ try:
28
+ json.dumps(v)
29
+ except:
30
+ can_not_json_serializable_keys.append(k)
31
+ return can_not_json_serializable_keys
32
+
33
+
34
+ class PickleHelper:
35
+ @staticmethod
36
+ def to_str(obj_x:typing.Any):
37
+ return str(pickle.dumps(obj_x)) # 对象pickle,转成字符串
38
+
39
+ @staticmethod
40
+ def to_obj(str_x:str):
41
+ return pickle.loads(ast.literal_eval(str_x)) # 不是从字节转成对象,是从字符串转,所以需要这样.
42
+
43
+
@@ -19,7 +19,7 @@ def get_consumer(boost_params: BoosterParams) -> AbstractConsumer:
19
19
  regist_to_funboost(boost_params.broker_kind) # 动态注册中间件到框架是为了延迟导入,用户没安装不需要的第三方包不报错。
20
20
 
21
21
  if boost_params.broker_kind not in broker_kind__publsiher_consumer_type_map:
22
- raise ValueError(f'设置的中间件种类数字不正确,你设置的值是 {boost_params.broker_kind} ')
22
+ raise ValueError(f'设置的中间件种类不正确,你设置的值是 {boost_params.broker_kind} ')
23
23
  consumer_cls = broker_kind__publsiher_consumer_type_map[boost_params.broker_kind][1]
24
24
  if not boost_params.consumer_override_cls:
25
25
  return consumer_cls(boost_params)
@@ -31,7 +31,7 @@ def get_publisher(publisher_params: PublisherParams) -> AbstractPublisher:
31
31
  broker_kind = publisher_params.broker_kind
32
32
  regist_to_funboost(broker_kind) # 动态注册中间件到框架是为了延迟导入,用户没安装不需要的第三方包不报错。
33
33
  if broker_kind not in broker_kind__publsiher_consumer_type_map:
34
- raise ValueError(f'设置的中间件种类数字不正确,你设置的值是 {broker_kind} ')
34
+ raise ValueError(f'设置的中间件种类不正确,你设置的值是 {broker_kind} ')
35
35
  publisher_cls = broker_kind__publsiher_consumer_type_map[broker_kind][0]
36
36
  if not publisher_params.publisher_override_cls:
37
37
  return publisher_cls(publisher_params)
@@ -44,7 +44,8 @@ class BrokerConnConfig(DataClassBase):
44
44
  REDIS_PORT = 6379
45
45
  REDIS_DB = 7 # redis消息队列所在db,请不要在这个db放太多其他键值对,以及方便你自己可视化查看你的redis db,框架里面有的功能会scan扫描unacked的键名,使用单独的db。
46
46
  REDIS_DB_FILTER_AND_RPC_RESULT = 8 # 如果函数做任务参数过滤 或者使用rpc获取结果,使用这个db,因为这个db的键值对多,和redis消息队列db分开
47
- REDIS_URL = f'redis://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
47
+ REDIS_SSL = False # 是否使用ssl加密,默认是False
48
+ REDIS_URL = f'{"rediss" if REDIS_SSL else "redis"}://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DB}'
48
49
 
49
50
  NSQD_TCP_ADDRESSES = ['127.0.0.1:4150']
50
51
  NSQD_HTTP_CLIENT_HOST = '127.0.0.1'
@@ -79,7 +80,7 @@ class BrokerConnConfig(DataClassBase):
79
80
  MQTT_TCP_PORT = 1883
80
81
 
81
82
  HTTPSQS_HOST = '127.0.0.1'
82
- HTTPSQS_PORT = '1218'
83
+ HTTPSQS_PORT = 1218
83
84
  HTTPSQS_AUTH = '123456'
84
85
 
85
86
  NATS_URL = 'nats://192.168.6.134:4222'
@@ -10,6 +10,7 @@ import atexit
10
10
  import json
11
11
  import logging
12
12
  import multiprocessing
13
+ from re import S
13
14
  import sys
14
15
  import threading
15
16
  import time
@@ -26,7 +27,7 @@ from funboost.core.loggers import develop_logger
26
27
  # from nb_log import LoggerLevelSetterMixin, LoggerMixin
27
28
  from funboost.core.loggers import LoggerLevelSetterMixin, FunboostFileLoggerMixin, get_logger
28
29
  from funboost.core.msg_result_getter import AsyncResult, AioAsyncResult
29
- from funboost.core.serialization import Serialization
30
+ from funboost.core.serialization import PickleHelper, Serialization
30
31
  from funboost.core.task_id_logger import TaskIdLogger
31
32
  from funboost.utils import decorators
32
33
  from funboost.funboost_config_deafult import BrokerConnConfig, FunboostCommonConfig
@@ -212,7 +213,20 @@ class AbstractPublisher(LoggerLevelSetterMixin, metaclass=abc.ABCMeta, ):
212
213
  msg = copy.deepcopy(msg) # 字典是可变对象,不要改变影响用户自身的传参字典. 用户可能继续使用这个传参字典.
213
214
  msg, msg_function_kw, extra_params, task_id = self._convert_msg(msg, task_id, priority_control_config)
214
215
  t_start = time.time()
215
- msg_json = Serialization.to_json_str(msg)
216
+
217
+ can_not_json_serializable_keys = Serialization.find_can_not_json_serializable_keys(msg)
218
+ if can_not_json_serializable_keys:
219
+ pass
220
+ self.logger.warning(f'msg 中包含不能序列化的键: {can_not_json_serializable_keys}')
221
+ # raise ValueError(f'msg 中包含不能序列化的键: {can_not_json_serializable_keys}')
222
+ new_msg = copy.deepcopy(Serialization.to_dict(msg))
223
+ for key in can_not_json_serializable_keys:
224
+ new_msg[key] = PickleHelper.to_str(new_msg[key])
225
+ new_msg['extra']['can_not_json_serializable_keys'] = can_not_json_serializable_keys
226
+ msg_json = Serialization.to_json_str(new_msg)
227
+ else:
228
+ msg_json = Serialization.to_json_str(msg)
229
+ # print(msg_json)
216
230
  decorators.handle_exception(retry_times=10, is_throw_error=True, time_sleep=0.1)(
217
231
  self.concrete_realization_of_publish)(msg_json)
218
232
 
@@ -14,9 +14,15 @@ class HTTPPublisher(AbstractPublisher, ):
14
14
  # noinspection PyAttributeOutsideInit
15
15
  def custom_init(self):
16
16
  self._http = PoolManager(10)
17
+ self._ip = self.publisher_params.broker_exclusive_config['host']
18
+ self._port = self.publisher_params.broker_exclusive_config['port']
19
+ self._ip_port_str = f'{self._ip}:{self._port}'
20
+ if self._port is None:
21
+ raise ValueError('please specify port')
22
+
17
23
 
18
24
  def concrete_realization_of_publish(self, msg):
19
- url = self.queue_name + '/queue'
25
+ url = self._ip_port_str + '/queue'
20
26
  self._http.request('post', url, fields={'msg': msg})
21
27
 
22
28
  def clear(self):
@@ -10,24 +10,26 @@ class TCPPublisher(AbstractPublisher, ):
10
10
  使用tcp作为中间件,不支持持久化,支持分布式
11
11
  """
12
12
 
13
- BUFSIZE = 10240
14
-
15
13
  # noinspection PyAttributeOutsideInit
16
14
  def custom_init(self):
17
- """ tcp为消息队列中间件 时候 queue_name 要设置为例如 127.0.0.1:5689"""
18
- pass
15
+ self._bufsize = self.publisher_params.broker_exclusive_config['bufsize']
19
16
 
20
17
  # noinspection PyAttributeOutsideInit
21
18
  def concrete_realization_of_publish(self, msg):
22
19
  if not hasattr(self, '_tcp_cli_sock'):
23
- ip__port_str = self.queue_name.split(':')
24
- ip_port = (ip__port_str[0], int(ip__port_str[1]))
20
+ # ip__port_str = self.queue_name.split(':')
21
+ # ip_port = (ip__port_str[0], int(ip__port_str[1]))
22
+ self._ip = self.publisher_params.broker_exclusive_config['host']
23
+ self._port = self.publisher_params.broker_exclusive_config['port']
24
+ self.__ip_port = (self._ip, self._port)
25
+ if self._port is None:
26
+ raise ValueError('please specify port')
25
27
  tcp_cli_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
26
- tcp_cli_sock.connect(ip_port)
28
+ tcp_cli_sock.connect(self.__ip_port)
27
29
  self._tcp_cli_sock = tcp_cli_sock
28
30
 
29
31
  self._tcp_cli_sock.send(msg.encode())
30
- self._tcp_cli_sock.recv(self.BUFSIZE)
32
+ self._tcp_cli_sock.recv(self._bufsize)
31
33
 
32
34
  def clear(self):
33
35
  pass # udp没有保存消息
@@ -10,19 +10,21 @@ class UDPPublisher(AbstractPublisher, ):
10
10
  使用udp作为中间件,不支持持久化,支持分布式
11
11
  """
12
12
 
13
- BUFSIZE = 10240
14
-
15
13
  # noinspection PyAttributeOutsideInit
16
14
  def custom_init(self):
17
- """ udp为消息队列中间件 时候 queue_name 要设置为例如 127.0.0.1:5689"""
15
+ self._bufsize = self.publisher_params.broker_exclusive_config['bufsize']
18
16
  self.__udp_client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
19
- ip__port_str = self.queue_name.split(':')
20
- self.__ip_port = (ip__port_str[0], int(ip__port_str[1]))
17
+ self._ip = self.publisher_params.broker_exclusive_config['host']
18
+ self._port = self.publisher_params.broker_exclusive_config['port']
19
+ self.__ip_port = (self._ip, self._port)
20
+ if self._port is None:
21
+ raise ValueError('please specify port')
21
22
  self.__udp_client.connect(self.__ip_port)
22
23
 
24
+ # noinspection PyAttributeOutsideInit
23
25
  def concrete_realization_of_publish(self, msg):
24
26
  self.__udp_client.send(msg.encode('utf-8'), )
25
- self.__udp_client.recv(self.BUFSIZE)
27
+ self.__udp_client.recv(self._bufsize)
26
28
 
27
29
  def clear(self):
28
30
  pass # udp没有保存消息
@@ -10,9 +10,13 @@ class ZeroMqPublisher(AbstractPublisher):
10
10
  zeromq 中间件的发布者,zeromq基于socket代码,不会持久化,且不需要安装软件。
11
11
  """
12
12
  def custom_init(self):
13
+ self._port = self.publisher_params.broker_exclusive_config['port']
14
+ if self._port is None:
15
+ raise ValueError('please specify port')
16
+
13
17
  context = ZmqImporter().zmq.Context()
14
18
  socket = context.socket(ZmqImporter().zmq.REQ)
15
- socket.connect(f"tcp://localhost:{int(self._queue_name)}")
19
+ socket.connect(f"tcp://localhost:{int(self._port)}")
16
20
  self.socket =socket
17
21
  self.logger.warning('框架使用 zeromq 中间件方式,必须先启动消费者(消费者会顺便启动broker) ,只有启动了服务端才能发布任务')
18
22
 
@@ -1,5 +1,5 @@
1
1
  from apscheduler.jobstores.redis import RedisJobStore
2
- from funboost.utils.redis_manager import RedisMixin
2
+ from funboost.utils.redis_manager import RedisMixin,get_redis_conn_kwargs
3
3
 
4
4
  from funboost.timing_job import FunboostBackgroundScheduler
5
5
  from funboost.funboost_config_deafult import BrokerConnConfig, FunboostCommonConfig
@@ -41,6 +41,21 @@ class FunboostBackgroundSchedulerProcessJobsWithinRedisLock(FunboostBackgroundSc
41
41
  # return 0.1
42
42
 
43
43
  def _process_jobs(self):
44
+ """
45
+ funboost 的做法是 在任务取出阶段就加锁,从根本上防止了重复执行。
46
+ 这个很关键,防止多个apscheduler 实例同时扫描取出同一个定时任务,间接导致重复执行,
47
+ 在apscheduler 3.xx版本这样写来防止多个apscheduler实例 重复执行定时任务的问题,简直是神操作.
48
+
49
+ _process_jobs 功能是扫描取出需要运行的定时任务,而不是直接运行定时任务
50
+ 只要扫描取出任务不会取出相同的任务,就间接的决定了不可能重复执行相同的定时任务了.
51
+
52
+
53
+ 不要以为随便在你自己的消费函数加个redis分布式锁就不会重复执行任务了,redis分布式锁是解决相同代码块不会并发执行,而不是解决重复执行.
54
+ 但funboost是神级别骚操作,把分布式锁加到_process_jobs里面,
55
+ _process_jobs是获取一个即将运行的定时任务,是扫描并删除这个即将运行的定时任务,
56
+ 所以这里加分布式锁能间接解决不重复运行定时任务,一旦任务被取出,就会从 jobstore 中删除,其他实例就无法再取到这个任务了.
57
+
58
+ """
44
59
  if self.process_jobs_redis_lock_key is None:
45
60
  raise ValueError('process_jobs_redis_lock_key is not set')
46
61
  with RedisDistributedBlockLockContextManager(RedisMixin().redis_db_frame, self.process_jobs_redis_lock_key, ):
@@ -48,9 +63,8 @@ class FunboostBackgroundSchedulerProcessJobsWithinRedisLock(FunboostBackgroundSc
48
63
 
49
64
 
50
65
  jobstores = {
51
- "default": RedisJobStore(db=BrokerConnConfig.REDIS_DB, host=BrokerConnConfig.REDIS_HOST,
52
- port=BrokerConnConfig.REDIS_PORT, password=BrokerConnConfig.REDIS_PASSWORD,
53
- username=BrokerConnConfig.REDIS_USERNAME, jobs_key='funboost.apscheduler.jobs',run_times_key="funboost.apscheduler.run_times")
66
+ "default": RedisJobStore(**get_redis_conn_kwargs(),
67
+ jobs_key='funboost.apscheduler.jobs',run_times_key="funboost.apscheduler.run_times")
54
68
  }
55
69
 
56
70
  """
@@ -146,7 +146,7 @@ if __name__ == '__main__':
146
146
  id='interval_job1'
147
147
  )
148
148
 
149
- # 方式3:使用cron表达式定时执行
149
+ # 方式3:使用cron表达式定时执行,周期运行
150
150
  ApsJobAdder(sum_two_numbers, job_store_kind='redis').add_push_job(
151
151
  trigger='cron',
152
152
  day_of_week='*',
@@ -156,5 +156,7 @@ if __name__ == '__main__':
156
156
  kwargs={"x": 50, "y": 60},
157
157
  replace_existing=True,
158
158
  id='cron_job1')
159
+
160
+
159
161
 
160
162
  ctrl_c_recv() # 启动了守护线程的定时器,一定要阻止主线程退出。 你可以代码最末尾加这个 ctrl_c_recv() 或者加个 while 1:time.sleep(10)
@@ -16,7 +16,7 @@ def ctrl_c_recv():
16
16
  apscheduler background 类型必须有主线程在运行,否则会很快结束。所以需要阻止主线程退出。
17
17
  在代码最最末尾加上 ctrl_c_recv() 就可以阻止主线程退出。
18
18
 
19
- 你也可以直接在你的启动脚本的最末尾加上:
19
+ 你也可以不用ctrl_c_recv(), 直接在你的启动脚本文件的最末尾加上:
20
20
  while 1:
21
21
  time.sleep(100)
22
22
  来达到阻止主线程退出的目的。
@@ -13,7 +13,7 @@ from funboost.utils import decorators
13
13
 
14
14
  def get_redis_conn_kwargs():
15
15
  return {'host': BrokerConnConfig.REDIS_HOST, 'port': BrokerConnConfig.REDIS_PORT,
16
- 'username': BrokerConnConfig.REDIS_USERNAME,
16
+ 'username': BrokerConnConfig.REDIS_USERNAME,'ssl' : BrokerConnConfig.REDIS_SSL,
17
17
  'password': BrokerConnConfig.REDIS_PASSWORD, 'db': BrokerConnConfig.REDIS_DB}
18
18
 
19
19
 
@@ -26,11 +26,13 @@ def _get_redis_conn_kwargs_by_db(db):
26
26
  class RedisManager(object):
27
27
  _redis_db__conn_map = {}
28
28
 
29
- def __init__(self, host='127.0.0.1', port=6379, db=0, username='', password=''):
30
- self._key = (host, port, db, username, password,)
29
+ def __init__(self, host='127.0.0.1', port=6379, db=0, username='', password='',ssl=False):
30
+ self._key = (host, port, db, username, password,ssl)
31
31
  if self._key not in self.__class__._redis_db__conn_map:
32
32
  self.__class__._redis_db__conn_map[self._key] = redis5.Redis(host=host, port=port, db=db, username=username,
33
- password=password, max_connections=1000, decode_responses=True)
33
+ password=password, max_connections=1000,
34
+ ssl=ssl,
35
+ decode_responses=True)
34
36
  self.redis = self.__class__._redis_db__conn_map[self._key]
35
37
 
36
38
  def get_redis(self) -> redis5.Redis: