deepfos-1.1.60-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/__init__.py +6 -0
- deepfos/_version.py +21 -0
- deepfos/algo/__init__.py +0 -0
- deepfos/algo/graph.py +171 -0
- deepfos/algo/segtree.py +31 -0
- deepfos/api/V1_1/__init__.py +0 -0
- deepfos/api/V1_1/business_model.py +119 -0
- deepfos/api/V1_1/dimension.py +599 -0
- deepfos/api/V1_1/models/__init__.py +0 -0
- deepfos/api/V1_1/models/business_model.py +1033 -0
- deepfos/api/V1_1/models/dimension.py +2768 -0
- deepfos/api/V1_2/__init__.py +0 -0
- deepfos/api/V1_2/dimension.py +285 -0
- deepfos/api/V1_2/models/__init__.py +0 -0
- deepfos/api/V1_2/models/dimension.py +2923 -0
- deepfos/api/__init__.py +0 -0
- deepfos/api/account.py +167 -0
- deepfos/api/accounting_engines.py +147 -0
- deepfos/api/app.py +626 -0
- deepfos/api/approval_process.py +198 -0
- deepfos/api/base.py +983 -0
- deepfos/api/business_model.py +160 -0
- deepfos/api/consolidation.py +129 -0
- deepfos/api/consolidation_process.py +106 -0
- deepfos/api/datatable.py +341 -0
- deepfos/api/deep_pipeline.py +61 -0
- deepfos/api/deepconnector.py +36 -0
- deepfos/api/deepfos_task.py +92 -0
- deepfos/api/deepmodel.py +188 -0
- deepfos/api/dimension.py +486 -0
- deepfos/api/financial_model.py +319 -0
- deepfos/api/journal_model.py +119 -0
- deepfos/api/journal_template.py +132 -0
- deepfos/api/memory_financial_model.py +98 -0
- deepfos/api/models/__init__.py +3 -0
- deepfos/api/models/account.py +483 -0
- deepfos/api/models/accounting_engines.py +756 -0
- deepfos/api/models/app.py +1338 -0
- deepfos/api/models/approval_process.py +1043 -0
- deepfos/api/models/base.py +234 -0
- deepfos/api/models/business_model.py +805 -0
- deepfos/api/models/consolidation.py +711 -0
- deepfos/api/models/consolidation_process.py +248 -0
- deepfos/api/models/datatable_mysql.py +427 -0
- deepfos/api/models/deep_pipeline.py +55 -0
- deepfos/api/models/deepconnector.py +28 -0
- deepfos/api/models/deepfos_task.py +386 -0
- deepfos/api/models/deepmodel.py +308 -0
- deepfos/api/models/dimension.py +1576 -0
- deepfos/api/models/financial_model.py +1796 -0
- deepfos/api/models/journal_model.py +341 -0
- deepfos/api/models/journal_template.py +854 -0
- deepfos/api/models/memory_financial_model.py +478 -0
- deepfos/api/models/platform.py +178 -0
- deepfos/api/models/python.py +221 -0
- deepfos/api/models/reconciliation_engine.py +411 -0
- deepfos/api/models/reconciliation_report.py +161 -0
- deepfos/api/models/role_strategy.py +884 -0
- deepfos/api/models/smartlist.py +237 -0
- deepfos/api/models/space.py +1137 -0
- deepfos/api/models/system.py +1065 -0
- deepfos/api/models/variable.py +463 -0
- deepfos/api/models/workflow.py +946 -0
- deepfos/api/platform.py +199 -0
- deepfos/api/python.py +90 -0
- deepfos/api/reconciliation_engine.py +181 -0
- deepfos/api/reconciliation_report.py +64 -0
- deepfos/api/role_strategy.py +234 -0
- deepfos/api/smartlist.py +69 -0
- deepfos/api/space.py +582 -0
- deepfos/api/system.py +372 -0
- deepfos/api/variable.py +154 -0
- deepfos/api/workflow.py +264 -0
- deepfos/boost/__init__.py +6 -0
- deepfos/boost/py_jstream.py +89 -0
- deepfos/boost/py_pandas.py +20 -0
- deepfos/cache.py +121 -0
- deepfos/config.py +6 -0
- deepfos/core/__init__.py +27 -0
- deepfos/core/cube/__init__.py +10 -0
- deepfos/core/cube/_base.py +462 -0
- deepfos/core/cube/constants.py +21 -0
- deepfos/core/cube/cube.py +408 -0
- deepfos/core/cube/formula.py +707 -0
- deepfos/core/cube/syscube.py +532 -0
- deepfos/core/cube/typing.py +7 -0
- deepfos/core/cube/utils.py +238 -0
- deepfos/core/dimension/__init__.py +11 -0
- deepfos/core/dimension/_base.py +506 -0
- deepfos/core/dimension/dimcreator.py +184 -0
- deepfos/core/dimension/dimension.py +472 -0
- deepfos/core/dimension/dimexpr.py +271 -0
- deepfos/core/dimension/dimmember.py +155 -0
- deepfos/core/dimension/eledimension.py +22 -0
- deepfos/core/dimension/filters.py +99 -0
- deepfos/core/dimension/sysdimension.py +168 -0
- deepfos/core/logictable/__init__.py +5 -0
- deepfos/core/logictable/_cache.py +141 -0
- deepfos/core/logictable/_operator.py +663 -0
- deepfos/core/logictable/nodemixin.py +673 -0
- deepfos/core/logictable/sqlcondition.py +609 -0
- deepfos/core/logictable/tablemodel.py +497 -0
- deepfos/db/__init__.py +36 -0
- deepfos/db/cipher.py +660 -0
- deepfos/db/clickhouse.py +191 -0
- deepfos/db/connector.py +195 -0
- deepfos/db/daclickhouse.py +171 -0
- deepfos/db/dameng.py +101 -0
- deepfos/db/damysql.py +189 -0
- deepfos/db/dbkits.py +358 -0
- deepfos/db/deepengine.py +99 -0
- deepfos/db/deepmodel.py +82 -0
- deepfos/db/deepmodel_kingbase.py +83 -0
- deepfos/db/edb.py +214 -0
- deepfos/db/gauss.py +83 -0
- deepfos/db/kingbase.py +83 -0
- deepfos/db/mysql.py +184 -0
- deepfos/db/oracle.py +131 -0
- deepfos/db/postgresql.py +192 -0
- deepfos/db/sqlserver.py +99 -0
- deepfos/db/utils.py +135 -0
- deepfos/element/__init__.py +89 -0
- deepfos/element/accounting.py +348 -0
- deepfos/element/apvlprocess.py +215 -0
- deepfos/element/base.py +398 -0
- deepfos/element/bizmodel.py +1269 -0
- deepfos/element/datatable.py +2467 -0
- deepfos/element/deep_pipeline.py +186 -0
- deepfos/element/deepconnector.py +59 -0
- deepfos/element/deepmodel.py +1806 -0
- deepfos/element/dimension.py +1254 -0
- deepfos/element/fact_table.py +427 -0
- deepfos/element/finmodel.py +1485 -0
- deepfos/element/journal.py +840 -0
- deepfos/element/journal_template.py +943 -0
- deepfos/element/pyscript.py +412 -0
- deepfos/element/reconciliation.py +553 -0
- deepfos/element/rolestrategy.py +243 -0
- deepfos/element/smartlist.py +457 -0
- deepfos/element/variable.py +756 -0
- deepfos/element/workflow.py +560 -0
- deepfos/exceptions/__init__.py +239 -0
- deepfos/exceptions/hook.py +86 -0
- deepfos/lazy.py +104 -0
- deepfos/lazy_import.py +84 -0
- deepfos/lib/__init__.py +0 -0
- deepfos/lib/_javaobj.py +366 -0
- deepfos/lib/asynchronous.py +879 -0
- deepfos/lib/concurrency.py +107 -0
- deepfos/lib/constant.py +39 -0
- deepfos/lib/decorator.py +310 -0
- deepfos/lib/deepchart.py +778 -0
- deepfos/lib/deepux.py +477 -0
- deepfos/lib/discovery.py +273 -0
- deepfos/lib/edb_lexer.py +789 -0
- deepfos/lib/eureka.py +156 -0
- deepfos/lib/filterparser.py +751 -0
- deepfos/lib/httpcli.py +106 -0
- deepfos/lib/jsonstreamer.py +80 -0
- deepfos/lib/msg.py +394 -0
- deepfos/lib/nacos.py +225 -0
- deepfos/lib/patch.py +92 -0
- deepfos/lib/redis.py +241 -0
- deepfos/lib/serutils.py +181 -0
- deepfos/lib/stopwatch.py +99 -0
- deepfos/lib/subtask.py +572 -0
- deepfos/lib/sysutils.py +703 -0
- deepfos/lib/utils.py +1003 -0
- deepfos/local.py +160 -0
- deepfos/options.py +670 -0
- deepfos/translation.py +237 -0
- deepfos-1.1.60.dist-info/METADATA +33 -0
- deepfos-1.1.60.dist-info/RECORD +175 -0
- deepfos-1.1.60.dist-info/WHEEL +5 -0
- deepfos-1.1.60.dist-info/top_level.txt +1 -0
deepfos/lib/nacos.py
ADDED
@@ -0,0 +1,225 @@
import asyncio
import functools
import json
from collections import defaultdict
from typing import TypedDict, Any, Tuple, Type, Dict

from loguru import logger

from deepfos import OPTION
from deepfos.exceptions import APIResponseError
from deepfos.lib.discovery import ServiceDiscovery
from deepfos.lib.httpcli import AioHttpCli
from deepfos.lib.utils import concat_url

__all__ = ['Nacos']


class InstanceHostInfo(TypedDict, total=False):
    instanceId: str
    ip: str
    port: int
    weight: float
    healthy: bool
    enabled: bool
    ephemeral: bool
    clusterName: str
    serviceName: str
    metadata: dict
    instanceHeartBeatInterval: int
    instanceIdGenerator: str
    instanceHeartBeatTimeOut: int
    ipDeleteTimeout: int


class ResponseChecker:  # pragma: no cover
    @classmethod
    def expect(cls, response: Any) -> bool:
        return True

    @classmethod
    def cast(cls, response: str, endpoint: str):
        return response

    @classmethod
    def validate(cls, response: str, endpoint: str) -> Tuple[bool, Any]:
        casted = cls.cast(response, endpoint)
        return cls.expect(casted), casted


class JsonResponse(ResponseChecker):  # pragma: no cover
    @classmethod
    def cast(cls, response: str, endpoint: str):
        try:
            return json.loads(response)
        except (TypeError, ValueError):
            logger.opt(lazy=True).exception(
                f'Call api: {endpoint} failed. '
                f'Response << {response} >> cannot be decoded as json.'
            )
            raise APIResponseError(
                f'Call api: {endpoint} failed. '
                f'Response << {response} >> cannot be decoded as json.'
            )


class Route:  # pragma: no cover
    def __init__(self, method: str):
        self.method = method

    def __call__(
        self,
        endpoint,
        response_checker: Type[ResponseChecker] = None,
    ):
        def execute(func):
            request = getattr(AioHttpCli, self.method)
            method = self.method.upper()
            is_get = method == 'GET'

            @functools.wraps(func)
            async def call(*args, **kwargs):
                ins = args[0]
                url = concat_url(ins.base_url, endpoint)
                body = func(ins, *args[1::], **kwargs)
                logger.opt(lazy=True).debug(
                    f"Sending request: {method} {url} "
                    f"body: {repr(body)}"
                )
                req_args = {
                    'headers': ins.header,
                    'params' if is_get else 'data': body
                }
                response = await request(url, **req_args)
                text = await response.text()
                if response_checker is not None:
                    ok, result = response_checker.validate(text, endpoint)
                    if not ok:
                        raise APIResponseError(
                            f"Call API: {endpoint} failed. "
                            f"Bad response because status is False. Detail: {text}."
                        )
                    else:
                        return result
                else:
                    return text

            return call

        return execute


get = Route(method='get')


class NacosAPI:
    def __init__(self):  # noqa
        nacos_server: str = OPTION.nacos.server.lower()
        if nacos_server.rstrip('/').endswith('nacos'):
            base_url = nacos_server
        else:
            base_url = concat_url(nacos_server, 'nacos')

        self.base_url = concat_url(base_url, 'v1')
        self.header = {"Content-Type": "application/x-www-form-urlencoded"}

    @get('ns/service/list', JsonResponse)
    def list_service(self):  # pragma: no cover
        return {
            'pageNo': 1,
            'pageSize': 1000000,
        }

    @get('ns/instance/list', JsonResponse)
    def list_instance(
        self,
        service_name: str,
        group: str = 'DEFAULT_GROUP',
        namespace: str = 'public',
        cluster: str = 'DEFAULT',
    ):  # pragma: no cover
        return {
            'serviceName': service_name,
            'groupName': group,
            'namespaceId': namespace,
            'clusters': cluster,
        }


class NacosCli(ServiceDiscovery):
    def __init__(self):
        super().__init__()
        self._instance_lock: Dict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
        self._server_lock = asyncio.Lock()
        self._api = NacosAPI()
        self._cares = set()
        self._group = OPTION.nacos.group
        self._ns = OPTION.nacos.namespace
        self._cluster = OPTION.nacos.cluster

    async def on_close(self):
        self._cares.clear()
        self.server_cache.clear()
        self._instance_lock.clear()

    async def on_startup(self):
        pass

    async def on_interval(self):
        await self._update_cache()

    async def _update_cache(self):
        logger.opt(lazy=True).debug(f"Update cache for instance: {self._cares}")

        await asyncio.gather(*(
            self.update_instance_for_service(sn)
            for sn in self._cares
        ))

    async def update_service_cache(self, server_name: str):
        async with self._server_lock:
            if server_name not in self.server_cache:
                await self.update_services()

    async def update_instance_cache(self, server_name):
        self._cares.add(server_name)
        await self.update_instance_for_service(server_name)

    async def update_services(self):
        new_services = frozenset((await self._api.list_service())['doms'])
        cur_services = frozenset(self.server_cache.keys())

        if added := new_services - cur_services:
            for srv in added:
                self.server_cache.__getitem__(srv)
            logger.opt(lazy=True).debug(f"Added services: {added}")

        if removed := cur_services - new_services:
            for srv in removed:
                self.server_cache.pop(srv)
                self._instance_lock.pop(srv, None)  # noqa
            logger.opt(lazy=True).debug(f"Removed services: {removed}")

    async def update_instance_for_service(self, server_name: str):
        async with self._instance_lock[server_name]:
            instance_info = await self._api.list_instance(
                server_name,
                group=self._group,
                namespace=self._ns,
                cluster=self._cluster
            )

            host: InstanceHostInfo
            cache = self.server_cache[server_name]

            for host in instance_info['hosts']:
                if not host['enabled']:
                    continue

                addr = f"http://{host['ip']}:{host['port']}"
                cache.add(addr)
                if not host['healthy']:
                    cache.punish(addr)


Nacos = NacosCli()
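A minimal usage sketch of the `Nacos` client defined above, assuming `OPTION.nacos.*` is already configured and that `ServiceDiscovery` exposes `server_cache` the way the code above uses it; the service name is a placeholder.

import asyncio

from deepfos.lib.nacos import Nacos


async def resolve(service_name: str):
    # Register interest in the service and fetch its instances once;
    # on_interval() keeps refreshing every service in _cares afterwards.
    await Nacos.update_instance_cache(service_name)
    # The cache entry collects "http://ip:port" addresses; unhealthy hosts
    # are punished so the discovery layer deprioritizes them.
    return Nacos.server_cache[service_name]


addresses = asyncio.run(resolve('example-service'))  # placeholder service name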
deepfos/lib/patch.py
ADDED
@@ -0,0 +1,92 @@
import functools
from typing import Tuple, Union, Type, Callable
from types import ModuleType
import importlib
from contextlib import contextmanager

__all__ = [
    'string_import',
    'context_patch',
    'patch'
]


def string_import(
    importable: str,
    check_obj: bool = False,
) -> Tuple[ModuleType, str]:
    if not importable.startswith('deepfos.'):
        raise ValueError("Cannot patch object not belonging to the deepfos package.")

    module_str, obj_str = importable.rsplit('.', maxsplit=1)
    module = importlib.import_module(module_str)

    if check_obj and not hasattr(module, obj_str):
        raise ValueError(f"Module: {module.__name__} has no attribute: {obj_str}")

    return module, obj_str


@contextmanager
def context_patch(
    importable: str,
    patch_as: Union[Type, Callable, ModuleType]
):
    module, obj_str = string_import(importable)

    if hasattr(module, obj_str):
        obj_bak = getattr(module, obj_str)
    else:
        obj_bak = None

    setattr(module, obj_str, patch_as)

    try:
        yield
    finally:
        if obj_bak is None:
            delattr(module, obj_str)
        else:
            setattr(module, obj_str, obj_bak)


def patch(
    importable: str,
    patch_as: Union[Type, Callable, ModuleType]
):
    # noinspection PyProtectedMember
    """Replace code inside the deepfos package.

    Decorator: within the decorated function, the targeted object is replaced.
    Once the function returns, the original object is restored.

    Args:
        importable: dotted path of the module/function/class to replace
        patch_as: the replacement object

    >>> from deepfos.lib.patch import patch
    >>> from deepfos.lib.utils import _concat_url_single, concat_url
    >>> def new_concat_url_single(a, b):
    ...     return _concat_url_single(a, b) + '/patched'
    >>> @patch('deepfos.lib.utils._concat_url_single', new_concat_url_single)
    ... def patched_func():
    ...     print('in patched_func')
    ...     print(concat_url('a', 'b'))
    >>> def main():
    ...     print(concat_url('a', 'b'))
    ...     patched_func()
    ...     print(concat_url('a', 'b'))
    >>> main()
    a/b
    in patched_func
    a/b/patched
    a/b
    """
    def inner(func):
        @functools.wraps(func)
        def wrap(*args, **kwargs):
            with context_patch(importable, patch_as):
                return func(*args, **kwargs)
        return wrap

    return inner
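A short sketch of the context-manager form, complementing the decorator shown in the docstring above; it relies on `concat_url('a', 'b')` returning `a/b` as in that doctest, and the lambda replacement is illustrative only.

from deepfos.lib import utils
from deepfos.lib.patch import context_patch

with context_patch('deepfos.lib.utils.concat_url', lambda a, b: f'{a}|{b}'):
    # Lookups that go through the module attribute see the replacement.
    print(utils.concat_url('a', 'b'))  # a|b

# On exit the original attribute is restored.
print(utils.concat_url('a', 'b'))      # a/b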
deepfos/lib/redis.py
ADDED
@@ -0,0 +1,241 @@
"""Redis-related utilities."""

from threading import Thread, Event
from typing import Optional

import redis
from redis.exceptions import LockError, LockNotOwnedError
from loguru import logger

from deepfos.lib.decorator import cached_property
from deepfos.options import OPTION
from deepfos.exceptions import LockAcquireFailed

__all__ = [
    'RedisLock',
    'RedisCli'
]


class _AbsRedisLock:  # pragma: no cover
    # noinspection PyUnusedLocal
    def __init__(
        self,
        key: str,
        redis_client: redis.Redis = None,
        renew_interval: int = 5,
        expire_sec: int = 10,
        raises: Exception = None,
        blocking_timeout: Optional[int] = 0
    ):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def acquire(self, timeout=0):
        pass

    def release(self):
        pass


def poll_event(
    event: Event,
    interval: int
):
    while not event.wait(interval):
        yield


class RedisLock:
    """A lock implemented on top of Redis.

    Args:
        redis_client: the Redis client to use
        key: the lock name
        renew_interval: interval (seconds) for refreshing the TTL, default 5
        expire_sec: TTL (seconds), default 10; must be greater than the refresh
            interval, otherwise refreshing has no effect
        raises: exception raised when acquiring the lock fails; only used with
            the ``with`` statement
        blocking_timeout: how long (seconds) to wait for the lock, default 0
            (no waiting); ``None`` waits until the lock is acquired

    Notes:
        When acquiring with ``with``, an exception is raised on failure.
        When acquiring with :meth:`acquire`, a :class:`bool` is returned
        indicating whether the lock was acquired.

    .. admonition:: Example

        .. code-block:: python

            with RedisLock('locked_key_a'):
                do_something()

            lock = RedisLock('locked_key_b')
            if lock.acquire():
                try:
                    do_something()
                finally:
                    lock.release()

    """
    def __init__(
        self,
        key: str,
        redis_client: redis.Redis = None,
        renew_interval: int = 5,
        expire_sec: int = 10,
        raises: Exception = None,
        blocking_timeout: Optional[int] = 0
    ):
        self._closed = None

        if renew_interval > expire_sec:
            raise ValueError(
                "expire_sec must be greater than renew_interval, "
                "otherwise refreshing the TTL has no effect."
            )

        self.redis_client = redis_client or RedisCli().client
        self.key = key
        self.blocking_timeout = blocking_timeout
        self.renew_interval = renew_interval
        self.expire_sec = expire_sec
        self.exc = raises

    @property
    def closed(self):
        if self._closed is None:
            return True
        return self._closed.is_set()

    @cached_property
    def lock(self):
        return self.redis_client.lock(
            name=self.key,
            timeout=self.expire_sec,
            blocking_timeout=self.blocking_timeout,
            thread_local=False
        )

    def __enter__(self):
        if not self.closed:
            logger.warning(f"RedisLock[key: {self.key}] already acquired.")
            return

        if self.lock.acquire(blocking=True):
            self._closed = Event()
            Thread(target=self.refresh_key, daemon=True).start()
            return self

        raise self.exc or LockAcquireFailed('Cannot acquire lock.')

    do_hold = __enter__

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()

    def acquire(self, timeout=0) -> bool:
        """Acquire the lock.

        Args:
            timeout: how long (seconds) to wait, default 0 (no waiting);
                ``None`` waits until the lock is acquired
        """
        return self.lock.acquire(blocking=True, blocking_timeout=timeout)

    def release(self):
        """Release the lock."""
        self.stop_task()
        try:
            self.lock.release()
        except (LockError, LockNotOwnedError):
            logger.exception("")

    def refresh_key(self):
        self.lock.extend(self.expire_sec, replace_ttl=True)

        for _ in poll_event(self._closed, self.renew_interval):
            try:
                self.lock.extend(self.expire_sec, replace_ttl=True)
            except (LockError, LockNotOwnedError):
                break

    def owned(self):
        """Whether the key is owned by this lock."""
        return self.lock.owned()

    def locked(self):
        """Whether the key is held by any lock."""
        return self.lock.locked()

    def stop_task(self):
        if self.closed:
            return

        self._closed.set()

    def __del__(self):
        self.stop_task()


if OPTION.general.dev_mode:
    RedisLock = _AbsRedisLock


class RedisCli:
    """Redis client wrapper.

    Args:
        redis_url: redis address in the form
            redis://[[username]:[password]]@[host]:[port]/0;
            defaults to the value of OPTION.redis.url when not provided

    """
    _client: Optional[redis.Redis]

    def __init__(self, redis_url: str = None):
        self.redis_url = redis_url or OPTION.redis.url
        self._client = None

    @property
    def client(self) -> redis.Redis:
        if self._client is None:
            self._client = redis.Redis.from_url(self.redis_url)
        return self._client

    def close(self):
        if self._client is not None:
            self._client.connection_pool.disconnect()
            self._client = None

    def __del__(self):
        self.close()

    def lock(
        self,
        key,
        renew_interval: int = 5,
        expire_sec: int = 10,
        blocking_timeout: Optional[int] = 0
    ) -> RedisLock:
        """
        Return a self-refreshing Redis lock bound to the given key.

        Args:
            key: the lock key
            renew_interval: interval (seconds) for refreshing the TTL, default 5
            expire_sec: TTL (seconds), default 10; must be greater than the
                refresh interval, otherwise refreshing has no effect
            blocking_timeout: how long (seconds) to wait for the lock, default 0
                (no waiting); ``None`` waits until the lock is acquired

        Returns:
            A RedisLock object bound to ``key``

        .. admonition:: Example

            .. code-block:: python

                rediscli = RedisCli()
                with rediscli.lock('test_key'):
                    ...

        """
        return RedisLock(key, self.client, renew_interval, expire_sec,
                         blocking_timeout=blocking_timeout)
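A usage sketch combining `RedisCli` and `RedisLock` as defined above; the URL is a placeholder and `OPTION.general.dev_mode` is assumed to be off (otherwise `RedisLock` is replaced by the no-op `_AbsRedisLock`).

from deepfos.lib.redis import RedisCli

cli = RedisCli('redis://localhost:6379/0')  # placeholder URL
lock = cli.lock('nightly-job', expire_sec=30, renew_interval=10)

# Unlike the `with` form, acquire() does not start the TTL-refresh thread,
# so keep the critical section shorter than expire_sec or renew manually.
if lock.acquire(timeout=5):
    try:
        ...  # critical section
    finally:
        lock.release()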