deepfos 1.1.60__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/__init__.py +6 -0
- deepfos/_version.py +21 -0
- deepfos/algo/__init__.py +0 -0
- deepfos/algo/graph.py +171 -0
- deepfos/algo/segtree.py +31 -0
- deepfos/api/V1_1/__init__.py +0 -0
- deepfos/api/V1_1/business_model.py +119 -0
- deepfos/api/V1_1/dimension.py +599 -0
- deepfos/api/V1_1/models/__init__.py +0 -0
- deepfos/api/V1_1/models/business_model.py +1033 -0
- deepfos/api/V1_1/models/dimension.py +2768 -0
- deepfos/api/V1_2/__init__.py +0 -0
- deepfos/api/V1_2/dimension.py +285 -0
- deepfos/api/V1_2/models/__init__.py +0 -0
- deepfos/api/V1_2/models/dimension.py +2923 -0
- deepfos/api/__init__.py +0 -0
- deepfos/api/account.py +167 -0
- deepfos/api/accounting_engines.py +147 -0
- deepfos/api/app.py +626 -0
- deepfos/api/approval_process.py +198 -0
- deepfos/api/base.py +983 -0
- deepfos/api/business_model.py +160 -0
- deepfos/api/consolidation.py +129 -0
- deepfos/api/consolidation_process.py +106 -0
- deepfos/api/datatable.py +341 -0
- deepfos/api/deep_pipeline.py +61 -0
- deepfos/api/deepconnector.py +36 -0
- deepfos/api/deepfos_task.py +92 -0
- deepfos/api/deepmodel.py +188 -0
- deepfos/api/dimension.py +486 -0
- deepfos/api/financial_model.py +319 -0
- deepfos/api/journal_model.py +119 -0
- deepfos/api/journal_template.py +132 -0
- deepfos/api/memory_financial_model.py +98 -0
- deepfos/api/models/__init__.py +3 -0
- deepfos/api/models/account.py +483 -0
- deepfos/api/models/accounting_engines.py +756 -0
- deepfos/api/models/app.py +1338 -0
- deepfos/api/models/approval_process.py +1043 -0
- deepfos/api/models/base.py +234 -0
- deepfos/api/models/business_model.py +805 -0
- deepfos/api/models/consolidation.py +711 -0
- deepfos/api/models/consolidation_process.py +248 -0
- deepfos/api/models/datatable_mysql.py +427 -0
- deepfos/api/models/deep_pipeline.py +55 -0
- deepfos/api/models/deepconnector.py +28 -0
- deepfos/api/models/deepfos_task.py +386 -0
- deepfos/api/models/deepmodel.py +308 -0
- deepfos/api/models/dimension.py +1576 -0
- deepfos/api/models/financial_model.py +1796 -0
- deepfos/api/models/journal_model.py +341 -0
- deepfos/api/models/journal_template.py +854 -0
- deepfos/api/models/memory_financial_model.py +478 -0
- deepfos/api/models/platform.py +178 -0
- deepfos/api/models/python.py +221 -0
- deepfos/api/models/reconciliation_engine.py +411 -0
- deepfos/api/models/reconciliation_report.py +161 -0
- deepfos/api/models/role_strategy.py +884 -0
- deepfos/api/models/smartlist.py +237 -0
- deepfos/api/models/space.py +1137 -0
- deepfos/api/models/system.py +1065 -0
- deepfos/api/models/variable.py +463 -0
- deepfos/api/models/workflow.py +946 -0
- deepfos/api/platform.py +199 -0
- deepfos/api/python.py +90 -0
- deepfos/api/reconciliation_engine.py +181 -0
- deepfos/api/reconciliation_report.py +64 -0
- deepfos/api/role_strategy.py +234 -0
- deepfos/api/smartlist.py +69 -0
- deepfos/api/space.py +582 -0
- deepfos/api/system.py +372 -0
- deepfos/api/variable.py +154 -0
- deepfos/api/workflow.py +264 -0
- deepfos/boost/__init__.py +6 -0
- deepfos/boost/py_jstream.py +89 -0
- deepfos/boost/py_pandas.py +20 -0
- deepfos/cache.py +121 -0
- deepfos/config.py +6 -0
- deepfos/core/__init__.py +27 -0
- deepfos/core/cube/__init__.py +10 -0
- deepfos/core/cube/_base.py +462 -0
- deepfos/core/cube/constants.py +21 -0
- deepfos/core/cube/cube.py +408 -0
- deepfos/core/cube/formula.py +707 -0
- deepfos/core/cube/syscube.py +532 -0
- deepfos/core/cube/typing.py +7 -0
- deepfos/core/cube/utils.py +238 -0
- deepfos/core/dimension/__init__.py +11 -0
- deepfos/core/dimension/_base.py +506 -0
- deepfos/core/dimension/dimcreator.py +184 -0
- deepfos/core/dimension/dimension.py +472 -0
- deepfos/core/dimension/dimexpr.py +271 -0
- deepfos/core/dimension/dimmember.py +155 -0
- deepfos/core/dimension/eledimension.py +22 -0
- deepfos/core/dimension/filters.py +99 -0
- deepfos/core/dimension/sysdimension.py +168 -0
- deepfos/core/logictable/__init__.py +5 -0
- deepfos/core/logictable/_cache.py +141 -0
- deepfos/core/logictable/_operator.py +663 -0
- deepfos/core/logictable/nodemixin.py +673 -0
- deepfos/core/logictable/sqlcondition.py +609 -0
- deepfos/core/logictable/tablemodel.py +497 -0
- deepfos/db/__init__.py +36 -0
- deepfos/db/cipher.py +660 -0
- deepfos/db/clickhouse.py +191 -0
- deepfos/db/connector.py +195 -0
- deepfos/db/daclickhouse.py +171 -0
- deepfos/db/dameng.py +101 -0
- deepfos/db/damysql.py +189 -0
- deepfos/db/dbkits.py +358 -0
- deepfos/db/deepengine.py +99 -0
- deepfos/db/deepmodel.py +82 -0
- deepfos/db/deepmodel_kingbase.py +83 -0
- deepfos/db/edb.py +214 -0
- deepfos/db/gauss.py +83 -0
- deepfos/db/kingbase.py +83 -0
- deepfos/db/mysql.py +184 -0
- deepfos/db/oracle.py +131 -0
- deepfos/db/postgresql.py +192 -0
- deepfos/db/sqlserver.py +99 -0
- deepfos/db/utils.py +135 -0
- deepfos/element/__init__.py +89 -0
- deepfos/element/accounting.py +348 -0
- deepfos/element/apvlprocess.py +215 -0
- deepfos/element/base.py +398 -0
- deepfos/element/bizmodel.py +1269 -0
- deepfos/element/datatable.py +2467 -0
- deepfos/element/deep_pipeline.py +186 -0
- deepfos/element/deepconnector.py +59 -0
- deepfos/element/deepmodel.py +1806 -0
- deepfos/element/dimension.py +1254 -0
- deepfos/element/fact_table.py +427 -0
- deepfos/element/finmodel.py +1485 -0
- deepfos/element/journal.py +840 -0
- deepfos/element/journal_template.py +943 -0
- deepfos/element/pyscript.py +412 -0
- deepfos/element/reconciliation.py +553 -0
- deepfos/element/rolestrategy.py +243 -0
- deepfos/element/smartlist.py +457 -0
- deepfos/element/variable.py +756 -0
- deepfos/element/workflow.py +560 -0
- deepfos/exceptions/__init__.py +239 -0
- deepfos/exceptions/hook.py +86 -0
- deepfos/lazy.py +104 -0
- deepfos/lazy_import.py +84 -0
- deepfos/lib/__init__.py +0 -0
- deepfos/lib/_javaobj.py +366 -0
- deepfos/lib/asynchronous.py +879 -0
- deepfos/lib/concurrency.py +107 -0
- deepfos/lib/constant.py +39 -0
- deepfos/lib/decorator.py +310 -0
- deepfos/lib/deepchart.py +778 -0
- deepfos/lib/deepux.py +477 -0
- deepfos/lib/discovery.py +273 -0
- deepfos/lib/edb_lexer.py +789 -0
- deepfos/lib/eureka.py +156 -0
- deepfos/lib/filterparser.py +751 -0
- deepfos/lib/httpcli.py +106 -0
- deepfos/lib/jsonstreamer.py +80 -0
- deepfos/lib/msg.py +394 -0
- deepfos/lib/nacos.py +225 -0
- deepfos/lib/patch.py +92 -0
- deepfos/lib/redis.py +241 -0
- deepfos/lib/serutils.py +181 -0
- deepfos/lib/stopwatch.py +99 -0
- deepfos/lib/subtask.py +572 -0
- deepfos/lib/sysutils.py +703 -0
- deepfos/lib/utils.py +1003 -0
- deepfos/local.py +160 -0
- deepfos/options.py +670 -0
- deepfos/translation.py +237 -0
- deepfos-1.1.60.dist-info/METADATA +33 -0
- deepfos-1.1.60.dist-info/RECORD +175 -0
- deepfos-1.1.60.dist-info/WHEEL +5 -0
- deepfos-1.1.60.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,497 @@
|
|
|
1
|
+
from typing import Union, Tuple, Iterable, Dict, Optional, List
|
|
2
|
+
|
|
3
|
+
from .nodemixin import MetaNodeMixin
|
|
4
|
+
from .sqlcondition import SQLCondition, SqlCondError, ConditionManager
|
|
5
|
+
from ._cache import DataProxy
|
|
6
|
+
from ._operator import OpCombineError
|
|
7
|
+
from loguru import logger
|
|
8
|
+
from deepfos.lib.decorator import cached_property
|
|
9
|
+
from deepfos.element.datatable import (
|
|
10
|
+
get_table_class, T_DatatableInstance, T_AsyncDatatableInstance
|
|
11
|
+
)
|
|
12
|
+
from deepfos.api.models.app import ConfirmElementInfoDto
|
|
13
|
+
import pandas as pd
|
|
14
|
+
|
|
15
|
+
from weakref import WeakKeyDictionary
|
|
16
|
+
import weakref
|
|
17
|
+
from contextlib import contextmanager
|
|
18
|
+
from collections import defaultdict
|
|
19
|
+
import copy
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# -----------------------------------------------------------------------------
|
|
23
|
+
# utils
|
|
24
|
+
class TableInfo(ConfirmElementInfoDto):
    """Element info extended with optional physical-table coordinates.

    Both extra fields may be ``None``; they are forwarded to the
    datatable constructor by :func:`get_datatable`.
    """
    # physical table name (optional)
    tableName: Optional[str]
    # datatable server name (optional)
    serverName: Optional[str]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def get_datatable(
    table_info: Union[TableInfo, dict],
    sync=True
) -> Union[T_DatatableInstance, T_AsyncDatatableInstance]:
    """Instantiate a datatable element from table metadata.

    Args:
        table_info: Either a :class:`TableInfo` model or a plain dict.
            A dict must carry an ``element_type`` key; its remaining
            entries are forwarded to the datatable constructor as-is.
        sync: If True, return a synchronous datatable instance,
            otherwise an asynchronous one.

    Returns:
        A (sync or async) datatable element instance.

    Raises:
        ValueError: If no element type can be determined.
    """
    if isinstance(table_info, TableInfo):
        init_args = {
            'element_name': table_info.elementName,
            'folder_id': table_info.folderId,
            'path': table_info.path,
            'table_name': table_info.tableName,
            'server_name': table_info.serverName,
        }
        element_type = table_info.elementType
    else:
        init_args = {**table_info}
        element_type = init_args.pop('element_type', None)

    # Validate in both branches: a TableInfo may also carry a null
    # elementType, which previously failed later with an obscure error.
    if element_type is None:
        raise ValueError("element_type is needed in table_info")

    return get_table_class(element_type, sync)(**init_args)
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class ConditionPassError(Exception):
    """Raised when a query condition cannot be propagated between tables."""
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class RelationInfo(object):
    """Records the join relationships between tables.

    Keys are related table classes (held weakly, so dropped tables do
    not leak); values are the tuples of field names joined on.
    """

    def __init__(self, cls: 'MetaTable'):
        # weak mapping: related table -> join field names
        self._rel_info = WeakKeyDictionary()
        # weak reference back to the owning table class
        self._tbl = weakref.ref(cls)

    def _add_relation(self, tbl: 'MetaTable', on: Tuple[str]):
        """Record that this table joins ``tbl`` on fields ``on``."""
        if on is not None:
            self._rel_info[tbl] = on
        return self

    add_parent_relation = _add_relation

    def add_child_relation(
        self,
        tbl: 'MetaTable',
        on: Tuple[str],
        alias: Optional[Tuple[str]]
    ):
        """Record a child relation, resolving field aliases.

        In ``alias``, the placeholder ``'='`` means "same name as the
        corresponding entry of ``on``"; any other value replaces it.
        """
        if alias is not None:
            on = tuple(
                ko if ka == '=' else ka
                for ko, ka in zip(on, alias)
            )
        return self._add_relation(tbl, on)

    def is_rel_field(self, tbl: 'MetaTable', field: str) -> bool:
        """Whether ``field`` participates in the join with ``tbl``."""
        return field in self[tbl]

    def has_alias_with(self, other) -> bool:
        """
        Whether any join field is named differently on the two sides.

        Args:
            other: a table; must be a parent or child of the current table

        Returns:
            bool: True if at least one join field name differs
        """
        return any(
            k1 != k2 for k1, k2 in
            zip(self[other], other.rel_info[self._tbl()])
        )

    def __getitem__(self, item) -> Tuple[str]:
        try:
            return self._rel_info.__getitem__(item)
        except KeyError:
            # fixed typo in the error message: 'realtionship' -> 'relationship'
            raise KeyError(
                "No relationship between "
                f"{item!r} and {self._tbl()!r}.") from None

    def __repr__(self):  # pragma: no cover
        return repr({**self._rel_info})
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def pass_condition_up(*from_nodes: MetaNodeMixin, to: MetaNodeMixin):
    """
    Aggregate the query conditions of all ``from`` nodes into the ``to``
    node, working upwards from the deepest layer of the tree.
    """
    nodes_at_depth = defaultdict(set)
    top_depth = bottom_depth = to.depth

    for node in (*from_nodes, to):
        level = node.depth
        nodes_at_depth[level].add(node)
        if level > bottom_depth:
            bottom_depth = level

    # Sweep the tree bottom-up. Each processed node registers its parent
    # on the layer above, so by the time a node is handled, every
    # condition destined for it has already arrived.
    for level in range(bottom_depth, top_depth, -1):
        for node in nodes_at_depth[level]:
            # noinspection PyProtectedMember
            node._pass_up()
            nodes_at_depth[level - 1].add(node.parent)

    del nodes_at_depth
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class MetaTable(MetaNodeMixin):
    """Metaclass for logic tables.

    Assigning class attributes automatically builds the tree structure.
    Conditions that restrict one node of a tree are propagated to the
    other nodes automatically and eventually affect query results.

    Note:
        A metaclass is used so that the class itself can be used like an
        instance. To access real instance attributes, go through
        ``ClassName.object.attr_or_method``.

    """
    #: join relationships with parent/child tables
    rel_info: RelationInfo
    #: fields selected when querying this table
    fields: Tuple[str]

    def __init__(cls, cls_name: str, supercls: Tuple, attrdict: Dict):
        # Expose an instance of the newly created class; instance
        # attributes stay reachable via ``ClassName.object``.
        cls.object = cls()

        cls.__datatable = datatable = attrdict.pop('datatable', None)
        cls.__async_datatable = attrdict.pop('async_datatable', None)
        cls.__table_info = attrdict.pop('table_info', None)

        cls._datatable = datatable
        if 'parent' in attrdict:
            parent = attrdict.pop('parent')
        else:
            parent = {}

        fields = attrdict.get('fields', None)
        if fields is not None:
            if isinstance(fields, str):
                fields = (fields,)
            elif any(f == '*' for f in fields):
                # '*' means "all columns" -> same as leaving fields unset
                fields = None

        cls.fields = fields
        #: manages the state of the query conditions
        cls.__cm = ConditionManager()
        #: cache of query results
        cls.__data_pxy = DataProxy(max_size=attrdict.pop('cache_size', 5))

        cls.rel_info = RelationInfo(cls)  #: join info (mainly field names) with parent/child tables
        cls.set_parent_table(
            parent.get('cls'),
            on=parent.get('on'),
            alias=parent.get('alias'),
        )
        super().__init__(cls_name, supercls, attrdict)

    @cached_property
    def datatable(cls) -> T_DatatableInstance:
        """The bound (sync) datatable element.

        Uses the explicitly bound element if present; otherwise creates
        one from ``table_info`` on first access.
        """
        if cls.__datatable is not None:
            return cls.__datatable
        if cls.__table_info is None:
            raise KeyError("Either 'table_info' or 'datatable' should be presented.")
        tbl = get_datatable(cls.__table_info)
        del cls.__table_info
        return tbl

    @cached_property
    def async_datatable(cls) -> T_AsyncDatatableInstance:
        """The bound asynchronous datatable element.

        Uses the explicitly bound element if present; otherwise creates
        one from ``table_info`` on first access.
        """
        if cls.__async_datatable is not None:
            return cls.__async_datatable
        if cls.__table_info is None:
            raise KeyError("Either 'table_info' or 'async_datatable' should be presented.")
        tbl = get_datatable(cls.__table_info, sync=False)
        del cls.__table_info
        return tbl

    @cached_property
    def name(cls) -> str:
        """Physical table name of the bound datatable."""
        return cls.datatable.table_name

    @property
    def data(cls) -> pd.DataFrame:
        """
        Fetch data from the database under the current query conditions.

        Returns:
            :class:`DataFrame` : the query result

        Raises:
            RuntimeError: if no table in the tree has any query condition

        Note:
            The basic query flow is:

            1. Find every table in :attr:`locked` state and locate the lowest
               common ancestor of those tables and the table being queried.
            2. Pass the restricting conditions up to that common ancestor,
               then down to the table being queried.

            * | If conditions contradict each other, or a joint condition
              | yields no data so passing cannot continue, propagation stops
              | and an empty ``Dataframe`` with column names is returned.
            * Otherwise the query proceeds.

            3. The cache is consulted first; on a cache miss the SQL is
               executed against the database.
            4. If no table's :attr:`locked` state changed between two queries,
               the condition-passing procedure is shortened accordingly.
        """
        return cls.query(cls.fields)

    def query(cls, fields: Iterable[str] = None) -> pd.DataFrame:
        """Run a query for ``fields`` under the tree's current conditions."""
        locked = [node for node in cls.family if node.locked]
        if not locked:
            raise RuntimeError("At least lock one table before query.")

        if not ConditionManager.any_changed(node.__cm for node in cls.family):
            # Conditions of tables in this tree are unchanged since the last query
            for node in cls.iter_to_root():  # OK if node is cls
                if node.__cm.valid:
                    logger.debug("Condition not changed from last query.")
                    return cls._pass_and_query(node, pass_up=False, fields=fields)

        # Two situations lead here:
        # 1. condition_changed == True: clearly the temporary conditions must go.
        # 2. condition_changed == False but all(node.__cm.valid is False):
        #    the previous query failed during _pass_up, or the previous
        #    query's top is not an ancestor of this query's table.
        # Either way, clearing all temporary conditions causes some duplicate
        # computation but no logic error; keeping them would also be correct,
        # yet conditions would pile up over successive queries and the AND
        # combination would cost ever more CPU and memory — so we clear.
        # todo for case 2 it seems we could drop the data of the highest node
        #  holding temporary conditions, treat the next-highest conditioned
        #  nodes as locked, and pass up to top
        for node in cls.family:
            node.__cm.clear_tmp()  # drop every temporarily added condition

        top = cls.common_ancestor(*locked)

        return cls._pass_and_query(top, locked, fields=fields)

    def _pass_and_query(
        cls,
        top: 'MetaTable',
        locked: Iterable['MetaTable'] = None,
        pass_up: bool = True,
        fields: Iterable[str] = None
    ) -> pd.DataFrame:
        """Propagate conditions through ``top`` and execute the query.

        Returns an empty DataFrame (with column names) when propagation
        fails; otherwise delegates to :meth:`_query`.
        """
        try:
            # funnel the locked conditions up to the common ancestor
            if pass_up:
                pass_condition_up(*locked, to=top)
                top.__cm.mark_as_valid()

            # then push the restrictions down to the table being queried
            top.__pass_down_to(cls)
        except (ConditionPassError, SqlCondError, OpCombineError) as e:
            logger.debug(
                f"Condition passing is terminated due to an error. Return empty dataframe. "
                f"Error: {str(e)}")
            return pd.DataFrame(columns=cls.columns)
        else:
            return cls._query(use_cache=True, fields=fields)

    #: Whether the table carries conditions introduced via :meth:`lock`,
    #: :meth:`temporary_lock` or :meth:`permanent_lock`.
    locked = property(lambda self: self.__cm.has_main())

    #: All query conditions, including the temporary ones added while
    #: conditions are being passed around.
    condition = property(lambda self: self.__cm.condition)

    @contextmanager
    def temporary_lock(cls, **kwargs):
        """Context manager: :meth:`lock` on entry, :meth:`release` on exit."""
        cls.lock(**kwargs)
        yield
        cls.release()

    def lock(cls, **kwargs):
        """
        Add query conditions for the current table.

        Args:
            **kwargs: query conditions, forwarded to |SQLCND| and thus
                required to satisfy the |SQLCND| signature.
        """
        cls.__cm.add_main_cond(SQLCondition(
            **kwargs, quote_char=cls.datatable.quote_char))

    def permanent_lock(cls, **kwargs):
        """Lock without a matching release; the data is queried right
        away so the result enters the cache."""
        cls.lock(**kwargs)
        _ = cls.data
        # cls.__data_pxy.make_cache(cls.condition, data)

    def release(cls):
        """Remove the most recently added lock.

        Raises:
            RuntimeError: if there is no lock to release.
        """
        try:
            cls.__cm.pop_main()
        except IndexError:
            raise RuntimeError("No lock to release!") from None

    def _query(
        cls,
        use_cache: bool = True,
        unique: bool = False,
        fields: Iterable[str] = None,
        cond: SQLCondition = None
    ) -> pd.DataFrame:
        """Execute the query (optionally via the cache) and return a DataFrame."""
        cond = cond or cls.condition
        if cond is None:
            raise RuntimeError("Cannot execute query without condition.")

        if use_cache:
            df = cls.__data_pxy.get_data(cond, fields)
            if df is not None:
                return df

        # cond may expand into several SQL statements; concatenate the parts
        df = pd.DataFrame()
        for cd in cond.to_sql():
            df = pd.concat(
                [
                    df,
                    cls.datatable.select(columns=fields, where=cd, distinct=unique)
                ]
            )

        logger.debug(f"Got DATA:\n {df}")

        cls.__data_pxy.make_cache(cond, fields, df)
        return df

    def query_with_condition(
        cls,
        cond: SQLCondition,
        fields: Union[str, Iterable[str]] = None,
        unique: bool = False
    ) -> pd.DataFrame:
        """Query with an explicit condition, bypassing the cache."""
        if fields is not None:
            if isinstance(fields, str):
                fields = (fields, )
            elif not isinstance(fields, tuple):
                fields = tuple(fields)
        return cls._query(use_cache=False, unique=unique, fields=fields, cond=cond)

    def _pass_up(cls):
        """Pass this node's condition to its parent; does not change the
        ConditionManager state."""
        parent = cls.parent  # make local ref

        if parent is None:
            return

        logger.debug(f"Start passing up {cls!r} -> {parent!r}.")
        cond = cls.__get_pass_cond(parent)
        parent.__set_query_cond(cond)
        logger.debug(f"Passed UP {cls!r} -> {parent!r}. Condition: {cond}.")

    def __get_pass_cond(cls, to_tbl: 'MetaTable'):
        """Build the condition to pass to another node; the target must be
        this node's parent or one of its children."""
        cls_cond = cls.condition
        rel_info = cls.rel_info
        # If every conditioned field is a join field of the target, the
        # condition can be passed directly without querying; otherwise a
        # query must run first.
        if all(rel_info.is_rel_field(to_tbl, fd) for fd in cls_cond.all_fields):
            logger.debug("No extra field, pass condition directly.")
            cond = copy.copy(cls_cond)
            # handle join fields named differently on the two sides
            if rel_info.has_alias_with(to_tbl):
                cond.rename_field(dict(zip(rel_info[to_tbl], to_tbl.rel_info[cls])))
        else:
            logger.debug(f"Got extra field, need query.")
            # If the target already restricts some join fields, those
            # restrictions can be pulled into the current query.
            target_cond = to_tbl.condition
            if target_cond is not None:
                conditioned_flds = [fd for fd in target_cond.all_fields if rel_info.is_rel_field(to_tbl, fd)]
                if conditioned_flds:
                    logger.debug(f"{to_tbl!r}'s fields: {conditioned_flds} is conditioned.")
                    cls_cond &= target_cond[conditioned_flds]
            # run the query
            data = cls._query(unique=True, fields=rel_info[to_tbl], cond=cls_cond).dropna()
            if data.empty:
                raise ConditionPassError("Cannot pass condition because no data is fetched.")
            keys = to_tbl.rel_info[cls]
            logger.debug(f"{to_tbl} relate to {cls!r} with fields: {keys}.")
            logger.debug(f"Fields are restrained by:\n{data!r}.")
            cond = SQLCondition(fields=keys, value_list=data, quote_char=cls.datatable.quote_char)
        return cond

    def __set_query_cond(cls, cond: SQLCondition):
        # attach a temporary (propagated) condition to this table
        cls.__cm.add_tmp_cond(cond)

    def __pass_down_to(cls, descendant: 'MetaTable'):
        """Pass this node's condition down child by child until reaching
        the target descendant.

        Conditions of every visited node are marked valid; this changes
        the ConditionManager state.
        """
        parent = cls

        for dsnt in cls.iter_to_descendant(descendant):
            logger.debug(f"Start passing down {parent!r} -> {dsnt!r}.")
            condition = parent.__get_pass_cond(dsnt)
            dsnt.__set_query_cond(condition)
            dsnt.__cm.mark_as_valid()  # mark the query condition as valid
            logger.debug(f"Passed down {parent!r} -> {dsnt!r}. Condtion: {condition!r}")
            parent = dsnt

    @cached_property
    def columns(cls) -> List[str]:
        """Column names of the underlying datatable."""
        return list(cls.datatable.structure.columns.keys())

    def __repr__(self):
        return f"<{self.__name__}>"

    @property
    def all_data(cls) -> pd.DataFrame:
        """Fetch the whole table without conditions. Use with care."""
        return cls.datatable.select(columns=cls.fields)

    def set_parent_table(
        cls,
        table: 'MetaTable',
        on: Iterable[str],
        alias: Optional[Iterable[str]] = None,
    ):
        """
        Set the parent table.

        Args:
            table: the parent table
            on: names of the join fields (in the current table)
            alias: names of the join fields on the parent side
                ('=' means identical; may be omitted if all are identical)

        """
        if table is None:
            return

        cls.set_parent(table)
        on = tuple(on)
        if alias is not None:
            alias = tuple(alias)
        table.rel_info.add_child_relation(cls, on, alias)
        cls.rel_info.add_parent_relation(table, on)
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
class BaseTable(metaclass=MetaTable):
    """
    Helper class meant to be subclassed.

    Class attributes that may be defined:

    .. code-block:: python

        # relation to the parent node
        parent = {
            # class of the parent node:
            "cls": ObjectInfo,
            # names of the join fields (in the current table):
            "on": ('sys_contract_id', 'sys_object_id', 'sys_sub_id'),
            # names of the join fields on the parent side
            # ('=' means identical; omit entirely if all are identical):
            "alias": ('=', 'object_id', 'sub_id')
        }
        # fields to select; all fields by default
        fields = ('id', 'name', 'etc')
        # maximum number of cached query results
        cache_size = 5
        # bound datatable element
        datatable = table
        # datatable metadata
        table_info: Union[TableInfo, dict] = {
            'element_name': "element name",
            'element_type': "element type",
            'folder_id': "folder id (either this or path suffices)",
            'path': "element path (either this or folder id suffices)",
            'table_name': "physical table name (optional)",
            'server_name': "datatable server name (optional)",
        }
    """
|
deepfos/db/__init__.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING

from deepfos.lazy import lazify

if TYPE_CHECKING:  # pragma: no cover
    # Static-analysis-only imports: give IDEs and type checkers the real
    # symbols without paying the import cost at runtime.
    from .mysql import MySQLClient, AsyncMySQLClient
    from .clickhouse import ClickHouseClient, AsyncClickHouseClient
    from .oracle import OracleClient, AsyncOracleClient, OracleDFSQLConvertor
    from .sqlserver import SQLServerClient, AsyncSQLServerClient
    from .kingbase import KingBaseClient, AsyncKingBaseClient
    from .gauss import GaussClient, AsyncGaussClient
    from .dameng import DaMengClient, AsyncDaMengClient
    from .postgresql import PostgreSQLClient, AsyncPostgreSQLClient
    from .deepengine import DeepEngineClient, AsyncDeepEngineClient
    from .deepmodel import DeepModelClient, AsyncDeepModelClient
    from .deepmodel_kingbase import DeepModelKingBaseClient, AsyncDeepModelKingBaseClient


# Register each client class as a lazy module-level attribute: accessing
# a name for the first time imports the corresponding submodule.
lazify(
    {
        'deepfos.db.mysql': ('MySQLClient', 'AsyncMySQLClient'),
        'deepfos.db.clickhouse': ('ClickHouseClient', 'AsyncClickHouseClient'),
        'deepfos.db.oracle': (
            'OracleClient', 'AsyncOracleClient', 'OracleDFSQLConvertor'
        ),
        'deepfos.db.sqlserver': ('SQLServerClient', 'AsyncSQLServerClient'),
        'deepfos.db.kingbase': ('KingBaseClient', 'AsyncKingBaseClient'),
        'deepfos.db.gauss': ('GaussClient', 'AsyncGaussClient'),
        'deepfos.db.dameng': ('DaMengClient', 'AsyncDaMengClient'),
        'deepfos.db.postgresql': ('PostgreSQLClient', 'AsyncPostgreSQLClient'),
        'deepfos.db.deepengine': ('DeepEngineClient', 'AsyncDeepEngineClient'),
        'deepfos.db.deepmodel': ('DeepModelClient', 'AsyncDeepModelClient'),
        'deepfos.db.deepmodel_kingbase': ('DeepModelKingBaseClient', 'AsyncDeepModelKingBaseClient'),
    },
    globals()
)
|