deepfos 1.1.60__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/__init__.py +6 -0
- deepfos/_version.py +21 -0
- deepfos/algo/__init__.py +0 -0
- deepfos/algo/graph.py +171 -0
- deepfos/algo/segtree.py +31 -0
- deepfos/api/V1_1/__init__.py +0 -0
- deepfos/api/V1_1/business_model.py +119 -0
- deepfos/api/V1_1/dimension.py +599 -0
- deepfos/api/V1_1/models/__init__.py +0 -0
- deepfos/api/V1_1/models/business_model.py +1033 -0
- deepfos/api/V1_1/models/dimension.py +2768 -0
- deepfos/api/V1_2/__init__.py +0 -0
- deepfos/api/V1_2/dimension.py +285 -0
- deepfos/api/V1_2/models/__init__.py +0 -0
- deepfos/api/V1_2/models/dimension.py +2923 -0
- deepfos/api/__init__.py +0 -0
- deepfos/api/account.py +167 -0
- deepfos/api/accounting_engines.py +147 -0
- deepfos/api/app.py +626 -0
- deepfos/api/approval_process.py +198 -0
- deepfos/api/base.py +983 -0
- deepfos/api/business_model.py +160 -0
- deepfos/api/consolidation.py +129 -0
- deepfos/api/consolidation_process.py +106 -0
- deepfos/api/datatable.py +341 -0
- deepfos/api/deep_pipeline.py +61 -0
- deepfos/api/deepconnector.py +36 -0
- deepfos/api/deepfos_task.py +92 -0
- deepfos/api/deepmodel.py +188 -0
- deepfos/api/dimension.py +486 -0
- deepfos/api/financial_model.py +319 -0
- deepfos/api/journal_model.py +119 -0
- deepfos/api/journal_template.py +132 -0
- deepfos/api/memory_financial_model.py +98 -0
- deepfos/api/models/__init__.py +3 -0
- deepfos/api/models/account.py +483 -0
- deepfos/api/models/accounting_engines.py +756 -0
- deepfos/api/models/app.py +1338 -0
- deepfos/api/models/approval_process.py +1043 -0
- deepfos/api/models/base.py +234 -0
- deepfos/api/models/business_model.py +805 -0
- deepfos/api/models/consolidation.py +711 -0
- deepfos/api/models/consolidation_process.py +248 -0
- deepfos/api/models/datatable_mysql.py +427 -0
- deepfos/api/models/deep_pipeline.py +55 -0
- deepfos/api/models/deepconnector.py +28 -0
- deepfos/api/models/deepfos_task.py +386 -0
- deepfos/api/models/deepmodel.py +308 -0
- deepfos/api/models/dimension.py +1576 -0
- deepfos/api/models/financial_model.py +1796 -0
- deepfos/api/models/journal_model.py +341 -0
- deepfos/api/models/journal_template.py +854 -0
- deepfos/api/models/memory_financial_model.py +478 -0
- deepfos/api/models/platform.py +178 -0
- deepfos/api/models/python.py +221 -0
- deepfos/api/models/reconciliation_engine.py +411 -0
- deepfos/api/models/reconciliation_report.py +161 -0
- deepfos/api/models/role_strategy.py +884 -0
- deepfos/api/models/smartlist.py +237 -0
- deepfos/api/models/space.py +1137 -0
- deepfos/api/models/system.py +1065 -0
- deepfos/api/models/variable.py +463 -0
- deepfos/api/models/workflow.py +946 -0
- deepfos/api/platform.py +199 -0
- deepfos/api/python.py +90 -0
- deepfos/api/reconciliation_engine.py +181 -0
- deepfos/api/reconciliation_report.py +64 -0
- deepfos/api/role_strategy.py +234 -0
- deepfos/api/smartlist.py +69 -0
- deepfos/api/space.py +582 -0
- deepfos/api/system.py +372 -0
- deepfos/api/variable.py +154 -0
- deepfos/api/workflow.py +264 -0
- deepfos/boost/__init__.py +6 -0
- deepfos/boost/py_jstream.py +89 -0
- deepfos/boost/py_pandas.py +20 -0
- deepfos/cache.py +121 -0
- deepfos/config.py +6 -0
- deepfos/core/__init__.py +27 -0
- deepfos/core/cube/__init__.py +10 -0
- deepfos/core/cube/_base.py +462 -0
- deepfos/core/cube/constants.py +21 -0
- deepfos/core/cube/cube.py +408 -0
- deepfos/core/cube/formula.py +707 -0
- deepfos/core/cube/syscube.py +532 -0
- deepfos/core/cube/typing.py +7 -0
- deepfos/core/cube/utils.py +238 -0
- deepfos/core/dimension/__init__.py +11 -0
- deepfos/core/dimension/_base.py +506 -0
- deepfos/core/dimension/dimcreator.py +184 -0
- deepfos/core/dimension/dimension.py +472 -0
- deepfos/core/dimension/dimexpr.py +271 -0
- deepfos/core/dimension/dimmember.py +155 -0
- deepfos/core/dimension/eledimension.py +22 -0
- deepfos/core/dimension/filters.py +99 -0
- deepfos/core/dimension/sysdimension.py +168 -0
- deepfos/core/logictable/__init__.py +5 -0
- deepfos/core/logictable/_cache.py +141 -0
- deepfos/core/logictable/_operator.py +663 -0
- deepfos/core/logictable/nodemixin.py +673 -0
- deepfos/core/logictable/sqlcondition.py +609 -0
- deepfos/core/logictable/tablemodel.py +497 -0
- deepfos/db/__init__.py +36 -0
- deepfos/db/cipher.py +660 -0
- deepfos/db/clickhouse.py +191 -0
- deepfos/db/connector.py +195 -0
- deepfos/db/daclickhouse.py +171 -0
- deepfos/db/dameng.py +101 -0
- deepfos/db/damysql.py +189 -0
- deepfos/db/dbkits.py +358 -0
- deepfos/db/deepengine.py +99 -0
- deepfos/db/deepmodel.py +82 -0
- deepfos/db/deepmodel_kingbase.py +83 -0
- deepfos/db/edb.py +214 -0
- deepfos/db/gauss.py +83 -0
- deepfos/db/kingbase.py +83 -0
- deepfos/db/mysql.py +184 -0
- deepfos/db/oracle.py +131 -0
- deepfos/db/postgresql.py +192 -0
- deepfos/db/sqlserver.py +99 -0
- deepfos/db/utils.py +135 -0
- deepfos/element/__init__.py +89 -0
- deepfos/element/accounting.py +348 -0
- deepfos/element/apvlprocess.py +215 -0
- deepfos/element/base.py +398 -0
- deepfos/element/bizmodel.py +1269 -0
- deepfos/element/datatable.py +2467 -0
- deepfos/element/deep_pipeline.py +186 -0
- deepfos/element/deepconnector.py +59 -0
- deepfos/element/deepmodel.py +1806 -0
- deepfos/element/dimension.py +1254 -0
- deepfos/element/fact_table.py +427 -0
- deepfos/element/finmodel.py +1485 -0
- deepfos/element/journal.py +840 -0
- deepfos/element/journal_template.py +943 -0
- deepfos/element/pyscript.py +412 -0
- deepfos/element/reconciliation.py +553 -0
- deepfos/element/rolestrategy.py +243 -0
- deepfos/element/smartlist.py +457 -0
- deepfos/element/variable.py +756 -0
- deepfos/element/workflow.py +560 -0
- deepfos/exceptions/__init__.py +239 -0
- deepfos/exceptions/hook.py +86 -0
- deepfos/lazy.py +104 -0
- deepfos/lazy_import.py +84 -0
- deepfos/lib/__init__.py +0 -0
- deepfos/lib/_javaobj.py +366 -0
- deepfos/lib/asynchronous.py +879 -0
- deepfos/lib/concurrency.py +107 -0
- deepfos/lib/constant.py +39 -0
- deepfos/lib/decorator.py +310 -0
- deepfos/lib/deepchart.py +778 -0
- deepfos/lib/deepux.py +477 -0
- deepfos/lib/discovery.py +273 -0
- deepfos/lib/edb_lexer.py +789 -0
- deepfos/lib/eureka.py +156 -0
- deepfos/lib/filterparser.py +751 -0
- deepfos/lib/httpcli.py +106 -0
- deepfos/lib/jsonstreamer.py +80 -0
- deepfos/lib/msg.py +394 -0
- deepfos/lib/nacos.py +225 -0
- deepfos/lib/patch.py +92 -0
- deepfos/lib/redis.py +241 -0
- deepfos/lib/serutils.py +181 -0
- deepfos/lib/stopwatch.py +99 -0
- deepfos/lib/subtask.py +572 -0
- deepfos/lib/sysutils.py +703 -0
- deepfos/lib/utils.py +1003 -0
- deepfos/local.py +160 -0
- deepfos/options.py +670 -0
- deepfos/translation.py +237 -0
- deepfos-1.1.60.dist-info/METADATA +33 -0
- deepfos-1.1.60.dist-info/RECORD +175 -0
- deepfos-1.1.60.dist-info/WHEEL +5 -0
- deepfos-1.1.60.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,462 @@
|
|
|
1
|
+
from collections import deque
|
|
2
|
+
from contextlib import contextmanager
|
|
3
|
+
from typing import Dict, List, Sequence, Optional, Union, Deque, ContextManager
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
import pandas as pd
|
|
7
|
+
from loguru import logger
|
|
8
|
+
|
|
9
|
+
from .formula import CubeFixer, _ConditionWrapper, FormulaContainer
|
|
10
|
+
from .typing import TD_Str_ListStr, T_MaybeCondition
|
|
11
|
+
from .utils import dict_to_sql, dicts_to_sql, create_df_by_cproduct, Options
|
|
12
|
+
from .constants import Instruction
|
|
13
|
+
from deepfos.core.dimension import Dimension, SysDimension
|
|
14
|
+
from deepfos.lib.decorator import cached_property
|
|
15
|
+
from deepfos.lib.utils import ConcealableAttr, MultiKeyDict, unpack_expr, dict_to_expr, expr_to_dict
|
|
16
|
+
from deepfos.element.finmodel import FinancialCube
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class CalcSet:
    """Working data set for cube member-formula calculation.

    Snapshots the cube's fix state (POV, dimension members, formula
    left-hand sides) and wraps the fixed data so the calculation engine
    can pivot the calculation dimension onto columns, evaluate formulas,
    unpivot back, and finally submit the results to the cube.
    """

    def __init__(self, data_src: pd.DataFrame, drop_cols, cube: 'CubeBase'):
        self.data_col = cube.data_col
        self.__dropped_cols = set(drop_cols)
        self._data_src = data_src
        self.__data_proxy = None
        self._cube_dim_state: TD_Str_ListStr = cube.dim_state.copy()
        self._filters = []
        self._rslt_filter: Dict[str, set] = cube.formulas.left.copy()
        self._cube_expr = cube.to_expr()
        #: dimension columns that are not part of the POV
        self._columns = data_src.columns.difference([self.data_col])
        self._instruction = []
        # DataFrame index corresponding to the cube's fix condition
        self._fix_index = None

    def add_instruction(self, instruction: Instruction):
        """Queue an instruction to be applied by :meth:`handle_instruction`."""
        self._instruction.append(instruction)

    def add_filter(self, fltr):
        """Push a filter and invalidate the cached data proxy."""
        self.__data_proxy = None
        self._filters.append(fltr)

    def pop_filter(self):
        """Pop the most recent filter and invalidate the cached data proxy."""
        self.__data_proxy = None
        self._filters.pop()

    @property
    def data_proxy(self) -> pd.DataFrame:
        """Lazily created working copy (proxy) of ``_data_src``."""
        if self.__data_proxy is None:
            self.__data_proxy = self._data_src.copy()
            # self.__data_src_cache = pipe(
            #     map(attrgetter('filter'), self._filters),
            #     self.__data_src)
        return self.__data_proxy

    @cached_property
    def pov(self) -> Dict[str, str]:
        """Point of view: dropped dimension -> its single fixed member."""
        return {
            c: self._cube_dim_state[c][0]
            for c in self.__dropped_cols
        }

    @cached_property
    def pov_str(self) -> str:
        """POV rendered as a dimension expression."""
        return dict_to_expr(self.pov)

    @property
    def full_data(self) -> pd.DataFrame:
        """Complete data including the POV columns."""
        return self._data_src.assign(**self.pov)

    def pivot(self, dim):
        """Move the dimension to be calculated onto the columns."""
        data = self._data_src
        index_cols = self._columns.difference([dim]).tolist()

        if data.empty:
            self._data_src = data.drop(columns=[dim, self.data_col])
        else:
            data = data.pivot_table(
                index=index_cols, values=self.data_col, columns=dim,
                aggfunc='first', dropna=False, fill_value=np.NaN
            ).reset_index(drop=not index_cols)
            data.columns.name = None
            self._data_src = data

        self._set_fix_index(dim)

    def unpivot(self, dim: str):
        """Move the calculated dimension members back onto the rows."""
        data = self._data_src
        index_cols = self._columns.difference([dim]).tolist()
        total_mbr_cols = data.columns.difference(index_cols)
        melted_data = data.melt(
            id_vars=index_cols, value_vars=total_mbr_cols,
            var_name=dim, value_name=self.data_col
        )
        # Blank out rows whose data became inf (caused by division by zero)
        # so that the subsequent dropna removes them.
        melted_data[np.isinf(melted_data[self.data_col])] = np.NaN
        self._data_src = melted_data.dropna()

    def load_fixes(
        self,
        column: str,
        dimension: str,
        on: Dict[str, Union[Sequence[str], str]],
        name: str,
    ):
        """
        Load the columns needed for calculation. Internal use.

        Args:
            column: data column of the current calculation node
            dimension: dimension the current calculation node belongs to
            on: ``on`` condition of the current calculation node
            name: name of the merged column when an ``on`` condition exists

        Returns:

        """
        data = self.data_proxy
        on = {
            k: v for k, v in on.items()
            if k not in self.__dropped_cols
        }

        if not on or column not in data.columns:
            return

        # Columns other than the `on`-condition columns and the node's own
        # dimension are used as the merge keys.
        index_cols = self._columns.difference([*on.keys(), dimension]).tolist()
        # Filter the data according to the fix condition.
        fix_sql = dict_to_sql(on, eq='==', bracket=False)
        on_data = data.query(fix_sql)[index_cols + [column]].rename(columns={column: name})
        if not index_cols:
            # No index_cols means the `on` condition covers every dimension,
            # so in principle there must be exactly one matching row.
            if len(on_data) != 1:
                raise RuntimeError("Cannot proceed calculation due to an unexpected error.")
            # Fixed: was `on_data[name][0]` (label-based); after `query` the
            # single remaining row keeps its original index label, which is
            # not necessarily 0 — use positional access instead.
            self.__data_proxy[name] = on_data[name].iloc[0]
        else:
            # Rebuild the proxy with the merged `on` column attached.
            self.__data_proxy = data.merge(on_data, on=index_cols, how='left')

    def handle_instruction(
        self,
        column: str,
        dimension: str,
    ):
        """Apply the most recently queued instruction, if any. Internal use."""
        if not self._instruction:
            return
        insto = self._instruction.pop(-1)
        if insto is Instruction.cproduct:
            # Build a template holding the cartesian product of all
            # non-calculation dimension members, then overlay the existing
            # data so every member combination is present (missing → NaN).
            product_cols = self._columns.difference([dimension])
            dim_state = self._cube_dim_state
            idx = pd.MultiIndex.from_product((dim_state[col] for col in product_cols), names=product_cols)
            df_template = pd.Series(np.NaN, index=idx, name=column).to_frame()
            # Fixed: was `self.data`, which is not an attribute of CalcSet
            # and raised AttributeError whenever this branch ran;
            # `data_proxy` is the intended working copy of the data.
            data = self.data_proxy.set_index(product_cols.tolist()).combine_first(df_template)
            self.__data_proxy = data.reset_index()

    def __repr__(self):
        return f"POV: {self.pov_str}\nData:\n {self._data_src}"

    def get_submit_data(self, compact=True):
        """
        The part of the calculation result that may need to be submitted.

        Args:
            compact: when True, return ``(data_without_pov_columns, pov)``;
                otherwise return the full data with POV columns included.
        """
        pov = self.pov
        dicts = []

        for dim, mbrs in self._rslt_filter.items():
            dim_states = {**self._cube_dim_state, dim: mbrs}

            # POV dimensions are constant, no need to filter on them.
            for k in pov:
                dim_states.pop(k, None)
            dicts.append(dim_states)

        sql = dicts_to_sql(dicts)
        logger.debug(f"Filter submit data with sql: {sql}")

        if compact:
            return self._data_src.query(sql), pov
        return self.full_data.query(sql)

    def submit(self, cube: FinancialCube):
        """Save the calculation result to the cube."""
        data, pov = self.get_submit_data(compact=True)
        cube.save(data, pov=pov, data_column=self.data_col)

    def set_value(self, column, value):
        """Assign ``value`` to ``column`` on the rows selected by the fix index."""
        self._data_src.loc[self._fix_index, column] = value
        self.__data_proxy = None

    def _set_fix_index(self, dim):
        """Record the row index matching the current fix condition,
        excluding the calculation dimension and dropped POV dimensions."""
        fix = {
            k: v for k, v in self._cube_dim_state.items()
            if k != dim and k not in self.__dropped_cols
        }
        if not fix:
            self._fix_index = self.data_proxy.index
        else:
            sql = dict_to_sql(fix, "==", bracket=False)
            self._fix_index = self.data_proxy.query(sql).index
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class CubeBase:
    """Common base for cube implementations.

    Holds the cube's dimensions, the member-formula container and the
    calculation result set, and provides the :meth:`fix` context manager
    inside which member formulas are written and solved.
    """

    #: cube name
    cube_name: str
    #: name of the cube's data column
    data_col: str
    #: dimension name -> dimension object
    dimensions: MultiKeyDict[str, Union[Dimension, SysDimension]]
    #: calculation result set
    calc_set: Optional[CalcSet]

    def __init__(self, cube_name, data_col, **options):
        self.calc_dim = []  # dimensions to be calculated
        self.formulas = FormulaContainer(self)  # the cube's filter conditions; each element is a _Condition object
        self.data_col = data_col  # e.g. decimal_val
        self.cube_name = cube_name
        self.dimensions = MultiKeyDict()
        self.calc_set = None
        self._default_option = Options(**options)
        self._option_index = 0

    # NOTE(review): ConcealableAttr is given shared mutable defaults
    # ({} / deque()); presumably the descriptor manages per-instance
    # state on expose()/conceal() — confirm in deepfos.lib.utils.
    dim_state: TD_Str_ListStr = ConcealableAttr({})
    option_stack: Deque[Options] = ConcealableAttr(deque())
    """Dimension state under the current dimension expression.

    Dimension name -> list of dimension members.
    Only holds meaningful values while inside a :meth:`fix` block.

    For internal use only.
    """

    def reset_dimensions(self):
        """Clear the members selected on every dimension,

        i.e. return all dimensions to the unselected state."""
        for dim in self.dimensions.values():
            if dim.activated:
                dim.clear()

    @property
    def pov(self) -> Dict[str, str]:
        """Point of View

        Dimension -> dimension member.
        Under the current fix state, dimensions with exactly one selected
        member are added to :attr:`pov`.

        See Also:
            | :meth:`load_expr`
            | :meth:`to_expr`

        """
        pov = {}

        for dim in self.dimensions.values():
            if not dim.activated:  # skip dimensions with no member selected
                continue
            if len(data := dim.data) == 1:  # a single member pins the dimension into the POV
                pov[dim.name] = data[0]

        return pov

    def to_expr(self) -> str:
        """
        Serialize the current dimension fix state into an expression.
        """
        dims_exprs = (dim.to_expr() for dim in self.dimensions.values() if dim.activated)
        return "->".join(dims_exprs)

    def load_expr(self, cube_expr: str):
        """
        Load a dimension expression.

        Changes the fix state of the dimensions involved.

        Args:
            cube_expr: dimension expression

        Returns:
            self

        """
        self.reset_dimensions()

        for dim_expr in cube_expr.split('->'):
            dimname, expr = unpack_expr(dim_expr)
            if dimname not in self.dimensions:
                raise ValueError(f"Given Dimension '{dimname}' doesn't belong to cube.")
            self.dimensions[dimname].load_expr(expr)
        return self

    # -----------------------------------------------------------------------------
    # Cube member formulas
    # noinspection PyUnresolvedReferences
    @contextmanager
    def fix(
        self,
        general_fix: str = None,
        on_dim: str = None,
    ) -> ContextManager[CubeFixer]:
        """
        Execute member formulas.

        Context manager: member formulas are written inside the ``fix``
        block; all calculations run when the block exits, and the result
        is stored in :attr:`calc_set`.

        Args:
            general_fix: dimension expression fixing the current dimensions
            on_dim: dimension the calculated members belong to

        """
        if general_fix is not None:
            self.load_expr(general_fix)

        dim_locked = False
        if on_dim is not None and on_dim in self.dimensions:
            self.calc_dim.append(on_dim)
            dim_locked = True

        # Expose the concealed per-fix attributes for the duration
        # of the block, then hide them again on exit.
        __class__.dim_state.expose(self)
        __class__.option_stack.expose(self)
        self._dump_dim_state()

        try:
            self.option_stack.append(self._default_option)
            yield CubeFixer(self)
            self.formulas.solve()
        finally:
            if dim_locked:
                self.calc_dim.pop()
            self.dim_state.clear()
            self.option_stack.clear()
            __class__.dim_state.conceal(self)
            __class__.option_stack.conceal(self)

    def make_condition(self, condition: T_MaybeCondition) -> _ConditionWrapper:
        """Create a filter condition for use in member formulas."""
        return _ConditionWrapper(self.formulas, condition)

    def _dump_dim_state(self):
        """Export the current dimensions' ``fix`` state.

        Affects the value of :attr:`dim_state`; internal use only.
        Because of how :attr:`dim_state` works (a concealable attribute),
        calling this externally may raise directly.
        """
        for name, dim in self.dimensions.items():
            if dim.activated and (data := dim.data):
                self.dim_state[name] = data

    def _load_fix_data(self, fix_exprs: List[str]) -> pd.DataFrame:
        # Subclasses must load the raw data for the given fix expressions.
        raise NotImplementedError

    def _load_calc_set(
        self,
        fix_exprs: List[str],
        fix_mbrs: List[TD_Str_ListStr]
    ):
        """
        Load the calculation set. Internal use only.
        """
        drop_cols = self._resolve_drop_cols(fix_mbrs)
        data = self._load_fix_data(fix_exprs).drop(columns=drop_cols)

        dim_columns = data.columns.difference([self.data_col])
        data = data.drop_duplicates(dim_columns)

        col_map = {
            k: v for k, v in self.dim_state.items()
            if k not in drop_cols
        }

        # One template per formula left-hand side: the cartesian product
        # of the fixed members with that side's members.
        df_tmpls = []
        for dim, mbrs in self.formulas.left.items():
            df_tmpls.append(create_df_by_cproduct({**col_map, dim: mbrs}))

        df_tmpl = pd.concat(df_tmpls).drop_duplicates(dim_columns)
        data = df_tmpl.merge(data, how='outer', on=dim_columns.tolist())
        self.calc_set = CalcSet(data, drop_cols, self)

    def _resolve_drop_cols(self, fixes: List[TD_Str_ListStr]):
        """Find columns that can be dropped during calculation.

        Based on the fix conditions of all calculation units (including
        the left-hand side of formulas), combined with the top-level fix
        condition, determine the columns that can be dropped from the
        later calculation.

        A dimension can be removed from the calculation set when exactly
        one member of it is fixed everywhere.

        Args:
            fixes: fixed member lists per dimension for all units.

        Returns:
            list of droppable column names

        """
        dim_state = self.dim_state
        candiates = [k for k, v in dim_state.items() if len(v) == 1]

        # Iterate over a copy so removal during the scan is safe.
        for dim in candiates[:]:
            for fix in fixes:
                if dim in fix:
                    fix_mbrs = set(fix[dim] + dim_state[dim])
                    if len(fix_mbrs) > 1:
                        candiates.remove(dim)
                        break

        return candiates

    @contextmanager
    def set_options(self, **kwargs):
        """Temporarily override cube options (only inside a ``fix`` block)."""
        if not self.option_stack:
            raise RuntimeError("set_options is only allowed within `fix` function.")

        option = self.option_stack[self._option_index].replace(**kwargs)

        try:
            self.option_stack.append(option)
            # NOTE(review): registers popleft with the formula container —
            # presumably solved formulas consume options FIFO; confirm.
            self.formulas.append(self.option_stack.popleft)
            self._option_index += 1
            yield
        finally:
            self._option_index -= 1
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
def _get_remap_bottom_up(dimension, bases, top_level=0, drilldown=0):
|
|
429
|
+
"""
|
|
430
|
+
获取聚合节点向下挖掘后,挖掘曾的节点名
|
|
431
|
+
|
|
432
|
+
Args:
|
|
433
|
+
dimension: 维度对象
|
|
434
|
+
bases: 聚合节点的叶子节点名
|
|
435
|
+
top_level: 聚合节点深度
|
|
436
|
+
drilldown: 向下挖掘的深度
|
|
437
|
+
|
|
438
|
+
Returns:
|
|
439
|
+
返回挖掘层的节点名字典
|
|
440
|
+
|
|
441
|
+
"""
|
|
442
|
+
# 挖掘到的节点深度
|
|
443
|
+
target_level = top_level + drilldown
|
|
444
|
+
# 使用字典存储,是为了去重
|
|
445
|
+
re_map = {}
|
|
446
|
+
|
|
447
|
+
for base in bases:
|
|
448
|
+
# 取出每个叶子节点
|
|
449
|
+
node = dimension[base]
|
|
450
|
+
|
|
451
|
+
# 叶子节点距挖掘节点的深度差
|
|
452
|
+
up_cnt = node.depth - target_level
|
|
453
|
+
if up_cnt <= 0:
|
|
454
|
+
continue
|
|
455
|
+
node_name = node.name
|
|
456
|
+
while up_cnt > 0:
|
|
457
|
+
node = node.parent
|
|
458
|
+
up_cnt -= 1
|
|
459
|
+
# 向上找到了挖掘节点
|
|
460
|
+
re_map[node_name] = node.name
|
|
461
|
+
|
|
462
|
+
return re_map
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from enum import Enum, auto
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# Maps the names of Python's arithmetic dunder operations to their
# infix operator symbols (e.g. 'add' -> '+').
OPMAP = {
    'add': '+',
    'sub': '-',
    'mul': '*',
    'floordiv': '//',
    'truediv': '/',
    'pow': '**',
    'mod': '%',
}

# Default column/field names used by the cube module.
NAME_DFLT = "name"  # member-name column
PNAME_DFLT = "parent_name"  # parent-member-name column
DATACOL_DFLT = "decimal_val"  # numeric data column
WEIGHT = "aggweight"  # aggregation weight — presumably used when rolling up members; confirm
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class Instruction(int, Enum):
    """Calculation instructions understood by the cube engine.

    Inherits from ``int`` so instruction values compare and hash like
    plain integers.
    """

    #: expand the data over the cartesian product of dimension members
    cproduct = 1