deepfos 1.1.60__py3-none-any.whl → 1.1.78__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/_version.py +3 -3
- deepfos/api/V1_1/models/business_model.py +322 -322
- deepfos/api/V1_1/models/dimension.py +1075 -1075
- deepfos/api/V1_2/models/dimension.py +1119 -1116
- deepfos/api/account.py +1 -0
- deepfos/api/app.py +1 -0
- deepfos/api/base.py +70 -71
- deepfos/api/deep_pipeline.py +1 -1
- deepfos/api/deepconnector.py +3 -3
- deepfos/api/financial_model.py +12 -0
- deepfos/api/models/account.py +130 -130
- deepfos/api/models/accounting_engines.py +250 -250
- deepfos/api/models/app.py +355 -355
- deepfos/api/models/approval_process.py +231 -231
- deepfos/api/models/base.py +49 -209
- deepfos/api/models/business_model.py +239 -239
- deepfos/api/models/consolidation.py +196 -196
- deepfos/api/models/consolidation_process.py +31 -31
- deepfos/api/models/datatable_mysql.py +78 -78
- deepfos/api/models/deep_pipeline.py +20 -9
- deepfos/api/models/deepconnector.py +9 -8
- deepfos/api/models/deepfos_task.py +118 -118
- deepfos/api/models/deepmodel.py +120 -120
- deepfos/api/models/dimension.py +613 -610
- deepfos/api/models/financial_model.py +691 -663
- deepfos/api/models/journal_model.py +120 -120
- deepfos/api/models/journal_template.py +185 -185
- deepfos/api/models/memory_financial_model.py +131 -131
- deepfos/api/models/platform.py +16 -16
- deepfos/api/models/python.py +32 -32
- deepfos/api/models/reconciliation_engine.py +104 -104
- deepfos/api/models/reconciliation_report.py +29 -29
- deepfos/api/models/role_strategy.py +213 -213
- deepfos/api/models/smartlist.py +86 -86
- deepfos/api/models/space.py +312 -312
- deepfos/api/models/system.py +299 -297
- deepfos/api/models/variable.py +131 -131
- deepfos/api/models/workflow.py +290 -270
- deepfos/api/platform.py +3 -1
- deepfos/api/space.py +1 -0
- deepfos/api/system.py +1 -0
- deepfos/api/workflow.py +8 -0
- deepfos/cache.py +50 -4
- deepfos/element/bizmodel.py +2 -2
- deepfos/element/deep_pipeline.py +29 -16
- deepfos/element/deepconnector.py +36 -1
- deepfos/element/deepmodel.py +591 -332
- deepfos/element/dimension.py +30 -17
- deepfos/element/finmodel.py +542 -101
- deepfos/element/journal.py +20 -10
- deepfos/element/rolestrategy.py +4 -4
- deepfos/element/variable.py +23 -17
- deepfos/element/workflow.py +60 -3
- deepfos/exceptions/__init__.py +1 -1
- deepfos/lib/deepchart.py +14 -13
- deepfos/lib/deepux.py +11 -11
- deepfos/lib/discovery.py +3 -0
- deepfos/lib/filterparser.py +2 -2
- deepfos/lib/k8s.py +101 -0
- deepfos/lib/msg.py +34 -8
- deepfos/lib/serutils.py +34 -9
- deepfos/lib/sysutils.py +37 -18
- deepfos/lib/utils.py +62 -2
- deepfos/options.py +39 -8
- {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/METADATA +7 -7
- {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/RECORD +68 -67
- {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/WHEEL +0 -0
- {deepfos-1.1.60.dist-info → deepfos-1.1.78.dist-info}/top_level.txt +0 -0
deepfos/element/finmodel.py
CHANGED
@@ -9,11 +9,12 @@ from typing import (
 
 import numpy as np
 import pandas as pd
-from pydantic import Field
+from pydantic import Field
 from loguru import logger
 import datetime
 from multidict import MultiDict
 
+from deepfos.api.models import compat_parse_obj_as as parse_obj_as
 from .base import ElementBase, SyncMeta
 from .dimension import AsyncDimension, Dimension
 from .datatable import (
@@ -24,8 +25,10 @@ from deepfos.api.app import AppAPI
 from deepfos.lib.asynchronous import future_property
 from deepfos.lib.utils import (
     unpack_expr, dict_to_expr, LazyDict, expr_to_dict,
-    dict_to_sql,
+    dict_to_sql, find_str, concat_url, CIEnumMeta, ChunkAlert,
+    split_dataframe_alert,
 )
+from deepfos.lib.sysutils import complete_cartesian_product
 from deepfos.lib.constant import (
     DFLT_DATA_COLUMN, VIEW, VIEW_DICT,
     HIERARCHY, DECIMAL_COL, STRING_COL,
@@ -41,7 +44,8 @@ from deepfos.api.models.financial_model import (
     PcParams, CopyCalculateDTO,
     TaskExecutionParam,
     ParameterDefineDto,  # noqa
-    FinancialDataDto
+    FinancialDataDto,
+    SyncClearDataDto,
 )
 from deepfos.api.models.base import BaseModel
 from deepfos.options import OPTION
@@ -68,9 +72,12 @@ def need_query(body: str):
 
 # -----------------------------------------------------------------------------
 # models
+TypeDimensionExpr = Union[str, Dict[str, Union[List[str], str]]]
+
+
 class Description(BaseModel):
-    zh_cn: str = Field(None, alias='zh-cn')
-    en: Optional[str]
+    zh_cn: Optional[str] = Field(None, alias='zh-cn')
+    en: Optional[str] = None
 
 
 class DimensionInfo(BaseModel):
@@ -95,11 +102,11 @@ class DataTableInfo(BaseModel):
 
 
 class MDXVariableParameter(ParameterDefineDto):
-    type = 0
+    type: int = 0
 
 
 class MDXCubeParameter(ParameterDefineDto):
-    type = 1
+    type: int = 1
 
 
 class RoundType(int, Enum, metaclass=CIEnumMeta):
@@ -132,7 +139,11 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
     Args:
         entry_object: Template for the data-source name; the replaceable fields are the script element name or the script full name. Defaults to python
-
+        entry_mode: Data-source type, which affects the displayed icon. Defaults to 1
+            1: Python
+            2: Spreadsheet
+            3: Visual calculation script
+            4: Data flow 3.0
 
     Note:
 
@@ -172,11 +183,17 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
         folder_id: Optional[str] = None,
         path: Optional[str] = None,
         entry_object='python',
+        entry_mode=1,
         server_name: Optional[str] = None,
+        before_chunk: ChunkAlert = None,
+        after_chunk: ChunkAlert = None,
     ):
         full_name = OPTION.general.task_info.get('script_name', 'python')
         self.entry_object = entry_object.format(script_name=full_name.split('.')[-1],
                                                 full_name=full_name)
+        self.entry_mode = entry_mode
+        self.before_chunk = before_chunk
+        self.after_chunk = after_chunk
         super().__init__(element_name, folder_id, path, server_name)
 
     @future_property(on_demand=True)
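The constructor now accepts entry_mode, before_chunk and after_chunk and threads them through to the save path. A minimal usage sketch, assuming a financial model element exists; the element name is a placeholder and the ChunkAlert arguments are left at their defaults:

    from deepfos.element.finmodel import FinancialCube

    # 'Sales' is a placeholder element name; entry_mode=1 marks the data source
    # as Python, per the docstring added in the hunk above.
    cube = FinancialCube('Sales', entry_object='python', entry_mode=1)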
@@ -377,6 +394,7 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
         verify_access: bool = False,
         include_ignored: bool = False,
         normalize_view: bool = False,
+        pivot_members: List[str] = None,
     ) -> Union[pd.DataFrame, Tuple[pd.DataFrame, Dict[str, str]]]:
         """
         Query cube data based on a dimension expression and pov
@@ -390,6 +408,7 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
             verify_access: Whether to query with permission checks
             include_ignored: When a multi-version entity dimension is included, whether to keep invalid data (rows whose i column is 1) in the result
             normalize_view: Whether to normalize the upper/lower-case view column name to "View"
+            pivot_members: If a pivot dimension is used, an optional list of pivot members; missing pivot columns are filled in when a member is absent from the data
 
         .. admonition:: Example
 
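A short sketch of how the new pivot_members argument might be called, continuing from the cube instance above. It assumes query also accepts the pivot_dim parameter referenced in the implementation hunks below; the dimension and member names are illustrative only:

    df = cube.query(
        "Entity{Base(TotalEntity,0)}->Account{NetProfit}",
        pov="Year{2024}->Scenario{Actual}",
        pivot_dim="Period",
        pivot_members=["Jan", "Feb", "Mar"],  # absent months come back as empty columns
    )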
@@ -419,7 +438,9 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
         """
         pov = self._resolve_pov_as_dict(pov, validate_expr)
-        expression, full_pov = self._split_expr(
+        expression, full_pov = self._split_expr(
+            expression, pov, validate_expr=validate_expr
+        )
         pov_expr = dict_to_expr(full_pov)
 
         if not expression:  # only pov
@@ -456,10 +477,17 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
         if pivot_dim is not None:
             pivot_col = self._get_column_from_dim(pivot_dim)
-
+            has_mbrs = isinstance(pivot_members, list)
+            if has_mbrs and any(not isinstance(c, str) for c in pivot_members):
+                raise ValueError(
+                    f"Pivot members must be a list of string, got: {pivot_members}"
+                )
             if pivot_col in full_pov:
                 val = full_pov.pop(pivot_col)
-
+                if not has_mbrs or val in pivot_members:
+                    data = data.rename(columns={DFLT_DATA_COLUMN: val})
+                else:
+                    data = data.drop(columns=[DFLT_DATA_COLUMN])
             elif pivot_col not in data.columns:
                 raise ValueError(
                     f"Pivot dimension: {pivot_dim} does not "
@@ -475,55 +503,28 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
                 columns=pivot_col, aggfunc='first', fill_value=None
             ).reset_index(drop=drop_index)
             data.columns.name = None
-
+            if has_mbrs:
+                data = data.drop(columns=filter(
+                    lambda c: c not in pivot_members and c not in index,
+                    data.columns
+                ))
+            if has_mbrs:
+                mbr_assigns = {m: None for m in pivot_members if m not in data.columns}
+                data = data.assign(**mbr_assigns)
         if not compact:
             return data.assign(**full_pov)
         return data, full_pov
 
-
+    def _build_dataframe_for_save(
         self,
         data: pd.DataFrame,
         pov: Optional[Union[str, Dict[str, str]]] = None,
         data_column: str = DFLT_DATA_COLUMN,
-        need_check: bool = True,
-        data_audit: bool = True,
-        chunksize: Optional[int] = None,
-        callback: bool = True,
         comment_column: str = DFLT_COMMENT_COLUMN,
-
-    ):
-        """
-        Save the data of a DataFrame into the cube.
-
-        Args:
-            data: The data to save
-            pov: Point Of View, either a dimension expression or a key-value mapping.
-            data_column: Name of the data column
-            need_check: Whether the Java interface should validate dirty data
-            data_audit: Whether to record the save in the data audit
-            chunksize: Maximum number of dataframe rows per call to the save interface.
-                When data has more rows than this, it is saved in multiple batches.
-            callback: Whether to trigger the callback
-            comment_column: Name of the comment column, defaults to VirtualMeasure_220922
-            auth_mode: Authorization mode for saving data, defaults to 0. The modes are:
-                - 0: inherit the financial model's authorization mode
-                - 1: lenient mode
-                - 2: strict mode
-                - 3: normal mode
-
-        Note:
-            This method applies the following processing to the persisted data:
-
-            - column renaming: dimension name -> data table column name
-            - extra data columns are ignored
-
-        See Also:
-            :meth:`save_unpivot`
-
-        """
+    ) -> Tuple[pd.DataFrame, Dict[str, str]]:
         if data.empty:
             logger.info("Will not save to cube because dataframe is empty.")
-            return
+            return pd.DataFrame(), {}
 
         if data_column not in data.columns:
             raise ValueError(f"Missing data column: {data_column}.")
@@ -570,11 +571,190 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
             data = pd.concat([data, cmt_data])
             required_cols.add(COLUMN_USAGE_FIELD)
 
+        return data[list(required_cols)], pov
+
+    async def save(
+        self,
+        data: pd.DataFrame,
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        data_column: str = DFLT_DATA_COLUMN,
+        need_check: bool = True,
+        data_audit: bool = True,
+        chunksize: Optional[int] = None,
+        callback: bool = True,
+        comment_column: str = DFLT_COMMENT_COLUMN,
+        auth_mode: Literal[0, 1, 2, 3] = 0,
+    ):
+        """
+        Save the data of a DataFrame into the cube.
+
+        Args:
+            data: The data to save
+            pov: Point Of View, either a dimension expression or a key-value mapping.
+            data_column: Name of the data column
+            need_check: Whether the Java interface should validate dirty data
+            data_audit: Whether to record the save in the data audit
+            chunksize: Maximum number of dataframe rows per call to the save interface.
+                When data has more rows than this, it is saved in multiple batches.
+            callback: Whether to trigger the callback
+            comment_column: Name of the comment column, defaults to VirtualMeasure_220922
+            auth_mode: Authorization mode for saving data, defaults to 0. The modes are:
+                - 0: inherit the financial model's authorization mode
+                - 1: lenient mode
+                - 2: strict mode
+                - 3: normal mode
+
+        Note:
+            This method applies the following processing to the persisted data:
+
+            - column renaming: dimension name -> data table column name
+            - extra data columns are ignored
+
+        See Also:
+            :meth:`save_unpivot`
+            :meth:`complement_save`
+            :meth:`complement_save_unpivot`
+
+        """
+        data, pov = self._build_dataframe_for_save(data, pov, data_column, comment_column)
+        if data.empty:
+            return
+
         return await self._save_impl(
-            data
-
+            data, pov, need_check, data_audit, chunksize, callback, auth_mode
+        )
+
+    def _complement(
+        self,
+        data: pd.DataFrame,
+        expression: str,
+        default_hierarchy: str = 'Base',
+        pov: Dict[str, str] = None,
+    ):
+        if isinstance(expression, dict):
+            expression = dict_to_expr(expression)
+        expr, pov = self._split_expr(expression, pov or {}, default_hierarchy)
+        full_expr = {**expr_to_dict(expr), **pov}
+        folders = {col: dim.folderId for col, dim in self.dimensions.items()}
+        data_comp = complete_cartesian_product(
+            full_expr,
+            data,
+            folder_ids=folders,
+            col_dim_map=self.col_dim_map,
+        )
+        return data_comp
+
+    async def complement_save(
+        self,
+        data: pd.DataFrame,
+        expression: Union[str, Dict[str, Union[List[str], str]]],
+        default_hierarchy: str = "Base",
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        data_column: str = DFLT_DATA_COLUMN,
+        comment_column: str = DFLT_COMMENT_COLUMN,
+        **kwargs
+    ):
+        """Overwrite the specified dimension scope and save the data
+
+        Compared with :meth:`save`, before saving, `data` is completed to the full Cartesian product according to `expression`.
+        Data not covered by `data` is filled with `None`
+
+        Note:
+            Logically equivalent to the two calls
+
+            .. code-block:: python
+
+                cube.delete(expression)
+                cube.save(data, pov, data_column, comment_column, **kwargs)
+
+        Args:
+            data: The data to save
+            expression: The scope to overwrite (a dimension expression)
+            default_hierarchy: Hierarchy function taken by default when a dimension is not specified in expression,
+                i.e. it is filled in as `default_hierarchy(#root,0)`
+            pov: Point Of View, either a dimension expression or a key-value mapping.
+            data_column: Name of the data column
+            comment_column: Name of the comment column, defaults to VirtualMeasure_220922
+            **kwargs: Other arguments that can be passed to :meth:`save`
+
+        See Also:
+            :meth:`save`
+            :meth:`save_unpivot`
+            :meth:`complement_save_unpivot`
+
+        """
+        if not self._backend_del_availiable:
+            return await self._legacy_complement_save(
+                data=data,
+                expression=expression,
+                default_hierarchy=default_hierarchy,
+                pov=pov,
+                data_column=data_column,
+                comment_column=comment_column,
+                **kwargs
+            )
+
+        await self.delete(
+            expression,
+            data_audit=False,
+            default_hierarchy=default_hierarchy
+        )
+        await self.save(
+            data=data,
+            pov=pov,
+            data_column=data_column,
+            comment_column=comment_column,
+            **kwargs,
         )
 
+
+    async def _legacy_complement_save(
+        self,
+        data: pd.DataFrame,
+        expression: Union[str, Dict[str, Union[List[str], str]]],
+        default_hierarchy: str = "Base",
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        data_column: str = DFLT_DATA_COLUMN,
+        comment_column: str = DFLT_COMMENT_COLUMN,
+        **kwargs
+    ):
+        data, pov = self._build_dataframe_for_save(data, pov, data_column, comment_column)
+        if data.empty:
+            return
+
+        data_comp = self._complement(data, expression, default_hierarchy, pov)
+        return await self._save_impl(data_comp, pov, **kwargs)
+
+    def _build_dataframe_for_save_unpivot(
+        self,
+        data: pd.DataFrame,
+        unpivot_dim: str,
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        save_nan: bool = False,
+    ) -> Tuple[pd.DataFrame, Dict[str, str]]:
+        if data.empty:
+            logger.info("Will not save to cube because dataframe is empty.")
+            return pd.DataFrame(), {}
+
+        data = data.rename(columns=self._maybe_get_column_from_dim)
+        dim = self._get_column_from_dim(unpivot_dim)
+        pov = self._resolve_pov_as_dict(pov)
+        data_cols = set(data.columns)
+        unpivot_cols = data_cols.difference(pov.keys(), self.dim_col_map.values())
+        if self._meta.autoCalculation:
+            unpivot_cols.discard(VIEW)
+        id_cols = data_cols - unpivot_cols
+        data = data.melt(
+            id_vars=id_cols, value_vars=unpivot_cols,
+            var_name=dim, value_name=DFLT_DATA_COLUMN
+        )
+        if not save_nan:
+            data = data.dropna()
+        if data.empty:
+            logger.info("Will not save to cube because dataframe is empty.")
+            return pd.DataFrame(), {}
+        return data, pov
+
     async def save_unpivot(
         self,
         data: pd.DataFrame,
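A sketch of the new complement_save, following the docstring above: the expression scope is cleared first, then data (completed to a Cartesian product) is saved. Column, dimension and member names are illustrative, and data_column is passed explicitly so the example does not depend on the default constant:

    import pandas as pd

    df = pd.DataFrame({
        "Entity": ["E1", "E1"],
        "Account": ["Sales", "Cost"],
        "Value": [100.0, 60.0],
    })
    cube.complement_save(
        df,
        expression={"Entity": ["E1", "E2"], "Account": ["Sales", "Cost"]},
        pov="Year{2024}->Period{Jan}",
        data_column="Value",
    )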
@@ -621,35 +801,98 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
             | :meth:`save`
 
         """
+        data, pov = self._build_dataframe_for_save_unpivot(
+            data, unpivot_dim, pov, save_nan
+        )
         if data.empty:
-            logger.info("Will not save to cube because dataframe is empty.")
             return
 
-
-
-
-        data_cols = set(data.columns)
-        unpivot_cols = data_cols.difference(pov.keys(), self.dim_col_map.values())
+        return await self._save_impl(
+            data, pov, need_check, data_audit, chunksize, callback
+        )
 
-
-
+    async def complement_save_unpivot(
+        self,
+        data: pd.DataFrame,
+        unpivot_dim: str,
+        expression: Union[str, Dict[str, Union[List[str], str]]],
+        default_hierarchy: str = "Base",
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        save_nan: bool = False,
+        **kwargs
+    ):
+        """Overwrite the specified dimension scope and save a ``DataFrame`` whose columns hold the members of one dimension
 
-
-        data
-        id_vars=id_cols, value_vars=unpivot_cols,
-        var_name=dim, value_name=DFLT_DATA_COLUMN
-        )
+        Compared with :meth:`save_unpivot`, before saving, `data` is completed to the full Cartesian product according to `expression`.
+        Data not covered by `data` is filled with `None`
 
-
-
-        if data.empty:
-            logger.info("Will not save to cube because dataframe is empty.")
-            return
+        Note:
+            Logically equivalent to the two calls
 
-
-
+            .. code-block:: python
+
+                cube.delete(expression)
+                cube.save_unpivot(data, unpivot_dim, **kwargs)
+
+        Args:
+            data: The data to save
+            unpivot_dim: The dimension whose members are on the columns
+            expression: The scope to overwrite (a dimension expression)
+            default_hierarchy: Hierarchy function taken by default when a dimension is not specified in expression,
+                i.e. it is filled in as `default_hierarchy(#root,0)`
+            pov: Point Of View, either a dimension expression or a key-value mapping.
+            save_nan: When unpivoting the data columns onto rows, whether rows whose data is empty are saved
+            **kwargs: Other arguments that can be passed to :meth:`save_unpivot`
+
+        See Also:
+            :meth:`save`
+            :meth:`save_unpivot`
+            :meth:`complement_save`
+
+        """
+        if not self._backend_del_availiable:
+            return await self._legacy_complement_save_unpivot(
+                data=data,
+                unpivot_dim=unpivot_dim,
+                expression=expression,
+                default_hierarchy=default_hierarchy,
+                pov=pov,
+                save_nan=save_nan,
+                **kwargs
+            )
+
+        await self.delete(
+            expression,
+            data_audit=False,
+            default_hierarchy=default_hierarchy
+        )
+        await self.save_unpivot(
+            data=data,
+            unpivot_dim=unpivot_dim,
+            pov=pov,
+            save_nan=save_nan,
+            **kwargs,
         )
 
+    async def _legacy_complement_save_unpivot(
+        self,
+        data: pd.DataFrame,
+        unpivot_dim: str,
+        expression: Union[str, Dict[str, Union[List[str], str]]],
+        default_hierarchy: str = "Base",
+        pov: Optional[Union[str, Dict[str, str]]] = None,
+        save_nan: bool = False,
+        **kwargs
+    ):
+        data, pov = self._build_dataframe_for_save_unpivot(
+            data, unpivot_dim, pov, save_nan
+        )
+        if data.empty:
+            return
+
+        data_comp = self._complement(data, expression, default_hierarchy, pov)
+        return await self._save_impl(data_comp, pov, **kwargs)
+
     async def _save_impl(
         self,
         data: pd.DataFrame,
@@ -661,36 +904,47 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
         auth_mode: Literal[0, 1, 2, 3] = 0,
     ):
         # replace NaN to standard None
-
+        # NB: replace twice in case of infer None to nan happened in 2.x pandas
+        data[DFLT_DATA_COLUMN] = data[DFLT_DATA_COLUMN].replace({None: np.nan})
+        data[DFLT_DATA_COLUMN] = data[DFLT_DATA_COLUMN].replace({np.nan: None})
         # ensure view is capitalized
         if self._meta.autoCalculation:
             data = data.rename(columns=VIEW_DICT)
+
+        cols = list(data.columns)
+        if dup_cols := set([c for c in cols if cols.count(c) > 1]):
+            raise ValueError(f"Duplicate columns:{dup_cols} found in data.")
+
         # save data
         resp = []
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+        for batch_data, alert in split_dataframe_alert(
+            data, chunksize, self.before_chunk, self.after_chunk
+        ):
+            with alert:
+                row_data = [
+                    {"columnDimensionMemberMap": row}
+                    for row in bp.dataframe_to_dict(batch_data)
+                ]
+                payload = ReactSpreadsheetSaveForm(
+                    entryObject=self.entry_object,
+                    sheetDatas=[SpreadsheetSingleData(
+                        cubeName=self.element_info.elementName,
+                        cubeFolderId=self.element_info.folderId,
+                        rowDatas=row_data,
+                        commonMember=pov,
+                    )],
+                    needCheck=need_check,
+                    dataAuditSwitch=data_audit,
+                    entryMode=self.entry_mode,
+                    validateDimensionMember=need_check,
+                    callback=callback,
+                    saveDataAuthMode=auth_mode
+                )
+                r = await self.async_api.reactspreadsheet.save(
+                    payload.dict(exclude_unset=True)
+                )
+                resp.append(r)
         return resp
 
     async def delete_with_mdx(
@@ -816,12 +1070,33 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
         return await self.mdx_execution(script)
 
+    @future_property
+    async def _server_version(self) -> Tuple[int, ...]:
+        api: FinancialModelAPI = await self.wait_for('async_api')
+        version = await api.extra.git_version()
+        if version.lower().startswith('v'):
+            version = version[1:]
+        parts = []
+        for part in version.split('.'):
+            try:
+                parts.append(int(part))
+            except (TypeError, ValueError):
+                continue
+        return tuple(parts)
+
+    @future_property
+    async def _backend_del_availiable(self):
+        version = await self.__class__._server_version.wait_for(self)
+        return version >= (1, 1, 1, 2, 1)
+
     async def delete(
         self,
-        expression: Union[
+        expression: Union[TypeDimensionExpr, List[TypeDimensionExpr]],
         chunksize: Optional[int] = None,
         use_mdx: bool = False,
-        callback: bool =
+        callback: bool = False,
+        data_audit: bool = True,
+        default_hierarchy: str = "Base",
     ):
         """Delete data
 
@@ -838,6 +1113,9 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
                 When data has more rows than this, it is saved in multiple batches.
             use_mdx: Whether to implement the deletion with an MDX script, defaults to False; equivalent to calling :meth:`delete_with_mdx`
             callback: Whether to trigger the callback
+            data_audit: Whether to record an audit log
+            default_hierarchy: Hierarchy function taken by default when a dimension is not specified in expression,
+                i.e. it is filled in as `default_hierarchy(#root,0)`
 
         .. admonition:: Example
 
@@ -862,6 +1140,63 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
             :meth:`insert_null` :meth:`delete_with_mdx`
 
         """
+        if (
+            not self._backend_del_availiable
+            or use_mdx
+            or callback
+        ):
+            if isinstance(expression, list):
+                raise ValueError(
+                    f"pass expresssion as list is not yet supported. "
+                    f"backend version: {self._server_version}")
+            return await self._legacy_delete(
+                expression,
+                chunksize=chunksize,
+                use_mdx=use_mdx,
+                callback=callback,
+                data_audit=data_audit,
+            )
+
+        if not isinstance(expression, list):
+            expression = [expression]
+
+        clear_scopes = []
+        for expr in expression:
+            if isinstance(expr, dict):
+                expr = dict_to_expr(expr)
+
+            expr_str, pov = self._split_expr(
+                expr, {},
+                default_hierarchy=default_hierarchy,
+                validate_expr=True
+            )
+            expr_parts = []
+            if expr_str:
+                expr_parts.append(expr_str)
+            if pov:
+                expr_parts.append(dict_to_expr(pov))
+
+            clear_scopes.append("->".join(expr_parts))
+
+        return await self.async_api.calculate.clear_data_ex(
+            SyncClearDataDto(
+                cubeName=self.element_name,
+                folderId=self.element_info.folderId,
+                clearScriptList=clear_scopes,
+                entryMode=self.entry_mode,
+                entryObject='python',
+                dataAuditSwitch=data_audit
+            )
+        )
+
+    async def _legacy_delete(
+        self,
+        expression: Union[str, Dict[str, Union[List[str], str]]],
+        chunksize: Optional[int] = None,
+        use_mdx: bool = False,
+        callback: bool = True,
+        data_audit: bool = True,
+    ):
         if use_mdx:
             return await self.delete_with_mdx(expression)
 
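With a backend at or above the version gate introduced above, delete accepts either one dimension expression or a list of expressions (strings or dicts) and clears them all in a single clear_data_ex call; on older backends, or when use_mdx/callback is requested, it falls back to _legacy_delete. A hedged sketch with illustrative dimension and member names:

    cube.delete(
        [
            {"Year": "2024", "Period": ["Jan", "Feb"]},
            "Year{2023}->Period{Dec}",
        ],
        data_audit=True,
        default_hierarchy="Base",
    )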
@@ -876,7 +1211,7 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
         data, pov = await self.query(expression)
         data[DFLT_DATA_COLUMN] = None
         return await self.save(
-            data, pov, data_audit=
+            data, pov, data_audit=data_audit, chunksize=chunksize, callback=callback
         )
 
     async def queries(
@@ -1312,13 +1647,82 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
         return res
 
+    async def mdx_execution_with_code(
+        self,
+        script_code: str,
+        parameters: Optional[Dict[str, str]] = None,
+        precision: Optional[int] = None,
+        timeout: Optional[int] = None,
+        round_type: Union[RoundType, str] = RoundType.floor,
+    ):
+        """Execute the corresponding MDX calculation script by providing the MDX calculation script code
+
+        Args:
+            script_code: Code of the MDX calculation script
+            parameters: Key-value pairs of the scalar parameters required by the MDX execution
+            precision: Calculation precision, defaults to the decimal precision of the financial model
+            timeout: Timeout in ms, defaults to 180 seconds (kept consistent with OPTION.api.timeout);
+                if None, the interface default of 60 seconds applies;
+                this interface currently does not support waiting indefinitely for the execution result
+            round_type: Decimal rounding type, defaults to truncation (floor)
+
+        Returns:
+            The execution result
+
+        """
+
+        if timeout is None:
+            timeout = OPTION.api.timeout * 1000
+
+        params = []
+
+        if parameters is not None:
+            if not isinstance(parameters, dict):
+                raise TypeError('parameters参数应为字典')
+
+            for key, value in parameters.items():
+                params.append(MDXVariableParameter(key=key, value=value))
+
+        path = self._path
+
+        if path is None:
+            path = await AppAPI(sync=False).folder.get_folder_full(
+                self.element_info.folderId
+            )
+
+        path = path.replace('\\', '/').rstrip('/') + '/'
+
+        res = await self.async_api.mdxtask.execution(
+            TaskExecutionParam(
+                businessId=script_code,
+                decimalDigitsType=RoundType[round_type],
+                parameters=params,
+                precision=precision,
+                timeout=timeout,
+                scriptCode=script_code,
+                cubeName=self.element_info.elementName,
+                cubeFolderId=self.element_info.folderId,
+                cubePath=path
+            )
+        )
+
+        if res.status == 1:
+            raise MDXExecuteTimeout(f'MDX执行超时,具体响应:\n{res}')
+
+        if res.result is False:
+            raise MDXExecuteFail(f'MDX执行失败,失败原因:\n{res.failReason}')
+
+        return res
+
 
 class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
     synchronize = (
         'query',
         'queries',
         'save',
+        'complement_save',
         'save_unpivot',
+        'complement_save_unpivot',
         'delete',
         'delete_with_mdx',
         'pc_init',
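A sketch of calling the new mdx_execution_with_code on the same cube instance; the script code and parameter names are placeholders, and round_type accepts either a RoundType member or its (case-insensitive) name:

    result = cube.mdx_execution_with_code(
        "MDX_SCRIPT_001",
        parameters={"p_year": "2024"},
        precision=2,
        round_type="floor",
    )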
@@ -1326,7 +1730,8 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
         'pc_upsert',
         'insert_null',
         'copy_calculate',
-        'mdx_execution'
+        'mdx_execution',
+        'mdx_execution_with_code',
     )
 
     if TYPE_CHECKING:  # pragma: no cover
@@ -1348,6 +1753,7 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
             verify_access: bool = False,
             include_ignored: bool = False,
             normalize_view: bool = False,
+            pivot_members: List[str] = None,
         ) -> Union[pd.DataFrame, Tuple[pd.DataFrame, Dict[str, str]]]:
             ...
 
@@ -1365,6 +1771,17 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
         ):
             ...
 
+        def complement_save(
+            self,
+            data: pd.DataFrame,
+            expression: Union[str, Dict[str, Union[List[str], str]]],
+            pov: Optional[Union[str, Dict[str, str]]] = None,
+            data_column: str = DFLT_DATA_COLUMN,
+            comment_column: str = DFLT_COMMENT_COLUMN,
+            **kwargs
+        ):
+            ...
+
         def save_unpivot(
             self,
             data: pd.DataFrame,
@@ -1378,12 +1795,26 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
         ):
             ...
 
+        def complement_save_unpivot(
+            self,
+            data: pd.DataFrame,
+            unpivot_dim: str,
+            expression: Union[str, Dict[str, Union[List[str], str]]],
+            default_hierarchy: str = "Base",
+            pov: Optional[Union[str, Dict[str, str]]] = None,
+            save_nan: bool = False,
+            **kwargs
+        ):
+            ...
+
         def delete(
             self,
             expression: Union[str, Dict[str, Union[List[str], str]]],
             chunksize: Optional[int] = None,
             use_mdx: bool = False,
-            callback: bool = True
+            callback: bool = True,
+            data_audit: bool = True,
+            default_hierarchy: str = "Base",
         ):
             ...
 
@@ -1443,6 +1874,16 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
         ):
             ...
 
+        def mdx_execution_with_code(
+            self,
+            script_code: str,
+            parameters: Optional[Dict[str, str]] = None,
+            precision: Optional[int] = None,
+            timeout: Optional[int] = None,
+            round_type: Union[RoundType, str] = RoundType.floor,
+        ):
+            ...
+
     @cached_property
     def dim_elements(self) -> LazyDict[str, Dimension]:
         """Dimension elements of the financial cube