deepfos 1.1.60__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (175) hide show
  1. deepfos/__init__.py +6 -0
  2. deepfos/_version.py +21 -0
  3. deepfos/algo/__init__.py +0 -0
  4. deepfos/algo/graph.py +171 -0
  5. deepfos/algo/segtree.py +31 -0
  6. deepfos/api/V1_1/__init__.py +0 -0
  7. deepfos/api/V1_1/business_model.py +119 -0
  8. deepfos/api/V1_1/dimension.py +599 -0
  9. deepfos/api/V1_1/models/__init__.py +0 -0
  10. deepfos/api/V1_1/models/business_model.py +1033 -0
  11. deepfos/api/V1_1/models/dimension.py +2768 -0
  12. deepfos/api/V1_2/__init__.py +0 -0
  13. deepfos/api/V1_2/dimension.py +285 -0
  14. deepfos/api/V1_2/models/__init__.py +0 -0
  15. deepfos/api/V1_2/models/dimension.py +2923 -0
  16. deepfos/api/__init__.py +0 -0
  17. deepfos/api/account.py +167 -0
  18. deepfos/api/accounting_engines.py +147 -0
  19. deepfos/api/app.py +626 -0
  20. deepfos/api/approval_process.py +198 -0
  21. deepfos/api/base.py +983 -0
  22. deepfos/api/business_model.py +160 -0
  23. deepfos/api/consolidation.py +129 -0
  24. deepfos/api/consolidation_process.py +106 -0
  25. deepfos/api/datatable.py +341 -0
  26. deepfos/api/deep_pipeline.py +61 -0
  27. deepfos/api/deepconnector.py +36 -0
  28. deepfos/api/deepfos_task.py +92 -0
  29. deepfos/api/deepmodel.py +188 -0
  30. deepfos/api/dimension.py +486 -0
  31. deepfos/api/financial_model.py +319 -0
  32. deepfos/api/journal_model.py +119 -0
  33. deepfos/api/journal_template.py +132 -0
  34. deepfos/api/memory_financial_model.py +98 -0
  35. deepfos/api/models/__init__.py +3 -0
  36. deepfos/api/models/account.py +483 -0
  37. deepfos/api/models/accounting_engines.py +756 -0
  38. deepfos/api/models/app.py +1338 -0
  39. deepfos/api/models/approval_process.py +1043 -0
  40. deepfos/api/models/base.py +234 -0
  41. deepfos/api/models/business_model.py +805 -0
  42. deepfos/api/models/consolidation.py +711 -0
  43. deepfos/api/models/consolidation_process.py +248 -0
  44. deepfos/api/models/datatable_mysql.py +427 -0
  45. deepfos/api/models/deep_pipeline.py +55 -0
  46. deepfos/api/models/deepconnector.py +28 -0
  47. deepfos/api/models/deepfos_task.py +386 -0
  48. deepfos/api/models/deepmodel.py +308 -0
  49. deepfos/api/models/dimension.py +1576 -0
  50. deepfos/api/models/financial_model.py +1796 -0
  51. deepfos/api/models/journal_model.py +341 -0
  52. deepfos/api/models/journal_template.py +854 -0
  53. deepfos/api/models/memory_financial_model.py +478 -0
  54. deepfos/api/models/platform.py +178 -0
  55. deepfos/api/models/python.py +221 -0
  56. deepfos/api/models/reconciliation_engine.py +411 -0
  57. deepfos/api/models/reconciliation_report.py +161 -0
  58. deepfos/api/models/role_strategy.py +884 -0
  59. deepfos/api/models/smartlist.py +237 -0
  60. deepfos/api/models/space.py +1137 -0
  61. deepfos/api/models/system.py +1065 -0
  62. deepfos/api/models/variable.py +463 -0
  63. deepfos/api/models/workflow.py +946 -0
  64. deepfos/api/platform.py +199 -0
  65. deepfos/api/python.py +90 -0
  66. deepfos/api/reconciliation_engine.py +181 -0
  67. deepfos/api/reconciliation_report.py +64 -0
  68. deepfos/api/role_strategy.py +234 -0
  69. deepfos/api/smartlist.py +69 -0
  70. deepfos/api/space.py +582 -0
  71. deepfos/api/system.py +372 -0
  72. deepfos/api/variable.py +154 -0
  73. deepfos/api/workflow.py +264 -0
  74. deepfos/boost/__init__.py +6 -0
  75. deepfos/boost/py_jstream.py +89 -0
  76. deepfos/boost/py_pandas.py +20 -0
  77. deepfos/cache.py +121 -0
  78. deepfos/config.py +6 -0
  79. deepfos/core/__init__.py +27 -0
  80. deepfos/core/cube/__init__.py +10 -0
  81. deepfos/core/cube/_base.py +462 -0
  82. deepfos/core/cube/constants.py +21 -0
  83. deepfos/core/cube/cube.py +408 -0
  84. deepfos/core/cube/formula.py +707 -0
  85. deepfos/core/cube/syscube.py +532 -0
  86. deepfos/core/cube/typing.py +7 -0
  87. deepfos/core/cube/utils.py +238 -0
  88. deepfos/core/dimension/__init__.py +11 -0
  89. deepfos/core/dimension/_base.py +506 -0
  90. deepfos/core/dimension/dimcreator.py +184 -0
  91. deepfos/core/dimension/dimension.py +472 -0
  92. deepfos/core/dimension/dimexpr.py +271 -0
  93. deepfos/core/dimension/dimmember.py +155 -0
  94. deepfos/core/dimension/eledimension.py +22 -0
  95. deepfos/core/dimension/filters.py +99 -0
  96. deepfos/core/dimension/sysdimension.py +168 -0
  97. deepfos/core/logictable/__init__.py +5 -0
  98. deepfos/core/logictable/_cache.py +141 -0
  99. deepfos/core/logictable/_operator.py +663 -0
  100. deepfos/core/logictable/nodemixin.py +673 -0
  101. deepfos/core/logictable/sqlcondition.py +609 -0
  102. deepfos/core/logictable/tablemodel.py +497 -0
  103. deepfos/db/__init__.py +36 -0
  104. deepfos/db/cipher.py +660 -0
  105. deepfos/db/clickhouse.py +191 -0
  106. deepfos/db/connector.py +195 -0
  107. deepfos/db/daclickhouse.py +171 -0
  108. deepfos/db/dameng.py +101 -0
  109. deepfos/db/damysql.py +189 -0
  110. deepfos/db/dbkits.py +358 -0
  111. deepfos/db/deepengine.py +99 -0
  112. deepfos/db/deepmodel.py +82 -0
  113. deepfos/db/deepmodel_kingbase.py +83 -0
  114. deepfos/db/edb.py +214 -0
  115. deepfos/db/gauss.py +83 -0
  116. deepfos/db/kingbase.py +83 -0
  117. deepfos/db/mysql.py +184 -0
  118. deepfos/db/oracle.py +131 -0
  119. deepfos/db/postgresql.py +192 -0
  120. deepfos/db/sqlserver.py +99 -0
  121. deepfos/db/utils.py +135 -0
  122. deepfos/element/__init__.py +89 -0
  123. deepfos/element/accounting.py +348 -0
  124. deepfos/element/apvlprocess.py +215 -0
  125. deepfos/element/base.py +398 -0
  126. deepfos/element/bizmodel.py +1269 -0
  127. deepfos/element/datatable.py +2467 -0
  128. deepfos/element/deep_pipeline.py +186 -0
  129. deepfos/element/deepconnector.py +59 -0
  130. deepfos/element/deepmodel.py +1806 -0
  131. deepfos/element/dimension.py +1254 -0
  132. deepfos/element/fact_table.py +427 -0
  133. deepfos/element/finmodel.py +1485 -0
  134. deepfos/element/journal.py +840 -0
  135. deepfos/element/journal_template.py +943 -0
  136. deepfos/element/pyscript.py +412 -0
  137. deepfos/element/reconciliation.py +553 -0
  138. deepfos/element/rolestrategy.py +243 -0
  139. deepfos/element/smartlist.py +457 -0
  140. deepfos/element/variable.py +756 -0
  141. deepfos/element/workflow.py +560 -0
  142. deepfos/exceptions/__init__.py +239 -0
  143. deepfos/exceptions/hook.py +86 -0
  144. deepfos/lazy.py +104 -0
  145. deepfos/lazy_import.py +84 -0
  146. deepfos/lib/__init__.py +0 -0
  147. deepfos/lib/_javaobj.py +366 -0
  148. deepfos/lib/asynchronous.py +879 -0
  149. deepfos/lib/concurrency.py +107 -0
  150. deepfos/lib/constant.py +39 -0
  151. deepfos/lib/decorator.py +310 -0
  152. deepfos/lib/deepchart.py +778 -0
  153. deepfos/lib/deepux.py +477 -0
  154. deepfos/lib/discovery.py +273 -0
  155. deepfos/lib/edb_lexer.py +789 -0
  156. deepfos/lib/eureka.py +156 -0
  157. deepfos/lib/filterparser.py +751 -0
  158. deepfos/lib/httpcli.py +106 -0
  159. deepfos/lib/jsonstreamer.py +80 -0
  160. deepfos/lib/msg.py +394 -0
  161. deepfos/lib/nacos.py +225 -0
  162. deepfos/lib/patch.py +92 -0
  163. deepfos/lib/redis.py +241 -0
  164. deepfos/lib/serutils.py +181 -0
  165. deepfos/lib/stopwatch.py +99 -0
  166. deepfos/lib/subtask.py +572 -0
  167. deepfos/lib/sysutils.py +703 -0
  168. deepfos/lib/utils.py +1003 -0
  169. deepfos/local.py +160 -0
  170. deepfos/options.py +670 -0
  171. deepfos/translation.py +237 -0
  172. deepfos-1.1.60.dist-info/METADATA +33 -0
  173. deepfos-1.1.60.dist-info/RECORD +175 -0
  174. deepfos-1.1.60.dist-info/WHEEL +5 -0
  175. deepfos-1.1.60.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1269 @@
1
+ import datetime
2
+
3
+ from collections import UserDict, defaultdict
4
+ from typing import (
5
+ List, Dict, Optional, Union, TYPE_CHECKING, Any
6
+ )
7
+ from itertools import zip_longest
8
+ from pypika.terms import Term, EmptyCriterion
9
+ import pandas as pd
10
+ from enum import Enum, IntFlag
11
+
12
+ from .base import ElementBase, SyncMeta
13
+ from .apvlprocess import AsyncApprovalProcess
14
+ from deepfos.lib.decorator import cached_property, deprecated
15
+ from deepfos.lib.utils import SettableOnce, LazyList, FrozenClass, dict_to_sql
16
+ from deepfos.lib.constant import UNSET
17
+ from deepfos.api.business_model import BusinessModelAPI
18
+ from deepfos.api.models.business_model import (
19
+ BusinessModelDTORes as BizModel,
20
+ StructureDTO as TableStructure,
21
+ PartitionDTO, AuthorityDTO,
22
+ BusinessModelApproveDTO
23
+ )
24
+ from deepfos.api.V1_1.models.business_model import (
25
+ ModelDataSaveDTO, ModelDataTableDTO, ModelDataNoChildTableDTO,
26
+ ModelDataReturnDTO, ModelDataColumnsDTO,
27
+ )
28
+ from deepfos.core.logictable import MetaTable, BaseTable
29
+ from deepfos.options import OPTION
30
+ from deepfos.db.utils import get_client_class
31
+ from .datatable import txn_support # noqa
32
+ from deepfos.lib.asynchronous import future_property
33
+ from deepfos.exceptions import ElementVersionIncompatibleError
34
+
35
+ __all__ = [
36
+ 'BusinessModel',
37
+ 'AsyncBusinessModel',
38
+ 'CopyConfig'
39
+ ]
40
+
41
+
42
+ # -----------------------------------------------------------------------------
43
+ # utils
44
def dataframe_to_records(
    df: pd.DataFrame,
    column_key: str = "columnName",
    value_key: str = "value",
) -> Dict[int, List[Dict[str, Any]]]:
    """Explode each row of *df* into a list of column/value records.

    Returns a mapping from the original row index to a list of
    ``{column_key: <column name>, value_key: <cell value>}`` dicts,
    one dict per column of that row.
    """
    # Transpose so every original row becomes a column; reset_index moves
    # the former column names into the first column of the result.
    transposed = df.T.reset_index()
    n_cols = len(transposed.columns)
    # First column carries the field names, the remaining ones the values
    # of each original row (hence the repeated value_key label).
    transposed.columns = [column_key] + [value_key] * (n_cols - 1)
    result: Dict[int, List[Dict[str, Any]]] = {}
    for pos, row_idx in enumerate(df.index, start=1):
        result[row_idx] = transposed.iloc[:, [0, pos]].to_dict('records')
    return result
57
+
58
+
59
class TableNode(MetaTable):
    # One logic-table class per business-model datatable.
    # ``table_structure`` keeps the raw API description of the datatable;
    # ``name`` is the datatable's element name. Both are filled in by
    # ``create_table`` right after the class is created.
    table_structure: TableStructure = None
    name: str = None
62
+
63
+
64
def create_table(struct: TableStructure, parent: BaseTable = None) -> TableNode:
    """Build a :class:`TableNode` class for a single datatable structure.

    When *parent* is given, the association columns configured on
    *struct* are wired up so that the new node joins to its parent table.
    """
    element = struct.dataTableInfo.elementDetail
    class_attrs = {
        'table_info': {
            'element_name': element.elementName,
            'element_type': element.elementType,
            'folder_id': element.folderId,
            'path': element.path,
            'table_name': struct.dataTableInfo.actualTableName,
            'server_name': element.serverName,
        }
    }
    if parent is not None:
        # Join spec: child foreign-key columns -> parent primary-key columns.
        class_attrs['parent'] = {
            "cls": parent,
            "on": tuple(asoc.logicTableFk for asoc in struct.associations),
            "alias": tuple(asoc.parentPk for asoc in struct.associations),
        }
    node = TableNode(f"{struct.dataTableName}_{struct.uuid}", (), class_attrs)
    node.table_structure = struct
    node.name = struct.dataTableName
    return node
86
+
87
+
88
class Operator(str, Enum):
    # Row-level operation codes understood by the business-model save API.
    ADD = 'ADD'
    DEL = 'DELETE'
    UPD = 'UPDATE'
92
+
93
+
94
class AllowDetach(IntFlag):
    # Bit flags describing what may be detached:
    # ``data`` -> detach data rows, ``table`` -> detach the table itself.
    none = 0
    data = 1
    table = 1 << 1
    all = data | table
99
+
100
+
101
class CopyConfig(UserDict):
    """Configuration holder for business-model data copying."""

    def load_config(self, conf: Dict[str, Dict]):
        """Normalize a raw ``{table: config}`` mapping into ``self.data``.

        Each table config must contain a ``where`` entry; ``field_map``
        is optional and defaults to ``None``.
        """
        for tbl_name, raw_conf in conf.items():
            entry = self.data[tbl_name] = {}
            if "where" not in raw_conf:
                raise KeyError("Field 'where' is missing in copy configure.")
            entry["where"] = raw_conf["where"]
            entry["field_map"] = raw_conf.get("field_map")
        return self.data

    def set_config(
        self,
        table: str,
        where: Union[str, Term, EmptyCriterion],
        field_map: Dict[str, Union[str, int, FrozenClass, Term]] = None
    ):
        """
        Register the row-copy configuration of a single table.

        Args:
            table: table name
            where: filter condition selecting the rows to copy
            field_map: key: column to overwrite, value: value to write

        Note:
            The stored configuration is used as the arguments of
            :meth:`.DataTableMySQL.copy_rows`

        See Also:
            :meth:`.DataTableMySQL.copy_rows`

        """
        self.data[table] = {"where": where, "field_map": field_map}
136
+
137
+
138
+ # -----------------------------------------------------------------------------
139
+ # core classes
140
class LogicTable(UserDict):
    """Name -> :class:`TableNode` mapping of a business model.

    Raises a friendlier :class:`KeyError` for unknown table names and
    remembers the model's main table in :attr:`root`.
    """
    if TYPE_CHECKING:
        data: Dict[str, TableNode] = {}

    # Main (top-level) table of the model; may only be assigned once.
    root: TableNode = SettableOnce()

    def __getitem__(self, item: str) -> TableNode:
        """for type hint only"""
        try:
            return super().__getitem__(item)
        except KeyError:
            raise KeyError(f'No datatable named: {item}.') from None

    def release_all(self):
        """Release every table node that is currently locked."""
        for node in self.data.values():
            if node.locked:
                node.release()
157
+
158
+
159
class AsyncBusinessModel(ElementBase[BusinessModelAPI]):
    """Business model (async implementation)."""
    if TYPE_CHECKING:
        from deepfos.api.V1_1.business_model import BusinessModelAPI as V1_1  # noqa
        # api may be either the default or the V1_1 flavour of the client
        api: Union[BusinessModelAPI, V1_1]

    # Approval-process element class used by ``approval_process``;
    # subclasses (e.g. the sync variant) may override it.
    approval_class = AsyncApprovalProcess
166
+
167
    def __init__(
        self,
        element_name: str,
        folder_id: str = None,
        path: str = None,
        server_name: str = None,
    ):
        # Caches lazily populated by _table_init()
        self.__tables: Dict[str, TableNode] = {}
        self.__logic_tables = LogicTable()
        # Sentinel until approval_process resolves the default partition id
        self.__dflt_ptn_id = UNSET
        super().__init__(element_name, folder_id, path, server_name)
178
+
179
    @cached_property
    def default_partition_id(self) -> Optional[str]:
        """Default partition id.

        Id of the **last** approval partition with an approval process
        configured; ``None`` when no approval process is configured.
        """
        if self.__dflt_ptn_id is UNSET:
            # Accessing approval_process resolves the default partition id
            # as a side effect.
            _ = self.approval_process
        return self.__dflt_ptn_id
189
+
190
+ @future_property
191
+ async def meta(self) -> BizModel:
192
+ """业务模型的元数据信息"""
193
+ api = await self.wait_for('async_api')
194
+ ele_info = await self.wait_for('element_info')
195
+ r = await api.model.query(
196
+ folderId=ele_info.folderId,
197
+ elementName=self.element_name
198
+ )
199
+ return r.businessModel
200
+
201
+ @future_property
202
+ async def _meta_logic_table(self) -> TableStructure:
203
+ """内部逻辑使用:业务模型的元数据信息-主表信息(logicTable)"""
204
+ api = await self.wait_for('async_api')
205
+ ele_info = await self.wait_for('element_info')
206
+ r = await api.configure.structure_top(
207
+ folderId=ele_info.folderId,
208
+ elementName=self.element_name
209
+ )
210
+ return r
211
+
212
+ @future_property
213
+ async def _meta_sub_models(self) -> List[PartitionDTO]:
214
+ """内部逻辑使用:业务模型的元数据信息-子模型信息(subModels)"""
215
+ api = await self.wait_for('async_api')
216
+ ele_info = await self.wait_for('element_info')
217
+ r = await api.configure.partitions(
218
+ folderId=ele_info.folderId,
219
+ elementName=self.element_name
220
+ )
221
+ return r
222
+
223
    @cached_property
    def logic_tables(self) -> LogicTable:
        """Business-model datatables stored as logic tables.

        Every datatable derives from :class:`MetaTable`, and the
        relations between the tables are already wired up according to
        the business-model configuration.
        """
        if not self.__logic_tables:
            self._table_init()
        return self.__logic_tables
233
+
234
    @cached_property
    def table_memo(self) -> Dict[str, TableNode]:
        """Datatable uuid -> logic-table node mapping.

        See Also:
            :attr:`logic_tables`
        """
        if not self.__tables:
            self._table_init()
        return self.__tables
244
+
245
+ @future_property
246
+ async def permission(self) -> AuthorityDTO:
247
+ """权限配置信息"""
248
+ api = await self.wait_for('async_api')
249
+ ele_info = await self.wait_for('element_info')
250
+ return await api.permission.query(
251
+ elementName=self.element_name,
252
+ folderId=ele_info.folderId
253
+ )
254
+
255
+ def _table_init(self):
256
+ """
257
+ 根据业务模型的配置,初始化所有数据表信息
258
+ """
259
+ # bfs
260
+ tbl_to_visit = [(None, self._meta_logic_table)]
261
+ while tbl_to_visit:
262
+ parent, tbl = tbl_to_visit.pop(0)
263
+ table = create_table(tbl, parent)
264
+ if not self.__logic_tables:
265
+ # set root
266
+ self.__logic_tables.root = table
267
+ self.__logic_tables[tbl.dataTableName] = table
268
+ self.__tables[tbl.uuid] = table
269
+ tbl_to_visit.extend(zip_longest([], tbl.children, fillvalue=table))
270
+
271
+ @cached_property
272
+ def sub_models(self) -> Dict[str, PartitionDTO]:
273
+ """子模型信息
274
+
275
+ 模型id -> 子模型详细配置
276
+ """
277
+ return {
278
+ model.partitionId: model
279
+ for model in self._meta_sub_models
280
+ }
281
+
282
    @cached_property
    def approval_process(self) -> Dict[str, LazyList[AsyncApprovalProcess]]:
        """Approval processes configured on the model.

        Maps partition id -> lazy list of approval-process elements.
        Side effect: resolves the default partition id to the **last**
        partition holding a valid approval process (or ``None``).
        """
        candidates = {}
        self.__dflt_ptn_id = None
        apvl_cls = self.__class__.approval_class
        for partition in self.permission.statusColumn:
            partition_id = partition.partitionId
            for status_info in partition.statusInfo:
                if status_info.pcName:
                    appv_list = candidates.setdefault(partition_id, LazyList())
                    appv_list.append(
                        apvl_cls,
                        element_name=status_info.pcName,
                        folder_id=status_info.pcFolderId,
                    )
                    # A valid approval process is configured: make the
                    # current id the default partition id.
                    self.__dflt_ptn_id = partition_id
        return candidates
301
+
302
+ @cached_property
303
+ def _partition_map(self) -> Dict[str, str]:
304
+ return {
305
+ part.partitionName: part.partitionId
306
+ for part in self.permission.statusColumn
307
+ }
308
+
309
+ def _get_table_from_partition(
310
+ self,
311
+ partition_id: str
312
+ ) -> TableStructure:
313
+ if partition_id in self.sub_models:
314
+ model = self.sub_models[partition_id]
315
+ return self.table_memo[model.uuid].table_structure
316
+ else:
317
+ return self.logic_tables.root.table_structure
318
+
319
    async def set_approval(
        self,
        primary: Union[str, Dict[str, str]],
        operation: str = None,
        operation_id: str = None,
        partition_name: str = None,
        partition_id: str = None,
        remark: str = '',
        roles: List[str] = None,
        origin_status: str = None,
        main_primary_kv: Dict[str, str] = None,
    ):
        """
        Run an approval-flow operation.

        Args:
            primary: business primary-key value (or field -> value dict)
                of the approved model's main table; locates the data
                under approval
            operation: approval operation code (visible in the UI)
            operation_id: approval operation id
            partition_name: approval partition code (e.g. sub-model code;
                may be omitted when there is no sub-model)
            partition_id: partition id
            remark: remark text
            roles: roles used when looking up approval records
            origin_status: initial approval status
            main_primary_kv: primary-key value(s) of the main model

        Hint:
            Notes on the arguments:

            - When the approved model's business primary key is a single
              field, ``primary`` may be a bare value; with more than one
              field it must be given as a dict.
            - Provide either ``operation`` or ``operation_id``:
              ``operation`` can be read directly from the UI, while
              ``operation_id`` has to be dug out of request payloads (F12).
            - ``partition_name`` and ``partition_id`` are interchangeable
              and both optional; when neither is given, the **last**
              partition with an approval process configured is used.
              See :attr:`default_partition_id` for the resolved id.
            - Passing ``origin_status`` when known is recommended;
              otherwise an extra API request is made to use the status of
              the latest approval record.
            - ``main_primary_kv`` is not needed when approving the main
              model. For a sub-model it may also be omitted if the join
              columns to the main table cover all of the main model's
              business primary keys; in all other cases it is required.

        Attention:
            partition_id is merged into ``primary`` for the approval call

        """
        # -----------------------------------------------------------------------------
        # resolve partition_id
        ptn_id = self._resolve_partition_id(partition_id, partition_name)
        # -----------------------------------------------------------------------------
        # format argument primary
        is_sub = ptn_id in self.sub_models
        main_table = self.logic_tables.root
        main_logic_keys = main_table.table_structure.logicKeyList or []

        if is_sub:
            appv_table = self.table_memo[self.sub_models[ptn_id].uuid]
            logic_keys = appv_table.table_structure.logicKeyList or []
        else:
            appv_table = main_table
            logic_keys = main_logic_keys

        primary = self._ensure_primary_kv_dict(appv_table, logic_keys, primary)
        # merge partition info into the key dict
        primary.update(partition_id=ptn_id)

        # -----------------------------------------------------------------------------
        # create main_primary_kv
        if main_primary_kv is None:
            if not is_sub:
                main_primary_kv = {**primary}
            elif missing_keys := (set(main_logic_keys) - set(logic_keys)):
                # sub-model keys do not cover the main model's keys
                raise ValueError(f"Missing primary keys for main model: {missing_keys}. ")
            else:
                main_primary_kv = {k: primary[k] for k in main_logic_keys}

        # -----------------------------------------------------------------------------
        # get operation id and orignal status for approval
        if operation_id is None and operation is None:
            raise ValueError('None of argumnet [operation_id, operation] is set.')

        if operation_id is None or origin_status is None:
            if len(self.approval_process[ptn_id]) != 1:
                raise ValueError(f"Only one approval process is supported.")
            appv = self.approval_process[ptn_id][0]

            if origin_status is None:
                # fall back to the status of the latest approval record
                appv_records = await appv.get_record(primary, roles)
                origin_status = appv_records[0].result_status
            if operation_id is None:
                operation_id = appv.get_operation_id(operation)

        return await self.async_api.approval.operation(BusinessModelApproveDTO(
            businessModelName=self.element_name,
            businessModelFolderId=self.element_info.folderId,
            originStatus=origin_status,
            partitionId=ptn_id,
            primaryKeyValue=primary,
            mainModelPrimaryKey=main_primary_kv,
            remark=remark,
            processOperationId=operation_id
        ))
421
+
422
+ @staticmethod
423
+ def _ensure_primary_kv_dict(
424
+ table: TableNode,
425
+ primary_keys: List[str],
426
+ primary_kv: Union[Any, Dict[str, Any]],
427
+ copy: bool = True
428
+ ) -> Dict[str, Any]:
429
+ if isinstance(primary_kv, str):
430
+ if len(primary_keys) == 1:
431
+ primary_kv = {primary_keys[0]: primary_kv}
432
+ else:
433
+ raise ValueError(
434
+ f"Table: {table.name} has more than one primary key: {primary_keys}. "
435
+ f"Thus argument: `primary` must be type of dict[field, value]."
436
+ )
437
+ elif copy:
438
+ primary_kv = {**primary_kv}
439
+ return primary_kv
440
+
441
+ async def copy_rows(
442
+ self,
443
+ config: Union[CopyConfig, Dict]
444
+ ):
445
+ """
446
+ 对模型表做数据拷贝
447
+
448
+ Args:
449
+ config: 拷贝的配置
450
+
451
+ Example:
452
+ 入参config可以为字典类型,满足以下格式:
453
+
454
+ .. code-block:: python
455
+
456
+ config = {
457
+ "table_name": {
458
+ "where": WhereCondition,
459
+ "field_map": {k: v, ...}
460
+ },
461
+ ...
462
+ }
463
+
464
+ 也可以使用 :class:`CopyConfig`
465
+
466
+ .. code-block:: python
467
+
468
+ config = CopyConfig()
469
+ config.set_config(
470
+ table="table_name",
471
+ where=WhereCondition,
472
+ field_map={k: v, ...}
473
+ )
474
+
475
+ Note:
476
+ 本方法实际循环调用了数据表元素的 :meth:`.DataTableMySQL.copy_rows`
477
+ 方法,入参配置应当符合该方法
478
+
479
+ See Also:
480
+ :meth:`.DataTableMySQL.copy_rows`
481
+
482
+ """
483
+
484
+ if isinstance(config, Dict):
485
+ config = CopyConfig().load_config(config)
486
+
487
+ for table, conf in config.items():
488
+ dt = self.logic_tables[table].async_datatable
489
+ await dt.copy_rows(**conf)
490
+
491
+ @deprecated(
492
+ replacement='set_approval_ex',
493
+ version=(1, 0, 38)
494
+ )
495
+ async def set_approval_batch(
496
+ self,
497
+ operation_name: Union[str, List[str]],
498
+ main_primary_kv: Union[
499
+ pd.DataFrame,
500
+ Dict[str, list],
501
+ Dict[str, Union[str, int]],
502
+ List[Dict[str, str]]
503
+ ],
504
+ partition_name: str = None,
505
+ partition_id: str = None,
506
+ remark: str = None,
507
+ origin_status: str = None
508
+ ) -> Dict[str, pd.DataFrame]:
509
+ """设置审批流(已废弃)
510
+
511
+ Args:
512
+ operation_name: 审批操作编码
513
+ main_primary_kv: 主模型业务主键值或者键值对
514
+ partition_name: 审批分区编码(如子模型编码,无子模型可不传)
515
+ partition_id: 分区id
516
+ remark: 备注
517
+ origin_status: 初始审批状态
518
+
519
+ Hint:
520
+ 关于入参有以下注意点:
521
+
522
+ - ``operation`` 和 ``operation_id`` 二选一,``operation``
523
+ 可直接在界面获取,``operation_id`` 则需要通过F12查看具体数据.
524
+ 尽量不要使用operation_id,根据审批流operation_id会变化
525
+ - ``partition_name`` 和 ``partition_id`` 可二选一,也可都不提供.
526
+ 不提供的情况下,会以 **最后一个** 配置了审批流的审批区域为默认值.
527
+ 可以通过 :attr:`default_partition_id` 查看分区id.
528
+ - ``origin_status`` 审批流初始化可以不传递,其他审批操作必须传.
529
+ 传入``'start', 'init', '0'`` 也会认为是初始化,如果不需要该特性,
530
+ 请使用 :meth:`set_approval_ex`
531
+ - ``main_primary_pk`` 必须提供此参数.
532
+
533
+ Attention:
534
+ 注意本方法不会调用审批流前后置python
535
+
536
+ Example:
537
+ .. code-block:: python
538
+
539
+ init_process = BusinessModel(name='a', path='/')
540
+ ids = '001'
541
+ ids = ['001','002'] # (批量)
542
+ res = init_process.set_approval_batch(
543
+ operation_name=['start','staff_submit'],
544
+ main_primary_key={"PaymentApplyCode": ids}
545
+ )
546
+
547
+ See Also:
548
+ :meth:`set_approval_ex`
549
+
550
+ """
551
+ # -----------------------------------------------------------------------------
552
+ if origin_status in ('0', 'start', 'init'):
553
+ origin_status = None
554
+ return await self.set_approval_ex(
555
+ operation_name=operation_name,
556
+ main_primary_kv=main_primary_kv,
557
+ partition_name=partition_name,
558
+ partition_id=partition_id,
559
+ remark=remark,
560
+ origin_status=origin_status
561
+ )
562
+
563
    async def set_approval_ex(
        self,
        operation_name: Union[str, List[str]],
        main_primary_kv: Union[
            pd.DataFrame,
            Dict[str, list],
            Dict[str, Union[str, int]],
            List[Dict[str, str]]
        ],
        partition_name: str = None,
        partition_id: str = None,
        remark: str = None,
        origin_status: str = None
    ) -> Dict[str, pd.DataFrame]:
        """Set approval flow (batch).

        Args:
            operation_name: approval operation code(s)
            main_primary_kv: primary-key value(s) of the main model
            partition_name: approval partition code (e.g. sub-model code;
                may be omitted when there is no sub-model)
            partition_id: partition id
            remark: remark text
            origin_status: initial approval status

        Hint:
            Notes on the arguments:

            - Prefer operation codes over operation ids: ids change when
              the approval flow is edited, codes are visible in the UI.
            - ``partition_name`` and ``partition_id`` are interchangeable
              and both optional; when neither is given, the **last**
              partition with an approval process configured is used.
              See :attr:`default_partition_id` for the resolved id.
            - ``origin_status`` may be omitted for approval
              initialization; any other operation must pass it.
            - ``main_primary_kv`` is required.

        Attention:
            This method does NOT trigger the approval flow's pre/post
            python scripts.

        Example:
            .. code-block:: python

                init_process = BusinessModel(name='a', path='/')
                ids = '001'
                ids = ['001','002'] # (batch)
                res = init_process.set_approval_ex(
                    operation_name=['start','staff_submit'],
                    main_primary_key={"PaymentApplyCode": ids}
                )

        Returns:
            dict with two DataFrames: ``'success'`` (rows written) and
            ``'failure'`` (rows whose current status did not match).
        """
        # -----------------------------------------------------------------------------
        # Normalize operation_name to a list of operation codes
        if isinstance(operation_name, str):
            operation_name = [operation_name]
        elif not isinstance(operation_name, list):
            raise TypeError("operation_name参数只能为str或list类型")

        # Normalize main_primary_kv to a DataFrame (one row per record)
        if not isinstance(main_primary_kv, pd.DataFrame):
            if isinstance(main_primary_kv, dict):
                val = list(main_primary_kv.values())[0]
                if not isinstance(val, list):
                    # single record given as a flat dict -> one-row frame
                    main_primary_kv = [main_primary_kv]
            elif not isinstance(main_primary_kv, list):
                raise TypeError("main_primary_key参数只能为pd.DataFrame或dict或list类型")
            try:
                main_primary_kv = pd.DataFrame(main_primary_kv)
            except Exception:  # noqa
                raise TypeError("main_primary_keys参数数据结构异常") from None
        if main_primary_kv.empty:
            raise ValueError('main_primary_keys参数不能为空')
        # -----------------------------------------------------------------------------
        # resolve partition_id
        ptn_id = self._resolve_partition_id(partition_id, partition_name)
        pc = self.approval_process[ptn_id][0]
        # datatable element holding the approval records
        tbl_pc = pc.async_record_table

        # -----------------------------------------------------------------------------
        main_primary_kv.dropna(inplace=True)
        df_main_key = main_primary_kv.copy()
        key_cols = main_primary_kv.columns.to_list()
        col, where_sql, on_sql = self._create_sql(main_primary_kv, tbl_pc.quote_char)
        # Collect (operation id, origin status, target status) per operation
        df_operation = pd.DataFrame()
        for operation in operation_name:
            operation_info = pc.get_operation_info(operation)
            df_row = pd.DataFrame([{'process_operation_id': operation_info.id,
                                    'origin_status': operation_info.originStatusList,
                                    'target_status': operation_info.targetStatus}])
            df_operation = pd.concat([df_operation, df_row])
        df_operation.reset_index(drop=True, inplace=True)
        if len(df_operation) > 1:
            # Chained operations must be contiguous: each operation's
            # origin status must equal the previous target status.
            df_operation['target_status_shift'] = df_operation['target_status'].shift(1)
            if not (df_operation.loc[1:, 'target_status_shift'] ==  # noqa
                    df_operation.loc[1:, 'origin_status']).all():
                raise ValueError('多个审批操作operation_name不连续')
        if origin_status != df_operation.loc[0, 'origin_status']:
            raise ValueError('当前审批操作与origin_status状态不匹配')
        user = OPTION.api.header['user']
        # Latest approval record (max line_no) per primary key
        sql = f"""
        SELECT
            a.*
        FROM
            {tbl_pc.table_name} a
        INNER JOIN ( SELECT {tbl_pc.quote_char}{col}{tbl_pc.quote_char},
        max(line_no) AS line_no FROM {tbl_pc.table_name}
        WHERE {where_sql} GROUP BY {tbl_pc.quote_char}{col}{tbl_pc.quote_char} ) b
        ON {on_sql} AND a.line_no = b.line_no;
        """
        actual_cli = get_client_class(tbl_pc.api.module_type, sync=False)()
        df_query = await actual_cli.query_dfs(sql)
        if origin_status is None:
            # Initialization: only keys WITHOUT an existing record succeed
            if df_query.empty:
                df_success = df_main_key
                # NOTE(review): on this path df_success lacks a 'line_no'
                # column, yet the insert loop below does
                # `df_success['line_no'] += 1` — looks like it would raise
                # KeyError; confirm query_dfs always returns the columns.
                df_failure = pd.DataFrame()
            else:
                df_success = df_main_key.merge(df_query, how='left', on=key_cols)
                df_failure = df_success.loc[~df_success['line_no'].isnull()]
                df_success = df_success.loc[df_success['line_no'].isnull()]
                df_success['line_no'] = 0
        else:
            # Normal operation: only keys whose latest status matches
            if df_query.empty:
                df_success = pd.DataFrame()
                df_failure = df_main_key
            else:
                df_success = df_query.loc[df_query['result_status'] == origin_status]
                df_failure = df_query.loc[df_query['result_status'] != origin_status]
        time = datetime.datetime.now()
        if not df_success.empty:
            partition = self.models_permission[ptn_id]
            partition_id = partition['partition_id']
            pc_field = partition['pc_field']
            main_tbl_name = partition['main_tbl_name']
            df_success = df_success.assign(pc_remark=remark, operate_user=user, operate_time=time,
                                           partition_id=partition_id)
            df_insert = pd.DataFrame()
            for ind, row in df_operation.iterrows():
                # Rows with the correct current status: bump line_no,
                # stamp the new status, and stage for insertion.
                df_success['line_no'] += 1
                df_success['result_status'] = row['target_status']
                df_success['process_operation_id'] = row['process_operation_id']
                df_insert = pd.concat([df_insert, df_success])

            tbl_main = self.logic_tables[main_tbl_name].async_datatable
            if not df_failure.empty:
                # where_sql so far covers every requested key; narrow it
                # down to the successful rows only.
                _, where_sql, _ = self._create_sql(df_success[key_cols], tbl_main.quote_char)
            # `row` is the last operation: its target status becomes the
            # status-column value on the main table.
            sql_update = f"update {tbl_main.table_name} set " \
                         f"{tbl_main.quote_char}{pc_field}{tbl_main.quote_char}={row['target_status']!r} " \
                         f"where {where_sql}"
            async with tbl_main.start_transaction():
                await tbl_pc.insert_df(df_insert)
                await txn_support(tbl_main.__class__.run_sql)(tbl_main, sql_update)
            df_success = df_insert
        return {'success': df_success, 'failure': df_failure}
715
+
716
+ def _resolve_partition_id(self, partition_id, partition_name):
717
+ if partition_id is not None:
718
+ ptn_id = partition_id
719
+ elif partition_name is not None:
720
+ ptn_id = self._partition_map.get(partition_name, None)
721
+ if ptn_id is None:
722
+ raise ValueError(
723
+ f"Cannot resolve partition_id from "
724
+ f"given partition_name: {partition_name}"
725
+ )
726
+ else:
727
+ ptn_id = self.default_partition_id
728
+ if ptn_id is None:
729
+ raise ValueError(
730
+ "Cannot resolve partition_id because no approval "
731
+ "process has been set for current model."
732
+ )
733
+ return ptn_id
734
+
735
+ @staticmethod
736
+ def _create_sql(df, quote_char):
737
+ # 统一repr一下
738
+ df = df.applymap(lambda x: repr(x))
739
+ if df.shape[1] == 1:
740
+ col = df.columns[0]
741
+ in_val = ",".join(df[col])
742
+ where_sql = f"{quote_char}{col}{quote_char} IN ({in_val})"
743
+ on_sql = f"a.{quote_char}{col}{quote_char}=b.{quote_char}{col}{quote_char}"
744
+ else:
745
+ col = f"{quote_char},{quote_char}".join(df.columns)
746
+ df = df.apply(lambda x: quote_char + x.name + f'{quote_char}=' + x, axis=0)
747
+ df.iloc[:, :-1] += ' AND ' # 除了最后一列,每列尾加AND
748
+ data_series = "(" + df.sum(axis=1) + ")"
749
+ where_sql = " | ".join(data_series)
750
+ on_sql = " AND ".join([f"a.{quote_char}{col}{quote_char}=b.{quote_char}{col}{quote_char}" for col in df.columns])
751
+ return col, where_sql, on_sql
752
+
753
+ @cached_property
754
+ def models_permission(self) -> Dict[str, Dict[str, str]]:
755
+ """ 模型审批对象 """
756
+ result = {}
757
+ columns = self.permission.statusColumn
758
+ for status_column in columns:
759
+ res_row = {'partition_id': status_column.partitionId}
760
+ for status_info in status_column.statusInfo:
761
+ if (status_info.isStatusColumn == 1) and (status_info.pcName is not None):
762
+ res_row['pc_field'] = status_info.columnName
763
+ res_row['main_tbl_name'] = status_info.dataTableName
764
+ res_row['main_tbl_folder_id'] = status_info.tableFolderId
765
+ break
766
+ if status_column.partitionName == "主模型":
767
+ result['0'] = res_row
768
+ else:
769
+ result[status_column.partitionId] = res_row
770
+
771
+ return result
772
+
773
+ def _ensure_version_greater_than(self, target):
774
+ if self.api.version < target:
775
+ ver_str = '.'.join(map(str, target))
776
+ raise ElementVersionIncompatibleError(
777
+ f'Expect version > {ver_str}, got {self.api.version}')
778
+
779
+ @staticmethod
780
+ def _structurize_dataframe(
781
+ table: TableNode,
782
+ data: pd.DataFrame,
783
+ operator: Operator = Operator.ADD,
784
+ ) -> Dict[int, ModelDataTableDTO]:
785
+ if (parent := table.parent) is None:
786
+ parent_info = None
787
+ else:
788
+ struct: TableStructure = parent.table_structure
789
+ parent_info = ModelDataNoChildTableDTO(
790
+ dataTableFolderId=struct.folderId,
791
+ dataTableName=struct.dataTableName,
792
+ )
793
+ return {
794
+ idx: ModelDataTableDTO(
795
+ operateType=Operator.ADD,
796
+ children=[],
797
+ columns=columns, # noqa
798
+ dataTableFolderId=table.table_structure.folderId,
799
+ dataTableName=table.table_structure.dataTableName,
800
+ parentTableInfo=parent_info,
801
+ )
802
+ for idx, columns in dataframe_to_records(data).items()
803
+ }
804
+
805
+ @staticmethod
806
+ def _validate_detached(
807
+ data_map: Dict[str, pd.DataFrame],
808
+ attached_idxes: Dict[TableNode, List[int]],
809
+ allow_detached_data: bool = True,
810
+ allow_detached_table: bool = False,
811
+ ):
812
+ allow_detached = AllowDetach.none
813
+ if allow_detached_data:
814
+ allow_detached |= AllowDetach.data
815
+ if allow_detached_table:
816
+ allow_detached |= AllowDetach.table
817
+
818
+ if (
819
+ not (AllowDetach.table in allow_detached)
820
+ and (detached := (data_map.keys() - set(t.name for t in attached_idxes)))
821
+ ):
822
+ raise ValueError(f"Cannot attach table: {detached}")
823
+
824
+ if not (AllowDetach.data in allow_detached):
825
+ for tbl, indexes in attached_idxes.items():
826
+ orig_df = data_map[tbl.name]
827
+
828
+ if len(indexes) != len(orig_df):
829
+ detached_idx = orig_df.index.difference(indexes)
830
+ raise ValueError(
831
+ f"Cannot attach following data for table {tbl.name}:\n"
832
+ f"{orig_df.loc[detached_idx]}"
833
+ )
834
+
835
    def build_save_data(
        self,
        data_map: Dict[str, pd.DataFrame],
        table: TableNode = None,
        attached_idxes: Dict[TableNode, List[int]] = None,
        allow_detached_data: bool = True,
        allow_detached_table: bool = False,
    ) -> List[ModelDataTableDTO]:
        """Build the nested DTO payload consumed by the data-save API.

        Starting from ``table`` (the model root when omitted), each row of
        that table's dataframe becomes a DTO; rows of child tables whose
        relation-column values match a parent row are recursively attached
        as its ``children``. Attached row indexes are accumulated in
        ``attached_idxes`` and detachment rules are validated at the end.

        Args:
            data_map: table name -> dataframe of rows to save
            table: subtree root to build from (defaults to the model root)
            attached_idxes: accumulator of attached row indexes per table;
                a fresh one is created when omitted (callers such as
                ``attach`` pass a shared accumulator across calls)
            allow_detached_data: allow rows with no matching parent row
            allow_detached_table: allow tables that cannot be related

        Returns:
            The top-level DTOs (one per row of ``table``'s dataframe).
        """
        if table is None:
            table = self.logic_tables.root
        model_data = self._structurize_dataframe(table, data_map[table.name])

        if attached_idxes is None:
            attached_idxes: Dict[TableNode, List[int]] = defaultdict(list)

        # Every row of the starting table is attached by definition.
        attached_idxes[table].extend(data_map[table.name].index)

        def visit(node: TableNode, data_wrapper: Dict[int, ModelDataTableDTO]):
            # Depth-first: for each row DTO of `node`, select the child
            # rows whose relation columns equal that row's values, attach
            # them as children and recurse into them.
            orig_df = data_map[node.name]
            child: TableNode

            for idx, data in data_wrapper.items():
                # NOTE(review): positional iloc is paired with the record
                # keys from _structurize_dataframe; callers reset the index
                # beforehand so labels and positions coincide — confirm.
                df_record = orig_df.iloc[idx, :]

                for child in node.children:
                    if (child_df := data_map.get(child.name)) is None:
                        continue

                    # Relation columns on the parent side / child side.
                    rel_parent_cols = list(node.rel_info[child])
                    rel_child_val = tuple(df_record.loc[rel_parent_cols])
                    rel_child_cols = child.rel_info[node]

                    rel_child = dict(zip(rel_child_cols, rel_child_val))
                    query = dict_to_sql(rel_child, eq='==', bracket=False)
                    picked_child_df = child_df.query(query)

                    if not picked_child_df.empty:
                        attached_idxes[child].extend(picked_child_df.index)
                        child_dw = self._structurize_dataframe(child, picked_child_df)
                        data.children.extend(child_dw.values())
                        visit(child, child_dw)

        visit(table, model_data)
        self._validate_detached(
            data_map, attached_idxes,
            allow_detached_table=allow_detached_table,
            allow_detached_data=allow_detached_data,
        )
        return list(model_data.values())
884
+
885
    async def save(
        self,
        data: Union[pd.DataFrame, Dict[str, pd.DataFrame]],
        allow_detached_data: bool = True,
        allow_detached_table: bool = False,
        auto_format: bool = True,
        check_db: bool = True,
        check_logical: bool = True,
        check_field: bool = True,
        enable_pre_save: bool = True,
        enable_post_save: bool = True,
    ) -> List[ModelDataReturnDTO]:
        """Save data.

        Saves data through the business model's data-save API. Compared
        with writing to the datatables directly, this applies the model's
        built-in data and permission validation, and can optionally trigger
        the model's pre-/post-save logic.

        Args:
            data: the data to save
            allow_detached_data: whether ``data`` may contain rows lacking
                a valid parent relation
            allow_detached_table: whether ``data`` may contain tables that
                cannot be attached to the model tree
            auto_format: whether to handle relations, redundant fields,
                etc. (API feature)
            check_db: whether to run database attribute validation (API feature)
            check_logical: whether to run logical attribute validation (API feature)
            check_field: whether to run field permission validation (API feature)
            enable_pre_save: whether to enable the pre-save logic (API feature)
            enable_post_save: whether to enable the post-save logic (API feature)

        .. admonition:: Example

            Given a business model structured as follows:

            .. code-block::

                <ROOT>
                ├── <A>
                |   └── <A1>
                └── <B>

            All four datatables contain the two fields ``name, parent``,
            where each child table's ``parent`` relates to its parent
            table's ``name``.

            To save data for tables ``<ROOT>, <A>``:

            .. code-block:: python

                df_ROOT = pd.DataFrame([
                    {'name': "R0"}, {'name': "R1"},
                ])

                df_A = pd.DataFrame([
                    {'name': "A00", "parent": "R0"},
                    {'name': "A10", "parent": "R1"},
                    {'name': "A11", "parent": "R1"},
                ])

                model = BusinessModel('Tree')
                model.save({'ROOT': df_ROOT, 'A': df_A})

        Note:
            - If the model contains only one datatable, ``data`` may be a
              :class:`DataFrame`; in every other case it must be a dict of
              ``table name -> data to save``.
            - This method requires complete data, i.e. data must start from
              the model's main table (the root node); to append data, use
              :meth:`attach`.
            - For the ``allow_detached_xxx`` arguments, ``detached`` means
              data or a table with no valid attachment point.

              Taking the example model above: if the given ``data`` only
              contains the ``<ROOT>, <A1>`` tables, ``A1`` is considered a
              ``detached table`` because table ``A`` is missing from the
              relation chain. Similarly, if a saved row of table ``A`` has
              ``parent = 'R2'`` with no corresponding row in ``<ROOT>``,
              that row is considered ``detached data``.

        Returns:
            the save result

        See Also:
            :meth:`attach`

        """
        self._ensure_version_greater_than((1, 1))

        logic_tables = self.logic_tables
        root_tblname = logic_tables.root.name

        if isinstance(data, pd.DataFrame):
            if len(logic_tables.data) > 1:
                raise ValueError(
                    "'data' of type 'Dataframe' is only supported "
                    "on model containing single datatable.")
            else:
                data = {logic_tables.root.name: data}

        if root_tblname not in data:
            raise ValueError(
                f"Missing root table {root_tblname!r} in data")

        # ------------------------------------------------------------
        # normalize index
        normalized_data = {
            table: df.reset_index(drop=True)
            for table, df in data.items()
        }

        payload = ModelDataSaveDTO(
            data=self.build_save_data(
                normalized_data,
                allow_detached_table=allow_detached_table,
                allow_detached_data=allow_detached_data,
            ),
            databaseCheck=check_db,
            formatData=auto_format,
            logicCheck=check_logical,
            fieldCheck=check_field,
            savePre=enable_pre_save,
            savePost=enable_post_save,
            elementName=self.element_name,
            folderId=self.element_info.folderId,
        ).dict(exclude_none=True)

        return await self.async_api.data.save(payload)  # noqa
1004
+
1005
    async def attach(
        self,
        data: Union[pd.DataFrame, Dict[str, pd.DataFrame]],
        primary_kv: Union[Any, Dict[str, Any]],
        parent_kv: Union[Any, Dict[str, Any]] = None,
        allow_detached_data: bool = True,
        allow_detached_table: bool = False,
        check_db: bool = True,
        check_logical: bool = True,
        check_field: bool = True,
        enable_pre_save: bool = True,
        enable_post_save: bool = True,
    ) -> List[ModelDataReturnDTO]:
        """Append data.

        Saves data through the business model's data-save API, allowing
        the data to be appended as detail data of existing master data.

        Args:
            data: the data to save
            primary_kv: business primary key value (or key-value dict) of
                the main table
            parent_kv: business primary key value (or key-value dict) of
                the parent table the appended data belongs to (required
                when the parent table is not the model's main table)
            allow_detached_data: whether ``data`` may contain rows lacking
                a valid parent relation
            allow_detached_table: whether ``data`` may contain tables that
                cannot be attached to the model tree
            check_db: whether to run database attribute validation (API feature)
            check_logical: whether to run logical attribute validation (API feature)
            check_field: whether to run field permission validation (API feature)
            enable_pre_save: whether to enable the pre-save logic (API feature)
            enable_post_save: whether to enable the post-save logic (API feature)


        .. admonition:: Example

            Given a business model structured as follows:

            .. code-block::

                <ROOT>
                ├── <A>
                |   └── <A1>
                └── <B>

            All four datatables contain the two fields ``name, parent``,
            where each child table's ``parent`` relates to its parent
            table's ``name``.

            To save data for tables ``<A>, <A1>`` as detail data of the
            ``<ROOT>`` row with ``name=R0``:

            .. code-block:: python

                df_A = pd.DataFrame([
                    {'name': "A00"},
                    {'name': "A10"},
                    {'name': "A11"}
                ])
                df_A1 = pd.DataFrame([
                    {'name': "A00_1", "parent": "A00"},
                    {'name': "A10_1", "parent": "A10"},
                ])

                model = BusinessModel('Tree')
                model.attach({'A': df_A, 'A1': df_A1}, {'name': 'R0'})

            Since the ``A`` data is declared to hang under the ``<ROOT>``
            row with ``name=R0``, its relation field ``parent`` may be
            omitted (even if provided it has no effect: ``R0`` is what
            gets persisted).

        Returns:
            the save result

        See Also:
            :meth:`save`

        """
        self._ensure_version_greater_than((1, 1))

        logic_tables = self.logic_tables
        root = logic_tables.root

        if isinstance(data, pd.DataFrame):
            if len(logic_tables.data) != 2:
                raise ValueError(
                    "'data' of type 'Dataframe' is only supported "
                    "on model containing 2 datatables.")
            else:
                data = {root.children[0].name: data}

        # ------------------------------------------------------------
        # check primary kv
        pks = root.table_structure.logicKeyList
        primary_kv = self._ensure_primary_kv_dict(root, pks, primary_kv, copy=False)

        if missing_pk := set(pks) - primary_kv.keys():
            raise ValueError(f"Missing primary key: {missing_pk} in 'primary_kv'")

        # -----------------------------------------------------------------------------
        # set parent kv
        # The parent of the shallowest table present in `data` is where the
        # appended subtree hangs.
        data_root = sorted(
            (logic_tables[n] for n in data),
            key=lambda t: t.depth
        )[0].parent

        if data_root is root:
            parent_kv = primary_kv
        elif parent_kv is None:
            raise ValueError("Missing 'parent_kv' while attaching to a non-root table.")
        else:
            parent_kv = self._ensure_primary_kv_dict(
                data_root,
                data_root.table_structure.logicKeyList,
                parent_kv, copy=False
            )

        # ------------------------------------------------------------
        # normalize index
        normalized_data: Dict[str, pd.DataFrame] = {
            table: df.reset_index(drop=True)
            for table, df in data.items()
        }

        attached_idxes = defaultdict(list)
        save_data = []

        for child in data_root.children:
            if (child_df := normalized_data.get(child.name)) is None:
                continue
            # Relation columns toward the attach point are dropped: their
            # values are fixed by `parent_kv` below.
            drop_cols = [
                c for c in child.rel_info[data_root]
                if c in child_df.columns
            ]
            normalized_data[child.name] = child_df.drop(columns=drop_cols)

            save_data.extend(self.build_save_data(
                normalized_data,
                table=child,
                attached_idxes=attached_idxes,
                allow_detached_table=True,
                allow_detached_data=True,
            ))

        # Detachment is validated once over all subtrees, honouring the
        # caller's allow_detached_* flags.
        self._validate_detached(
            normalized_data, attached_idxes,
            allow_detached_table=allow_detached_table,
            allow_detached_data=allow_detached_data,
        )

        # ---------------------------------------------------------------
        # attach parentLogicKeyColumns
        attach_at = [
            ModelDataColumnsDTO(
                columnName=k,
                value=v
            )
            for k, v in parent_kv.items()
        ]
        for sd in save_data:
            sd.parentLogicKeyColumns = attach_at

        payload = ModelDataSaveDTO(
            data=save_data,
            databaseCheck=check_db,
            formatData=True,
            logicCheck=check_logical,
            fieldCheck=check_field,
            savePre=enable_pre_save,
            savePost=enable_post_save,
            elementName=self.element_name,
            folderId=self.element_info.folderId,
            mainKeyList=[primary_kv]
        ).dict(exclude_none=True)

        return await self.async_api.data.save(payload)  # noqa
1177
+
1178
+
1179
class BusinessModel(AsyncBusinessModel, metaclass=SyncMeta):
    """Synchronous facade of :class:`AsyncBusinessModel`.

    NOTE(review): ``SyncMeta`` presumably generates blocking wrappers for
    the coroutine methods listed in ``synchronize`` — confirm against the
    metaclass implementation.
    """
    # Coroutine methods of the async base exposed here with a synchronous
    # calling convention.
    synchronize = (
        'set_approval',
        'set_approval_batch',
        'set_approval_ex',
        'save',
        'attach',
        'copy_rows'
    )

    if TYPE_CHECKING:  # pragma: no cover
        # The stubs below exist only for type checkers / IDEs: the guard
        # is never true at runtime, so these bodies are never executed.
        def set_approval(
            self,
            primary: Union[str, Dict[str, str]],
            operation: str = None,
            operation_id: str = None,
            partition_name: str = None,
            partition_id: str = None,
            remark: str = '',
            roles: List[str] = None,
            origin_status: str = None,
            main_primary_kv: Dict[str, str] = None,
        ):
            ...

        def set_approval_batch(
            self,
            operation_name: Union[str, List[str]],
            main_primary_kv: Union[
                pd.DataFrame,
                Dict[str, list],
                Dict[str, Union[str, int]],
                List[Dict[str, str]]
            ],
            partition_name: str = None,
            partition_id: str = None,
            remark: str = None,
            origin_status: str = None
        ) -> Dict[str, pd.DataFrame]:
            ...

        def set_approval_ex(
            self,
            operation_name: Union[str, List[str]],
            main_primary_kv: Union[
                pd.DataFrame,
                Dict[str, list],
                Dict[str, Union[str, int]],
                List[Dict[str, str]]
            ],
            partition_name: str = None,
            partition_id: str = None,
            remark: str = None,
            origin_status: str = None
        ) -> Dict[str, pd.DataFrame]:
            ...

        def save(
            self,
            data: Union[pd.DataFrame, Dict[str, pd.DataFrame]],
            allow_detached_data: bool = True,
            allow_detached_table: bool = False,
            auto_format: bool = True,
            check_db: bool = True,
            check_logical: bool = True,
            check_field: bool = True,
            enable_pre_save: bool = True,
            enable_post_save: bool = True,
        ) -> List[ModelDataReturnDTO]:
            ...

        def attach(
            self,
            data: Union[pd.DataFrame, Dict[str, pd.DataFrame]],
            primary_kv: Union[Any, Dict[str, Any]],
            parent_kv: Union[Any, Dict[str, Any]] = None,
            allow_detached_data: bool = True,
            allow_detached_table: bool = False,
            check_db: bool = True,
            check_logical: bool = True,
            check_field: bool = True,
            enable_pre_save: bool = True,
            enable_post_save: bool = True,
        ) -> List[ModelDataReturnDTO]:
            ...

        def copy_rows(
            self,
            config: Union[CopyConfig, Dict]
        ):
            ...