deepfos 1.1.60__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepfos/__init__.py +6 -0
- deepfos/_version.py +21 -0
- deepfos/algo/__init__.py +0 -0
- deepfos/algo/graph.py +171 -0
- deepfos/algo/segtree.py +31 -0
- deepfos/api/V1_1/__init__.py +0 -0
- deepfos/api/V1_1/business_model.py +119 -0
- deepfos/api/V1_1/dimension.py +599 -0
- deepfos/api/V1_1/models/__init__.py +0 -0
- deepfos/api/V1_1/models/business_model.py +1033 -0
- deepfos/api/V1_1/models/dimension.py +2768 -0
- deepfos/api/V1_2/__init__.py +0 -0
- deepfos/api/V1_2/dimension.py +285 -0
- deepfos/api/V1_2/models/__init__.py +0 -0
- deepfos/api/V1_2/models/dimension.py +2923 -0
- deepfos/api/__init__.py +0 -0
- deepfos/api/account.py +167 -0
- deepfos/api/accounting_engines.py +147 -0
- deepfos/api/app.py +626 -0
- deepfos/api/approval_process.py +198 -0
- deepfos/api/base.py +983 -0
- deepfos/api/business_model.py +160 -0
- deepfos/api/consolidation.py +129 -0
- deepfos/api/consolidation_process.py +106 -0
- deepfos/api/datatable.py +341 -0
- deepfos/api/deep_pipeline.py +61 -0
- deepfos/api/deepconnector.py +36 -0
- deepfos/api/deepfos_task.py +92 -0
- deepfos/api/deepmodel.py +188 -0
- deepfos/api/dimension.py +486 -0
- deepfos/api/financial_model.py +319 -0
- deepfos/api/journal_model.py +119 -0
- deepfos/api/journal_template.py +132 -0
- deepfos/api/memory_financial_model.py +98 -0
- deepfos/api/models/__init__.py +3 -0
- deepfos/api/models/account.py +483 -0
- deepfos/api/models/accounting_engines.py +756 -0
- deepfos/api/models/app.py +1338 -0
- deepfos/api/models/approval_process.py +1043 -0
- deepfos/api/models/base.py +234 -0
- deepfos/api/models/business_model.py +805 -0
- deepfos/api/models/consolidation.py +711 -0
- deepfos/api/models/consolidation_process.py +248 -0
- deepfos/api/models/datatable_mysql.py +427 -0
- deepfos/api/models/deep_pipeline.py +55 -0
- deepfos/api/models/deepconnector.py +28 -0
- deepfos/api/models/deepfos_task.py +386 -0
- deepfos/api/models/deepmodel.py +308 -0
- deepfos/api/models/dimension.py +1576 -0
- deepfos/api/models/financial_model.py +1796 -0
- deepfos/api/models/journal_model.py +341 -0
- deepfos/api/models/journal_template.py +854 -0
- deepfos/api/models/memory_financial_model.py +478 -0
- deepfos/api/models/platform.py +178 -0
- deepfos/api/models/python.py +221 -0
- deepfos/api/models/reconciliation_engine.py +411 -0
- deepfos/api/models/reconciliation_report.py +161 -0
- deepfos/api/models/role_strategy.py +884 -0
- deepfos/api/models/smartlist.py +237 -0
- deepfos/api/models/space.py +1137 -0
- deepfos/api/models/system.py +1065 -0
- deepfos/api/models/variable.py +463 -0
- deepfos/api/models/workflow.py +946 -0
- deepfos/api/platform.py +199 -0
- deepfos/api/python.py +90 -0
- deepfos/api/reconciliation_engine.py +181 -0
- deepfos/api/reconciliation_report.py +64 -0
- deepfos/api/role_strategy.py +234 -0
- deepfos/api/smartlist.py +69 -0
- deepfos/api/space.py +582 -0
- deepfos/api/system.py +372 -0
- deepfos/api/variable.py +154 -0
- deepfos/api/workflow.py +264 -0
- deepfos/boost/__init__.py +6 -0
- deepfos/boost/py_jstream.py +89 -0
- deepfos/boost/py_pandas.py +20 -0
- deepfos/cache.py +121 -0
- deepfos/config.py +6 -0
- deepfos/core/__init__.py +27 -0
- deepfos/core/cube/__init__.py +10 -0
- deepfos/core/cube/_base.py +462 -0
- deepfos/core/cube/constants.py +21 -0
- deepfos/core/cube/cube.py +408 -0
- deepfos/core/cube/formula.py +707 -0
- deepfos/core/cube/syscube.py +532 -0
- deepfos/core/cube/typing.py +7 -0
- deepfos/core/cube/utils.py +238 -0
- deepfos/core/dimension/__init__.py +11 -0
- deepfos/core/dimension/_base.py +506 -0
- deepfos/core/dimension/dimcreator.py +184 -0
- deepfos/core/dimension/dimension.py +472 -0
- deepfos/core/dimension/dimexpr.py +271 -0
- deepfos/core/dimension/dimmember.py +155 -0
- deepfos/core/dimension/eledimension.py +22 -0
- deepfos/core/dimension/filters.py +99 -0
- deepfos/core/dimension/sysdimension.py +168 -0
- deepfos/core/logictable/__init__.py +5 -0
- deepfos/core/logictable/_cache.py +141 -0
- deepfos/core/logictable/_operator.py +663 -0
- deepfos/core/logictable/nodemixin.py +673 -0
- deepfos/core/logictable/sqlcondition.py +609 -0
- deepfos/core/logictable/tablemodel.py +497 -0
- deepfos/db/__init__.py +36 -0
- deepfos/db/cipher.py +660 -0
- deepfos/db/clickhouse.py +191 -0
- deepfos/db/connector.py +195 -0
- deepfos/db/daclickhouse.py +171 -0
- deepfos/db/dameng.py +101 -0
- deepfos/db/damysql.py +189 -0
- deepfos/db/dbkits.py +358 -0
- deepfos/db/deepengine.py +99 -0
- deepfos/db/deepmodel.py +82 -0
- deepfos/db/deepmodel_kingbase.py +83 -0
- deepfos/db/edb.py +214 -0
- deepfos/db/gauss.py +83 -0
- deepfos/db/kingbase.py +83 -0
- deepfos/db/mysql.py +184 -0
- deepfos/db/oracle.py +131 -0
- deepfos/db/postgresql.py +192 -0
- deepfos/db/sqlserver.py +99 -0
- deepfos/db/utils.py +135 -0
- deepfos/element/__init__.py +89 -0
- deepfos/element/accounting.py +348 -0
- deepfos/element/apvlprocess.py +215 -0
- deepfos/element/base.py +398 -0
- deepfos/element/bizmodel.py +1269 -0
- deepfos/element/datatable.py +2467 -0
- deepfos/element/deep_pipeline.py +186 -0
- deepfos/element/deepconnector.py +59 -0
- deepfos/element/deepmodel.py +1806 -0
- deepfos/element/dimension.py +1254 -0
- deepfos/element/fact_table.py +427 -0
- deepfos/element/finmodel.py +1485 -0
- deepfos/element/journal.py +840 -0
- deepfos/element/journal_template.py +943 -0
- deepfos/element/pyscript.py +412 -0
- deepfos/element/reconciliation.py +553 -0
- deepfos/element/rolestrategy.py +243 -0
- deepfos/element/smartlist.py +457 -0
- deepfos/element/variable.py +756 -0
- deepfos/element/workflow.py +560 -0
- deepfos/exceptions/__init__.py +239 -0
- deepfos/exceptions/hook.py +86 -0
- deepfos/lazy.py +104 -0
- deepfos/lazy_import.py +84 -0
- deepfos/lib/__init__.py +0 -0
- deepfos/lib/_javaobj.py +366 -0
- deepfos/lib/asynchronous.py +879 -0
- deepfos/lib/concurrency.py +107 -0
- deepfos/lib/constant.py +39 -0
- deepfos/lib/decorator.py +310 -0
- deepfos/lib/deepchart.py +778 -0
- deepfos/lib/deepux.py +477 -0
- deepfos/lib/discovery.py +273 -0
- deepfos/lib/edb_lexer.py +789 -0
- deepfos/lib/eureka.py +156 -0
- deepfos/lib/filterparser.py +751 -0
- deepfos/lib/httpcli.py +106 -0
- deepfos/lib/jsonstreamer.py +80 -0
- deepfos/lib/msg.py +394 -0
- deepfos/lib/nacos.py +225 -0
- deepfos/lib/patch.py +92 -0
- deepfos/lib/redis.py +241 -0
- deepfos/lib/serutils.py +181 -0
- deepfos/lib/stopwatch.py +99 -0
- deepfos/lib/subtask.py +572 -0
- deepfos/lib/sysutils.py +703 -0
- deepfos/lib/utils.py +1003 -0
- deepfos/local.py +160 -0
- deepfos/options.py +670 -0
- deepfos/translation.py +237 -0
- deepfos-1.1.60.dist-info/METADATA +33 -0
- deepfos-1.1.60.dist-info/RECORD +175 -0
- deepfos-1.1.60.dist-info/WHEEL +5 -0
- deepfos-1.1.60.dist-info/top_level.txt +1 -0
deepfos/element/journal.py
@@ -0,0 +1,840 @@
import uuid

from pydantic import parse_obj_as

from deepfos.element.datatable import get_table_class
from deepfos.exceptions import (
    JournalModelSaveError, JournalModelCheckError, JournalModelPostingError
)
from deepfos.lib.asynchronous import future_property
from typing import List, Union, TYPE_CHECKING, Tuple, Dict, Literal
from pypika.terms import Term, EmptyCriterion
from pypika import Table
from deepfos.lib.decorator import cached_property
import pandas as pd
import numpy as np
from deepfos.element.base import ElementBase, SyncMeta
from deepfos.api.journal_model import JournalModelAPI
from deepfos.api.models.journal_model import (
    ModelDataQueryVO,
    CheckStandardVO,
    JmPostParamVO,
    JmPostResultVO,
    JournalModelExecCallbackPythonDTO as CallbackInfo,
    CommonResultDTO,
    ModelDataBatchDTO,
    ModelDataDeleteDTO,
    JournalModelConfig,
    JournalSortConfig
)

__all__ = [
    "JournalModel",
    "AsyncJournalModel"
]

MAIN_ID = '_main_id'
DEFAULT_CALLBACK_SERVER_NAME = "python-server2-0"

_escape_table = {
    ord('\\'): u'\\\\',
    ord('.'): u'\\.',
    ord('('): u'\\(',
    ord(')'): u'\\)',
    ord('['): u'\\[',
    ord(']'): u'\\]',
    ord('{'): u'\\{',
    ord('}'): u'\\}',
    ord('*'): u'\\*',
    ord('+'): u'\\+',
    ord('$'): u'\\$',
    ord('?'): u'\\?',
    ord('|'): u'\\|',
    ord('='): u'\\=',
    ord('^'): u'\\^',
    ord(':'): u'\\:',
}
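
# The escape table above is used with str.translate() to make literal values
# (journal type codes, head ids) regex-safe before _gen_batch_payload joins them
# into alternation patterns for pandas ``str.match`` checks. A minimal sketch
# (illustrative only; the values are made up, not from the original source):
#
#     >>> 'type(01)*'.translate(_escape_table)
#     'type\\(01\\)\\*'
#     >>> "|".join("(" + c.translate(_escape_table) + ")" for c in ['a.1', 'b+2'])
#     '(a\\.1)|(b\\+2)'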


class AsyncJournalModel(ElementBase[JournalModelAPI]):
    """Journal model component"""
    def __init__(
        self,
        element_name: str,
        folder_id: str = None,
        path: str = None,
        server_name: str = None,
    ):
        self.__tbl_name = None
        super().__init__(element_name, folder_id, path, server_name)

    @future_property
    async def config(self) -> JournalModelConfig:
        """Element info of the journal model"""
        api = await self.wait_for('async_api')
        element_info = await self.wait_for('element_info')
        res = await api.journal_model_data.get_config(element_info)
        return res

    @cached_property
    def quote_char(self) -> str:
        try:
            element_type = self.config.logicTable.dataTableInfo.elementDetail.elementType
            dbcls = get_table_class(element_type)
            return dbcls.quote_char
        except Exception:  # noqa
            return '`'

    @cached_property
    def table_name(self) -> str:
        """Actual table name of the underlying data table"""
        if self.__tbl_name is None:
            self.__tbl_name = self.config.logicTable.dataTableInfo.actualTableName
        return self.__tbl_name

    @cached_property
    def table(self) -> Table:
        """pypika Table object

        Mainly used for building query conditions.

        See Also:
            For more ways to use the table, see
            `pypika on GitHub <https://github.com/kayak/pypika#tables-columns-schemas-and-databases>`_

        """
        try:
            element_type = self.config.logicTable.dataTableInfo.elementDetail.elementType
            dbcls = get_table_class(element_type)
            return Table(self.table_name, query_cls=dbcls.query)
        except Exception:  # noqa
            return Table(self.table_name)

    def _parse_where(self, where: Union[None, Term, EmptyCriterion]) -> str:
        if isinstance(where, (Term, EmptyCriterion)):
            return where.get_sql(quote_char=self.quote_char)
        if isinstance(where, str):
            return where
        raise TypeError(f"Unsupported type: {type(where)} for where.")
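
    # _parse_where accepts either a raw SQL string or a pypika condition built from
    # ``self.table``. A rough sketch of the conversion (illustrative only; the field
    # names here are assumptions, not from the original source):
    #
    #     >>> t = journal.table
    #     >>> cond = (t.year == '2023') & t.entity.isin(['A', 'B'])
    #     >>> journal._parse_where(cond)   # via Term.get_sql(quote_char='`'), roughly
    #     "`year`='2023' AND `entity` IN ('A','B')"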

    def _gen_batch_payload(
        self,
        head_df: pd.DataFrame,
        line_df: pd.DataFrame,
        callback: Union[Dict, CallbackInfo] = None,
        relation_field: str = 'journal_id',
        id_col: str = None,
        operate_type: Literal['EDIT', 'ADD'] = 'ADD',
        enable_create: bool = True,
        enable_default_value: bool = True,
        enable_repeat_check: bool = True,
        enable_required: bool = True,
        enable_valid_range: bool = True,
        enable_all_errors: bool = True,
        enable_need_one_line: bool = True,
    ):
        errors = set()
        is_editing = operate_type == 'EDIT'

        if head_df.empty:
            errors.add('凭证头表数据DataFrame不能为空')
        if line_df.empty:
            errors.add('凭证行表数据DataFrame不能为空')
        if relation_field.strip() == '':
            errors.add('凭证头行表的关联字段relation_field不能为空')

        self._maybe_raise_errors(errors)

        if is_editing:
            head_required_fields = [relation_field, '_type', id_col]
            line_required_fields = [relation_field, id_col]
        else:
            head_required_fields = [relation_field, '_type']
            line_required_fields = [relation_field]

        for h_l_df, h_l, req_fields in zip(
            [head_df, line_df], ['头', '行'],
            [head_required_fields, line_required_fields]
        ):
            for field in req_fields:
                if (
                    field not in h_l_df.columns
                    or any(h_l_df[field].isna())
                    or any(h_l_df[field].astype(str, errors='ignore').str.strip() == '')
                ):
                    errors.add(f'凭证{h_l}表字段({field})不存在或有值为空')

        self._maybe_raise_errors(errors)

        if any(dup_ser := head_df[relation_field].duplicated()):
            raise JournalModelSaveError(
                f'凭证头表数据关联字段({relation_field})存在重复的值:\n'
                f'{set(head_df[relation_field][dup_ser])}'
            )

        table_type_pattern = "|".join(
            "(" + t.typeCode.translate(_escape_table) + ")"
            for t in self.config.journalModelType if t.typeCode
        )
        unknown_type = ~head_df['_type'].astype('str', errors='ignore').str.match(
            table_type_pattern
        )
        if unknown_type.any():
            raise JournalModelSaveError(
                f'凭证头表数据中凭证类型:\n'
                f'{set(head_df["_type"][unknown_type])}'
                f'\n在凭证模型中不存在'
            )

        head_data = head_df[head_required_fields]
        # generate headMainId
        if is_editing:
            head_data = head_data.rename(
                columns={"_type": "journalTypeCode", id_col: "headMainId"}
            )
        else:
            head_data = head_data.rename(
                columns={"_type": "journalTypeCode"}
            )
            head_data = head_data.assign(
                headMainId=pd.Series(
                    [uuid.uuid4().hex for _ in head_data.index],
                    index=head_data.index
                )
            )
        head_data = head_data.assign(operateType=operate_type)

        # NB: replace twice in case of infer None to nan happened
        head_df = head_df.replace({None: np.nan})
        head_df = head_df.replace({np.nan: None})

        head_data = head_data.assign(
            data=head_df.drop(columns=['_type']).to_dict(orient='records')
        )
        main_id_pattern = "|".join(
            "(" + str(mid).translate(_escape_table) + ")"
            for mid in head_df[relation_field]
        )

        unrelated_value = ~line_df[relation_field].astype(
            'str', errors='ignore'
        ).str.match(main_id_pattern)
        if unrelated_value.any():
            errors.add(
                f'凭证行表数据关联字段({relation_field})的值:\n'
                f'{set(line_df[relation_field][unrelated_value])}\n在凭证头表中不存在'
            )

        self._maybe_raise_errors(errors)

        line_data = line_df[line_required_fields]
        line_data = line_data.assign(operateType=operate_type)

        # generate lineMainId
        if is_editing:
            line_data = line_data.rename(columns={id_col: "lineMainId"})
        elif "lineMainId" in line_df.columns:
            line_main_id = line_df["lineMainId"]
            line_main_id[line_main_id.isna()] = [uuid.uuid4().hex for _ in
                                                 range(sum(line_main_id.isna()))]
            line_data["lineMainId"] = line_main_id
        else:
            line_data = line_data.assign(
                lineMainId=pd.Series(
                    [uuid.uuid4().hex for _ in line_data.index],
                    index=line_data.index
                )
            )

        # NB: replace twice in case of infer None to nan happened
        line_df = line_df.replace({None: np.nan})
        line_df = line_df.replace({np.nan: None})

        line_data = line_data.assign(data=line_df.to_dict(orient='records'))

        # merge head & line
        line_data = line_data.merge(
            head_data[[relation_field, "headMainId", "journalTypeCode"]],
            on=relation_field
        )

        # relation field is only used for merge
        head_data = head_data.drop(columns=[relation_field])
        line_data = line_data.drop(columns=[relation_field])

        if callback is not None:
            callback = parse_obj_as(CallbackInfo, callback)
            callback.serverName = callback.serverName or DEFAULT_CALLBACK_SERVER_NAME

        data_map = {
            self.config.logicTable.dataTableInfo.name: head_data.to_dict(
                orient='records'
            ),
            self.config.logicTable.children[0].dataTableInfo.name: line_data.to_dict(
                orient='records'
            )
        }
        return ModelDataBatchDTO(
            modelInfo=self.element_info,  # noqa
            callbackInfo=callback,
            dataMap=data_map,
            enableCreate=enable_create,
            enableDefaultValue=enable_default_value,
            enableRepeatCheck=enable_repeat_check,
            enableRequired=enable_required,
            enableValidRange=enable_valid_range,
            enableAllErrors=enable_all_errors,
            enableNeedOneLine=enable_need_one_line
        )
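
    # Shape of the ModelDataBatchDTO built above (a sketch for readers of this diff,
    # not part of the original source): ``dataMap`` maps the head and line data-table
    # names to row lists, each row holding the generated ids plus the full record
    # under ``data``, roughly:
    #
    #     dataMap = {
    #         '<head table name>': [{'journalTypeCode': ..., 'headMainId': ...,
    #                                'operateType': 'ADD', 'data': {...}}, ...],
    #         '<line table name>': [{'lineMainId': ..., 'operateType': 'ADD',
    #                                'headMainId': ..., 'journalTypeCode': ...,
    #                                'data': {...}}, ...],
    #     }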

    async def save(
        self,
        head_df: pd.DataFrame,
        line_df: pd.DataFrame,
        callback: Union[Dict, CallbackInfo] = None,
        relation_field: str = 'journal_id',
        enable_create: bool = True,
        enable_default_value: bool = True,
        enable_repeat_check: bool = True,
        enable_required: bool = True,
        enable_valid_range: bool = True,
        enable_all_errors: bool = True,
        enable_need_one_line: bool = True,
        sync: bool = True
    ) -> CommonResultDTO:
        """Save journal model data

        Args:
            head_df: data of the journal head table (column names correspond to the
                field names of the model's head table)
            line_df: data of the journal line table (column names correspond to the
                field names of the model's line table)
            callback: callback script configuration.
                If None, no script is called after the save finishes;
                if a callback is configured, the script is called after the save
                finishes, whether or not the save succeeded
            relation_field: field linking the journal head and line tables, i.e. the
                field used to find the line rows belonging to each head row;
                defaults to journal_id
            enable_create: whether to auto-fill creator and creation time, default True
            enable_default_value: whether to fill empty fields with their default values, default True
            enable_repeat_check: whether to check for duplicated business keys, default True
            enable_required: whether to validate required fields, default True
            enable_valid_range: whether to validate value ranges, default True
            enable_all_errors: whether to validate all rules and rows in one pass, default True
            enable_need_one_line: whether to require at least one line row per journal, default True
            sync: whether the save interface is called synchronously (True) or
                asynchronously (False), default synchronous. The asynchronous
                interface responds as soon as the request is received; the
                synchronous interface responds only after the data has been saved
                and returns the save result.
                With the synchronous interface, a very large payload may take long
                enough to exceed the SDK's response timeout and raise an error.

        Returns:
            Result of the synchronous call (``success`` of the CommonResultDTO being
            true means success; if false, the errors are in the ``errors`` collection)

        .. admonition:: Example

            1. Execute with custom data-selection parameters

            .. code-block:: python

                # Journal head table data (note: the _type value must match a journal
                # type code configured on the model, and every journal_id value must
                # have matching rows in the line table below)
                head_df = pd.DataFrame([
                    {
                        "_type": "type_account_01", "journal_id": "head_main_id_202306080001",
                        "is_balance": "true", "scenario": "Actual", "version": "Working",
                        "value": "CNY", "entity": "[TotalEntity].[A]", "year": "2023",
                        "period": "12", "approve_time": "2023-05-23 15:56:00",
                        "convert_date": "2023-05-23"
                    },
                    {
                        "_type": "type_account_01", "journal_id": "head_main_id_202306080002",
                        "is_balance": "true", "scenario": "Actual", "version": "Working",
                        "value": "CNY", "entity": "[TotalEntity].[A]", "year": "2023",
                        "period": "12", "approve_time": "2023-05-23 15:56:00",
                        "convert_date": "2023-05-23"
                    }
                ])

                # Journal line table data (note: every journal_id value in the line
                # table must exist in the head table data; line_no must not repeat)
                line_df = pd.DataFrame([
                    {
                        "journal_id": "head_main_id_202306080001",
                        "line_no": "1", "account": "100101", "movement": "OPN",
                        "trx_debit": "130", "debit": "130", "comment_line": "line1"
                    },
                    {
                        "journal_id": "head_main_id_202306080001",
                        "line_no": "2", "account": "100101", "movement": "OPN",
                        "trx_credit": "130", "credit": "130", "comment_line": "line2"
                    },
                    {
                        "journal_id": "head_main_id_202306080002",
                        "line_no": "1", "account": "100101", "movement": "OPN",
                        "trx_debit": "130", "debit": "130", "comment_line": "line1"
                    },
                    {
                        "journal_id": "head_main_id_202306080002",
                        "line_no": "2", "account": "100101", "movement": "OPN",
                        "trx_credit": "130", "credit": "130", "comment_line": "line2"
                    }
                ])

                # Callback script
                callback_info = {
                    "elementName": "testPy01", "elementType": "PY",
                    "path": "/zhy_test",
                    "callbackParams": {"year": "2023", "period": "03"}
                }
                # Create the journal model element object
                journal = JournalModel('ZHY_TEST_0613_02')
                # Call save
                res = journal.save(
                    head_df=head_df,
                    line_df=line_df,
                    callback=callback_info,
                    enable_create=True,
                    enable_default_value=False,
                    enable_repeat_check=True,
                    enable_required=False,
                    enable_valid_range=True,
                    enable_all_errors=True,
                    enable_need_one_line=True,
                    sync=True
                )


        Attention:

            With the callback parameters of the example above, the callback script
            receives:

            .. code-block:: python

                # before journal component V1.0.6.3

                p2 = {
                    "batch_id": "b14d943609b",
                    "success": True,
                    "year": "2023",  # custom parameter
                    "period": "03"   # custom parameter
                }

                # journal component V1.0.6.3 and later

                p2 = {
                    "mainKey": {
                        "journal_id": [
                            "ZDlhYj",
                            "ZWU00005",
                            "ZjY4MG",
                            "NWQ00002",
                            "ZTl00003",
                            "NmNiNW",
                            "YWM5ZG",
                            "M2E00004"
                        ]
                    },
                    "success": True,
                    "year": "2023",  # custom parameter
                    "period": "03"   # custom parameter
                }

        """
        batch = self._gen_batch_payload(
            head_df=head_df, line_df=line_df,
            callback=callback, relation_field=relation_field,
            enable_create=enable_create,
            enable_default_value=enable_default_value,
            enable_repeat_check=enable_repeat_check,
            enable_required=enable_required,
            enable_valid_range=enable_valid_range,
            enable_all_errors=enable_all_errors,
            enable_need_one_line=enable_need_one_line
        )
        if sync:
            resp = await self.async_api.journal_model_data.sync_save(batch)
        else:
            resp = await self.async_api.journal_model_data.save(batch)

        if not resp.success:
            raise JournalModelSaveError(
                f"Failed to save journal model.\n"
                f"Detail: {resp}"
            )
        return resp

    async def update(
        self,
        head_df: pd.DataFrame,
        line_df: pd.DataFrame,
        callback: Union[Dict, CallbackInfo] = None,
        relation_field: str = 'journal_id',
        enable_create: bool = True,
        enable_default_value: bool = True,
        enable_repeat_check: bool = True,
        enable_required: bool = True,
        enable_valid_range: bool = True,
        enable_all_errors: bool = True,
        enable_need_one_line: bool = True,
    ) -> CommonResultDTO:
        """Update journal model data

        Only supports updating head and line rows, and inserting or deleting line rows.

        Args:
            head_df: data of the journal head table (column names correspond to the
                field names of the model's head table)
            line_df: data of the journal line table (column names correspond to the
                field names of the model's line table)
            callback: callback script configuration.
                If None, no script is called after the save finishes;
                if a callback is configured, the script is called after the save
                finishes, whether or not the save succeeded
            relation_field: field linking the journal head and line tables, i.e. the
                field used to find the line rows belonging to each head row;
                defaults to journal_id
            enable_create: whether to auto-fill creator and creation time, default True
            enable_default_value: whether to fill empty fields with their default values, default True
            enable_repeat_check: whether to check for duplicated business keys, default True
            enable_required: whether to validate required fields, default True
            enable_valid_range: whether to validate value ranges, default True
            enable_all_errors: whether to validate all rules and rows in one pass, default True
            enable_need_one_line: whether to require at least one line row per journal, default True

        Returns:
            Result returned by the interface (``success`` of the CommonResultDTO being
            true means success; if false, the errors are in the ``errors`` collection)


        """
        batch = self._gen_batch_payload(
            head_df=head_df, line_df=line_df,
            callback=callback, relation_field=relation_field,
            id_col=MAIN_ID, operate_type='EDIT',
            enable_create=enable_create,
            enable_default_value=enable_default_value,
            enable_repeat_check=enable_repeat_check,
            enable_required=enable_required,
            enable_valid_range=enable_valid_range,
            enable_all_errors=enable_all_errors,
            enable_need_one_line=enable_need_one_line
        )
        resp = await self.async_api.journal_model_data.update(batch)

        if not resp.success:
            raise JournalModelSaveError(
                f"Failed to update journal model.\n"
                f"Detail: {resp}"
            )
        return resp
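
    # Minimal usage sketch for update() (illustrative only; element name and values
    # are assumptions, not from the original source). For the EDIT operation both
    # DataFrames must carry the stored record id in the ``_main_id`` column, and the
    # head rows still need ``_type`` plus the relation field:
    #
    #     journal = JournalModel('ZHY_TEST_0613_02')
    #     head_df = pd.DataFrame([{
    #         '_main_id': '<head record id>', '_type': 'type_account_01',
    #         'journal_id': 'head_main_id_202306080001', 'period': '11',
    #     }])
    #     line_df = pd.DataFrame([{
    #         '_main_id': '<line record id>', 'journal_id': 'head_main_id_202306080001',
    #         'line_no': '1', 'debit': '200',
    #     }])
    #     journal.update(head_df=head_df, line_df=line_df)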

    @staticmethod
    def _maybe_raise_errors(errors):
        if errors:
            raise JournalModelSaveError("\n".join(errors))

    async def check(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
        """Check journal data

        Args:
            where: check condition (for the format, see the condition format of the
                data table (DataTableMySQL))

        .. admonition:: Example

            .. code-block:: python

                journal = JournalModel(element_name="ZHY_TEST_0613_02")
                t = journal.table
                # Condition for the data to check (see the DataTableMySQL condition
                # format; & means and, | means or)
                where = (
                    ((t.year == '2023') | (t.journal_id == 'head_main_id_202306080001'))
                    &
                    (t.entity.isin(['A','B']) | t.journal_id.like('head_main_id_202306080002%'))
                )
                journal.check(where)

        Hint:
            - If data matching the given condition exists and the check succeeds, the
              check_status field on the journal head table is set to 'true'; on
              failure it is left unchanged


        """
        where_str = None
        if where is not None:
            where_str = self._parse_where(where)
        param = CheckStandardVO(
            elementName=self.element_info.elementName,
            folderId=self.element_info.folderId,
            whereStr=where_str
        )
        resp = await self.async_api.journal_model_data.check(param)

        if not resp.success:
            raise JournalModelCheckError(
                f"Error occurs while checking journal model.\n"
                f"Detail: {resp}"
            )
        return resp

    async def delete(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
        """Delete journal data

        Hint:
            - ``where`` currently only supports fields of the journal head table as conditions

        Args:
            where: delete condition (for the format, see the condition format of the
                data table (DataTableMySQL))

        .. admonition:: Example

            .. code-block:: python

                # Create the journal model element object
                journal = JournalModel(element_name="ZHY_TEST_0613_02")
                t = journal.table
                # Condition for the data to delete (see the DataTableMySQL condition
                # format; & means and, | means or)
                where = (
                    ((t.year == '2023') | (t.journal_id == 'head_main_id_202306080001'))
                    &
                    (t.entity.isin(['A','B']) | t.journal_id.like('head_main_id_202306080002%'))
                )
                # Call delete
                journal.delete(where)

            The SQL executed is (only the condition after WHERE matters):

            .. code-block:: sql

                DELETE h,l
                FROM
                    <journal head table> h left join <journal line table> l on h.journal_id = l.journal_id
                WHERE
                    (h.`year`='2023' OR h.`journal_id`='head_main_id_202306080001')
                AND
                    (h.`entity` IN ('A','B') OR h.`journal_id` LIKE 'head_main_id_202306080002%')

        """
        where_str = self._parse_where(where)
        model_data = ModelDataDeleteDTO(
            elementName=self.element_info.elementName,
            folderId=self.element_info.folderId,
            whereStr=where_str
        )
        res = await self.async_api.journal_model_data.delete(model_data)
        if not res.success:
            raise JournalModelSaveError(','.join([x.msg for x in res.errors]))
        return res

    async def query(
        self,
        where: Union[str, Term, EmptyCriterion] = None,
        head_column: List[str] = None,
        line_column: List[str] = None,
        sort_config: List[JournalSortConfig] = None
    ) -> Tuple[pd.DataFrame, pd.DataFrame]:
        """Query journal data

        Args:
            where: query condition; field names in the condition must match the field
                names of the journal head/line tables (for the format, see the
                condition format of the data table (DataTableMySQL))
            head_column: head table columns to return, e.g. ["entity","year","journal_id"]
            line_column: line table columns to return, e.g. ["journal_id","line_no","account","trx_amount"]
            sort_config: sort configuration, e.g. [{"col": "journal_id","type": "asc"},{"col": "line_no","type": "asc"}]


        .. admonition:: Example

            .. code-block:: python

                # Create the journal model element object
                journal = JournalModel(element_name="ZHY_TEST_0615_01")
                t = journal.table
                # Query condition (see the DataTableMySQL condition format;
                # & means and, | means or)
                where = (
                    ((t.journal_id == 'head_main_id_202306080001') | (t.line_no == '1'))
                    &
                    (t.entity.isin(['A','B']) | t.journal_id.like('head_main_id_202306080002%'))
                )
                head_columns = ["entity", "journal_id", "year", "period", "journal_name"]
                line_columns = ["journal_id", "line_no", "account", "trx_amount", "debit", "credit"]
                sort = [{"col": "journal_id", "type": "desc"}, {"col": "line_no", "type": "asc"}]
                # Call query; returns the head and line DataFrames
                head_df, line_df = journal.query(where=where,
                                                 head_column=head_columns,
                                                 line_column=line_columns,
                                                 sort_config=sort)

            The SQL executed is (only the condition after WHERE matters):

            .. code-block:: sql

                select h.*,l.*
                FROM
                    <journal head table> h left join <journal line table> l on h.journal_id = l.journal_id
                WHERE
                    (h.`year`='2023' OR h.`journal_id`='head_main_id_202306080001')
                AND
                    (h.`entity` IN ('A','B') OR h.`journal_id` LIKE 'head_main_id_202306080002%')


        Hint:
            - When ``where`` is empty, all data of this journal model is returned
            - ``head_column`` names must match the head table field names; when empty,
              all head table fields are returned
              (the journal_id field is always returned, whether or not return columns are specified)
            - ``line_column`` names must match the line table field names; when empty,
              all line table fields are returned
              (the journal_id and line_no fields are always returned, whether or not return columns are specified)
            - ``sort_config`` defaults to ascending by journal_id and line_no; when type is empty, ASC is used

        """
        where_str = None
        if where is not None:
            where_str = self._parse_where(where)
        model_data = ModelDataQueryVO(
            elementName=self.element_info.elementName,
            folderId=self.element_info.folderId,
            whereStr=where_str,
            headQueryCols=head_column,
            lineQueryCols=line_column,
            sortConfig=sort_config
        )
        res = await self.async_api.journal_model_data.query(model_data)
        head_table_name = self.config.logicTable.dataTableInfo.name
        line_table_name = self.config.logicTable.children[0].dataTableInfo.name
        return pd.DataFrame(res[head_table_name]), pd.DataFrame(res[line_table_name])

    async def posting(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
        """Post journal data

        Args:
            where: filter condition (for the format, see the condition format of the
                data table (DataTableMySQL))

        .. admonition:: Example

            .. code-block:: python

                # Create the journal model element object
                journal = JournalModel(element_name="ZHY_TEST_0613_02")
                t = journal.table
                # Filter condition (see the DataTableMySQL condition format;
                # & means and, | means or)
                where = (
                    ((t.year == '2023') | (t.journal_id == 'head_main_id_202306080001'))
                    &
                    (t.entity.isin(['A','B']) | t.journal_id.like('head_main_id_202306080002%'))
                )
                journal.posting(where)

        Hint:

            - If posting succeeds, the post_status field on the journal head table is
              set to 'true'; on failure it is left unchanged

        """
        where_str = None
        if where is not None:
            where_str = self._parse_where(where)
        param = JmPostParamVO(
            elementName=self.element_info.elementName,
            folderId=self.element_info.folderId,
            whereStr=where_str
        )
        resp = await self.async_api.journal_model_data.posting(param)
        if not resp.success:
            raise JournalModelPostingError(
                f"Error occurs while posting journal model.\n"
                f"Detail: {resp}"
            )
        return resp

    async def cancel_post(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
        """Cancel posting of journal data

        Args:
            where: filter condition (for the format, see the condition format of the
                data table (DataTableMySQL))

        .. admonition:: Example

            .. code-block:: python

                journal = JournalModel(element_name="ZHY_TEST_0613_02")
                t = journal.table
                # Filter condition (see the DataTableMySQL condition format;
                # & means and, | means or)
                where = (
                    ((t.year == '2023') | (t.journal_id == 'head_main_id_202306080001'))
                    &
                    (t.entity.isin(['A','B']) | t.journal_id.like('head_main_id_202306080002%'))
                )
                journal.cancel_post(where)

        Hint:
            - If cancelling the posting succeeds, the post_status field on the journal
              head table is set to 'false'; on failure it is left unchanged

        """
        where_str = None
        if where is not None:
            where_str = self._parse_where(where)
        param = JmPostParamVO(
            elementName=self.element_info.elementName,
            folderId=self.element_info.folderId,
            whereStr=where_str,
        )
        resp = await self.async_api.journal_model_data.cancel_post(param)
        if not resp.success:
            raise JournalModelPostingError(
                f"Error occurs while posting journal model.\n"
                f"Detail: {resp}"
            )
        return resp


class JournalModel(AsyncJournalModel, metaclass=SyncMeta):
    synchronize = (
        "save",
        "update",
        "delete",
        "query",
        "posting",
        "cancel_post"
    )
    if TYPE_CHECKING:  # pragma: no cover
        def save(
            self,
            head_df: pd.DataFrame,
            line_df: pd.DataFrame,
            callback: Union[Dict, CallbackInfo] = None,
            relation_field: str = 'journal_id',
            enable_create: bool = True,
            enable_default_value: bool = False,
            enable_repeat_check: bool = True,
            enable_required: bool = False,
            enable_valid_range: bool = True,
            enable_all_errors: bool = True,
            enable_need_one_line: bool = True,
            sync: bool = False
        ) -> CommonResultDTO:
            ...

        def update(
            self,
            head_df: pd.DataFrame,
            line_df: pd.DataFrame,
            callback: Union[Dict, CallbackInfo] = None,
            relation_field: str = 'journal_id',
            enable_create: bool = True,
            enable_default_value: bool = False,
            enable_repeat_check: bool = True,
            enable_required: bool = False,
            enable_valid_range: bool = True,
            enable_all_errors: bool = True,
            enable_need_one_line: bool = True,
        ) -> CommonResultDTO:
            ...

        def check(self, where: Union[str, Term, EmptyCriterion]) -> JmPostResultVO:
            ...

        def delete(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
            ...

        def posting(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
            ...

        def cancel_post(self, where: Union[str, Term, EmptyCriterion]) -> CommonResultDTO:
            ...

        def query(
            self,
            where: Union[str, Term, EmptyCriterion] = None,
            head_column: List[str] = None,
            line_column: List[str] = None,
            sort_config: List[JournalSortConfig] = None
        ) -> Tuple[pd.DataFrame, pd.DataFrame]:
            ...
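
# End-to-end sketch of the synchronous wrapper (illustrative only; the element name
# and conditions are assumptions, not from the original source):
#
#     journal = JournalModel('ZHY_TEST_0613_02')
#     journal.save(head_df=head_df, line_df=line_df)            # validate and persist
#     head, lines = journal.query(where=journal.table.year == '2023')
#     journal.posting(journal.table.journal_id == 'head_main_id_202306080001')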