deepfos 1.1.60__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (175)
  1. deepfos/__init__.py +6 -0
  2. deepfos/_version.py +21 -0
  3. deepfos/algo/__init__.py +0 -0
  4. deepfos/algo/graph.py +171 -0
  5. deepfos/algo/segtree.py +31 -0
  6. deepfos/api/V1_1/__init__.py +0 -0
  7. deepfos/api/V1_1/business_model.py +119 -0
  8. deepfos/api/V1_1/dimension.py +599 -0
  9. deepfos/api/V1_1/models/__init__.py +0 -0
  10. deepfos/api/V1_1/models/business_model.py +1033 -0
  11. deepfos/api/V1_1/models/dimension.py +2768 -0
  12. deepfos/api/V1_2/__init__.py +0 -0
  13. deepfos/api/V1_2/dimension.py +285 -0
  14. deepfos/api/V1_2/models/__init__.py +0 -0
  15. deepfos/api/V1_2/models/dimension.py +2923 -0
  16. deepfos/api/__init__.py +0 -0
  17. deepfos/api/account.py +167 -0
  18. deepfos/api/accounting_engines.py +147 -0
  19. deepfos/api/app.py +626 -0
  20. deepfos/api/approval_process.py +198 -0
  21. deepfos/api/base.py +983 -0
  22. deepfos/api/business_model.py +160 -0
  23. deepfos/api/consolidation.py +129 -0
  24. deepfos/api/consolidation_process.py +106 -0
  25. deepfos/api/datatable.py +341 -0
  26. deepfos/api/deep_pipeline.py +61 -0
  27. deepfos/api/deepconnector.py +36 -0
  28. deepfos/api/deepfos_task.py +92 -0
  29. deepfos/api/deepmodel.py +188 -0
  30. deepfos/api/dimension.py +486 -0
  31. deepfos/api/financial_model.py +319 -0
  32. deepfos/api/journal_model.py +119 -0
  33. deepfos/api/journal_template.py +132 -0
  34. deepfos/api/memory_financial_model.py +98 -0
  35. deepfos/api/models/__init__.py +3 -0
  36. deepfos/api/models/account.py +483 -0
  37. deepfos/api/models/accounting_engines.py +756 -0
  38. deepfos/api/models/app.py +1338 -0
  39. deepfos/api/models/approval_process.py +1043 -0
  40. deepfos/api/models/base.py +234 -0
  41. deepfos/api/models/business_model.py +805 -0
  42. deepfos/api/models/consolidation.py +711 -0
  43. deepfos/api/models/consolidation_process.py +248 -0
  44. deepfos/api/models/datatable_mysql.py +427 -0
  45. deepfos/api/models/deep_pipeline.py +55 -0
  46. deepfos/api/models/deepconnector.py +28 -0
  47. deepfos/api/models/deepfos_task.py +386 -0
  48. deepfos/api/models/deepmodel.py +308 -0
  49. deepfos/api/models/dimension.py +1576 -0
  50. deepfos/api/models/financial_model.py +1796 -0
  51. deepfos/api/models/journal_model.py +341 -0
  52. deepfos/api/models/journal_template.py +854 -0
  53. deepfos/api/models/memory_financial_model.py +478 -0
  54. deepfos/api/models/platform.py +178 -0
  55. deepfos/api/models/python.py +221 -0
  56. deepfos/api/models/reconciliation_engine.py +411 -0
  57. deepfos/api/models/reconciliation_report.py +161 -0
  58. deepfos/api/models/role_strategy.py +884 -0
  59. deepfos/api/models/smartlist.py +237 -0
  60. deepfos/api/models/space.py +1137 -0
  61. deepfos/api/models/system.py +1065 -0
  62. deepfos/api/models/variable.py +463 -0
  63. deepfos/api/models/workflow.py +946 -0
  64. deepfos/api/platform.py +199 -0
  65. deepfos/api/python.py +90 -0
  66. deepfos/api/reconciliation_engine.py +181 -0
  67. deepfos/api/reconciliation_report.py +64 -0
  68. deepfos/api/role_strategy.py +234 -0
  69. deepfos/api/smartlist.py +69 -0
  70. deepfos/api/space.py +582 -0
  71. deepfos/api/system.py +372 -0
  72. deepfos/api/variable.py +154 -0
  73. deepfos/api/workflow.py +264 -0
  74. deepfos/boost/__init__.py +6 -0
  75. deepfos/boost/py_jstream.py +89 -0
  76. deepfos/boost/py_pandas.py +20 -0
  77. deepfos/cache.py +121 -0
  78. deepfos/config.py +6 -0
  79. deepfos/core/__init__.py +27 -0
  80. deepfos/core/cube/__init__.py +10 -0
  81. deepfos/core/cube/_base.py +462 -0
  82. deepfos/core/cube/constants.py +21 -0
  83. deepfos/core/cube/cube.py +408 -0
  84. deepfos/core/cube/formula.py +707 -0
  85. deepfos/core/cube/syscube.py +532 -0
  86. deepfos/core/cube/typing.py +7 -0
  87. deepfos/core/cube/utils.py +238 -0
  88. deepfos/core/dimension/__init__.py +11 -0
  89. deepfos/core/dimension/_base.py +506 -0
  90. deepfos/core/dimension/dimcreator.py +184 -0
  91. deepfos/core/dimension/dimension.py +472 -0
  92. deepfos/core/dimension/dimexpr.py +271 -0
  93. deepfos/core/dimension/dimmember.py +155 -0
  94. deepfos/core/dimension/eledimension.py +22 -0
  95. deepfos/core/dimension/filters.py +99 -0
  96. deepfos/core/dimension/sysdimension.py +168 -0
  97. deepfos/core/logictable/__init__.py +5 -0
  98. deepfos/core/logictable/_cache.py +141 -0
  99. deepfos/core/logictable/_operator.py +663 -0
  100. deepfos/core/logictable/nodemixin.py +673 -0
  101. deepfos/core/logictable/sqlcondition.py +609 -0
  102. deepfos/core/logictable/tablemodel.py +497 -0
  103. deepfos/db/__init__.py +36 -0
  104. deepfos/db/cipher.py +660 -0
  105. deepfos/db/clickhouse.py +191 -0
  106. deepfos/db/connector.py +195 -0
  107. deepfos/db/daclickhouse.py +171 -0
  108. deepfos/db/dameng.py +101 -0
  109. deepfos/db/damysql.py +189 -0
  110. deepfos/db/dbkits.py +358 -0
  111. deepfos/db/deepengine.py +99 -0
  112. deepfos/db/deepmodel.py +82 -0
  113. deepfos/db/deepmodel_kingbase.py +83 -0
  114. deepfos/db/edb.py +214 -0
  115. deepfos/db/gauss.py +83 -0
  116. deepfos/db/kingbase.py +83 -0
  117. deepfos/db/mysql.py +184 -0
  118. deepfos/db/oracle.py +131 -0
  119. deepfos/db/postgresql.py +192 -0
  120. deepfos/db/sqlserver.py +99 -0
  121. deepfos/db/utils.py +135 -0
  122. deepfos/element/__init__.py +89 -0
  123. deepfos/element/accounting.py +348 -0
  124. deepfos/element/apvlprocess.py +215 -0
  125. deepfos/element/base.py +398 -0
  126. deepfos/element/bizmodel.py +1269 -0
  127. deepfos/element/datatable.py +2467 -0
  128. deepfos/element/deep_pipeline.py +186 -0
  129. deepfos/element/deepconnector.py +59 -0
  130. deepfos/element/deepmodel.py +1806 -0
  131. deepfos/element/dimension.py +1254 -0
  132. deepfos/element/fact_table.py +427 -0
  133. deepfos/element/finmodel.py +1485 -0
  134. deepfos/element/journal.py +840 -0
  135. deepfos/element/journal_template.py +943 -0
  136. deepfos/element/pyscript.py +412 -0
  137. deepfos/element/reconciliation.py +553 -0
  138. deepfos/element/rolestrategy.py +243 -0
  139. deepfos/element/smartlist.py +457 -0
  140. deepfos/element/variable.py +756 -0
  141. deepfos/element/workflow.py +560 -0
  142. deepfos/exceptions/__init__.py +239 -0
  143. deepfos/exceptions/hook.py +86 -0
  144. deepfos/lazy.py +104 -0
  145. deepfos/lazy_import.py +84 -0
  146. deepfos/lib/__init__.py +0 -0
  147. deepfos/lib/_javaobj.py +366 -0
  148. deepfos/lib/asynchronous.py +879 -0
  149. deepfos/lib/concurrency.py +107 -0
  150. deepfos/lib/constant.py +39 -0
  151. deepfos/lib/decorator.py +310 -0
  152. deepfos/lib/deepchart.py +778 -0
  153. deepfos/lib/deepux.py +477 -0
  154. deepfos/lib/discovery.py +273 -0
  155. deepfos/lib/edb_lexer.py +789 -0
  156. deepfos/lib/eureka.py +156 -0
  157. deepfos/lib/filterparser.py +751 -0
  158. deepfos/lib/httpcli.py +106 -0
  159. deepfos/lib/jsonstreamer.py +80 -0
  160. deepfos/lib/msg.py +394 -0
  161. deepfos/lib/nacos.py +225 -0
  162. deepfos/lib/patch.py +92 -0
  163. deepfos/lib/redis.py +241 -0
  164. deepfos/lib/serutils.py +181 -0
  165. deepfos/lib/stopwatch.py +99 -0
  166. deepfos/lib/subtask.py +572 -0
  167. deepfos/lib/sysutils.py +703 -0
  168. deepfos/lib/utils.py +1003 -0
  169. deepfos/local.py +160 -0
  170. deepfos/options.py +670 -0
  171. deepfos/translation.py +237 -0
  172. deepfos-1.1.60.dist-info/METADATA +33 -0
  173. deepfos-1.1.60.dist-info/RECORD +175 -0
  174. deepfos-1.1.60.dist-info/WHEEL +5 -0
  175. deepfos-1.1.60.dist-info/top_level.txt +1 -0
deepfos/lib/sysutils.py
@@ -0,0 +1,703 @@
+ """System-related utility classes/functions"""
+ import asyncio
+ import base64
+ import gzip
+ import itertools
+ import json
+ import time
+ from datetime import datetime
+ from enum import Enum
+ from io import TextIOWrapper
+ from typing import Dict, Any, Union, List, IO, TypeVar, Optional
+ import pandas as pd
+ import math
+ import hashlib
+ from urllib.parse import unquote
+ from loguru import logger
+
+ from ._javaobj import is_java_serialized, JavaDeSerializeHelper
+ from deepfos.lib.decorator import cached_property, lru_cache
+ from deepfos.api.deepfos_task import TaskAPI
+ from deepfos.api.models.deepfos_task import (
+     TaskSearchDTO, JobCreateDto,
+     PeriodicTaskCreateInfo, ScheduledTaskCreateInfo
+ )
+ from deepfos.element.base import ElementBase
+ from .asynchronous import evloop
+ from .utils import split_dataframe
+ from deepfos.options import OPTION
+ from deepfos.api.models import BaseModel
+ from deepfos.api.models.space import SpFileBusinessRecordSaveDto
+ from deepfos.api.platform import PlatformAPI
+ from deepfos.api.space import SpaceAPI
+ from deepfos.api.system import SystemAPI
+ from deepfos.api import account as acc_api
+ from deepfos.api.models import account as account_model
+ from deepfos.cache import SpaceSeperatedLRUCache
+ from deepfos.api.models.account import UserGroupModifyDTO
+
+ _VALUE_KEY = 'value'
+ _DTNAME_KEY = 'dataTableName'
+ _LOGIC_KEY = 'logicKeys'
+ _TEMP_MERGE_KEY = '#cartes1an_t3mp0rary_k3y#'
+
+
+ class ValType(int, Enum):
+     invalid = 0
+     table = 1
+     field = 2
+
+     @classmethod
+     def classify(cls, value):
+         if isinstance(value, dict) and _VALUE_KEY in value:
+             return cls.field
+         if isinstance(value, list) and value:
+             item = value[0]
+             if isinstance(item, dict) and _DTNAME_KEY in item:
+                 return cls.table
+         return cls.invalid
+
+
+ class BaseArgParser:  # pragma: no cover
+     def parse(self):
+         raise NotImplementedError
+
+
+ class BizModelArgParser:
+     def __init__(self, arg: Dict[str, Any]):
+         self.arg = arg.copy()
+
+     def _parse_row(self, row: Dict[str, Any], memo: Dict):
+         classify = ValType.classify
+
+         table_name = row.pop(_DTNAME_KEY)
+         parsed_rows = memo.setdefault(table_name, [])
+         logic_keys = row.pop(_LOGIC_KEY, [])
+         cur_row = {k: row[k] for k in logic_keys}
+         parsed_rows.append(cur_row)
+
+         child_tables = []
+         for key, val in row.items():
+             val_type = classify(val)
+
+             if val_type is ValType.invalid:
+                 continue
+             elif val_type is ValType.table:
+                 child_tables.append(val)
+             elif val_type is ValType.field:
+                 cur_row[key] = val[_VALUE_KEY]
+
+         for tbl in child_tables:
+             for row in tbl:
+                 self._parse_row(row, memo)
+
+     def parse(self) -> Dict[str, pd.DataFrame]:
+         memo = {}
+         self._parse_row(self.arg, memo)
+         return {
+             k: pd.DataFrame(v)
+             for k, v in memo.items()
+         }
+
+
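For orientation, a minimal sketch of how BizModelArgParser flattens a nested business-model payload (editor's illustration with hypothetical table and field names, not package code): rows are grouped per dataTableName, the logicKeys fields and any {'value': ...} fields are kept, and child tables are recursed into.

    # Editor's sketch, not part of the package source
    arg = {
        'dataTableName': 'order',
        'logicKeys': ['order_id'],
        'order_id': 'A001',
        'amount': {'value': 100},     # field -> kept on the row
        'items': [                    # child table -> recursed into
            {'dataTableName': 'order_item', 'logicKeys': ['item_id'],
             'item_id': 'I1', 'qty': {'value': 2}},
        ],
    }
    frames = BizModelArgParser(arg).parse()
    # frames['order']      -> one row: {'order_id': 'A001', 'amount': 100}
    # frames['order_item'] -> one row: {'item_id': 'I1', 'qty': 2}
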
+ def complete_cartesian_product(
+         fix: Dict[str, Union[str, list]],
+         df: pd.DataFrame = None,
+         paths: Union[str, Dict[str, str]] = None,
+ ) -> pd.DataFrame:
+     """
+     Build the complete Cartesian product of dimension members
+
+     Args:
+         fix: Dimension expressions to expand, as a dict keyed by dimension name;
+             each value is either a list of dimension members or a dimension-expression string.
+         df: To complete the Cartesian product for an existing DataFrame, pass it here.
+             If omitted, the Cartesian product of the members in ``fix`` is returned.
+         paths: Paths of the dimensions in ``fix``. If all dimensions live in the same
+             folder, pass a single path; otherwise pass a dict keyed by dimension name.
+             If omitted, each dimension's path is resolved automatically.
+
+     Returns:
+         DataFrame holding the Cartesian product of the dimension members
+
+     .. admonition:: Example
+
+         .. code-block:: python
+
+             # Without df, returns a DataFrame with columns cost_center, year, period
+             df = complete_cartesian_product(fix={
+                 'cost_center': 'Base(1001,0)',
+                 'year': ['2021', '2022'],
+                 'period': 'Base(TotalPeriod,0)'
+             })
+
+             # With df, returns a DataFrame with the five columns
+             # account, data, cost_center, year, period
+             df1 = pd.DataFrame([
+                 {'account': '1002', 'data': '111'},
+                 {'account': '1003', 'data': '444'}
+             ])
+             df = complete_cartesian_product(
+                 fix={
+                     'cost_center': 'Base(1001,0)',
+                     'year': ['2021', '2022'],
+                     'period': 'Base(TotalPeriod,0)'
+                 },
+                 df=df1
+             )
+
+     See Also:
+         The values of ``fix`` accept both a member list and a dimension expression,
+         but a list is more efficient.
+     """
+     from deepfos.element.dimension import AsyncDimension
+     from .asynchronous import evloop
+
+     if isinstance(paths, dict):
+         path_getter = paths.__getitem__
+     else:
+         path_getter = lambda _: paths
+
+     # Walk fix; a str value is treated as a dimension expression
+     # and resolved into a member list
+     mbrs = {}
+     futures = []
+
+     for dim, exp in fix.items():
+         if isinstance(exp, str):
+             if "(" not in exp:
+                 mbrs[dim] = exp.split(';')
+             else:
+                 path = path_getter(dim)
+                 future = evloop.apply(AsyncDimension(element_name=dim, path=path).query(
+                     expression=exp, fields=['name'], as_model=False
+                 ))
+                 futures.append((dim, future))
+         else:
+             if not isinstance(exp, list):
+                 raise TypeError('Values of fix must be a dimension expression (str) or a member list')
+             mbrs[dim] = exp
+
+     for dim, future in futures:
+         mbrs[dim] = [item['name'] for item in future.result()]
+
+     if df is None:
+         df = pd.DataFrame(columns=list(fix.keys()))
+     elif df.empty:
+         df = pd.DataFrame(columns=list(set(df.columns) | set(fix.keys())))
+     df_cartesian = pd.DataFrame(
+         list(itertools.product(*mbrs.values())),
+         columns=list(mbrs.keys())
+     )
+     # If df and fix share no dimension columns, add a temporary merge key
+     if temporary_key := not (set(df.columns) & set(fix.keys())):
+         df[_TEMP_MERGE_KEY] = 1
+         df_cartesian[_TEMP_MERGE_KEY] = 1
+
+     # Complete the Cartesian product
+     df = pd.merge(df, df_cartesian, how='right')
+
+     if temporary_key:
+         df.drop(columns=[_TEMP_MERGE_KEY], inplace=True)
+     return df
+
+
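The temporary-key branch above is the classic pandas cross-join trick: when the two frames share no columns, merging on a constant column yields every pairwise combination. A minimal, self-contained sketch (editor's illustration, not package code):

    import pandas as pd

    left = pd.DataFrame({'account': ['1002', '1003']})
    right = pd.DataFrame({'year': ['2021', '2022']})

    # No shared columns: merge on a constant key to get the cross product
    left['_k'] = 1
    right['_k'] = 1
    crossed = pd.merge(left, right, how='right').drop(columns=['_k'])
    print(crossed)  # 4 rows: every (account, year) combination
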
+ SIZE_UNIT = ('B', 'KB', 'MB', 'GB')
+
+ AnyStr = TypeVar('AnyStr', bytes, str)
+
+
+ def export_file_for_download(file_name: str, file: Union[str, bytes, TextIOWrapper, IO[AnyStr], memoryview]):
+     """Export a file to the download center
+
+     Args:
+         file_name: file name
+         file: file content
+
+     .. admonition:: All of the following usages work
+
+         .. code-block:: python
+
+             from deepfos.lib.sysutils import export_file_for_download
+
+             # . Pass the file content directly as a string
+             export_file_for_download('t1.txt', 'ttttt')
+
+             # . Pass an open file object holding the content
+             with open('t.txt', 'r') as fp:
+                 export_file_for_download('t2.txt', fp)
+
+             # . Pass the bytes of a file holding the content
+             with open('t.txt', 'rb') as fp:
+                 export_file_for_download('t3.txt', fp.read())
+
+             # . Pass a buffer
+             import io
+             import pandas as pd
+
+             buffer = io.BytesIO()
+             # Write DataFrame content into the buffer
+             with pd.ExcelWriter(buffer, engine="openpyxl") as writer:
+                 pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]}).to_excel(
+                     writer, index=False, encoding='utf-8'
+                 )
+
+             # Upload to the download center
+             export_file_for_download('out.xlsx', buffer.getbuffer())
+
+     """
+     if hasattr(file, 'read'):
+         content = file.read()
+     else:
+         content = file
+
+     upload_resp = PlatformAPI().file.upload(file_type='DL',
+                                             file_name=file_name,
+                                             file=content)
+
+     if upload_resp.fileSize == 0:
+         logger.warning('Uploading empty file.')
+         SpaceAPI().business.save(
+             SpFileBusinessRecordSaveDto(
+                 app=OPTION.api.header['app'],
+                 space=OPTION.api.header['space'],
+                 fileName=file_name,
+                 createTime=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+                 fileId=upload_resp.id,
+                 unit='B',
+                 fileSize=upload_resp.fileSize,
+                 status='SUCCESS'
+             )
+         )
+         return
+
+     unit_square = math.floor(math.log(upload_resp.fileSize, 1024))
+     size, unit = round(upload_resp.fileSize / (1024 ** unit_square), 2), SIZE_UNIT[unit_square]
+
+     SpaceAPI().business.save(
+         SpFileBusinessRecordSaveDto(
+             app=OPTION.api.header['app'],
+             space=OPTION.api.header['space'],
+             fileName=file_name,
+             createTime=datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+             fileId=upload_resp.id,
+             unit=unit,
+             fileSize=size,
+             status='SUCCESS'
+         )
+     )
+
+
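The unit selection above amounts to taking floor(log base 1024) of the byte count and indexing into SIZE_UNIT. A standalone check (editor's illustration):

    import math

    SIZE_UNIT = ('B', 'KB', 'MB', 'GB')

    file_size = 5_242_880                        # 5 * 1024**2 bytes
    exp = math.floor(math.log(file_size, 1024))  # -> 2, i.e. MB
    print(round(file_size / 1024 ** exp, 2), SIZE_UNIT[exp])  # 5.0 MB
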
+ class ParamZipHelper:
+     """Parameter compression/decompression helper
+
+     Args:
+         ori_str: a str that has been gzip-compressed and base64-encoded
+         encoding: if the encoding of the original str is known, it is used
+             instead of the default decoding logic
+     """
+     ENCODING = ['utf-8', 'gbk', 'iso8859_1']
+     _COMPRESS_LEVEL_FAST = 1
+     _COMPRESS_LEVEL_TRADEOFF = 6
+     _COMPRESS_LEVEL_BEST = 9
+
+     def __init__(self, ori_str: str, encoding: str = None):
+         self.ori_str = ori_str
+         self.encoding = encoding or 'UTF-8'
+
+     def decompress(self) -> str:
+         """Decompress a gzip-compressed, base64-encoded str
+
+         Returns:
+             the decompressed str
+
+         """
+         original_string = gzip.decompress(base64.b64decode(self.ori_str))
+
+         if is_java_serialized(original_string):  # pragma: no cover
+             jsh = JavaDeSerializeHelper(original_string, self.encoding)
+             decoded_string = jsh.read_object()
+         else:
+             if self.encoding:
+                 decoded_string = original_string.decode(self.encoding)
+             else:  # pragma: no cover
+                 decoded_string = self._try_decode(original_string)
+         return decoded_string
+
+     def decompress_json(self) -> Union[str, dict, list]:
+         """Decompress a gzip-compressed, base64-encoded str and json.loads the result
+
+         Returns:
+             the decompressed str after json.loads
+
+         """
+         decoded_string = self.decompress()
+         return json.loads(decoded_string)
+
+     def compress(self, compresslevel: int = _COMPRESS_LEVEL_BEST) -> str:
+         """
+         Compress and base64-encode the given str
+
+         Args:
+             compresslevel: compression level (1, 6 or 9), see :class:`CompressLevel`
+
+         Returns:
+             the gzip-compressed, base64-encoded str
+
+         """
+         compressed_string = gzip.compress(self.ori_str.encode(self.encoding), compresslevel)
+         return base64.b64encode(compressed_string).decode(self.encoding)
+
+     def _try_decode(self, input_stream: bytes) -> Union[str, bytes]:
+         """Try decoding the bytes with utf-8, gbk and iso8859_1 in turn;
+         if none succeeds, return the raw bytes
+
+         Args:
+             input_stream: the bytes to decode
+
+         Returns:
+             the decoded str, or the raw bytes
+
+         """
+         for encoding in self.ENCODING:
+             # noinspection PyBroadException
+             try:
+                 decoded = input_stream.decode(encoding)
+                 return decoded  # pragma: no cover
+             except Exception:
+                 pass
+         # if no decode way, return raw bytes
+         return input_stream
+
+
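A round trip through the helper (editor's sketch, assuming the payload is not Java-serialized so the plain decode path is taken):

    payload = '{"user": "u1", "flag": true}'

    packed = ParamZipHelper(payload).compress()      # gzip + base64, level 9
    print(ParamZipHelper(packed).decompress_json())  # {'user': 'u1', 'flag': True}
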
+ class BatchInfo:
+     """Per-detail status of a batch execution"""
+     _arg_dict = {}
+     _required_keys = None
+
+     @classmethod
+     def set_keys(cls, keys: List[str]):
+         """Set the detail fields involved when updating details"""
+         cls._required_keys = keys
+
+     @classmethod
+     def set_success(cls, arg: Dict):
+         """Mark a single detail as succeeded"""
+         if cls._required_keys:
+             cls._arg_dict[json.dumps({k: arg[k] for k in cls._required_keys})] = True
+         else:
+             cls._arg_dict[json.dumps(arg)] = True
+
+     @classmethod
+     def set_failure(cls, arg: Dict):
+         """Mark a single detail as failed"""
+         if cls._required_keys:
+             cls._arg_dict[json.dumps({k: arg[k] for k in cls._required_keys})] = False
+         else:
+             cls._arg_dict[json.dumps(arg)] = False
+
+     @classmethod
+     def batch_set_success(cls, arg: pd.DataFrame):
+         """Mark a batch of details as succeeded"""
+         if cls._required_keys:
+             arg = arg[cls._required_keys]
+         args = arg.to_dict(orient='records')
+         for arg in args:
+             cls._arg_dict[json.dumps(arg)] = True
+
+     @classmethod
+     def batch_set_failure(cls, arg: pd.DataFrame):
+         """Mark a batch of details as failed"""
+         if cls._required_keys:
+             arg = arg[cls._required_keys]
+         args = arg.to_dict(orient='records')
+         for arg in args:
+             cls._arg_dict[json.dumps(arg)] = False
+
+     @classmethod
+     def value(cls):
+         return cls._arg_dict
+
+
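Usage sketch (editor's illustration): the class keeps one process-wide dict keyed by the JSON-serialized detail, so details should first be restricted to a stable key set via set_keys; the field names below are hypothetical.

    BatchInfo.set_keys(['entity', 'period'])

    BatchInfo.set_success({'entity': 'E1', 'period': '2022M1', 'extra': 'ignored'})
    BatchInfo.set_failure({'entity': 'E2', 'period': '2022M1'})

    print(BatchInfo.value())
    # {'{"entity": "E1", "period": "2022M1"}': True,
    #  '{"entity": "E2", "period": "2022M1"}': False}
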
+ class PyInfo(BaseModel):
+     """Information of the Python element a task instance executes"""
+     #: element type
+     elementType: str = "PY"
+     #: element name
+     elementName: str = None
+     #: element folder id
+     folderId: str = None
+     #: element path
+     path: str = None
+
+
+ class TaskMode(str, Enum):
+     """Execution mode of a task instance"""
+     #: execute immediately
+     immediate = "immediately"
+     #: periodic execution
+     period = "period"
+     #: scheduled execution
+     scheduled = "scheduled"
+
+
+ class ScheduledTaskInfo(BaseModel):
+     """Config for a scheduled task instance"""
+     #: execution time
+     executeTime: datetime
+
+
+ class PeriodTaskInfo(BaseModel):
+     """Config for a periodic task instance"""
+     #: start time
+     startTime: datetime
+     #: end time
+     endTime: datetime
+     #: cron expression driving the periodic task
+     cron: str
+
+
+ class TaskUtil:
+     """Helper for creating task instances
+
+     Args:
+         task_code: the unique task code identifying the task configuration
+         py_info: the Python element the task instance runs; if not provided
+             here, it can still be provided in `run_job_contents`
+
+     """
+     api = TaskAPI(sync=True)
+     _chunksize = 200
+
+     def __init__(self, task_code, py_info: PyInfo = None):
+         self.task_code = task_code
+         if self.meta is None:
+             raise ValueError(f"No config for task_code: [{self.task_code}].")
+         self.py_info = py_info
+
+     @cached_property
+     def meta(self):
+         res = self.api.task_instance.search(TaskSearchDTO(maxVersionTask=True, taskCode=self.task_code))
+         if res is not None and len(res) > 0:
+             return res[0]
+
+     info = meta
+
+     def run_job_contents(self,
+                          job_contents: Union[List, pd.DataFrame],
+                          py_info: PyInfo = None,
+                          mode: TaskMode = TaskMode.immediate,
+                          config: Union[PeriodTaskInfo, ScheduledTaskInfo] = None
+                          ):
+         """Run a task instance
+
+         Args:
+             job_contents: details of the task instance
+             py_info: the Python element to execute
+             mode: execution mode; defaults to immediate execution. Periodic and
+                 scheduled execution are also available, in which case a matching
+                 config must be provided.
+             config: task config for periodic or scheduled execution
+
+         """
+         job_contents = self._valid_params(config, job_contents, mode, py_info)
+
+         py_info = py_info or self.py_info
+
+         if job_contents.empty:  # pragma: no cover
+             return
+
+         self._deal_with_content_name(job_contents)
+
+         if mode == TaskMode.immediate:
+             return self._create_batch_add(job_contents, py_info)
+
+         if mode == TaskMode.period:
+             payload = PeriodicTaskCreateInfo(
+                 cron=config.cron,
+                 startTime=config.startTime.strftime('%Y-%m-%d %H:%M:%S'),
+                 endTime=config.endTime.strftime('%Y-%m-%d %H:%M:%S'),
+                 customParams=py_info.dict(),
+                 taskId=self.meta.id,
+                 upStreamIdentity=4,
+                 lastBatch=False
+             )
+             call_api = self.api.task_instance.instance_period_create
+         else:
+             payload = ScheduledTaskCreateInfo(
+                 executeTime=config.executeTime.strftime('%Y-%m-%d %H:%M:%S'),
+                 customParams=py_info.dict(),
+                 taskId=self.meta.id,
+                 upStreamIdentity=4,
+                 lastBatch=False
+             )
+             call_api = self.api.task_instance.instance_scheduled_create
+
+         self._create_task_instance(call_api, job_contents, payload)
+
+     def _valid_params(self, config, job_contents, mode, py_info) -> pd.DataFrame:
+         py_info = py_info or self.py_info
+
+         if py_info is None:
+             raise ValueError("py_info is required.")
+         if py_info.elementName is None:
+             raise ValueError("elementName of py_info is required.")
+         if py_info.folderId is None:
+             py_info.folderId = ElementBase.check_exist(ele_name=py_info.elementName,
+                                                        path=py_info.path, ele_type='PY',
+                                                        silent=False).folderId
+
+         if mode == TaskMode.period and not isinstance(config, PeriodTaskInfo):
+             raise ValueError('Expected config of PeriodTaskInfo type for task instance with mode: <period>.')
+
+         if mode == TaskMode.scheduled and not isinstance(config, ScheduledTaskInfo):
+             raise ValueError('Expected config of ScheduledTaskInfo type for task instance with mode: <scheduled>.')
+
+         if isinstance(job_contents, List):
+             try:
+                 job_contents = pd.DataFrame(job_contents)
+             except Exception:
+                 raise ValueError('Param job_contents is not valid since it can\'t be converted to pandas DataFrame.')
+         else:
+             job_contents = job_contents.copy()
+
+         not_found_col = []
+         for required_col in self.meta.compositeKeys.split(','):
+             if required_col not in job_contents:
+                 not_found_col.append(required_col)
+         if not_found_col:
+             raise ValueError(f'Missing required columns: {sorted(not_found_col)} (they are compositeKeys).')
+
+         # return the normalized copy so callers always work on a DataFrame
+         return job_contents
+
+     @classmethod
+     def _create_task_instance(cls, call_api, job_contents, payload):
+         if job_contents.shape[0] <= cls._chunksize:
+             payload.jobContent = job_contents.to_dict(orient='records')
+             payload.lastBatch = True
+             call_api(payload)
+         else:
+             payload.jobContent = job_contents.iloc[:cls._chunksize].to_dict(orient='records')
+             payload.batchId = call_api(payload).batchId
+
+             chunks = [
+                 batch_contents.to_dict(orient='records')
+                 for batch_contents in split_dataframe(job_contents.iloc[cls._chunksize:], cls._chunksize)
+             ]
+             del job_contents
+
+             # send the remaining chunks one request at a time,
+             # flagging only the final chunk as the last batch
+             for i, chunk in enumerate(chunks):
+                 payload.jobContent = chunk
+                 payload.lastBatch = (i == len(chunks) - 1)
+                 call_api(payload)
+
+     def _create_batch_add(self, job_contents, py_info):
+         payload = []
+         for batch_contents in split_dataframe(job_contents, self._chunksize):
+             payload.append(JobCreateDto(
+                 customParams=py_info.dict(),
+                 jobContent=batch_contents.to_dict(orient='records'),
+                 taskCode=self.task_code,
+                 upStreamIdentity=4))
+         self.api.job.batch_add(payload)
+
+     def _deal_with_content_name(self, job_contents):
+         composite_keys = self.meta.compositeKeys.split(',')
+         str_param = job_contents[composite_keys].astype('str')
+
+         if self.meta.groupBy:
+             groupby = self.meta.groupBy.split(',')
+             others = groupby
+             others.extend(set(self.meta.compositeKeys.split(',')).difference(groupby))
+             job_contents['jobContentNameZhcn'] = str_param[groupby[0]].str.cat([str_param[e] for e in others[1:]],
+                                                                               sep='-')
+         else:
+             job_contents['jobContentNameZhcn'] = str_param[composite_keys[0]].str.cat(
+                 [str_param[e] for e in composite_keys[1:]], sep='-')
+
+
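Putting the pieces together (editor's sketch; the task code, detail columns and cron value are hypothetical and must match an existing task configuration's compositeKeys):

    util = TaskUtil('demo_task_code', py_info=PyInfo(elementName='run_job.py'))

    details = pd.DataFrame([
        {'entity': 'E1', 'period': '2022M1'},
        {'entity': 'E2', 'period': '2022M1'},
    ])

    # immediate execution
    util.run_job_contents(details)

    # periodic execution: a PeriodTaskInfo config is mandatory for mode=period
    util.run_job_contents(
        details,
        mode=TaskMode.period,
        config=PeriodTaskInfo(
            startTime=datetime(2022, 1, 1),
            endTime=datetime(2022, 12, 31),
            cron='0 0 2 * * ?',
        ),
    )
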
+ # -----------------------------------------------------------------------------
+ # helper functions for AccountAPI access
+
+ @lru_cache(maxsize=128, cache_factory=SpaceSeperatedLRUCache)
+ def get_enterprise_code_cached():  # noqa
+     return SystemAPI().space.get_tenant_code()
+
+
+ @lru_cache(maxsize=128, cache_factory=SpaceSeperatedLRUCache)
+ def get_enterprise_id_cached() -> str:
+     enterprise_code = get_enterprise_code_cached()
+
+     for enterprise in acc_api.AccountAPI().enterprise.list():
+         if enterprise.enterpriseCode == enterprise_code:
+             return enterprise.id
+
+     raise ValueError(f"Unknown enterprise: {enterprise_code}")
+
+
+ @lru_cache(maxsize=128, cache_factory=SpaceSeperatedLRUCache)
+ def get_platform_info_cached() -> account_model.PlatFormSecretVO:
+     return acc_api.AccountAPI().platform.secret(
+         enterpriseCode=get_enterprise_code_cached()
+     )
+
+
+ def calc_account_api_signature(
+         timestamp: str,
+         secret: str,
+         platform_code: str,
+         user_id: str = None,
+ ):
+     # Signature scheme: md5 hex digest of
+     # "timestamp&@&user_id&@&platform_code&@&secret" after URL-unquoting
+     if user_id is None:
+         user_id = OPTION.api.header['user']
+     s = "&@&".join((timestamp, user_id, platform_code, secret))
+     return hashlib.md5(unquote(s).encode()).hexdigest()
+
+
+ @lru_cache(maxsize=128, cache_factory=SpaceSeperatedLRUCache)
+ def get_platform_code_cached() -> str:
+     space = OPTION.api.header['space']
+     for enterprise in acc_api.AccountAPI().space.enterprise_space_hierarchy():
+         if enterprise.spaceId == space:
+             return enterprise.platformCode
+
+     raise ValueError(f"Unknown space: {space}")
+
+
+ def resolve_account_api_extra_header():
+     enterprise_id = get_enterprise_id_cached()
+     secret = get_platform_info_cached()
+     platform_code = secret.platformCode
+     platform_secret = secret.platformSecret
+     timestamp = str(int(time.time() * 1000))
+     return {
+         'enterprise-id': enterprise_id,
+         'platform-code': platform_code,
+         'platform-secret': platform_secret,
+         'timestamp': timestamp,
+         'sign': calc_account_api_signature(timestamp, platform_secret, platform_code)
+     }
+
+
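To make the signing concrete, a standalone check of the scheme with made-up values (editor's illustration; the real user_id normally comes from OPTION.api.header['user']):

    import hashlib
    from urllib.parse import unquote

    timestamp, user_id = '1700000000000', 'u-001'
    platform_code, secret = 'PC01', 's3cret'

    s = "&@&".join((timestamp, user_id, platform_code, secret))
    sign = hashlib.md5(unquote(s).encode()).hexdigest()  # 32-char hex digest
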
+ def batch_modify_user_group(payloads: List[UserGroupModifyDTO], max_worker: int = None):
+     """Call the user-center user-group modification API in batch
+
+     Args:
+         payloads: a list of UserGroupModifyDTO, used directly as the request bodies
+         max_worker: maximum concurrency
+
+     Returns: a list of results in the same order as the request bodies
+
+     """
+     if max_worker is not None:
+         if max_worker <= 0:
+             raise ValueError('max_worker must be > 0')
+     else:
+         max_worker = len(payloads)
+
+     result: List[Optional[bool]] = [None] * len(payloads)
+     api = acc_api.AccountAPI(sync=False).user_group.space_modify_group
+
+     async def call_api(idx: int, p: UserGroupModifyDTO, sem: asyncio.Semaphore):
+         async with sem:
+             result[idx] = await api(p)
+
+     async def inner():
+         semaphore = asyncio.Semaphore(max_worker)
+         await asyncio.gather(*(
+             call_api(idx, payload, semaphore)
+             for idx, payload in enumerate(payloads)
+         ))
+
+     evloop.run(inner())
+     return result
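
Usage sketch (editor's illustration; the DTO fields shown are hypothetical and must follow UserGroupModifyDTO's actual schema):

    payloads = [
        UserGroupModifyDTO(groupId='g1', userIds=['u1', 'u2']),  # hypothetical fields
        UserGroupModifyDTO(groupId='g2', userIds=['u3']),
    ]

    # at most 2 requests in flight at once; results keep the payload order
    results = batch_modify_user_group(payloads, max_worker=2)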