deepfos 1.1.74__tar.gz → 1.1.76__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. {deepfos-1.1.74 → deepfos-1.1.76}/CHANGELOG.md +16 -0
  2. {deepfos-1.1.74 → deepfos-1.1.76}/PKG-INFO +1 -1
  3. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/_version.py +3 -3
  4. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_2/models/dimension.py +1 -0
  5. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/financial_model.py +12 -0
  6. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/dimension.py +1 -0
  7. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/financial_model.py +24 -0
  8. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/deepmodel.py +91 -38
  9. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/dimension.py +16 -3
  10. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/finmodel.py +174 -12
  11. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/PKG-INFO +1 -1
  12. {deepfos-1.1.74 → deepfos-1.1.76}/.gitattributes +0 -0
  13. {deepfos-1.1.74 → deepfos-1.1.76}/.gitee/ISSUE_GUIDELINES.md +0 -0
  14. {deepfos-1.1.74 → deepfos-1.1.76}/.gitee/ISSUE_TEMPLATE.md +0 -0
  15. {deepfos-1.1.74 → deepfos-1.1.76}/.gitignore +0 -0
  16. {deepfos-1.1.74 → deepfos-1.1.76}/MANIFEST.in +0 -0
  17. {deepfos-1.1.74 → deepfos-1.1.76}/README.md +0 -0
  18. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/__init__.py +0 -0
  19. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/algo/__init__.py +0 -0
  20. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/algo/graph.py +0 -0
  21. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/__init__.py +0 -0
  22. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/business_model.py +0 -0
  23. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/dimension.py +0 -0
  24. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/models/__init__.py +0 -0
  25. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/models/business_model.py +0 -0
  26. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_1/models/dimension.py +0 -0
  27. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_2/__init__.py +0 -0
  28. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_2/dimension.py +0 -0
  29. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/V1_2/models/__init__.py +0 -0
  30. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/__init__.py +0 -0
  31. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/account.py +0 -0
  32. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/accounting_engines.py +0 -0
  33. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/app.py +0 -0
  34. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/approval_process.py +0 -0
  35. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/base.py +0 -0
  36. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/business_model.py +0 -0
  37. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/consolidation.py +0 -0
  38. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/consolidation_process.py +0 -0
  39. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/datatable.py +0 -0
  40. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/deep_pipeline.py +0 -0
  41. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/deepconnector.py +0 -0
  42. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/deepfos_task.py +0 -0
  43. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/deepmodel.py +0 -0
  44. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/dimension.py +0 -0
  45. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/journal_model.py +0 -0
  46. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/journal_template.py +0 -0
  47. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/memory_financial_model.py +0 -0
  48. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/__init__.py +0 -0
  49. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/account.py +0 -0
  50. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/accounting_engines.py +0 -0
  51. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/app.py +0 -0
  52. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/approval_process.py +0 -0
  53. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/base.py +0 -0
  54. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/business_model.py +0 -0
  55. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/consolidation.py +0 -0
  56. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/consolidation_process.py +0 -0
  57. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/datatable_mysql.py +0 -0
  58. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/deep_pipeline.py +0 -0
  59. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/deepconnector.py +0 -0
  60. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/deepfos_task.py +0 -0
  61. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/deepmodel.py +0 -0
  62. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/journal_model.py +0 -0
  63. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/journal_template.py +0 -0
  64. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/memory_financial_model.py +0 -0
  65. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/platform.py +0 -0
  66. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/python.py +0 -0
  67. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/reconciliation_engine.py +0 -0
  68. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/reconciliation_report.py +0 -0
  69. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/role_strategy.py +0 -0
  70. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/smartlist.py +0 -0
  71. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/space.py +0 -0
  72. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/system.py +0 -0
  73. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/variable.py +0 -0
  74. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/models/workflow.py +0 -0
  75. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/platform.py +0 -0
  76. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/python.py +0 -0
  77. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/reconciliation_engine.py +0 -0
  78. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/reconciliation_report.py +0 -0
  79. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/role_strategy.py +0 -0
  80. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/smartlist.py +0 -0
  81. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/space.py +0 -0
  82. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/system.py +0 -0
  83. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/variable.py +0 -0
  84. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/api/workflow.py +0 -0
  85. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/__init__.py +0 -0
  86. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/jstream.c +0 -0
  87. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/jstream.pyx +0 -0
  88. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/pandas.c +0 -0
  89. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/pandas.pyx +0 -0
  90. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/py_jstream.py +0 -0
  91. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/boost/py_pandas.py +0 -0
  92. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/cache.py +0 -0
  93. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/config.py +0 -0
  94. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/__init__.py +0 -0
  95. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/__init__.py +0 -0
  96. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/_base.py +0 -0
  97. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/constants.py +0 -0
  98. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/cube.py +0 -0
  99. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/formula.py +0 -0
  100. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/syscube.py +0 -0
  101. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/typing.py +0 -0
  102. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/cube/utils.py +0 -0
  103. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/__init__.py +0 -0
  104. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/_base.py +0 -0
  105. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/dimcreator.py +0 -0
  106. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/dimension.py +0 -0
  107. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/dimexpr.py +0 -0
  108. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/dimmember.py +0 -0
  109. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/eledimension.py +0 -0
  110. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/filters.py +0 -0
  111. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/dimension/sysdimension.py +0 -0
  112. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/__init__.py +0 -0
  113. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/_cache.py +0 -0
  114. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/_operator.py +0 -0
  115. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/nodemixin.py +0 -0
  116. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/sqlcondition.py +0 -0
  117. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/core/logictable/tablemodel.py +0 -0
  118. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/__init__.py +0 -0
  119. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/cipher.py +0 -0
  120. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/clickhouse.py +0 -0
  121. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/connector.py +0 -0
  122. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/daclickhouse.py +0 -0
  123. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/dameng.py +0 -0
  124. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/damysql.py +0 -0
  125. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/dbkits.py +0 -0
  126. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/deepengine.py +0 -0
  127. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/deepmodel.py +0 -0
  128. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/deepmodel_kingbase.py +0 -0
  129. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/edb.py +0 -0
  130. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/gauss.py +0 -0
  131. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/kingbase.py +0 -0
  132. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/mysql.py +0 -0
  133. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/oracle.py +0 -0
  134. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/postgresql.py +0 -0
  135. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/sqlserver.py +0 -0
  136. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/db/utils.py +0 -0
  137. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/__init__.py +0 -0
  138. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/accounting.py +0 -0
  139. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/apvlprocess.py +0 -0
  140. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/base.py +0 -0
  141. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/bizmodel.py +0 -0
  142. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/datatable.py +0 -0
  143. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/deep_pipeline.py +0 -0
  144. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/deepconnector.py +0 -0
  145. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/fact_table.py +0 -0
  146. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/journal.py +0 -0
  147. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/journal_template.py +0 -0
  148. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/pyscript.py +0 -0
  149. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/reconciliation.py +0 -0
  150. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/rolestrategy.py +0 -0
  151. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/smartlist.py +0 -0
  152. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/variable.py +0 -0
  153. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/element/workflow.py +0 -0
  154. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/exceptions/__init__.py +0 -0
  155. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/exceptions/hook.py +0 -0
  156. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lazy.py +0 -0
  157. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/__init__.py +0 -0
  158. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/_javaobj.py +0 -0
  159. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/asynchronous.py +0 -0
  160. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/concurrency.py +0 -0
  161. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/constant.py +0 -0
  162. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/decorator.py +0 -0
  163. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/deepchart.py +0 -0
  164. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/deepux.py +0 -0
  165. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/discovery.py +0 -0
  166. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/edb_lexer.py +0 -0
  167. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/eureka.py +0 -0
  168. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/filterparser.py +0 -0
  169. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/httpcli.py +0 -0
  170. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/jsonstreamer.py +0 -0
  171. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/msg.py +0 -0
  172. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/nacos.py +0 -0
  173. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/patch.py +0 -0
  174. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/redis.py +0 -0
  175. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/serutils.py +0 -0
  176. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/stopwatch.py +0 -0
  177. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/subtask.py +0 -0
  178. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/sysutils.py +0 -0
  179. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/lib/utils.py +0 -0
  180. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/local.py +0 -0
  181. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/options.py +0 -0
  182. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos/translation.py +0 -0
  183. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/SOURCES.txt +0 -0
  184. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/dependency_links.txt +0 -0
  185. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/not-zip-safe +0 -0
  186. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/requires.txt +0 -0
  187. {deepfos-1.1.74 → deepfos-1.1.76}/deepfos.egg-info/top_level.txt +0 -0
  188. {deepfos-1.1.74 → deepfos-1.1.76}/requirements.txt +0 -0
  189. {deepfos-1.1.74 → deepfos-1.1.76}/setup.cfg +0 -0
  190. {deepfos-1.1.74 → deepfos-1.1.76}/setup.py +0 -0
  191. {deepfos-1.1.74 → deepfos-1.1.76}/versioneer.py +0 -0
CHANGELOG.md
@@ -1,3 +1,19 @@
+ ## [1.1.76] - 2025-11-06
+
+ ### Updates
+
+ * Add the dataTypeInfo field to dimensions
+ * Add error_on_empty_link to DeepModel
+ * insert_df performance optimization
+
+
+ ## [1.1.75] - 2025-09-17
+
+ ### Updates
+
+ * Adapt to the financial model delete API
+
+
  ## [1.1.74] - 2025-09-16

  ### Updates
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: deepfos
- Version: 1.1.74
+ Version: 1.1.76
  Summary: Collecions of useful and handy tools for deepfos platform
  Home-page: http://py.deepfos.com
  Author: deepfos-python-team
deepfos/_version.py
@@ -8,11 +8,11 @@ import json

  version_json = '''
  {
- "date": "2025-09-16T04:32:06+0000",
+ "date": "2025-11-06T08:50:17+0000",
  "dirty": false,
  "error": null,
- "full-revisionid": "c0ae4a326c442d072bfc0722ebcf1c6ca5002d03",
- "version": "1.1.74"
+ "full-revisionid": "1f02db295b03468baac5e6df6dfd160ee0eba4a2",
+ "version": "1.1.76"
  }
  ''' # END VERSION_JSON

deepfos/api/V1_2/models/dimension.py
@@ -877,6 +877,7 @@ class DimensionMemberNewDto(BaseModel):


  class DimensionMemberOperationDto(BaseModel):
+ dataTypeInfo: Optional[str] = None
  #: Change type
  moveType: Optional[str] = None
  #: Account type; attribute specific to account members
deepfos/api/financial_model.py
@@ -53,6 +53,10 @@ class Extra(ChildAPI):
  """
  return {'body': calculateDTO}

+ @get('git-version')
+ def git_version(self) -> Union[str, Awaitable[str]]:
+ return {}
+

  class CubeDataAPI(ChildAPI):
  endpoint = '/cube-data'
@@ -163,6 +167,14 @@ class CubeCalculateAPI(ChildAPI):
  """
  return {'body': clearDataDTO}

+ @post('/sync/clearData')
+ def clear_data_ex(self, clearDataDTO: SyncClearDataDto) -> Union[Any, Awaitable[Any]]:
+ """
+ Clear the data matching the given financial data model name, folder id and dimension expression
+
+ """
+ return {'body': clearDataDTO}
+

  class ReactSpreadsheet(ChildAPI):
  endpoint = '/react/spreadsheet'
deepfos/api/models/dimension.py
@@ -542,6 +542,7 @@ class DimensionMemberListDto(BaseModel):


  class DimensionMemberOperationSw(BaseModel):
+ dataTypeInfo: Optional[str] = None
  #: Change type
  moveType: Optional[str] = None
  #: Payment type; specific to the account type
deepfos/api/models/financial_model.py
@@ -81,6 +81,7 @@ __all__ = [
  'RoleSetDataBlock',
  'FinancialDataDto',
  'TaskExecutionParam',
+ 'SyncClearDataDto',
  'TaskExecutionResult'
  ]

@@ -1788,6 +1789,29 @@ class FinancialDataDto(BaseModel):
  datatable: Optional[DatatableInfoDto] = None


+ class SyncClearDataDto(BaseModel):
+ """Cube clear data dto
+
+ .. admonition:: Referenced API
+
+ - **POST** ``/cubeCalculate/sync/clearData``
+ """
+ #: Financial model name
+ cubeName: str
+ #: Financial model folder id
+ folderId: Optional[str] = None
+ #: Financial model path
+ path: Optional[str] = None
+ #: Deletion scope
+ clearScriptList: Optional[List[str]] = None
+ #: entryMode
+ entryMode: Optional[int] = None
+ #: entryObject
+ entryObject: Optional[str] = None
+ #: Data audit switch
+ dataAuditSwitch: Optional[bool] = None
+
+
  CubeAccessControlViewReact.update_forward_refs()
  DatatableColumnDto.update_forward_refs()
  DatatableInfoDto.update_forward_refs()
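Together, the new `clear_data_ex` endpoint and the `SyncClearDataDto` model above form the synchronous clear-data call that `finmodel.delete` uses later in this diff. A minimal sketch of building the request body, assuming an already-initialized `FinancialModelAPI` client named `api` (how that client is obtained is not part of this diff, and the member values are purely illustrative):

```python
from deepfos.api.models.financial_model import SyncClearDataDto

# Only cubeName is required; the other fields are optional.
dto = SyncClearDataDto(
    cubeName='demo_cube',                       # hypothetical financial model name
    folderId='DIR_xxxxxxxx',                    # hypothetical folder id
    clearScriptList=['Year{2025}->Period{1}'],  # hypothetical dimension expression
    entryObject='python',
    dataAuditSwitch=False,
)

# With a ready FinancialModelAPI client (assumed, not shown in the diff):
# result = await api.calculate.clear_data_ex(dto)
```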
deepfos/element/deepmodel.py
@@ -304,20 +304,43 @@ class ObjectTypeFrame(BaseModel):
  return exclusive


- def _format_link(df: pd.DataFrame, link_name: str):
- if all(pd.isnull(df['target'])):
- return {'target': pd.NA}
+ def _format_link(link_df_fit: pd.DataFrame, link_name: str):
+ if link_df_fit.empty:
+ return pd.Series(dtype=object), False

- record = df.drop(columns=['source']).set_index('target')
-
- if not record.index.is_unique:
+ if link_df_fit.duplicated(subset=['source', 'target']).any():
  raise MultiLinkTargetNotUnique(
  f'Multi Link: [{link_name}] the source-to-target mapping '
  f'in the relation dataframe is not unique'
  )

- record = record.to_dict(orient='index')
- return {'prop': record, 'target': list(record.keys())}
+ prop_cols = [col for col in link_df_fit.columns if col not in ['source', 'target']]
+ has_props = bool(prop_cols)
+
+ if has_props:
+ sources = link_df_fit['source'].values
+ targets = link_df_fit['target'].values
+ unique_sources, source_indices = np.unique(sources, return_inverse=True)
+
+ prop_arrays = {col: link_df_fit[col].values for col in prop_cols}
+ result = {}
+
+ for i in range(len(unique_sources)):
+ idx = source_indices == i
+ source = unique_sources[i]
+ source_targets = targets[idx]
+
+ indices = np.where(idx)[0]
+ prop_dict = {
+ source_targets[j]: {col: prop_arrays[col][indices[j]] for col in prop_cols}
+ for j in range(len(source_targets))
+ }
+ result[source] = {'target': source_targets.tolist(), 'prop': prop_dict}
+ link = pd.Series(result, dtype=object)
+ else:
+ link = link_df_fit.groupby('source')['target'].agg(list)
+
+ return link, has_props


  class BaseField(PtrInfo):
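The rewritten `_format_link` now returns a per-source mapping plus a `has_props` flag instead of a per-group dict. The two output shapes can be illustrated on a toy relation dataframe using the same pandas calls; this is only a sketch of the shapes, not a call into the private helper itself:

```python
import pandas as pd

link_df = pd.DataFrame({
    'source': ['b1', 'b1', 'b2'],
    'target': ['t1', 't2', 't1'],
})

# Without extra property columns, the helper's `else` branch boils down to
# one list of targets per source:
link = link_df.groupby('source')['target'].agg(list)
print(link.to_dict())   # {'b1': ['t1', 't2'], 'b2': ['t1']}

# With a property column, each source maps to its target list plus a
# per-target property dict (the shape built in the `has_props` branch):
link_df['weight'] = [1, 2, 3]
result = {}
for source, grp in link_df.groupby('source'):
    result[source] = {
        'target': grp['target'].tolist(),
        'prop': {t: {'weight': int(w)} for t, w in zip(grp['target'], grp['weight'])},
    }
print(result['b1'])     # {'target': ['t1', 't2'], 'prop': {'t1': {'weight': 1}, 't2': {'weight': 2}}}
```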
@@ -486,7 +509,8 @@ def _iter_link_prop_assign(link, business_key, prop_name, prop_type, is_multi):
  def _iter_single_assign(
  field: PtrInfo,
  cast_type: str,
- target_main_field: Dict[str, MainField]
+ target_main_field: Dict[str, MainField],
+ error_on_empty_link: bool = False
  ) -> str:
  """
  Generate the assignment statement for a single field
@@ -495,6 +519,7 @@ def _iter_single_assign(
  field: field information
  cast_type: field type
  target_main_field: target field information
+ error_on_empty_link: whether to raise an exception when a linked value does not exist

  Returns:
  the assignment statement
@@ -517,30 +542,38 @@ def _iter_single_assign(
  main_field = target_main_field[link]

  if main_field.props:
- target = (
- cast_type + "{" +
- ",".join(
- _iter_link_prop_assign(link, main_field.business_key, name,
- field.prop_type[name], main_field.is_multi)
- for name in main_field.props
- ) + "}"
+ prop_assigns = ','.join(
+ _iter_link_prop_assign(link, main_field.business_key, name,
+ field.prop_type[name], main_field.is_multi)
+ for name in main_field.props
  )
+ prop_block = f" {{{prop_assigns}}}"
  else:
- target = cast_type
+ prop_block = ""

  if main_field.is_multi:
+ link_value = f"each_{link}"
+ else:
+ link_value = f"(json_get(item, '{link}'))"
+
+ if error_on_empty_link:
+ link_expr = f"(<{cast_type}><std::str>{link_value}){prop_block}"
+ else:
+ link_expr = f"(select detached {cast_type}{prop_block}\nfilter .{main_field.business_key} = <std::str>{link_value})"
+
+ if main_field.is_multi:
+ if main_field.props:
+ target_source = f"json_get(item, '{link}', 'target')"
+ else:
+ target_source = f"item['{link}']"
+
  assign += 'distinct (\n' + textwrap.indent(textwrap.dedent(f"""\
- for each_{link} in json_array_unpack(json_get(item, '{link}', 'target'))
+ for each_{link} in json_array_unpack({target_source})
  union (
- select detached {target}
- filter .{main_field.business_key} = <{main_field.type}>each_{link}
+ {link_expr}
  )"""), TAB) + '\n)'
  else:
- assign += textwrap.dedent(f"""\
- assert_single((
- select detached {target}
- filter .{main_field.business_key} = <{main_field.type}>(json_get(item, '{link}'))
- ))""")
+ assign += link_expr

  return assign

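The net effect of `error_on_empty_link` is a different query fragment for each link assignment. Both templates are plain f-strings in the hunk above, so the difference can be reproduced verbatim with toy values (`Author`, `code` and `author` are hypothetical object, business-key and link names):

```python
cast_type = 'Author'          # hypothetical linked object type
business_key = 'code'         # hypothetical business key of that object
link_value = "(json_get(item, 'author'))"
prop_block = ''               # no link properties in this example

# error_on_empty_link=True: direct cast; per the new docstring this path
# raises when the link target does not exist.
strict = f"(<{cast_type}><std::str>{link_value}){prop_block}"
# error_on_empty_link=False: select with a filter; a missing target simply
# yields an empty set, so the link is left unset.
lenient = f"(select detached {cast_type}{prop_block}\nfilter .{business_key} = <std::str>{link_value})"

print(strict)
# (<Author><std::str>(json_get(item, 'author')))
print(lenient)
# (select detached Author
# filter .code = <std::str>(json_get(item, 'author')))
```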
@@ -549,9 +582,10 @@ def bulk_insert_by_fields(
  object_name: str,
  field_type: List[PtrInfo],
  target_main_field: Dict[str, MainField],
+ error_on_empty_link: bool = False,
  ):
  insert_assign_body = ','.join([
- _iter_single_assign(field, field.type, target_main_field)
+ _iter_single_assign(field, field.type, target_main_field, error_on_empty_link)
  for field in field_type
  ])
  return textwrap.dedent(f"""
@@ -567,16 +601,17 @@ def bulk_upsert_by_fields(
  field_type: List[PtrInfo],
  target_main_field: Dict[str, MainField],
  exclusive_fields: Iterable[str],
- update_fields: Iterable[str]
+ update_fields: Iterable[str],
+ error_on_empty_link: bool = False,
  ):
  conflict_on_fields = map(lambda n: f'.{n}', exclusive_fields)
  insert_assign_body = ','.join([
- _iter_single_assign(field, field.type, target_main_field)
+ _iter_single_assign(field, field.type, target_main_field, error_on_empty_link)
  for field in field_type
  ])
  update_assign_body = ','.join(
  [
- _iter_single_assign(field, field.type, target_main_field)
+ _iter_single_assign(field, field.type, target_main_field, error_on_empty_link)
  for field in field_type if field.name in update_fields
  ]
  )
@@ -599,9 +634,10 @@ def bulk_update_by_fields(
  target_main_field: Dict[str, MainField],
  match_fields: Iterable[str],
  update_fields: Iterable[str],
+ error_on_empty_link: bool = False,
  ):
  update_assign_body = ','.join([
- _iter_single_assign(field, field.type, target_main_field)
+ _iter_single_assign(field, field.type, target_main_field, error_on_empty_link)
  for field in field_type if field.name in update_fields
  ])

@@ -1374,9 +1410,20 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  ]
  )
  link_df = temp_structure.fit(link_df)
- link = link_df.groupby('source').apply(_format_link, link_name=name)
- data = data.drop(columns=[name], errors='ignore')
- data = data.join(link.to_frame(name), on=bkey)
+ link, has_props = _format_link(link_df, name)
+
+ if not has_props:
+ data = data.drop(columns=[name], errors='ignore')
+ data = data.join(link.to_frame(name), on=bkey, how='left')
+ mask = data[name].isna()
+ if mask.any():
+ empty_series = pd.Series([[]] * mask.sum(), index=data[mask].index, dtype=object)
+ data.loc[mask, name] = empty_series
+ else:
+ bkey_values = data[bkey].values
+ mapped_values = np.array([link.get(key, []) for key in bkey_values], dtype=object)
+ data[name] = mapped_values
+
  return data

  async def _collect_bulk_qls(
@@ -1389,7 +1436,8 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  update_fields: Iterable[str] = None,
  exclusive_fields: Iterable[str] = None,
  match_fields: Iterable[str] = None,
- insert: bool = True
+ insert: bool = True,
+ error_on_empty_link: bool = False
  ) -> List[List[QueryWithArgs]]:
  if object_name in self.objects:
  obj = self.objects[object_name]
@@ -1425,10 +1473,10 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  if enable_upsert and update_fields:
  bulk_ql = bulk_upsert_by_fields(
  object_name, field_info, tgt_main_field,
- exclusive_fields, update_fields
+ exclusive_fields, update_fields, error_on_empty_link
  )
  else:
- bulk_ql = bulk_insert_by_fields(object_name, field_info, tgt_main_field)
+ bulk_ql = bulk_insert_by_fields(object_name, field_info, tgt_main_field, error_on_empty_link)
  else:
  if missing := (set(match_fields or [bkey]) - set(field_names)):
  raise ValueError(f"match fields: {missing} are not present in the provided data")
@@ -1437,7 +1485,7 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  if to_upd := (field_names - match_fields):
  bulk_ql = bulk_update_by_fields(
  object_name, field_info, tgt_main_field,
- match_fields, to_upd
+ match_fields, to_upd, error_on_empty_link
  )
  else:
  bulk_ql = None
@@ -1473,6 +1521,7 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  update_fields: Iterable[str] = None,
  exclusive_fields: Iterable[str] = None,
  commit_per_chunk: bool = False,
+ error_on_empty_link: bool = False,
  ) -> None:
  """Bulk-insert data based on the DataFrame's field information, executed in a transaction

@@ -1496,6 +1545,9 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  commit_per_chunk: whether to commit the transaction after each chunk is inserted,
  defaults to False, i.e. the transaction is committed only after all data has been inserted;
  this parameter only takes effect outside a start transaction context
+ error_on_empty_link: whether to raise an exception when a linked value does not exist,
+ defaults to False, i.e. the existence of the link target is not checked;
+ when set to True, the link target is checked and an exception is raised if it does not exist

  Notes:

@@ -1571,7 +1623,7 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
  qls = await self._collect_bulk_qls(
  object_name, data, relation, chunksize,
  enable_upsert, update_fields, exclusive_fields,
- insert=True
+ insert=True, error_on_empty_link=error_on_empty_link
  )
  if commit_per_chunk:
  for ql_chunk in qls:
@@ -1918,6 +1970,7 @@ class DeepModel(AsyncDeepModel, metaclass=SyncMeta):
  update_fields: Iterable[str] = None,
  exclusive_fields: Iterable[str] = None,
  commit_per_chunk: bool = False,
+ error_on_empty_link: bool = False,
  ) -> None:
  ...

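At the element level the flag is simply threaded from `insert_df` (and the other bulk helpers) down to the query builders above. A usage sketch under the assumption that `insert_df` takes the target object name followed by the DataFrame; the element name, object name and columns below are illustrative and not taken from the diff:

```python
import pandas as pd
from deepfos.element.deepmodel import DeepModel

dm = DeepModel('demo_deepmodel')   # hypothetical element name

df = pd.DataFrame({
    'code': ['B001', 'B002'],
    'name': ['Book 1', 'Book 2'],
    'author': ['A001', 'A404'],    # suppose 'A404' does not exist on the linked object
})

# Default behaviour: per the new docstring, link targets are not checked.
dm.insert_df('book', df)

# New in 1.1.76: check link targets and raise if one is missing.
dm.insert_df('book', df, error_on_empty_link=True)
```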
deepfos/element/dimension.py
@@ -1162,12 +1162,25 @@ def _validate_df_for_dimension(df: pd.DataFrame):
  f"You have null value in dataframe. "
  f"column: [{DFLT_NAME_COLUMN}], index: {null_index}.")

- if DFLT_PNAME_COLUMN not in df.columns:
- raise ValueError(f"Missing column [{DFLT_PNAME_COLUMN}] in dataframe.")
+ col_parent_name = None
+ col_shared_member = None
+
+ if DFLT_PNAME_COLUMN in df.columns:
+ col_parent_name = DFLT_PNAME_COLUMN
+ elif DFLT_PNAME_COLUMN_V12 in df.columns:
+ col_parent_name = DFLT_PNAME_COLUMN_V12

  if SHAREDMEMBER in df.columns:
+ col_shared_member = SHAREDMEMBER
+ elif SHAREDMEMBERV12 in df.columns:
+ col_shared_member = SHAREDMEMBERV12
+
+ if col_parent_name is None:
+ raise ValueError(f"Missing column [{DFLT_PNAME_COLUMN}] or [{DFLT_PNAME_COLUMN_V12}] in dataframe.")
+
+ if col_shared_member is not None:
  unique_df = df.groupby(
- [DFLT_NAME_COLUMN, DFLT_PNAME_COLUMN, SHAREDMEMBER],
+ [DFLT_NAME_COLUMN, col_parent_name, col_shared_member],
  as_index=False
  ).size()
  duplicated = unique_df[unique_df['size'] > 1]
deepfos/element/finmodel.py
@@ -44,7 +44,8 @@ from deepfos.api.models.financial_model import (
  PcParams, CopyCalculateDTO,
  TaskExecutionParam,
  ParameterDefineDto, # noqa
- FinancialDataDto
+ FinancialDataDto,
+ SyncClearDataDto,
  )
  from deepfos.api.models.base import BaseModel
  from deepfos.options import OPTION
@@ -71,6 +72,9 @@ def need_query(body: str):

  # -----------------------------------------------------------------------------
  # models
+ TypeDimensionExpr = Union[str, Dict[str, Union[List[str], str]]]
+
+
  class Description(BaseModel):
  zh_cn: Optional[str] = Field(None, alias='zh-cn')
  en: Optional[str] = None
@@ -608,6 +612,8 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):

  See Also:
  :meth:`save_unpivot`
+ :meth:`complement_save`
+ :meth:`complement_save_unpivot`

  """
  data, pov = self._build_dataframe_for_save(data, pov, data_column, comment_column)
@@ -650,7 +656,7 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
  ):
  """Overwrite the given dimension scope and save the data

- Compared with:meth:`save`, before saving, `data` is completed to the Cartesian product defined by `expression`,
+ Compared with :meth:`save` , before saving, `data` is completed to the Cartesian product defined by `expression`,
  and cells not covered by `data` are filled with `None`

  Note:
@@ -664,14 +670,54 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
  Args:
  data: the data to save
  expression: the scope to overwrite (dimension expression)
- default_hierarchy: the hierarchy function used by default when a dimension is not specified in expression,
+ default_hierarchy: the hierarchy function used by default when a dimension is not specified in expression,
  i.e. filled in as `default_hierarchy(#root,0)`
  pov: Point Of View, either a dimension expression or key-value pairs.
  data_column: name of the data column
  comment_column: name of the comment column, defaults to VirtualMeasure_220922
- **kwargs: other parameters that can be passed to:meth:`save`
+ **kwargs: other parameters that can be passed to :meth:`save`
+
+ See Also:
+ :meth:`save`
+ :meth:`save_unpivot`
+ :meth:`complement_save_unpivot`

  """
+ if not self._backend_del_availiable:
+ return await self._legacy_complement_save(
+ data=data,
+ expression=expression,
+ default_hierarchy=default_hierarchy,
+ pov=pov,
+ data_column=data_column,
+ comment_column=comment_column,
+ **kwargs
+ )
+
+ await self.delete(
+ expression,
+ data_audit=False,
+ default_hierarchy=default_hierarchy
+ )
+ await self.save(
+ data=data,
+ pov=pov,
+ data_column=data_column,
+ comment_column=comment_column,
+ **kwargs,
+ )
+
+
+ async def _legacy_complement_save(
+ self,
+ data: pd.DataFrame,
+ expression: Union[str, Dict[str, Union[List[str], str]]],
+ default_hierarchy: str = "Base",
+ pov: Optional[Union[str, Dict[str, str]]] = None,
+ data_column: str = DFLT_DATA_COLUMN,
+ comment_column: str = DFLT_COMMENT_COLUMN,
+ **kwargs
+ ):
  data, pov = self._build_dataframe_for_save(data, pov, data_column, comment_column)
  if data.empty:
  return
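On a backend that supports the synchronous clearData call, `complement_save` therefore becomes "delete the expression scope without auditing, then save", while older backends keep the legacy client-side Cartesian-product path. A usage sketch; the element name, dimension names and column names are illustrative, and `data_column` is passed explicitly rather than relying on the default:

```python
import pandas as pd
from deepfos.element.finmodel import FinancialCube

cube = FinancialCube('demo_cube')   # hypothetical element name

data = pd.DataFrame({
    'Entity': ['E01', 'E01'],
    'Period': ['1', '2'],
    'amount': [100.0, 200.0],
})

# Cells inside the expression scope that are not present in `data` end up
# cleared (previously they were filled with None via the Cartesian product).
cube.complement_save(
    data,
    expression={'Entity': 'E01', 'Period': ['1', '2', '3']},
    pov={'Year': '2025', 'Scenario': 'Actual'},
    data_column='amount',
)
```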
@@ -795,14 +841,49 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
  default_hierarchy: the hierarchy function used by default when a dimension is not specified in expression,
  i.e. filled in as `default_hierarchy(#root,0)`
  pov: Point Of View, either a dimension expression or key-value pairs.
- need_check: whether the java API should validate dirty data
- data_audit: whether to record to the data audit
- chunksize: the maximum number of dataframe rows per call to the save API.
- When data has more rows than this, it is saved in several batches.
  save_nan: when unpivoting the data-column members onto rows, whether rows whose data is empty are saved
- callback: whether to call back
+ **kwargs: other parameters that can be passed to :meth:`save_unpivot`
+
+ See Also:
+ :meth:`save`
+ :meth:`save_unpivot`
+ :meth:`complement_save`

  """
+ if not self._backend_del_availiable:
+ return await self._legacy_complement_save_unpivot(
+ data=data,
+ unpivot_dim=unpivot_dim,
+ expression=expression,
+ default_hierarchy=default_hierarchy,
+ pov=pov,
+ save_nan=save_nan,
+ **kwargs
+ )
+
+ await self.delete(
+ expression,
+ data_audit=False,
+ default_hierarchy=default_hierarchy
+ )
+ await self.save_unpivot(
+ data=data,
+ unpivot_dim=unpivot_dim,
+ pov=pov,
+ save_nan=save_nan,
+ **kwargs,
+ )
+
+ async def _legacy_complement_save_unpivot(
+ self,
+ data: pd.DataFrame,
+ unpivot_dim: str,
+ expression: Union[str, Dict[str, Union[List[str], str]]],
+ default_hierarchy: str = "Base",
+ pov: Optional[Union[str, Dict[str, str]]] = None,
+ save_nan: bool = False,
+ **kwargs
+ ):
  data, pov = self._build_dataframe_for_save_unpivot(
  data, unpivot_dim, pov, save_nan
  )
@@ -989,13 +1070,33 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):

  return await self.mdx_execution(script)

+ @future_property
+ async def _server_version(self) -> Tuple[int, ...]:
+ api: FinancialModelAPI = await self.wait_for('async_api')
+ version = await api.extra.git_version()
+ if version.lower().startswith('v'):
+ version = version[1:]
+ parts = []
+ for part in version.split('.'):
+ try:
+ parts.append(int(part))
+ except (TypeError, ValueError):
+ continue
+ return tuple(parts)
+
+ @future_property
+ async def _backend_del_availiable(self):
+ version = await self.__class__._server_version.wait_for(self)
+ return version >= (1, 1, 1, 2, 1)
+
  async def delete(
  self,
- expression: Union[str, Dict[str, Union[List[str], str]]],
+ expression: Union[TypeDimensionExpr, List[TypeDimensionExpr]],
  chunksize: Optional[int] = None,
  use_mdx: bool = False,
- callback: bool = True,
+ callback: bool = False,
  data_audit: bool = True,
+ default_hierarchy: str = "Base",
  ):
  """Delete data

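The backend capability check added here reduces to parsing the string returned by the new `git-version` endpoint into an integer tuple and comparing it against a threshold. The parsing rule can be verified standalone (this mirrors the logic of `_server_version` above; it is not the property itself):

```python
from typing import Tuple


def parse_git_version(version: str) -> Tuple[int, ...]:
    """Parse a git-version string the same way _server_version does."""
    if version.lower().startswith('v'):
        version = version[1:]
    parts = []
    for part in version.split('.'):
        try:
            parts.append(int(part))
        except (TypeError, ValueError):
            continue    # non-numeric segments are skipped
    return tuple(parts)


assert parse_git_version('v1.1.1.2.3') == (1, 1, 1, 2, 3)
# Tuple comparison is element-wise, which is what _backend_del_availiable uses:
assert parse_git_version('v1.1.1.2.3') >= (1, 1, 1, 2, 1)
assert not parse_git_version('v1.1.1.1.9') >= (1, 1, 1, 2, 1)
```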
@@ -1013,6 +1114,8 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
  use_mdx: whether to use the MDX-script implementation; defaults to False, equivalent to calling :meth:`delete_with_mdx`
  callback: whether to call back
  data_audit: whether to record the audit log
+ default_hierarchy: the hierarchy function used by default when a dimension is not specified in expression,
+ i.e. filled in as `default_hierarchy(#root,0)`

  .. admonition:: Example

@@ -1037,6 +1140,63 @@
  :meth:`insert_null` :meth:`delete_with_mdx`

  """
+ if (
+ not self._backend_del_availiable
+ or use_mdx
+ or callback
+ ):
+ if isinstance(expression, list):
+ raise ValueError(
+ f"pass expresssion as list is not yet supported. "
+ f"backend version: {self._server_version}")
+ return await self._legacy_delete(
+ expression,
+ chunksize=chunksize,
+ use_mdx=use_mdx,
+ callback=callback,
+ data_audit=data_audit,
+ )
+
+ if not isinstance(expression, list):
+ expression = [expression]
+
+ clear_scopes = []
+ for expr in expression:
+ if isinstance(expr, dict):
+ expr = dict_to_expr(expr)
+
+ expr_str, pov = self._split_expr(
+ expr, {},
+ default_hierarchy=default_hierarchy,
+ validate_expr=True
+ )
+ expr_parts = []
+ if expr_str:
+ expr_parts.append(expr_str)
+ if pov:
+ expr_parts.append(dict_to_expr(pov))
+
+ clear_scopes.append("->".join(expr_parts))
+
+ return await self.async_api.calculate.clear_data_ex(
+ SyncClearDataDto(
+ cubeName=self.element_name,
+ folderId=self.element_info.folderId,
+ clearScriptList=clear_scopes,
+ entryMode=self.entry_mode,
+ entryObject='python',
+ dataAuditSwitch=data_audit
+ )
+ )
+
+ async def _legacy_delete(
+ self,
+ expression: Union[str, Dict[str, Union[List[str], str]]],
+ chunksize: Optional[int] = None,
+ use_mdx: bool = False,
+ callback: bool = True,
+ data_audit: bool = True,
+ ):
  if use_mdx:
  return await self.delete_with_mdx(expression)

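With the version gate in place, `delete` now accepts either one expression or a list of them, turns each into a clear script via `_split_expr`, and clears everything with a single `clear_data_ex` call; `use_mdx=True`, `callback=True` or an older backend fall back to `_legacy_delete`, which still rejects list input. A usage sketch; the element name, dimension names and members are illustrative:

```python
from deepfos.element.finmodel import FinancialCube

cube = FinancialCube('demo_cube')   # hypothetical element name

# A single scope, as before:
cube.delete({'Entity': 'E01', 'Year': '2025', 'Period': ['1', '2', '3']})

# New: several scopes cleared in one backend call; dimensions missing from
# an expression are filled with default_hierarchy(#root,0).
cube.delete(
    [
        {'Entity': 'E01', 'Year': '2025'},
        {'Entity': 'E02', 'Year': '2024'},
    ],
    data_audit=False,
    default_hierarchy='Base',
)
```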
@@ -1652,7 +1812,9 @@ class FinancialCube(AsyncFinancialCube, metaclass=SyncMeta):
  expression: Union[str, Dict[str, Union[List[str], str]]],
  chunksize: Optional[int] = None,
  use_mdx: bool = False,
- callback: bool = True
+ callback: bool = True,
+ data_audit: bool = True,
+ default_hierarchy: str = "Base",
  ):
  ...