deepfos 1.1.53.tar.gz → 1.1.55.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {deepfos-1.1.53 → deepfos-1.1.55}/CHANGELOG.md +19 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/PKG-INFO +1 -1
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/_version.py +3 -3
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/deepmodel.py +1 -1
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/deepmodel.py +286 -147
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/finmodel.py +112 -39
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/serutils.py +34 -9
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/sysutils.py +19 -5
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/PKG-INFO +1 -1
- {deepfos-1.1.53 → deepfos-1.1.55}/.gitattributes +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/.gitee/ISSUE_GUIDELINES.md +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/.gitee/ISSUE_TEMPLATE.md +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/.gitignore +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/MANIFEST.in +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/README.md +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/algo/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/algo/graph.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/business_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/models/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/models/business_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_1/models/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_2/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_2/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_2/models/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/V1_2/models/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/account.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/accounting_engines.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/app.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/approval_process.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/base.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/business_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/consolidation.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/consolidation_process.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/datatable.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/deep_pipeline.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/deepconnector.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/deepfos_task.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/deepmodel.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/financial_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/journal_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/journal_template.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/memory_financial_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/account.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/accounting_engines.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/app.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/approval_process.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/base.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/business_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/consolidation.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/consolidation_process.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/datatable_mysql.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/deep_pipeline.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/deepconnector.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/deepfos_task.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/financial_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/journal_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/journal_template.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/memory_financial_model.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/platform.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/python.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/reconciliation_engine.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/reconciliation_report.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/role_strategy.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/smartlist.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/space.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/system.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/variable.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/models/workflow.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/platform.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/python.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/reconciliation_engine.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/reconciliation_report.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/role_strategy.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/smartlist.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/space.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/system.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/variable.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/api/workflow.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/jstream.c +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/jstream.pyx +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/pandas.c +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/pandas.pyx +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/py_jstream.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/boost/py_pandas.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/cache.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/config.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/_base.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/constants.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/cube.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/formula.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/syscube.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/typing.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/cube/utils.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/_base.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/dimcreator.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/dimexpr.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/dimmember.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/eledimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/filters.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/dimension/sysdimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/_cache.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/_operator.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/nodemixin.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/sqlcondition.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/core/logictable/tablemodel.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/cipher.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/clickhouse.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/connector.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/daclickhouse.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/dameng.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/damysql.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/dbkits.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/deepengine.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/deepmodel.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/deepmodel_kingbase.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/edb.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/gauss.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/kingbase.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/mysql.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/oracle.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/postgresql.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/sqlserver.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/db/utils.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/accounting.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/apvlprocess.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/base.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/bizmodel.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/datatable.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/deep_pipeline.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/deepconnector.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/dimension.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/fact_table.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/journal.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/journal_template.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/pyscript.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/reconciliation.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/rolestrategy.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/smartlist.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/variable.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/element/workflow.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/exceptions/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/exceptions/hook.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lazy.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/__init__.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/_javaobj.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/asynchronous.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/concurrency.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/constant.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/decorator.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/deepchart.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/deepux.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/discovery.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/edb_lexer.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/eureka.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/filterparser.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/httpcli.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/jsonstreamer.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/msg.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/nacos.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/patch.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/redis.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/stopwatch.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/subtask.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/lib/utils.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/local.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/options.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos/translation.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/SOURCES.txt +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/dependency_links.txt +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/not-zip-safe +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/requires.txt +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/deepfos.egg-info/top_level.txt +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/requirements.txt +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/setup.cfg +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/setup.py +0 -0
- {deepfos-1.1.53 → deepfos-1.1.55}/versioneer.py +0 -0
```diff
--- deepfos-1.1.53/CHANGELOG.md
+++ deepfos-1.1.55/CHANGELOG.md
@@ -1,3 +1,22 @@
+## [1.1.55] - 2025-04-25
+
+### Added
+* DeepModel supports update_df
+* Financial model supports complement_save
+
+### Changed
+
+* Fixed globals set inside a DeepModel transaction not taking effect
+* DeepModel now handles json-type data consistently across online and offline (direct-access) modes, so insert_df and query_df read and write the same format
+
+
+## [1.1.54] - 2025-03-27
+
+### Changed
+
+* Updated model definitions to follow the DeepModel component API changes
+
+
 ## [1.1.53] - 2025-02-25
 
 ### Added
```
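The headline addition in 1.1.55 is `update_df` on `AsyncDeepModel`/`DeepModel` (the full method appears in the element/deepmodel.py hunks below). A minimal usage sketch, assuming a sync `DeepModel` element and an object whose business key is `code`; the element name, object name and columns are illustrative, not from the package:

```python
import pandas as pd
from deepfos.element.deepmodel import DeepModel

dm = DeepModel(element_name='my_deepmodel')  # hypothetical element name

data = pd.DataFrame({
    'code': ['P001', 'P002'],   # business key: the default match condition
    'price': [10.5, 99.0],      # every other column becomes an update field
})

# Updates rows of the `product` object in chunks of 500, matching on the business key.
dm.update_df('product', data)

# match_fields overrides the match condition; the fields must appear in `data`.
dm.update_df('product', data, match_fields=['code'], chunksize=200)
```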
```diff
--- deepfos-1.1.53/deepfos/_version.py
+++ deepfos-1.1.55/deepfos/_version.py
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2025-
+ "date": "2025-04-27T01:54:31+0000",
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "1.1.53"
+ "full-revisionid": "8a98c948a65577c31df7cff7c2a512e8d71de104",
+ "version": "1.1.55"
 }
 ''' # END VERSION_JSON
 
```
```diff
--- deepfos-1.1.53/deepfos/api/models/deepmodel.py
+++ deepfos-1.1.55/deepfos/api/models/deepmodel.py
@@ -128,6 +128,7 @@ class ObjectElement(ObjectParam):
 class QueryWithArgs(BaseModel):
     commands: str
     kwargs: Dict[str, Any] = Field(default_factory=dict)
+    globals: Dict[str, Any] = Field(default_factory=dict)
 
 
 class MainField(NamedTuple):
```
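The new `globals` field lets each command passed to `AsyncDeepModel.execute` carry the globals to apply while it runs (see the transaction hunk further down). A sketch of building such a command by hand; the object, field and global names are made up:

```python
from deepfos.api.models.deepmodel import QueryWithArgs

ql = QueryWithArgs(
    commands="update Order filter .code = <str>$code set { status := 'closed' }",
    kwargs={'code': 'SO-001'},
    globals={'MySpace::current_user_id': 'u-123'},  # applied only to this command
)
# await dm.execute([ql])
```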
```diff
--- deepfos-1.1.53/deepfos/element/deepmodel.py
+++ deepfos-1.1.55/deepfos/element/deepmodel.py
@@ -370,11 +371,7 @@ class FieldJson(BaseField):
         df[field_name] = df[field_name].apply(self.format_json)
 
     def cast(self, df: pd.DataFrame, field_name: str, direct_access: bool = True):
-
-        # since json value will be converted to json string(type: str)
-        # in edgedb python protocol
-        if not direct_access:
-            df[field_name] = df[field_name].apply(self.format_json)
+        pass
 
 
 class FieldInt(BaseField):
```
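`FieldJson.cast` becoming a no-op is the code behind the changelog entry on json consistency: values read over the direct-access protocol are no longer re-serialized, so `query_df` returns json fields in the same shape that `insert_df` accepts. A rough illustration of the intended round trip; the object and column names are invented:

```python
import pandas as pd

df_in = pd.DataFrame([{'code': 'A1', 'extra': {'color': 'red', 'qty': 3}}])
# dm.insert_df('product', df_in)
# df_out = dm.query_df("select product {code, extra}")
# df_out['extra'][0] is expected to match df_in['extra'][0] in both direct-access and HTTP modes.
```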
```diff
@@ -488,6 +485,9 @@ def _iter_single_assign(
     assign_string = f"{field.name} := "
     # set scalar value
     if field.name not in target_main_field:
+        if field.is_multi:
+            return assign_string + f"json_array_unpack(item['{field.name}'])"
+
         assign_string += f"<{cast_type}>"
 
         if cast_type in NEED_CAST_STR:
```
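The effect is that multi (array-valued) non-link fields are now assigned by unpacking the json array instead of casting the whole value. A small sketch of the two shapes of assignment string the helper produces; the field names are illustrative:

```python
# single-valued field, cast as before:
#   "price := <std::float64>item['price']"
# multi field, new branch:
#   "tags := json_array_unpack(item['tags'])"
field_name = 'tags'
assign = f"{field_name} := " + f"json_array_unpack(item['{field_name}'])"
print(assign)  # tags := json_array_unpack(item['tags'])
```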
```diff
@@ -590,10 +590,10 @@ def bulk_upsert_by_fields(
 
 def bulk_update_by_fields(
     object_name: str,
-    business_key: str,
     field_type: List[PtrInfo],
     target_main_field: Dict[str, MainField],
-
+    match_fields: Iterable[str],
+    update_fields: Iterable[str],
 ):
     update_assign_body = ','.join(
         [
```
```diff
@@ -602,11 +602,20 @@ def bulk_update_by_fields(
         ]
     )
 
+    field_type_map = {field.name: field.type for field in field_type}
+
+    match_str = " and ".join(
+        [
+            f".{name} = <{field_type_map.get(name, 'std::str')}>item['{name}']"
+            for name in match_fields
+        ]
+    )
+
     return f"""
     with raw_data := <json>to_json(<std::str>${BATCH_INSERT_KW}),
     for item in json_array_unpack(raw_data) union (
         update {object_name}
-        filter
+        filter {match_str}
         set {{
             {update_assign_body}
         }}
```
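`bulk_update_by_fields` now builds its filter from `match_fields`, joining one comparison per field with `and`. Roughly, for `match_fields=['code']` and a single update field `name` on an object `Product`, the generated QL has this shape (`$data` stands in for the internal batch keyword; all names are illustrative):

```python
ql = """
with raw_data := <json>to_json(<std::str>$data),
for item in json_array_unpack(raw_data) union (
    update Product
    filter .code = <std::str>item['code']
    set {
        name := <std::str>item['name']
    }
)
"""
```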
```diff
@@ -777,6 +786,7 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         self.direct_access = direct_access
         self.alias = AliasGenerator()
         self.pg_dsn = pg_dsn
+        self._globals = {}
 
     @future_property
     async def client(self):
```
```diff
@@ -799,12 +809,12 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
             dbname=dbname
         )
         if user_id := OPTION.api.header.get('user'):
-
-
-
-
-
-
+            default_globals = {
+                f'{self.spacemodule}::current_user_id':
+                    user_id
+            }
+            client = client.with_globals(**default_globals)
+            self._globals = client._options.state._globals
         return client
 
     @future_property
```
```diff
@@ -899,12 +909,18 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         """
 
         if self.direct_access:
-            logger.opt(lazy=True).debug(f"Query: [{ql}]
+            logger.opt(lazy=True).debug(f"Query: [{ql}],\n"
+                                        f"kwargs: [{kwargs}],\n"
+                                        f"globals: [{self._globals}].")
             _, result = await self.client.query(ql, **kwargs)
             return result
 
-        result = await
-
+        result = await self._http_query(ql, **kwargs)
+        field_info = {
+            fi.name: fi.type if fi.fields is None else fi.fields
+            for fi in result.objectInfos[0].fields
+        } if result.objectInfos else {}
+        return serutils.deserialize(result.json_, field_info)
 
     async def query(self, ql: str, **kwargs) -> List[Any]:
         """Execute the ql query statement and return the serialized result
```
```diff
@@ -934,7 +950,9 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
 
         """
         if self.direct_access:
-            logger.opt(lazy=True).debug(f"Query: [{ql}]
+            logger.opt(lazy=True).debug(f"Query: [{ql}],\n"
+                                        f"kwargs: [{kwargs}],\n"
+                                        f"globals: [{self._globals}].")
             frame_desc, result = await self.client.query(ql, **kwargs)
             return serutils.serialize(
                 result, ctx=serutils.Context(frame_desc=frame_desc)
```
```diff
@@ -995,7 +1013,7 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         else:
             result = await self._http_query(ql, **kwargs)
             # No object structure info
-            if result.objectInfos
+            if not result.objectInfos:
                 return pd.DataFrame(data=result.json_)
 
             data = pd.DataFrame(
```
```diff
@@ -1015,6 +1033,10 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
     query_object.__doc__ = query_object.__doc__ + DOC_ARGS_KWARGS
     query_df.__doc__ = query_df.__doc__ + DOC_ARGS_KWARGS
 
+    def _ensure_client(self):
+        if self.direct_access:
+            self.client  # noqa
+
     @txn_support
     async def execute(
         self,
```
```diff
@@ -1030,8 +1052,11 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
            will automatically be used as arguments for every string-form ql
 
         """
+        self._ensure_client()
         if isinstance(qls, str):
-            qls_with_args = [QueryWithArgs(
+            qls_with_args = [QueryWithArgs(
+                commands=qls, kwargs=kwargs, globals=self._globals
+            )]
         else:
             qls_with_args = []
             seen_kwargs_key = set()
```
```diff
@@ -1049,7 +1074,9 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
                     seen_kwargs_key = seen_kwargs_key.union(ql.kwargs.keys())
 
                 elif isinstance(ql, str):
-                    qls_with_args.append(QueryWithArgs(
+                    qls_with_args.append(QueryWithArgs(
+                        commands=ql, kwargs=kwargs, globals=self._globals
+                    ))
                 else:
                     raise TypeError(f'qls参数中出现类型非法成员:{type(ql)}')
 
```
```diff
@@ -1089,12 +1116,25 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
             async with tx:
                 for ql in qls_with_args:
                     logger.opt(lazy=True).debug(
-                        f"Execute QL: [{ql.commands}],
+                        f"Execute QL: [{ql.commands}],"
+                        f"\nkwargs: [{ql.kwargs}],"
+                        f"\nglobals: [{ql.globals}]."
                     )
-
-
-
-
+                    if ql.globals:
+                        bak_cli = tx._client
+                        tx._client = tx._client.with_globals(**ql.globals)
+                        try:
+                            desc, affected = await tx.execute(ql.commands, **ql.kwargs)
+                            result.append(serutils.serialize(
+                                affected, ctx=serutils.Context(frame_desc=desc)
+                            ))
+                        finally:
+                            tx._client = bak_cli
+                    else:
+                        desc, affected = await tx.execute(ql.commands, **ql.kwargs)
+                        result.append(serutils.serialize(
+                            affected, ctx=serutils.Context(frame_desc=desc)
+                        ))
             if len(result) == 1:
                 return result[0]
             return result
```
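This is the fix for "globals set inside a transaction not taking effect": every `QueryWithArgs` now carries its globals, and the transaction temporarily swaps in a client with those globals before executing the command. A usage sketch, assuming `with_globals` accepts the globals as keyword arguments (as the internal `client.with_globals(**...)` call suggests); the element, object and global names are illustrative:

```python
from deepfos.element.deepmodel import DeepModel

dm = DeepModel(element_name='my_deepmodel')  # hypothetical element name

with dm.start_transaction():
    with dm.with_globals(**{'MySpace::current_user_id': 'u-123'}):
        # the global travels with each QueryWithArgs and is re-applied inside the transaction
        dm.execute("insert Order { code := 'SO-001' }")
```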
```diff
@@ -1176,6 +1216,128 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         if exclusive not in valid_exclusive:
             raise ValueError(f"exclusive fields: {exclusive_fields} 没有相应唯一约束")
 
+    async def _get_bkey(
+        self,
+        obj: Union[ObjectTypeFrame, TargetField],
+        source: str = None,
+        name: str = None
+    ) -> str:
+        # prefer the business key from the object structure's annotations, otherwise query the API
+        if obj.info and BUSINESS_KEY in obj.info:
+            return obj.info[BUSINESS_KEY]
+        elif (code := obj.normalized_name) in self.model_objects:
+            return self.model_objects[code].businessKey
+
+        assert isinstance(obj, TargetField)
+        # Link targets an object outside this app, query it separately
+        tgt = ObjectElement.construct_from(self.model_objects[source]).links[name]
+        tgt_model_info = await self.async_api.object.info(
+            app=tgt.targetApp, object_code=tgt.targetObjectCode
+        )
+        return tgt_model_info.businessKey
+
+    async def _collect_bulk_field_info(self, object_name, structure, data, relation):
+        field_info = []
+        tgt_main_field = {}
+        for field in structure.fields.values():
+            if field.name not in data.columns:
+                continue
+
+            field_info.append(field)
+
+            if not field.is_link:
+                continue
+
+            is_multi = field.is_multi_link
+            name = field.name
+            # links to another object: record the target object's info
+            if is_multi:
+                link_props = set(relation[name].columns).intersection(field.props)
+            else:
+                link_props = set(
+                    c[len(f'{name}@')::]
+                    for c in data.columns if c.startswith(f'{name}@')
+                ).intersection(field.props)
+            tgt_bkey = await self._get_bkey(field.target, object_name, name)
+            tgt_main_field[name] = MainField(tgt_bkey, is_multi, link_props)
+        return field_info, tgt_main_field
+
+    def _collect_qls(
+        self,
+        data: pd.DataFrame,
+        ql: str,
+        chunksize: int,
+        qls: List[QueryWithArgs]
+    ):
+        self._ensure_client()
+        for i in range(0, len(data), chunksize):
+            part = data.iloc[i: i + chunksize]
+            kw_name = self.alias.get(BATCH_INSERT_KW)
+            qls.append(QueryWithArgs(
+                commands=ql.replace(
+                    f'${BATCH_INSERT_KW}', f'${kw_name}'
+                ),
+                kwargs={kw_name: part.to_json(
+                    orient='records', double_precision=15,
+                    force_ascii=False, default_handler=str
+                )},
+                globals=self._globals
+            ))
+
+    @staticmethod
+    def _split_self_link(data, relation, structure, bkey):
+        self_link_dfs = {}
+        for name in structure.self_link_fields:
+            field = structure.fields[name]
+            if (link_df := relation.get(name)) is not None:
+                link_props = set(link_df.columns).intersection(field.props)
+                self_link_dfs[name] = (
+                    structure.fit(data[[bkey, name]]),
+                    MainField(bkey, field.is_multi_link, link_props)
+                )
+                data = data.drop(columns=[name])
+            elif name in data.columns:
+                link_prop_cols = []
+                link_props = []
+
+                for col in data.columns:
+                    if (
+                        col.startswith(f'{name}@')
+                        and ((prop_name := col[len(f'{name}@')::]) in field.props)
+                    ):
+                        link_prop_cols.append(col)
+                        link_props.append(prop_name)
+
+                self_link_dfs[name] = (
+                    structure.fit(data[[bkey, name, *link_prop_cols]]),
+                    MainField(bkey, field.is_multi_link, link_props)
+                )
+                data = data.drop(columns=[name, *link_prop_cols])
+        return data, self_link_dfs
+
+    @staticmethod
+    def _merge_relation(data, relation, structure, bkey):
+        for name, link_df in relation.items():
+            if name not in structure.fields:
+                continue
+            field = structure.fields[name]
+            valid_cols = list({'source', 'target', *field.props} & set(link_df.columns))
+            link_df = link_df[valid_cols]
+            # for fit only
+            temp_structure = ObjectStructure(
+                field.type,
+                [
+                    PtrInfo(name='source', target=TargetField(name='std::str')),
+                    PtrInfo(name='target', target=TargetField(name='std::str')),
+                    *[PtrInfo(**prop.dict()) for prop in field.properties]
+                ]
+            )
+            link_df = temp_structure.fit(link_df)
+            link = link_df.groupby('source').apply(_format_link, link_name=name)
+            data = data.drop(columns=[name], errors='ignore')
+            data = data.join(link.to_frame(name), on=bkey)
+        return data
+
     @txn_support
     async def insert_df(
         self,
```
```diff
@@ -1297,38 +1459,15 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         data = self._merge_relation(data, relation, structure, bkey)
         # split self-link update info out of data
         data, self_link_dfs = self._split_self_link(data, relation, structure, bkey)
-
-
-
-        # collect the field info needed for the bulk insert
-        for field in structure.fields.values():
-            if field.name not in data.columns:
-                continue
-
-            field_info.append(field)
-
-            if not field.is_link:
-                continue
-
-            is_multi = field.is_multi_link
-            name = field.name
-            # links to another object: record the target object's info
-            if is_multi:
-                link_props = set(relation[name].columns).intersection(field.props)
-            else:
-                link_props = set(
-                    c[len(f'{name}@')::]
-                    for c in data.columns if c.startswith(f'{name}@')
-                ).intersection(field.props)
-            tgt_bkey = await self._get_bkey(field.target, object_name, name)
-            tgt_main_field[name] = MainField(tgt_bkey, is_multi, link_props)
-
+        field_info, tgt_main_field = await self._collect_bulk_field_info(
+            object_name, structure, data, relation
+        )
         field_names = set(map(lambda f: f.name, field_info))
         if enable_upsert:
             self._valid_upsert(obj, field_names, bkey, exclusive_fields, update_fields)
 
         exclusive_fields = set(exclusive_fields or {bkey}) & set(field_names)
-        update_fields = set(update_fields or (field_names- {bkey})) & set(field_names)
+        update_fields = set(update_fields or (field_names - {bkey})) & set(field_names)
         if enable_upsert and update_fields:
             insert_ql = bulk_upsert_by_fields(
                 object_name, field_info, tgt_main_field,
```
```diff
@@ -1343,30 +1482,13 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
             for update_field, (update_df, main_field) in self_link_dfs.items():
                 field = structure.fields[update_field]
                 update_ql = bulk_update_by_fields(
-                    object_name,
-
+                    object_name, [field], {update_field: main_field},
+                    [bkey], [update_field]
                 )
                 self._collect_qls(update_df, update_ql, chunksize, qls)
 
         await self.execute(qls)
 
-    def _collect_qls(
-        self,
-        data: pd.DataFrame,
-        ql: str,
-        chunksize: int,
-        qls: List[QueryWithArgs]
-    ):
-        for i in range(0, len(data), chunksize):
-            part = data.iloc[i: i + chunksize]
-            kw_name = self.alias.get(BATCH_INSERT_KW)
-            qls.append(QueryWithArgs(
-                commands=ql.replace(
-                    f'${BATCH_INSERT_KW}', f'${kw_name}'
-                ),
-                kwargs={kw_name: part.to_json(orient='records', double_precision=15)}
-            ))
-
     async def get_object(
         self,
         object_name: str,
```
```diff
@@ -1395,80 +1517,6 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
 
         return format_obj(objs[0])
 
-    async def _get_bkey(
-        self,
-        obj: Union[ObjectTypeFrame, TargetField],
-        source: str = None,
-        name: str = None
-    ) -> str:
-        # prefer the business key from the object structure's annotations, otherwise query the API
-        if obj.info and BUSINESS_KEY in obj.info:
-            return obj.info[BUSINESS_KEY]
-        elif (code := obj.normalized_name) in self.model_objects:
-            return self.model_objects[code].businessKey
-
-        assert isinstance(obj, TargetField)
-        # Link targets an object outside this app, query it separately
-        tgt = ObjectElement.construct_from(self.model_objects[source]).links[name]
-        tgt_model_info = await self.async_api.object.info(
-            app=tgt.targetApp, object_code=tgt.targetObjectCode
-        )
-        return tgt_model_info.businessKey
-
-    @staticmethod
-    def _split_self_link(data, relation, structure, bkey):
-        self_link_dfs = {}
-        for name in structure.self_link_fields:
-            field = structure.fields[name]
-            if (link_df := relation.get(name)) is not None:
-                link_props = set(link_df.columns).intersection(field.props)
-                self_link_dfs[name] = (
-                    structure.fit(data[[bkey, name]]),
-                    MainField(bkey, field.is_multi_link, link_props)
-                )
-                data = data.drop(columns=[name])
-            elif name in data.columns:
-                link_prop_cols = []
-                link_props = []
-
-                for col in data.columns:
-                    if (
-                        col.startswith(f'{name}@')
-                        and ((prop_name := col[len(f'{name}@')::]) in field.props)
-                    ):
-                        link_prop_cols.append(col)
-                        link_props.append(prop_name)
-
-                self_link_dfs[name] = (
-                    structure.fit(data[[bkey, name, *link_prop_cols]]),
-                    MainField(bkey, field.is_multi_link, link_props)
-                )
-                data = data.drop(columns=[name, *link_prop_cols])
-        return data, self_link_dfs
-
-    @staticmethod
-    def _merge_relation(data, relation, structure, bkey):
-        for name, link_df in relation.items():
-            if name not in structure.fields:
-                continue
-            field = structure.fields[name]
-            valid_cols = list({'source', 'target', *field.props} & set(link_df.columns))
-            link_df = link_df[valid_cols]
-            # for fit only
-            temp_structure = ObjectStructure(
-                field.type,
-                [
-                    PtrInfo(name='source', target=TargetField(name='std::str')),
-                    PtrInfo(name='target', target=TargetField(name='std::str')),
-                    *[PtrInfo(**prop.dict()) for prop in field.properties]
-                ]
-            )
-            link_df = temp_structure.fit(link_df)
-            link = link_df.groupby('source').apply(_format_link, link_name=name)
-            data = data.drop(columns=[name], errors='ignore')
-            data = data.join(link.to_frame(name), on=bkey)
-        return data
-
     async def insert_df_pg(
         self,
         object_name: str,
```
```diff
@@ -1584,6 +1632,80 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         finally:
             await conn.close()
 
+    @txn_support
+    async def update_df(
+        self,
+        object_name: str,
+        data: pd.DataFrame,
+        relation: Dict[str, pd.DataFrame] = None,
+        chunksize: int = 500,
+        match_fields: Iterable[str] = None,
+    ) -> None:
+        """Bulk-update data in a transaction based on the DataFrame's field info
+
+        The business key is the match condition; fields other than the business key become the update fields
+
+        Args:
+            object_name: name of the object to update; must belong to the current app
+            data: the data to write; if there are single link properties,
+                provide them as columns named link_name@link_property_name
+            relation: for multi links, a dict supplying link target info;
+                keys are link field names, values are mapping DataFrames
+                whose source column holds the business key of the updated object,
+                the target column holds the business key of the link target,
+                and link properties go in extra columns named after the property
+            chunksize: maximum number of rows per statement
+            match_fields: match fields for the update; they must appear in data or relation, defaults to the business key
+        """
+        if data.empty:
+            logger.info("data为空,无DML执行")
+            return
+
+        if object_name in self.objects:
+            obj = self.objects[object_name]
+        else:
+            raise ObjectNotExist(
+                f'DeepModel对象[{object_name}]在当前应用不存在,无法更新数据'
+            )
+        if obj.external:
+            raise ExternalObjectReadOnly('外部对象只可读')
+
+        structure = ObjectStructure(name=obj.name, structure=obj.fields.values())
+        self._valid_data(data, object_name, relation, structure)
+        relation = relation or {}
+        bkey = await self._get_bkey(obj)
+        # join the relation DataFrames onto data
+        data = self._merge_relation(data, relation, structure, bkey)
+        # split self-link update info out of data
+        data, self_link_dfs = self._split_self_link(data, relation, structure, bkey)
+        field_info, tgt_main_field = await self._collect_bulk_field_info(
+            object_name, structure, data, relation
+        )
+        field_names = set(map(lambda f: f.name, field_info))
+
+        if missing := (set(match_fields) - set(field_names)):
+            raise ValueError(f"match fields: {missing} 不在提供的数据中")
+
+        match_fields = set(match_fields or {bkey}) & set(field_names)
+        update_ql = bulk_update_by_fields(
+            object_name, field_info, tgt_main_field,
+            match_fields, field_names - match_fields
+        )
+        qls = []
+        self._collect_qls(structure.fit(data), update_ql, chunksize, qls)
+        if self_link_dfs:
+            for update_field, (update_df, main_field) in self_link_dfs.items():
+                field = structure.fields[update_field]
+                update_ql = bulk_update_by_fields(
+                    object_name, [field],
+                    {update_field: main_field},
+                    [bkey],
+                    [update_field]
+                )
+                self._collect_qls(update_df, update_ql, chunksize, qls)
+
+        await self.execute(qls)
+
     @asynccontextmanager
     async def start_transaction(self, flatten: bool = False):
         """Start a transaction
```
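For multi links, `update_df` takes the same `relation` mapping as `insert_df`: one `source`/`target` DataFrame per link field, with extra columns for link properties. An illustrative call; the object, link and column names are made up:

```python
import pandas as pd

data = pd.DataFrame({'code': ['SO-001'], 'status': ['closed']})
relation = {
    'items': pd.DataFrame({
        'source': ['SO-001', 'SO-001'],  # business key of the updated object
        'target': ['P001', 'P002'],      # business key of the link target
        'qty': [2, 5],                   # optional link property column
    })
}
# dm.update_df('order', data, relation=relation)
```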
```diff
@@ -1661,11 +1783,14 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
             raise NotImplemented('非直连模式不支持设置state信息')
         else:
             bak_cli = self.client
+            bak_globals = self.client._options.state._globals
             try:
                 self.client = self.client.with_globals(**globals_)
+                self._globals = self.client._options.state._globals
                 yield
             finally:
                 self.client = bak_cli
+                self._globals = bak_globals
 
     @contextmanager
     def without_globals(self, *global_names):
```
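Both context managers now keep the element's `_globals` mirror in sync with the client, so commands built while they are active (which attach `self._globals` to each `QueryWithArgs`) see the current values. A brief sketch with an illustrative element and global name:

```python
from deepfos.element.deepmodel import DeepModel

dm = DeepModel(element_name='my_deepmodel')  # hypothetical element name

with dm.without_globals('MySpace::current_user_id'):
    dm.execute("delete TempRecord")  # runs without the user-id global applied
```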
```diff
@@ -1676,18 +1801,22 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
             raise NotImplemented('非直连模式不支持设置state信息')
         else:
             bak_cli = self.client
+            bak_globals = self.client._options.state._globals
             try:
                 self.client = self.client.without_globals(*global_names)
+                self._globals = self.client._options.state._globals
                 yield
             finally:
                 self.client = bak_cli
+                self._globals = bak_globals
 
 
 class DeepModel(AsyncDeepModel, metaclass=SyncMeta):
     synchronize = (
         'query_object', 'query', 'query_df',
         'execute', 'get_object',
-        'insert_df', 'insert_df_pg'
+        'insert_df', 'insert_df_pg',
+        'update_df',
     )
 
     if TYPE_CHECKING:  # pragma: no cover
```
```diff
@@ -1737,6 +1866,16 @@ class DeepModel(AsyncDeepModel, metaclass=SyncMeta):
         ) -> None:
             ...
 
+        def update_df(
+            self,
+            object_name: str,
+            data: pd.DataFrame,
+            relation: Dict[str, pd.DataFrame] = None,
+            chunksize: int = 500,
+            match_fields: Iterable[str] = None,
+        ) -> None:
+            ...
+
     @contextmanager
     def start_transaction(self, flatten: bool = False):
         """Start a transaction
```