deepfos 1.1.62__tar.gz → 1.1.64__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {deepfos-1.1.62 → deepfos-1.1.64}/CHANGELOG.md +16 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/PKG-INFO +1 -1
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/_version.py +3 -3
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/deepconnector.py +3 -3
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/deepconnector.py +1 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/workflow.py +3 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/deepconnector.py +34 -1
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/deepmodel.py +66 -40
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/finmodel.py +34 -25
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/workflow.py +41 -3
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/sysutils.py +6 -7
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/utils.py +61 -1
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/PKG-INFO +1 -1
- {deepfos-1.1.62 → deepfos-1.1.64}/.gitattributes +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/.gitee/ISSUE_GUIDELINES.md +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/.gitee/ISSUE_TEMPLATE.md +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/.gitignore +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/MANIFEST.in +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/README.md +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/algo/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/algo/graph.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/business_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/models/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/models/business_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_1/models/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_2/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_2/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_2/models/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/V1_2/models/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/account.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/accounting_engines.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/app.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/approval_process.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/base.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/business_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/consolidation.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/consolidation_process.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/datatable.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/deep_pipeline.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/deepfos_task.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/deepmodel.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/financial_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/journal_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/journal_template.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/memory_financial_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/account.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/accounting_engines.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/app.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/approval_process.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/base.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/business_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/consolidation.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/consolidation_process.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/datatable_mysql.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/deep_pipeline.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/deepfos_task.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/deepmodel.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/financial_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/journal_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/journal_template.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/memory_financial_model.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/platform.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/python.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/reconciliation_engine.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/reconciliation_report.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/role_strategy.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/smartlist.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/space.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/system.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/models/variable.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/platform.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/python.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/reconciliation_engine.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/reconciliation_report.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/role_strategy.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/smartlist.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/space.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/system.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/variable.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/api/workflow.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/jstream.c +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/jstream.pyx +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/pandas.c +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/pandas.pyx +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/py_jstream.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/boost/py_pandas.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/cache.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/config.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/_base.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/constants.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/cube.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/formula.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/syscube.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/typing.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/cube/utils.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/_base.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/dimcreator.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/dimexpr.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/dimmember.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/eledimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/filters.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/dimension/sysdimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/_cache.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/_operator.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/nodemixin.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/sqlcondition.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/core/logictable/tablemodel.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/cipher.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/clickhouse.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/connector.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/daclickhouse.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/dameng.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/damysql.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/dbkits.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/deepengine.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/deepmodel.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/deepmodel_kingbase.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/edb.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/gauss.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/kingbase.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/mysql.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/oracle.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/postgresql.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/sqlserver.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/db/utils.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/accounting.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/apvlprocess.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/base.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/bizmodel.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/datatable.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/deep_pipeline.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/dimension.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/fact_table.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/journal.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/journal_template.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/pyscript.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/reconciliation.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/rolestrategy.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/smartlist.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/element/variable.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/exceptions/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/exceptions/hook.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lazy.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/__init__.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/_javaobj.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/asynchronous.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/concurrency.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/constant.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/decorator.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/deepchart.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/deepux.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/discovery.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/edb_lexer.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/eureka.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/filterparser.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/httpcli.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/jsonstreamer.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/msg.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/nacos.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/patch.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/redis.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/serutils.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/stopwatch.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/lib/subtask.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/local.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/options.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos/translation.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/SOURCES.txt +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/dependency_links.txt +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/not-zip-safe +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/requires.txt +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/deepfos.egg-info/top_level.txt +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/requirements.txt +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/setup.cfg +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/setup.py +0 -0
- {deepfos-1.1.62 → deepfos-1.1.64}/versioneer.py +0 -0
```diff
--- deepfos-1.1.62/CHANGELOG.md
+++ deepfos-1.1.64/CHANGELOG.md
@@ -1,3 +1,19 @@
+## [1.1.64] - 2025-07-08
+
+### Updates
+
+* Updated the API used by the connector element to fetch connection info, and its password handling logic
+* Deferred instantiation of TaskUtil's api until TaskUtil itself is instantiated
+
+
+## [1.1.63] - 2025-07-01
+
+### Updates
+
+* The workflow "complete task instance" method now accepts an attachment list
+* Financial model and DeepModel batch operations support before/after-chunk alerts
+
+
 ## [1.1.62] - 2025-06-12
 
 ### Updates
```
```diff
--- deepfos-1.1.62/deepfos/_version.py
+++ deepfos-1.1.64/deepfos/_version.py
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
-    "date": "2025-
+    "date": "2025-07-08T07:07:33+0000",
     "dirty": false,
     "error": null,
-    "full-revisionid": "
-    "version": "1.1.
+    "full-revisionid": "53d949a38fa538718df6900080ebd8b0bc1358c9",
+    "version": "1.1.64"
 }
 ''' # END VERSION_JSON
 
```
```diff
--- deepfos-1.1.62/deepfos/api/deepconnector.py
+++ deepfos-1.1.64/deepfos/api/deepconnector.py
@@ -3,7 +3,7 @@ from typing import Union, Awaitable
 from deepfos.element.base import T_ElementInfoWithServer
 from deepfos.lib.decorator import cached_property
 
-from .base import ChildAPI,
+from .base import ChildAPI, RootAPI, post
 from .models.deepconnector import *
 
 
@@ -14,10 +14,10 @@ class DataSourceAPI(ChildAPI):
     """Connection-info related APIs"""
     endpoint = '/apis/v3/ds/spaces/{space}/apps/{app}'
 
-    @
+    @post('connection-info/query', data_wrapped=False)
     def connection_info(self, element_info: T_ElementInfoWithServer) -> Union[ConnectionInfoVo, Awaitable[ConnectionInfoVo]]:
         return {
-            '
+            'body': {
                 'elementName': element_info.elementName,
                 'folderId': element_info.folderId
             }
```
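For orientation, here is a rough sketch of the request the rewritten `connection_info` wrapper produces. The path template and body keys come from the diff above; the space, app, element, and folder values are hypothetical placeholders, and reading `data_wrapped=False` as "post the dict as the JSON body directly, without a wrapper envelope" is an assumption based on the parameter name.

```python
# Hypothetical values; only the path template and body keys come from the diff.
space, app = 'SPACE_ID', 'APP_ID'
path = f'/apis/v3/ds/spaces/{space}/apps/{app}/connection-info/query'
body = {
    'elementName': 'my_connector',   # hypothetical connector element name
    'folderId': 'FOLDER_ID',         # hypothetical folder id
}
# The wrapper issues a POST to `path`; with data_wrapped=False the dict above is
# assumed to be sent as the request body as-is rather than inside a data envelope.
```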
```diff
--- deepfos-1.1.62/deepfos/api/models/workflow.py
+++ deepfos-1.1.64/deepfos/api/models/workflow.py
@@ -10,6 +10,7 @@ generated by model_code_gen.py
 from deepfos.api.models.base import BaseModel
 from typing import List, Optional, Union, Any, Dict
 from pydantic import Field
+from deepfos.api.models.platform import FileUplodRes
 
 
 __all__ = [
@@ -576,6 +577,8 @@ class TaskCompleteInstance(BaseModel):
     extraResParams: Optional[List[TaskCompleteParam]] = None
     #: Button id
     actionId: Optional[str] = None
+    #: Attachment files
+    attachments: Optional[List[FileUplodRes]]
 
 
 class TaskReqDto(BaseModel):
```
```diff
--- deepfos-1.1.62/deepfos/element/deepconnector.py
+++ deepfos-1.1.64/deepfos/element/deepconnector.py
@@ -1,7 +1,10 @@
 import base64
+import binascii
+import os
 
 from deepfos.api.deepconnector import DeepConnectorAPI
 from deepfos.api.models import BaseModel
+from deepfos.db.cipher import AES
 from deepfos.element.base import ElementBase, SyncMeta
 from deepfos.lib.asynchronous import future_property
 from deepfos.lib.decorator import cached_property
@@ -9,6 +12,20 @@ from deepfos.lib.decorator import cached_property
 __all__ = ['AsyncDeepConnector', 'DeepConnector', 'ConnectionInfo']
 
 
+def decrypt(secret, cipher_text, encoding='utf8'):
+    pwd_padded = AES(secret).decrypt(
+        base64.b16decode(cipher_text)
+    )
+    if pwd_padded[-1] in range(1, 17):
+        pad_length = pwd_padded[-1]
+        pad_char = chr(pwd_padded[-1])
+        guess_pad = pad_char * pwd_padded[-1]
+
+        if pwd_padded.decode(encoding).endswith(guess_pad):
+            return pwd_padded.decode(encoding)[:-pad_length:]
+    return pwd_padded.decode(encoding)
+
+
 class ConnectionInfo(BaseModel):
     host: str
     port: int
@@ -45,12 +62,28 @@ class AsyncDeepConnector(ElementBase[DeepConnectorAPI]):
         info = await api.datasource.connection_info(
             element_info=ele_info,
         )
+        if info.encryptType == 'AES':
+            try:
+                password = decrypt(
+                    os.environ.get('EXPORT_DEEPFOS_AES_KEY', '!ABCD-EFGH-IJKL@').encode(),
+                    info.password,
+                    encoding='utf-8'
+                )
+            except ValueError:
+                raise ValueError(
+                    '连接器连接信息解密失败,请检查公共环境变量:EXPORT_DEEPFOS_AES_KEY'
+                ) from None
+        else:
+            try:
+                password = base64.decodebytes(info.password.encode()).decode()
+            except binascii.Error:
+                password = info.password
         return ConnectionInfo(
             host=info.connectionHost,
             port=info.connectionPort,
             db=info.dbName,
             user=info.username,
-            password=
+            password=password,
             dbtype=info.serviceName,
         )
 
```
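The new `decrypt` helper base16-decodes the stored password, AES-decrypts it with the key taken from `EXPORT_DEEPFOS_AES_KEY`, and then strips trailing padding only if the tail actually looks like padding. Below is a standalone sketch of just that padding check; the AES step is replaced by an already-decrypted byte string, purely for illustration.

```python
def strip_guess_padding(pwd_padded: bytes, encoding: str = 'utf8') -> str:
    # Mirrors the tail of the new decrypt() helper: if the last byte looks like a
    # pad length (1..16) and the text really ends with that many copies of the
    # corresponding pad character, strip it; otherwise return the text unchanged.
    if pwd_padded[-1] in range(1, 17):
        pad_length = pwd_padded[-1]
        guess_pad = chr(pwd_padded[-1]) * pad_length
        if pwd_padded.decode(encoding).endswith(guess_pad):
            return pwd_padded.decode(encoding)[:-pad_length]
    return pwd_padded.decode(encoding)


# A PKCS#7-style tail of two 0x02 bytes is stripped; plain text is left untouched.
assert strip_guess_padding(b'secret\x02\x02') == 'secret'
assert strip_guess_padding(b'plain-text') == 'plain-text'
```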
```diff
--- deepfos-1.1.62/deepfos/element/deepmodel.py
+++ deepfos-1.1.64/deepfos/element/deepmodel.py
@@ -1,4 +1,5 @@
 import re
+import threading
 
 import numpy as np
 from asyncpg.connection import connect as pg_conn
@@ -35,7 +36,9 @@ from deepfos.exceptions import (
 from deepfos.lib import serutils
 from deepfos.lib.asynchronous import future_property, evloop
 from deepfos.lib.decorator import flagmethod, cached_property, lru_cache
-from deepfos.lib.utils import
+from deepfos.lib.utils import (
+    AliasGenerator, to_version_tuple, ChunkAlert, split_dataframe_alert
+)
 
 __all__ = ['AsyncDeepModel', 'DeepModel', 'to_fields', 'QueryWithArgs']
 
@@ -786,7 +789,13 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
     """
     __mangle_docs__ = False
 
-    def __init__(
+    def __init__(
+        self,
+        direct_access: bool = True,
+        pg_dsn: str = None,
+        before_chunk: ChunkAlert = None,
+        after_chunk: ChunkAlert = None,
+    ):
         self._txn_ = ContextVar('QLTXN')
         self.appmodule = f"app{OPTION.api.header['app']}"
         self.spacemodule = f"space{OPTION.api.header['space']}"
@@ -796,36 +805,50 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         self.alias = AliasGenerator()
         self.pg_dsn = pg_dsn
         self._globals = {}
+        self.before_chunk = before_chunk
+        self.after_chunk = after_chunk
+        self._clients = threading.local()
 
     @future_property
-    async def
-        if self.direct_access:
-            dbname = None
-            client = create_async_client(
-                default_module=self.appmodule,
-                dbname=dbname
+    async def _internal_dbname(self):
+        if not self.direct_access:
+            # N.B: only retrieved when direct access is enabled
+            return
+        api = await self.wait_for('async_api')
+        ver = await api.extra.version()
+        if to_version_tuple(ver, 4) < (3, 0, 18, 8, 0):
+            return
+        db_info = await api.sharding.database()
+        space = OPTION.api.header['space']
+        if db_info.space != space:
+            raise ValueError(
+                f'Space id in sharding database info invalid. '
+                f'Expected space id: {space}, actual: {db_info.space}'
             )
+        return db_info.edgedbName
+
+    @property
+    def client(self):
+        if not self.direct_access:
+            return
+
+        if (client := getattr(self._clients, 'value', None)) is not None:
             return client
 
+        client = create_async_client(
+            default_module=self.appmodule,
+            dbname=self._internal_dbname
+        )
+        if user_id := OPTION.api.header.get('user'):
+            default_globals = {
+                f'{self.spacemodule}::current_user_id':
+                    user_id
+            }
+            client = client.with_globals(**default_globals)
+        self._globals = client._options.state._globals
+        self._clients.value = client
+        return client
+
     @future_property
     async def element_info(self):
         """Element info"""
@@ -1288,19 +1311,22 @@ class AsyncDeepModel(ElementBase[DeepModelAPI]):
         qls: List[QueryWithArgs]
     ):
         self._ensure_client()
+
+        for part, alert in split_dataframe_alert(
+            data, chunksize, self.before_chunk, self.after_chunk
+        ):
+            with alert:
+                kw_name = self.alias.get(BATCH_INSERT_KW)
+                qls.append(QueryWithArgs(
+                    commands=ql.replace(
+                        f'${BATCH_INSERT_KW}', f'${kw_name}'
+                    ),
+                    kwargs={kw_name: part.to_json(
+                        orient='records', double_precision=15,
+                        force_ascii=False, default_handler=str
+                    )},
+                    globals=self._globals
+                ))
 
     @staticmethod
     def _split_self_link(data, relation, structure, bkey):
```
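The `client` property above now caches one lazily-built client per thread on a `threading.local()` instead of constructing a single client up front. In isolation the pattern looks roughly like the sketch below; `factory` stands in for the `create_async_client(...)` call in the diff, and the class name is hypothetical.

```python
import threading


class PerThreadClient:
    """Sketch of the caching pattern used by the new ``client`` property."""

    def __init__(self, factory):
        # ``factory`` stands in for the create_async_client(...) call in the diff.
        self._factory = factory
        self._clients = threading.local()

    @property
    def client(self):
        # Reuse this thread's client if one was already built ...
        if (client := getattr(self._clients, 'value', None)) is not None:
            return client
        # ... otherwise build it now and cache it for this thread only.
        client = self._factory()
        self._clients.value = client
        return client
```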
```diff
--- deepfos-1.1.62/deepfos/element/finmodel.py
+++ deepfos-1.1.64/deepfos/element/finmodel.py
@@ -25,7 +25,8 @@ from deepfos.api.app import AppAPI
 from deepfos.lib.asynchronous import future_property
 from deepfos.lib.utils import (
     unpack_expr, dict_to_expr, LazyDict, expr_to_dict,
-    dict_to_sql,
+    dict_to_sql, find_str, concat_url, CIEnumMeta, ChunkAlert,
+    split_dataframe_alert,
 )
 from deepfos.lib.sysutils import complete_cartesian_product
 from deepfos.lib.constant import (
@@ -180,11 +181,15 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
         entry_object='python',
         entry_mode=1,
         server_name: Optional[str] = None,
+        before_chunk: ChunkAlert = None,
+        after_chunk: ChunkAlert = None,
     ):
         full_name = OPTION.general.task_info.get('script_name', 'python')
         self.entry_object = entry_object.format(script_name=full_name.split('.')[-1],
                                                 full_name=full_name)
         self.entry_mode = entry_mode
+        self.before_chunk = before_chunk
+        self.after_chunk = after_chunk
         super().__init__(element_name, folder_id, path, server_name)
 
     @future_property(on_demand=True)
@@ -813,30 +818,34 @@ class AsyncFinancialCube(ElementBase[FinancialModelAPI]):
 
         # save data
         resp = []
+
+        for batch_data, alert in split_dataframe_alert(
+            data, chunksize, self.before_chunk, self.after_chunk
+        ):
+            with alert:
+                row_data = [
+                    {"columnDimensionMemberMap": row}
+                    for row in bp.dataframe_to_dict(batch_data)
+                ]
+                payload = ReactSpreadsheetSaveForm(
+                    entryObject=self.entry_object,
+                    sheetDatas=[SpreadsheetSingleData(
+                        cubeName=self.element_info.elementName,
+                        cubeFolderId=self.element_info.folderId,
+                        rowDatas=row_data,
+                        commonMember=pov,
+                    )],
+                    needCheck=need_check,
+                    dataAuditSwitch=data_audit,
+                    entryMode=self.entry_mode,
+                    validateDimensionMember=need_check,
+                    callback=callback,
+                    saveDataAuthMode=auth_mode
+                )
+                r = await self.async_api.reactspreadsheet.save(
+                    payload.dict(exclude_unset=True)
+                )
+                resp.append(r)
         return resp
 
     async def delete_with_mdx(
```
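`AsyncFinancialCube`, like `AsyncDeepModel` above, now accepts optional `before_chunk`/`after_chunk` callbacks that fire around each saved batch. A minimal wiring sketch follows; the element name is hypothetical, and passing `element_name` as the first positional argument is assumed from the `super().__init__` call shown in the diff.

```python
from deepfos.element.finmodel import AsyncFinancialCube


def before_chunk(start: int, end: int, exc: Exception = None) -> None:
    # Fires just before the rows in [start, end) are submitted.
    print(f'saving rows {start}..{end}')


def after_chunk(start: int, end: int, exc: Exception = None) -> None:
    # Fires after the batch; exc carries the exception if the batch failed.
    print(f'rows {start}..{end}:', 'failed' if exc else 'ok')


# 'my_cube' is a hypothetical element name.
cube = AsyncFinancialCube('my_cube', before_chunk=before_chunk, after_chunk=after_chunk)
```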
```diff
--- deepfos-1.1.62/deepfos/element/workflow.py
+++ deepfos-1.1.64/deepfos/element/workflow.py
@@ -4,6 +4,7 @@ from typing import *
 import pandas as pd
 
 from deepfos.api.models import compat_parse_obj_as as parse_obj_as
+from deepfos.api.models.platform import FileUplodRes
 from deepfos.api.models.workflow import *
 from deepfos.api.workflow import WorkFlowAPI
 from deepfos.element.base import ElementBase, SyncMeta
@@ -296,20 +297,47 @@ class AsyncWorkFlow(ElementBase[WorkFlowAPI]):
         file_path: str = None,
         outcome: str = None,
         extra_res_params: Dict[str, Any] = None,
+        attachments: List[Union[Dict[str, str], FileUplodRes]] = None,
     ) -> bool:
         """Complete a task instance
 
         Args:
             task_id: task instance id
             comment: comment
-            file_path: attachment path
+            file_path: attachment path (deprecated for workflow versions after V3.0.4.0; use the attachments parameter instead)
             outcome: outcome option; required when the task has more than one possible outcome
             extra_res_params: completion parameters
+            attachments: list of attachment info,
+                usually taken from the response of the file service's /files/upload or /files/upload/content endpoints
+
 
         Returns:
             True: success
             False: failure
 
+        .. admonition:: Example
+
+            .. code-block:: python
+
+                from deepfos.api.platform import PlatformAPI
+                from deepfos.element.workflow import WorkFlow
+
+                # Upload the attachment 't.txt'
+                upload_resp = PlatformAPI().file.upload(
+                    file_type='DL', file_name='t.txt', file='some text'
+                )
+
+                # Complete the task instance with action 'approve' and parameters
+                # {"a": 1, "b": "42"}, attaching the uploaded 't.txt' with the
+                # comment "Completed by SDK"
+                test_task = WorkFlow('test_task')
+                test_task.complete_task_by_id(
+                    task_id='fd94f6a7-3467-47f9-8a3c-ff626e68dcf5',
+                    outcome='approve',
+                    extra_res_params={'a': 1, 'b': '42'},
+                    comment='Completed by SDK',
+                    attachments=[upload_resp]
+                )
+
         """
         action_id = None
         if outcome is not None:
@@ -318,8 +346,16 @@ class AsyncWorkFlow(ElementBase[WorkFlowAPI]):
             if o.code == outcome:
                 action_id = o.id
                 break
+
         if extra_res_params is None:
             extra_res_params = {}
+        if not isinstance(extra_res_params, dict):
+            raise TypeError('extra_res_params参数应为字典类型')
+
+        if attachments is not None:
+            attachments = parse_obj_as(List[FileUplodRes], attachments)
+        else:
+            attachments = []
         return await self.async_api.task.express_complete(
             TaskCompleteInstance(
                 comment=comment, filePath=file_path, outcome=outcome, taskId=task_id,
@@ -327,7 +363,8 @@ class AsyncWorkFlow(ElementBase[WorkFlowAPI]):
                     TaskCompleteParam(name=k, value=v)
                     for k, v in extra_res_params.items()
                 ],
-                actionId=action_id
+                actionId=action_id,
+                attachments=attachments
             )
         )
 
@@ -554,7 +591,8 @@ class WorkFlow(AsyncWorkFlow, metaclass=SyncMeta):
         comment: str = None,
         file_path: str = None,
         outcome: str = None,
-        extra_res_params: Dict[str, Any] = None
+        extra_res_params: Dict[str, Any] = None,
+        attachments: List[Union[Dict[str, str], FileUplodRes]] = None,
     ) -> bool:
         ...
 
```
```diff
--- deepfos-1.1.62/deepfos/lib/sysutils.py
+++ deepfos-1.1.64/deepfos/lib/sysutils.py
@@ -479,14 +479,14 @@ class TaskUtil:
         py_info: python element info for the task instance; if not provided, it can still be supplied in `run_job_contents`
 
     """
-    api = TaskAPI(sync=True)
-    _chunksize = 200
 
     def __init__(self, task_code, py_info: PyInfo = None):
         self.task_code = task_code
+        self.api = TaskAPI(sync=True)
         if self.meta is None:
             raise ValueError(f"No config for task_code: [{self.task_code}].")
         self.py_info = py_info
+        self._chunksize = 200
 
     @cached_property
     def meta(self):
@@ -579,18 +579,17 @@ class TaskUtil:
         if not_found_col:
            raise ValueError(f'Required columns:{sorted(not_found_col)} since they are compositeKeys.')
 
-        if job_contents.shape[0] <= cls._chunksize:
+    def _create_task_instance(self, call_api, job_contents, payload):
+        if job_contents.shape[0] <= self._chunksize:
             payload.jobContent = job_contents.to_dict(orient='records')
             payload.lastBatch = True
             call_api(payload)
         else:
-            payload.jobContent = job_contents.iloc[0:
+            payload.jobContent = job_contents.iloc[0:self._chunksize:].to_dict(orient='records')
             payload.batchId = call_api(payload).batchId
             payloads = []
 
-            for batch_contents in split_dataframe(job_contents.iloc[
+            for batch_contents in split_dataframe(job_contents.iloc[self._chunksize::], self._chunksize):
                 payload.jobContent = batch_contents.to_dict(orient='records')
                 payloads.append(payload)
 
```
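With this change, importing the module no longer constructs a `TaskAPI`; each `TaskUtil` builds its own client and chunk size when it is created. A minimal sketch, assuming a task_code that has existing configuration:

```python
from deepfos.lib.sysutils import TaskUtil

# 'my_task_code' is hypothetical and must have existing config, otherwise
# __init__ raises ValueError before self.api is ever used.
tu = TaskUtil('my_task_code')
tu.api          # a TaskAPI(sync=True) created in __init__, not at import time
tu._chunksize   # 200, now an instance attribute instead of a class attribute
```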
```diff
--- deepfos-1.1.62/deepfos/lib/utils.py
+++ deepfos-1.1.64/deepfos/lib/utils.py
@@ -7,13 +7,14 @@ import sys
 import time
 import weakref
 from collections import UserList, UserDict, defaultdict
+from contextlib import contextmanager, nullcontext
 from enum import EnumMeta, Enum
 import random
 from typing import (
     Tuple, Optional, Dict,
     List, Union, Callable, Any,
     TypeVar, MutableMapping, Container,
-    Iterator, Iterable, DefaultDict
+    Iterator, Iterable, DefaultDict,
 )
 from itertools import groupby, count
 
@@ -867,6 +868,38 @@ def dict_to_sql(
     return sql
 
 
+class ChunkAlert:
+    def __call__(self, start: int, end: int, exc: Exception = None) -> None: ...
+
+
+@contextmanager
+def chunk_alert(
+    start: int, end: int,
+    before: ChunkAlert = None,
+    after: ChunkAlert = None,
+):
+    try:
+        if before is not None:
+            try:
+                before(start, end)
+            except Exception:
+                logger.warning('Error occurs while calling before_chunk.')
+        yield
+    except Exception as e:
+        if after is not None:
+            try:
+                after(start, end, e)
+            except Exception:
+                logger.warning('Error occurs while calling after_chunk.')
+        raise
+    else:
+        if after is not None:
+            try:
+                after(start, end)
+            except Exception:
+                logger.warning('Error occurs while calling after_chunk.')
+
+
 def split_dataframe(data: pd.DataFrame, chunksize: int = None):
     nrows = len(data)
     if chunksize is None or chunksize > nrows:
@@ -878,6 +911,33 @@ def split_dataframe(data: pd.DataFrame, chunksize: int = None):
         yield data.iloc[i: i + chunksize]
 
 
+def split_dataframe_alert(
+    data: pd.DataFrame,
+    chunksize: int = None,
+    before_chunk: ChunkAlert = None,
+    after_chunk: ChunkAlert = None,
+):
+    no_alert = before_chunk is None and after_chunk is None
+
+    nrows = len(data)
+    if chunksize is None or chunksize > nrows:
+        if no_alert:
+            yield data, nullcontext()
+        else:
+            yield data, chunk_alert(0, nrows, before_chunk, after_chunk)
+    elif chunksize <= 0:
+        raise ValueError("chunksize must be greater than 0.")
+    else:
+        for i in range(0, nrows, chunksize):
+            if no_alert:
+                yield data.iloc[i: i + chunksize], nullcontext()
+            else:
+                yield (
+                    data.iloc[i: i + chunksize],
+                    chunk_alert(i, min(i + chunksize, nrows), before_chunk, after_chunk)
+                )
+
+
 def find_str(
     target: str,
     candidates: Iterable[str],
```
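A minimal usage sketch of the new helpers in isolation (the callbacks and the DataFrame below are made up for illustration): `split_dataframe_alert` yields each chunk together with a context manager that calls `before_chunk` before the guarded block and `after_chunk` afterwards, passing the exception along if the block fails.

```python
import pandas as pd

from deepfos.lib.utils import split_dataframe_alert

df = pd.DataFrame({'a': range(5)})          # made-up sample data


def before(start, end, exc=None):
    print(f'chunk [{start}, {end}) starting')


def after(start, end, exc=None):
    print(f'chunk [{start}, {end})', 'failed' if exc else 'done')


for chunk, alert in split_dataframe_alert(df, chunksize=2,
                                          before_chunk=before, after_chunk=after):
    with alert:
        # Process the chunk here; before()/after() fire around this block,
        # and after() receives the exception if processing raises.
        print(chunk['a'].sum())
```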