recurvedata_lib-0.1.487-py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of recurvedata-lib might be problematic.
- recurvedata/__init__.py +0 -0
- recurvedata/__version__.py +1 -0
- recurvedata/client/__init__.py +3 -0
- recurvedata/client/client.py +150 -0
- recurvedata/client/server_client.py +91 -0
- recurvedata/config.py +99 -0
- recurvedata/connectors/__init__.py +20 -0
- recurvedata/connectors/_register.py +46 -0
- recurvedata/connectors/base.py +111 -0
- recurvedata/connectors/config_schema.py +1575 -0
- recurvedata/connectors/connectors/__init__.py +0 -0
- recurvedata/connectors/connectors/aliyun_access_key.py +30 -0
- recurvedata/connectors/connectors/auth.py +44 -0
- recurvedata/connectors/connectors/azure_blob.py +89 -0
- recurvedata/connectors/connectors/azure_synapse.py +79 -0
- recurvedata/connectors/connectors/bigquery.py +359 -0
- recurvedata/connectors/connectors/clickhouse.py +219 -0
- recurvedata/connectors/connectors/dingtalk.py +61 -0
- recurvedata/connectors/connectors/doris.py +215 -0
- recurvedata/connectors/connectors/es.py +62 -0
- recurvedata/connectors/connectors/feishu.py +65 -0
- recurvedata/connectors/connectors/ftp.py +50 -0
- recurvedata/connectors/connectors/generic.py +49 -0
- recurvedata/connectors/connectors/google_cloud_storage.py +115 -0
- recurvedata/connectors/connectors/google_service_account.py +225 -0
- recurvedata/connectors/connectors/hive.py +207 -0
- recurvedata/connectors/connectors/impala.py +210 -0
- recurvedata/connectors/connectors/jenkins.py +51 -0
- recurvedata/connectors/connectors/mail.py +89 -0
- recurvedata/connectors/connectors/microsoft_fabric.py +284 -0
- recurvedata/connectors/connectors/mongo.py +79 -0
- recurvedata/connectors/connectors/mssql.py +131 -0
- recurvedata/connectors/connectors/mysql.py +191 -0
- recurvedata/connectors/connectors/n8n.py +141 -0
- recurvedata/connectors/connectors/oss.py +74 -0
- recurvedata/connectors/connectors/owncloud.py +36 -0
- recurvedata/connectors/connectors/phoenix.py +36 -0
- recurvedata/connectors/connectors/postgres.py +230 -0
- recurvedata/connectors/connectors/python.py +50 -0
- recurvedata/connectors/connectors/redshift.py +187 -0
- recurvedata/connectors/connectors/s3.py +93 -0
- recurvedata/connectors/connectors/sftp.py +87 -0
- recurvedata/connectors/connectors/slack.py +35 -0
- recurvedata/connectors/connectors/spark.py +99 -0
- recurvedata/connectors/connectors/starrocks.py +175 -0
- recurvedata/connectors/connectors/tencent_cos.py +40 -0
- recurvedata/connectors/connectors/tidb.py +49 -0
- recurvedata/connectors/const.py +315 -0
- recurvedata/connectors/datasource.py +189 -0
- recurvedata/connectors/dbapi.py +469 -0
- recurvedata/connectors/fs.py +66 -0
- recurvedata/connectors/ftp.py +40 -0
- recurvedata/connectors/object_store.py +60 -0
- recurvedata/connectors/pigeon.py +172 -0
- recurvedata/connectors/proxy.py +104 -0
- recurvedata/connectors/service.py +223 -0
- recurvedata/connectors/utils.py +47 -0
- recurvedata/consts.py +49 -0
- recurvedata/core/__init__.py +0 -0
- recurvedata/core/config.py +46 -0
- recurvedata/core/configurable.py +27 -0
- recurvedata/core/consts.py +2 -0
- recurvedata/core/templating.py +206 -0
- recurvedata/core/tracing.py +223 -0
- recurvedata/core/transformer.py +186 -0
- recurvedata/core/translation.py +91 -0
- recurvedata/dbt/client.py +97 -0
- recurvedata/dbt/consts.py +99 -0
- recurvedata/dbt/cosmos_utils.py +275 -0
- recurvedata/dbt/error_codes.py +18 -0
- recurvedata/dbt/schemas.py +98 -0
- recurvedata/dbt/service.py +451 -0
- recurvedata/dbt/utils.py +246 -0
- recurvedata/error_codes.py +71 -0
- recurvedata/exceptions.py +72 -0
- recurvedata/executors/__init__.py +4 -0
- recurvedata/executors/cli/__init__.py +7 -0
- recurvedata/executors/cli/connector.py +117 -0
- recurvedata/executors/cli/dbt.py +118 -0
- recurvedata/executors/cli/main.py +82 -0
- recurvedata/executors/cli/parameters.py +18 -0
- recurvedata/executors/client.py +190 -0
- recurvedata/executors/consts.py +50 -0
- recurvedata/executors/debug_executor.py +100 -0
- recurvedata/executors/executor.py +300 -0
- recurvedata/executors/link_executor.py +189 -0
- recurvedata/executors/models.py +34 -0
- recurvedata/executors/schemas.py +222 -0
- recurvedata/executors/service/__init__.py +0 -0
- recurvedata/executors/service/connector.py +380 -0
- recurvedata/executors/utils.py +172 -0
- recurvedata/filestorage/__init__.py +11 -0
- recurvedata/filestorage/_factory.py +33 -0
- recurvedata/filestorage/backends/__init__.py +0 -0
- recurvedata/filestorage/backends/fsspec.py +45 -0
- recurvedata/filestorage/backends/local.py +67 -0
- recurvedata/filestorage/backends/oss.py +56 -0
- recurvedata/filestorage/interface.py +84 -0
- recurvedata/operators/__init__.py +10 -0
- recurvedata/operators/base.py +28 -0
- recurvedata/operators/config.py +21 -0
- recurvedata/operators/context.py +255 -0
- recurvedata/operators/dbt_operator/__init__.py +2 -0
- recurvedata/operators/dbt_operator/model_pipeline_link_operator.py +55 -0
- recurvedata/operators/dbt_operator/operator.py +353 -0
- recurvedata/operators/link_operator/__init__.py +1 -0
- recurvedata/operators/link_operator/operator.py +120 -0
- recurvedata/operators/models.py +55 -0
- recurvedata/operators/notify_operator/__init__.py +1 -0
- recurvedata/operators/notify_operator/operator.py +180 -0
- recurvedata/operators/operator.py +119 -0
- recurvedata/operators/python_operator/__init__.py +1 -0
- recurvedata/operators/python_operator/operator.py +132 -0
- recurvedata/operators/sensor_operator/__init__.py +1 -0
- recurvedata/operators/sensor_operator/airflow_utils.py +63 -0
- recurvedata/operators/sensor_operator/operator.py +172 -0
- recurvedata/operators/spark_operator/__init__.py +1 -0
- recurvedata/operators/spark_operator/operator.py +200 -0
- recurvedata/operators/spark_operator/spark_sample.py +47 -0
- recurvedata/operators/sql_operator/__init__.py +1 -0
- recurvedata/operators/sql_operator/operator.py +90 -0
- recurvedata/operators/task.py +211 -0
- recurvedata/operators/transfer_operator/__init__.py +40 -0
- recurvedata/operators/transfer_operator/const.py +10 -0
- recurvedata/operators/transfer_operator/dump_aliyun_sls.py +82 -0
- recurvedata/operators/transfer_operator/dump_sheet_task_base.py +292 -0
- recurvedata/operators/transfer_operator/dump_task_cass.py +155 -0
- recurvedata/operators/transfer_operator/dump_task_dbapi.py +209 -0
- recurvedata/operators/transfer_operator/dump_task_es.py +113 -0
- recurvedata/operators/transfer_operator/dump_task_feishu_sheet.py +114 -0
- recurvedata/operators/transfer_operator/dump_task_ftp.py +234 -0
- recurvedata/operators/transfer_operator/dump_task_google_sheet.py +66 -0
- recurvedata/operators/transfer_operator/dump_task_mongodb.py +168 -0
- recurvedata/operators/transfer_operator/dump_task_oss.py +285 -0
- recurvedata/operators/transfer_operator/dump_task_python.py +212 -0
- recurvedata/operators/transfer_operator/dump_task_s3.py +270 -0
- recurvedata/operators/transfer_operator/dump_task_sftp.py +229 -0
- recurvedata/operators/transfer_operator/load_task_aliyun_oss.py +107 -0
- recurvedata/operators/transfer_operator/load_task_azure_blob.py +115 -0
- recurvedata/operators/transfer_operator/load_task_azure_synapse.py +90 -0
- recurvedata/operators/transfer_operator/load_task_clickhouse.py +167 -0
- recurvedata/operators/transfer_operator/load_task_doris.py +164 -0
- recurvedata/operators/transfer_operator/load_task_email.py +188 -0
- recurvedata/operators/transfer_operator/load_task_es.py +86 -0
- recurvedata/operators/transfer_operator/load_task_filebrowser.py +151 -0
- recurvedata/operators/transfer_operator/load_task_ftp.py +19 -0
- recurvedata/operators/transfer_operator/load_task_google_bigquery.py +90 -0
- recurvedata/operators/transfer_operator/load_task_google_cloud_storage.py +127 -0
- recurvedata/operators/transfer_operator/load_task_google_sheet.py +130 -0
- recurvedata/operators/transfer_operator/load_task_hive.py +158 -0
- recurvedata/operators/transfer_operator/load_task_microsoft_fabric.py +105 -0
- recurvedata/operators/transfer_operator/load_task_mssql.py +153 -0
- recurvedata/operators/transfer_operator/load_task_mysql.py +157 -0
- recurvedata/operators/transfer_operator/load_task_owncloud.py +135 -0
- recurvedata/operators/transfer_operator/load_task_postgresql.py +109 -0
- recurvedata/operators/transfer_operator/load_task_qcloud_cos.py +119 -0
- recurvedata/operators/transfer_operator/load_task_recurve_data_prep.py +75 -0
- recurvedata/operators/transfer_operator/load_task_redshift.py +95 -0
- recurvedata/operators/transfer_operator/load_task_s3.py +150 -0
- recurvedata/operators/transfer_operator/load_task_sftp.py +90 -0
- recurvedata/operators/transfer_operator/load_task_starrocks.py +169 -0
- recurvedata/operators/transfer_operator/load_task_yicrowds.py +97 -0
- recurvedata/operators/transfer_operator/mixin.py +31 -0
- recurvedata/operators/transfer_operator/operator.py +231 -0
- recurvedata/operators/transfer_operator/task.py +223 -0
- recurvedata/operators/transfer_operator/utils.py +134 -0
- recurvedata/operators/ui.py +80 -0
- recurvedata/operators/utils/__init__.py +51 -0
- recurvedata/operators/utils/file_factory.py +150 -0
- recurvedata/operators/utils/fs.py +10 -0
- recurvedata/operators/utils/lineage.py +265 -0
- recurvedata/operators/web_init.py +15 -0
- recurvedata/pigeon/connector/__init__.py +294 -0
- recurvedata/pigeon/connector/_registry.py +17 -0
- recurvedata/pigeon/connector/aliyun_oss.py +80 -0
- recurvedata/pigeon/connector/awss3.py +123 -0
- recurvedata/pigeon/connector/azure_blob.py +176 -0
- recurvedata/pigeon/connector/azure_synapse.py +51 -0
- recurvedata/pigeon/connector/cass.py +151 -0
- recurvedata/pigeon/connector/clickhouse.py +403 -0
- recurvedata/pigeon/connector/clickhouse_native.py +351 -0
- recurvedata/pigeon/connector/dbapi.py +571 -0
- recurvedata/pigeon/connector/doris.py +166 -0
- recurvedata/pigeon/connector/es.py +176 -0
- recurvedata/pigeon/connector/feishu.py +1135 -0
- recurvedata/pigeon/connector/ftp.py +163 -0
- recurvedata/pigeon/connector/google_bigquery.py +283 -0
- recurvedata/pigeon/connector/google_cloud_storage.py +130 -0
- recurvedata/pigeon/connector/hbase_phoenix.py +108 -0
- recurvedata/pigeon/connector/hdfs.py +204 -0
- recurvedata/pigeon/connector/hive_impala.py +383 -0
- recurvedata/pigeon/connector/microsoft_fabric.py +95 -0
- recurvedata/pigeon/connector/mongodb.py +56 -0
- recurvedata/pigeon/connector/mssql.py +467 -0
- recurvedata/pigeon/connector/mysql.py +175 -0
- recurvedata/pigeon/connector/owncloud.py +92 -0
- recurvedata/pigeon/connector/postgresql.py +267 -0
- recurvedata/pigeon/connector/power_bi.py +179 -0
- recurvedata/pigeon/connector/qcloud_cos.py +79 -0
- recurvedata/pigeon/connector/redshift.py +123 -0
- recurvedata/pigeon/connector/sftp.py +73 -0
- recurvedata/pigeon/connector/sqlite.py +42 -0
- recurvedata/pigeon/connector/starrocks.py +144 -0
- recurvedata/pigeon/connector/tableau.py +162 -0
- recurvedata/pigeon/const.py +21 -0
- recurvedata/pigeon/csv.py +172 -0
- recurvedata/pigeon/docs/datasources-example.json +82 -0
- recurvedata/pigeon/docs/images/pigeon_design.png +0 -0
- recurvedata/pigeon/docs/lightweight-data-sync-solution.md +111 -0
- recurvedata/pigeon/dumper/__init__.py +171 -0
- recurvedata/pigeon/dumper/aliyun_sls.py +415 -0
- recurvedata/pigeon/dumper/base.py +141 -0
- recurvedata/pigeon/dumper/cass.py +213 -0
- recurvedata/pigeon/dumper/dbapi.py +346 -0
- recurvedata/pigeon/dumper/es.py +112 -0
- recurvedata/pigeon/dumper/ftp.py +64 -0
- recurvedata/pigeon/dumper/mongodb.py +103 -0
- recurvedata/pigeon/handler/__init__.py +4 -0
- recurvedata/pigeon/handler/base.py +153 -0
- recurvedata/pigeon/handler/csv_handler.py +290 -0
- recurvedata/pigeon/loader/__init__.py +87 -0
- recurvedata/pigeon/loader/base.py +83 -0
- recurvedata/pigeon/loader/csv_to_azure_synapse.py +214 -0
- recurvedata/pigeon/loader/csv_to_clickhouse.py +152 -0
- recurvedata/pigeon/loader/csv_to_doris.py +215 -0
- recurvedata/pigeon/loader/csv_to_es.py +51 -0
- recurvedata/pigeon/loader/csv_to_google_bigquery.py +169 -0
- recurvedata/pigeon/loader/csv_to_hive.py +468 -0
- recurvedata/pigeon/loader/csv_to_microsoft_fabric.py +242 -0
- recurvedata/pigeon/loader/csv_to_mssql.py +174 -0
- recurvedata/pigeon/loader/csv_to_mysql.py +180 -0
- recurvedata/pigeon/loader/csv_to_postgresql.py +248 -0
- recurvedata/pigeon/loader/csv_to_redshift.py +240 -0
- recurvedata/pigeon/loader/csv_to_starrocks.py +233 -0
- recurvedata/pigeon/meta.py +116 -0
- recurvedata/pigeon/row_factory.py +42 -0
- recurvedata/pigeon/schema/__init__.py +124 -0
- recurvedata/pigeon/schema/types.py +13 -0
- recurvedata/pigeon/sync.py +283 -0
- recurvedata/pigeon/transformer.py +146 -0
- recurvedata/pigeon/utils/__init__.py +134 -0
- recurvedata/pigeon/utils/bloomfilter.py +181 -0
- recurvedata/pigeon/utils/date_time.py +323 -0
- recurvedata/pigeon/utils/escape.py +15 -0
- recurvedata/pigeon/utils/fs.py +266 -0
- recurvedata/pigeon/utils/json.py +44 -0
- recurvedata/pigeon/utils/keyed_tuple.py +85 -0
- recurvedata/pigeon/utils/mp.py +156 -0
- recurvedata/pigeon/utils/sql.py +328 -0
- recurvedata/pigeon/utils/timing.py +155 -0
- recurvedata/provider_manager.py +0 -0
- recurvedata/providers/__init__.py +0 -0
- recurvedata/providers/dbapi/__init__.py +0 -0
- recurvedata/providers/flywheel/__init__.py +0 -0
- recurvedata/providers/mysql/__init__.py +0 -0
- recurvedata/schedulers/__init__.py +1 -0
- recurvedata/schedulers/airflow.py +974 -0
- recurvedata/schedulers/airflow_db_process.py +331 -0
- recurvedata/schedulers/airflow_operators.py +61 -0
- recurvedata/schedulers/airflow_plugin.py +9 -0
- recurvedata/schedulers/airflow_trigger_dag_patch.py +117 -0
- recurvedata/schedulers/base.py +99 -0
- recurvedata/schedulers/cli.py +228 -0
- recurvedata/schedulers/client.py +56 -0
- recurvedata/schedulers/consts.py +52 -0
- recurvedata/schedulers/debug_celery.py +62 -0
- recurvedata/schedulers/model.py +63 -0
- recurvedata/schedulers/schemas.py +97 -0
- recurvedata/schedulers/service.py +20 -0
- recurvedata/schedulers/system_dags.py +59 -0
- recurvedata/schedulers/task_status.py +279 -0
- recurvedata/schedulers/utils.py +73 -0
- recurvedata/schema/__init__.py +0 -0
- recurvedata/schema/field.py +88 -0
- recurvedata/schema/schema.py +55 -0
- recurvedata/schema/types.py +17 -0
- recurvedata/schema.py +0 -0
- recurvedata/server/__init__.py +0 -0
- recurvedata/server/app.py +7 -0
- recurvedata/server/connector/__init__.py +0 -0
- recurvedata/server/connector/api.py +79 -0
- recurvedata/server/connector/schemas.py +28 -0
- recurvedata/server/data_service/__init__.py +0 -0
- recurvedata/server/data_service/api.py +126 -0
- recurvedata/server/data_service/client.py +18 -0
- recurvedata/server/data_service/consts.py +1 -0
- recurvedata/server/data_service/schemas.py +68 -0
- recurvedata/server/data_service/service.py +218 -0
- recurvedata/server/dbt/__init__.py +0 -0
- recurvedata/server/dbt/api.py +116 -0
- recurvedata/server/error_code.py +49 -0
- recurvedata/server/exceptions.py +19 -0
- recurvedata/server/executor/__init__.py +0 -0
- recurvedata/server/executor/api.py +37 -0
- recurvedata/server/executor/schemas.py +30 -0
- recurvedata/server/executor/service.py +220 -0
- recurvedata/server/main.py +32 -0
- recurvedata/server/schedulers/__init__.py +0 -0
- recurvedata/server/schedulers/api.py +252 -0
- recurvedata/server/schedulers/schemas.py +50 -0
- recurvedata/server/schemas.py +50 -0
- recurvedata/utils/__init__.py +15 -0
- recurvedata/utils/_typer.py +61 -0
- recurvedata/utils/attrdict.py +19 -0
- recurvedata/utils/command_helper.py +20 -0
- recurvedata/utils/compat.py +12 -0
- recurvedata/utils/compression.py +203 -0
- recurvedata/utils/crontab.py +42 -0
- recurvedata/utils/crypto_util.py +305 -0
- recurvedata/utils/dataclass.py +11 -0
- recurvedata/utils/date_time.py +464 -0
- recurvedata/utils/dispatch.py +114 -0
- recurvedata/utils/email_util.py +104 -0
- recurvedata/utils/files.py +386 -0
- recurvedata/utils/helpers.py +170 -0
- recurvedata/utils/httputil.py +117 -0
- recurvedata/utils/imports.py +132 -0
- recurvedata/utils/json.py +80 -0
- recurvedata/utils/log.py +117 -0
- recurvedata/utils/log_capture.py +153 -0
- recurvedata/utils/mp.py +178 -0
- recurvedata/utils/normalizer.py +102 -0
- recurvedata/utils/redis_lock.py +474 -0
- recurvedata/utils/registry.py +54 -0
- recurvedata/utils/shell.py +15 -0
- recurvedata/utils/singleton.py +33 -0
- recurvedata/utils/sql.py +6 -0
- recurvedata/utils/timeout.py +28 -0
- recurvedata/utils/tracing.py +14 -0
- recurvedata_lib-0.1.487.dist-info/METADATA +605 -0
- recurvedata_lib-0.1.487.dist-info/RECORD +333 -0
- recurvedata_lib-0.1.487.dist-info/WHEEL +5 -0
- recurvedata_lib-0.1.487.dist-info/entry_points.txt +6 -0
recurvedata/executors/consts.py
@@ -0,0 +1,50 @@
+import json
+from enum import Enum
+
+from recurvedata.utils.date_time import as_local_datetime
+
+
+def str_2_bool(val: str):
+    val = val.lower()
+    if val in ("true", "1"):
+        return True
+    elif val in ("false", "0"):
+        return False
+    return val
+
+
+def str_2_int(val: str):
+    if val.isdigit():
+        return int(val)
+    if val.startswith("-") and val[1:].isdigit():
+        return int(val)
+    return val
+
+
+def str_2_float(val: str):
+    try:
+        return float(val)
+    except Exception:
+        return val
+
+
+class VariableType(str, Enum):
+    INT = "INT"
+    FLOAT = "FLOAT"
+    STRING = "STRING"
+    BOOLEAN = "BOOLEAN"
+    DATE = "DATE"
+    DATETIME = "DATETIME"
+    JSON = "JSON"
+    PYTHON_CODE = "PYTHON_CODE"
+
+
+VAR_CONVERT_STRING_FUNCS = {
+    VariableType.INT: str_2_int,
+    VariableType.FLOAT: str_2_float,
+    VariableType.BOOLEAN: str_2_bool,
+    VariableType.STRING: lambda x: x,
+    VariableType.DATE: lambda x: as_local_datetime(x).date(),
+    VariableType.DATETIME: lambda x: as_local_datetime(x),
+    VariableType.JSON: json.loads,
+}
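
A quick sketch of how this lookup table behaves (the convert helper below is hypothetical, not part of the package): each declared VariableType maps to a string converter, and PYTHON_CODE is deliberately absent because code variables are evaluated elsewhere in the executor.

from recurvedata.executors.consts import VAR_CONVERT_STRING_FUNCS, VariableType

def convert(var_type: VariableType, raw: str):
    # Fall back to identity for types without a registered converter.
    func = VAR_CONVERT_STRING_FUNCS.get(var_type, lambda x: x)
    return func(raw)

assert convert(VariableType.INT, "42") == 42
assert convert(VariableType.BOOLEAN, "true") is True
assert convert(VariableType.JSON, '{"a": 1}') == {"a": 1}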
recurvedata/executors/debug_executor.py
@@ -0,0 +1,100 @@
+import datetime
+import logging
+from typing import Any, Optional
+
+import pendulum
+
+from recurvedata.connectors.service import PigeonDataSource as DataSource
+from recurvedata.consts import ETLExecutionStatus, ScheduleType
+from recurvedata.executors.client import ExecutorClient
+from recurvedata.executors.executor import Executor
+from recurvedata.executors.models import ExecutorDag, ExecutorNode
+from recurvedata.executors.schemas import DebugLogRecord, WorkflowNodeItem
+from recurvedata.operators.task import BaseTask
+from recurvedata.utils.dataclass import init_dataclass_from_dict
+
+logger = logging.getLogger(__name__)
+
+
+class DebugExecutor(Executor):
+    """
+    init sdk
+    add sdk to context
+    fetch node conf using sdk
+    init Dag&Node
+    """
+
+    def __init__(
+        self,
+        workflow_id: int,
+        node_key: str,
+        schedule_type: ScheduleType,
+        schedule_interval: str,
+        execution_date: datetime.datetime,
+        timezone: str,
+        celery_task_id: str,
+    ):
+        self.project_id: int = None
+        self.workflow_id = workflow_id
+        self.node_key = node_key
+        self.schedule_type = schedule_type
+        self.schedule_interval = schedule_interval
+        self.execution_date = execution_date
+        self.timezone = pendulum.timezone(timezone)
+        self.celery_task_id = celery_task_id
+        self.client: ExecutorClient = ExecutorClient()
+        self.dag: ExecutorDag = None
+        self.node: ExecutorNode = None
+        self.init_dag_node()
+        self.register_context()
+
+    def _init_task_instance_on_task_start(self, task: BaseTask):
+        pass
+
+    def _update_task_instance_on_task_finish(
+        self,
+        task: BaseTask,
+        ti_id: int,
+        task_status: ETLExecutionStatus,
+        meta: Any,
+        error: Exception,
+        error_stack: Optional[str],
+    ):
+        pass
+
+    def _get_connection_by_name(self, project_id: int, connection_name: str) -> DataSource:
+        connection = self.client.get_connection(project_id=project_id, connection_name=connection_name)
+        return DataSource(connection_type=connection.type, name=connection.name, data=connection.data)
+
+    def init_dag_node(self):
+        logger.info(f"start init dag node {self.workflow_id} {self.node_key}")
+        api_response: WorkflowNodeItem = self.client.get_debug_node(self.workflow_id, self.node_key)
+        self.project_id = api_response.project_id
+        self.dag: ExecutorDag = ExecutorDag(
+            id=int(self.workflow_id),
+            project_id=int(self.project_id),
+            name=api_response.workflow_name,
+            scheduler_type=self.schedule_type,
+            schedule_interval=self.schedule_interval,
+            timezone=self.timezone,
+            owner="debug",
+        )
+
+        self.node: ExecutorNode = init_dataclass_from_dict(ExecutorNode, api_response.model_dump(), dag=self.dag)
+        self.node.variable = self.init_variables()
+
+    def run_impl(self):
+        logger.info(f"start debug {self.workflow_id}.{self.node.name}, {self.node.operator}")
+        operator = self.init_operator()
+        operator.execute()
+        logger.info(f"finish debug {self.workflow_id}.{self.node.name}, {self.node.operator}")
+
+    def _send_logs(self, message: str):
+        self.client.send_back_debug_logs(
+            DebugLogRecord(
+                workflow_id=self.workflow_id,
+                node_key=self.node_key,
+                celery_task_id=self.celery_task_id,
+                logs=[message],
+            )
+        )
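
A hedged invocation sketch (all IDs below are hypothetical, and the constructor immediately calls ExecutorClient.get_debug_node, so a reachable Recurve backend is required for this to get past __init__):

import datetime

from recurvedata.executors.debug_executor import DebugExecutor

executor = DebugExecutor(
    workflow_id=123,
    node_key="node.1-example",
    schedule_type=...,  # a recurvedata.consts.ScheduleType member
    schedule_interval="0 0 * * *",
    execution_date=datetime.datetime(2024, 1, 1),
    timezone="UTC",
    celery_task_id="abc123",
)
executor.run()  # run() is inherited from Executor; output streams back via _send_logs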
recurvedata/executors/executor.py
@@ -0,0 +1,300 @@
+import datetime
+import json
+import logging
+import os
+import socket
+from typing import TYPE_CHECKING, Any, Optional
+
+from recurvedata.connectors.service import PigeonDataSource as DataSource
+from recurvedata.consts import PROJECT_ID_KEY, ETLExecutionStatus
+from recurvedata.core.templating import Renderer
+from recurvedata.executors.client import ExecutorClient
+from recurvedata.executors.consts import VariableType
+from recurvedata.executors.models import ExecutorDag, ExecutorNode
+from recurvedata.executors.schemas import JobNodeItem, TaskLogRecord
+from recurvedata.executors.utils import get_airflow_run_id, update_meta_file
+from recurvedata.operators import get_operator_class
+from recurvedata.operators.context import context
+from recurvedata.operators.task import BaseTask
+from recurvedata.utils.dataclass import init_dataclass_from_dict
+from recurvedata.utils.date_time import astimezone, tz_local, utcnow_aware
+from recurvedata.utils.helpers import get_environment_variable, truncate_string
+from recurvedata.utils.log_capture import OutputInterceptor, setup_log_handler
+
+if TYPE_CHECKING:
+    from recurvedata.operators.operator import BaseOperator
+
+logger = logging.getLogger(__name__)
+
+
+class Executor(object):
+    """Executor class for running workflow nodes.
+
+    Handles initialization and execution of workflow nodes with the following responsibilities:
+    - Initializes SDK client and connects to backend services
+    - Fetches node configuration and initializes DAG/Node objects
+    - Sets up execution context and variables
+    - Manages node execution lifecycle including logging and error handling
+
+    Args:
+        dag_slug (str): Identifier for the DAG in format "dag.{job_id}"
+        node_slug (str): Identifier for the node in format "node.{node_id}-{name}"
+        execution_date (str): Execution timestamp for the node run
+    """
+
+    def __init__(self, dag_slug: str, node_slug: str, execution_date: str):
+        self.job_id = self._extract_job_id(dag_slug)
+        self.node_id = self._extract_node_id(node_slug)
+        self.client: ExecutorClient = ExecutorClient()
+        self._execution_date = execution_date
+        self.execution_date: datetime.datetime = None
+        self.dag: ExecutorDag = None
+        self.node: ExecutorNode = None
+        self.init_dag_node()
+        self.register_context()
+
+    def _extract_job_id(self, dag_slug: str) -> int:
+        return int(dag_slug.split(".")[-1])
+
+    def _extract_node_id(self, node_slug: str) -> int:
+        return int(node_slug.split(".")[1].split("-")[0])
+
+    def register_context(self):
+        context.client = self.client
+        context.init_context(get_connection_by_name=self._get_connection_by_name)
+        context.current_project_id.set(self.dag.project_id)
+        context.register_function("init_task_instance_on_task_start", self._init_task_instance_on_task_start)
+        context.register_function("update_task_instance_on_task_finish", self._update_task_instance_on_task_finish)
+
+    def _init_task_instance_on_task_start(self, task: BaseTask):
+        # todo: move to another place
+        update_meta_file(
+            task.dag.id,
+            task.node.node_key,
+            task.execution_date,
+            {
+                "operator": task.node.operator,
+                "task": task.__class__.__name__,
+            },
+        )
+
+    def _prepare_task_end_payload(self) -> dict:
+        return {
+            "current_retry_number": get_environment_variable("AIRFLOW_RETRY_NUMBER", int),
+            "max_retry_number": get_environment_variable("AIRFLOW_MAX_RETRY_NUMBER", int),
+            "link_node_id": self.node.link_settings and self.node.link_settings.get("node_id"),
+            "link_workflow_id": self.node.link_settings and self.node.link_settings.get("workflow_id"),
+            "node_id": self.node.id,
+            "execution_date": self.execution_date,
+            "data_interval_end": get_environment_variable("AIRFLOW_DATA_INTERVAL_END"),
+            "run_id": get_airflow_run_id(),
+            "job_id": self.job_id,
+        }
+
+    def _update_task_instance_on_task_finish(
+        self,
+        task: BaseTask,
+        ti_id: int,
+        task_status: ETLExecutionStatus,
+        meta: Any,
+        error: Exception,
+        error_stack: Optional[str],
+    ):
+        kwargs = self._prepare_task_end_payload()
+        if task_status == ETLExecutionStatus.FAILED:
+            if kwargs["max_retry_number"] and kwargs["current_retry_number"] < (kwargs["max_retry_number"] + 1):
+                # airflow try_times = first time + max_retry_number
+                task_status = ETLExecutionStatus.RETRY
+        self.client.task_instance_end(
+            meta=meta,
+            traceback=error_stack and truncate_string(error_stack, 1000),
+            status=task_status,
+            end_time=utcnow_aware(),
+            **kwargs,
+        )
+
+    def _get_connection_by_name(self, project_id: int, connection_name: str) -> DataSource:
+        connection = self.client.get_connection(project_id=project_id, connection_name=connection_name)
+        return DataSource(connection_type=connection.type, name=connection.name, data=connection.data)
+
+    def init_dag_node(self):
+        logger.info(f"start init dag node {self.job_id} {self.node_id}")
+        api_response: JobNodeItem = self.client.get_node(self.job_id, self.node_id)
+
+        self.dag: ExecutorDag = ExecutorDag(
+            id=int(api_response.job_id),
+            project_id=int(api_response.project_id),
+            name=api_response.job_name,
+            scheduler_type=api_response.job_schedule_type,
+            schedule_interval=api_response.job_schedule_interval,
+            timezone=api_response.job_timezone,
+            owner=api_response.job_owner,
+            workflow_id=api_response.workflow_id,
+            workflow_name=api_response.workflow_name,
+            full_refresh_models=api_response.full_refresh_models,
+            retries=api_response.retries,
+            retry_delay=api_response.retry_delay,
+            skip_data_tests=api_response.skip_data_tests,
+        )
+
+        # use dag timezone to convert execution_date
+        self.execution_date = astimezone(self._execution_date, self.dag.timezone)
+
+        self.node: ExecutorNode = init_dataclass_from_dict(ExecutorNode, api_response.model_dump(), dag=self.dag)
+        self.node.variable = self.init_variables()
+
+    def init_variables(self):
+        variables = self.process_variables(
+            self.node.variable,
+            self.node.job_variable,
+            self.execution_date,
+            self.dag.schedule_interval,
+            self.dag.timezone,
+        )
+
+        # set project id to environment variable
+        os.environ[PROJECT_ID_KEY] = str(self.dag.project_id)
+        # append airflow dag run conf to variables
+        airflow_dag_run_conf = os.environ.get("RECURVE__JOB_RUN_CONF")
+        if airflow_dag_run_conf:
+            variables["job_run_conf"] = json.loads(airflow_dag_run_conf)
+
+        return variables
+
+    @classmethod
+    def process_variables(
+        cls,
+        variables: dict,
+        override_variables: dict,
+        execution_date: str,
+        schedule_interval: str,
+        timezone: str | None = None,
+    ) -> dict:
+        """Process and merge variables from different sources with proper overrides.
+
+        Args:
+            variables: Base variables dict
+            override_variables: Variables that should override base variables
+            execution_date: Execution date string
+            schedule_interval: Schedule interval string
+            timezone: Optional timezone string, defaults to local timezone
+
+        Returns:
+            dict: Processed and merged variables
+
+        The processing order is:
+        1. Process normal variables first (in case they reference each other)
+        2. Extract python code variables using processed normal variables
+        3. Override with job variables last since they take precedence
+        """
+        valid_var_types = set(member.value for member in VariableType.__members__.values())
+        timezone = timezone or tz_local
+
+        def process_typed_value(val_type: str, val_value):
+            processors = {
+                "DATETIME": lambda x: astimezone(x, timezone),
+                "DATE": lambda x: astimezone(x, timezone).date(),
+                "JSON": json.loads,
+            }
+            return processors.get(val_type, lambda x: x)(val_value)
+
+        # Split variables by type
+        normal_vars = {}
+        python_code_vars = {}
+
+        for var_dict in (variables, override_variables):
+            if not var_dict:
+                continue
+
+            for key, value in var_dict.items():
+                var_value, var_type = value["value"], value["type"]
+
+                if var_type == VariableType.PYTHON_CODE:
+                    python_code_vars[key] = var_value
+                elif var_type not in valid_var_types:
+                    raise ValueError(f"Invalid variable type {var_type} for {key}")
+                else:
+                    normal_vars[key] = process_typed_value(var_type, var_value)
+
+        # Process variables in order
+        renderer = Renderer()
+        processed_normal_vars = renderer.render_variables(normal_vars, execution_date, schedule_interval)
+
+        job_vars = {}
+        if override_variables:
+            job_vars = {key: processed_normal_vars.get(key, value) for key, value in override_variables.items()}
+
+        # Merge all variables with proper precedence
+        final_vars = processed_normal_vars.copy()
+
+        if python_code_vars:
+            python_vars = cls._process_python_code_variable(
+                python_code_vars, final_vars, execution_date, schedule_interval
+            )
+            final_vars.update(python_vars)
+
+        final_vars.update(job_vars)  # Job variables take highest precedence
+
+        return final_vars
+
+    @classmethod
+    def _process_python_code_variable(
+        cls, python_code_variables: dict, new_variables: dict, execution_date: str, schedule_interval: str
+    ) -> dict:
+        r = Renderer()
+        extracted_variables = {}
+        for name, code in python_code_variables.items():
+            if code is None:
+                continue
+            tmp_extracted_variables = r.extract_python_code_variable(
+                python_code=code,
+                exist_variables=new_variables,
+                execution_date=execution_date,
+                schedule_interval=schedule_interval,
+            )
+            extracted_variables.update(tmp_extracted_variables)
+        return extracted_variables
+
+    def run(self):
+        if self.node.operator == "DBTOperator":
+            self.run_impl()
+            return
+        with OutputInterceptor(handler=self._send_logs) as interceptor:
+            setup_log_handler(interceptor)
+            self.run_impl()
+
+    def run_impl(self):
+        logger.info(f"Recurve Executor start run {self.job_id}.{self.node_id}, {self.node.operator}")
+        operator = self.init_operator()
+        operator.execute()
+        logger.info(f"Recurve Executor finish run {self.job_id}.{self.node_id}, {self.node.operator}")
+
+    def _send_logs(self, message: str):
+        self.client.send_back_logs(TaskLogRecord.init(self.job_id, logs=[message]))
+
+    @staticmethod
+    def _get_hostname():
+        return socket.gethostname()
+
+    @staticmethod
+    def _get_pid() -> int:
+        return os.getpid()
+
+    def set_link_settings(self, link_workflow_id: int = None, link_node_id: int = None, is_link_workflow: bool = False):
+        self.node.link_settings = {
+            "workflow_id": link_workflow_id,
+            "node_id": link_node_id,
+            "is_link_workflow": is_link_workflow,
+        }
+
+    def init_operator(self) -> "BaseOperator":
+        op_cls = get_operator_class(self.node.operator)
+        operator = op_cls(self.dag, self.node, self.execution_date, variables=self.node.variable)
+        self.set_operator_execution_date(operator)
+        return operator
+
+    def set_operator_execution_date(self, operator: "BaseOperator"):
+        if "execution_date" not in self.node.variable:
+            return
+        new_execution_date = astimezone(self.node.variable["execution_date"], self.dag.timezone)
+        operator.set_execution_date(new_execution_date)
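
The slug formats from the docstring parse straightforwardly ("dag.123" yields job_id 123; "node.45-extract" yields node_id 45). The merge semantics of process_variables are worth a sketch (the values below are hypothetical, and Renderer.render_variables additionally expands template references between variables, which this sketch glosses over):

from recurvedata.executors.executor import Executor

base = {
    "run_date": {"value": "2024-01-01", "type": "DATE"},
    "limit": {"value": 100, "type": "INT"},
}
overrides = {
    "limit": {"value": 500, "type": "INT"},  # job variable, takes precedence
}

merged = Executor.process_variables(
    base,
    overrides,
    execution_date="2024-01-01T00:00:00",
    schedule_interval="0 0 * * *",
)
# merged["limit"] == 500 (the override wins); merged["run_date"] becomes a
# datetime.date localized to the given (or local) timezone. Note that only
# DATETIME, DATE, and JSON values are converted here; INT/FLOAT/BOOLEAN pass
# through unchanged, unlike the string converters in consts.py.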
recurvedata/executors/link_executor.py
@@ -0,0 +1,189 @@
+"""
+used in RecurveLinkNode
+invokes a specific node
+
+"""
+
+import datetime
+import logging
+
+from recurvedata.consts import Operator
+from recurvedata.executors.client import ExecutorClient
+from recurvedata.executors.executor import Executor
+from recurvedata.executors.models import ExecutorDag, ExecutorNode
+from recurvedata.executors.schemas import WorkflowNodeItem
+from recurvedata.executors.utils import convert_var_value_from_string, get_variable_type_by_value, update_meta_file
+from recurvedata.operators.task import BaseTask
+from recurvedata.utils.dataclass import init_dataclass_from_dict
+
+logger = logging.getLogger(__name__)
+
+
+class LinkExecutor(Executor):
+    """Executor for running linked workflow nodes.
+
+    The LinkExecutor allows executing a node from another workflow by linking to it.
+    It handles:
+
+    - Executing a node from a different workflow while maintaining the original DAG context
+    - Overriding task instance reporting to track the link relationship
+    - Preserving the original DAG ID and node ID for file generation
+    - Converting and passing custom variables between workflows
+    - Supporting both single node and full workflow linking
+
+    Args:
+        origin_dag (ExecutorDag): The original DAG containing the link node
+        origin_node (ExecutorNode): The original node that links to another workflow
+        execution_date (datetime): Execution timestamp for the run
+        link_workflow_id (int): ID of the workflow being linked to
+        link_node_id (int): ID of the specific node being linked to
+        link_workflow_name (str, optional): Name of the linked workflow
+        link_node_name (str, optional): Name of the linked node
+        link_node_key (str, optional): Key identifier for the linked node
+        custom_variables (dict, optional): Variables to pass to the linked node
+        is_link_workflow (bool, optional): Whether linking to a full workflow vs single node
+    """
+
+    def __init__(
+        self,
+        origin_dag: ExecutorDag,
+        origin_node: ExecutorNode,
+        execution_date: datetime.datetime,
+        link_workflow_id: int,
+        link_node_id: int,
+        link_workflow_name: str = None,
+        link_node_name: str = None,
+        link_node_key: str = None,
+        custom_variables: dict = None,
+        is_link_workflow: bool = False,
+    ):
+        self.origin_dag = origin_dag
+        self.origin_node = origin_node
+        self.link_workflow_name = link_workflow_name
+        self.link_node_name = link_node_name
+        self.link_node_key = link_node_key
+        self.link_workflow_id = link_workflow_id
+        self.link_node_id = link_node_id
+        self.custom_variables = custom_variables
+        self.is_link_workflow = is_link_workflow
+
+        # execution_date is passed from LinkOperator, which has been converted to origin_dag's timezone
+        # self.execution_date = astimezone(execution_date, tz_local)
+        self.execution_date = execution_date
+
+        self.client: ExecutorClient = ExecutorClient()
+        self.job_id = origin_dag.dag_id  # used in get_connection_by_name
+        self.project_id = origin_dag.project_id
+
+        self.dag: ExecutorDag = None
+        self.node: ExecutorNode = None
+        self.init_dag_node()
+        self.register_context()
+
+    def _init_task_instance_on_task_start(self, task: BaseTask):
+        if self.is_link_workflow:  # todo: use scheduler?
+            task_id = f"{self.origin_node.node_key}.{task.node.node_key}"
+        else:
+            task_id = f"{self.origin_node.node_key}"
+        update_meta_file(
+            self.origin_dag.id,
+            task_id,
+            task.execution_date,
+            {
+                "operator": Operator.LinkOperator,
+                "task": "LinkTask",
+                "link_operator": task.node.operator,
+                "link_task": task.__class__.__name__,
+                "link_workflow_id": self.link_workflow_id,
+                "link_workflow_version": self.dag.workflow_version,
+            },
+        )
+
+    def _prepare_task_end_payload(self) -> dict:
+        payload = super()._prepare_task_end_payload()
+        payload.update(
+            {
+                "link_node_id": self.link_node_id,
+                "node_id": self.origin_node.id,
+                "link_workflow_id": self.link_workflow_id,
+            }
+        )
+        return payload
+
+    def init_dag_node(self):
+        """Initialize the DAG and Node objects for the linked workflow execution.
+
+        Fetches workflow node data from API, creates ExecutorDag using origin DAG properties,
+        initializes ExecutorNode, and sets up variables.
+        """
+        logger.info(f"Initializing DAG node for workflow {self.link_workflow_id}, node {self.link_node_id}")
+
+        # Fetch workflow node data from API
+        api_response: WorkflowNodeItem = self.client.get_workflow_node(self.link_workflow_id, self.link_node_id)
+
+        # Create ExecutorDag using origin DAG properties
+        self.dag: ExecutorDag = ExecutorDag(
+            id=self.origin_dag.id,  # Use origin_dag id for TransferOp filename generation
+            project_id=self.project_id,
+            name=api_response.workflow_name,
+            scheduler_type=self.origin_dag.scheduler_type,
+            schedule_interval=self.origin_dag.schedule_interval,
+            timezone=self.origin_dag.timezone,
+            owner=self.origin_dag.owner,
+            workflow_version=api_response.workflow_version,
+        )
+
+        # Initialize ExecutorNode from API response
+        self.node: ExecutorNode = init_dataclass_from_dict(ExecutorNode, api_response.model_dump(), dag=self.dag)
+
+        # Process and set variables
+        self.custom_variables = self.process_custom_variable_type(self.custom_variables)
+        self.node.job_variable = self.custom_variables
+        self.node.variable = self.init_variables()
+
+    def process_custom_variable_type(self, variables: dict) -> dict:
+        """Process and convert custom variable types from string to their proper types.
+
+        The frontend sends all variable values as strings, so we need to convert them to
+        their proper types based on either:
+        1. The variable type defined in the node's variables
+        2. The inferred type from python code variables
+        3. Keep original value if variable no longer exists in workflow
+
+        Args:
+            variables: Dictionary of variables to process
+
+        Returns:
+            Dictionary with variables converted to their proper types
+        """
+        if not variables:
+            return variables
+
+        new_variables = {}
+        # Get current workflow variables to check types and python code vars
+        workflow_vars = self.init_variables()
+
+        for name, value in variables.items():
+            # Check if variable exists in node variables
+            if name in self.node.variable:
+                val_type = self.node.variable[name]["type"]
+
+            # Check if it's a python code variable
+            elif name in workflow_vars:
+                val_type = get_variable_type_by_value(workflow_vars[name])
+
+            # Variable no longer exists in workflow, keep as-is
+            else:
+                new_variables[name] = value
+                continue
+
+            # Convert string value to proper type
+            new_variables[name] = convert_var_value_from_string(val_type, value)
+
+        return new_variables
+
+    def run(self):
+        logger.info(f"Recurve Link Executor start run {self.dag.name}.{self.node.name} {self.node.operator}")
+        operator = self.init_operator()
+        operator.execute()
+        logger.info(f"Recurve Link Executor finish run {self.dag.name}.{self.node.name}, {self.node.operator}")
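
Because the frontend submits every custom variable as a string, process_custom_variable_type restores the declared types before execution. A conceptual sketch of the mapping (values hypothetical; convert_var_value_from_string from recurvedata.executors.utils is assumed to dispatch on the declared type, much like VAR_CONVERT_STRING_FUNCS in consts.py above):

custom_variables = {"limit": "500", "flag": "true", "gone": "keep-me"}

# If the linked node declares limit as INT and flag as BOOLEAN, the executor
# produces typed values; "gone" is no longer defined in the workflow, so its
# string value passes through unchanged (branch 3 in the docstring):
# {"limit": 500, "flag": True, "gone": "keep-me"}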
recurvedata/executors/models.py
@@ -0,0 +1,34 @@
+from dataclasses import dataclass
+from typing import Any
+
+from recurvedata.connectors.service import PigeonDataSource as DataSource  # noqa
+from recurvedata.operators import get_operator_class
+from recurvedata.operators.models import DagBase, NodeBase
+
+
+@dataclass
+class ExecutorDag(DagBase):
+    project_id: int
+    workflow_id: int = None
+    workflow_version: str = None
+    workflow_name: str = None
+
+    @property
+    def dag_id(self):
+        return self.id
+
+
+@dataclass
+class ExecutorNode(NodeBase):
+    dag: ExecutorDag
+    operator: str
+    config: dict
+    variable: dict[str, Any]
+    job_variable: dict[str, Any] = None
+    stage: str = None
+    link_settings: dict[str, Any] = None  # RecurveLink related settings
+
+    @property
+    def configuration(self):
+        op_cls = get_operator_class(self.operator)
+        return op_cls.ui_config_to_config(self.config)
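
A minimal construction sketch (field values are hypothetical, and DagBase/NodeBase contribute further fields not visible in this diff, such as id, name, and the scheduling fields used in executor.py, so the exact required arguments may differ):

from recurvedata.executors.models import ExecutorDag, ExecutorNode

dag = ExecutorDag(id=1, name="daily-job", project_id=42, workflow_id=7)
node = ExecutorNode(
    dag=dag,
    operator="SQLOperator",  # hypothetical operator name
    config={"sql": "select 1"},
    variable={},
)
dag.dag_id          # 1, an alias for the inherited DagBase.id
node.configuration  # config normalized via the operator's ui_config_to_config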