apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/ads/hooks/ads.py +39 -5
- airflow/providers/google/ads/operators/ads.py +2 -2
- airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
- airflow/providers/google/assets/gcs.py +1 -11
- airflow/providers/google/cloud/bundles/__init__.py +16 -0
- airflow/providers/google/cloud/bundles/gcs.py +161 -0
- airflow/providers/google/cloud/hooks/bigquery.py +166 -281
- airflow/providers/google/cloud/hooks/cloud_composer.py +287 -14
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_run.py +17 -9
- airflow/providers/google/cloud/hooks/cloud_sql.py +101 -22
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +27 -6
- airflow/providers/google/cloud/hooks/compute_ssh.py +5 -1
- airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
- airflow/providers/google/cloud/hooks/dataflow.py +71 -94
- airflow/providers/google/cloud/hooks/datafusion.py +1 -1
- airflow/providers/google/cloud/hooks/dataplex.py +1 -1
- airflow/providers/google/cloud/hooks/dataprep.py +1 -1
- airflow/providers/google/cloud/hooks/dataproc.py +72 -71
- airflow/providers/google/cloud/hooks/gcs.py +111 -14
- airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/hooks/looker.py +6 -1
- airflow/providers/google/cloud/hooks/mlengine.py +3 -2
- airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
- airflow/providers/google/cloud/hooks/spanner.py +73 -8
- airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
- airflow/providers/google/cloud/hooks/translate.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -209
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -2
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +27 -1
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/hooks/vision.py +2 -2
- airflow/providers/google/cloud/hooks/workflows.py +1 -1
- airflow/providers/google/cloud/links/alloy_db.py +0 -46
- airflow/providers/google/cloud/links/base.py +77 -13
- airflow/providers/google/cloud/links/bigquery.py +0 -47
- airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
- airflow/providers/google/cloud/links/bigtable.py +0 -48
- airflow/providers/google/cloud/links/cloud_build.py +0 -73
- airflow/providers/google/cloud/links/cloud_functions.py +0 -33
- airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
- airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
- airflow/providers/google/cloud/links/cloud_sql.py +0 -33
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
- airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
- airflow/providers/google/cloud/links/compute.py +0 -58
- airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
- airflow/providers/google/cloud/links/datacatalog.py +23 -54
- airflow/providers/google/cloud/links/dataflow.py +0 -34
- airflow/providers/google/cloud/links/dataform.py +0 -64
- airflow/providers/google/cloud/links/datafusion.py +1 -96
- airflow/providers/google/cloud/links/dataplex.py +0 -154
- airflow/providers/google/cloud/links/dataprep.py +0 -24
- airflow/providers/google/cloud/links/dataproc.py +11 -95
- airflow/providers/google/cloud/links/datastore.py +0 -31
- airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
- airflow/providers/google/cloud/links/managed_kafka.py +0 -70
- airflow/providers/google/cloud/links/mlengine.py +0 -70
- airflow/providers/google/cloud/links/pubsub.py +0 -32
- airflow/providers/google/cloud/links/spanner.py +0 -33
- airflow/providers/google/cloud/links/stackdriver.py +0 -30
- airflow/providers/google/cloud/links/translate.py +17 -187
- airflow/providers/google/cloud/links/vertex_ai.py +28 -195
- airflow/providers/google/cloud/links/workflows.py +0 -52
- airflow/providers/google/cloud/log/gcs_task_handler.py +17 -9
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
- airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
- airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
- airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
- airflow/providers/google/cloud/openlineage/facets.py +102 -1
- airflow/providers/google/cloud/openlineage/mixins.py +10 -8
- airflow/providers/google/cloud/openlineage/utils.py +15 -1
- airflow/providers/google/cloud/operators/alloy_db.py +70 -55
- airflow/providers/google/cloud/operators/bigquery.py +73 -636
- airflow/providers/google/cloud/operators/bigquery_dts.py +3 -5
- airflow/providers/google/cloud/operators/bigtable.py +36 -7
- airflow/providers/google/cloud/operators/cloud_base.py +21 -1
- airflow/providers/google/cloud/operators/cloud_batch.py +2 -2
- airflow/providers/google/cloud/operators/cloud_build.py +75 -32
- airflow/providers/google/cloud/operators/cloud_composer.py +128 -40
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
- airflow/providers/google/cloud/operators/cloud_run.py +23 -5
- airflow/providers/google/cloud/operators/cloud_sql.py +8 -16
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -11
- airflow/providers/google/cloud/operators/compute.py +8 -40
- airflow/providers/google/cloud/operators/datacatalog.py +157 -21
- airflow/providers/google/cloud/operators/dataflow.py +38 -15
- airflow/providers/google/cloud/operators/dataform.py +15 -5
- airflow/providers/google/cloud/operators/datafusion.py +41 -20
- airflow/providers/google/cloud/operators/dataplex.py +193 -109
- airflow/providers/google/cloud/operators/dataprep.py +1 -5
- airflow/providers/google/cloud/operators/dataproc.py +78 -35
- airflow/providers/google/cloud/operators/dataproc_metastore.py +96 -88
- airflow/providers/google/cloud/operators/datastore.py +22 -6
- airflow/providers/google/cloud/operators/dlp.py +6 -29
- airflow/providers/google/cloud/operators/functions.py +16 -7
- airflow/providers/google/cloud/operators/gcs.py +10 -8
- airflow/providers/google/cloud/operators/gen_ai.py +389 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +60 -99
- airflow/providers/google/cloud/operators/looker.py +1 -1
- airflow/providers/google/cloud/operators/managed_kafka.py +107 -52
- airflow/providers/google/cloud/operators/natural_language.py +1 -1
- airflow/providers/google/cloud/operators/pubsub.py +60 -14
- airflow/providers/google/cloud/operators/spanner.py +25 -12
- airflow/providers/google/cloud/operators/speech_to_text.py +1 -2
- airflow/providers/google/cloud/operators/stackdriver.py +1 -9
- airflow/providers/google/cloud/operators/tasks.py +1 -12
- airflow/providers/google/cloud/operators/text_to_speech.py +1 -2
- airflow/providers/google/cloud/operators/translate.py +40 -16
- airflow/providers/google/cloud/operators/translate_speech.py +1 -2
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +29 -9
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +54 -26
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +11 -9
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +30 -7
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
- airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
- airflow/providers/google/cloud/operators/vision.py +2 -2
- airflow/providers/google/cloud/operators/workflows.py +18 -15
- airflow/providers/google/cloud/sensors/bigquery.py +2 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -2
- airflow/providers/google/cloud/sensors/bigtable.py +11 -4
- airflow/providers/google/cloud/sensors/cloud_composer.py +533 -29
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -2
- airflow/providers/google/cloud/sensors/dataflow.py +26 -9
- airflow/providers/google/cloud/sensors/dataform.py +2 -2
- airflow/providers/google/cloud/sensors/datafusion.py +4 -4
- airflow/providers/google/cloud/sensors/dataplex.py +2 -2
- airflow/providers/google/cloud/sensors/dataprep.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -2
- airflow/providers/google/cloud/sensors/gcs.py +4 -4
- airflow/providers/google/cloud/sensors/looker.py +2 -2
- airflow/providers/google/cloud/sensors/pubsub.py +4 -4
- airflow/providers/google/cloud/sensors/tasks.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
- airflow/providers/google/cloud/sensors/workflows.py +2 -2
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -4
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
- airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
- airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
- airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +20 -12
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
- airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
- airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
- airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +13 -4
- airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
- airflow/providers/google/cloud/triggers/bigquery.py +75 -34
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
- airflow/providers/google/cloud/triggers/cloud_composer.py +302 -46
- airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +91 -1
- airflow/providers/google/cloud/triggers/dataflow.py +122 -0
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataplex.py +14 -2
- airflow/providers/google/cloud/triggers/dataproc.py +122 -52
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +15 -19
- airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
- airflow/providers/google/cloud/utils/field_validator.py +1 -2
- airflow/providers/google/common/auth_backend/google_openid.py +4 -4
- airflow/providers/google/common/deprecated.py +2 -1
- airflow/providers/google/common/hooks/base_google.py +27 -8
- airflow/providers/google/common/links/storage.py +0 -22
- airflow/providers/google/common/utils/get_secret.py +31 -0
- airflow/providers/google/common/utils/id_token_credentials.py +3 -4
- airflow/providers/google/firebase/operators/firestore.py +2 -2
- airflow/providers/google/get_provider_info.py +56 -52
- airflow/providers/google/go_module_utils.py +35 -3
- airflow/providers/google/leveldb/hooks/leveldb.py +26 -1
- airflow/providers/google/leveldb/operators/leveldb.py +2 -2
- airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
- airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -2
- airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
- airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
- airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
- airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
- airflow/providers/google/marketing_platform/sensors/display_video.py +3 -63
- airflow/providers/google/suite/hooks/calendar.py +1 -1
- airflow/providers/google/suite/hooks/sheets.py +15 -1
- airflow/providers/google/suite/operators/sheets.py +8 -3
- airflow/providers/google/suite/sensors/drive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
- airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
- airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
- airflow/providers/google/version_compat.py +15 -1
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +92 -48
- apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
- apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
- airflow/providers/google/cloud/hooks/automl.py +0 -673
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/automl.py +0 -193
- airflow/providers/google/cloud/operators/automl.py +0 -1362
- airflow/providers/google/cloud/operators/life_sciences.py +0 -119
- airflow/providers/google/cloud/operators/mlengine.py +0 -112
- apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0

airflow/providers/google/cloud/links/vertex_ai.py

@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
 if TYPE_CHECKING:
-    from airflow.
+    from airflow.providers.common.compat.sdk import Context
 
 VERTEX_AI_BASE_LINK = "/vertex-ai"
 VERTEX_AI_MODEL_LINK = (
@@ -54,6 +54,10 @@ VERTEX_AI_PIPELINE_JOB_LINK = (
     VERTEX_AI_BASE_LINK + "/locations/{region}/pipelines/runs/{pipeline_id}?project={project_id}"
 )
 VERTEX_AI_PIPELINE_JOB_LIST_LINK = VERTEX_AI_BASE_LINK + "/pipelines/runs?project={project_id}"
+VERTEX_AI_RAY_CLUSTER_LINK = (
+    VERTEX_AI_BASE_LINK + "/locations/{location}/ray-clusters/{cluster_id}?project={project_id}"
+)
+VERTEX_AI_RAY_CLUSTER_LIST_LINK = VERTEX_AI_BASE_LINK + "/ray?project={project_id}"
 
 
 class VertexAIModelLink(BaseGoogleLink):
@@ -63,22 +67,6 @@ class VertexAIModelLink(BaseGoogleLink):
     key = "model_conf"
     format_str = VERTEX_AI_MODEL_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-        model_id: str,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIModelLink.key,
-            value={
-                "model_id": model_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIModelListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Models Link."""
@@ -87,19 +75,6 @@ class VertexAIModelListLink(BaseGoogleLink):
     key = "models_conf"
     format_str = VERTEX_AI_MODEL_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIModelListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIModelExportLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Model Export Link."""
@@ -113,19 +88,15 @@ class VertexAIModelExportLink(BaseGoogleLink):
         """Return bucket name from output configuration."""
         return config["artifact_destination"]["output_uri_prefix"].rpartition("gs://")[-1]
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
+    @classmethod
+    def persist(cls, context: Context, **value):
+        output_config = value.get("output_config")
+        bucket_name = cls.extract_bucket_name(output_config)
+        super().persist(
             context=context,
-            key=VertexAIModelExportLink.key,
-            value={
-                "project_id": task_instance.project_id,
-                "model_id": task_instance.model_id,
-                "bucket_name": VertexAIModelExportLink.extract_bucket_name(task_instance.output_config),
-            },
+            project_id=value.get("project_id"),
+            model_id=value.get("model_id"),
+            bucket_name=bucket_name,
         )
 
 
@@ -136,22 +107,6 @@ class VertexAITrainingLink(BaseGoogleLink):
     key = "training_conf"
     format_str = VERTEX_AI_TRAINING_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-        training_id: str,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAITrainingLink.key,
-            value={
-                "training_id": training_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAITrainingPipelinesLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Training Pipelines link."""
@@ -160,19 +115,6 @@ class VertexAITrainingPipelinesLink(BaseGoogleLink):
     key = "pipelines_conf"
     format_str = VERTEX_AI_TRAINING_PIPELINES_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAITrainingPipelinesLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIDatasetLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Dataset link."""
@@ -181,18 +123,6 @@ class VertexAIDatasetLink(BaseGoogleLink):
     key = "dataset_conf"
     format_str = VERTEX_AI_DATASET_LINK
 
-    @staticmethod
-    def persist(context: Context, task_instance, dataset_id: str):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIDatasetLink.key,
-            value={
-                "dataset_id": dataset_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIDatasetListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Datasets Link."""
@@ -201,19 +131,6 @@ class VertexAIDatasetListLink(BaseGoogleLink):
     key = "datasets_conf"
     format_str = VERTEX_AI_DATASET_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIDatasetListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIHyperparameterTuningJobListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI HyperparameterTuningJobs Link."""
@@ -222,19 +139,6 @@ class VertexAIHyperparameterTuningJobListLink(BaseGoogleLink):
     key = "hyperparameter_tuning_jobs_conf"
     format_str = VERTEX_AI_HYPERPARAMETER_TUNING_JOB_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIHyperparameterTuningJobListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIBatchPredictionJobLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI BatchPredictionJob link."""
@@ -243,22 +147,6 @@ class VertexAIBatchPredictionJobLink(BaseGoogleLink):
     key = "batch_prediction_job_conf"
     format_str = VERTEX_AI_BATCH_PREDICTION_JOB_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-        batch_prediction_job_id: str,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIBatchPredictionJobLink.key,
-            value={
-                "batch_prediction_job_id": batch_prediction_job_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIBatchPredictionJobListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI BatchPredictionJobList link."""
@@ -267,19 +155,6 @@ class VertexAIBatchPredictionJobListLink(BaseGoogleLink):
     key = "batch_prediction_jobs_conf"
     format_str = VERTEX_AI_BATCH_PREDICTION_JOB_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIBatchPredictionJobListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIEndpointLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI Endpoint link."""
@@ -288,22 +163,6 @@ class VertexAIEndpointLink(BaseGoogleLink):
     key = "endpoint_conf"
     format_str = VERTEX_AI_ENDPOINT_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-        endpoint_id: str,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIEndpointLink.key,
-            value={
-                "endpoint_id": endpoint_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIEndpointListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI EndpointList link."""
@@ -312,19 +171,6 @@ class VertexAIEndpointListLink(BaseGoogleLink):
     key = "endpoints_conf"
     format_str = VERTEX_AI_ENDPOINT_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIEndpointListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIPipelineJobLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI PipelineJob link."""
@@ -333,22 +179,6 @@ class VertexAIPipelineJobLink(BaseGoogleLink):
     key = "pipeline_job_conf"
     format_str = VERTEX_AI_PIPELINE_JOB_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-        pipeline_id: str,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIPipelineJobLink.key,
-            value={
-                "pipeline_id": pipeline_id,
-                "region": task_instance.region,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class VertexAIPipelineJobListLink(BaseGoogleLink):
     """Helper class for constructing Vertex AI PipelineJobList link."""
@@ -357,15 +187,18 @@ class VertexAIPipelineJobListLink(BaseGoogleLink):
     key = "pipeline_job_list_conf"
     format_str = VERTEX_AI_PIPELINE_JOB_LIST_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=VertexAIPipelineJobListLink.key,
-            value={
-                "project_id": task_instance.project_id,
-            },
-        )
+
+class VertexAIRayClusterLink(BaseGoogleLink):
+    """Helper class for constructing Vertex AI Ray Cluster link."""
+
+    name = "Ray Cluster"
+    key = "ray_cluster_conf"
+    format_str = VERTEX_AI_RAY_CLUSTER_LINK
+
+
+class VertexAIRayClusterListLink(BaseGoogleLink):
+    """Helper class for constructing Vertex AI Ray Cluster List link."""
+
+    name = "Ray Cluster List"
+    key = "ray_cluster_list_conf"
+    format_str = VERTEX_AI_RAY_CLUSTER_LIST_LINK
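
These hunks drop the per-class `@staticmethod persist` helpers and leave only `key`/`format_str` (plus the new Ray cluster links); link fields are now pushed through the shared `persist` classmethod on `BaseGoogleLink` (see `links/base.py +77 -13` in the file list and the `super().persist(...)` call in the `VertexAIModelExportLink` hunk). A minimal sketch of how an operator could populate the new Ray cluster link under that pattern; the operator itself is hypothetical, and the keyword names simply mirror the placeholders in `VERTEX_AI_RAY_CLUSTER_LINK`:

```python
from airflow.models import BaseOperator
from airflow.providers.google.cloud.links.vertex_ai import VertexAIRayClusterLink


class CreateRayClusterExampleOperator(BaseOperator):
    """Hypothetical operator, shown only to illustrate the new persist call."""

    operator_extra_links = (VertexAIRayClusterLink(),)

    def __init__(self, *, project_id: str, location: str, **kwargs) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location

    def execute(self, context):
        cluster_id = "my-ray-cluster"  # placeholder for the ID returned by the Vertex AI SDK
        # Keyword names match the placeholders in VERTEX_AI_RAY_CLUSTER_LINK:
        # "/locations/{location}/ray-clusters/{cluster_id}?project={project_id}"
        VertexAIRayClusterLink.persist(
            context=context,
            location=self.location,
            cluster_id=cluster_id,
            project_id=self.project_id,
        )
```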

airflow/providers/google/cloud/links/workflows.py

@@ -19,14 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
-if TYPE_CHECKING:
-    from airflow.models import BaseOperator
-    from airflow.utils.context import Context
-
 WORKFLOWS_BASE_LINK = "/workflows"
 WORKFLOW_LINK = WORKFLOWS_BASE_LINK + "/workflow/{location_id}/{workflow_id}/executions?project={project_id}"
 WORKFLOWS_LINK = WORKFLOWS_BASE_LINK + "?project={project_id}"
@@ -43,20 +37,6 @@ class WorkflowsWorkflowDetailsLink(BaseGoogleLink):
     key = "workflow_details"
     format_str = WORKFLOW_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        location_id: str,
-        workflow_id: str,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=WorkflowsWorkflowDetailsLink.key,
-            value={"location_id": location_id, "workflow_id": workflow_id, "project_id": project_id},
-        )
-
 
 class WorkflowsListOfWorkflowsLink(BaseGoogleLink):
     """Helper class for constructing list of Workflows Link."""
@@ -65,18 +45,6 @@ class WorkflowsListOfWorkflowsLink(BaseGoogleLink):
     key = "list_of_workflows"
     format_str = WORKFLOWS_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=WorkflowsListOfWorkflowsLink.key,
-            value={"project_id": project_id},
-        )
-
 
 class WorkflowsExecutionLink(BaseGoogleLink):
     """Helper class for constructing Workflows Execution Link."""
@@ -84,23 +52,3 @@ class WorkflowsExecutionLink(BaseGoogleLink):
     name = "Workflow Execution"
     key = "workflow_execution"
     format_str = EXECUTION_LINK
-
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        location_id: str,
-        workflow_id: str,
-        execution_id: str,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=WorkflowsExecutionLink.key,
-            value={
-                "location_id": location_id,
-                "workflow_id": workflow_id,
-                "execution_id": execution_id,
-                "project_id": project_id,
-            },
-        )

airflow/providers/google/cloud/log/gcs_task_handler.py

@@ -27,8 +27,8 @@ from typing import TYPE_CHECKING
 
 import attrs
 
-#
-
+# Make mypy happy by importing as aliases
+import google.cloud.storage as storage
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowNotFoundException
@@ -61,13 +61,15 @@ class GCSRemoteLogIO(LoggingMixin):  # noqa: D101
     remote_base: str
     base_log_folder: Path = attrs.field(converter=Path)
     delete_local_copy: bool
+    project_id: str | None = None
 
-    gcp_key_path: str | None
-    gcp_keyfile_dict: dict | None
-    scopes: Collection[str] | None
-    project_id: str
+    gcp_key_path: str | None = None
+    gcp_keyfile_dict: dict | None = None
+    scopes: Collection[str] | None = _DEFAULT_SCOPESS
 
-
+    processors = ()
+
+    def upload(self, path: os.PathLike | str, ti: RuntimeTI):
         """Upload the given log path to the remote storage."""
         path = Path(path)
         if path.is_absolute():
@@ -211,9 +213,15 @@ class GCSTaskHandler(FileTaskHandler, LoggingMixin):
         gcp_keyfile_dict: dict | None = None,
         gcp_scopes: Collection[str] | None = _DEFAULT_SCOPESS,
         project_id: str = PROVIDE_PROJECT_ID,
+        max_bytes: int = 0,
+        backup_count: int = 0,
+        delay: bool = False,
         **kwargs,
-    ):
-
+    ) -> None:
+        # support log file size handling of FileTaskHandler
+        super().__init__(
+            base_log_folder=base_log_folder, max_bytes=max_bytes, backup_count=backup_count, delay=delay
+        )
         self.handler: logging.FileHandler | None = None
         self.log_relative_path = ""
         self.closed = False
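
The `GCSTaskHandler.__init__` hunk adds `max_bytes`, `backup_count` and `delay` and forwards them to `FileTaskHandler.__init__`, so the local log file can be size-rotated before it is uploaded to GCS. A rough sketch of passing these through when instantiating the handler; the bucket and folder values are placeholders, and `gcs_log_folder` is assumed to be the handler's pre-existing argument rather than something introduced in this diff:

```python
from airflow.providers.google.cloud.log.gcs_task_handler import GCSTaskHandler

# Placeholder values throughout; only max_bytes, backup_count and delay are new in this diff.
handler = GCSTaskHandler(
    base_log_folder="/opt/airflow/logs",          # local folder handled by FileTaskHandler
    gcs_log_folder="gs://my-airflow-logs/logs",   # assumed pre-existing argument, hypothetical bucket
    max_bytes=10 * 1024 * 1024,                   # rotate the local file at roughly 10 MiB
    backup_count=3,                               # keep three rotated local files
    delay=True,                                   # open the local file lazily
)
```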

airflow/providers/google/cloud/log/stackdriver_task_handler.py

@@ -35,17 +35,20 @@ from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id
 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
-
+
+try:
+    from airflow.sdk.definitions._internal.types import NOTSET, ArgNotSet
+except ImportError:
+    from airflow.utils.types import NOTSET, ArgNotSet  # type: ignore[attr-defined,no-redef]
+
+if not AIRFLOW_V_3_0_PLUS:
+    from airflow.utils.log.trigger_handler import ctx_indiv_trigger
 
 if TYPE_CHECKING:
     from google.auth.credentials import Credentials
 
     from airflow.models import TaskInstance
 
-
-if not AIRFLOW_V_3_0_PLUS:
-    from airflow.utils.log.trigger_handler import ctx_indiv_trigger
-
 DEFAULT_LOGGER_NAME = "airflow"
 _GLOBAL_RESOURCE = Resource(type="global", labels={})
 
@@ -159,7 +162,7 @@ class StackdriverTaskHandler(logging.Handler):
         """Object responsible for sending data to Stackdriver."""
         # The Transport object is badly defined (no init) but in the docs client/name as constructor
         # arguments are a requirement for any class that derives from Transport class, hence ignore:
-        return self.transport_type(self._client, self.gcp_log_name)
+        return self.transport_type(self._client, self.gcp_log_name)
 
     def _get_labels(self, task_instance=None):
         if task_instance:

airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json

@@ -0,0 +1,68 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "CloudStorageTransferJobFacet": {
+      "allOf": [
+        {
+          "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/JobFacet"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "jobName": {
+              "type": "string",
+              "description": "Transfer job name assigned by GCP Storage Transfer Service."
+            },
+            "projectId": {
+              "type": "string",
+              "description": "GCP project ID."
+            },
+            "description": {
+              "type": "string",
+              "description": "Optional description of the transfer job."
+            },
+            "status": {
+              "type": "string",
+              "description": "Status of the transfer job (ENABLED, DISABLED)."
+            },
+            "sourceBucket": {
+              "type": "string",
+              "description": "Source AWS S3 bucket."
+            },
+            "sourcePath": {
+              "type": "string",
+              "description": "Prefix path inside the source bucket."
+            },
+            "targetBucket": {
+              "type": "string",
+              "description": "Target GCS bucket."
+            },
+            "targetPath": {
+              "type": "string",
+              "description": "Prefix path inside the target bucket."
+            },
+            "objectConditions": {
+              "type": "object",
+              "description": "Filtering conditions for objects transferred."
+            },
+            "transferOptions": {
+              "type": "object",
+              "description": "Transfer options such as overwrite or delete."
+            },
+            "schedule": {
+              "type": "object",
+              "description": "Transfer schedule details."
+            }
+          }
+        }
+      ],
+      "type": "object"
+    }
+  },
+  "type": "object",
+  "properties": {
+    "cloudStorageTransferJob": {
+      "$ref": "#/$defs/CloudStorageTransferJobFacet"
+    }
+  }
+}
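
For reference, a payload that validates against this schema could look like the Python dict below; every value is invented for illustration, and in an OpenLineage event it would sit under the `cloudStorageTransferJob` key defined in the top-level `properties`:

```python
# Hypothetical example facet; the schema only constrains the listed top-level fields,
# so the nested objectConditions/transferOptions/schedule objects are free-form here.
cloud_storage_transfer_job_facet = {
    "jobName": "transferJobs/123456789",
    "projectId": "my-gcp-project",
    "description": "Nightly S3 -> GCS sync",
    "status": "ENABLED",
    "sourceBucket": "my-s3-bucket",
    "sourcePath": "exports/",
    "targetBucket": "my-gcs-bucket",
    "targetPath": "imports/",
    "objectConditions": {"includePrefixes": ["exports/2024/"]},
    "transferOptions": {"overwriteObjectsAlreadyExistingInSink": True},
    "schedule": {"scheduleStartDate": {"year": 2024, "month": 1, "day": 1}},
}
```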

airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json

@@ -0,0 +1,60 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "CloudStorageTransferRunFacet": {
+      "allOf": [
+        {
+          "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunFacet"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "jobName": {
+              "type": "string",
+              "description": "Transfer job name associated with this run."
+            },
+            "operationName": {
+              "type": "string",
+              "description": "Transfer operation name if available."
+            },
+            "status": {
+              "type": "string",
+              "description": "Run status if available."
+            },
+            "startTime": {
+              "type": "string",
+              "description": "Start time of the transfer operation."
+            },
+            "endTime": {
+              "type": "string",
+              "description": "End time of the transfer operation."
+            },
+            "wait": {
+              "type": "boolean",
+              "description": "Whether the operator waited for completion."
+            },
+            "timeout": {
+              "type": ["number", "null"],
+              "description": "Timeout in seconds."
+            },
+            "deferrable": {
+              "type": "boolean",
+              "description": "Whether the operator used deferrable mode."
+            },
+            "deleteJobAfterCompletion": {
+              "type": "boolean",
+              "description": "Whether the transfer job was deleted after completion."
+            }
+          }
+        }
+      ],
+      "type": "object"
+    }
+  },
+  "type": "object",
+  "properties": {
+    "cloudStorageTransferRun": {
+      "$ref": "#/$defs/CloudStorageTransferRunFacet"
+    }
+  }
+}

airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json

@@ -0,0 +1,32 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "DataFusionRunFacet": {
+      "allOf": [
+        {
+          "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunFacet"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "runId": {
+              "type": "string",
+              "description": "Pipeline run ID assigned by Cloud Data Fusion."
+            },
+            "runtimeArgs": {
+              "type": "object",
+              "description": "Runtime arguments provided when starting the pipeline."
+            }
+          }
+        }
+      ],
+      "type": "object"
+    }
+  },
+  "type": "object",
+  "properties": {
+    "dataFusionRun": {
+      "$ref": "#/$defs/DataFusionRunFacet"
+    }
+  }
+}