apache-airflow-providers-google 14.0.0__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/3rd-party-licenses/LICENSES.txt +14 -0
- airflow/providers/google/3rd-party-licenses/NOTICE +5 -0
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/_vendor/__init__.py +0 -0
- airflow/providers/google/_vendor/json_merge_patch.py +91 -0
- airflow/providers/google/ads/hooks/ads.py +52 -43
- airflow/providers/google/ads/operators/ads.py +2 -2
- airflow/providers/google/ads/transfers/ads_to_gcs.py +3 -19
- airflow/providers/google/assets/gcs.py +1 -11
- airflow/providers/google/cloud/_internal_client/secret_manager_client.py +3 -2
- airflow/providers/google/cloud/bundles/gcs.py +161 -0
- airflow/providers/google/cloud/hooks/alloy_db.py +2 -3
- airflow/providers/google/cloud/hooks/bigquery.py +195 -318
- airflow/providers/google/cloud/hooks/bigquery_dts.py +8 -8
- airflow/providers/google/cloud/hooks/bigtable.py +3 -2
- airflow/providers/google/cloud/hooks/cloud_batch.py +8 -9
- airflow/providers/google/cloud/hooks/cloud_build.py +6 -65
- airflow/providers/google/cloud/hooks/cloud_composer.py +292 -24
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_memorystore.py +4 -3
- airflow/providers/google/cloud/hooks/cloud_run.py +20 -11
- airflow/providers/google/cloud/hooks/cloud_sql.py +136 -64
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +35 -15
- airflow/providers/google/cloud/hooks/compute.py +7 -6
- airflow/providers/google/cloud/hooks/compute_ssh.py +7 -4
- airflow/providers/google/cloud/hooks/datacatalog.py +12 -3
- airflow/providers/google/cloud/hooks/dataflow.py +87 -242
- airflow/providers/google/cloud/hooks/dataform.py +9 -14
- airflow/providers/google/cloud/hooks/datafusion.py +7 -9
- airflow/providers/google/cloud/hooks/dataplex.py +13 -12
- airflow/providers/google/cloud/hooks/dataprep.py +2 -2
- airflow/providers/google/cloud/hooks/dataproc.py +76 -74
- airflow/providers/google/cloud/hooks/dataproc_metastore.py +4 -3
- airflow/providers/google/cloud/hooks/dlp.py +5 -4
- airflow/providers/google/cloud/hooks/gcs.py +144 -33
- airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
- airflow/providers/google/cloud/hooks/kms.py +3 -2
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -17
- airflow/providers/google/cloud/hooks/looker.py +6 -1
- airflow/providers/google/cloud/hooks/managed_kafka.py +227 -3
- airflow/providers/google/cloud/hooks/mlengine.py +7 -8
- airflow/providers/google/cloud/hooks/natural_language.py +3 -2
- airflow/providers/google/cloud/hooks/os_login.py +3 -2
- airflow/providers/google/cloud/hooks/pubsub.py +6 -6
- airflow/providers/google/cloud/hooks/secret_manager.py +105 -12
- airflow/providers/google/cloud/hooks/spanner.py +75 -10
- airflow/providers/google/cloud/hooks/speech_to_text.py +3 -2
- airflow/providers/google/cloud/hooks/stackdriver.py +18 -18
- airflow/providers/google/cloud/hooks/tasks.py +4 -3
- airflow/providers/google/cloud/hooks/text_to_speech.py +3 -2
- airflow/providers/google/cloud/hooks/translate.py +8 -17
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +8 -222
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +9 -15
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +33 -283
- airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +5 -12
- airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +6 -12
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +311 -10
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
- airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +7 -13
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +8 -12
- airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +6 -12
- airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +3 -2
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/hooks/video_intelligence.py +3 -2
- airflow/providers/google/cloud/hooks/vision.py +7 -7
- airflow/providers/google/cloud/hooks/workflows.py +4 -3
- airflow/providers/google/cloud/links/alloy_db.py +0 -46
- airflow/providers/google/cloud/links/base.py +77 -7
- airflow/providers/google/cloud/links/bigquery.py +0 -47
- airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
- airflow/providers/google/cloud/links/bigtable.py +0 -48
- airflow/providers/google/cloud/links/cloud_build.py +0 -73
- airflow/providers/google/cloud/links/cloud_functions.py +0 -33
- airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
- airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
- airflow/providers/google/cloud/links/cloud_sql.py +0 -33
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -46
- airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
- airflow/providers/google/cloud/links/compute.py +0 -58
- airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
- airflow/providers/google/cloud/links/datacatalog.py +23 -54
- airflow/providers/google/cloud/links/dataflow.py +0 -34
- airflow/providers/google/cloud/links/dataform.py +0 -64
- airflow/providers/google/cloud/links/datafusion.py +1 -90
- airflow/providers/google/cloud/links/dataplex.py +0 -154
- airflow/providers/google/cloud/links/dataprep.py +0 -24
- airflow/providers/google/cloud/links/dataproc.py +11 -89
- airflow/providers/google/cloud/links/datastore.py +0 -31
- airflow/providers/google/cloud/links/kubernetes_engine.py +11 -61
- airflow/providers/google/cloud/links/managed_kafka.py +11 -51
- airflow/providers/google/cloud/links/mlengine.py +0 -70
- airflow/providers/google/cloud/links/pubsub.py +0 -32
- airflow/providers/google/cloud/links/spanner.py +0 -33
- airflow/providers/google/cloud/links/stackdriver.py +0 -30
- airflow/providers/google/cloud/links/translate.py +17 -187
- airflow/providers/google/cloud/links/vertex_ai.py +28 -195
- airflow/providers/google/cloud/links/workflows.py +0 -52
- airflow/providers/google/cloud/log/gcs_task_handler.py +166 -118
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +14 -9
- airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
- airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
- airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
- airflow/providers/google/cloud/openlineage/facets.py +141 -40
- airflow/providers/google/cloud/openlineage/mixins.py +14 -13
- airflow/providers/google/cloud/openlineage/utils.py +19 -3
- airflow/providers/google/cloud/operators/alloy_db.py +76 -61
- airflow/providers/google/cloud/operators/bigquery.py +104 -667
- airflow/providers/google/cloud/operators/bigquery_dts.py +12 -12
- airflow/providers/google/cloud/operators/bigtable.py +38 -7
- airflow/providers/google/cloud/operators/cloud_base.py +22 -1
- airflow/providers/google/cloud/operators/cloud_batch.py +18 -18
- airflow/providers/google/cloud/operators/cloud_build.py +80 -36
- airflow/providers/google/cloud/operators/cloud_composer.py +157 -71
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_memorystore.py +74 -46
- airflow/providers/google/cloud/operators/cloud_run.py +39 -20
- airflow/providers/google/cloud/operators/cloud_sql.py +46 -61
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -14
- airflow/providers/google/cloud/operators/compute.py +18 -50
- airflow/providers/google/cloud/operators/datacatalog.py +167 -29
- airflow/providers/google/cloud/operators/dataflow.py +38 -15
- airflow/providers/google/cloud/operators/dataform.py +19 -7
- airflow/providers/google/cloud/operators/datafusion.py +43 -43
- airflow/providers/google/cloud/operators/dataplex.py +212 -126
- airflow/providers/google/cloud/operators/dataprep.py +1 -5
- airflow/providers/google/cloud/operators/dataproc.py +134 -207
- airflow/providers/google/cloud/operators/dataproc_metastore.py +102 -84
- airflow/providers/google/cloud/operators/datastore.py +22 -6
- airflow/providers/google/cloud/operators/dlp.py +24 -45
- airflow/providers/google/cloud/operators/functions.py +21 -14
- airflow/providers/google/cloud/operators/gcs.py +15 -12
- airflow/providers/google/cloud/operators/gen_ai.py +389 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +115 -106
- airflow/providers/google/cloud/operators/looker.py +1 -1
- airflow/providers/google/cloud/operators/managed_kafka.py +362 -40
- airflow/providers/google/cloud/operators/natural_language.py +5 -3
- airflow/providers/google/cloud/operators/pubsub.py +69 -21
- airflow/providers/google/cloud/operators/spanner.py +53 -45
- airflow/providers/google/cloud/operators/speech_to_text.py +5 -4
- airflow/providers/google/cloud/operators/stackdriver.py +5 -11
- airflow/providers/google/cloud/operators/tasks.py +6 -15
- airflow/providers/google/cloud/operators/text_to_speech.py +4 -3
- airflow/providers/google/cloud/operators/translate.py +46 -20
- airflow/providers/google/cloud/operators/translate_speech.py +4 -3
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +44 -34
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +34 -12
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +62 -53
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +75 -11
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +48 -12
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +16 -12
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +62 -14
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +35 -10
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
- airflow/providers/google/cloud/operators/video_intelligence.py +5 -3
- airflow/providers/google/cloud/operators/vision.py +7 -5
- airflow/providers/google/cloud/operators/workflows.py +24 -19
- airflow/providers/google/cloud/secrets/secret_manager.py +2 -1
- airflow/providers/google/cloud/sensors/bigquery.py +2 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +6 -4
- airflow/providers/google/cloud/sensors/bigtable.py +14 -6
- airflow/providers/google/cloud/sensors/cloud_composer.py +535 -33
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +6 -5
- airflow/providers/google/cloud/sensors/dataflow.py +27 -10
- airflow/providers/google/cloud/sensors/dataform.py +2 -2
- airflow/providers/google/cloud/sensors/datafusion.py +4 -4
- airflow/providers/google/cloud/sensors/dataplex.py +7 -5
- airflow/providers/google/cloud/sensors/dataprep.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc.py +10 -9
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +4 -3
- airflow/providers/google/cloud/sensors/gcs.py +22 -21
- airflow/providers/google/cloud/sensors/looker.py +5 -5
- airflow/providers/google/cloud/sensors/pubsub.py +20 -20
- airflow/providers/google/cloud/sensors/tasks.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
- airflow/providers/google/cloud/sensors/workflows.py +6 -4
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +14 -13
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
- airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
- airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
- airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +18 -22
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +4 -5
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +45 -38
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
- airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
- airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +44 -12
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
- airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +36 -14
- airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
- airflow/providers/google/cloud/triggers/bigquery.py +75 -34
- airflow/providers/google/cloud/triggers/bigquery_dts.py +2 -1
- airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
- airflow/providers/google/cloud/triggers/cloud_build.py +3 -2
- airflow/providers/google/cloud/triggers/cloud_composer.py +303 -47
- airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +96 -5
- airflow/providers/google/cloud/triggers/dataflow.py +125 -2
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataplex.py +16 -3
- airflow/providers/google/cloud/triggers/dataproc.py +124 -53
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +46 -28
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +17 -20
- airflow/providers/google/cloud/triggers/vertex_ai.py +8 -7
- airflow/providers/google/cloud/utils/bigquery.py +5 -7
- airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +4 -3
- airflow/providers/google/cloud/utils/dataform.py +1 -1
- airflow/providers/google/cloud/utils/external_token_supplier.py +0 -1
- airflow/providers/google/cloud/utils/field_validator.py +1 -2
- airflow/providers/google/cloud/utils/validators.py +43 -0
- airflow/providers/google/common/auth_backend/google_openid.py +26 -9
- airflow/providers/google/common/consts.py +2 -1
- airflow/providers/google/common/deprecated.py +2 -1
- airflow/providers/google/common/hooks/base_google.py +40 -43
- airflow/providers/google/common/hooks/operation_helpers.py +78 -0
- airflow/providers/google/common/links/storage.py +0 -22
- airflow/providers/google/common/utils/get_secret.py +31 -0
- airflow/providers/google/common/utils/id_token_credentials.py +4 -5
- airflow/providers/google/firebase/operators/firestore.py +2 -2
- airflow/providers/google/get_provider_info.py +61 -216
- airflow/providers/google/go_module_utils.py +35 -3
- airflow/providers/google/leveldb/hooks/leveldb.py +30 -6
- airflow/providers/google/leveldb/operators/leveldb.py +2 -2
- airflow/providers/google/marketing_platform/hooks/analytics_admin.py +3 -2
- airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
- airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
- airflow/providers/google/marketing_platform/links/analytics_admin.py +4 -5
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +7 -6
- airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
- airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
- airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
- airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
- airflow/providers/google/marketing_platform/sensors/display_video.py +4 -64
- airflow/providers/google/suite/hooks/calendar.py +1 -1
- airflow/providers/google/suite/hooks/drive.py +2 -2
- airflow/providers/google/suite/hooks/sheets.py +15 -1
- airflow/providers/google/suite/operators/sheets.py +8 -3
- airflow/providers/google/suite/sensors/drive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
- airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
- airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
- airflow/providers/google/version_compat.py +15 -1
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +117 -72
- apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +1 -1
- apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
- airflow/providers/google/cloud/example_dags/example_cloud_task.py +0 -54
- airflow/providers/google/cloud/hooks/automl.py +0 -679
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/automl.py +0 -193
- airflow/providers/google/cloud/operators/automl.py +0 -1360
- airflow/providers/google/cloud/operators/life_sciences.py +0 -119
- airflow/providers/google/cloud/operators/mlengine.py +0 -1515
- airflow/providers/google/cloud/utils/mlengine_operator_utils.py +0 -273
- apache_airflow_providers_google-14.0.0.dist-info/RECORD +0 -318
- /airflow/providers/google/cloud/{example_dags → bundles}/__init__.py +0 -0
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0

airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py

@@ -24,6 +24,7 @@ from typing import TYPE_CHECKING, Any

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
     COUNTERS,
     METADATA,
@@ -35,10 +36,9 @@ from airflow.providers.google.cloud.triggers.cloud_storage_transfer_service impo
     CloudStorageTransferServiceCheckJobStatusTrigger,
 )
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
@@ -98,6 +98,7 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
         self.deferrable = deferrable

     def poke(self, context: Context) -> bool:
+        ti = context["ti"]
         hook = CloudDataTransferServiceHook(
             gcp_conn_id=self.gcp_cloud_conn_id,
             impersonation_chain=self.impersonation_chain,
@@ -113,13 +114,12 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
             operations=operations, expected_statuses=self.expected_statuses
         )
         if check:
-
+            ti.xcom_push(key="sensed_operations", value=operations)

         project_id = self.project_id or hook.project_id
         if project_id:
             CloudStorageTransferJobLink.persist(
                 context=context,
-                task_instance=self,
                 project_id=project_id,
                 job_name=self.job_name,
             )
@@ -154,4 +154,5 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
         if event["status"] == "error":
             raise AirflowException(event["message"])

-
+        ti = context["ti"]
+        ti.xcom_push(key="sensed_operations", value=event["operations"])
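
Both the synchronous and the deferrable completion paths above still expose the matched transfer operations under the "sensed_operations" XCom key; only the push mechanism changed to the task instance taken from the context. A minimal usage sketch follows; the DAG, job name, and project values are illustrative assumptions, not taken from the diff.

from datetime import datetime

from airflow.decorators import dag, task
from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import GcpTransferOperationStatus
from airflow.providers.google.cloud.sensors.cloud_storage_transfer_service import (
    CloudDataTransferServiceJobStatusSensor,
)


@dag(schedule=None, start_date=datetime(2024, 1, 1), catchup=False)
def transfer_job_watch():
    wait_for_job = CloudDataTransferServiceJobStatusSensor(
        task_id="wait_for_transfer_job",
        job_name="transferJobs/example-job",  # assumed job name
        expected_statuses={GcpTransferOperationStatus.SUCCESS},
        project_id="example-project",  # assumed project
    )

    @task
    def report(ti=None):
        # Reads the value the sensor pushed under key "sensed_operations".
        operations = ti.xcom_pull(task_ids="wait_for_transfer_job", key="sensed_operations")
        print(f"Sensed {len(operations or [])} transfer operations")

    wait_for_job >> report()


transfer_job_watch()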

airflow/providers/google/cloud/sensors/dataflow.py

@@ -19,12 +19,13 @@

 from __future__ import annotations

-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator, PokeReturnValue
 from airflow.providers.google.cloud.hooks.dataflow import (
     DEFAULT_DATAFLOW_LOCATION,
     DataflowHook,
@@ -37,10 +38,9 @@ from airflow.providers.google.cloud.triggers.dataflow import (
     DataflowJobStatusTrigger,
 )
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class DataflowJobStatusSensor(BaseSensorOperator):
@@ -117,7 +117,7 @@ class DataflowJobStatusSensor(BaseSensorOperator):

         if job_status in self.expected_statuses:
             return True
-
+        if job_status in DataflowJobStatus.TERMINAL_STATES:
             message = f"Job with id '{self.job_id}' is already in terminal state: {job_status}"
             raise AirflowException(message)

@@ -342,7 +342,7 @@ class DataflowJobMessagesSensor(BaseSensorOperator):
         self.deferrable = deferrable
         self.poll_interval = poll_interval

-    def poke(self, context: Context) -> bool:
+    def poke(self, context: Context) -> PokeReturnValue | bool:
         if self.fail_on_terminal_state:
             job = self.hook.get_job(
                 job_id=self.job_id,
@@ -359,8 +359,17 @@
             project_id=self.project_id,
             location=self.location,
         )
+        result = result if self.callback is None else self.callback(result)
+
+        if isinstance(result, PokeReturnValue):
+            return result

-
+        if bool(result):
+            return PokeReturnValue(
+                is_done=True,
+                xcom_value=result,
+            )
+        return False

     def execute(self, context: Context) -> Any:
         """Airflow runs this method on the worker and defers using the trigger."""
@@ -464,7 +473,7 @@ class DataflowJobAutoScalingEventsSensor(BaseSensorOperator):
         self.deferrable = deferrable
         self.poll_interval = poll_interval

-    def poke(self, context: Context) -> bool:
+    def poke(self, context: Context) -> PokeReturnValue | bool:
         if self.fail_on_terminal_state:
             job = self.hook.get_job(
                 job_id=self.job_id,
@@ -481,8 +490,16 @@
             project_id=self.project_id,
             location=self.location,
         )
-
-
+        result = result if self.callback is None else self.callback(result)
+        if isinstance(result, PokeReturnValue):
+            return result
+
+        if bool(result):
+            return PokeReturnValue(
+                is_done=True,
+                xcom_value=result,
+            )
+        return False

     def execute(self, context: Context) -> Any:
         """Airflow runs this method on the worker and defers using the trigger."""
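
With PokeReturnValue in play, the message and autoscaling-event sensors can hand control of both completion and the XCom payload to the user-supplied callback: a callback that returns a PokeReturnValue is used as-is, while any other truthy result is wrapped in PokeReturnValue(is_done=True, xcom_value=result). A minimal sketch of a callback written against this contract follows; the task id, job id, and location are assumed example values.

from airflow.providers.common.compat.sdk import PokeReturnValue
from airflow.providers.google.cloud.sensors.dataflow import DataflowJobMessagesSensor


def summarize_messages(messages: list) -> PokeReturnValue:
    # Finish as soon as any job message was fetched, but push only a small
    # summary to XCom instead of the full message payload.
    return PokeReturnValue(is_done=bool(messages), xcom_value={"message_count": len(messages)})


wait_for_messages = DataflowJobMessagesSensor(
    task_id="wait_for_dataflow_messages",
    job_id="example-dataflow-job-id",  # assumed job id
    location="us-central1",
    callback=summarize_messages,
    fail_on_terminal_state=True,
)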

airflow/providers/google/cloud/sensors/dataform.py

@@ -23,11 +23,11 @@ from collections.abc import Iterable, Sequence
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.dataform import DataformHook
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class DataformWorkflowInvocationStateSensor(BaseSensorOperator):

airflow/providers/google/cloud/sensors/datafusion.py

@@ -23,12 +23,12 @@ from collections.abc import Iterable, Sequence
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException, AirflowNotFoundException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.datafusion import DataFusionHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class CloudDataFusionPipelineStateSensor(BaseSensorOperator):
@@ -110,7 +110,7 @@ class CloudDataFusionPipelineStateSensor(BaseSensorOperator):
                 pipeline_id=self.pipeline_id,
                 namespace=self.namespace,
             )
-            pipeline_status = pipeline_workflow
+            pipeline_status = pipeline_workflow.get("status")
         except AirflowNotFoundException:
             message = "Specified Pipeline ID was not found."
             raise AirflowException(message)
@@ -127,4 +127,4 @@ class CloudDataFusionPipelineStateSensor(BaseSensorOperator):
         self.log.debug(
             "Current status of the pipeline workflow for %s: %s.", self.pipeline_id, pipeline_status
         )
-        return pipeline_status in self.expected_statuses
+        return pipeline_status is not None and pipeline_status in self.expected_statuses

airflow/providers/google/cloud/sensors/dataplex.py

@@ -23,19 +23,21 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
     from google.api_core.retry import Retry

+    from airflow.providers.common.compat.sdk import Context
+
+from google.api_core.exceptions import GoogleAPICallError
+from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
+from google.cloud.dataplex_v1.types import DataScanJob
+
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.dataplex import (
     AirflowDataQualityScanException,
     AirflowDataQualityScanResultTimeoutException,
     DataplexHook,
 )
-from airflow.sensors.base import BaseSensorOperator
-from google.api_core.exceptions import GoogleAPICallError
-from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
-from google.cloud.dataplex_v1.types import DataScanJob


 class TaskState:

airflow/providers/google/cloud/sensors/dataprep.py

@@ -22,11 +22,11 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.dataprep import GoogleDataprepHook, JobGroupStatuses
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class DataprepJobGroupIsFinishedSensor(BaseSensorOperator):

airflow/providers/google/cloud/sensors/dataproc.py

@@ -23,15 +23,16 @@ import time
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

+from google.api_core.exceptions import ServerError
+from google.cloud.dataproc_v1.types import Batch, JobStatus
+
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.dataproc import DataprocHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
-from google.api_core.exceptions import ServerError
-from google.cloud.dataproc_v1.types import Batch, JobStatus

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class DataprocJobSensor(BaseSensorOperator):
@@ -99,17 +100,17 @@ class DataprocJobSensor(BaseSensorOperator):
         if state == JobStatus.State.ERROR:
             message = f"Job failed:\n{job}"
             raise AirflowException(message)
-
+        if state in {
             JobStatus.State.CANCELLED,
             JobStatus.State.CANCEL_PENDING,
             JobStatus.State.CANCEL_STARTED,
         }:
             message = f"Job was cancelled:\n{job}"
             raise AirflowException(message)
-
+        if state == JobStatus.State.DONE:
             self.log.debug("Job %s completed successfully.", self.dataproc_job_id)
             return True
-
+        if state == JobStatus.State.ATTEMPT_FAILURE:
             self.log.debug("Job %s attempt has failed.", self.dataproc_job_id)

         self.log.info("Waiting for job %s to complete.", self.dataproc_job_id)
@@ -178,13 +179,13 @@ class DataprocBatchSensor(BaseSensorOperator):
         if state == Batch.State.FAILED:
             message = "Batch failed"
             raise AirflowException(message)
-
+        if state in {
             Batch.State.CANCELLED,
             Batch.State.CANCELLING,
         }:
             message = "Batch was cancelled."
             raise AirflowException(message)
-
+        if state == Batch.State.SUCCEEDED:
             self.log.debug("Batch %s completed successfully.", self.batch_id)
             return True


airflow/providers/google/cloud/sensors/dataproc_metastore.py

@@ -21,14 +21,15 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.dataproc_metastore import DataprocMetastoreHook
 from airflow.providers.google.cloud.hooks.gcs import parse_json_from_gcs
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
     from google.api_core.operation import Operation

+    from airflow.providers.common.compat.sdk import Context
+

 class MetastoreHivePartitionSensor(BaseSensorOperator):
     """
@@ -111,7 +112,7 @@ class MetastoreHivePartitionSensor(BaseSensorOperator):

         # Extract actual query results
         result_base_uri = result_manifest_uri.rsplit("/", 1)[0]
-        results = (f"{result_base_uri}
+        results = (f"{result_base_uri}/{filename}" for filename in manifest.get("filenames", []))
         found_partitions = sum(
             len(
                 parse_json_from_gcs(

airflow/providers/google/cloud/sensors/gcs.py

@@ -21,12 +21,15 @@ from __future__ import annotations

 import os
 import textwrap
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any
+
+from google.cloud.storage.retry import DEFAULT_RETRY

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator, poke_mode_only
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.cloud.triggers.gcs import (
     GCSBlobTrigger,
@@ -34,13 +37,12 @@ from airflow.providers.google.cloud.triggers.gcs import (
     GCSPrefixBlobTrigger,
     GCSUploadSessionTrigger,
 )
-from airflow.sensors.base import BaseSensorOperator, poke_mode_only
-from google.cloud.storage.retry import DEFAULT_RETRY

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
     from google.api_core.retry import Retry

+    from airflow.providers.common.compat.sdk import Context
+

 class GCSObjectExistenceSensor(BaseSensorOperator):
     """
@@ -304,23 +306,22 @@ class GCSObjectsWithPrefixExistenceSensor(BaseSensorOperator):
         if not self.deferrable:
             super().execute(context)
             return self._matches
+        if not self.poke(context=context):
+            self.defer(
+                timeout=timedelta(seconds=self.timeout),
+                trigger=GCSPrefixBlobTrigger(
+                    bucket=self.bucket,
+                    prefix=self.prefix,
+                    poke_interval=self.poke_interval,
+                    google_cloud_conn_id=self.google_cloud_conn_id,
+                    hook_params={
+                        "impersonation_chain": self.impersonation_chain,
+                    },
+                ),
+                method_name="execute_complete",
+            )
         else:
-
-            self.defer(
-                timeout=timedelta(seconds=self.timeout),
-                trigger=GCSPrefixBlobTrigger(
-                    bucket=self.bucket,
-                    prefix=self.prefix,
-                    poke_interval=self.poke_interval,
-                    google_cloud_conn_id=self.google_cloud_conn_id,
-                    hook_params={
-                        "impersonation_chain": self.impersonation_chain,
-                    },
-                ),
-                method_name="execute_complete",
-            )
-            else:
-                return self._matches
+            return self._matches

     def execute_complete(self, context: dict[str, Any], event: dict[str, str | list[str]]) -> str | list[str]:
         """Return immediately and rely on trigger to throw a success event. Callback for the trigger."""
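
The execute path above is only restructured: with deferrable=True the sensor still pokes once up front and defers to GCSPrefixBlobTrigger only when no matching object exists yet. A minimal usage sketch, with bucket and prefix as assumed example values:

from airflow.providers.google.cloud.sensors.gcs import GCSObjectsWithPrefixExistenceSensor

wait_for_exports = GCSObjectsWithPrefixExistenceSensor(
    task_id="wait_for_exports",
    bucket="example-bucket",       # assumed bucket
    prefix="exports/2024-01-01/",  # assumed prefix
    deferrable=True,
    poke_interval=60,
)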

airflow/providers/google/cloud/sensors/looker.py

@@ -22,11 +22,11 @@ from __future__ import annotations
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.looker import JobStatus, LookerHook
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class LookerCheckPdtBuildSensor(BaseSensorOperator):
@@ -65,13 +65,13 @@ class LookerCheckPdtBuildSensor(BaseSensorOperator):
             msg = status_dict["message"]
             message = f'PDT materialization job failed. Job id: {self.materialization_id}. Message:\n"{msg}"'
             raise AirflowException(message)
-
+        if status == JobStatus.CANCELLED.value:
             message = f"PDT materialization job was cancelled. Job id: {self.materialization_id}."
             raise AirflowException(message)
-
+        if status == JobStatus.UNKNOWN.value:
             message = f"PDT materialization job has unknown status. Job id: {self.materialization_id}."
             raise AirflowException(message)
-
+        if status == JobStatus.DONE.value:
             self.log.debug(
                 "PDT materialization job completed successfully. Job id: %s.", self.materialization_id
             )

airflow/providers/google/cloud/sensors/pubsub.py

@@ -19,20 +19,21 @@

 from __future__ import annotations

-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any
+
+from google.cloud import pubsub_v1
+from google.cloud.pubsub_v1.types import ReceivedMessage

 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.pubsub import PubSubHook
 from airflow.providers.google.cloud.triggers.pubsub import PubsubPullTrigger
-from airflow.sensors.base import BaseSensorOperator
-from google.cloud import pubsub_v1
-from google.cloud.pubsub_v1.types import ReceivedMessage

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class PubSubMessageTransformException(AirflowException):
@@ -167,20 +168,19 @@ class PubSubPullSensor(BaseSensorOperator):
         if not self.deferrable:
            super().execute(context)
            return self._return_value
-
-        self.
-
-
-
-
-
-
-
-
-
-
-        )
+        self.defer(
+            timeout=timedelta(seconds=self.timeout),
+            trigger=PubsubPullTrigger(
+                project_id=self.project_id,
+                subscription=self.subscription,
+                max_messages=self.max_messages,
+                ack_messages=self.ack_messages,
+                poke_interval=self.poke_interval,
+                gcp_conn_id=self.gcp_conn_id,
+                impersonation_chain=self.impersonation_chain,
+            ),
+            method_name="execute_complete",
+        )

     def execute_complete(self, context: Context, event: dict[str, str | list[str]]) -> Any:
         """If messages_callback is provided, execute it; otherwise, return immediately with trigger event message."""
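
Functionally the deferrable branch is unchanged; it now calls self.defer() directly after the non-deferrable early return instead of sitting inside an else block. A usage sketch with assumed project and subscription names:

from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor

wait_for_pubsub = PubSubPullSensor(
    task_id="wait_for_pubsub_messages",
    project_id="example-project",         # assumed project
    subscription="example-subscription",  # assumed subscription
    max_messages=5,
    ack_messages=True,
    deferrable=True,
)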

airflow/providers/google/cloud/sensors/tasks.py

@@ -22,12 +22,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class TaskQueueEmptySensor(BaseSensorOperator):

airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py

@@ -24,11 +24,11 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.vertex_ai.feature_store import FeatureStoreHook
-from airflow.sensors.base import BaseSensorOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class FeatureViewSyncSensor(BaseSensorOperator):

airflow/providers/google/cloud/sensors/workflows.py

@@ -19,17 +19,19 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

+from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
+from google.cloud.workflows.executions_v1beta import Execution
+
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
-from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
-from google.cloud.workflows.executions_v1beta import Execution

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
     from google.api_core.retry import Retry

+    from airflow.providers.common.compat.sdk import Context
+

 class WorkflowExecutionSensor(BaseSensorOperator):
     """

airflow/providers/google/cloud/transfers/adls_to_gcs.py

@@ -35,7 +35,7 @@ except ModuleNotFoundError as e:
     raise AirflowOptionalProviderFeatureException(e)

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class ADLSToGCSOperator(ADLSListOperator):

airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py

@@ -21,8 +21,8 @@ import tempfile
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator

 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
@@ -32,7 +32,7 @@ except ModuleNotFoundError as e:
     raise AirflowOptionalProviderFeatureException(e)

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class AzureBlobStorageToGCSOperator(BaseOperator):

airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py

@@ -23,8 +23,8 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING

 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url, gcs_object_is_directory
+from airflow.providers.google.version_compat import BaseOperator

 try:
     from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook
@@ -34,7 +34,7 @@ except ModuleNotFoundError as e:
     raise AirflowOptionalProviderFeatureException(e)

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class AzureFileShareToGCSOperator(BaseOperator):

airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py

@@ -22,12 +22,13 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING

-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.cloud.links.bigquery import BigQueryTableLink
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
+from airflow.providers.google.version_compat import BaseOperator

 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context


 class BigQueryToBigQueryOperator(BaseOperator):
@@ -73,6 +74,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
+    :param project_id: Google Cloud Project where the job is running
     """

     template_fields: Sequence[str] = (
@@ -93,6 +95,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         write_disposition: str = "WRITE_EMPTY",
         create_disposition: str = "CREATE_IF_NEEDED",
         gcp_conn_id: str = "google_cloud_default",
+        project_id: str = PROVIDE_PROJECT_ID,
         labels: dict | None = None,
         encryption_configuration: dict | None = None,
         location: str | None = None,
@@ -112,6 +115,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         self.impersonation_chain = impersonation_chain
         self.hook: BigQueryHook | None = None
         self._job_conf: dict = {}
+        self.project_id = project_id

     def _prepare_job_configuration(self):
         self.source_project_dataset_tables = (
@@ -124,7 +128,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         for source_project_dataset_table in self.source_project_dataset_tables:
             source_project, source_dataset, source_table = self.hook.split_tablename(
                 table_input=source_project_dataset_table,
-                default_project_id=self.
+                default_project_id=self.project_id,
                 var_name="source_project_dataset_table",
             )
             source_project_dataset_tables_fixup.append(
@@ -133,7 +137,7 @@ class BigQueryToBigQueryOperator(BaseOperator):

         destination_project, destination_dataset, destination_table = self.hook.split_tablename(
             table_input=self.destination_project_dataset_table,
-            default_project_id=self.
+            default_project_id=self.project_id,
         )
         configuration = {
             "copy": {
@@ -168,18 +172,17 @@ class BigQueryToBigQueryOperator(BaseOperator):
             impersonation_chain=self.impersonation_chain,
         )

-        if not self.
-
+        if not self.project_id:
+            self.project_id = self.hook.project_id

         configuration = self._prepare_job_configuration()
         self._job_conf = self.hook.insert_job(
-            configuration=configuration, project_id=self.
+            configuration=configuration, project_id=self.project_id
         ).to_api_repr()

         dest_table_info = self._job_conf["configuration"]["copy"]["destinationTable"]
         BigQueryTableLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dest_table_info["datasetId"],
             project_id=dest_table_info["projectId"],
             table_id=dest_table_info["tableId"],