apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (234) hide show
  1. airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
  2. airflow/providers/google/__init__.py +3 -3
  3. airflow/providers/google/ads/hooks/ads.py +39 -5
  4. airflow/providers/google/ads/operators/ads.py +2 -2
  5. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
  6. airflow/providers/google/assets/gcs.py +1 -11
  7. airflow/providers/google/cloud/bundles/__init__.py +16 -0
  8. airflow/providers/google/cloud/bundles/gcs.py +161 -0
  9. airflow/providers/google/cloud/hooks/bigquery.py +166 -281
  10. airflow/providers/google/cloud/hooks/cloud_composer.py +287 -14
  11. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  12. airflow/providers/google/cloud/hooks/cloud_run.py +17 -9
  13. airflow/providers/google/cloud/hooks/cloud_sql.py +101 -22
  14. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +27 -6
  15. airflow/providers/google/cloud/hooks/compute_ssh.py +5 -1
  16. airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
  17. airflow/providers/google/cloud/hooks/dataflow.py +71 -94
  18. airflow/providers/google/cloud/hooks/datafusion.py +1 -1
  19. airflow/providers/google/cloud/hooks/dataplex.py +1 -1
  20. airflow/providers/google/cloud/hooks/dataprep.py +1 -1
  21. airflow/providers/google/cloud/hooks/dataproc.py +72 -71
  22. airflow/providers/google/cloud/hooks/gcs.py +111 -14
  23. airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
  24. airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
  25. airflow/providers/google/cloud/hooks/looker.py +6 -1
  26. airflow/providers/google/cloud/hooks/mlengine.py +3 -2
  27. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  28. airflow/providers/google/cloud/hooks/spanner.py +73 -8
  29. airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
  30. airflow/providers/google/cloud/hooks/translate.py +1 -1
  31. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -209
  32. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -2
  33. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +27 -1
  34. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  35. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
  36. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
  37. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
  38. airflow/providers/google/cloud/hooks/vision.py +2 -2
  39. airflow/providers/google/cloud/hooks/workflows.py +1 -1
  40. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  41. airflow/providers/google/cloud/links/base.py +77 -13
  42. airflow/providers/google/cloud/links/bigquery.py +0 -47
  43. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  44. airflow/providers/google/cloud/links/bigtable.py +0 -48
  45. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  46. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  47. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  48. airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
  49. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  50. airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
  51. airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
  52. airflow/providers/google/cloud/links/compute.py +0 -58
  53. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  54. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  55. airflow/providers/google/cloud/links/dataflow.py +0 -34
  56. airflow/providers/google/cloud/links/dataform.py +0 -64
  57. airflow/providers/google/cloud/links/datafusion.py +1 -96
  58. airflow/providers/google/cloud/links/dataplex.py +0 -154
  59. airflow/providers/google/cloud/links/dataprep.py +0 -24
  60. airflow/providers/google/cloud/links/dataproc.py +11 -95
  61. airflow/providers/google/cloud/links/datastore.py +0 -31
  62. airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
  63. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  64. airflow/providers/google/cloud/links/mlengine.py +0 -70
  65. airflow/providers/google/cloud/links/pubsub.py +0 -32
  66. airflow/providers/google/cloud/links/spanner.py +0 -33
  67. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  68. airflow/providers/google/cloud/links/translate.py +17 -187
  69. airflow/providers/google/cloud/links/vertex_ai.py +28 -195
  70. airflow/providers/google/cloud/links/workflows.py +0 -52
  71. airflow/providers/google/cloud/log/gcs_task_handler.py +17 -9
  72. airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
  73. airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
  74. airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
  75. airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
  76. airflow/providers/google/cloud/openlineage/facets.py +102 -1
  77. airflow/providers/google/cloud/openlineage/mixins.py +10 -8
  78. airflow/providers/google/cloud/openlineage/utils.py +15 -1
  79. airflow/providers/google/cloud/operators/alloy_db.py +70 -55
  80. airflow/providers/google/cloud/operators/bigquery.py +73 -636
  81. airflow/providers/google/cloud/operators/bigquery_dts.py +3 -5
  82. airflow/providers/google/cloud/operators/bigtable.py +36 -7
  83. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  84. airflow/providers/google/cloud/operators/cloud_batch.py +2 -2
  85. airflow/providers/google/cloud/operators/cloud_build.py +75 -32
  86. airflow/providers/google/cloud/operators/cloud_composer.py +128 -40
  87. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  88. airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
  89. airflow/providers/google/cloud/operators/cloud_run.py +23 -5
  90. airflow/providers/google/cloud/operators/cloud_sql.py +8 -16
  91. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -11
  92. airflow/providers/google/cloud/operators/compute.py +8 -40
  93. airflow/providers/google/cloud/operators/datacatalog.py +157 -21
  94. airflow/providers/google/cloud/operators/dataflow.py +38 -15
  95. airflow/providers/google/cloud/operators/dataform.py +15 -5
  96. airflow/providers/google/cloud/operators/datafusion.py +41 -20
  97. airflow/providers/google/cloud/operators/dataplex.py +193 -109
  98. airflow/providers/google/cloud/operators/dataprep.py +1 -5
  99. airflow/providers/google/cloud/operators/dataproc.py +78 -35
  100. airflow/providers/google/cloud/operators/dataproc_metastore.py +96 -88
  101. airflow/providers/google/cloud/operators/datastore.py +22 -6
  102. airflow/providers/google/cloud/operators/dlp.py +6 -29
  103. airflow/providers/google/cloud/operators/functions.py +16 -7
  104. airflow/providers/google/cloud/operators/gcs.py +10 -8
  105. airflow/providers/google/cloud/operators/gen_ai.py +389 -0
  106. airflow/providers/google/cloud/operators/kubernetes_engine.py +60 -99
  107. airflow/providers/google/cloud/operators/looker.py +1 -1
  108. airflow/providers/google/cloud/operators/managed_kafka.py +107 -52
  109. airflow/providers/google/cloud/operators/natural_language.py +1 -1
  110. airflow/providers/google/cloud/operators/pubsub.py +60 -14
  111. airflow/providers/google/cloud/operators/spanner.py +25 -12
  112. airflow/providers/google/cloud/operators/speech_to_text.py +1 -2
  113. airflow/providers/google/cloud/operators/stackdriver.py +1 -9
  114. airflow/providers/google/cloud/operators/tasks.py +1 -12
  115. airflow/providers/google/cloud/operators/text_to_speech.py +1 -2
  116. airflow/providers/google/cloud/operators/translate.py +40 -16
  117. airflow/providers/google/cloud/operators/translate_speech.py +1 -2
  118. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
  119. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +29 -9
  120. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +54 -26
  121. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
  122. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
  123. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  124. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
  125. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
  126. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +11 -9
  127. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
  128. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +30 -7
  129. airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
  130. airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
  131. airflow/providers/google/cloud/operators/vision.py +2 -2
  132. airflow/providers/google/cloud/operators/workflows.py +18 -15
  133. airflow/providers/google/cloud/sensors/bigquery.py +2 -2
  134. airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -2
  135. airflow/providers/google/cloud/sensors/bigtable.py +11 -4
  136. airflow/providers/google/cloud/sensors/cloud_composer.py +533 -29
  137. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -2
  138. airflow/providers/google/cloud/sensors/dataflow.py +26 -9
  139. airflow/providers/google/cloud/sensors/dataform.py +2 -2
  140. airflow/providers/google/cloud/sensors/datafusion.py +4 -4
  141. airflow/providers/google/cloud/sensors/dataplex.py +2 -2
  142. airflow/providers/google/cloud/sensors/dataprep.py +2 -2
  143. airflow/providers/google/cloud/sensors/dataproc.py +2 -2
  144. airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -2
  145. airflow/providers/google/cloud/sensors/gcs.py +4 -4
  146. airflow/providers/google/cloud/sensors/looker.py +2 -2
  147. airflow/providers/google/cloud/sensors/pubsub.py +4 -4
  148. airflow/providers/google/cloud/sensors/tasks.py +2 -2
  149. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
  150. airflow/providers/google/cloud/sensors/workflows.py +2 -2
  151. airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
  152. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
  153. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
  154. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
  155. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -4
  156. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
  157. airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
  158. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
  159. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
  160. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  161. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -2
  162. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +3 -3
  163. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +20 -12
  164. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
  165. airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
  166. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
  167. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
  168. airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
  169. airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
  170. airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
  171. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  172. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
  173. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
  174. airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
  175. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
  176. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +13 -4
  177. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
  178. airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
  179. airflow/providers/google/cloud/triggers/bigquery.py +75 -34
  180. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  181. airflow/providers/google/cloud/triggers/cloud_composer.py +302 -46
  182. airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
  183. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +91 -1
  184. airflow/providers/google/cloud/triggers/dataflow.py +122 -0
  185. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  186. airflow/providers/google/cloud/triggers/dataplex.py +14 -2
  187. airflow/providers/google/cloud/triggers/dataproc.py +122 -52
  188. airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
  189. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  190. airflow/providers/google/cloud/triggers/pubsub.py +15 -19
  191. airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
  192. airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
  193. airflow/providers/google/cloud/utils/field_validator.py +1 -2
  194. airflow/providers/google/common/auth_backend/google_openid.py +4 -4
  195. airflow/providers/google/common/deprecated.py +2 -1
  196. airflow/providers/google/common/hooks/base_google.py +27 -8
  197. airflow/providers/google/common/links/storage.py +0 -22
  198. airflow/providers/google/common/utils/get_secret.py +31 -0
  199. airflow/providers/google/common/utils/id_token_credentials.py +3 -4
  200. airflow/providers/google/firebase/operators/firestore.py +2 -2
  201. airflow/providers/google/get_provider_info.py +56 -52
  202. airflow/providers/google/go_module_utils.py +35 -3
  203. airflow/providers/google/leveldb/hooks/leveldb.py +26 -1
  204. airflow/providers/google/leveldb/operators/leveldb.py +2 -2
  205. airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
  206. airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
  207. airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -2
  208. airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
  209. airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
  210. airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
  211. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
  212. airflow/providers/google/marketing_platform/sensors/display_video.py +3 -63
  213. airflow/providers/google/suite/hooks/calendar.py +1 -1
  214. airflow/providers/google/suite/hooks/sheets.py +15 -1
  215. airflow/providers/google/suite/operators/sheets.py +8 -3
  216. airflow/providers/google/suite/sensors/drive.py +2 -2
  217. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
  218. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  219. airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
  220. airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
  221. airflow/providers/google/version_compat.py +15 -1
  222. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +92 -48
  223. apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
  224. apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
  225. airflow/providers/google/cloud/hooks/automl.py +0 -673
  226. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  227. airflow/providers/google/cloud/links/automl.py +0 -193
  228. airflow/providers/google/cloud/operators/automl.py +0 -1362
  229. airflow/providers/google/cloud/operators/life_sciences.py +0 -119
  230. airflow/providers/google/cloud/operators/mlengine.py +0 -112
  231. apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
  232. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +0 -0
  233. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
  234. {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0
@@ -20,24 +20,34 @@
20
20
  from __future__ import annotations
21
21
 
22
22
  import json
23
- from collections.abc import Iterable, Sequence
23
+ from collections.abc import Collection, Iterable, Sequence
24
24
  from datetime import datetime, timedelta
25
25
  from functools import cached_property
26
26
  from typing import TYPE_CHECKING
27
27
 
28
28
  from dateutil import parser
29
+ from google.api_core.exceptions import NotFound
29
30
  from google.cloud.orchestration.airflow.service_v1.types import Environment, ExecuteAirflowCommandResponse
30
31
 
31
32
  from airflow.configuration import conf
32
33
  from airflow.exceptions import AirflowException
34
+ from airflow.providers.common.compat.sdk import AirflowSkipException, BaseSensorOperator
33
35
  from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook
34
- from airflow.providers.google.cloud.triggers.cloud_composer import CloudComposerDAGRunTrigger
36
+ from airflow.providers.google.cloud.triggers.cloud_composer import (
37
+ CloudComposerDAGRunTrigger,
38
+ CloudComposerExternalTaskTrigger,
39
+ )
35
40
  from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
36
- from airflow.sensors.base import BaseSensorOperator
37
- from airflow.utils.state import TaskInstanceState
41
+ from airflow.providers.standard.exceptions import (
42
+ DuplicateStateError,
43
+ ExternalDagFailedError,
44
+ ExternalTaskFailedError,
45
+ ExternalTaskGroupFailedError,
46
+ )
47
+ from airflow.utils.state import State, TaskInstanceState
38
48
 
39
49
  if TYPE_CHECKING:
40
- from airflow.utils.context import Context
50
+ from airflow.providers.common.compat.sdk import Context
41
51
 
42
52
 
43
53
  class CloudComposerDAGRunSensor(BaseSensorOperator):
@@ -56,6 +66,7 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
56
66
  Or [datetime(2024,3,22,0,0,0)] in this case sensor will check for states from specific time in the
57
67
  past till current time execution.
58
68
  Default value datetime.timedelta(days=1).
69
+ :param composer_dag_run_id: The Run ID of executable task. The 'execution_range' param is ignored, if both specified.
59
70
  :param gcp_conn_id: The connection ID to use when fetching connection info.
60
71
  :param impersonation_chain: Optional service account to impersonate using short-term
61
72
  credentials, or chained list of accounts required to get the access_token
@@ -86,10 +97,12 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
86
97
  composer_dag_id: str,
87
98
  allowed_states: Iterable[str] | None = None,
88
99
  execution_range: timedelta | list[datetime] | None = None,
100
+ composer_dag_run_id: str | None = None,
89
101
  gcp_conn_id: str = "google_cloud_default",
90
102
  impersonation_chain: str | Sequence[str] | None = None,
91
103
  deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
92
104
  poll_interval: int = 10,
105
+ use_rest_api: bool = False,
93
106
  **kwargs,
94
107
  ) -> None:
95
108
  super().__init__(**kwargs)
@@ -99,21 +112,35 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
99
112
  self.composer_dag_id = composer_dag_id
100
113
  self.allowed_states = list(allowed_states) if allowed_states else [TaskInstanceState.SUCCESS.value]
101
114
  self.execution_range = execution_range
115
+ self.composer_dag_run_id = composer_dag_run_id
102
116
  self.gcp_conn_id = gcp_conn_id
103
117
  self.impersonation_chain = impersonation_chain
104
118
  self.deferrable = deferrable
105
119
  self.poll_interval = poll_interval
120
+ self.use_rest_api = use_rest_api
121
+
122
+ if self.composer_dag_run_id and self.execution_range:
123
+ self.log.warning(
124
+ "The composer_dag_run_id parameter and execution_range parameter do not work together. This run will ignore execution_range parameter and count only specified composer_dag_run_id parameter."
125
+ )
106
126
 
107
127
  def _get_logical_dates(self, context) -> tuple[datetime, datetime]:
128
+ logical_date = context.get("logical_date", None)
129
+ if logical_date is None:
130
+ raise RuntimeError(
131
+ "logical_date is None. Please make sure the sensor is not used in an asset-triggered Dag. "
132
+ "CloudComposerDAGRunSensor was designed to be used in time-based scheduled Dags only, "
133
+ "and asset-triggered Dags do not have logical_date. "
134
+ )
108
135
  if isinstance(self.execution_range, timedelta):
109
136
  if self.execution_range < timedelta(0):
110
- return context["logical_date"], context["logical_date"] - self.execution_range
111
- return context["logical_date"] - self.execution_range, context["logical_date"]
137
+ return logical_date, logical_date - self.execution_range
138
+ return logical_date - self.execution_range, logical_date
112
139
  if isinstance(self.execution_range, list) and len(self.execution_range) > 0:
113
140
  return self.execution_range[0], self.execution_range[1] if len(
114
141
  self.execution_range
115
- ) > 1 else context["logical_date"]
116
- return context["logical_date"] - timedelta(1), context["logical_date"]
142
+ ) > 1 else logical_date
143
+ return logical_date - timedelta(1), logical_date
117
144
 
118
145
  def poke(self, context: Context) -> bool:
119
146
  start_date, end_date = self._get_logical_dates(context)
@@ -123,6 +150,20 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
123
150
 
124
151
  dag_runs = self._pull_dag_runs()
125
152
 
153
+ if len(dag_runs) == 0:
154
+ self.log.info("Dag runs are empty. Sensor waits for dag runs...")
155
+ return False
156
+
157
+ if self.composer_dag_run_id:
158
+ self.log.info(
159
+ "Sensor waits for allowed states %s for specified RunID: %s",
160
+ self.allowed_states,
161
+ self.composer_dag_run_id,
162
+ )
163
+ composer_dag_run_id_status = self._check_composer_dag_run_id_states(
164
+ dag_runs=dag_runs,
165
+ )
166
+ return composer_dag_run_id_status
126
167
  self.log.info("Sensor waits for allowed states: %s", self.allowed_states)
127
168
  allowed_states_status = self._check_dag_runs_states(
128
169
  dag_runs=dag_runs,
@@ -134,26 +175,51 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
134
175
 
135
176
  def _pull_dag_runs(self) -> list[dict]:
136
177
  """Pull the list of dag runs."""
137
- cmd_parameters = (
138
- ["-d", self.composer_dag_id, "-o", "json"]
139
- if self._composer_airflow_version < 3
140
- else [self.composer_dag_id, "-o", "json"]
141
- )
142
- dag_runs_cmd = self.hook.execute_airflow_command(
143
- project_id=self.project_id,
144
- region=self.region,
145
- environment_id=self.environment_id,
146
- command="dags",
147
- subcommand="list-runs",
148
- parameters=cmd_parameters,
149
- )
150
- cmd_result = self.hook.wait_command_execution_result(
151
- project_id=self.project_id,
152
- region=self.region,
153
- environment_id=self.environment_id,
154
- execution_cmd_info=ExecuteAirflowCommandResponse.to_dict(dag_runs_cmd),
155
- )
156
- dag_runs = json.loads(cmd_result["output"][0]["content"])
178
+ if self.use_rest_api:
179
+ try:
180
+ environment = self.hook.get_environment(
181
+ project_id=self.project_id,
182
+ region=self.region,
183
+ environment_id=self.environment_id,
184
+ timeout=self.timeout,
185
+ )
186
+ except NotFound as not_found_err:
187
+ self.log.info("The Composer environment %s does not exist.", self.environment_id)
188
+ raise AirflowException(not_found_err)
189
+ composer_airflow_uri = environment.config.airflow_uri
190
+
191
+ self.log.info(
192
+ "Pulling the DAG %s runs from the %s environment...",
193
+ self.composer_dag_id,
194
+ self.environment_id,
195
+ )
196
+ dag_runs_response = self.hook.get_dag_runs(
197
+ composer_airflow_uri=composer_airflow_uri,
198
+ composer_dag_id=self.composer_dag_id,
199
+ timeout=self.timeout,
200
+ )
201
+ dag_runs = dag_runs_response["dag_runs"]
202
+ else:
203
+ cmd_parameters = (
204
+ ["-d", self.composer_dag_id, "-o", "json"]
205
+ if self._composer_airflow_version < 3
206
+ else [self.composer_dag_id, "-o", "json"]
207
+ )
208
+ dag_runs_cmd = self.hook.execute_airflow_command(
209
+ project_id=self.project_id,
210
+ region=self.region,
211
+ environment_id=self.environment_id,
212
+ command="dags",
213
+ subcommand="list-runs",
214
+ parameters=cmd_parameters,
215
+ )
216
+ cmd_result = self.hook.wait_command_execution_result(
217
+ project_id=self.project_id,
218
+ region=self.region,
219
+ environment_id=self.environment_id,
220
+ execution_cmd_info=ExecuteAirflowCommandResponse.to_dict(dag_runs_cmd),
221
+ )
222
+ dag_runs = json.loads(cmd_result["output"][0]["content"])
157
223
  return dag_runs
158
224
 
159
225
  def _check_dag_runs_states(
@@ -184,16 +250,27 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
184
250
  image_version = environment_config["config"]["software_config"]["image_version"]
185
251
  return int(image_version.split("airflow-")[1].split(".")[0])
186
252
 
253
+ def _check_composer_dag_run_id_states(self, dag_runs: list[dict]) -> bool:
254
+ for dag_run in dag_runs:
255
+ if (
256
+ dag_run["dag_run_id" if self.use_rest_api else "run_id"] == self.composer_dag_run_id
257
+ and dag_run["state"] in self.allowed_states
258
+ ):
259
+ return True
260
+ return False
261
+
187
262
  def execute(self, context: Context) -> None:
188
263
  self._composer_airflow_version = self._get_composer_airflow_version()
189
264
  if self.deferrable:
190
265
  start_date, end_date = self._get_logical_dates(context)
191
266
  self.defer(
267
+ timeout=timedelta(seconds=self.timeout) if self.timeout else None,
192
268
  trigger=CloudComposerDAGRunTrigger(
193
269
  project_id=self.project_id,
194
270
  region=self.region,
195
271
  environment_id=self.environment_id,
196
272
  composer_dag_id=self.composer_dag_id,
273
+ composer_dag_run_id=self.composer_dag_run_id,
197
274
  start_date=start_date,
198
275
  end_date=end_date,
199
276
  allowed_states=self.allowed_states,
@@ -201,6 +278,7 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
201
278
  impersonation_chain=self.impersonation_chain,
202
279
  poll_interval=self.poll_interval,
203
280
  composer_airflow_version=self._composer_airflow_version,
281
+ use_rest_api=self.use_rest_api,
204
282
  ),
205
283
  method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
206
284
  )
@@ -217,3 +295,429 @@ class CloudComposerDAGRunSensor(BaseSensorOperator):
217
295
  gcp_conn_id=self.gcp_conn_id,
218
296
  impersonation_chain=self.impersonation_chain,
219
297
  )
298
+
299
+
300
class CloudComposerExternalTaskSensor(BaseSensorOperator):
    """
    Waits for a different DAG, task group, or task to complete for a specific composer environment.

    If both `composer_external_task_group_id` and `composer_external_task_id` are ``None`` (default), the sensor
    waits for the DAG.
    Values for `composer_external_task_group_id` and `composer_external_task_id` can't be set at the same time.

    By default, the CloudComposerExternalTaskSensor will wait for the external task to
    succeed, at which point it will also succeed. However, by default it will
    *not* fail if the external task fails, but will continue to check the status
    until the sensor times out (thus giving you time to retry the external task
    without also having to clear the sensor).

    By default, the CloudComposerExternalTaskSensor will not skip if the external task skips.
    To change this, simply set ``skipped_states=[TaskInstanceState.SKIPPED]``.
    Note that if you are monitoring multiple tasks, and one enters error state
    and the other enters a skipped state, then the external task will react to
    whichever one it sees first. If both happen together, then the failed state
    takes priority.

    It is possible to alter the default behavior by setting states which
    cause the sensor to fail, e.g. by setting ``allowed_states=[DagRunState.FAILED]``
    and ``failed_states=[DagRunState.SUCCESS]`` you will flip the behaviour to
    get a sensor which goes green when the external task *fails* and immediately
    goes red if the external task *succeeds*!

    Note that ``soft_fail`` is respected when examining the failed_states. Thus
    if the external task enters a failed state and ``soft_fail == True`` the
    sensor will _skip_ rather than fail. As a result, setting ``soft_fail=True``
    and ``failed_states=[DagRunState.SKIPPED]`` will result in the sensor
    skipping if the external task skips. However, this is a contrived
    example---consider using ``skipped_states`` if you would like this
    behaviour. Using ``skipped_states`` allows the sensor to skip if the target
    fails, but still enter failed state on timeout. Using ``soft_fail == True``
    as above will cause the sensor to skip if the target fails, but also if it
    times out.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param region: Required. The ID of the Google Cloud region that the service belongs to.
    :param environment_id: The name of the Composer environment.
    :param composer_external_dag_id: The dag_id that contains the task you want to
        wait for. (templated)
    :param composer_external_task_id: The task_id that contains the task you want to
        wait for. (templated)
    :param composer_external_task_ids: The list of task_ids that you want to wait for. (templated)
        If ``None`` (default value) the sensor waits for the DAG. Either
        composer_external_task_id or composer_external_task_ids can be passed to
        CloudComposerExternalTaskSensor, but not both.
    :param composer_external_task_group_id: The task_group_id that contains the task you want to
        wait for. (templated)
    :param allowed_states: Iterable of allowed states, default is ``['success']``
    :param skipped_states: Iterable of states to make this task mark as skipped, default is ``None``
    :param failed_states: Iterable of failed or dis-allowed states, default is ``None``
    :param execution_range: execution DAGs time range. Sensor checks DAGs states only for DAGs which were
        started in this time range. For yesterday, use [positive!] datetime.timedelta(days=1).
        For future, use [negative!] datetime.timedelta(days=-1). For specific time, use list of
        datetimes [datetime(2024,3,22,11,0,0), datetime(2024,3,22,12,0,0)].
        Or [datetime(2024,3,22,0,0,0)] in this case sensor will check for states from specific time in the
        past till current time execution.
        Default value datetime.timedelta(days=1).
    :param gcp_conn_id: The connection ID to use when fetching connection info.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    :param poll_interval: Optional: Control the rate of the poll for the result of deferrable run.
    :param deferrable: Run sensor in deferrable mode.
    """

    template_fields = (
        "project_id",
        "region",
        "environment_id",
        "composer_external_dag_id",
        "composer_external_task_id",
        "composer_external_task_ids",
        "composer_external_task_group_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        project_id: str,
        region: str,
        environment_id: str,
        composer_external_dag_id: str,
        composer_external_task_id: str | None = None,
        composer_external_task_ids: Collection[str] | None = None,
        composer_external_task_group_id: str | None = None,
        allowed_states: Iterable[str] | None = None,
        skipped_states: Iterable[str] | None = None,
        failed_states: Iterable[str] | None = None,
        execution_range: timedelta | list[datetime] | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
        poll_interval: int = 10,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.region = region
        self.environment_id = environment_id

        self.allowed_states = list(allowed_states) if allowed_states else [TaskInstanceState.SUCCESS.value]
        self.skipped_states = list(skipped_states) if skipped_states else []
        self.failed_states = list(failed_states) if failed_states else []

        total_states = set(self.allowed_states + self.skipped_states + self.failed_states)

        # The three state lists must be disjoint, otherwise the sensor's reaction is ambiguous.
        if len(total_states) != len(self.allowed_states) + len(self.skipped_states) + len(self.failed_states):
            raise DuplicateStateError(
                "Duplicate values provided across allowed_states, skipped_states and failed_states."
            )

        # convert [] to None
        if not composer_external_task_ids:
            composer_external_task_ids = None

        # can't set both single task id and a list of task ids
        if composer_external_task_id is not None and composer_external_task_ids is not None:
            raise ValueError(
                "Only one of `composer_external_task_id` or `composer_external_task_ids` may "
                "be provided to CloudComposerExternalTaskSensor; "
                "use `composer_external_task_id` or `composer_external_task_ids` or `composer_external_task_group_id`."
            )

        # since both not set, convert the single id to a 1-elt list - from here on, we only consider the list
        if composer_external_task_id is not None:
            composer_external_task_ids = [composer_external_task_id]

        if composer_external_task_group_id is not None and composer_external_task_ids is not None:
            raise ValueError(
                "Only one of `composer_external_task_group_id` or `composer_external_task_ids` may "
                "be provided to CloudComposerExternalTaskSensor; "
                "use `composer_external_task_id` or `composer_external_task_ids` or `composer_external_task_group_id`."
            )

        # check the requested states are all valid states for the target type, be it dag or task
        if composer_external_task_ids or composer_external_task_group_id:
            if not total_states <= set(State.task_states):
                raise ValueError(
                    "Valid values for `allowed_states`, `skipped_states` and `failed_states` "
                    "when `composer_external_task_id` or `composer_external_task_ids` or `composer_external_task_group_id` "
                    f"is not `None`: {State.task_states}"
                )
        elif not total_states <= set(State.dag_states):
            raise ValueError(
                "Valid values for `allowed_states`, `skipped_states` and `failed_states` "
                f"when `composer_external_task_id` and `composer_external_task_group_id` is `None`: {State.dag_states}"
            )

        self.execution_range = execution_range
        self.composer_external_dag_id = composer_external_dag_id
        self.composer_external_task_id = composer_external_task_id
        self.composer_external_task_ids = composer_external_task_ids
        self.composer_external_task_group_id = composer_external_task_group_id
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        self.deferrable = deferrable
        self.poll_interval = poll_interval

    def _get_logical_dates(self, context) -> tuple[datetime, datetime]:
        """Resolve ``execution_range`` into a concrete (start_date, end_date) window.

        :raises RuntimeError: if the Dag is asset-triggered and has no logical_date.
        """
        logical_date = context.get("logical_date", None)
        if logical_date is None:
            raise RuntimeError(
                "logical_date is None. Please make sure the sensor is not used in an asset-triggered Dag. "
                "CloudComposerExternalTaskSensor was designed to be used in time-based scheduled Dags only, "
                "and asset-triggered Dags do not have logical_date. "
            )
        if isinstance(self.execution_range, timedelta):
            # A negative timedelta means "look into the future" relative to logical_date.
            if self.execution_range < timedelta(0):
                return logical_date, logical_date - self.execution_range
            return logical_date - self.execution_range, logical_date
        if isinstance(self.execution_range, list) and len(self.execution_range) > 0:
            # One datetime: from that moment until logical_date; two: explicit window.
            start = self.execution_range[0]
            end = self.execution_range[1] if len(self.execution_range) > 1 else logical_date
            return start, end
        # Default: the 24 hours preceding logical_date.
        return logical_date - timedelta(1), logical_date

    def poke(self, context: Context) -> bool:
        """Check external task instances, reacting to failed/skipped states first."""
        start_date, end_date = self._get_logical_dates(context)

        task_instances = self._get_task_instances(
            start_date=start_date.strftime("%Y-%m-%dT%H:%M:%SZ"),
            end_date=end_date.strftime("%Y-%m-%dT%H:%M:%SZ"),
        )

        if len(task_instances) == 0:
            self.log.info("Task Instances are empty. Sensor waits for task instances...")
            return False

        # Failed states take priority over skipped states (see class docstring).
        if self.failed_states:
            external_task_status = self._check_task_instances_states(
                task_instances=task_instances,
                start_date=start_date,
                end_date=end_date,
                states=self.failed_states,
            )
            self._handle_failed_states(external_task_status)

        if self.skipped_states:
            external_task_status = self._check_task_instances_states(
                task_instances=task_instances,
                start_date=start_date,
                end_date=end_date,
                states=self.skipped_states,
            )
            self._handle_skipped_states(external_task_status)

        self.log.info("Sensor waits for allowed states: %s", self.allowed_states)
        external_task_status = self._check_task_instances_states(
            task_instances=task_instances,
            start_date=start_date,
            end_date=end_date,
            states=self.allowed_states,
        )
        return external_task_status

    def _get_task_instances(self, start_date: str, end_date: str) -> list[dict]:
        """Get the list of task instances."""
        try:
            environment = self.hook.get_environment(
                project_id=self.project_id,
                region=self.region,
                environment_id=self.environment_id,
                timeout=self.timeout,
            )
        except NotFound as not_found_err:
            self.log.info("The Composer environment %s does not exist.", self.environment_id)
            raise AirflowException(not_found_err)
        composer_airflow_uri = environment.config.airflow_uri

        self.log.info(
            "Pulling the DAG '%s' task instances from the '%s' environment...",
            self.composer_external_dag_id,
            self.environment_id,
        )
        # Airflow 3 renamed the REST API filter keys from execution_date_* to logical_date_*.
        date_key_prefix = "execution_date" if self._composer_airflow_version < 3 else "logical_date"
        task_instances_response = self.hook.get_task_instances(
            composer_airflow_uri=composer_airflow_uri,
            composer_dag_id=self.composer_external_dag_id,
            query_parameters={
                f"{date_key_prefix}_gte": start_date,
                f"{date_key_prefix}_lte": end_date,
            },
            timeout=self.timeout,
        )
        task_instances = task_instances_response["task_instances"]

        if self.composer_external_task_ids:
            task_instances = [
                task_instance
                for task_instance in task_instances
                if task_instance["task_id"] in self.composer_external_task_ids
            ]
        elif self.composer_external_task_group_id:
            # Task-group members have dotted task_ids like "group_id.task_id".
            task_instances = [
                task_instance
                for task_instance in task_instances
                if self.composer_external_task_group_id in task_instance["task_id"].split(".")
            ]

        return task_instances

    def _check_task_instances_states(
        self,
        task_instances: list[dict],
        start_date: datetime,
        end_date: datetime,
        states: Iterable[str],
    ) -> bool:
        """Return True only if every task instance inside the window is in one of *states*."""
        for task_instance in task_instances:
            if (
                start_date.timestamp()
                < parser.parse(
                    task_instance["execution_date" if self._composer_airflow_version < 3 else "logical_date"]
                ).timestamp()
                < end_date.timestamp()
            ) and task_instance["state"] not in states:
                return False
        return True

    def _get_composer_airflow_version(self) -> int:
        """Return Composer Airflow version."""
        environment_obj = self.hook.get_environment(
            project_id=self.project_id,
            region=self.region,
            environment_id=self.environment_id,
        )
        environment_config = Environment.to_dict(environment_obj)
        image_version = environment_config["config"]["software_config"]["image_version"]
        return int(image_version.split("airflow-")[1].split(".")[0])

    def _handle_failed_states(self, failed_status: bool) -> None:
        """Handle failed states and raise appropriate exceptions."""
        if failed_status:
            if self.composer_external_task_ids:
                if self.soft_fail:
                    raise AirflowSkipException(
                        f"Some of the external tasks '{self.composer_external_task_ids}' "
                        f"in DAG '{self.composer_external_dag_id}' failed. Skipping due to soft_fail."
                    )
                raise ExternalTaskFailedError(
                    f"Some of the external tasks '{self.composer_external_task_ids}' "
                    f"in DAG '{self.composer_external_dag_id}' failed."
                )
            if self.composer_external_task_group_id:
                if self.soft_fail:
                    raise AirflowSkipException(
                        f"The external task_group '{self.composer_external_task_group_id}' "
                        f"in DAG '{self.composer_external_dag_id}' failed. Skipping due to soft_fail."
                    )
                raise ExternalTaskGroupFailedError(
                    f"The external task_group '{self.composer_external_task_group_id}' "
                    f"in DAG '{self.composer_external_dag_id}' failed."
                )
            if self.soft_fail:
                raise AirflowSkipException(
                    f"The external DAG '{self.composer_external_dag_id}' failed. Skipping due to soft_fail."
                )
            raise ExternalDagFailedError(f"The external DAG '{self.composer_external_dag_id}' failed.")

    def _handle_skipped_states(self, skipped_status: bool) -> None:
        """Handle skipped states and raise appropriate exceptions."""
        if skipped_status:
            if self.composer_external_task_ids:
                raise AirflowSkipException(
                    f"Some of the external tasks '{self.composer_external_task_ids}' "
                    f"in DAG '{self.composer_external_dag_id}' reached a state in our states-to-skip-on list. Skipping."
                )
            if self.composer_external_task_group_id:
                raise AirflowSkipException(
                    f"The external task_group '{self.composer_external_task_group_id}' "
                    f"in DAG '{self.composer_external_dag_id}' reached a state in our states-to-skip-on list. Skipping."
                )
            raise AirflowSkipException(
                f"The external DAG '{self.composer_external_dag_id}' reached a state in our states-to-skip-on list. "
                "Skipping."
            )

    def execute(self, context: Context) -> None:
        """Run the sensor, deferring to a trigger when ``deferrable`` is set."""
        self._composer_airflow_version = self._get_composer_airflow_version()

        if self.composer_external_task_ids and len(self.composer_external_task_ids) > len(
            set(self.composer_external_task_ids)
        ):
            raise ValueError("Duplicate task_ids passed in composer_external_task_ids parameter")

        if self.composer_external_task_ids:
            self.log.info(
                "Poking for tasks '%s' in dag '%s' on Composer environment '%s' ... ",
                self.composer_external_task_ids,
                self.composer_external_dag_id,
                self.environment_id,
            )

        if self.composer_external_task_group_id:
            self.log.info(
                "Poking for task_group '%s' in dag '%s' on Composer environment '%s' ... ",
                self.composer_external_task_group_id,
                self.composer_external_dag_id,
                self.environment_id,
            )

        if (
            self.composer_external_dag_id
            and not self.composer_external_task_group_id
            and not self.composer_external_task_ids
        ):
            self.log.info(
                "Poking for DAG '%s' on Composer environment '%s' ... ",
                self.composer_external_dag_id,
                self.environment_id,
            )

        if self.deferrable:
            start_date, end_date = self._get_logical_dates(context)
            self.defer(
                timeout=timedelta(seconds=self.timeout) if self.timeout else None,
                trigger=CloudComposerExternalTaskTrigger(
                    project_id=self.project_id,
                    region=self.region,
                    environment_id=self.environment_id,
                    composer_external_dag_id=self.composer_external_dag_id,
                    composer_external_task_ids=self.composer_external_task_ids,
                    composer_external_task_group_id=self.composer_external_task_group_id,
                    start_date=start_date,
                    end_date=end_date,
                    allowed_states=self.allowed_states,
                    skipped_states=self.skipped_states,
                    failed_states=self.failed_states,
                    gcp_conn_id=self.gcp_conn_id,
                    impersonation_chain=self.impersonation_chain,
                    poll_interval=self.poll_interval,
                    composer_airflow_version=self._composer_airflow_version,
                ),
                method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
            )
        super().execute(context)

    def execute_complete(self, context: Context, event: dict):
        """Act on the trigger's terminal event after a deferred run."""
        if event and event["status"] == "error":
            raise AirflowException(event["message"])
        if event and event["status"] == "failed":
            self._handle_failed_states(True)
        elif event and event["status"] == "skipped":
            self._handle_skipped_states(True)

        self.log.info("External tasks for DAG '%s' has executed successfully.", self.composer_external_dag_id)

    @cached_property
    def hook(self) -> CloudComposerHook:
        """Lazily-built CloudComposerHook sharing the sensor's connection settings."""
        return CloudComposerHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
@@ -24,6 +24,7 @@ from typing import TYPE_CHECKING, Any
24
24
 
25
25
  from airflow.configuration import conf
26
26
  from airflow.exceptions import AirflowException
27
+ from airflow.providers.common.compat.sdk import BaseSensorOperator
27
28
  from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
28
29
  COUNTERS,
29
30
  METADATA,
@@ -35,10 +36,9 @@ from airflow.providers.google.cloud.triggers.cloud_storage_transfer_service impo
35
36
  CloudStorageTransferServiceCheckJobStatusTrigger,
36
37
  )
37
38
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
38
- from airflow.sensors.base import BaseSensorOperator
39
39
 
40
40
  if TYPE_CHECKING:
41
- from airflow.utils.context import Context
41
+ from airflow.providers.common.compat.sdk import Context
42
42
 
43
43
 
44
44
  class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):