apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.3.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (257)
  1. airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
  2. airflow/providers/google/__init__.py +3 -3
  3. airflow/providers/google/ads/hooks/ads.py +39 -6
  4. airflow/providers/google/ads/operators/ads.py +2 -2
  5. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
  6. airflow/providers/google/assets/gcs.py +1 -11
  7. airflow/providers/google/cloud/bundles/__init__.py +16 -0
  8. airflow/providers/google/cloud/bundles/gcs.py +161 -0
  9. airflow/providers/google/cloud/hooks/alloy_db.py +1 -1
  10. airflow/providers/google/cloud/hooks/bigquery.py +176 -293
  11. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  12. airflow/providers/google/cloud/hooks/cloud_build.py +1 -1
  13. airflow/providers/google/cloud/hooks/cloud_composer.py +288 -15
  14. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  15. airflow/providers/google/cloud/hooks/cloud_memorystore.py +1 -1
  16. airflow/providers/google/cloud/hooks/cloud_run.py +18 -10
  17. airflow/providers/google/cloud/hooks/cloud_sql.py +102 -23
  18. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +29 -7
  19. airflow/providers/google/cloud/hooks/compute.py +1 -1
  20. airflow/providers/google/cloud/hooks/compute_ssh.py +6 -2
  21. airflow/providers/google/cloud/hooks/datacatalog.py +10 -1
  22. airflow/providers/google/cloud/hooks/dataflow.py +72 -95
  23. airflow/providers/google/cloud/hooks/dataform.py +1 -1
  24. airflow/providers/google/cloud/hooks/datafusion.py +21 -19
  25. airflow/providers/google/cloud/hooks/dataplex.py +2 -2
  26. airflow/providers/google/cloud/hooks/dataprep.py +1 -1
  27. airflow/providers/google/cloud/hooks/dataproc.py +73 -72
  28. airflow/providers/google/cloud/hooks/dataproc_metastore.py +1 -1
  29. airflow/providers/google/cloud/hooks/dlp.py +1 -1
  30. airflow/providers/google/cloud/hooks/functions.py +1 -1
  31. airflow/providers/google/cloud/hooks/gcs.py +112 -15
  32. airflow/providers/google/cloud/hooks/gdm.py +1 -1
  33. airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
  34. airflow/providers/google/cloud/hooks/kubernetes_engine.py +3 -3
  35. airflow/providers/google/cloud/hooks/looker.py +6 -2
  36. airflow/providers/google/cloud/hooks/managed_kafka.py +1 -1
  37. airflow/providers/google/cloud/hooks/mlengine.py +4 -3
  38. airflow/providers/google/cloud/hooks/pubsub.py +3 -0
  39. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  40. airflow/providers/google/cloud/hooks/spanner.py +74 -9
  41. airflow/providers/google/cloud/hooks/stackdriver.py +11 -9
  42. airflow/providers/google/cloud/hooks/tasks.py +1 -1
  43. airflow/providers/google/cloud/hooks/translate.py +2 -2
  44. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +2 -210
  45. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +3 -3
  46. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +28 -2
  47. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  48. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +308 -8
  49. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
  50. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +1 -1
  51. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +1 -1
  52. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +1 -1
  53. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
  54. airflow/providers/google/cloud/hooks/vision.py +3 -3
  55. airflow/providers/google/cloud/hooks/workflows.py +1 -1
  56. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  57. airflow/providers/google/cloud/links/base.py +77 -13
  58. airflow/providers/google/cloud/links/bigquery.py +0 -47
  59. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  60. airflow/providers/google/cloud/links/bigtable.py +0 -48
  61. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  62. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  63. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  64. airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
  65. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  66. airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
  67. airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
  68. airflow/providers/google/cloud/links/compute.py +0 -58
  69. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  70. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  71. airflow/providers/google/cloud/links/dataflow.py +0 -34
  72. airflow/providers/google/cloud/links/dataform.py +0 -64
  73. airflow/providers/google/cloud/links/datafusion.py +1 -96
  74. airflow/providers/google/cloud/links/dataplex.py +0 -154
  75. airflow/providers/google/cloud/links/dataprep.py +0 -24
  76. airflow/providers/google/cloud/links/dataproc.py +11 -95
  77. airflow/providers/google/cloud/links/datastore.py +0 -31
  78. airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
  79. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  80. airflow/providers/google/cloud/links/mlengine.py +0 -70
  81. airflow/providers/google/cloud/links/pubsub.py +0 -32
  82. airflow/providers/google/cloud/links/spanner.py +0 -33
  83. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  84. airflow/providers/google/cloud/links/translate.py +17 -187
  85. airflow/providers/google/cloud/links/vertex_ai.py +28 -195
  86. airflow/providers/google/cloud/links/workflows.py +0 -52
  87. airflow/providers/google/cloud/log/gcs_task_handler.py +58 -22
  88. airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
  89. airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
  90. airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
  91. airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
  92. airflow/providers/google/cloud/openlineage/facets.py +102 -1
  93. airflow/providers/google/cloud/openlineage/mixins.py +10 -8
  94. airflow/providers/google/cloud/openlineage/utils.py +15 -1
  95. airflow/providers/google/cloud/operators/alloy_db.py +71 -56
  96. airflow/providers/google/cloud/operators/bigquery.py +73 -636
  97. airflow/providers/google/cloud/operators/bigquery_dts.py +4 -6
  98. airflow/providers/google/cloud/operators/bigtable.py +37 -8
  99. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  100. airflow/providers/google/cloud/operators/cloud_batch.py +3 -3
  101. airflow/providers/google/cloud/operators/cloud_build.py +76 -33
  102. airflow/providers/google/cloud/operators/cloud_composer.py +129 -41
  103. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  104. airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
  105. airflow/providers/google/cloud/operators/cloud_run.py +24 -6
  106. airflow/providers/google/cloud/operators/cloud_sql.py +8 -17
  107. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +93 -12
  108. airflow/providers/google/cloud/operators/compute.py +9 -41
  109. airflow/providers/google/cloud/operators/datacatalog.py +157 -21
  110. airflow/providers/google/cloud/operators/dataflow.py +40 -16
  111. airflow/providers/google/cloud/operators/dataform.py +15 -5
  112. airflow/providers/google/cloud/operators/datafusion.py +42 -21
  113. airflow/providers/google/cloud/operators/dataplex.py +194 -110
  114. airflow/providers/google/cloud/operators/dataprep.py +1 -5
  115. airflow/providers/google/cloud/operators/dataproc.py +80 -36
  116. airflow/providers/google/cloud/operators/dataproc_metastore.py +97 -89
  117. airflow/providers/google/cloud/operators/datastore.py +23 -7
  118. airflow/providers/google/cloud/operators/dlp.py +6 -29
  119. airflow/providers/google/cloud/operators/functions.py +17 -8
  120. airflow/providers/google/cloud/operators/gcs.py +12 -9
  121. airflow/providers/google/cloud/operators/gen_ai.py +389 -0
  122. airflow/providers/google/cloud/operators/kubernetes_engine.py +62 -100
  123. airflow/providers/google/cloud/operators/looker.py +2 -2
  124. airflow/providers/google/cloud/operators/managed_kafka.py +108 -53
  125. airflow/providers/google/cloud/operators/natural_language.py +1 -1
  126. airflow/providers/google/cloud/operators/pubsub.py +68 -15
  127. airflow/providers/google/cloud/operators/spanner.py +26 -13
  128. airflow/providers/google/cloud/operators/speech_to_text.py +2 -3
  129. airflow/providers/google/cloud/operators/stackdriver.py +1 -9
  130. airflow/providers/google/cloud/operators/tasks.py +1 -12
  131. airflow/providers/google/cloud/operators/text_to_speech.py +2 -3
  132. airflow/providers/google/cloud/operators/translate.py +41 -17
  133. airflow/providers/google/cloud/operators/translate_speech.py +2 -3
  134. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
  135. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +30 -10
  136. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +55 -27
  137. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
  138. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
  139. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  140. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
  141. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -115
  142. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +12 -10
  143. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
  144. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +31 -8
  145. airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
  146. airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
  147. airflow/providers/google/cloud/operators/vision.py +2 -2
  148. airflow/providers/google/cloud/operators/workflows.py +18 -15
  149. airflow/providers/google/cloud/secrets/secret_manager.py +3 -2
  150. airflow/providers/google/cloud/sensors/bigquery.py +3 -3
  151. airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -3
  152. airflow/providers/google/cloud/sensors/bigtable.py +11 -4
  153. airflow/providers/google/cloud/sensors/cloud_composer.py +533 -30
  154. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -3
  155. airflow/providers/google/cloud/sensors/dataflow.py +26 -10
  156. airflow/providers/google/cloud/sensors/dataform.py +2 -3
  157. airflow/providers/google/cloud/sensors/datafusion.py +4 -5
  158. airflow/providers/google/cloud/sensors/dataplex.py +2 -3
  159. airflow/providers/google/cloud/sensors/dataprep.py +2 -2
  160. airflow/providers/google/cloud/sensors/dataproc.py +2 -3
  161. airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -3
  162. airflow/providers/google/cloud/sensors/gcs.py +4 -5
  163. airflow/providers/google/cloud/sensors/looker.py +2 -3
  164. airflow/providers/google/cloud/sensors/pubsub.py +4 -5
  165. airflow/providers/google/cloud/sensors/tasks.py +2 -2
  166. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -3
  167. airflow/providers/google/cloud/sensors/workflows.py +2 -3
  168. airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
  169. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
  170. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +4 -3
  171. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
  172. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +10 -5
  173. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
  174. airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
  175. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
  176. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
  177. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  178. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +3 -3
  179. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +4 -4
  180. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +21 -13
  181. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +4 -3
  182. airflow/providers/google/cloud/transfers/gcs_to_local.py +6 -4
  183. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +11 -5
  184. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
  185. airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
  186. airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
  187. airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
  188. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  189. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
  190. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
  191. airflow/providers/google/cloud/transfers/s3_to_gcs.py +13 -7
  192. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
  193. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +14 -5
  194. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
  195. airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
  196. airflow/providers/google/cloud/triggers/bigquery.py +76 -35
  197. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  198. airflow/providers/google/cloud/triggers/cloud_composer.py +303 -47
  199. airflow/providers/google/cloud/triggers/cloud_run.py +3 -3
  200. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +92 -2
  201. airflow/providers/google/cloud/triggers/dataflow.py +122 -0
  202. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  203. airflow/providers/google/cloud/triggers/dataplex.py +14 -2
  204. airflow/providers/google/cloud/triggers/dataproc.py +123 -53
  205. airflow/providers/google/cloud/triggers/kubernetes_engine.py +47 -28
  206. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  207. airflow/providers/google/cloud/triggers/pubsub.py +15 -19
  208. airflow/providers/google/cloud/triggers/vertex_ai.py +1 -1
  209. airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
  210. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  211. airflow/providers/google/cloud/utils/field_sanitizer.py +1 -1
  212. airflow/providers/google/cloud/utils/field_validator.py +2 -3
  213. airflow/providers/google/common/auth_backend/google_openid.py +4 -4
  214. airflow/providers/google/common/deprecated.py +2 -1
  215. airflow/providers/google/common/hooks/base_google.py +27 -9
  216. airflow/providers/google/common/hooks/operation_helpers.py +1 -1
  217. airflow/providers/google/common/links/storage.py +0 -22
  218. airflow/providers/google/common/utils/get_secret.py +31 -0
  219. airflow/providers/google/common/utils/id_token_credentials.py +3 -4
  220. airflow/providers/google/firebase/hooks/firestore.py +1 -1
  221. airflow/providers/google/firebase/operators/firestore.py +3 -3
  222. airflow/providers/google/get_provider_info.py +56 -52
  223. airflow/providers/google/go_module_utils.py +35 -3
  224. airflow/providers/google/leveldb/hooks/leveldb.py +27 -2
  225. airflow/providers/google/leveldb/operators/leveldb.py +2 -2
  226. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +1 -1
  227. airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
  228. airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
  229. airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
  230. airflow/providers/google/marketing_platform/operators/analytics_admin.py +2 -3
  231. airflow/providers/google/marketing_platform/operators/campaign_manager.py +6 -6
  232. airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
  233. airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
  234. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
  235. airflow/providers/google/marketing_platform/sensors/display_video.py +3 -64
  236. airflow/providers/google/suite/hooks/calendar.py +2 -2
  237. airflow/providers/google/suite/hooks/sheets.py +16 -2
  238. airflow/providers/google/suite/operators/sheets.py +8 -3
  239. airflow/providers/google/suite/sensors/drive.py +2 -2
  240. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +3 -3
  241. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  242. airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
  243. airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
  244. airflow/providers/google/version_compat.py +15 -1
  245. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/METADATA +90 -46
  246. apache_airflow_providers_google-19.3.0.dist-info/RECORD +331 -0
  247. apache_airflow_providers_google-19.3.0.dist-info/licenses/NOTICE +5 -0
  248. airflow/providers/google/cloud/hooks/automl.py +0 -673
  249. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  250. airflow/providers/google/cloud/links/automl.py +0 -193
  251. airflow/providers/google/cloud/operators/automl.py +0 -1362
  252. airflow/providers/google/cloud/operators/life_sciences.py +0 -119
  253. airflow/providers/google/cloud/operators/mlengine.py +0 -112
  254. apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
  255. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/WHEEL +0 -0
  256. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/entry_points.txt +0 -0
  257. {airflow/providers/google → apache_airflow_providers_google-19.3.0.dist-info/licenses}/LICENSE +0 -0
@@ -19,15 +19,15 @@ from __future__ import annotations

  import shlex
  from collections.abc import Sequence
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Any

- from google.api_core.exceptions import AlreadyExists
+ from google.api_core.exceptions import AlreadyExists, NotFound
  from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
  from google.cloud.orchestration.airflow.service_v1 import ImageVersion
  from google.cloud.orchestration.airflow.service_v1.types import Environment, ExecuteAirflowCommandResponse

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook
  from airflow.providers.google.cloud.links.base import BaseGoogleLink
  from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
@@ -41,7 +41,7 @@ if TYPE_CHECKING:
  from google.api_core.retry import Retry
  from google.protobuf.field_mask_pb2 import FieldMask

- from airflow.utils.context import Context
+ from airflow.providers.common.compat.sdk import Context

  CLOUD_COMPOSER_BASE_LINK = "https://console.cloud.google.com/composer/environments"
  CLOUD_COMPOSER_DETAILS_LINK = (
@@ -57,25 +57,6 @@ class CloudComposerEnvironmentLink(BaseGoogleLink):
  key = "composer_conf"
  format_str = CLOUD_COMPOSER_DETAILS_LINK

- @staticmethod
- def persist(
- operator_instance: (
- CloudComposerCreateEnvironmentOperator
- | CloudComposerUpdateEnvironmentOperator
- | CloudComposerGetEnvironmentOperator
- ),
- context: Context,
- ) -> None:
- operator_instance.xcom_push(
- context,
- key=CloudComposerEnvironmentLink.key,
- value={
- "project_id": operator_instance.project_id,
- "region": operator_instance.region,
- "environment_id": operator_instance.environment_id,
- },
- )
-

  class CloudComposerEnvironmentsLink(BaseGoogleLink):
  """Helper class for constructing Cloud Composer Environment Link."""
@@ -84,16 +65,6 @@ class CloudComposerEnvironmentsLink(BaseGoogleLink):
  key = "composer_conf"
  format_str = CLOUD_COMPOSER_ENVIRONMENTS_LINK

- @staticmethod
- def persist(operator_instance: CloudComposerListEnvironmentsOperator, context: Context) -> None:
- operator_instance.xcom_push(
- context,
- key=CloudComposerEnvironmentsLink.key,
- value={
- "project_id": operator_instance.project_id,
- },
- )
-

  class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
  """
@@ -159,6 +130,14 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
  self.deferrable = deferrable
  self.pooling_period_seconds = pooling_period_seconds

+ @property
+ def extra_links_params(self) -> dict[str, Any]:
+ return {
+ "project_id": self.project_id,
+ "region": self.region,
+ "environment_id": self.environment_id,
+ }
+
  def execute(self, context: Context):
  hook = CloudComposerHook(
  gcp_conn_id=self.gcp_conn_id,
@@ -171,7 +150,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
  else:
  self.environment["name"] = name

- CloudComposerEnvironmentLink.persist(operator_instance=self, context=context)
+ CloudComposerEnvironmentLink.persist(context=context)
  try:
  result = hook.create_environment(
  project_id=self.project_id,
@@ -370,6 +349,14 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
  self.gcp_conn_id = gcp_conn_id
  self.impersonation_chain = impersonation_chain

+ @property
+ def extra_links_params(self) -> dict[str, Any]:
+ return {
+ "project_id": self.project_id,
+ "region": self.region,
+ "environment_id": self.environment_id,
+ }
+
  def execute(self, context: Context):
  hook = CloudComposerHook(
  gcp_conn_id=self.gcp_conn_id,
@@ -384,8 +371,7 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
  timeout=self.timeout,
  metadata=self.metadata,
  )
-
- CloudComposerEnvironmentLink.persist(operator_instance=self, context=context)
+ CloudComposerEnvironmentLink.persist(context=context)
  return Environment.to_dict(result)


@@ -445,12 +431,17 @@ class CloudComposerListEnvironmentsOperator(GoogleCloudBaseOperator):
  self.gcp_conn_id = gcp_conn_id
  self.impersonation_chain = impersonation_chain

+ @property
+ def extra_links_params(self) -> dict[str, Any]:
+ return {
+ "project_id": self.project_id,
+ }
+
  def execute(self, context: Context):
  hook = CloudComposerHook(
  gcp_conn_id=self.gcp_conn_id,
  impersonation_chain=self.impersonation_chain,
  )
- CloudComposerEnvironmentsLink.persist(operator_instance=self, context=context)
  result = hook.list_environments(
  project_id=self.project_id,
  region=self.region,
@@ -532,6 +523,14 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
  self.deferrable = deferrable
  self.pooling_period_seconds = pooling_period_seconds

+ @property
+ def extra_links_params(self) -> dict[str, Any]:
+ return {
+ "project_id": self.project_id,
+ "region": self.region,
+ "environment_id": self.environment_id,
+ }
+
  def execute(self, context: Context):
  hook = CloudComposerHook(
  gcp_conn_id=self.gcp_conn_id,
@@ -549,7 +548,7 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
  metadata=self.metadata,
  )

- CloudComposerEnvironmentLink.persist(operator_instance=self, context=context)
+ CloudComposerEnvironmentLink.persist(context=context)
  if not self.deferrable:
  environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
  return Environment.to_dict(environment)
@@ -765,9 +764,15 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
  metadata=self.metadata,
  poll_interval=self.poll_interval,
  )
- result_str = self._merge_cmd_output_result(result)
- self.log.info("Command execution result:\n%s", result_str)
- return result
+ exit_code = result.get("exit_info", {}).get("exit_code")
+ if exit_code == 0:
+ result_str = self._merge_cmd_output_result(result)
+ self.log.info("Command execution result:\n%s", result_str)
+ return result
+
+ error_output = "".join(line["content"] for line in result.get("error", []))
+ message = f"Airflow CLI command failed with exit code {exit_code}.\nError output:\n{error_output}"
+ raise AirflowException(message)

  def execute_complete(self, context: Context, event: dict) -> dict:
  if event and event["status"] == "error":
@@ -793,3 +798,86 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
  """Merge output to one string."""
  result_str = "\n".join(line_dict["content"] for line_dict in result["output"])
  return result_str
+
+
+ class CloudComposerTriggerDAGRunOperator(GoogleCloudBaseOperator):
+ """
+ Trigger DAG run for provided Composer environment.
+
+ :param project_id: The ID of the Google Cloud project that the service belongs to.
+ :param region: The ID of the Google Cloud region that the service belongs to.
+ :param environment_id: The ID of the Google Cloud environment that the service belongs to.
+ :param composer_dag_id: The ID of DAG which will be triggered.
+ :param composer_dag_conf: Configuration parameters for the DAG run.
+ :param timeout: The timeout for this request.
+ :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
+ :param impersonation_chain: Optional service account to impersonate using short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity, with first
+ account from the list granting this role to the originating account (templated).
+ """
+
+ template_fields = (
+ "project_id",
+ "region",
+ "environment_id",
+ "composer_dag_id",
+ "impersonation_chain",
+ )
+
+ def __init__(
+ self,
+ *,
+ project_id: str,
+ region: str,
+ environment_id: str,
+ composer_dag_id: str,
+ composer_dag_conf: dict | None = None,
+ timeout: float | None = None,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ) -> None:
+ super().__init__(**kwargs)
+ self.project_id = project_id
+ self.region = region
+ self.environment_id = environment_id
+ self.composer_dag_id = composer_dag_id
+ self.composer_dag_conf = composer_dag_conf or {}
+ self.timeout = timeout
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context):
+ hook = CloudComposerHook(
+ gcp_conn_id=self.gcp_conn_id,
+ impersonation_chain=self.impersonation_chain,
+ )
+ try:
+ environment = hook.get_environment(
+ project_id=self.project_id,
+ region=self.region,
+ environment_id=self.environment_id,
+ timeout=self.timeout,
+ )
+ except NotFound as not_found_err:
+ self.log.info("The Composer environment %s does not exist.", self.environment_id)
+ raise AirflowException(not_found_err)
+ composer_airflow_uri = environment.config.airflow_uri
+
+ self.log.info(
+ "Triggering the DAG %s on the %s environment...", self.composer_dag_id, self.environment_id
+ )
+ dag_run = hook.trigger_dag_run(
+ composer_airflow_uri=composer_airflow_uri,
+ composer_dag_id=self.composer_dag_id,
+ composer_dag_conf=self.composer_dag_conf,
+ timeout=self.timeout,
+ )
+ self.log.info("The DAG %s was triggered with Run ID: %s", self.composer_dag_id, dag_run["dag_run_id"])
+
+ return dag_run
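
The cloud_composer.py hunks above replace the per-operator persist() helpers with an extra_links_params property, make CloudComposerRunAirflowCLICommandOperator raise AirflowException when the CLI command exits with a non-zero code, and add a new CloudComposerTriggerDAGRunOperator. Below is a minimal usage sketch of the new operator; the DAG shell and every ID value are illustrative placeholders, not values taken from the package.

```python
# Sketch only: trigger a DAG in a remote Cloud Composer environment.
# Project, region, environment, and DAG IDs are hypothetical placeholders.
from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.cloud_composer import (
    CloudComposerTriggerDAGRunOperator,
)

with DAG(
    dag_id="example_trigger_composer_dag",
    start_date=datetime(2024, 1, 1),
    schedule=None,
) as dag:
    trigger_remote_dag = CloudComposerTriggerDAGRunOperator(
        task_id="trigger_remote_dag",
        project_id="my-gcp-project",
        region="us-central1",
        environment_id="my-composer-env",
        composer_dag_id="target_dag",
        # Passed through to the triggered DAG run as its conf.
        composer_dag_conf={"triggered_by": "upstream-dag"},
    )
```

If the environment cannot be found, the operator raises AirflowException (see the NotFound handling in the diff); otherwise it returns the DAG run dict, which is pushed to XCom like any other operator return value.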
@@ -0,0 +1,341 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ from collections.abc import Sequence
+ from typing import TYPE_CHECKING, Any
+
+ import google.cloud.exceptions
+ from google.api_core.exceptions import AlreadyExists
+ from google.cloud.logging_v2.types import LogSink
+
+ from airflow.providers.common.compat.sdk import AirflowException
+ from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
+ from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+
+ if TYPE_CHECKING:
+ from google.protobuf.field_mask_pb2 import FieldMask
+
+ from airflow.providers.common.compat.sdk import Context
+
+
+ def _validate_inputs(obj, required_fields: list[str]) -> None:
+ """Validate that all required fields are present on self."""
+ missing = [field for field in required_fields if not getattr(obj, field, None)]
+ if missing:
+ raise AirflowException(
+ f"Required parameters are missing: {missing}. These must be passed as keyword parameters."
+ )
+
+
+ def _get_field(obj, field_name):
+ """Supports both dict and protobuf-like objects."""
+ if isinstance(obj, dict):
+ return obj.get(field_name)
+ return getattr(obj, field_name, None)
+
+
+ class CloudLoggingCreateSinkOperator(GoogleCloudBaseOperator):
+ """
+ Creates a Cloud Logging export sink in a GCP project.
+
+ This operator creates a sink that exports log entries from Cloud Logging
+ to destinations like Cloud Storage, BigQuery, or Pub/Sub.
+
+ :param project_id: Required. ID of the Google Cloud project where the sink will be created.
+ :param sink_config: Required. The full sink configuration as a dictionary or a LogSink object.
+ See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+ :param unique_writer_identity: If True, creates a unique service account for the sink.
+ If False, uses the default Google-managed service account.
+ :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud. Defaults to "google_cloud_default".
+ :param impersonation_chain: Optional service account to impersonate using short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity, with first
+ account from the list granting this role to the originating account (templated).
+ """
+
+ template_fields: Sequence[str] = (
+ "project_id",
+ "sink_config",
+ "gcp_conn_id",
+ "impersonation_chain",
+ "unique_writer_identity",
+ )
+
+ def __init__(
+ self,
+ project_id: str,
+ sink_config: dict | LogSink,
+ unique_writer_identity: bool = False,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ):
+ super().__init__(**kwargs)
+ self.project_id = project_id
+ self.sink_config = sink_config
+ self.unique_writer_identity = unique_writer_identity
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> dict[str, Any]:
+ """Execute the operator."""
+ _validate_inputs(self, required_fields=["project_id", "sink_config"])
+ hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+ try:
+ self.log.info(
+ "Creating log sink '%s' in project '%s'",
+ _get_field(self.sink_config, "name"),
+ self.project_id,
+ )
+ self.log.info("Destination: %s", _get_field(self.sink_config, "destination"))
+
+ response = hook.create_sink(
+ sink=self.sink_config,
+ unique_writer_identity=self.unique_writer_identity,
+ project_id=self.project_id,
+ )
+
+ self.log.info("Log sink created successfully: %s", response.name)
+
+ if self.unique_writer_identity and hasattr(response, "writer_identity"):
+ self.log.info("Writer identity: %s", response.writer_identity)
+ self.log.info("Remember to grant appropriate permissions to the writer identity")
+
+ return LogSink.to_dict(response)
+
+ except AlreadyExists:
+ self.log.info(
+ "Already existed log sink, sink_name=%s, project_id=%s",
+ _get_field(self.sink_config, "name"),
+ self.project_id,
+ )
+ existing_sink = hook.get_sink(
+ sink_name=_get_field(self.sink_config, "name"), project_id=self.project_id
+ )
+ return LogSink.to_dict(existing_sink)
+
+ except google.cloud.exceptions.GoogleCloudError as e:
+ self.log.error("An error occurred. Exiting.")
+ raise e
+
+
+ class CloudLoggingDeleteSinkOperator(GoogleCloudBaseOperator):
+ """
+ Deletes a Cloud Logging export sink from a GCP project.
+
+ :param sink_name: Required. Name of the sink to delete.
+ :param project_id: Required. The ID of the Google Cloud project.
+ :param gcp_conn_id: Optional. The connection ID to use for connecting to Google Cloud.
+ Defaults to "google_cloud_default".
+ :param impersonation_chain: Optional service account to impersonate using short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity, with first
+ account from the list granting this role to the originating account (templated).
+ """
+
+ template_fields: Sequence[str] = ("sink_name", "project_id", "gcp_conn_id", "impersonation_chain")
+
+ def __init__(
+ self,
+ sink_name: str,
+ project_id: str,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ):
+ super().__init__(**kwargs)
+ self.sink_name = sink_name
+ self.project_id = project_id
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> None:
+ """Execute the operator."""
+ _validate_inputs(self, ["sink_name", "project_id"])
+ hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+ try:
+ self.log.info("Deleting log sink '%s' from project '%s'", self.sink_name, self.project_id)
+ hook.delete_sink(sink_name=self.sink_name, project_id=self.project_id)
+ self.log.info("Log sink '%s' deleted successfully", self.sink_name)
+
+ except google.cloud.exceptions.NotFound as e:
+ self.log.error("An error occurred. Not Found.")
+ raise e
+ except google.cloud.exceptions.GoogleCloudError as e:
+ self.log.error("An error occurred. Exiting.")
+ raise e
+
+
+ class CloudLoggingUpdateSinkOperator(GoogleCloudBaseOperator):
+ """
+ Updates an existing Cloud Logging export sink.
+
+ :param project_id: Required. The ID of the Google Cloud project that contains the sink.
+ :param sink_name: Required. The name of the sink to update.
+ :param sink_config: Required. The updated sink configuration. Can be a dictionary or a
+ `google.cloud.logging_v2.types.LogSink` object. Refer to:
+ https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+ :param update_mask: Required. A FieldMask or dictionary specifying which fields of the sink
+ should be updated. For example, to update the destination and filter, use:
+ `{"paths": ["destination", "filter"]}`.
+ :param unique_writer_identity: Optional. When set to True, a new unique service account
+ will be created for the sink. Defaults to False.
+ :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
+ Defaults to "google_cloud_default".
+ :param impersonation_chain: Optional service account to impersonate using short-term
+ credentials, or chained list of accounts required to get the access_token
+ of the last account in the list, which will be impersonated in the request.
+ If set as a string, the account must grant the originating account
+ the Service Account Token Creator IAM role.
+ If set as a sequence, the identities from the list must grant
+ Service Account Token Creator IAM role to the directly preceding identity, with first
+ account from the list granting this role to the originating account (templated).
+ """
+
+ template_fields: Sequence[str] = (
+ "sink_name",
+ "project_id",
+ "update_mask",
+ "sink_config",
+ "unique_writer_identity",
+ "gcp_conn_id",
+ "impersonation_chain",
+ )
+
+ def __init__(
+ self,
+ project_id: str,
+ sink_name: str,
+ sink_config: dict | LogSink,
+ update_mask: FieldMask | dict,
+ unique_writer_identity: bool = False,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ):
+ super().__init__(**kwargs)
+ self.project_id = project_id
+ self.sink_name = sink_name
+ self.sink_config = sink_config
+ self.update_mask = update_mask
+ self.unique_writer_identity = unique_writer_identity
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> dict[str, Any]:
+ """Execute the operator."""
+ _validate_inputs(self, ["sink_name", "project_id", "sink_config", "update_mask"])
+ hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+ try:
+ current_sink = hook.get_sink(sink_name=self.sink_name, project_id=self.project_id)
+ self.log.info("Current log sink configuration: '%s'.", LogSink.to_dict(current_sink))
+
+ self.log.info("Updating log sink '%s' in project '%s'", self.sink_name, self.project_id)
+ if isinstance(self.update_mask, dict) and "paths" in self.update_mask:
+ paths = self.update_mask["paths"]
+ elif hasattr(self.update_mask, "paths"):
+ paths = self.update_mask.paths
+
+ self.log.info("Updating fields: %s", ", ".join(paths))
+
+ response = hook.update_sink(
+ sink_name=self.sink_name,
+ sink=self.sink_config,
+ unique_writer_identity=self.unique_writer_identity,
+ project_id=self.project_id,
+ update_mask=self.update_mask,
+ )
+ self.log.info("Log sink updated successfully: %s", response.name)
+ return LogSink.to_dict(response)
+
+ except google.cloud.exceptions.NotFound as e:
+ self.log.error("An error occurred. Not Found.")
+ raise e
+ except google.cloud.exceptions.GoogleCloudError as e:
+ self.log.error("An error occurred. Exiting.")
+ raise e
+
+
+ class CloudLoggingListSinksOperator(GoogleCloudBaseOperator):
+ """
+ Lists Cloud Logging export sinks in a Google Cloud project.
+
+ :param project_id: Required. The ID of the Google Cloud project to list sinks from.
+ :param page_size: Optional. The maximum number of sinks to return per page. Must be greater than 0.
+ If None, the server will use a default value.
+ :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
+ Defaults to "google_cloud_default".
+ :param impersonation_chain: Optional. Service account or chained list of accounts to impersonate.
+ If a string, the service account must grant the originating account the
+ 'Service Account Token Creator' IAM role.
+
+ If a sequence, each account in the chain must grant this role to the next.
+ The first account must grant it to the originating account (templated).
+ """
+
+ template_fields: Sequence[str] = ("project_id", "gcp_conn_id", "impersonation_chain", "page_size")
+
+ def __init__(
+ self,
+ project_id: str,
+ page_size: int | None = None,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ):
+ super().__init__(**kwargs)
+ self.project_id = project_id
+ self.page_size = page_size
+ self.gcp_conn_id = gcp_conn_id
+ self.impersonation_chain = impersonation_chain
+
+ def execute(self, context: Context) -> list[dict[str, Any]]:
+ """Execute the operator."""
+ _validate_inputs(self, ["project_id"])
+
+ if self.page_size is not None and self.page_size < 1:
+ raise AirflowException("The page_size for the list sinks request must be greater than zero")
+
+ hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+ try:
+ self.log.info("Listing log sinks in project '%s'", self.project_id)
+
+ sinks = hook.list_sinks(project_id=self.project_id, page_size=self.page_size)
+
+ result = [LogSink.to_dict(sink) for sink in sinks]
+ self.log.info("Found %d log sinks", len(result))
+
+ return result
+
+ except google.cloud.exceptions.GoogleCloudError as e:
+ self.log.error("An error occurred. Exiting.")
+ raise e
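
The new cloud_logging_sink.py module above adds create, update, delete, and list operators for Cloud Logging export sinks, backed by the new CloudLoggingHook. Below is a minimal sketch of a create-then-delete flow, assuming a Cloud Storage destination; the project ID, sink name, and bucket are hypothetical placeholders.

```python
# Sketch only: manage a Cloud Logging export sink with the new operators.
# Project ID, sink name, and bucket are hypothetical placeholders.
from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.cloud_logging_sink import (
    CloudLoggingCreateSinkOperator,
    CloudLoggingDeleteSinkOperator,
)

with DAG(
    dag_id="example_cloud_logging_sink",
    start_date=datetime(2024, 1, 1),
    schedule=None,
) as dag:
    create_sink = CloudLoggingCreateSinkOperator(
        task_id="create_sink",
        project_id="my-gcp-project",
        sink_config={
            # Fields follow the LogSink REST resource referenced in the docstrings above.
            "name": "my-error-sink",
            "destination": "storage.googleapis.com/my-log-export-bucket",
            "filter": "severity>=ERROR",
        },
        unique_writer_identity=True,
    )

    delete_sink = CloudLoggingDeleteSinkOperator(
        task_id="delete_sink",
        sink_name="my-error-sink",
        project_id="my-gcp-project",
    )

    create_sink >> delete_sink
```

If the sink already exists, CloudLoggingCreateSinkOperator returns the existing sink instead of failing (the AlreadyExists branch above). With unique_writer_identity=True, the returned writer_identity service account still needs write access granted on the destination, as the operator's log message reminds you.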