apache-airflow-providers-google 16.0.0rc1__py3-none-any.whl → 16.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (166)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +9 -5
  3. airflow/providers/google/ads/operators/ads.py +1 -1
  4. airflow/providers/google/ads/transfers/ads_to_gcs.py +1 -1
  5. airflow/providers/google/cloud/hooks/bigquery.py +2 -3
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +8 -4
  7. airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
  8. airflow/providers/google/cloud/hooks/dataflow.py +2 -2
  9. airflow/providers/google/cloud/hooks/dataplex.py +1 -1
  10. airflow/providers/google/cloud/hooks/dataprep.py +4 -1
  11. airflow/providers/google/cloud/hooks/gcs.py +2 -2
  12. airflow/providers/google/cloud/hooks/looker.py +5 -1
  13. airflow/providers/google/cloud/hooks/mlengine.py +2 -1
  14. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  15. airflow/providers/google/cloud/hooks/spanner.py +2 -2
  16. airflow/providers/google/cloud/hooks/translate.py +1 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +43 -14
  19. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +11 -2
  20. airflow/providers/google/cloud/hooks/vision.py +2 -2
  21. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  22. airflow/providers/google/cloud/links/base.py +75 -11
  23. airflow/providers/google/cloud/links/bigquery.py +0 -47
  24. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  25. airflow/providers/google/cloud/links/bigtable.py +0 -48
  26. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  27. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  28. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  29. airflow/providers/google/cloud/links/cloud_run.py +1 -33
  30. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  31. airflow/providers/google/cloud/links/cloud_storage_transfer.py +16 -43
  32. airflow/providers/google/cloud/links/cloud_tasks.py +6 -25
  33. airflow/providers/google/cloud/links/compute.py +0 -58
  34. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  35. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  36. airflow/providers/google/cloud/links/dataflow.py +0 -34
  37. airflow/providers/google/cloud/links/dataform.py +0 -64
  38. airflow/providers/google/cloud/links/datafusion.py +1 -96
  39. airflow/providers/google/cloud/links/dataplex.py +0 -154
  40. airflow/providers/google/cloud/links/dataprep.py +0 -24
  41. airflow/providers/google/cloud/links/dataproc.py +14 -90
  42. airflow/providers/google/cloud/links/datastore.py +0 -31
  43. airflow/providers/google/cloud/links/kubernetes_engine.py +5 -59
  44. airflow/providers/google/cloud/links/life_sciences.py +0 -19
  45. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  46. airflow/providers/google/cloud/links/mlengine.py +0 -70
  47. airflow/providers/google/cloud/links/pubsub.py +0 -32
  48. airflow/providers/google/cloud/links/spanner.py +0 -33
  49. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  50. airflow/providers/google/cloud/links/translate.py +16 -186
  51. airflow/providers/google/cloud/links/vertex_ai.py +8 -224
  52. airflow/providers/google/cloud/links/workflows.py +0 -52
  53. airflow/providers/google/cloud/operators/alloy_db.py +69 -54
  54. airflow/providers/google/cloud/operators/automl.py +16 -14
  55. airflow/providers/google/cloud/operators/bigquery.py +0 -15
  56. airflow/providers/google/cloud/operators/bigquery_dts.py +2 -4
  57. airflow/providers/google/cloud/operators/bigtable.py +35 -6
  58. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  59. airflow/providers/google/cloud/operators/cloud_build.py +74 -31
  60. airflow/providers/google/cloud/operators/cloud_composer.py +34 -35
  61. airflow/providers/google/cloud/operators/cloud_memorystore.py +68 -42
  62. airflow/providers/google/cloud/operators/cloud_run.py +0 -1
  63. airflow/providers/google/cloud/operators/cloud_sql.py +11 -15
  64. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +0 -2
  65. airflow/providers/google/cloud/operators/compute.py +7 -39
  66. airflow/providers/google/cloud/operators/datacatalog.py +156 -20
  67. airflow/providers/google/cloud/operators/dataflow.py +37 -14
  68. airflow/providers/google/cloud/operators/dataform.py +14 -4
  69. airflow/providers/google/cloud/operators/datafusion.py +4 -12
  70. airflow/providers/google/cloud/operators/dataplex.py +180 -96
  71. airflow/providers/google/cloud/operators/dataprep.py +0 -4
  72. airflow/providers/google/cloud/operators/dataproc.py +10 -16
  73. airflow/providers/google/cloud/operators/dataproc_metastore.py +95 -87
  74. airflow/providers/google/cloud/operators/datastore.py +21 -5
  75. airflow/providers/google/cloud/operators/dlp.py +3 -26
  76. airflow/providers/google/cloud/operators/functions.py +15 -6
  77. airflow/providers/google/cloud/operators/gcs.py +0 -7
  78. airflow/providers/google/cloud/operators/kubernetes_engine.py +50 -7
  79. airflow/providers/google/cloud/operators/life_sciences.py +0 -1
  80. airflow/providers/google/cloud/operators/managed_kafka.py +106 -51
  81. airflow/providers/google/cloud/operators/mlengine.py +0 -1
  82. airflow/providers/google/cloud/operators/pubsub.py +2 -4
  83. airflow/providers/google/cloud/operators/spanner.py +0 -4
  84. airflow/providers/google/cloud/operators/speech_to_text.py +0 -1
  85. airflow/providers/google/cloud/operators/stackdriver.py +0 -8
  86. airflow/providers/google/cloud/operators/tasks.py +0 -11
  87. airflow/providers/google/cloud/operators/text_to_speech.py +0 -1
  88. airflow/providers/google/cloud/operators/translate.py +37 -13
  89. airflow/providers/google/cloud/operators/translate_speech.py +0 -1
  90. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +31 -18
  91. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +28 -8
  92. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +38 -25
  93. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +69 -7
  94. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +42 -8
  95. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +531 -0
  96. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +93 -25
  97. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +10 -8
  98. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +56 -10
  99. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +25 -6
  100. airflow/providers/google/cloud/operators/vertex_ai/ray.py +9 -6
  101. airflow/providers/google/cloud/operators/workflows.py +1 -9
  102. airflow/providers/google/cloud/sensors/bigquery.py +1 -1
  103. airflow/providers/google/cloud/sensors/bigquery_dts.py +6 -1
  104. airflow/providers/google/cloud/sensors/bigtable.py +15 -3
  105. airflow/providers/google/cloud/sensors/cloud_composer.py +6 -1
  106. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +6 -1
  107. airflow/providers/google/cloud/sensors/dataflow.py +3 -3
  108. airflow/providers/google/cloud/sensors/dataform.py +6 -1
  109. airflow/providers/google/cloud/sensors/datafusion.py +6 -1
  110. airflow/providers/google/cloud/sensors/dataplex.py +6 -1
  111. airflow/providers/google/cloud/sensors/dataprep.py +6 -1
  112. airflow/providers/google/cloud/sensors/dataproc.py +6 -1
  113. airflow/providers/google/cloud/sensors/dataproc_metastore.py +6 -1
  114. airflow/providers/google/cloud/sensors/gcs.py +9 -3
  115. airflow/providers/google/cloud/sensors/looker.py +6 -1
  116. airflow/providers/google/cloud/sensors/pubsub.py +8 -3
  117. airflow/providers/google/cloud/sensors/tasks.py +6 -1
  118. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +6 -1
  119. airflow/providers/google/cloud/sensors/workflows.py +6 -1
  120. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +1 -1
  121. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +1 -1
  122. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +1 -2
  123. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +1 -2
  124. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +0 -1
  125. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +1 -1
  126. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  127. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +1 -1
  128. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +1 -1
  129. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
  130. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +1 -1
  131. airflow/providers/google/cloud/transfers/gcs_to_local.py +1 -1
  132. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  133. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +5 -1
  134. airflow/providers/google/cloud/transfers/gdrive_to_local.py +1 -1
  135. airflow/providers/google/cloud/transfers/http_to_gcs.py +1 -1
  136. airflow/providers/google/cloud/transfers/local_to_gcs.py +1 -1
  137. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +1 -1
  138. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  139. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +2 -2
  140. airflow/providers/google/cloud/transfers/sql_to_gcs.py +1 -1
  141. airflow/providers/google/cloud/utils/field_validator.py +1 -2
  142. airflow/providers/google/common/auth_backend/google_openid.py +2 -1
  143. airflow/providers/google/common/deprecated.py +2 -1
  144. airflow/providers/google/common/hooks/base_google.py +7 -3
  145. airflow/providers/google/common/links/storage.py +0 -22
  146. airflow/providers/google/firebase/operators/firestore.py +1 -1
  147. airflow/providers/google/get_provider_info.py +0 -11
  148. airflow/providers/google/leveldb/hooks/leveldb.py +5 -1
  149. airflow/providers/google/leveldb/operators/leveldb.py +1 -1
  150. airflow/providers/google/marketing_platform/links/analytics_admin.py +3 -6
  151. airflow/providers/google/marketing_platform/operators/analytics_admin.py +0 -1
  152. airflow/providers/google/marketing_platform/operators/campaign_manager.py +4 -4
  153. airflow/providers/google/marketing_platform/operators/display_video.py +6 -6
  154. airflow/providers/google/marketing_platform/operators/search_ads.py +1 -1
  155. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +6 -1
  156. airflow/providers/google/marketing_platform/sensors/display_video.py +6 -1
  157. airflow/providers/google/suite/operators/sheets.py +3 -3
  158. airflow/providers/google/suite/sensors/drive.py +6 -1
  159. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +1 -1
  160. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  161. airflow/providers/google/suite/transfers/local_to_drive.py +1 -1
  162. airflow/providers/google/version_compat.py +28 -0
  163. {apache_airflow_providers_google-16.0.0rc1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/METADATA +19 -20
  164. {apache_airflow_providers_google-16.0.0rc1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/RECORD +166 -166
  165. {apache_airflow_providers_google-16.0.0rc1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/WHEEL +0 -0
  166. {apache_airflow_providers_google-16.0.0rc1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/operators/alloy_db.py

@@ -21,7 +21,7 @@ from __future__ import annotations
 
 from collections.abc import Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 from google.api_core.exceptions import NotFound
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
@@ -228,15 +228,16 @@ class AlloyDBCreateClusterOperator(AlloyDBWriteBaseOperator):
             return result
         return None
 
-    def execute(self, context: Context) -> dict | None:
-        AlloyDBClusterLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
 
+    def execute(self, context: Context) -> dict | None:
+        AlloyDBClusterLink.persist(context=context)
         if cluster := self._get_cluster():
             return cluster
 
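The hunk above is the template for every AlloyDB and Bigtable change in this release: the link parameters move out of the `persist()` call and into a class-level `extra_links_params` property. A minimal sketch of the pattern under that reading — `ExampleClusterLink` and `ExampleClusterOperator` are hypothetical names, and it assumes `BaseGoogleLink.persist()` now recovers the formatting parameters from the running task through the context, which is what dropping the explicit kwargs implies:

```python
from __future__ import annotations

from typing import Any

from airflow.providers.google.cloud.links.base import BaseGoogleLink
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator


class ExampleClusterLink(BaseGoogleLink):
    """Hypothetical link; the URL is formatted from the task's extra_links_params."""

    name = "Example Cluster"
    key = "example_cluster"
    format_str = "/alloydb/locations/{location_id}/clusters/{cluster_id}?project={project_id}"


class ExampleClusterOperator(GoogleCloudBaseOperator):
    """Hypothetical operator following the new shape shown in the hunk above."""

    operator_extra_links = (ExampleClusterLink(),)

    def __init__(self, *, location: str, cluster_id: str, project_id: str, **kwargs):
        super().__init__(**kwargs)
        self.location = location
        self.cluster_id = cluster_id
        self.project_id = project_id

    @property
    def extra_links_params(self) -> dict[str, Any]:
        # persist() is assumed to pick these up via the context, so execute()
        # no longer passes task_instance or the individual link kwargs.
        return {
            "location_id": self.location,
            "cluster_id": self.cluster_id,
            "project_id": self.project_id,
        }

    def execute(self, context) -> None:
        ExampleClusterLink.persist(context=context)
```

The payoff shows up in operators with several `persist()` call sites: the parameters are declared once on the class instead of being repeated per call, which is exactly the shrinkage visible in the remaining AlloyDB hunks below.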
@@ -334,14 +335,16 @@ class AlloyDBUpdateClusterOperator(AlloyDBWriteBaseOperator):
         self.update_mask = update_mask
         self.allow_missing = allow_missing
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBClusterLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+        AlloyDBClusterLink.persist(context=context)
         if self.validate_request:
             self.log.info("Validating an Update AlloyDB cluster request.")
         else:
@@ -545,14 +548,16 @@ class AlloyDBCreateInstanceOperator(AlloyDBWriteBaseOperator):
             return result
         return None
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBClusterLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+        AlloyDBClusterLink.persist(context=context)
         if instance := self._get_instance():
             return instance
 
@@ -654,14 +659,16 @@ class AlloyDBUpdateInstanceOperator(AlloyDBWriteBaseOperator):
         self.update_mask = update_mask
         self.allow_missing = allow_missing
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBClusterLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+        AlloyDBClusterLink.persist(context=context)
         if self.validate_request:
             self.log.info("Validating an Update AlloyDB instance request.")
         else:
@@ -861,14 +868,16 @@ class AlloyDBCreateUserOperator(AlloyDBWriteBaseOperator):
             return result
         return None
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBUsersLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+        AlloyDBUsersLink.persist(context=context)
         if (_user := self._get_user()) is not None:
             return _user
 
@@ -968,14 +977,16 @@ class AlloyDBUpdateUserOperator(AlloyDBWriteBaseOperator):
         self.update_mask = update_mask
         self.allow_missing = allow_missing
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location_id": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBUsersLink.persist(
-            context=context,
-            task_instance=self,
-            location_id=self.location,
-            cluster_id=self.cluster_id,
-            project_id=self.project_id,
-        )
+        AlloyDBUsersLink.persist(context=context)
         if self.validate_request:
             self.log.info("Validating an Update AlloyDB user request.")
         else:
@@ -1159,12 +1170,14 @@ class AlloyDBCreateBackupOperator(AlloyDBWriteBaseOperator):
             return result
         return None
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBBackupsLink.persist(
-            context=context,
-            task_instance=self,
-            project_id=self.project_id,
-        )
+        AlloyDBBackupsLink.persist(context=context)
         if backup := self._get_backup():
             return backup
 
@@ -1259,12 +1272,14 @@ class AlloyDBUpdateBackupOperator(AlloyDBWriteBaseOperator):
         self.update_mask = update_mask
         self.allow_missing = allow_missing
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict | None:
-        AlloyDBBackupsLink.persist(
-            context=context,
-            task_instance=self,
-            project_id=self.project_id,
-        )
+        AlloyDBBackupsLink.persist(context=context)
         if self.validate_request:
             self.log.info("Validating an Update AlloyDB backup request.")
         else:
airflow/providers/google/cloud/operators/automl.py

@@ -153,21 +153,24 @@ class AutoMLTrainModelOperator(GoogleCloudBaseOperator):
         project_id = self.project_id or hook.project_id
         if project_id:
             TranslationLegacyModelTrainLink.persist(
-                context=context, task_instance=self, project_id=project_id
+                context=context,
+                dataset_id=self.model["dataset_id"],
+                project_id=project_id,
+                location=self.location,
             )
         operation_result = hook.wait_for_operation(timeout=self.timeout, operation=operation)
         result = Model.to_dict(operation_result)
         model_id = hook.extract_object_id(result)
         self.log.info("Model is created, model_id: %s", model_id)
 
-        self.xcom_push(context, key="model_id", value=model_id)
+        context["task_instance"].xcom_push(key="model_id", value=model_id)
         if project_id:
             TranslationLegacyModelLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self.model["dataset_id"] or "-",
                 model_id=model_id,
                 project_id=project_id,
+                location=self.location,
             )
         return result
 
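Besides the link changes, this hunk drops the deprecated `BaseOperator.xcom_push(context, ...)` helper in favor of pushing through the task instance carried in the context. A short sketch of the migration inside a hypothetical `execute` method; note that `context["task_instance"]` and `context["ti"]` name the same `TaskInstance`, which is why the bigquery_dts hunks further down can use either spelling:

```python
def execute(self, context):
    model_id = "example-model-id"  # stand-in for the value computed by the operator

    # Old style, removed in these hunks: a helper on the operator itself that
    # took the context as its first positional argument.
    # self.xcom_push(context, key="model_id", value=model_id)

    # New style: push via the TaskInstance held in the execution context.
    context["task_instance"].xcom_push(key="model_id", value=model_id)
```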
@@ -313,10 +316,10 @@ class AutoMLPredictOperator(GoogleCloudBaseOperator):
         if project_id and self.model_id and dataset_id:
             TranslationLegacyModelPredictLink.persist(
                 context=context,
-                task_instance=self,
                 model_id=self.model_id,
                 dataset_id=dataset_id,
                 project_id=project_id,
+                location=self.location,
             )
         return PredictResponse.to_dict(result)
 
@@ -412,14 +415,14 @@ class AutoMLCreateDatasetOperator(GoogleCloudBaseOperator):
         dataset_id = hook.extract_object_id(result)
         self.log.info("Creating completed. Dataset id: %s", dataset_id)
 
-        self.xcom_push(context, key="dataset_id", value=dataset_id)
+        context["task_instance"].xcom_push(key="dataset_id", value=dataset_id)
         project_id = self.project_id or hook.project_id
         if project_id:
             TranslationLegacyDatasetLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=dataset_id,
                 project_id=project_id,
+                location=self.location,
             )
         return result
 
@@ -530,9 +533,9 @@ class AutoMLImportDataOperator(GoogleCloudBaseOperator):
         if project_id:
             TranslationLegacyDatasetLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self.dataset_id,
                 project_id=project_id,
+                location=self.location,
             )
 
 
@@ -649,9 +652,9 @@ class AutoMLTablesListColumnSpecsOperator(GoogleCloudBaseOperator):
         if project_id:
             TranslationLegacyDatasetLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self.dataset_id,
                 project_id=project_id,
+                location=self.location,
             )
         return result
 
@@ -749,9 +752,9 @@ class AutoMLTablesUpdateDatasetOperator(GoogleCloudBaseOperator):
         if project_id:
             TranslationLegacyDatasetLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=hook.extract_object_id(self.dataset),
                 project_id=project_id,
+                location=self.location,
             )
         return Dataset.to_dict(result)
 
@@ -845,10 +848,10 @@ class AutoMLGetModelOperator(GoogleCloudBaseOperator):
         if project_id:
             TranslationLegacyModelLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=model["dataset_id"],
                 model_id=self.model_id,
                 project_id=project_id,
+                location=self.location,
             )
         return model
 
@@ -1154,9 +1157,9 @@ class AutoMLTablesListTableSpecsOperator(GoogleCloudBaseOperator):
         if project_id:
             TranslationLegacyDatasetLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self.dataset_id,
                 project_id=project_id,
+                location=self.location,
             )
         return result
 
@@ -1245,14 +1248,13 @@ class AutoMLListDatasetOperator(GoogleCloudBaseOperator):
             result.append(Dataset.to_dict(dataset))
         self.log.info("Datasets obtained.")
 
-        self.xcom_push(
-            context,
+        context["task_instance"].xcom_push(
             key="dataset_id_list",
             value=[hook.extract_object_id(d) for d in result],
         )
         project_id = self.project_id or hook.project_id
         if project_id:
-            TranslationDatasetListLink.persist(context=context, task_instance=self, project_id=project_id)
+            TranslationDatasetListLink.persist(context=context, project_id=project_id)
         return result
 
 
airflow/providers/google/cloud/operators/bigquery.py

@@ -1324,7 +1324,6 @@ class BigQueryCreateTableOperator(GoogleCloudBaseOperator):
         if self._table:
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": self._table.to_api_repr()["tableReference"]["projectId"],
                 "dataset_id": self._table.to_api_repr()["tableReference"]["datasetId"],
                 "table_id": self._table.to_api_repr()["tableReference"]["tableId"],
@@ -1343,7 +1342,6 @@ class BigQueryCreateTableOperator(GoogleCloudBaseOperator):
             self.log.info(error_msg)
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": self.project_id or bq_hook.project_id,
                 "dataset_id": self.dataset_id,
                 "table_id": self.table_id,
@@ -1608,7 +1606,6 @@ class BigQueryCreateEmptyTableOperator(GoogleCloudBaseOperator):
         if self._table:
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": self._table.to_api_repr()["tableReference"]["projectId"],
                 "dataset_id": self._table.to_api_repr()["tableReference"]["datasetId"],
                 "table_id": self._table.to_api_repr()["tableReference"]["tableId"],
@@ -1627,7 +1624,6 @@ class BigQueryCreateEmptyTableOperator(GoogleCloudBaseOperator):
             self.log.info(error_msg)
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": self.project_id or bq_hook.project_id,
                 "dataset_id": self.dataset_id,
                 "table_id": self.table_id,
@@ -1898,7 +1894,6 @@ class BigQueryCreateExternalTableOperator(GoogleCloudBaseOperator):
         if self._table:
             BigQueryTableLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self._table.dataset_id,
                 project_id=self._table.project,
                 table_id=self._table.table_id,
@@ -1957,7 +1952,6 @@ class BigQueryCreateExternalTableOperator(GoogleCloudBaseOperator):
         if self._table:
             BigQueryTableLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self._table.dataset_id,
                 project_id=self._table.project,
                 table_id=self._table.table_id,
@@ -2155,7 +2149,6 @@ class BigQueryCreateEmptyDatasetOperator(GoogleCloudBaseOperator):
             )
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": dataset["datasetReference"]["projectId"],
                 "dataset_id": dataset["datasetReference"]["datasetId"],
             }
@@ -2167,7 +2160,6 @@ class BigQueryCreateEmptyDatasetOperator(GoogleCloudBaseOperator):
             )
             persist_kwargs = {
                 "context": context,
-                "task_instance": self,
                 "project_id": project_id,
                 "dataset_id": dataset_id,
             }
@@ -2239,7 +2231,6 @@ class BigQueryGetDatasetOperator(GoogleCloudBaseOperator):
         dataset_api_repr = dataset.to_api_repr()
         BigQueryDatasetLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dataset_api_repr["datasetReference"]["datasetId"],
             project_id=dataset_api_repr["datasetReference"]["projectId"],
         )
@@ -2388,7 +2379,6 @@ class BigQueryUpdateTableOperator(GoogleCloudBaseOperator):
         if self._table:
             BigQueryTableLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self._table["tableReference"]["datasetId"],
                 project_id=self._table["tableReference"]["projectId"],
                 table_id=self._table["tableReference"]["tableId"],
@@ -2491,7 +2481,6 @@ class BigQueryUpdateDatasetOperator(GoogleCloudBaseOperator):
         dataset_api_repr = dataset.to_api_repr()
         BigQueryDatasetLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dataset_api_repr["datasetReference"]["datasetId"],
             project_id=dataset_api_repr["datasetReference"]["projectId"],
         )
@@ -2663,7 +2652,6 @@ class BigQueryUpsertTableOperator(GoogleCloudBaseOperator):
         if self._table:
             BigQueryTableLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self._table["tableReference"]["datasetId"],
                 project_id=self._table["tableReference"]["projectId"],
                 table_id=self._table["tableReference"]["tableId"],
@@ -2793,7 +2781,6 @@ class BigQueryUpdateTableSchemaOperator(GoogleCloudBaseOperator):
         if self._table:
             BigQueryTableLink.persist(
                 context=context,
-                task_instance=self,
                 dataset_id=self._table["tableReference"]["datasetId"],
                 project_id=self._table["tableReference"]["projectId"],
                 table_id=self._table["tableReference"]["tableId"],
@@ -3039,7 +3026,6 @@ class BigQueryInsertJobOperator(GoogleCloudBaseOperator, _BigQueryInsertJobOpera
                             table = job_configuration[job_type][table_prop]
                             persist_kwargs = {
                                 "context": context,
-                                "task_instance": self,
                                 "project_id": self.project_id,
                                 "table_id": table,
                             }
@@ -3061,7 +3047,6 @@ class BigQueryInsertJobOperator(GoogleCloudBaseOperator, _BigQueryInsertJobOpera
 
         persist_kwargs = {
             "context": context,
-            "task_instance": self,
             "project_id": self.project_id,
             "location": self.location,
             "job_id": self.job_id,
airflow/providers/google/cloud/operators/bigquery_dts.py

@@ -134,7 +134,6 @@ class BigQueryCreateDataTransferOperator(GoogleCloudBaseOperator):
         transfer_config = _get_transfer_config_details(response.name)
         BigQueryDataTransferConfigLink.persist(
             context=context,
-            task_instance=self,
             region=transfer_config["region"],
             config_id=transfer_config["config_id"],
             project_id=transfer_config["project_id"],
@@ -142,7 +141,7 @@ class BigQueryCreateDataTransferOperator(GoogleCloudBaseOperator):
 
         result = TransferConfig.to_dict(response)
         self.log.info("Created DTS transfer config %s", get_object_id(result))
-        self.xcom_push(context, key="transfer_config_id", value=get_object_id(result))
+        context["ti"].xcom_push(key="transfer_config_id", value=get_object_id(result))
         # don't push AWS secret in XCOM
         result.get("params", {}).pop("secret_access_key", None)
         result.get("params", {}).pop("access_key_id", None)
@@ -329,7 +328,6 @@ class BigQueryDataTransferServiceStartTransferRunsOperat
         transfer_config = _get_transfer_config_details(response.runs[0].name)
         BigQueryDataTransferConfigLink.persist(
             context=context,
-            task_instance=self,
             region=transfer_config["region"],
             config_id=transfer_config["config_id"],
             project_id=transfer_config["project_id"],
@@ -337,7 +335,7 @@ class BigQueryDataTransferServiceStartTransferRunsOperat
 
         result = StartManualTransferRunsResponse.to_dict(response)
         run_id = get_object_id(result["runs"][0])
-        self.xcom_push(context, key="run_id", value=run_id)
+        context["ti"].xcom_push(key="run_id", value=run_id)
 
         if not self.deferrable:
             # Save as attribute for further use by OpenLineage
airflow/providers/google/cloud/operators/bigtable.py

@@ -20,7 +20,7 @@
 from __future__ import annotations
 
 from collections.abc import Iterable, Sequence
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 import google.api_core.exceptions
 
@@ -142,6 +142,13 @@ class BigtableCreateInstanceOperator(GoogleCloudBaseOperator, BigtableValidation
         self.impersonation_chain = impersonation_chain
         super().__init__(**kwargs)
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "instance_id": self.instance_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> None:
         hook = BigtableHook(
             gcp_conn_id=self.gcp_conn_id,
@@ -155,7 +162,7 @@ class BigtableCreateInstanceOperator(GoogleCloudBaseOperator, BigtableValidation
                 "The instance '%s' already exists in this project. Consider it as created",
                 self.instance_id,
             )
-            BigtableInstanceLink.persist(context=context, task_instance=self)
+            BigtableInstanceLink.persist(context=context)
             return
         try:
             hook.create_instance(
@@ -171,7 +178,7 @@ class BigtableCreateInstanceOperator(GoogleCloudBaseOperator, BigtableValidation
                 cluster_storage_type=self.cluster_storage_type,
                 timeout=self.timeout,
             )
-            BigtableInstanceLink.persist(context=context, task_instance=self)
+            BigtableInstanceLink.persist(context=context)
         except google.api_core.exceptions.GoogleAPICallError as e:
             self.log.error("An error occurred. Exiting.")
             raise e
@@ -240,6 +247,13 @@ class BigtableUpdateInstanceOperator(GoogleCloudBaseOperator, BigtableValidation
         self.impersonation_chain = impersonation_chain
         super().__init__(**kwargs)
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "instance_id": self.instance_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> None:
         hook = BigtableHook(
             gcp_conn_id=self.gcp_conn_id,
@@ -258,7 +272,7 @@ class BigtableUpdateInstanceOperator(GoogleCloudBaseOperator, BigtableValidation
                 instance_labels=self.instance_labels,
                 timeout=self.timeout,
             )
-            BigtableInstanceLink.persist(context=context, task_instance=self)
+            BigtableInstanceLink.persist(context=context)
         except google.api_core.exceptions.GoogleAPICallError as e:
             self.log.error("An error occurred. Exiting.")
             raise e
@@ -414,6 +428,13 @@ class BigtableCreateTableOperator(GoogleCloudBaseOperator, BigtableValidationMix
                 return False
         return True
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "instance_id": self.instance_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> None:
         hook = BigtableHook(
             gcp_conn_id=self.gcp_conn_id,
@@ -431,7 +452,7 @@ class BigtableCreateTableOperator(GoogleCloudBaseOperator, BigtableValidationMix
                 initial_split_keys=self.initial_split_keys,
                 column_families=self.column_families,
             )
-            BigtableTablesLink.persist(context=context, task_instance=self)
+            BigtableTablesLink.persist(context=context)
         except google.api_core.exceptions.AlreadyExists:
             if not self._compare_column_families(hook, instance):
                 raise AirflowException(
@@ -575,6 +596,14 @@ class BigtableUpdateClusterOperator(GoogleCloudBaseOperator, BigtableValidationM
         self.impersonation_chain = impersonation_chain
         super().__init__(**kwargs)
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "instance_id": self.instance_id,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> None:
         hook = BigtableHook(
             gcp_conn_id=self.gcp_conn_id,
@@ -586,7 +615,7 @@ class BigtableUpdateClusterOperator(GoogleCloudBaseOperator, BigtableValidationM
 
         try:
             hook.update_cluster(instance=instance, cluster_id=self.cluster_id, nodes=self.nodes)
-            BigtableClusterLink.persist(context=context, task_instance=self)
+            BigtableClusterLink.persist(context=context)
         except google.api_core.exceptions.NotFound:
             raise AirflowException(
                 f"Dependency: cluster '{self.cluster_id}' does not exist for instance '{self.instance_id}'."
airflow/providers/google/cloud/operators/cloud_base.py

@@ -19,9 +19,11 @@
 
 from __future__ import annotations
 
+from typing import Any
+
 from google.api_core.gapic_v1.method import DEFAULT
 
-from airflow.models import BaseOperator
+from airflow.providers.google.version_compat import BaseOperator
 
 
 class GoogleCloudBaseOperator(BaseOperator):
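`airflow.providers.google.version_compat` is new in this release (entry 162 in the file list, +28 lines), and its contents are not shown in this diff. A plausible sketch, assuming it follows the compatibility-shim pattern other provider packages use — re-export `BaseOperator` from the Task SDK on Airflow 3 and from `airflow.models` on Airflow 2:

```python
# version_compat.py -- a sketch, not the released module's verbatim contents.
from __future__ import annotations


def get_base_airflow_version_tuple() -> tuple[int, int, int]:
    from packaging.version import Version

    from airflow import __version__

    airflow_version = Version(__version__)
    return airflow_version.major, airflow_version.minor, airflow_version.micro


# True when running on Airflow 3.0 or newer.
AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)

if AIRFLOW_V_3_0_PLUS:
    from airflow.sdk import BaseOperator
else:
    from airflow.models import BaseOperator  # type: ignore[no-redef]

__all__ = ["AIRFLOW_V_3_0_PLUS", "BaseOperator"]
```

Routing the import through one module lets every operator in the provider stay source-compatible with both major Airflow lines, which is why `cloud_base.py` above now imports `BaseOperator` from it.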
@@ -36,3 +38,21 @@ class GoogleCloudBaseOperator(BaseOperator):
         """
         memo[id(DEFAULT)] = DEFAULT
         return super().__deepcopy__(memo)
+
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        """
+        Override this method to include parameters for link formatting in extra links.
+
+        For example; most of the links on the Google provider require `project_id` and `location` in the Link.
+        To be not repeat; you can override this function and return something like the following:
+
+        .. code-block:: python
+
+            {
+                "project_id": self.project_id,
+                "location": self.location,
+            }
+
+        """
+        return {}
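Following that docstring, a subclass declares the shared parameters once instead of threading them through every `persist()` call. A hypothetical example (`MyRegionalOperator` is an illustrative name, not a class from the package):

```python
from __future__ import annotations

from typing import Any

from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator


class MyRegionalOperator(GoogleCloudBaseOperator):
    """Hypothetical operator whose extra links all format with project_id and location."""

    def __init__(self, *, project_id: str, location: str, **kwargs):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.location = location

    @property
    def extra_links_params(self) -> dict[str, Any]:
        # Consumed when the provider's link classes build their URLs, per the
        # docstring on GoogleCloudBaseOperator above.
        return {
            "project_id": self.project_id,
            "location": self.location,
        }
```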