apache-airflow-providers-google 16.0.0a1__py3-none-any.whl → 16.1.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (172)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +43 -5
  3. airflow/providers/google/ads/operators/ads.py +1 -1
  4. airflow/providers/google/ads/transfers/ads_to_gcs.py +1 -1
  5. airflow/providers/google/cloud/hooks/bigquery.py +63 -77
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +8 -4
  7. airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
  8. airflow/providers/google/cloud/hooks/dataflow.py +2 -2
  9. airflow/providers/google/cloud/hooks/dataplex.py +1 -1
  10. airflow/providers/google/cloud/hooks/dataprep.py +4 -1
  11. airflow/providers/google/cloud/hooks/gcs.py +5 -5
  12. airflow/providers/google/cloud/hooks/looker.py +10 -1
  13. airflow/providers/google/cloud/hooks/mlengine.py +2 -1
  14. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  15. airflow/providers/google/cloud/hooks/spanner.py +2 -2
  16. airflow/providers/google/cloud/hooks/translate.py +1 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -36
  18. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
  19. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +44 -80
  20. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +11 -2
  21. airflow/providers/google/cloud/hooks/vision.py +2 -2
  22. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  23. airflow/providers/google/cloud/links/base.py +75 -11
  24. airflow/providers/google/cloud/links/bigquery.py +0 -47
  25. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  26. airflow/providers/google/cloud/links/bigtable.py +0 -48
  27. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  28. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  29. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  30. airflow/providers/google/cloud/links/cloud_run.py +27 -0
  31. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  32. airflow/providers/google/cloud/links/cloud_storage_transfer.py +16 -43
  33. airflow/providers/google/cloud/links/cloud_tasks.py +6 -25
  34. airflow/providers/google/cloud/links/compute.py +0 -58
  35. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  36. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  37. airflow/providers/google/cloud/links/dataflow.py +0 -34
  38. airflow/providers/google/cloud/links/dataform.py +0 -64
  39. airflow/providers/google/cloud/links/datafusion.py +1 -96
  40. airflow/providers/google/cloud/links/dataplex.py +0 -154
  41. airflow/providers/google/cloud/links/dataprep.py +0 -24
  42. airflow/providers/google/cloud/links/dataproc.py +14 -90
  43. airflow/providers/google/cloud/links/datastore.py +0 -31
  44. airflow/providers/google/cloud/links/kubernetes_engine.py +5 -59
  45. airflow/providers/google/cloud/links/life_sciences.py +0 -19
  46. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  47. airflow/providers/google/cloud/links/mlengine.py +0 -70
  48. airflow/providers/google/cloud/links/pubsub.py +0 -32
  49. airflow/providers/google/cloud/links/spanner.py +0 -33
  50. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  51. airflow/providers/google/cloud/links/translate.py +16 -186
  52. airflow/providers/google/cloud/links/vertex_ai.py +8 -224
  53. airflow/providers/google/cloud/links/workflows.py +0 -52
  54. airflow/providers/google/cloud/log/gcs_task_handler.py +4 -4
  55. airflow/providers/google/cloud/operators/alloy_db.py +69 -54
  56. airflow/providers/google/cloud/operators/automl.py +16 -14
  57. airflow/providers/google/cloud/operators/bigquery.py +49 -25
  58. airflow/providers/google/cloud/operators/bigquery_dts.py +2 -4
  59. airflow/providers/google/cloud/operators/bigtable.py +35 -6
  60. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  61. airflow/providers/google/cloud/operators/cloud_build.py +74 -31
  62. airflow/providers/google/cloud/operators/cloud_composer.py +34 -35
  63. airflow/providers/google/cloud/operators/cloud_memorystore.py +68 -42
  64. airflow/providers/google/cloud/operators/cloud_run.py +9 -1
  65. airflow/providers/google/cloud/operators/cloud_sql.py +11 -15
  66. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +0 -2
  67. airflow/providers/google/cloud/operators/compute.py +7 -39
  68. airflow/providers/google/cloud/operators/datacatalog.py +156 -20
  69. airflow/providers/google/cloud/operators/dataflow.py +37 -14
  70. airflow/providers/google/cloud/operators/dataform.py +14 -4
  71. airflow/providers/google/cloud/operators/datafusion.py +4 -12
  72. airflow/providers/google/cloud/operators/dataplex.py +180 -96
  73. airflow/providers/google/cloud/operators/dataprep.py +0 -4
  74. airflow/providers/google/cloud/operators/dataproc.py +10 -16
  75. airflow/providers/google/cloud/operators/dataproc_metastore.py +95 -87
  76. airflow/providers/google/cloud/operators/datastore.py +21 -5
  77. airflow/providers/google/cloud/operators/dlp.py +3 -26
  78. airflow/providers/google/cloud/operators/functions.py +15 -6
  79. airflow/providers/google/cloud/operators/gcs.py +1 -7
  80. airflow/providers/google/cloud/operators/kubernetes_engine.py +53 -92
  81. airflow/providers/google/cloud/operators/life_sciences.py +0 -1
  82. airflow/providers/google/cloud/operators/managed_kafka.py +106 -51
  83. airflow/providers/google/cloud/operators/mlengine.py +0 -1
  84. airflow/providers/google/cloud/operators/pubsub.py +4 -5
  85. airflow/providers/google/cloud/operators/spanner.py +0 -4
  86. airflow/providers/google/cloud/operators/speech_to_text.py +0 -1
  87. airflow/providers/google/cloud/operators/stackdriver.py +0 -8
  88. airflow/providers/google/cloud/operators/tasks.py +0 -11
  89. airflow/providers/google/cloud/operators/text_to_speech.py +0 -1
  90. airflow/providers/google/cloud/operators/translate.py +37 -13
  91. airflow/providers/google/cloud/operators/translate_speech.py +0 -1
  92. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +31 -18
  93. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +28 -8
  94. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +38 -25
  95. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +69 -7
  96. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +42 -8
  97. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +531 -0
  98. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +93 -117
  99. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +10 -8
  100. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +56 -10
  101. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +29 -6
  102. airflow/providers/google/cloud/operators/vertex_ai/ray.py +9 -6
  103. airflow/providers/google/cloud/operators/workflows.py +1 -9
  104. airflow/providers/google/cloud/sensors/bigquery.py +1 -1
  105. airflow/providers/google/cloud/sensors/bigquery_dts.py +6 -1
  106. airflow/providers/google/cloud/sensors/bigtable.py +15 -3
  107. airflow/providers/google/cloud/sensors/cloud_composer.py +6 -1
  108. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +6 -1
  109. airflow/providers/google/cloud/sensors/dataflow.py +3 -3
  110. airflow/providers/google/cloud/sensors/dataform.py +6 -1
  111. airflow/providers/google/cloud/sensors/datafusion.py +6 -1
  112. airflow/providers/google/cloud/sensors/dataplex.py +6 -1
  113. airflow/providers/google/cloud/sensors/dataprep.py +6 -1
  114. airflow/providers/google/cloud/sensors/dataproc.py +6 -1
  115. airflow/providers/google/cloud/sensors/dataproc_metastore.py +6 -1
  116. airflow/providers/google/cloud/sensors/gcs.py +9 -3
  117. airflow/providers/google/cloud/sensors/looker.py +6 -1
  118. airflow/providers/google/cloud/sensors/pubsub.py +8 -3
  119. airflow/providers/google/cloud/sensors/tasks.py +6 -1
  120. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +6 -1
  121. airflow/providers/google/cloud/sensors/workflows.py +6 -1
  122. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +1 -1
  123. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +1 -1
  124. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +10 -7
  125. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +1 -2
  126. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +0 -1
  127. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +1 -1
  128. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  129. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +1 -1
  130. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -2
  131. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
  132. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +1 -1
  133. airflow/providers/google/cloud/transfers/gcs_to_local.py +1 -1
  134. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  135. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +5 -1
  136. airflow/providers/google/cloud/transfers/gdrive_to_local.py +1 -1
  137. airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
  138. airflow/providers/google/cloud/transfers/local_to_gcs.py +1 -1
  139. airflow/providers/google/cloud/transfers/s3_to_gcs.py +11 -5
  140. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +1 -1
  141. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  142. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +2 -2
  143. airflow/providers/google/cloud/transfers/sql_to_gcs.py +1 -1
  144. airflow/providers/google/cloud/triggers/bigquery.py +32 -5
  145. airflow/providers/google/cloud/triggers/dataproc.py +62 -10
  146. airflow/providers/google/cloud/utils/field_validator.py +1 -2
  147. airflow/providers/google/common/auth_backend/google_openid.py +2 -1
  148. airflow/providers/google/common/deprecated.py +2 -1
  149. airflow/providers/google/common/hooks/base_google.py +7 -3
  150. airflow/providers/google/common/links/storage.py +0 -22
  151. airflow/providers/google/firebase/operators/firestore.py +1 -1
  152. airflow/providers/google/get_provider_info.py +14 -16
  153. airflow/providers/google/leveldb/hooks/leveldb.py +30 -1
  154. airflow/providers/google/leveldb/operators/leveldb.py +1 -1
  155. airflow/providers/google/marketing_platform/links/analytics_admin.py +3 -6
  156. airflow/providers/google/marketing_platform/operators/analytics_admin.py +0 -1
  157. airflow/providers/google/marketing_platform/operators/campaign_manager.py +4 -4
  158. airflow/providers/google/marketing_platform/operators/display_video.py +6 -6
  159. airflow/providers/google/marketing_platform/operators/search_ads.py +1 -1
  160. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +6 -1
  161. airflow/providers/google/marketing_platform/sensors/display_video.py +6 -1
  162. airflow/providers/google/suite/operators/sheets.py +3 -3
  163. airflow/providers/google/suite/sensors/drive.py +6 -1
  164. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +1 -1
  165. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  166. airflow/providers/google/suite/transfers/local_to_drive.py +1 -1
  167. airflow/providers/google/version_compat.py +28 -0
  168. {apache_airflow_providers_google-16.0.0a1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/METADATA +35 -35
  169. {apache_airflow_providers_google-16.0.0a1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/RECORD +171 -170
  170. airflow/providers/google/cloud/links/automl.py +0 -193
  171. {apache_airflow_providers_google-16.0.0a1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/WHEEL +0 -0
  172. {apache_airflow_providers_google-16.0.0a1.dist-info → apache_airflow_providers_google-16.1.0rc1.dist-info}/entry_points.txt +0 -0
@@ -147,7 +147,6 @@ class WorkflowsCreateWorkflowOperator(GoogleCloudBaseOperator):
 
         WorkflowsWorkflowDetailsLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             project_id=self.project_id or hook.project_id,
@@ -235,7 +234,6 @@ class WorkflowsUpdateWorkflowOperator(GoogleCloudBaseOperator):
 
         WorkflowsWorkflowDetailsLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             project_id=self.project_id or hook.project_id,
@@ -368,7 +366,6 @@ class WorkflowsListWorkflowsOperator(GoogleCloudBaseOperator):
 
         WorkflowsListOfWorkflowsLink.persist(
             context=context,
-            task_instance=self,
             project_id=self.project_id or hook.project_id,
         )
 
@@ -434,7 +431,6 @@ class WorkflowsGetWorkflowOperator(GoogleCloudBaseOperator):
 
         WorkflowsWorkflowDetailsLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             project_id=self.project_id or hook.project_id,
@@ -505,11 +501,10 @@ class WorkflowsCreateExecutionOperator(GoogleCloudBaseOperator):
             metadata=self.metadata,
         )
         execution_id = execution.name.split("/")[-1]
-        self.xcom_push(context, key="execution_id", value=execution_id)
+        context["task_instance"].xcom_push(key="execution_id", value=execution_id)
 
         WorkflowsExecutionLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             execution_id=execution_id,
@@ -582,7 +577,6 @@ class WorkflowsCancelExecutionOperator(GoogleCloudBaseOperator):
 
         WorkflowsExecutionLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             execution_id=self.execution_id,
@@ -661,7 +655,6 @@ class WorkflowsListExecutionsOperator(GoogleCloudBaseOperator):
 
         WorkflowsWorkflowDetailsLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             project_id=self.project_id or hook.project_id,
@@ -737,7 +730,6 @@ class WorkflowsGetExecutionOperator(GoogleCloudBaseOperator):
 
         WorkflowsExecutionLink.persist(
             context=context,
-            task_instance=self,
             location_id=self.location,
             workflow_id=self.workflow_id,
             execution_id=self.execution_id,
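The WorkflowsCreateExecutionOperator hunk above also swaps the removed `self.xcom_push(context, ...)` helper for a push through the task instance carried in the execution context, presumably to stay compatible with Airflow 3's Task SDK. A minimal sketch of that calling pattern in a custom operator (the operator itself is hypothetical; only the xcom_push call mirrors the diff):

```python
from __future__ import annotations

from typing import Any

from airflow.models import BaseOperator  # on Airflow 3 this would come from airflow.sdk


class PushExecutionIdOperator(BaseOperator):
    """Hypothetical operator; only the XCom call mirrors the provider change."""

    def execute(self, context: dict[str, Any]) -> str:
        execution_name = "projects/p/locations/l/workflows/w/executions/123"
        execution_id = execution_name.split("/")[-1]
        # Instead of the old BaseOperator.xcom_push(context, ...) helper,
        # push through the TaskInstance that the context always provides.
        context["task_instance"].xcom_push(key="execution_id", value=execution_id)
        return execution_id
```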
@@ -31,7 +31,7 @@ from airflow.providers.google.cloud.triggers.bigquery import (
     BigQueryTableExistenceTrigger,
     BigQueryTablePartitionExistenceTrigger,
 )
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -28,7 +28,12 @@ from google.cloud.bigquery_datatransfer_v1 import TransferState
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
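The hunk above is the first of many in this release that guard the BaseSensorOperator import behind an AIRFLOW_V_3_0_PLUS flag from the provider's new version_compat module (airflow/providers/google/version_compat.py, +28 lines in the file list). The wheel contains the authoritative implementation; the sketch below only approximates the kind of shim such a module provides, assuming the flag is derived from the installed Airflow version:

```python
from __future__ import annotations

from packaging.version import Version

from airflow import __version__ as AIRFLOW_VERSION

# Compare against the base version so pre-releases such as "3.0.0rc1" count as 3.0+.
AIRFLOW_V_3_0_PLUS = Version(Version(AIRFLOW_VERSION).base_version) >= Version("3.0.0")

if AIRFLOW_V_3_0_PLUS:
    from airflow.sdk import BaseOperator, BaseSensorOperator
else:
    from airflow.models import BaseOperator  # type: ignore[no-redef]
    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]

__all__ = ["AIRFLOW_V_3_0_PLUS", "BaseOperator", "BaseSensorOperator"]
```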
@@ -20,7 +20,7 @@
 from __future__ import annotations
 
 from collections.abc import Sequence
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 import google.api_core.exceptions
 from google.cloud.bigtable import enums
@@ -30,7 +30,12 @@ from airflow.providers.google.cloud.hooks.bigtable import BigtableHook
 from airflow.providers.google.cloud.links.bigtable import BigtableTablesLink
 from airflow.providers.google.cloud.operators.bigtable import BigtableValidationMixin
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -89,6 +94,13 @@ class BigtableTableReplicationCompletedSensor(BaseSensorOperator, BigtableValida
         self.impersonation_chain = impersonation_chain
         super().__init__(**kwargs)
 
+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "instance_id": self.instance_id,
+            "project_id": self.project_id,
+        }
+
     def poke(self, context: Context) -> bool:
         hook = BigtableHook(
             gcp_conn_id=self.gcp_conn_id,
@@ -119,5 +131,5 @@ class BigtableTableReplicationCompletedSensor(BaseSensorOperator, BigtableValida
             return False
 
         self.log.info("Table '%s' is replicated.", self.table_id)
-        BigtableTablesLink.persist(context=context, task_instance=self)
+        BigtableTablesLink.persist(context=context)
         return True
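Together with the rework of links/base.py listed above (+75 -11), these bigtable hunks show the new link-persistence contract: `persist()` is now called with the context plus only link-specific keys, while shared values such as project_id come from an `extra_links_params` property on the operator. How BaseGoogleLink actually merges the two is not shown in this diff; the sketch below is only a plausible reading of that contract, and every name other than `extra_links_params`, `persist`, and the standard context keys is illustrative:

```python
from __future__ import annotations

from typing import Any, ClassVar


class IllustrativeLink:
    """Not the provider's BaseGoogleLink; demonstrates the calling convention only."""

    key: ClassVar[str] = "illustrative_link"
    format_str: ClassVar[str] = (
        "https://console.cloud.google.com/bigtable/instances/{instance_id}?project={project_id}"
    )

    @classmethod
    def persist(cls, context: dict[str, Any], **extra: Any) -> None:
        # Operator-level defaults come from the new extra_links_params property;
        # call-site keyword arguments override them.
        task = context["task"]
        params = {**getattr(task, "extra_links_params", {}), **extra}
        context["task_instance"].xcom_push(key=cls.key, value=params)

    @classmethod
    def format_link(cls, **params: Any) -> str:
        return cls.format_str.format(**params)
```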
@@ -33,7 +33,12 @@ from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook
 from airflow.providers.google.cloud.triggers.cloud_composer import CloudComposerDAGRunTrigger
 from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 from airflow.utils.state import TaskInstanceState
 
 if TYPE_CHECKING:
@@ -35,7 +35,12 @@ from airflow.providers.google.cloud.triggers.cloud_storage_transfer_service impo
     CloudStorageTransferServiceCheckJobStatusTrigger,
 )
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -19,9 +19,9 @@
 
 from __future__ import annotations
 
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
@@ -37,7 +37,7 @@ from airflow.providers.google.cloud.triggers.dataflow import (
     DataflowJobStatusTrigger,
 )
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import BaseSensorOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -24,7 +24,12 @@ from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataform import DataformHook
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -25,7 +25,12 @@ from typing import TYPE_CHECKING
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.providers.google.cloud.hooks.datafusion import DataFusionHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -37,7 +37,12 @@ from airflow.providers.google.cloud.hooks.dataplex import (
     AirflowDataQualityScanResultTimeoutException,
     DataplexHook,
 )
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 
 class TaskState:
@@ -23,7 +23,12 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
 from airflow.providers.google.cloud.hooks.dataprep import GoogleDataprepHook, JobGroupStatuses
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -29,7 +29,12 @@ from google.cloud.dataproc_v1.types import Batch, JobStatus
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataproc import DataprocHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -23,7 +23,12 @@ from typing import TYPE_CHECKING
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataproc_metastore import DataprocMetastoreHook
 from airflow.providers.google.cloud.hooks.gcs import parse_json_from_gcs
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
@@ -21,9 +21,9 @@ from __future__ import annotations
 
 import os
 import textwrap
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any
 
 from google.cloud.storage.retry import DEFAULT_RETRY
 
@@ -36,7 +36,13 @@ from airflow.providers.google.cloud.triggers.gcs import (
     GCSPrefixBlobTrigger,
     GCSUploadSessionTrigger,
 )
-from airflow.sensors.base import BaseSensorOperator, poke_mode_only
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+    from airflow.sdk.bases.sensor import poke_mode_only
+else:
+    from airflow.sensors.base import BaseSensorOperator, poke_mode_only  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
@@ -23,7 +23,12 @@ from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.looker import JobStatus, LookerHook
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -19,9 +19,9 @@
 
 from __future__ import annotations
 
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any
 
 from google.cloud import pubsub_v1
 from google.cloud.pubsub_v1.types import ReceivedMessage
@@ -30,7 +30,12 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.pubsub import PubSubHook
 from airflow.providers.google.cloud.triggers.pubsub import PubsubPullTrigger
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -24,7 +24,12 @@ from typing import TYPE_CHECKING
 
 from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -25,7 +25,12 @@ from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.vertex_ai.feature_store import FeatureStoreHook
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -25,7 +25,12 @@ from google.cloud.workflows.executions_v1beta import Execution
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.sensors.base import BaseSensorOperator
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk import BaseSensorOperator
+else:
+    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
@@ -21,8 +21,8 @@ import tempfile
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 
 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
@@ -23,8 +23,8 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url, gcs_object_is_directory
+from airflow.providers.google.version_compat import BaseOperator
 
 try:
     from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook
@@ -22,9 +22,10 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.cloud.links.bigquery import BigQueryTableLink
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -73,6 +74,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
+    :param project_id: Google Cloud Project where the job is running
     """
 
     template_fields: Sequence[str] = (
@@ -93,6 +95,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         write_disposition: str = "WRITE_EMPTY",
         create_disposition: str = "CREATE_IF_NEEDED",
         gcp_conn_id: str = "google_cloud_default",
+        project_id: str = PROVIDE_PROJECT_ID,
         labels: dict | None = None,
         encryption_configuration: dict | None = None,
         location: str | None = None,
@@ -112,6 +115,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         self.impersonation_chain = impersonation_chain
         self.hook: BigQueryHook | None = None
         self._job_conf: dict = {}
+        self.project_id = project_id
 
     def _prepare_job_configuration(self):
         self.source_project_dataset_tables = (
@@ -124,7 +128,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         for source_project_dataset_table in self.source_project_dataset_tables:
             source_project, source_dataset, source_table = self.hook.split_tablename(
                 table_input=source_project_dataset_table,
-                default_project_id=self.hook.project_id,
+                default_project_id=self.project_id,
                 var_name="source_project_dataset_table",
             )
             source_project_dataset_tables_fixup.append(
@@ -133,7 +137,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
 
         destination_project, destination_dataset, destination_table = self.hook.split_tablename(
             table_input=self.destination_project_dataset_table,
-            default_project_id=self.hook.project_id,
+            default_project_id=self.project_id,
         )
         configuration = {
             "copy": {
@@ -168,18 +172,17 @@ class BigQueryToBigQueryOperator(BaseOperator):
             impersonation_chain=self.impersonation_chain,
         )
 
-        if not self.hook.project_id:
-            raise ValueError("The project_id should be set")
+        if not self.project_id:
+            self.project_id = self.hook.project_id
 
         configuration = self._prepare_job_configuration()
         self._job_conf = self.hook.insert_job(
-            configuration=configuration, project_id=self.hook.project_id
+            configuration=configuration, project_id=self.project_id
         ).to_api_repr()
 
         dest_table_info = self._job_conf["configuration"]["copy"]["destinationTable"]
         BigQueryTableLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dest_table_info["datasetId"],
             project_id=dest_table_info["projectId"],
             table_id=dest_table_info["tableId"],
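The BigQueryToBigQueryOperator hunks above add an explicit `project_id` parameter that falls back to the hook's project when omitted, and drop the hard ValueError raised when the connection resolved to no project. A minimal usage sketch under assumed project, dataset, and table names:

```python
from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import (
    BigQueryToBigQueryOperator,
)

# All identifiers below are placeholders.
copy_events = BigQueryToBigQueryOperator(
    task_id="copy_events",
    source_project_dataset_tables="my-project.raw.events",
    destination_project_dataset_table="my-project.archive.events",
    # New in 16.1.0: run the copy job in an explicit project instead of
    # whatever project the google_cloud_default connection resolves to.
    project_id="my-project",
    write_disposition="WRITE_TRUNCATE",
)
```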
@@ -27,11 +27,11 @@ from google.cloud.bigquery import DEFAULT_RETRY, UnknownJob
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook, BigQueryJob
 from airflow.providers.google.cloud.links.bigquery import BigQueryTableLink
 from airflow.providers.google.cloud.triggers.bigquery import BigQueryInsertJobTrigger
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
+from airflow.providers.google.version_compat import BaseOperator
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
@@ -254,7 +254,6 @@ class BigQueryToGCSOperator(BaseOperator):
         dataset_id, project_id, table_id = conf["datasetId"], conf["projectId"], conf["tableId"]
         BigQueryTableLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dataset_id,
             project_id=project_id,
             table_id=table_id,
@@ -101,7 +101,6 @@ class BigQueryToMsSqlOperator(BigQueryToSqlBaseOperator):
         project_id, dataset_id, table_id = self.source_project_dataset_table.split(".")
         BigQueryTableLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=dataset_id,
             project_id=project_id,
             table_id=table_id,
@@ -23,9 +23,9 @@ import abc
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.cloud.utils.bigquery_get_data import bigquery_get_data
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.providers.common.sql.hooks.sql import DbApiHook
@@ -21,9 +21,9 @@ from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Any
 
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.suite.hooks.calendar import GoogleCalendarHook
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from datetime import datetime
@@ -31,9 +31,9 @@ from uuid import UUID
 from cassandra.util import Date, OrderedMapSerializedKey, SortedSet, Time
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.apache.cassandra.hooks.cassandra import CassandraHook
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -26,9 +26,9 @@ from enum import Enum
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.facebook.ads.hooks.ads import FacebookAdsReportingHook
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from facebook_business.adobjects.adsinsights import AdsInsights
@@ -208,7 +208,7 @@ class FacebookAdsReportToGcsOperator(BaseOperator):
 
     def _flush_rows(self, converted_rows: list[Any] | None, object_name: str):
         if converted_rows:
-            headers = converted_rows[0].keys()
+            headers = self.fields
            with tempfile.NamedTemporaryFile("w", suffix=".csv") as csvfile:
                writer = csv.DictWriter(csvfile, fieldnames=headers)
                writer.writeheader()
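The `_flush_rows` change above derives the CSV header from the fields requested for the report rather than from the keys of the first returned row, so the column set no longer depends on which metrics happen to appear in that row. A standalone illustration of the underlying csv.DictWriter behaviour (the data is made up):

```python
import csv
import io

# Made-up report rows: the first row is missing the "spend" metric.
rows = [
    {"campaign_id": "1", "clicks": 10},
    {"campaign_id": "2", "clicks": 7, "spend": 3.5},
]
requested_fields = ["campaign_id", "clicks", "spend"]

buf = io.StringIO()
# Fixing fieldnames to the requested fields keeps every column; deriving them
# from rows[0].keys() would omit "spend" from the header, and DictWriter's
# default extrasaction="raise" would then fail on the second row.
writer = csv.DictWriter(buf, fieldnames=requested_fields, restval="")
writer.writeheader()
writer.writerows(rows)
print(buf.getvalue())
```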
@@ -38,12 +38,12 @@ from google.cloud.bigquery.table import EncryptionConfiguration, Table, TableRef
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook, BigQueryJob
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.cloud.links.bigquery import BigQueryTableLink
 from airflow.providers.google.cloud.triggers.bigquery import BigQueryInsertJobTrigger
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
+from airflow.providers.google.version_compat import BaseOperator
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
@@ -373,7 +373,6 @@ class GCSToBigQueryOperator(BaseOperator):
 
         BigQueryTableLink.persist(
             context=context,
-            task_instance=self,
             dataset_id=table_obj_api_repr["tableReference"]["datasetId"],
             project_id=table_obj_api_repr["tableReference"]["projectId"],
             table_id=table_obj_api_repr["tableReference"]["tableId"],
@@ -24,8 +24,8 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 
 WILDCARD = "*"
 
@@ -20,9 +20,9 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.models.xcom import MAX_XCOM_SIZE
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -26,8 +26,8 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.version_compat import BaseOperator
 from airflow.providers.sftp.hooks.sftp import SFTPHook
 
 WILDCARD = "*"
@@ -19,9 +19,9 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
-from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
+from airflow.providers.google.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -99,3 +99,7 @@ class GoogleDriveToGCSOperator(BaseOperator):
             bucket_name=self.bucket_name, object_name=self.object_name
         ) as file:
             gdrive_hook.download_file(file_id=file_metadata["id"], file_handle=file)
+
+    def dry_run(self):
+        """Perform a dry run of the operator."""
+        return None