apache-airflow-providers-google 10.14.0rc2__py3-none-any.whl → 10.15.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (121)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/cloud/hooks/automl.py +13 -13
  3. airflow/providers/google/cloud/hooks/bigquery.py +193 -246
  4. airflow/providers/google/cloud/hooks/bigquery_dts.py +6 -6
  5. airflow/providers/google/cloud/hooks/bigtable.py +8 -8
  6. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  7. airflow/providers/google/cloud/hooks/cloud_build.py +19 -20
  8. airflow/providers/google/cloud/hooks/cloud_composer.py +4 -4
  9. airflow/providers/google/cloud/hooks/cloud_memorystore.py +10 -10
  10. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  11. airflow/providers/google/cloud/hooks/cloud_sql.py +17 -17
  12. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +3 -3
  13. airflow/providers/google/cloud/hooks/compute.py +16 -16
  14. airflow/providers/google/cloud/hooks/compute_ssh.py +1 -1
  15. airflow/providers/google/cloud/hooks/datacatalog.py +22 -22
  16. airflow/providers/google/cloud/hooks/dataflow.py +48 -49
  17. airflow/providers/google/cloud/hooks/dataform.py +16 -16
  18. airflow/providers/google/cloud/hooks/datafusion.py +15 -15
  19. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  20. airflow/providers/google/cloud/hooks/dataplex.py +19 -19
  21. airflow/providers/google/cloud/hooks/dataprep.py +8 -8
  22. airflow/providers/google/cloud/hooks/dataproc.py +88 -0
  23. airflow/providers/google/cloud/hooks/dataproc_metastore.py +13 -13
  24. airflow/providers/google/cloud/hooks/datastore.py +3 -3
  25. airflow/providers/google/cloud/hooks/dlp.py +25 -25
  26. airflow/providers/google/cloud/hooks/gcs.py +25 -23
  27. airflow/providers/google/cloud/hooks/gdm.py +3 -3
  28. airflow/providers/google/cloud/hooks/kms.py +3 -3
  29. airflow/providers/google/cloud/hooks/kubernetes_engine.py +63 -48
  30. airflow/providers/google/cloud/hooks/life_sciences.py +13 -12
  31. airflow/providers/google/cloud/hooks/looker.py +7 -7
  32. airflow/providers/google/cloud/hooks/mlengine.py +12 -12
  33. airflow/providers/google/cloud/hooks/natural_language.py +2 -2
  34. airflow/providers/google/cloud/hooks/os_login.py +1 -1
  35. airflow/providers/google/cloud/hooks/pubsub.py +9 -9
  36. airflow/providers/google/cloud/hooks/secret_manager.py +1 -1
  37. airflow/providers/google/cloud/hooks/spanner.py +11 -11
  38. airflow/providers/google/cloud/hooks/speech_to_text.py +1 -1
  39. airflow/providers/google/cloud/hooks/stackdriver.py +7 -7
  40. airflow/providers/google/cloud/hooks/tasks.py +11 -11
  41. airflow/providers/google/cloud/hooks/text_to_speech.py +1 -1
  42. airflow/providers/google/cloud/hooks/translate.py +1 -1
  43. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +13 -13
  44. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +6 -6
  45. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +45 -50
  46. airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +13 -13
  47. airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +9 -9
  48. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +128 -11
  49. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +10 -10
  50. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +8 -8
  51. airflow/providers/google/cloud/hooks/video_intelligence.py +2 -2
  52. airflow/providers/google/cloud/hooks/vision.py +1 -1
  53. airflow/providers/google/cloud/hooks/workflows.py +10 -10
  54. airflow/providers/google/cloud/links/datafusion.py +12 -5
  55. airflow/providers/google/cloud/operators/bigquery.py +9 -11
  56. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +3 -1
  57. airflow/providers/google/cloud/operators/dataflow.py +16 -16
  58. airflow/providers/google/cloud/operators/datafusion.py +9 -1
  59. airflow/providers/google/cloud/operators/dataproc.py +298 -65
  60. airflow/providers/google/cloud/operators/kubernetes_engine.py +6 -6
  61. airflow/providers/google/cloud/operators/life_sciences.py +10 -9
  62. airflow/providers/google/cloud/operators/mlengine.py +96 -96
  63. airflow/providers/google/cloud/operators/pubsub.py +2 -0
  64. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +33 -3
  65. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +59 -2
  66. airflow/providers/google/cloud/secrets/secret_manager.py +8 -7
  67. airflow/providers/google/cloud/sensors/bigquery.py +20 -16
  68. airflow/providers/google/cloud/sensors/cloud_composer.py +11 -8
  69. airflow/providers/google/cloud/sensors/gcs.py +8 -7
  70. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +4 -4
  71. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  72. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  73. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +1 -1
  74. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +1 -1
  75. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +1 -1
  76. airflow/providers/google/cloud/transfers/presto_to_gcs.py +1 -1
  77. airflow/providers/google/cloud/transfers/s3_to_gcs.py +3 -3
  78. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  79. airflow/providers/google/cloud/transfers/sql_to_gcs.py +3 -3
  80. airflow/providers/google/cloud/transfers/trino_to_gcs.py +1 -1
  81. airflow/providers/google/cloud/triggers/bigquery.py +12 -12
  82. airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -1
  83. airflow/providers/google/cloud/triggers/cloud_batch.py +3 -1
  84. airflow/providers/google/cloud/triggers/cloud_build.py +2 -2
  85. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  86. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +6 -6
  87. airflow/providers/google/cloud/triggers/dataflow.py +3 -1
  88. airflow/providers/google/cloud/triggers/datafusion.py +2 -2
  89. airflow/providers/google/cloud/triggers/dataplex.py +2 -2
  90. airflow/providers/google/cloud/triggers/dataproc.py +2 -2
  91. airflow/providers/google/cloud/triggers/gcs.py +12 -8
  92. airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -2
  93. airflow/providers/google/cloud/triggers/mlengine.py +2 -2
  94. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  95. airflow/providers/google/cloud/triggers/vertex_ai.py +99 -0
  96. airflow/providers/google/cloud/utils/bigquery.py +2 -2
  97. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  98. airflow/providers/google/cloud/utils/dataform.py +1 -1
  99. airflow/providers/google/cloud/utils/field_validator.py +2 -2
  100. airflow/providers/google/cloud/utils/helpers.py +2 -2
  101. airflow/providers/google/cloud/utils/mlengine_operator_utils.py +1 -1
  102. airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +1 -1
  103. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  104. airflow/providers/google/common/hooks/base_google.py +29 -22
  105. airflow/providers/google/common/hooks/discovery_api.py +2 -2
  106. airflow/providers/google/common/utils/id_token_credentials.py +5 -5
  107. airflow/providers/google/firebase/hooks/firestore.py +3 -3
  108. airflow/providers/google/get_provider_info.py +7 -2
  109. airflow/providers/google/leveldb/hooks/leveldb.py +2 -2
  110. airflow/providers/google/marketing_platform/hooks/analytics.py +11 -14
  111. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +11 -11
  112. airflow/providers/google/marketing_platform/hooks/display_video.py +13 -13
  113. airflow/providers/google/marketing_platform/hooks/search_ads.py +4 -4
  114. airflow/providers/google/marketing_platform/operators/analytics.py +37 -32
  115. airflow/providers/google/suite/hooks/calendar.py +2 -2
  116. airflow/providers/google/suite/hooks/drive.py +7 -7
  117. airflow/providers/google/suite/hooks/sheets.py +8 -8
  118. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/METADATA +11 -11
  119. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/RECORD +121 -120
  120. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/WHEEL +0 -0
  121. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/entry_points.txt +0 -0

airflow/providers/google/cloud/hooks/workflows.py

@@ -50,11 +50,11 @@ class WorkflowsHook(GoogleBaseHook):
         super().__init__(**kwargs)

     def get_workflows_client(self) -> WorkflowsClient:
-        """Returns WorkflowsClient."""
+        """Return WorkflowsClient object."""
         return WorkflowsClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)

     def get_executions_client(self) -> ExecutionsClient:
-        """Returns ExecutionsClient."""
+        """Return ExecutionsClient object."""
         return ExecutionsClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)

     @GoogleBaseHook.fallback_to_default_project_id
@@ -69,7 +69,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Operation:
         """
-        Creates a new workflow.
+        Create a new workflow.

         If a workflow with the specified name already exists in the
         specified project and location, the long running operation will
@@ -106,7 +106,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Workflow:
         """
-        Gets details of a single Workflow.
+        Get details of a single Workflow.

         :param workflow_id: Required. The ID of the workflow to be created.
         :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
@@ -131,7 +131,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Operation:
         """
-        Updates an existing workflow.
+        Update an existing workflow.

         Running this method has no impact on already running
         executions of the workflow. A new revision of the
@@ -196,7 +196,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> ListWorkflowsPager:
         """
-        Lists Workflows in a given project and location; the default order is not specified.
+        List Workflows in a given project and location; the default order is not specified.

         :param filter_: Filter to restrict results to specific workflows.
         :param order_by: Comma-separated list of fields that
@@ -234,7 +234,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Execution:
         """
-        Creates a new execution using the latest revision of the given workflow.
+        Create a new execution using the latest revision of the given workflow.

         :param execution: Required. Input parameters of the execution represented as a dictionary.
         :param workflow_id: Required. The ID of the workflow.
@@ -269,7 +269,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Execution:
         """
-        Returns an execution for the given ``workflow_id`` and ``execution_id``.
+        Return an execution for the given ``workflow_id`` and ``execution_id``.

         :param workflow_id: Required. The ID of the workflow.
         :param execution_id: Required. The ID of the execution.
@@ -298,7 +298,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Execution:
         """
-        Cancels an execution using the given ``workflow_id`` and ``execution_id``.
+        Cancel an execution using the given ``workflow_id`` and ``execution_id``.

         :param workflow_id: Required. The ID of the workflow.
         :param execution_id: Required. The ID of the execution.
@@ -328,7 +328,7 @@ class WorkflowsHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> ListExecutionsPager:
         """
-        Returns a list of executions which belong to the workflow with the given name.
+        Return a list of executions which belong to the workflow with the given name.

         The method returns executions of all workflow revisions. Returned
         executions are ordered by their start time (newest first).

airflow/providers/google/cloud/links/datafusion.py

@@ -15,7 +15,7 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-"""This module contains Google Compute Engine links."""
+"""This module contains Google Data Fusion links."""
 from __future__ import annotations

 from typing import TYPE_CHECKING, ClassVar
@@ -30,8 +30,8 @@ if TYPE_CHECKING:

 BASE_LINK = "https://console.cloud.google.com/data-fusion"
 DATAFUSION_INSTANCE_LINK = BASE_LINK + "/locations/{region}/instances/{instance_name}?project={project_id}"
-DATAFUSION_PIPELINES_LINK = "{uri}/cdap/ns/default/pipelines"
-DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/default/view/{pipeline_name}"
+DATAFUSION_PIPELINES_LINK = "{uri}/cdap/ns/{namespace}/pipelines"
+DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/{namespace}/view/{pipeline_name}"


 class BaseGoogleLink(BaseOperatorLink):
@@ -52,10 +52,13 @@ class BaseGoogleLink(BaseOperatorLink):
         ti_key: TaskInstanceKey,
     ) -> str:
         conf = XCom.get_value(key=self.key, ti_key=ti_key)
+
         if not conf:
             return ""
-        if self.format_str.startswith("http"):
-            return self.format_str.format(**conf)
+
+        # Add a default value for the 'namespace' parameter for backward compatibility.
+        conf.setdefault("namespace", "default")
+
         return self.format_str.format(**conf)


@@ -98,6 +101,7 @@ class DataFusionPipelineLink(BaseGoogleLink):
         task_instance: BaseOperator,
         uri: str,
         pipeline_name: str,
+        namespace: str,
     ):
         task_instance.xcom_push(
             context=context,
@@ -105,6 +109,7 @@ class DataFusionPipelineLink(BaseGoogleLink):
             value={
                 "uri": uri,
                 "pipeline_name": pipeline_name,
+                "namespace": namespace,
             },
         )

@@ -121,11 +126,13 @@ class DataFusionPipelinesLink(BaseGoogleLink):
         context: Context,
         task_instance: BaseOperator,
         uri: str,
+        namespace: str,
     ):
         task_instance.xcom_push(
             context=context,
             key=DataFusionPipelinesLink.key,
             value={
                 "uri": uri,
+                "namespace": namespace,
             },
         )
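
Note: the `conf.setdefault("namespace", "default")` call above keeps links generated by pre-10.15.0 operators working, since their XCom payloads carry no namespace key. A minimal, self-contained sketch of the behavior (the function name and values are hypothetical):

    DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/{namespace}/view/{pipeline_name}"

    def build_link(conf: dict) -> str:
        if not conf:
            return ""
        # XComs written by older provider versions lack the "namespace" key,
        # so fall back to the CDAP "default" namespace before formatting.
        conf.setdefault("namespace", "default")
        return DATAFUSION_PIPELINE_LINK.format(**conf)

    # An XCom persisted by an older provider version still renders:
    print(build_link({"uri": "https://example-instance", "pipeline_name": "demo"}))
    # -> https://example-instance/pipelines/ns/default/view/demo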

airflow/providers/google/cloud/operators/bigquery.py

@@ -25,6 +25,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, Iterable, Sequence, SupportsAbs

 import attr
+from deprecated import deprecated
 from google.api_core.exceptions import Conflict
 from google.cloud.bigquery import DEFAULT_RETRY, CopyJob, ExtractJob, LoadJob, QueryJob
 from google.cloud.bigquery.table import RowIterator
@@ -1087,6 +1088,10 @@ class BigQueryGetDataOperator(GoogleCloudBaseOperator):
         return event["records"]


+@deprecated(
+    reason="This operator is deprecated. Please use `BigQueryInsertJobOperator`.",
+    category=AirflowProviderDeprecationWarning,
+)
 class BigQueryExecuteQueryOperator(GoogleCloudBaseOperator):
     """Executes BigQuery SQL queries in a specific BigQuery database.

@@ -1211,12 +1216,6 @@ class BigQueryExecuteQueryOperator(GoogleCloudBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
-        warnings.warn(
-            "This operator is deprecated. Please use `BigQueryInsertJobOperator`.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
-
         self.sql = sql
         self.destination_dataset_table = destination_dataset_table
         self.write_disposition = write_disposition
@@ -2171,6 +2170,10 @@ class BigQueryGetDatasetTablesOperator(GoogleCloudBaseOperator):
         )


+@deprecated(
+    reason="This operator is deprecated. Please use BigQueryUpdateDatasetOperator.",
+    category=AirflowProviderDeprecationWarning,
+)
 class BigQueryPatchDatasetOperator(GoogleCloudBaseOperator):
     """Patch a dataset for your Project in BigQuery.

@@ -2215,11 +2218,6 @@ class BigQueryPatchDatasetOperator(GoogleCloudBaseOperator):
         impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
     ) -> None:
-        warnings.warn(
-            "This operator is deprecated. Please use BigQueryUpdateDatasetOperator.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
         self.dataset_id = dataset_id
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
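
Note: these hunks move deprecation from a `warnings.warn(...)` call inside each `__init__` to a class-level `@deprecated` decorator from the `Deprecated` package (imported as `from deprecated import deprecated`), which emits the warning when the class is instantiated. A minimal sketch of the pattern, using a stand-in warning category in place of `AirflowProviderDeprecationWarning`:

    import warnings

    from deprecated import deprecated

    class MyDeprecationWarning(DeprecationWarning):
        """Stand-in for AirflowProviderDeprecationWarning."""

    @deprecated(reason="Use `NewOperator` instead.", category=MyDeprecationWarning)
    class OldOperator:
        def __init__(self, task_id: str):
            self.task_id = task_id

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        OldOperator(task_id="t1")  # the decorator emits the warning here, not __init__
    print(caught[0].category.__name__)  # MyDeprecationWarning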

airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py

@@ -236,7 +236,9 @@ class CloudDataTransferServiceCreateJobOperator(GoogleCloudBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
-        self.body = deepcopy(body)
+        self.body = body
+        if isinstance(self.body, dict):
+            self.body = deepcopy(body)
         self.aws_conn_id = aws_conn_id
         self.gcp_conn_id = gcp_conn_id
         self.api_version = api_version
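
Note: the guard above restricts `deepcopy` to plain-dict bodies. A plausible reading (an assumption, not stated in the diff) is that `body` may now arrive as a non-dict value, e.g. one that is only resolved during templating, and must be stored untouched, while dict bodies are still copied so later in-place edits do not mutate the caller's object:

    from copy import deepcopy

    def store_body(body):
        # Copy plain dicts so later in-place edits do not leak back to the caller;
        # pass anything else (hypothetically, a not-yet-rendered template value) through.
        if isinstance(body, dict):
            return deepcopy(body)
        return body

    original = {"transferSpec": {"awsS3DataSource": {"bucketName": "src"}}}
    stored = store_body(original)
    stored["transferSpec"]["awsS3DataSource"]["bucketName"] = "changed"
    print(original["transferSpec"]["awsS3DataSource"]["bucketName"])  # still "src"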

airflow/providers/google/cloud/operators/dataflow.py

@@ -21,12 +21,13 @@ from __future__ import annotations
 import copy
 import re
 import uuid
-import warnings
 from contextlib import ExitStack
 from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence

+from deprecated import deprecated
+
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
@@ -167,6 +168,11 @@ class DataflowConfiguration:
         self.service_account = service_account


+# TODO: Remove one day
+@deprecated(
+    reason="Please use `providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` instead.",
+    category=AirflowProviderDeprecationWarning,
+)
 class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
     """
     Start a Java Cloud Dataflow batch job; the parameters of the operation will be passed to the job.
@@ -353,13 +359,6 @@ class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
         expected_terminal_state: str | None = None,
         **kwargs,
     ) -> None:
-        # TODO: Remove one day
-        warnings.warn(
-            f"The `{self.__class__.__name__}` operator is deprecated, "
-            f"please use `providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` instead.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
         super().__init__(**kwargs)

         dataflow_default_options = dataflow_default_options or {}
@@ -677,6 +676,9 @@ class DataflowTemplatedJobStartOperator(GoogleCloudBaseOperator):
         options = self.dataflow_default_options
         options.update(self.options)

+        if not self.location:
+            self.location = DEFAULT_DATAFLOW_LOCATION
+
         self.job = self.hook.start_template_dataflow(
             job_name=self.job_name,
             variables=options,
@@ -704,7 +706,7 @@ class DataflowTemplatedJobStartOperator(GoogleCloudBaseOperator):
             trigger=TemplateJobStartTrigger(
                 project_id=self.project_id,
                 job_id=job_id,
-                location=self.location if self.location else DEFAULT_DATAFLOW_LOCATION,
+                location=self.location,
                 gcp_conn_id=self.gcp_conn_id,
                 poll_sleep=self.poll_sleep,
                 impersonation_chain=self.impersonation_chain,
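
Note: these two hunks normalize `location` once in `execute`, so the synchronous `start_template_dataflow` call and the deferred `TemplateJobStartTrigger` see the same resolved value instead of each applying its own fallback. A small illustrative sketch (class and method names are made up; `DEFAULT_DATAFLOW_LOCATION` is `"us-central1"` in the Dataflow hook):

    from __future__ import annotations

    DEFAULT_DATAFLOW_LOCATION = "us-central1"

    class TemplatedJobStart:
        def __init__(self, location: str | None = None):
            self.location = location

        def execute(self) -> str:
            # Resolve the fallback once, up front, so every later consumer
            # (sync hook call, deferred trigger) reads the same value.
            if not self.location:
                self.location = DEFAULT_DATAFLOW_LOCATION
            return self.location

    print(TemplatedJobStart().execute())                # us-central1
    print(TemplatedJobStart("europe-west1").execute())  # europe-west1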

airflow/providers/google/cloud/operators/dataflow.py (continued)

@@ -1029,6 +1031,11 @@ class DataflowStartSqlJobOperator(GoogleCloudBaseOperator):
         )


+# TODO: Remove one day
+@deprecated(
+    reason="Please use `providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator` instead.",
+    category=AirflowProviderDeprecationWarning,
+)
 class DataflowCreatePythonJobOperator(GoogleCloudBaseOperator):
     """
     Launching Cloud Dataflow jobs written in python.
@@ -1151,13 +1158,6 @@ class DataflowCreatePythonJobOperator(GoogleCloudBaseOperator):
         wait_until_finished: bool | None = None,
         **kwargs,
     ) -> None:
-        # TODO: Remove one day
-        warnings.warn(
-            f"The `{self.__class__.__name__}` operator is deprecated, "
-            "please use `providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator` instead.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
         super().__init__(**kwargs)

         self.py_file = py_file

airflow/providers/google/cloud/operators/datafusion.py

@@ -537,6 +537,7 @@ class CloudDataFusionCreatePipelineOperator(GoogleCloudBaseOperator):
             task_instance=self,
             uri=instance["serviceEndpoint"],
             pipeline_name=self.pipeline_name,
+            namespace=self.namespace,
         )
         self.log.info("Pipeline %s created", self.pipeline_name)

@@ -705,7 +706,12 @@ class CloudDataFusionListPipelinesOperator(GoogleCloudBaseOperator):
         )
         self.log.info("Pipelines: %s", pipelines)

-        DataFusionPipelinesLink.persist(context=context, task_instance=self, uri=service_endpoint)
+        DataFusionPipelinesLink.persist(
+            context=context,
+            task_instance=self,
+            uri=service_endpoint,
+            namespace=self.namespace,
+        )
         return pipelines


@@ -825,6 +831,7 @@ class CloudDataFusionStartPipelineOperator(GoogleCloudBaseOperator):
             task_instance=self,
             uri=instance["serviceEndpoint"],
             pipeline_name=self.pipeline_name,
+            namespace=self.namespace,
         )

         if self.deferrable:
@@ -954,6 +961,7 @@ class CloudDataFusionStopPipelineOperator(GoogleCloudBaseOperator):
             task_instance=self,
             uri=instance["serviceEndpoint"],
             pipeline_name=self.pipeline_name,
+            namespace=self.namespace,
         )
         hook.stop_pipeline(
             pipeline_name=self.pipeline_name,
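
Note: each Data Fusion operator now forwards its `namespace` to the persisted link, so console links point at the namespace the pipeline actually runs in. A hedged usage sketch (instance, location, and namespace values are illustrative):

    from airflow.providers.google.cloud.operators.datafusion import (
        CloudDataFusionStartPipelineOperator,
    )

    start_pipeline = CloudDataFusionStartPipelineOperator(
        task_id="start_pipeline",
        pipeline_name="demo_pipeline",
        instance_name="example-instance",
        location="europe-west1",
        namespace="team-a",  # persisted to XCom, so the pipeline link targets this namespace
    )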