apache-airflow-providers-google 10.22.0__py3-none-any.whl → 10.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/cloud/hooks/bigquery.py +91 -54
  3. airflow/providers/google/cloud/hooks/cloud_build.py +3 -2
  4. airflow/providers/google/cloud/hooks/dataflow.py +112 -47
  5. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  6. airflow/providers/google/cloud/hooks/kubernetes_engine.py +15 -26
  7. airflow/providers/google/cloud/hooks/life_sciences.py +5 -7
  8. airflow/providers/google/cloud/hooks/secret_manager.py +3 -3
  9. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +28 -8
  10. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +11 -6
  11. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +214 -34
  12. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +11 -4
  13. airflow/providers/google/cloud/links/automl.py +13 -22
  14. airflow/providers/google/cloud/log/gcs_task_handler.py +1 -2
  15. airflow/providers/google/cloud/operators/bigquery.py +6 -4
  16. airflow/providers/google/cloud/operators/dataflow.py +186 -4
  17. airflow/providers/google/cloud/operators/datafusion.py +3 -2
  18. airflow/providers/google/cloud/operators/datapipeline.py +5 -6
  19. airflow/providers/google/cloud/operators/dataproc.py +30 -33
  20. airflow/providers/google/cloud/operators/gcs.py +4 -4
  21. airflow/providers/google/cloud/operators/kubernetes_engine.py +16 -2
  22. airflow/providers/google/cloud/operators/life_sciences.py +5 -7
  23. airflow/providers/google/cloud/operators/mlengine.py +42 -65
  24. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +18 -4
  25. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +5 -5
  26. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +280 -9
  27. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +4 -0
  28. airflow/providers/google/cloud/secrets/secret_manager.py +3 -5
  29. airflow/providers/google/cloud/sensors/bigquery.py +8 -27
  30. airflow/providers/google/cloud/sensors/bigquery_dts.py +1 -4
  31. airflow/providers/google/cloud/sensors/cloud_composer.py +9 -14
  32. airflow/providers/google/cloud/sensors/dataflow.py +1 -25
  33. airflow/providers/google/cloud/sensors/dataform.py +1 -4
  34. airflow/providers/google/cloud/sensors/datafusion.py +1 -7
  35. airflow/providers/google/cloud/sensors/dataplex.py +1 -31
  36. airflow/providers/google/cloud/sensors/dataproc.py +1 -16
  37. airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -7
  38. airflow/providers/google/cloud/sensors/gcs.py +5 -27
  39. airflow/providers/google/cloud/sensors/looker.py +1 -13
  40. airflow/providers/google/cloud/sensors/pubsub.py +11 -5
  41. airflow/providers/google/cloud/sensors/workflows.py +1 -4
  42. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +6 -0
  43. airflow/providers/google/cloud/triggers/dataflow.py +145 -1
  44. airflow/providers/google/cloud/triggers/kubernetes_engine.py +66 -3
  45. airflow/providers/google/common/deprecated.py +176 -0
  46. airflow/providers/google/common/hooks/base_google.py +3 -2
  47. airflow/providers/google/get_provider_info.py +8 -10
  48. airflow/providers/google/marketing_platform/hooks/analytics.py +4 -2
  49. airflow/providers/google/marketing_platform/hooks/search_ads.py +169 -30
  50. airflow/providers/google/marketing_platform/operators/analytics.py +16 -33
  51. airflow/providers/google/marketing_platform/operators/search_ads.py +217 -156
  52. airflow/providers/google/marketing_platform/sensors/display_video.py +1 -4
  53. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/METADATA +18 -16
  54. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/RECORD +56 -56
  55. airflow/providers/google/marketing_platform/sensors/search_ads.py +0 -92
  56. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/WHEEL +0 -0
  57. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/entry_points.txt +0 -0
@@ -30,7 +30,6 @@ import warnings
30
30
  from copy import deepcopy
31
31
  from typing import TYPE_CHECKING, Any, Callable, Generator, Sequence, TypeVar, cast
32
32
 
33
- from deprecated import deprecated
34
33
  from google.cloud.dataflow_v1beta3 import (
35
34
  GetJobRequest,
36
35
  Job,
@@ -51,6 +50,7 @@ from googleapiclient.discovery import Resource, build
51
50
 
52
51
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
53
52
  from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType, beam_options_to_args
53
+ from airflow.providers.google.common.deprecated import deprecated
54
54
  from airflow.providers.google.common.hooks.base_google import (
55
55
  PROVIDE_PROJECT_ID,
56
56
  GoogleBaseAsyncHook,
@@ -71,7 +71,7 @@ DEFAULT_DATAFLOW_LOCATION = "us-central1"
71
71
 
72
72
 
73
73
  JOB_ID_PATTERN = re.compile(
74
- r"Submitted job: (?P<job_id_java>[^\"\n]*)|Created job with id: \[(?P<job_id_python>[^\"\n]*)\]"
74
+ r"Submitted job: (?P<job_id_java>[^\"\n\s]*)|Created job with id: \[(?P<job_id_python>[^\"\n\s]*)\]"
75
75
  )
76
76
 
77
77
  T = TypeVar("T", bound=Callable)
@@ -186,9 +186,9 @@ class DataflowJobType:
186
186
 
187
187
  class _DataflowJobsController(LoggingMixin):
188
188
  """
189
- Interface for communication with Google API.
189
+ Interface for communication with Google Cloud Dataflow API.
190
190
 
191
- It's not use Apache Beam, but only Google Dataflow API.
191
+ Does not use Apache Beam API.
192
192
 
193
193
  :param dataflow: Discovery resource
194
194
  :param project_number: The Google Cloud Project ID.
@@ -271,12 +271,12 @@ class _DataflowJobsController(LoggingMixin):
271
271
  else:
272
272
  raise ValueError("Missing both dataflow job ID and name.")
273
273
 
274
- def fetch_job_by_id(self, job_id: str) -> dict:
274
+ def fetch_job_by_id(self, job_id: str) -> dict[str, str]:
275
275
  """
276
276
  Fetch the job with the specified Job ID.
277
277
 
278
- :param job_id: Job ID to get.
279
- :return: the Job
278
+ :param job_id: ID of the job that needs to be fetched.
279
+ :return: Dictionary containing the Job's data
280
280
  """
281
281
  return (
282
282
  self._dataflow.projects()
@@ -444,7 +444,6 @@ class _DataflowJobsController(LoggingMixin):
444
444
  "Google Cloud Dataflow job's expected terminal state cannot be "
445
445
  "JOB_STATE_DRAINED while it is a batch job"
446
446
  )
447
-
448
447
  if current_state == current_expected_state:
449
448
  if current_expected_state == DataflowJobStatus.JOB_STATE_RUNNING:
450
449
  return not self._wait_until_finished
@@ -594,12 +593,12 @@ class DataflowHook(GoogleBaseHook):
594
593
  @_fallback_to_project_id_from_variables
595
594
  @GoogleBaseHook.fallback_to_default_project_id
596
595
  @deprecated(
597
- reason=(
598
- "This method is deprecated. "
599
- "Please use `airflow.providers.apache.beam.hooks.beam.start.start_java_pipeline` "
600
- "to start pipeline and `providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done` "
601
- "to wait for the required pipeline state."
602
- ),
596
+ planned_removal_date="March 01, 2025",
597
+ use_instead="airflow.providers.apache.beam.hooks.beam.start.start_java_pipeline, "
598
+ "providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done",
599
+ instructions="Please use airflow.providers.apache.beam.hooks.beam.start.start_java_pipeline "
600
+ "to start pipeline and providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done method "
601
+ "to wait for the required pipeline state instead.",
603
602
  category=AirflowProviderDeprecationWarning,
604
603
  )
605
604
  def start_java_dataflow(
@@ -938,6 +937,90 @@ class DataflowHook(GoogleBaseHook):
938
937
  response: dict = request.execute(num_retries=self.num_retries)
939
938
  return response["job"]
940
939
 
940
+ @GoogleBaseHook.fallback_to_default_project_id
941
+ def launch_beam_yaml_job(
942
+ self,
943
+ *,
944
+ job_name: str,
945
+ yaml_pipeline_file: str,
946
+ append_job_name: bool,
947
+ jinja_variables: dict[str, str] | None,
948
+ options: dict[str, Any] | None,
949
+ project_id: str,
950
+ location: str = DEFAULT_DATAFLOW_LOCATION,
951
+ ) -> str:
952
+ """
953
+ Launch a Dataflow YAML job and run it until completion.
954
+
955
+ :param job_name: The unique name to assign to the Cloud Dataflow job.
956
+ :param yaml_pipeline_file: Path to a file defining the YAML pipeline to run.
957
+ Must be a local file or a URL beginning with 'gs://'.
958
+ :param append_job_name: Set to True if a unique suffix has to be appended to the `job_name`.
959
+ :param jinja_variables: A dictionary of Jinja2 variables to be used in reifying the yaml pipeline file.
960
+ :param options: Additional gcloud or Beam job parameters.
961
+ It must be a dictionary with the keys matching the optional flag names in gcloud.
962
+ The list of supported flags can be found at: `https://cloud.google.com/sdk/gcloud/reference/dataflow/yaml/run`.
963
+ Note that if a flag does not require a value, then its dictionary value must be either True or None.
964
+ For example, the `--log-http` flag can be passed as {'log-http': True}.
965
+ :param project_id: The ID of the GCP project that owns the job.
966
+ :param location: Region ID of the job's regional endpoint. Defaults to 'us-central1'.
967
+ :param on_new_job_callback: Callback function that passes the job to the operator once known.
968
+ :return: Job ID.
969
+ """
970
+ gcp_flags = {
971
+ "yaml-pipeline-file": yaml_pipeline_file,
972
+ "project": project_id,
973
+ "format": "value(job.id)",
974
+ "region": location,
975
+ }
976
+
977
+ if jinja_variables:
978
+ gcp_flags["jinja-variables"] = json.dumps(jinja_variables)
979
+
980
+ if options:
981
+ gcp_flags.update(options)
982
+
983
+ job_name = self.build_dataflow_job_name(job_name, append_job_name)
984
+ cmd = self._build_gcloud_command(
985
+ command=["gcloud", "dataflow", "yaml", "run", job_name], parameters=gcp_flags
986
+ )
987
+ job_id = self._create_dataflow_job_with_gcloud(cmd=cmd)
988
+ return job_id
989
+
990
+ def _build_gcloud_command(self, command: list[str], parameters: dict[str, str]) -> list[str]:
991
+ _parameters = deepcopy(parameters)
992
+ if self.impersonation_chain:
993
+ if isinstance(self.impersonation_chain, str):
994
+ impersonation_account = self.impersonation_chain
995
+ elif len(self.impersonation_chain) == 1:
996
+ impersonation_account = self.impersonation_chain[0]
997
+ else:
998
+ raise AirflowException(
999
+ "Chained list of accounts is not supported, please specify only one service account."
1000
+ )
1001
+ _parameters["impersonate-service-account"] = impersonation_account
1002
+ return [*command, *(beam_options_to_args(_parameters))]
1003
+
1004
+ def _create_dataflow_job_with_gcloud(self, cmd: list[str]) -> str:
1005
+ """Create a Dataflow job with a gcloud command and return the job's ID."""
1006
+ self.log.info("Executing command: %s", " ".join(shlex.quote(c) for c in cmd))
1007
+ success_code = 0
1008
+
1009
+ with self.provide_authorized_gcloud():
1010
+ proc = subprocess.run(cmd, capture_output=True)
1011
+
1012
+ if proc.returncode != success_code:
1013
+ stderr_last_20_lines = "\n".join(proc.stderr.decode().strip().splitlines()[-20:])
1014
+ raise AirflowException(
1015
+ f"Process exit with non-zero exit code. Exit code: {proc.returncode}. Error Details : "
1016
+ f"{stderr_last_20_lines}"
1017
+ )
1018
+
1019
+ job_id = proc.stdout.decode().strip()
1020
+ self.log.info("Created job's ID: %s", job_id)
1021
+
1022
+ return job_id
1023
+
941
1024
  @staticmethod
942
1025
  def extract_job_id(job: dict) -> str:
943
1026
  try:
@@ -951,12 +1034,12 @@ class DataflowHook(GoogleBaseHook):
951
1034
  @_fallback_to_project_id_from_variables
952
1035
  @GoogleBaseHook.fallback_to_default_project_id
953
1036
  @deprecated(
954
- reason=(
955
- "This method is deprecated. "
956
- "Please use `airflow.providers.apache.beam.hooks.beam.start.start_python_pipeline` "
957
- "to start pipeline and `providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done` "
958
- "to wait for the required pipeline state."
959
- ),
1037
+ planned_removal_date="March 01, 2025",
1038
+ use_instead="airflow.providers.apache.beam.hooks.beam.start.start_python_pipeline method, "
1039
+ "providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done",
1040
+ instructions="Please use airflow.providers.apache.beam.hooks.beam.start.start_python_pipeline method "
1041
+ "to start pipeline and providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done method "
1042
+ "to wait for the required pipeline state instead.",
960
1043
  category=AirflowProviderDeprecationWarning,
961
1044
  )
962
1045
  def start_python_dataflow(
@@ -1139,33 +1222,15 @@ class DataflowHook(GoogleBaseHook):
1139
1222
  :param on_new_job_callback: Callback called when the job is known.
1140
1223
  :return: the new job object
1141
1224
  """
1142
- gcp_options = [
1143
- f"--project={project_id}",
1144
- "--format=value(job.id)",
1145
- f"--job-name={job_name}",
1146
- f"--region={location}",
1147
- ]
1148
-
1149
- if self.impersonation_chain:
1150
- if isinstance(self.impersonation_chain, str):
1151
- impersonation_account = self.impersonation_chain
1152
- elif len(self.impersonation_chain) == 1:
1153
- impersonation_account = self.impersonation_chain[0]
1154
- else:
1155
- raise AirflowException(
1156
- "Chained list of accounts is not supported, please specify only one service account"
1157
- )
1158
- gcp_options.append(f"--impersonate-service-account={impersonation_account}")
1159
-
1160
- cmd = [
1161
- "gcloud",
1162
- "dataflow",
1163
- "sql",
1164
- "query",
1165
- query,
1166
- *gcp_options,
1167
- *(beam_options_to_args(options)),
1168
- ]
1225
+ gcp_options = {
1226
+ "project": project_id,
1227
+ "format": "value(job.id)",
1228
+ "job-name": job_name,
1229
+ "region": location,
1230
+ }
1231
+ cmd = self._build_gcloud_command(
1232
+ command=["gcloud", "dataflow", "sql", "query", query], parameters={**gcp_options, **options}
1233
+ )
1169
1234
  self.log.info("Executing command: %s", " ".join(shlex.quote(c) for c in cmd))
1170
1235
  with self.provide_authorized_gcloud():
1171
1236
  proc = subprocess.run(cmd, capture_output=True)
@@ -21,10 +21,9 @@ from __future__ import annotations
21
21
 
22
22
  from typing import TYPE_CHECKING
23
23
 
24
- from deprecated import deprecated
25
-
26
24
  from airflow.exceptions import AirflowProviderDeprecationWarning
27
25
  from airflow.providers.google.cloud.hooks.dataflow import DataflowHook
26
+ from airflow.providers.google.common.deprecated import deprecated
28
27
  from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
29
28
 
30
29
  if TYPE_CHECKING:
@@ -34,7 +33,8 @@ DEFAULT_DATAPIPELINE_LOCATION = "us-central1"
34
33
 
35
34
 
36
35
  @deprecated(
37
- reason="This hook is deprecated and will be removed after 01.12.2024. Please use `DataflowHook`.",
36
+ planned_removal_date="December 01, 2024",
37
+ use_instead="DataflowHook",
38
38
  category=AirflowProviderDeprecationWarning,
39
39
  )
40
40
  class DataPipelineHook(DataflowHook):
@@ -24,7 +24,6 @@ import json
24
24
  import time
25
25
  from typing import TYPE_CHECKING, Sequence
26
26
 
27
- from deprecated import deprecated
28
27
  from google.api_core.exceptions import NotFound
29
28
  from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
30
29
  from google.auth.transport import requests as google_requests
@@ -43,6 +42,7 @@ from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarni
43
42
  from airflow.providers.cncf.kubernetes.hooks.kubernetes import AsyncKubernetesHook, KubernetesHook
44
43
  from airflow.providers.cncf.kubernetes.kube_client import _enable_tcp_keepalive
45
44
  from airflow.providers.google.common.consts import CLIENT_INFO
45
+ from airflow.providers.google.common.deprecated import deprecated
46
46
  from airflow.providers.google.common.hooks.base_google import (
47
47
  PROVIDE_PROJECT_ID,
48
48
  GoogleBaseAsyncHook,
@@ -139,10 +139,8 @@ class GKEHook(GoogleBaseHook):
139
139
  # To preserve backward compatibility
140
140
  # TODO: remove one day
141
141
  @deprecated(
142
- reason=(
143
- "The get_conn method has been deprecated. "
144
- "You should use the get_cluster_manager_client method."
145
- ),
142
+ planned_removal_date="November 01, 2024",
143
+ use_instead="get_cluster_manager_client",
146
144
  category=AirflowProviderDeprecationWarning,
147
145
  )
148
146
  def get_conn(self) -> container_v1.ClusterManagerClient:
@@ -151,7 +149,8 @@ class GKEHook(GoogleBaseHook):
151
149
  # To preserve backward compatibility
152
150
  # TODO: remove one day
153
151
  @deprecated(
154
- reason="The get_client method has been deprecated. You should use the get_conn method.",
152
+ planned_removal_date="November 01, 2024",
153
+ use_instead="get_cluster_manager_client",
155
154
  category=AirflowProviderDeprecationWarning,
156
155
  )
157
156
  def get_client(self) -> ClusterManagerClient:
@@ -580,10 +579,8 @@ class GKEKubernetesAsyncHook(GoogleBaseAsyncHook, AsyncKubernetesHook):
580
579
 
581
580
 
582
581
  @deprecated(
583
- reason=(
584
- "The `GKEDeploymentHook` class is deprecated and will be removed after 01.10.2024, please use "
585
- "`GKEKubernetesHook` instead."
586
- ),
582
+ planned_removal_date="October 01, 2024",
583
+ use_instead="GKEKubernetesHook",
587
584
  category=AirflowProviderDeprecationWarning,
588
585
  )
589
586
  class GKEDeploymentHook(GKEKubernetesHook):
@@ -591,10 +588,8 @@ class GKEDeploymentHook(GKEKubernetesHook):
591
588
 
592
589
 
593
590
  @deprecated(
594
- reason=(
595
- "The `GKECustomResourceHook` class is deprecated and will be removed after 01.10.2024, please use "
596
- "`GKEKubernetesHook` instead."
597
- ),
591
+ planned_removal_date="October 01, 2024",
592
+ use_instead="GKEKubernetesHook",
598
593
  category=AirflowProviderDeprecationWarning,
599
594
  )
600
595
  class GKECustomResourceHook(GKEKubernetesHook):
@@ -602,10 +597,8 @@ class GKECustomResourceHook(GKEKubernetesHook):
602
597
 
603
598
 
604
599
  @deprecated(
605
- reason=(
606
- "The `GKEPodHook` class is deprecated and will be removed after 01.10.2024, please use "
607
- "`GKEKubernetesHook` instead."
608
- ),
600
+ planned_removal_date="October 01, 2024",
601
+ use_instead="GKEKubernetesHook",
609
602
  category=AirflowProviderDeprecationWarning,
610
603
  )
611
604
  class GKEPodHook(GKEKubernetesHook):
@@ -631,10 +624,8 @@ class GKEPodHook(GKEKubernetesHook):
631
624
 
632
625
 
633
626
  @deprecated(
634
- reason=(
635
- "The `GKEJobHook` class is deprecated and will be removed after 01.10.2024, please use "
636
- "`GKEKubernetesHook` instead."
637
- ),
627
+ planned_removal_date="October 01, 2024",
628
+ use_instead="GKEKubernetesHook",
638
629
  category=AirflowProviderDeprecationWarning,
639
630
  )
640
631
  class GKEJobHook(GKEKubernetesHook):
@@ -642,10 +633,8 @@ class GKEJobHook(GKEKubernetesHook):
642
633
 
643
634
 
644
635
  @deprecated(
645
- reason=(
646
- "The `GKEPodAsyncHook` class is deprecated and will be removed after 01.10.2024, please use "
647
- "`GKEKubernetesAsyncHook` instead."
648
- ),
636
+ planned_removal_date="October 01, 2024",
637
+ use_instead="GKEKubernetesAsyncHook",
649
638
  category=AirflowProviderDeprecationWarning,
650
639
  )
651
640
  class GKEPodAsyncHook(GKEKubernetesAsyncHook):
@@ -23,10 +23,10 @@ import time
23
23
  from typing import Sequence
24
24
 
25
25
  import google.api_core.path_template
26
- from deprecated import deprecated
27
26
  from googleapiclient.discovery import build
28
27
 
29
28
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
29
+ from airflow.providers.google.common.deprecated import deprecated
30
30
  from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
31
31
 
32
32
  # Time to sleep between active checks of the operation results
@@ -34,12 +34,10 @@ TIME_TO_SLEEP_IN_SECONDS = 5
34
34
 
35
35
 
36
36
  @deprecated(
37
- reason=(
38
- "This hook is deprecated. Consider using "
39
- "Google Cloud Batch Operators' hook instead. "
40
- "The Life Sciences API (beta) will be discontinued "
41
- "on July 8, 2025 in favor of Google Cloud Batch."
42
- ),
37
+ planned_removal_date="March 01, 2025",
38
+ use_instead="Google Cloud Batch Operators' hook",
39
+ reason="The Life Sciences API (beta) will be discontinued on July 8, 2025 "
40
+ "in favor of Google Cloud Batch.",
43
41
  category=AirflowProviderDeprecationWarning,
44
42
  )
45
43
  class LifeSciencesHook(GoogleBaseHook):
@@ -22,7 +22,6 @@ from __future__ import annotations
22
22
  from functools import cached_property
23
23
  from typing import TYPE_CHECKING, Sequence
24
24
 
25
- from deprecated import deprecated
26
25
  from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
27
26
  from google.cloud.secretmanager_v1 import (
28
27
  AccessSecretVersionResponse,
@@ -35,6 +34,7 @@ from google.cloud.secretmanager_v1 import (
35
34
  from airflow.exceptions import AirflowProviderDeprecationWarning
36
35
  from airflow.providers.google.cloud._internal_client.secret_manager_client import _SecretManagerClient
37
36
  from airflow.providers.google.common.consts import CLIENT_INFO
37
+ from airflow.providers.google.common.deprecated import deprecated
38
38
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
39
39
 
40
40
  if TYPE_CHECKING:
@@ -43,8 +43,8 @@ if TYPE_CHECKING:
43
43
 
44
44
 
45
45
  @deprecated(
46
- reason="The SecretsManagerHook is deprecated and will be removed after 01.11.2024. "
47
- "Please use GoogleCloudSecretManagerHook instead.",
46
+ planned_removal_date="November 01, 2024",
47
+ use_instead="GoogleCloudSecretManagerHook",
48
48
  category=AirflowProviderDeprecationWarning,
49
49
  )
50
50
  class SecretsManagerHook(GoogleBaseHook):
@@ -36,6 +36,7 @@ from google.cloud.aiplatform import (
36
36
  from google.cloud.aiplatform_v1 import JobServiceClient, PipelineServiceClient
37
37
 
38
38
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
39
+ from airflow.providers.google.common.deprecated import deprecated
39
40
  from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
40
41
 
41
42
  if TYPE_CHECKING:
@@ -185,6 +186,11 @@ class AutoMLHook(GoogleBaseHook):
185
186
  model_encryption_spec_key_name=model_encryption_spec_key_name,
186
187
  )
187
188
 
189
+ @deprecated(
190
+ planned_removal_date="June 15, 2025",
191
+ category=AirflowProviderDeprecationWarning,
192
+ reason="Deprecation of AutoMLText API",
193
+ )
188
194
  def get_auto_ml_text_training_job(
189
195
  self,
190
196
  display_name: str,
@@ -197,7 +203,12 @@ class AutoMLHook(GoogleBaseHook):
197
203
  training_encryption_spec_key_name: str | None = None,
198
204
  model_encryption_spec_key_name: str | None = None,
199
205
  ) -> AutoMLTextTrainingJob:
200
- """Return AutoMLTextTrainingJob object."""
206
+ """
207
+ Return AutoMLTextTrainingJob object.
208
+
209
+ WARNING: Text creation API is deprecated since September 15, 2024
210
+ (https://cloud.google.com/vertex-ai/docs/tutorials/text-classification-automl/overview).
211
+ """
201
212
  return AutoMLTextTrainingJob(
202
213
  display_name=display_name,
203
214
  prediction_type=prediction_type,
@@ -980,6 +991,11 @@ class AutoMLHook(GoogleBaseHook):
980
991
  return model, training_id
981
992
 
982
993
  @GoogleBaseHook.fallback_to_default_project_id
994
+ @deprecated(
995
+ planned_removal_date="September 15, 2025",
996
+ category=AirflowProviderDeprecationWarning,
997
+ reason="Deprecation of AutoMLText API",
998
+ )
983
999
  def create_auto_ml_text_training_job(
984
1000
  self,
985
1001
  project_id: str,
@@ -1009,6 +1025,9 @@ class AutoMLHook(GoogleBaseHook):
1009
1025
  """
1010
1026
  Create an AutoML Text Training Job.
1011
1027
 
1028
+ WARNING: Text creation API is deprecated since September 15, 2024
1029
+ (https://cloud.google.com/vertex-ai/docs/tutorials/text-classification-automl/overview).
1030
+
1012
1031
  :param project_id: Required. Project to run training in.
1013
1032
  :param region: Required. Location to run training in.
1014
1033
  :param display_name: Required. The user-defined name of this TrainingPipeline.
@@ -1101,13 +1120,14 @@ class AutoMLHook(GoogleBaseHook):
1101
1120
  concurrent Future and any downstream object will be immediately returned and synced when the
1102
1121
  Future has completed.
1103
1122
  """
1104
- self._job = self.get_auto_ml_text_training_job(
1105
- project=project_id,
1106
- location=region,
1123
+ self._job = AutoMLTextTrainingJob(
1107
1124
  display_name=display_name,
1108
1125
  prediction_type=prediction_type,
1109
1126
  multi_label=multi_label,
1110
1127
  sentiment_max=sentiment_max,
1128
+ project=project_id,
1129
+ location=region,
1130
+ credentials=self.get_credentials(),
1111
1131
  labels=labels,
1112
1132
  training_encryption_spec_key_name=training_encryption_spec_key_name,
1113
1133
  model_encryption_spec_key_name=model_encryption_spec_key_name,
@@ -1117,13 +1137,13 @@ class AutoMLHook(GoogleBaseHook):
1117
1137
  raise AirflowException("AutoMLTextTrainingJob was not created")
1118
1138
 
1119
1139
  model = self._job.run(
1120
- dataset=dataset,
1121
- training_fraction_split=training_fraction_split,
1122
- validation_fraction_split=validation_fraction_split,
1140
+ dataset=dataset, # type: ignore[arg-type]
1141
+ training_fraction_split=training_fraction_split, # type: ignore[call-arg]
1142
+ validation_fraction_split=validation_fraction_split, # type: ignore[call-arg]
1123
1143
  test_fraction_split=test_fraction_split,
1124
1144
  training_filter_split=training_filter_split,
1125
1145
  validation_filter_split=validation_filter_split,
1126
- test_filter_split=test_filter_split,
1146
+ test_filter_split=test_filter_split, # type: ignore[call-arg]
1127
1147
  model_display_name=model_display_name,
1128
1148
  model_labels=model_labels,
1129
1149
  sync=sync,
@@ -22,7 +22,6 @@ from __future__ import annotations
22
22
  import asyncio
23
23
  from typing import TYPE_CHECKING, Any, Sequence
24
24
 
25
- from deprecated import deprecated
26
25
  from google.api_core.client_options import ClientOptions
27
26
  from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
28
27
  from google.cloud.aiplatform import (
@@ -44,6 +43,7 @@ from google.cloud.aiplatform_v1 import (
44
43
 
45
44
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
46
45
  from airflow.providers.google.common.consts import CLIENT_INFO
46
+ from airflow.providers.google.common.deprecated import deprecated
47
47
  from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
48
48
 
49
49
  if TYPE_CHECKING:
@@ -380,7 +380,8 @@ class CustomJobHook(GoogleBaseHook):
380
380
 
381
381
  @GoogleBaseHook.fallback_to_default_project_id
382
382
  @deprecated(
383
- reason="Please use `PipelineJobHook.cancel_pipeline_job`",
383
+ planned_removal_date="March 01, 2025",
384
+ use_instead="PipelineJobHook.cancel_pipeline_job",
384
385
  category=AirflowProviderDeprecationWarning,
385
386
  )
386
387
  def cancel_pipeline_job(
@@ -509,7 +510,8 @@ class CustomJobHook(GoogleBaseHook):
509
510
 
510
511
  @GoogleBaseHook.fallback_to_default_project_id
511
512
  @deprecated(
512
- reason="Please use `PipelineJobHook.create_pipeline_job`",
513
+ planned_removal_date="March 01, 2025",
514
+ use_instead="PipelineJobHook.create_pipeline_job",
513
515
  category=AirflowProviderDeprecationWarning,
514
516
  )
515
517
  def create_pipeline_job(
@@ -2980,7 +2982,8 @@ class CustomJobHook(GoogleBaseHook):
2980
2982
 
2981
2983
  @GoogleBaseHook.fallback_to_default_project_id
2982
2984
  @deprecated(
2983
- reason="Please use `PipelineJobHook.get_pipeline_job`",
2985
+ planned_removal_date="March 01, 2025",
2986
+ use_instead="PipelineJobHook.get_pipeline_job",
2984
2987
  category=AirflowProviderDeprecationWarning,
2985
2988
  )
2986
2989
  def get_pipeline_job(
@@ -3085,7 +3088,8 @@ class CustomJobHook(GoogleBaseHook):
3085
3088
 
3086
3089
  @GoogleBaseHook.fallback_to_default_project_id
3087
3090
  @deprecated(
3088
- reason="Please use `PipelineJobHook.list_pipeline_jobs`",
3091
+ planned_removal_date="March 01, 2025",
3092
+ use_instead="PipelineJobHook.list_pipeline_jobs",
3089
3093
  category=AirflowProviderDeprecationWarning,
3090
3094
  )
3091
3095
  def list_pipeline_jobs(
@@ -3301,7 +3305,8 @@ class CustomJobHook(GoogleBaseHook):
3301
3305
 
3302
3306
  @GoogleBaseHook.fallback_to_default_project_id
3303
3307
  @deprecated(
3304
- reason="Please use `PipelineJobHook.delete_pipeline_job`",
3308
+ planned_removal_date="March 01, 2025",
3309
+ use_instead="PipelineJobHook.delete_pipeline_job",
3305
3310
  category=AirflowProviderDeprecationWarning,
3306
3311
  )
3307
3312
  def delete_pipeline_job(