apache-airflow-providers-google 10.22.0__py3-none-any.whl → 10.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/cloud/hooks/bigquery.py +91 -54
  3. airflow/providers/google/cloud/hooks/cloud_build.py +3 -2
  4. airflow/providers/google/cloud/hooks/dataflow.py +112 -47
  5. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  6. airflow/providers/google/cloud/hooks/kubernetes_engine.py +15 -26
  7. airflow/providers/google/cloud/hooks/life_sciences.py +5 -7
  8. airflow/providers/google/cloud/hooks/secret_manager.py +3 -3
  9. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +28 -8
  10. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +11 -6
  11. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +214 -34
  12. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +11 -4
  13. airflow/providers/google/cloud/links/automl.py +13 -22
  14. airflow/providers/google/cloud/log/gcs_task_handler.py +1 -2
  15. airflow/providers/google/cloud/operators/bigquery.py +6 -4
  16. airflow/providers/google/cloud/operators/dataflow.py +186 -4
  17. airflow/providers/google/cloud/operators/datafusion.py +3 -2
  18. airflow/providers/google/cloud/operators/datapipeline.py +5 -6
  19. airflow/providers/google/cloud/operators/dataproc.py +30 -33
  20. airflow/providers/google/cloud/operators/gcs.py +4 -4
  21. airflow/providers/google/cloud/operators/kubernetes_engine.py +16 -2
  22. airflow/providers/google/cloud/operators/life_sciences.py +5 -7
  23. airflow/providers/google/cloud/operators/mlengine.py +42 -65
  24. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +18 -4
  25. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +5 -5
  26. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +280 -9
  27. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +4 -0
  28. airflow/providers/google/cloud/secrets/secret_manager.py +3 -5
  29. airflow/providers/google/cloud/sensors/bigquery.py +8 -27
  30. airflow/providers/google/cloud/sensors/bigquery_dts.py +1 -4
  31. airflow/providers/google/cloud/sensors/cloud_composer.py +9 -14
  32. airflow/providers/google/cloud/sensors/dataflow.py +1 -25
  33. airflow/providers/google/cloud/sensors/dataform.py +1 -4
  34. airflow/providers/google/cloud/sensors/datafusion.py +1 -7
  35. airflow/providers/google/cloud/sensors/dataplex.py +1 -31
  36. airflow/providers/google/cloud/sensors/dataproc.py +1 -16
  37. airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -7
  38. airflow/providers/google/cloud/sensors/gcs.py +5 -27
  39. airflow/providers/google/cloud/sensors/looker.py +1 -13
  40. airflow/providers/google/cloud/sensors/pubsub.py +11 -5
  41. airflow/providers/google/cloud/sensors/workflows.py +1 -4
  42. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +6 -0
  43. airflow/providers/google/cloud/triggers/dataflow.py +145 -1
  44. airflow/providers/google/cloud/triggers/kubernetes_engine.py +66 -3
  45. airflow/providers/google/common/deprecated.py +176 -0
  46. airflow/providers/google/common/hooks/base_google.py +3 -2
  47. airflow/providers/google/get_provider_info.py +8 -10
  48. airflow/providers/google/marketing_platform/hooks/analytics.py +4 -2
  49. airflow/providers/google/marketing_platform/hooks/search_ads.py +169 -30
  50. airflow/providers/google/marketing_platform/operators/analytics.py +16 -33
  51. airflow/providers/google/marketing_platform/operators/search_ads.py +217 -156
  52. airflow/providers/google/marketing_platform/sensors/display_video.py +1 -4
  53. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/METADATA +18 -16
  54. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/RECORD +56 -56
  55. airflow/providers/google/marketing_platform/sensors/search_ads.py +0 -92
  56. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/WHEEL +0 -0
  57. {apache_airflow_providers_google-10.22.0.dist-info → apache_airflow_providers_google-10.23.0.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/triggers/dataflow.py
@@ -24,8 +24,10 @@ from typing import TYPE_CHECKING, Any, Sequence
 from google.cloud.dataflow_v1beta3 import JobState
 from google.cloud.dataflow_v1beta3.types import (
     AutoscalingEvent,
+    Job,
     JobMessage,
     JobMetrics,
+    JobType,
     MetricUpdate,
 )
 
@@ -157,7 +159,7 @@ class TemplateJobStartTrigger(BaseTrigger):
 
 class DataflowJobStatusTrigger(BaseTrigger):
     """
-    Trigger that checks for metrics associated with a Dataflow job.
+    Trigger that monitors if a Dataflow job has reached any of the expected statuses.
 
     :param job_id: Required. ID of the job.
     :param expected_statuses: The expected state(s) of the operation.
@@ -266,6 +268,148 @@ class DataflowJobStatusTrigger(BaseTrigger):
         )
 
 
+class DataflowStartYamlJobTrigger(BaseTrigger):
+    """
+    Dataflow trigger that checks the state of a Dataflow YAML job.
+
+    :param job_id: Required. ID of the job.
+    :param project_id: Required. The Google Cloud project ID in which the job was started.
+    :param location: The location where job is executed. If set to None then
+        the value of DEFAULT_DATAFLOW_LOCATION will be used.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param poll_sleep: Optional. The time in seconds to sleep between polling Google Cloud Platform
+        for the Dataflow job.
+    :param cancel_timeout: Optional. How long (in seconds) operator should wait for the pipeline to be
+        successfully cancelled when task is being killed.
+    :param expected_terminal_state: Optional. The expected terminal state of the Dataflow job at which the
+        operator task is set to succeed. Defaults to 'JOB_STATE_DONE' for the batch jobs and
+        'JOB_STATE_RUNNING' for the streaming jobs.
+    :param impersonation_chain: Optional. Service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    def __init__(
+        self,
+        job_id: str,
+        project_id: str | None,
+        location: str = DEFAULT_DATAFLOW_LOCATION,
+        gcp_conn_id: str = "google_cloud_default",
+        poll_sleep: int = 10,
+        cancel_timeout: int | None = 5 * 60,
+        expected_terminal_state: str | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        super().__init__()
+        self.project_id = project_id
+        self.job_id = job_id
+        self.location = location
+        self.gcp_conn_id = gcp_conn_id
+        self.poll_sleep = poll_sleep
+        self.cancel_timeout = cancel_timeout
+        self.expected_terminal_state = expected_terminal_state
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize class arguments and classpath."""
+        return (
+            "airflow.providers.google.cloud.triggers.dataflow.DataflowStartYamlJobTrigger",
+            {
+                "project_id": self.project_id,
+                "job_id": self.job_id,
+                "location": self.location,
+                "gcp_conn_id": self.gcp_conn_id,
+                "poll_sleep": self.poll_sleep,
+                "expected_terminal_state": self.expected_terminal_state,
+                "impersonation_chain": self.impersonation_chain,
+                "cancel_timeout": self.cancel_timeout,
+            },
+        )
+
+    async def run(self):
+        """
+        Fetch job and yield events depending on the job's type and state.
+
+        Yield TriggerEvent if the job reaches a terminal state.
+        Otherwise awaits for a specified amount of time stored in self.poll_sleep variable.
+        """
+        hook: AsyncDataflowHook = self._get_async_hook()
+        try:
+            while True:
+                job: Job = await hook.get_job(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.location,
+                )
+                job_state = job.current_state
+                job_type = job.type_
+                if job_state.name == self.expected_terminal_state:
+                    yield TriggerEvent(
+                        {
+                            "job": Job.to_dict(job),
+                            "status": "success",
+                            "message": f"Job reached the expected terminal state: {self.expected_terminal_state}.",
+                        }
+                    )
+                    return
+                elif job_type == JobType.JOB_TYPE_STREAMING and job_state == JobState.JOB_STATE_RUNNING:
+                    yield TriggerEvent(
+                        {
+                            "job": Job.to_dict(job),
+                            "status": "success",
+                            "message": "Streaming job reached the RUNNING state.",
+                        }
+                    )
+                    return
+                elif job_type == JobType.JOB_TYPE_BATCH and job_state == JobState.JOB_STATE_DONE:
+                    yield TriggerEvent(
+                        {
+                            "job": Job.to_dict(job),
+                            "status": "success",
+                            "message": "Batch job completed.",
+                        }
+                    )
+                    return
+                elif job_state == JobState.JOB_STATE_FAILED:
+                    yield TriggerEvent(
+                        {
+                            "job": Job.to_dict(job),
+                            "status": "error",
+                            "message": "Job failed.",
+                        }
+                    )
+                    return
+                elif job_state == JobState.JOB_STATE_STOPPED:
+                    yield TriggerEvent(
+                        {
+                            "job": Job.to_dict(job),
+                            "status": "stopped",
+                            "message": "Job was stopped.",
+                        }
+                    )
+                    return
+                else:
+                    self.log.info("Current job status is: %s", job_state.name)
+                    self.log.info("Sleeping for %s seconds.", self.poll_sleep)
+                    await asyncio.sleep(self.poll_sleep)
+        except Exception as e:
+            self.log.exception("Exception occurred while checking for job completion.")
+            yield TriggerEvent({"job": None, "status": "error", "message": str(e)})
+
+    def _get_async_hook(self) -> AsyncDataflowHook:
+        return AsyncDataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+            cancel_timeout=self.cancel_timeout,
+        )
+
+
 class DataflowJobMetricsTrigger(BaseTrigger):
     """
     Trigger that checks for metrics associated with a Dataflow job.
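For context on how a trigger like this is consumed: a deferrable operator hands control to it via BaseOperator.defer and resumes in a callback that inspects the TriggerEvent payload. The sketch below is illustrative only; ExampleDataflowYamlOperator is a hypothetical stand-in (the release's actual consumer lives in airflow/providers/google/cloud/operators/dataflow.py, not reproduced here), but the event dict shapes match the yield TriggerEvent(...) calls above.

# Minimal sketch of deferring to the new trigger; the operator class and
# argument values are hypothetical, not this release's implementation.
from __future__ import annotations

from typing import Any

from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.google.cloud.triggers.dataflow import DataflowStartYamlJobTrigger


class ExampleDataflowYamlOperator(BaseOperator):
    def __init__(self, *, job_id: str, project_id: str, location: str = "us-central1", **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.job_id = job_id
        self.project_id = project_id
        self.location = location

    def execute(self, context: Any) -> None:
        # Hand off to the triggerer; the trigger polls asynchronously and fires
        # a TriggerEvent once the job reaches a terminal state (or RUNNING, for
        # streaming jobs).
        self.defer(
            trigger=DataflowStartYamlJobTrigger(
                job_id=self.job_id,
                project_id=self.project_id,
                location=self.location,
            ),
            method_name="execute_complete",
        )

    def execute_complete(self, context: Any, event: dict[str, Any]) -> dict[str, Any] | None:
        # The event payload mirrors the TriggerEvent dicts yielded in run():
        # {"job": <Job as dict>, "status": "success" | "error" | "stopped", "message": ...}
        if event["status"] == "error":
            raise AirflowException(event["message"])
        return event["job"]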
airflow/providers/google/cloud/triggers/kubernetes_engine.py
@@ -23,14 +23,18 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence
 
 from google.cloud.container_v1.types import Operation
+from packaging.version import parse as parse_version
 
-from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.cncf.kubernetes.triggers.pod import KubernetesPodTrigger
-from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
+from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction, PodManager
+from airflow.providers.cncf.kubernetes.utils.xcom_sidecar import PodDefaults
 from airflow.providers.google.cloud.hooks.kubernetes_engine import (
     GKEAsyncHook,
     GKEKubernetesAsyncHook,
+    GKEKubernetesHook,
 )
+from airflow.providers_manager import ProvidersManager
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
 if TYPE_CHECKING:
@@ -255,18 +259,28 @@ class GKEJobTrigger(BaseTrigger):
         ssl_ca_cert: str,
         job_name: str,
         job_namespace: str,
+        pod_name: str,
+        pod_namespace: str,
+        base_container_name: str,
         gcp_conn_id: str = "google_cloud_default",
         poll_interval: float = 2,
         impersonation_chain: str | Sequence[str] | None = None,
+        get_logs: bool = True,
+        do_xcom_push: bool = False,
     ) -> None:
         super().__init__()
         self.cluster_url = cluster_url
         self.ssl_ca_cert = ssl_ca_cert
         self.job_name = job_name
         self.job_namespace = job_namespace
+        self.pod_name = pod_name
+        self.pod_namespace = pod_namespace
+        self.base_container_name = base_container_name
         self.gcp_conn_id = gcp_conn_id
         self.poll_interval = poll_interval
         self.impersonation_chain = impersonation_chain
+        self.get_logs = get_logs
+        self.do_xcom_push = do_xcom_push
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize KubernetesCreateJobTrigger arguments and classpath."""
@@ -277,15 +291,51 @@ class GKEJobTrigger(BaseTrigger):
                 "ssl_ca_cert": self.ssl_ca_cert,
                 "job_name": self.job_name,
                 "job_namespace": self.job_namespace,
+                "pod_name": self.pod_name,
+                "pod_namespace": self.pod_namespace,
+                "base_container_name": self.base_container_name,
                 "gcp_conn_id": self.gcp_conn_id,
                 "poll_interval": self.poll_interval,
                 "impersonation_chain": self.impersonation_chain,
+                "get_logs": self.get_logs,
+                "do_xcom_push": self.do_xcom_push,
             },
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Get current job status and yield a TriggerEvent."""
-        job: V1Job = await self.hook.wait_until_job_complete(name=self.job_name, namespace=self.job_namespace)
+        if self.get_logs or self.do_xcom_push:
+            pod = await self.hook.get_pod(name=self.pod_name, namespace=self.pod_namespace)
+        if self.do_xcom_push:
+            kubernetes_provider = ProvidersManager().providers["apache-airflow-providers-cncf-kubernetes"]
+            kubernetes_provider_name = kubernetes_provider.data["package-name"]
+            kubernetes_provider_version = kubernetes_provider.version
+            min_version = "8.4.1"
+            if parse_version(kubernetes_provider_version) < parse_version(min_version):
+                raise AirflowException(
+                    "You are trying to use do_xcom_push in `GKEStartJobOperator` with the provider "
+                    f"package {kubernetes_provider_name}=={kubernetes_provider_version} which doesn't "
+                    f"support this feature. Please upgrade it to version higher than or equal to {min_version}."
+                )
+            await self.hook.wait_until_container_complete(
+                name=self.pod_name,
+                namespace=self.pod_namespace,
+                container_name=self.base_container_name,
+                poll_interval=self.poll_interval,
+            )
+            self.log.info("Checking if xcom sidecar container is started.")
+            await self.hook.wait_until_container_started(
+                name=self.pod_name,
+                namespace=self.pod_namespace,
+                container_name=PodDefaults.SIDECAR_CONTAINER_NAME,
+                poll_interval=self.poll_interval,
+            )
+            self.log.info("Extracting result from xcom sidecar container.")
+            loop = asyncio.get_running_loop()
+            xcom_result = await loop.run_in_executor(None, self.pod_manager.extract_xcom, pod)
+        job: V1Job = await self.hook.wait_until_job_complete(
+            name=self.job_name, namespace=self.job_namespace, poll_interval=self.poll_interval
+        )
         job_dict = job.to_dict()
         error_message = self.hook.is_job_failed(job=job)
         status = "error" if error_message else "success"
@@ -294,9 +344,12 @@ class GKEJobTrigger(BaseTrigger):
             {
                 "name": job.metadata.name,
                 "namespace": job.metadata.namespace,
+                "pod_name": pod.metadata.name if self.get_logs else None,
+                "pod_namespace": pod.metadata.namespace if self.get_logs else None,
                 "status": status,
                 "message": message,
                 "job": job_dict,
+                "xcom_result": xcom_result if self.do_xcom_push else None,
             }
         )
 
@@ -308,3 +361,13 @@ class GKEJobTrigger(BaseTrigger):
             gcp_conn_id=self.gcp_conn_id,
             impersonation_chain=self.impersonation_chain,
         )
+
+    @cached_property
+    def pod_manager(self) -> PodManager:
+        sync_hook = GKEKubernetesHook(
+            gcp_conn_id=self.gcp_conn_id,
+            cluster_url=self.cluster_url,
+            ssl_ca_cert=self.ssl_ca_cert,
+            impersonation_chain=self.impersonation_chain,
+        )
+        return PodManager(kube_client=sync_hook.core_v1_client)
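A note on the widened serialize() dict above: the triggerer process recreates a trigger from exactly the (classpath, kwargs) pair that serialize() returns, so every new constructor argument (pod_name, pod_namespace, base_container_name, get_logs, do_xcom_push) must be mirrored there or it would silently reset to its default after deserialization. A minimal round-trip sketch, with placeholder argument values:

# Round-trip sketch: rebuild the trigger the way the triggerer does.
# All argument values here are placeholders for illustration.
from importlib import import_module

from airflow.providers.google.cloud.triggers.kubernetes_engine import GKEJobTrigger

trigger = GKEJobTrigger(
    cluster_url="https://10.0.0.1",
    ssl_ca_cert="<base64 CA cert>",
    job_name="example-job",
    job_namespace="default",
    pod_name="example-job-abcde",
    pod_namespace="default",
    base_container_name="base",
    do_xcom_push=True,
)

classpath, kwargs = trigger.serialize()
module_path, class_name = classpath.rsplit(".", 1)
rebuilt = getattr(import_module(module_path), class_name)(**kwargs)
# Preserved only because serialize() now includes "do_xcom_push".
assert rebuilt.do_xcom_push is True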
airflow/providers/google/common/deprecated.py (new file)
@@ -0,0 +1,176 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import inspect
+import re
+from datetime import date, datetime
+from typing import Any, Callable
+
+from deprecated import deprecated as standard_deprecated
+from deprecated.classic import ClassicAdapter
+
+
+class AirflowDeprecationAdapter(ClassicAdapter):
+    """
+    Build a detailed deprecation message based on the wrapped object type and other provided details.
+
+    :param planned_removal_date: The date after which the deprecated object should be removed.
+        The recommended date is six months ahead from today. The expected date format is `Month DD, YYYY`,
+        for example: `August 22, 2024`.
+        This parameter is required if the `planned_removal_release` parameter is not set.
+    :param planned_removal_release: The package name and the version in which the deprecated object is
+        expected to be removed. The expected format is `<package_name>==<package_version>`, for example
+        `apache-airflow==2.10.0` or `apache-airflow-providers-google==10.22.0`.
+        This parameter is required if the `planned_removal_date` parameter is not set.
+    :param use_instead: Optional. Replacement of the deprecated object.
+    :param reason: Optional. The detailed reason for the deprecated object.
+    :param instructions: Optional. The detailed instructions for migrating from the deprecated object.
+    :param category: Optional. The warning category to be used for the deprecation warning.
+    """
+
+    def __init__(
+        self,
+        planned_removal_date: str | None = None,
+        planned_removal_release: str | None = None,
+        use_instead: str | None = None,
+        reason: str | None = None,
+        instructions: str | None = None,
+        category: type[DeprecationWarning] = DeprecationWarning,
+        **kwargs: Any,
+    ):
+        super().__init__(**kwargs)
+        self.planned_removal_date: date | None = self._validate_date(planned_removal_date)
+        self.planned_removal_release: str | None = self._validate_removal_release(planned_removal_release)
+        self.use_instead: str | None = use_instead
+        self.reason: str = reason or ""
+        self.instructions: str | None = instructions
+        self.category: type[DeprecationWarning] = category
+        self._validate_fields()
+
+    def get_deprecated_msg(self, wrapped: Callable, instance: Any):
+        """
+        Generate a deprecation message for wrapped callable.
+
+        :param wrapped: Deprecated entity.
+        :param instance: The instance to which the callable belongs. (not used)
+        :return: A formatted deprecation message with all the details.
+        """
+        entity_type = self.entity_type(entity=wrapped)
+        entity_path = self.entity_path(entity=wrapped)
+        sunset = self.sunset_message()
+        replacement = self.replacement_message()
+        msg = f"The {entity_type} `{entity_path}` is deprecated and will be removed {sunset}. {replacement}"
+        if self.reason:
+            msg += f" The reason is: {self.reason}"
+        if self.instructions:
+            msg += f" Instructions: {self.instructions}"
+        return msg
+
+    @staticmethod
+    def _validate_date(value: str | None) -> date | None:
+        if value:
+            try:
+                return datetime.strptime(value, "%B %d, %Y").date()
+            except ValueError as ex:
+                error_message = (
+                    f"Invalid date '{value}'. "
+                    f"The expected format is 'Month DD, YYYY', for example 'August 22, 2024'."
+                )
+                raise ValueError(error_message) from ex
+        return None
+
+    @staticmethod
+    def _validate_removal_release(value: str | None) -> str | None:
+        if value:
+            pattern = r"^apache-airflow(-providers-[a-zA-Z-]+)?==\d+\.\d+\.\d+.*$"
+            if not bool(re.match(pattern, value)):
+                raise ValueError(
+                    f"`{value}` must follow the format 'apache-airflow(-providers-<name>)==<X.Y.Z>'."
+                )
+        return value
+
+    def _validate_fields(self):
+        msg = "Only one of two parameters must be set: `planned_removal_date` or 'planned_removal_release'."
+        if self.planned_removal_release and self.planned_removal_date:
+            raise ValueError(f"{msg} You specified both.")
+
+    @staticmethod
+    def entity_type(entity: Callable) -> str:
+        return "class" if inspect.isclass(entity) else "function (or method)"
+
+    @staticmethod
+    def entity_path(entity: Callable) -> str:
+        module_name = getattr(entity, "__module__", "")
+        qualified_name = getattr(entity, "__qualname__", "")
+        full_path = f"{module_name}.{qualified_name}".strip(".")
+
+        if module_name and full_path:
+            return full_path
+        return str(entity)
+
+    def sunset_message(self) -> str:
+        if self.planned_removal_date:
+            return f"after {self.planned_removal_date.strftime('%B %d, %Y')}"
+        if self.planned_removal_release:
+            return f"since version {self.planned_removal_release}"
+        return "in the future"
+
+    def replacement_message(self):
+        if self.use_instead:
+            replacements = ", ".join(f"`{replacement}`" for replacement in self.use_instead.split(", "))
+            return f"Please use {replacements} instead."
+        return "There is no replacement."
+
+
+def deprecated(
+    *args,
+    planned_removal_date: str | None = None,
+    planned_removal_release: str | None = None,
+    use_instead: str | None = None,
+    reason: str | None = None,
+    instructions: str | None = None,
+    adapter_cls: type[AirflowDeprecationAdapter] = AirflowDeprecationAdapter,
+    **kwargs,
+):
+    """
+    Mark a class, method or a function deprecated.
+
+    :param planned_removal_date: The date after which the deprecated object should be removed.
+        The recommended date is six months ahead from today. The expected date format is `Month DD, YYYY`,
+        for example: `August 22, 2024`.
+        This parameter is required if the `planned_removal_release` parameter is not set.
+    :param planned_removal_release: The package name and the version in which the deprecated object is
+        expected to be removed. The expected format is `<package_name>==<package_version>`, for example
+        `apache-airflow==2.10.0` or `apache-airflow-providers-google==10.22.0`.
+        This parameter is required if the `planned_removal_date` parameter is not set.
+    :param use_instead: Optional. Replacement of the deprecated object.
+    :param reason: Optional. The detailed reason for the deprecated object.
+    :param instructions: Optional. The detailed instructions for migrating from the deprecated object.
+    :param adapter_cls: Optional. Adapter class that is used to get the deprecation message.
+        This should be a subclass of `AirflowDeprecationAdapter`.
+    """
+    _kwargs = {
+        **kwargs,
+        "planned_removal_date": planned_removal_date,
+        "planned_removal_release": planned_removal_release,
+        "use_instead": use_instead,
+        "reason": reason,
+        "instructions": instructions,
+        "adapter_cls": adapter_cls,
+    }
+    return standard_deprecated(*args, **_kwargs)
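Usage follows the `deprecated` package's decorator pattern: the extra keyword arguments are forwarded to AirflowDeprecationAdapter, which assembles the message. A small sketch (the decorated function and replacement path are made-up names; the expected warning text follows get_deprecated_msg above):

# Hypothetical usage of the new helper.
from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.common.deprecated import deprecated


@deprecated(
    planned_removal_date="March 01, 2025",
    use_instead="airflow.providers.google.common.new_module.new_function",
    category=AirflowProviderDeprecationWarning,
)
def old_function() -> None:
    """Any call now warns with a message assembled by AirflowDeprecationAdapter."""


old_function()
# Expected warning text, per get_deprecated_msg above, along the lines of:
#   The function (or method) `__main__.old_function` is deprecated and will be
#   removed after March 01, 2025. Please use
#   `airflow.providers.google.common.new_module.new_function` instead.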
airflow/providers/google/common/hooks/base_google.py
@@ -36,7 +36,6 @@ import google_auth_httplib2
 import requests
 import tenacity
 from asgiref.sync import sync_to_async
-from deprecated import deprecated
 from gcloud.aio.auth.token import Token, TokenResponse
 from google.api_core.exceptions import Forbidden, ResourceExhausted, TooManyRequests
 from google.auth import _cloud_sdk, compute_engine  # type: ignore[attr-defined]
@@ -57,6 +56,7 @@ from airflow.providers.google.cloud.utils.credentials_provider import (
     get_credentials_and_project_id,
 )
 from airflow.providers.google.common.consts import CLIENT_INFO
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.utils.process_utils import patch_environ
 
 if TYPE_CHECKING:
@@ -451,7 +451,8 @@ class GoogleBaseHook(BaseHook):
 
     @property
     @deprecated(
-        reason="Please use `airflow.providers.google.common.consts.CLIENT_INFO`.",
+        planned_removal_date="March 01, 2025",
+        use_instead="airflow.providers.google.common.consts.CLIENT_INFO",
         category=AirflowProviderDeprecationWarning,
     )
     def client_info(self) -> ClientInfo:
airflow/providers/google/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n - `Google Ads <https://ads.google.com/>`__\n - `Google Cloud (GCP) <https://cloud.google.com/>`__\n - `Google Firebase <https://firebase.google.com/>`__\n - `Google LevelDB <https://github.com/google/leveldb/>`__\n - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch": 1723970253,
+        "source-date-epoch": 1726860901,
         "versions": [
+            "10.23.0",
             "10.22.0",
             "10.21.1",
             "10.21.0",
@@ -101,15 +102,15 @@ def get_provider_info():
             "gcloud-aio-bigquery>=6.1.2",
             "gcloud-aio-storage>=9.0.0",
             "gcsfs>=2023.10.0",
-            "google-ads>=24.1.0",
+            "google-ads>=25.0.0",
             "google-analytics-admin>=0.9.0",
             "google-api-core>=2.11.0,!=2.16.0,!=2.18.0",
             "google-api-python-client>=2.0.2",
             "google-auth>=2.29.0",
             "google-auth-httplib2>=0.0.1",
-            "google-cloud-aiplatform>=1.57.0",
+            "google-cloud-aiplatform>=1.63.0",
             "google-cloud-automl>=2.12.0",
-            "google-cloud-bigquery<3.21.0,>=3.4.0",
+            "google-cloud-bigquery>=3.4.0,!=3.21.*,!=3.22.0,!=3.23.*",
             "google-cloud-bigquery-datatransfer>=3.13.0",
             "google-cloud-bigtable>=2.17.0",
             "google-cloud-build>=3.22.0",
@@ -119,7 +120,7 @@ def get_provider_info():
             "google-cloud-dataflow-client>=0.8.6",
             "google-cloud-dataform>=0.5.0",
             "google-cloud-dataplex>=1.10.0",
-            "google-cloud-dataproc>=5.8.0",
+            "google-cloud-dataproc>=5.12.0",
             "google-cloud-dataproc-metastore>=1.12.0",
             "google-cloud-dlp>=3.12.0",
             "google-cloud-kms>=2.15.0",
@@ -132,7 +133,7 @@ def get_provider_info():
             "google-cloud-pubsub>=2.19.0",
             "google-cloud-redis>=2.12.0",
             "google-cloud-secret-manager>=2.16.0",
-            "google-cloud-spanner>=3.11.1",
+            "google-cloud-spanner>=3.11.1,!=3.49.0",
             "google-cloud-speech>=2.18.0",
             "google-cloud-storage>=2.7.0",
             "google-cloud-storage-transfer>=1.4.1",
@@ -157,6 +158,7 @@ def get_provider_info():
             "sqlalchemy-bigquery>=1.2.1",
             "sqlalchemy-spanner>=1.6.2",
             "tenacity>=8.1.0",
+            "immutabledict>=4.2.0",
         ],
         "additional-extras": [
            {"name": "apache.beam", "dependencies": ["apache-beam[gcp]"]},
@@ -905,10 +907,6 @@ def get_provider_info():
                 "integration-name": "Google Display&Video 360",
                 "python-modules": ["airflow.providers.google.marketing_platform.sensors.display_video"],
             },
-            {
-                "integration-name": "Google Search Ads 360",
-                "python-modules": ["airflow.providers.google.marketing_platform.sensors.search_ads"],
-            },
             {
                 "integration-name": "Google Looker",
                 "python-modules": ["airflow.providers.google.cloud.sensors.looker"],
airflow/providers/google/marketing_platform/hooks/analytics.py
@@ -19,16 +19,18 @@ from __future__ import annotations
 
 from typing import Any
 
-from deprecated import deprecated
 from googleapiclient.discovery import Resource, build
 from googleapiclient.http import MediaFileUpload
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
 
 
 @deprecated(
-    reason="The `GoogleAnalyticsHook` class is deprecated, please use `GoogleAnalyticsAdminHook` instead.",
+    planned_removal_date="November 01, 2024",
+    use_instead="GoogleAnalyticsAdminHook",
+    reason="The Google Analytics API v3 has sunset and is no longer available as of July 1, 2024.",
     category=AirflowProviderDeprecationWarning,
 )
 class GoogleAnalyticsHook(GoogleBaseHook):
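With this richer metadata, instantiating the hook should emit a fully assembled warning. A sketch of observing it (assuming GoogleAnalyticsHook can be constructed with default arguments; the printed text is paraphrased from AirflowDeprecationAdapter.get_deprecated_msg above):

# Sketch: capturing the deprecation warning at instantiation time.
import warnings

from airflow.providers.google.marketing_platform.hooks.analytics import GoogleAnalyticsHook

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    GoogleAnalyticsHook()  # assumes defaults suffice for construction
    print(caught[0].message)
# Roughly:
#   The class `airflow.providers.google.marketing_platform.hooks.analytics.GoogleAnalyticsHook`
#   is deprecated and will be removed after November 01, 2024. Please use
#   `GoogleAnalyticsAdminHook` instead. The reason is: The Google Analytics API v3
#   has sunset and is no longer available as of July 1, 2024.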