apache-airflow-providers-google 16.1.0rc1__py3-none-any.whl → 17.0.0rc1__py3-none-any.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (66)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +1 -5
  3. airflow/providers/google/cloud/hooks/bigquery.py +1 -130
  4. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  5. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +5 -5
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +1 -1
  8. airflow/providers/google/cloud/hooks/dataflow.py +0 -85
  9. airflow/providers/google/cloud/hooks/datafusion.py +1 -1
  10. airflow/providers/google/cloud/hooks/dataprep.py +1 -4
  11. airflow/providers/google/cloud/hooks/dataproc.py +68 -70
  12. airflow/providers/google/cloud/hooks/gcs.py +3 -5
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
  14. airflow/providers/google/cloud/hooks/looker.py +1 -5
  15. airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
  16. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +4 -4
  17. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +7 -0
  19. airflow/providers/google/cloud/links/kubernetes_engine.py +3 -0
  20. airflow/providers/google/cloud/log/gcs_task_handler.py +2 -2
  21. airflow/providers/google/cloud/log/stackdriver_task_handler.py +1 -1
  22. airflow/providers/google/cloud/openlineage/mixins.py +7 -7
  23. airflow/providers/google/cloud/operators/automl.py +1 -1
  24. airflow/providers/google/cloud/operators/bigquery.py +8 -609
  25. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  26. airflow/providers/google/cloud/operators/cloud_sql.py +1 -5
  27. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +2 -2
  28. airflow/providers/google/cloud/operators/dataproc.py +1 -1
  29. airflow/providers/google/cloud/operators/dlp.py +2 -2
  30. airflow/providers/google/cloud/operators/kubernetes_engine.py +4 -4
  31. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  32. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +7 -1
  33. airflow/providers/google/cloud/operators/vertex_ai/ray.py +7 -5
  34. airflow/providers/google/cloud/operators/vision.py +1 -1
  35. airflow/providers/google/cloud/sensors/dataflow.py +23 -6
  36. airflow/providers/google/cloud/sensors/datafusion.py +2 -2
  37. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
  38. airflow/providers/google/cloud/transfers/gcs_to_local.py +3 -1
  39. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +9 -9
  40. airflow/providers/google/cloud/triggers/bigquery.py +11 -13
  41. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  42. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  43. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +1 -1
  44. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  45. airflow/providers/google/cloud/triggers/dataproc.py +10 -9
  46. airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
  47. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  48. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  49. airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
  50. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  51. airflow/providers/google/common/hooks/base_google.py +2 -6
  52. airflow/providers/google/common/utils/id_token_credentials.py +2 -2
  53. airflow/providers/google/get_provider_info.py +19 -16
  54. airflow/providers/google/leveldb/hooks/leveldb.py +1 -5
  55. airflow/providers/google/marketing_platform/hooks/display_video.py +47 -3
  56. airflow/providers/google/marketing_platform/links/analytics_admin.py +1 -1
  57. airflow/providers/google/marketing_platform/operators/display_video.py +64 -15
  58. airflow/providers/google/marketing_platform/sensors/display_video.py +9 -2
  59. airflow/providers/google/version_compat.py +10 -3
  60. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/METADATA +99 -93
  61. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/RECORD +63 -62
  62. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  63. airflow/providers/google/cloud/links/life_sciences.py +0 -30
  64. airflow/providers/google/cloud/operators/life_sciences.py +0 -118
  65. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/WHEEL +0 -0
  66. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/entry_points.txt +0 -0
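
Before relying on modules that were added in this release (for example the new cloud_logging hook, the cloud_logging_sink operators, or the vertex_ai experiment_service hook and operators) or removed from it (the life_sciences hook, link, and operator), it helps to confirm which provider version an environment actually has installed. A minimal sketch using only the Python standard library:

    # Minimal sketch (standard library only): report the installed provider version,
    # so you know whether the 17.0.0 module layout shown in this diff applies.
    from importlib.metadata import version

    print(version("apache-airflow-providers-google"))  # e.g. "16.1.0rc1" or "17.0.0rc1"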
airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
@@ -32,6 +32,8 @@ from vertexai.preview.caching import CachedContent
  from vertexai.preview.evaluation import EvalResult, EvalTask
  from vertexai.preview.tuning import sft

+ from airflow.exceptions import AirflowProviderDeprecationWarning
+ from airflow.providers.google.common.deprecated import deprecated
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook

  if TYPE_CHECKING:
@@ -361,6 +363,11 @@ class GenerativeModelHook(GoogleBaseHook):
          return response.text


+ @deprecated(
+     planned_removal_date="January 3, 2026",
+     use_instead="airflow.providers.google.cloud.hooks.vertex_ai.experiment_service.ExperimentRunHook",
+     category=AirflowProviderDeprecationWarning,
+ )
  class ExperimentRunHook(GoogleBaseHook):
      """Use the Vertex AI SDK for Python to create and manage your experiment runs."""

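The hunk above deprecates the ExperimentRunHook defined in generative_model.py in favor of the new experiment_service hook module added in this release. A minimal migration sketch; the new import path is taken from the use_instead hint, while the gcp_conn_id argument is an assumption based on the usual GoogleBaseHook constructor, not something shown in this diff:

    # Deprecated from 17.0.0 (emits AirflowProviderDeprecationWarning, removal planned January 3, 2026):
    # from airflow.providers.google.cloud.hooks.vertex_ai.generative_model import ExperimentRunHook
    # Replacement path, as given by the deprecation notice above:
    from airflow.providers.google.cloud.hooks.vertex_ai.experiment_service import ExperimentRunHook

    hook = ExperimentRunHook(gcp_conn_id="google_cloud_default")  # assumed GoogleBaseHook-style kwargs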
airflow/providers/google/cloud/links/kubernetes_engine.py
@@ -57,6 +57,9 @@ class KubernetesEngineClusterLink(BaseGoogleLink):
          if isinstance(cluster, dict):
              cluster = Cluster.from_json(json.dumps(cluster))

+         if not cluster:
+             raise ValueError("Cluster must be provided for KubernetesEngineClusterLink.")
+
          super().persist(
              context=context,
              cluster_name=cluster.name,
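This guard only changes the failure mode: a call that reaches persist() without a cluster now raises an explicit ValueError instead of failing later with an AttributeError on cluster.name. A small illustration of the new behaviour (the cluster value here is a stand-in, not provider code):

    # Illustration only: an empty/None cluster now fails fast with a clear message.
    cluster = None
    if not cluster:
        raise ValueError("Cluster must be provided for KubernetesEngineClusterLink.")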
airflow/providers/google/cloud/log/gcs_task_handler.py
@@ -27,8 +27,8 @@ from typing import TYPE_CHECKING

  import attrs

- # not sure why but mypy complains on missing `storage` but it is clearly there and is importable
- from google.cloud import storage  # type: ignore[attr-defined]
+ # Make mypy happy by importing as aliases
+ import google.cloud.storage as storage

  from airflow.configuration import conf
  from airflow.exceptions import AirflowNotFoundException
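The rewritten import is purely a typing aid; both spellings bind the same module object at runtime, so no call sites in the handler change. A quick equivalence sketch (the local alias names are invented for the example):

    # Both forms resolve to the same `google.cloud.storage` module at runtime;
    # the explicit dotted form is simply easier for mypy to resolve.
    import google.cloud.storage as storage_via_dotted
    from google.cloud import storage as storage_via_from  # the pre-17.0.0 spelling

    assert storage_via_dotted is storage_via_from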
airflow/providers/google/cloud/log/stackdriver_task_handler.py
@@ -159,7 +159,7 @@ class StackdriverTaskHandler(logging.Handler):
          """Object responsible for sending data to Stackdriver."""
          # The Transport object is badly defined (no init) but in the docs client/name as constructor
          # arguments are a requirement for any class that derives from Transport class, hence ignore:
-         return self.transport_type(self._client, self.gcp_log_name)  # type: ignore[call-arg]
+         return self.transport_type(self._client, self.gcp_log_name)

      def _get_labels(self, task_instance=None):
          if task_instance:
airflow/providers/google/cloud/openlineage/mixins.py
@@ -80,7 +80,7 @@ class _BigQueryInsertJobOperatorOpenLineageMixin:
          from airflow.providers.openlineage.sqlparser import SQLParser

          if not self.job_id:
-             self.log.warning("No BigQuery job_id was found by OpenLineage.")  # type: ignore[attr-defined]
+             self.log.warning("No BigQuery job_id was found by OpenLineage.")
              return OperatorLineage()

          if not self.hook:
@@ -92,14 +92,14 @@ class _BigQueryInsertJobOperatorOpenLineageMixin:
                  impersonation_chain=self.impersonation_chain,
              )

-         self.log.debug("Extracting data from bigquery job: `%s`", self.job_id)  # type: ignore[attr-defined]
+         self.log.debug("Extracting data from bigquery job: `%s`", self.job_id)
          inputs, outputs = [], []
          run_facets: dict[str, RunFacet] = {
              "externalQuery": ExternalQueryRunFacet(externalQueryId=self.job_id, source="bigquery")
          }
          self._client = self.hook.get_client(project_id=self.hook.project_id, location=self.location)
          try:
-             job_properties = self._client.get_job(job_id=self.job_id)._properties  # type: ignore
+             job_properties = self._client.get_job(job_id=self.job_id)._properties

              if get_from_nullable_chain(job_properties, ["status", "state"]) != "DONE":
                  raise ValueError(f"Trying to extract data from running bigquery job: `{self.job_id}`")
@@ -107,11 +107,11 @@ class _BigQueryInsertJobOperatorOpenLineageMixin:
              run_facets["bigQueryJob"] = self._get_bigquery_job_run_facet(job_properties)

              if get_from_nullable_chain(job_properties, ["statistics", "numChildJobs"]):
-                 self.log.debug("Found SCRIPT job. Extracting lineage from child jobs instead.")  # type: ignore[attr-defined]
+                 self.log.debug("Found SCRIPT job. Extracting lineage from child jobs instead.")
                  # SCRIPT job type has no input / output information but spawns child jobs that have one
                  # https://cloud.google.com/bigquery/docs/information-schema-jobs#multi-statement_query_job
                  for child_job_id in self._client.list_jobs(parent_job=self.job_id):
-                     child_job_properties = self._client.get_job(job_id=child_job_id)._properties  # type: ignore
+                     child_job_properties = self._client.get_job(job_id=child_job_id)._properties
                      child_inputs, child_outputs = self._get_inputs_and_outputs(child_job_properties)
                      inputs.extend(child_inputs)
                      outputs.extend(child_outputs)
@@ -119,7 +119,7 @@ class _BigQueryInsertJobOperatorOpenLineageMixin:
                  inputs, outputs = self._get_inputs_and_outputs(job_properties)

          except Exception as e:
-             self.log.warning("Cannot retrieve job details from BigQuery.Client. %s", e, exc_info=True)  # type: ignore[attr-defined]
+             self.log.warning("Cannot retrieve job details from BigQuery.Client. %s", e, exc_info=True)
              exception_msg = traceback.format_exc()
              run_facets.update(
                  {
@@ -173,7 +173,7 @@ class _BigQueryInsertJobOperatorOpenLineageMixin:
              if (
                  single_output.facets
                  and final_outputs[key].facets
-                 and "columnLineage" in single_output.facets  # type: ignore
+                 and "columnLineage" in single_output.facets
                  and "columnLineage" in final_outputs[key].facets  # type: ignore
              ):
                  single_output.facets["columnLineage"] = merge_column_lineage_facets(
airflow/providers/google/cloud/operators/automl.py
@@ -244,7 +244,7 @@ class AutoMLPredictOperator(GoogleCloudBaseOperator):

          self.model_id = model_id
          self.endpoint_id = endpoint_id
-         self.operation_params = operation_params  # type: ignore
+         self.operation_params = operation_params
          self.instances = instances
          self.location = location
          self.project_id = project_id