apache-airflow-providers-google 10.14.0rc1__py3-none-any.whl → 10.15.0rc1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their public registries. It is provided for informational purposes only.
Files changed (126)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +1 -2
  3. airflow/providers/google/cloud/hooks/automl.py +13 -13
  4. airflow/providers/google/cloud/hooks/bigquery.py +208 -256
  5. airflow/providers/google/cloud/hooks/bigquery_dts.py +6 -6
  6. airflow/providers/google/cloud/hooks/bigtable.py +8 -8
  7. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  8. airflow/providers/google/cloud/hooks/cloud_build.py +19 -20
  9. airflow/providers/google/cloud/hooks/cloud_composer.py +4 -4
  10. airflow/providers/google/cloud/hooks/cloud_memorystore.py +10 -10
  11. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  12. airflow/providers/google/cloud/hooks/cloud_sql.py +18 -19
  13. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +3 -3
  14. airflow/providers/google/cloud/hooks/compute.py +16 -16
  15. airflow/providers/google/cloud/hooks/compute_ssh.py +1 -1
  16. airflow/providers/google/cloud/hooks/datacatalog.py +22 -22
  17. airflow/providers/google/cloud/hooks/dataflow.py +48 -49
  18. airflow/providers/google/cloud/hooks/dataform.py +16 -16
  19. airflow/providers/google/cloud/hooks/datafusion.py +15 -15
  20. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  21. airflow/providers/google/cloud/hooks/dataplex.py +19 -19
  22. airflow/providers/google/cloud/hooks/dataprep.py +10 -10
  23. airflow/providers/google/cloud/hooks/dataproc.py +132 -14
  24. airflow/providers/google/cloud/hooks/dataproc_metastore.py +13 -13
  25. airflow/providers/google/cloud/hooks/datastore.py +3 -3
  26. airflow/providers/google/cloud/hooks/dlp.py +25 -25
  27. airflow/providers/google/cloud/hooks/gcs.py +39 -27
  28. airflow/providers/google/cloud/hooks/gdm.py +3 -3
  29. airflow/providers/google/cloud/hooks/kms.py +3 -3
  30. airflow/providers/google/cloud/hooks/kubernetes_engine.py +63 -48
  31. airflow/providers/google/cloud/hooks/life_sciences.py +13 -12
  32. airflow/providers/google/cloud/hooks/looker.py +8 -9
  33. airflow/providers/google/cloud/hooks/mlengine.py +12 -12
  34. airflow/providers/google/cloud/hooks/natural_language.py +2 -2
  35. airflow/providers/google/cloud/hooks/os_login.py +1 -1
  36. airflow/providers/google/cloud/hooks/pubsub.py +9 -9
  37. airflow/providers/google/cloud/hooks/secret_manager.py +1 -1
  38. airflow/providers/google/cloud/hooks/spanner.py +11 -11
  39. airflow/providers/google/cloud/hooks/speech_to_text.py +1 -1
  40. airflow/providers/google/cloud/hooks/stackdriver.py +7 -7
  41. airflow/providers/google/cloud/hooks/tasks.py +11 -11
  42. airflow/providers/google/cloud/hooks/text_to_speech.py +1 -1
  43. airflow/providers/google/cloud/hooks/translate.py +1 -1
  44. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +13 -13
  45. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +6 -6
  46. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +45 -50
  47. airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +13 -13
  48. airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +9 -9
  49. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +128 -11
  50. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +10 -10
  51. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +8 -8
  52. airflow/providers/google/cloud/hooks/video_intelligence.py +2 -2
  53. airflow/providers/google/cloud/hooks/vision.py +1 -1
  54. airflow/providers/google/cloud/hooks/workflows.py +10 -10
  55. airflow/providers/google/cloud/links/datafusion.py +12 -5
  56. airflow/providers/google/cloud/operators/bigquery.py +11 -11
  57. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +3 -1
  58. airflow/providers/google/cloud/operators/dataflow.py +16 -16
  59. airflow/providers/google/cloud/operators/datafusion.py +9 -1
  60. airflow/providers/google/cloud/operators/dataproc.py +444 -69
  61. airflow/providers/google/cloud/operators/kubernetes_engine.py +6 -6
  62. airflow/providers/google/cloud/operators/life_sciences.py +10 -9
  63. airflow/providers/google/cloud/operators/mlengine.py +96 -96
  64. airflow/providers/google/cloud/operators/pubsub.py +2 -0
  65. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +33 -3
  66. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +59 -2
  67. airflow/providers/google/cloud/secrets/secret_manager.py +8 -7
  68. airflow/providers/google/cloud/sensors/bigquery.py +20 -16
  69. airflow/providers/google/cloud/sensors/cloud_composer.py +11 -8
  70. airflow/providers/google/cloud/sensors/dataproc_metastore.py +12 -2
  71. airflow/providers/google/cloud/sensors/gcs.py +8 -7
  72. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +1 -0
  73. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +4 -4
  74. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -0
  75. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  76. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  77. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +1 -1
  78. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +1 -1
  79. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +1 -1
  80. airflow/providers/google/cloud/transfers/presto_to_gcs.py +1 -1
  81. airflow/providers/google/cloud/transfers/s3_to_gcs.py +3 -3
  82. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  83. airflow/providers/google/cloud/transfers/sql_to_gcs.py +3 -3
  84. airflow/providers/google/cloud/transfers/trino_to_gcs.py +1 -1
  85. airflow/providers/google/cloud/triggers/bigquery.py +12 -12
  86. airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -1
  87. airflow/providers/google/cloud/triggers/cloud_batch.py +3 -1
  88. airflow/providers/google/cloud/triggers/cloud_build.py +2 -2
  89. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  90. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +6 -6
  91. airflow/providers/google/cloud/triggers/dataflow.py +3 -1
  92. airflow/providers/google/cloud/triggers/datafusion.py +2 -2
  93. airflow/providers/google/cloud/triggers/dataplex.py +2 -2
  94. airflow/providers/google/cloud/triggers/dataproc.py +34 -14
  95. airflow/providers/google/cloud/triggers/gcs.py +12 -8
  96. airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -2
  97. airflow/providers/google/cloud/triggers/mlengine.py +2 -2
  98. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  99. airflow/providers/google/cloud/triggers/vertex_ai.py +99 -0
  100. airflow/providers/google/cloud/utils/bigquery.py +2 -2
  101. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  102. airflow/providers/google/cloud/utils/dataform.py +1 -1
  103. airflow/providers/google/cloud/utils/dataproc.py +25 -0
  104. airflow/providers/google/cloud/utils/field_validator.py +2 -2
  105. airflow/providers/google/cloud/utils/helpers.py +2 -2
  106. airflow/providers/google/cloud/utils/mlengine_operator_utils.py +1 -1
  107. airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +1 -1
  108. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  109. airflow/providers/google/common/hooks/base_google.py +87 -23
  110. airflow/providers/google/common/hooks/discovery_api.py +2 -2
  111. airflow/providers/google/common/utils/id_token_credentials.py +5 -5
  112. airflow/providers/google/firebase/hooks/firestore.py +3 -3
  113. airflow/providers/google/get_provider_info.py +7 -2
  114. airflow/providers/google/leveldb/hooks/leveldb.py +4 -4
  115. airflow/providers/google/marketing_platform/hooks/analytics.py +11 -14
  116. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +11 -11
  117. airflow/providers/google/marketing_platform/hooks/display_video.py +13 -13
  118. airflow/providers/google/marketing_platform/hooks/search_ads.py +4 -4
  119. airflow/providers/google/marketing_platform/operators/analytics.py +37 -32
  120. airflow/providers/google/suite/hooks/calendar.py +2 -2
  121. airflow/providers/google/suite/hooks/drive.py +7 -7
  122. airflow/providers/google/suite/hooks/sheets.py +8 -8
  123. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/METADATA +11 -11
  124. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/RECORD +126 -124
  125. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/WHEEL +0 -0
  126. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/entry_points.txt +0 -0
@@ -15,22 +15,32 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- """This module contains a Google Cloud Vertex AI hook."""
+ """
+ This module contains a Google Cloud Vertex AI hook.
+
+ .. spelling:word-list::
+
+ JobServiceAsyncClient
+ """
  from __future__ import annotations

+ import asyncio
+ from functools import lru_cache
  from typing import TYPE_CHECKING, Sequence

  from google.api_core.client_options import ClientOptions
  from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
  from google.cloud.aiplatform import CustomJob, HyperparameterTuningJob, gapic, hyperparameter_tuning
- from google.cloud.aiplatform_v1 import JobServiceClient, types
+ from google.cloud.aiplatform_v1 import JobServiceAsyncClient, JobServiceClient, JobState, types

  from airflow.exceptions import AirflowException
+ from airflow.providers.google.common.consts import CLIENT_INFO
  from airflow.providers.google.common.hooks.base_google import GoogleBaseHook

  if TYPE_CHECKING:
  from google.api_core.operation import Operation
  from google.api_core.retry import Retry
+ from google.api_core.retry_async import AsyncRetry
  from google.cloud.aiplatform_v1.services.job_service.pagers import ListHyperparameterTuningJobsPager


@@ -55,7 +65,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  self._hyperparameter_tuning_job: HyperparameterTuningJob | None = None

  def get_job_service_client(self, region: str | None = None) -> JobServiceClient:
- """Returns JobServiceClient."""
+ """Return JobServiceClient."""
  if region and region != "global":
  client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443")
  else:
@@ -81,7 +91,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  labels: dict[str, str] | None = None,
  encryption_spec_key_name: str | None = None,
  ) -> HyperparameterTuningJob:
- """Returns HyperparameterTuningJob object."""
+ """Return HyperparameterTuningJob object."""
  return HyperparameterTuningJob(
  display_name=display_name,
  custom_job=custom_job,
@@ -110,7 +120,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  encryption_spec_key_name: str | None = None,
  staging_bucket: str | None = None,
  ) -> CustomJob:
- """Returns CustomJob object."""
+ """Return CustomJob object."""
  return CustomJob(
  display_name=display_name,
  worker_pool_specs=worker_pool_specs,
@@ -125,11 +135,11 @@ class HyperparameterTuningJobHook(GoogleBaseHook):

  @staticmethod
  def extract_hyperparameter_tuning_job_id(obj: dict) -> str:
- """Returns unique id of the hyperparameter_tuning_job."""
+ """Return unique id of the hyperparameter_tuning_job."""
  return obj["name"].rpartition("/")[-1]

  def wait_for_operation(self, operation: Operation, timeout: float | None = None):
- """Waits for long-lasting operation to complete."""
+ """Wait for long-lasting operation to complete."""
  try:
  return operation.result(timeout=timeout)
  except Exception:
@@ -172,6 +182,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  tensorboard: str | None = None,
  sync: bool = True,
  # END: run param
+ wait_job_completed: bool = True,
  ) -> HyperparameterTuningJob:
  """
  Create a HyperparameterTuningJob.
@@ -256,6 +267,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
  :param sync: Whether to execute this method synchronously. If False, this method will unblock and it
  will be executed in a concurrent Future.
+ :param wait_job_completed: Whether to wait for the job completed.
  """
  custom_job = self.get_custom_job_object(
  project=project_id,
@@ -292,7 +304,11 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  tensorboard=tensorboard,
  sync=sync,
  )
- self._hyperparameter_tuning_job.wait()
+
+ if wait_job_completed:
+ self._hyperparameter_tuning_job.wait()
+ else:
+ self._hyperparameter_tuning_job._wait_for_resource_creation()
  return self._hyperparameter_tuning_job

  @GoogleBaseHook.fallback_to_default_project_id
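The hunk above adds a wait_job_completed flag to create_hyperparameter_tuning_job: when it is False the hook only waits for the job resource to be created rather than blocking until all trials finish. A minimal usage sketch follows; the project, region, specs, and other argument values are placeholders for illustration and are not part of this diff.

from google.cloud.aiplatform import hyperparameter_tuning as hpt

from airflow.providers.google.cloud.hooks.vertex_ai.hyperparameter_tuning_job import (
    HyperparameterTuningJobHook,
)

hook = HyperparameterTuningJobHook(gcp_conn_id="google_cloud_default")

job = hook.create_hyperparameter_tuning_job(
    project_id="my-project",   # placeholder
    region="us-central1",      # placeholder
    display_name="hpt-example",
    metric_spec={"loss": "minimize"},
    parameter_spec={"learning_rate": hpt.DoubleParameterSpec(min=1e-4, max=1e-1, scale="log")},
    max_trial_count=4,
    parallel_trial_count=2,
    # Trimmed placeholder; a real spec also needs a container_spec or python_package_spec.
    worker_pool_specs=[{"machine_spec": {"machine_type": "n1-standard-4"}, "replica_count": 1}],
    # New flag: return once the job resource exists instead of waiting for completion.
    wait_job_completed=False,
)
print(job.display_name)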
@@ -306,7 +322,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> types.HyperparameterTuningJob:
  """
- Gets a HyperparameterTuningJob.
+ Get a HyperparameterTuningJob.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -342,7 +358,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListHyperparameterTuningJobsPager:
  """
- Lists HyperparameterTuningJobs in a Location.
+ List HyperparameterTuningJobs in a Location.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -391,7 +407,7 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Deletes a HyperparameterTuningJob.
+ Delete a HyperparameterTuningJob.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -413,3 +429,104 @@ class HyperparameterTuningJobHook(GoogleBaseHook):
  metadata=metadata,
  )
  return result
+
+
+ class HyperparameterTuningJobAsyncHook(GoogleBaseHook):
+ """Async hook for Google Cloud Vertex AI Hyperparameter Tuning Job APIs."""
+
+ def __init__(
+ self,
+ gcp_conn_id: str = "google_cloud_default",
+ impersonation_chain: str | Sequence[str] | None = None,
+ **kwargs,
+ ):
+ super().__init__(
+ gcp_conn_id=gcp_conn_id,
+ impersonation_chain=impersonation_chain,
+ **kwargs,
+ )
+
+ @lru_cache
+ def get_job_service_client(self, region: str | None = None) -> JobServiceAsyncClient:
+ """
+ Retrieve Vertex AI async client.
+
+ :return: Google Cloud Vertex AI client object.
+ """
+ endpoint = f"{region}-aiplatform.googleapis.com:443" if region and region != "global" else None
+ return JobServiceAsyncClient(
+ credentials=self.get_credentials(),
+ client_info=CLIENT_INFO,
+ client_options=ClientOptions(api_endpoint=endpoint),
+ )
+
+ async def get_hyperparameter_tuning_job(
+ self,
+ project_id: str,
+ location: str,
+ job_id: str,
+ retry: AsyncRetry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ ) -> types.HyperparameterTuningJob:
+ """
+ Retrieve a hyperparameter tuning job.
+
+ :param project_id: Required. The ID of the Google Cloud project that the job belongs to.
+ :param location: Required. The ID of the Google Cloud region that the job belongs to.
+ :param job_id: Required. The hyperparameter tuning job id.
+ :param retry: Designation of what errors, if any, should be retried.
+ :param timeout: The timeout for this request.
+ :param metadata: Strings which should be sent along with the request as metadata.
+ """
+ client: JobServiceAsyncClient = self.get_job_service_client(region=location)
+ job_name = client.hyperparameter_tuning_job_path(project_id, location, job_id)
+
+ result = await client.get_hyperparameter_tuning_job(
+ request={
+ "name": job_name,
+ },
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ return result
+
+ async def wait_hyperparameter_tuning_job(
+ self,
+ project_id: str,
+ location: str,
+ job_id: str,
+ retry: AsyncRetry | _MethodDefault = DEFAULT,
+ timeout: float | None = None,
+ metadata: Sequence[tuple[str, str]] = (),
+ poll_interval: int = 10,
+ ) -> types.HyperparameterTuningJob:
+ statuses_complete = {
+ JobState.JOB_STATE_CANCELLED,
+ JobState.JOB_STATE_FAILED,
+ JobState.JOB_STATE_PAUSED,
+ JobState.JOB_STATE_SUCCEEDED,
+ }
+ while True:
+ try:
+ self.log.info("Requesting hyperparameter tuning job with id %s", job_id)
+ job: types.HyperparameterTuningJob = await self.get_hyperparameter_tuning_job(
+ project_id=project_id,
+ location=location,
+ job_id=job_id,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+ except Exception as ex:
+ self.log.exception("Exception occurred while requesting job %s", job_id)
+ raise AirflowException(ex)
+
+ self.log.info("Status of the hyperparameter tuning job %s is %s", job.name, job.state.name)
+ if job.state in statuses_complete:
+ return job
+
+ self.log.info("Sleeping for %s seconds.", poll_interval)
+ await asyncio.sleep(poll_interval)
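For context, a sketch of how the new HyperparameterTuningJobAsyncHook added above might be driven, for example from a trigger's run() method; the connection id, project, location, and job id below are placeholders.

import asyncio

from airflow.providers.google.cloud.hooks.vertex_ai.hyperparameter_tuning_job import (
    HyperparameterTuningJobAsyncHook,
)


async def watch_job() -> None:
    hook = HyperparameterTuningJobAsyncHook(gcp_conn_id="google_cloud_default")
    # Polls get_hyperparameter_tuning_job every poll_interval seconds until the job
    # reaches a terminal (or paused) state, then returns the job proto.
    job = await hook.wait_hyperparameter_tuning_job(
        project_id="my-project",  # placeholder
        location="us-central1",   # placeholder
        job_id="1234567890",      # placeholder numeric job id
        poll_interval=30,
    )
    print(job.state.name)  # e.g. JOB_STATE_SUCCEEDED


asyncio.run(watch_job())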
@@ -51,7 +51,7 @@ class ModelServiceHook(GoogleBaseHook):
  super().__init__(**kwargs)

  def get_model_service_client(self, region: str | None = None) -> ModelServiceClient:
- """Returns ModelServiceClient."""
+ """Return ModelServiceClient object."""
  if region and region != "global":
  client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443")
  else:
@@ -63,11 +63,11 @@ class ModelServiceHook(GoogleBaseHook):

  @staticmethod
  def extract_model_id(obj: dict) -> str:
- """Returns unique id of the model."""
+ """Return unique id of the model."""
  return obj["model"].rpartition("/")[-1]

  def wait_for_operation(self, operation: Operation, timeout: float | None = None):
- """Waits for long-lasting operation to complete."""
+ """Wait for long-lasting operation to complete."""
  try:
  return operation.result(timeout=timeout)
  except Exception:
@@ -85,7 +85,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Deletes a Model.
+ Delete a Model.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -119,7 +119,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Exports a trained, exportable Model to a location specified by the user.
+ Export a trained, exportable Model to a location specified by the user.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -158,7 +158,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListModelsPager:
  r"""
- Lists Models in a Location.
+ List Models in a Location.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -213,7 +213,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Uploads a Model artifact into Vertex AI.
+ Upload a Model artifact into Vertex AI.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -247,7 +247,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListModelVersionsPager:
  """
- Lists all versions of the existing Model.
+ List all versions of the existing Model.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -280,7 +280,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Deletes version of the Model. The version could not be deleted if this version is default.
+ Delete version of the Model. The version could not be deleted if this version is default.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -313,7 +313,7 @@ class ModelServiceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Model:
  """
- Retrieves Model of specific name and version. If version is not specified, the default is retrieved.
+ Retrieve Model of specific name and version. If version is not specified, the default is retrieved.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -60,7 +60,7 @@ class PipelineJobHook(GoogleBaseHook):
  self,
  region: str | None = None,
  ) -> PipelineServiceClient:
- """Returns PipelineServiceClient."""
+ """Return PipelineServiceClient object."""
  if region and region != "global":
  client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443")
  else:
@@ -84,7 +84,7 @@ class PipelineJobHook(GoogleBaseHook):
  location: str | None = None,
  failure_policy: str | None = None,
  ) -> PipelineJob:
- """Returns PipelineJob object."""
+ """Return PipelineJob object."""
  return PipelineJob(
  display_name=display_name,
  template_path=template_path,
@@ -103,11 +103,11 @@ class PipelineJobHook(GoogleBaseHook):

  @staticmethod
  def extract_pipeline_job_id(obj: dict) -> str:
- """Returns unique id of the pipeline_job."""
+ """Return unique id of the pipeline_job."""
  return obj["name"].rpartition("/")[-1]

  def wait_for_operation(self, operation: Operation, timeout: float | None = None):
- """Waits for long-lasting operation to complete."""
+ """Wait for long-lasting operation to complete."""
  try:
  return operation.result(timeout=timeout)
  except Exception:
@@ -131,7 +131,7 @@ class PipelineJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> PipelineJob:
  """
- Creates a PipelineJob. A PipelineJob will run immediately when created.
+ Create a PipelineJob. A PipelineJob will run immediately when created.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -265,7 +265,7 @@ class PipelineJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> PipelineJob:
  """
- Gets a PipelineJob.
+ Get a PipelineJob.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -301,7 +301,7 @@ class PipelineJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListPipelineJobsPager:
  """
- Lists PipelineJobs in a Location.
+ List PipelineJobs in a Location.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -386,7 +386,7 @@ class PipelineJobHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Deletes a PipelineJob.
+ Delete a PipelineJob.

  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -71,7 +71,7 @@ class CloudVideoIntelligenceHook(GoogleBaseHook):
  self._conn: VideoIntelligenceServiceClient | None = None

  def get_conn(self) -> VideoIntelligenceServiceClient:
- """Returns Gcp Video Intelligence Service client."""
+ """Return Gcp Video Intelligence Service client."""
  if not self._conn:
  self._conn = VideoIntelligenceServiceClient(
  credentials=self.get_credentials(), client_info=CLIENT_INFO
@@ -92,7 +92,7 @@ class CloudVideoIntelligenceHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Performs video annotation.
+ Perform video annotation.

  :param input_uri: Input video location. Currently, only Google Cloud Storage URIs are supported,
  which must be specified in the following format: ``gs://bucket-id/object-id``.
@@ -142,7 +142,7 @@ class CloudVisionHook(GoogleBaseHook):

  def get_conn(self) -> ProductSearchClient:
  """
- Retrieves connection to Cloud Vision.
+ Retrieve a connection to Cloud Vision.

  :return: Google Cloud Vision client object.
  """
@@ -50,11 +50,11 @@ class WorkflowsHook(GoogleBaseHook):
  super().__init__(**kwargs)

  def get_workflows_client(self) -> WorkflowsClient:
- """Returns WorkflowsClient."""
+ """Return WorkflowsClient object."""
  return WorkflowsClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)

  def get_executions_client(self) -> ExecutionsClient:
- """Returns ExecutionsClient."""
+ """Return ExecutionsClient object."""
  return ExecutionsClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)

  @GoogleBaseHook.fallback_to_default_project_id
@@ -69,7 +69,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Creates a new workflow.
+ Create a new workflow.

  If a workflow with the specified name already exists in the
  specified project and location, the long running operation will
@@ -106,7 +106,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Workflow:
  """
- Gets details of a single Workflow.
+ Get details of a single Workflow.

  :param workflow_id: Required. The ID of the workflow to be created.
  :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
@@ -131,7 +131,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Operation:
  """
- Updates an existing workflow.
+ Update an existing workflow.

  Running this method has no impact on already running
  executions of the workflow. A new revision of the
@@ -196,7 +196,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListWorkflowsPager:
  """
- Lists Workflows in a given project and location; the default order is not specified.
+ List Workflows in a given project and location; the default order is not specified.

  :param filter_: Filter to restrict results to specific workflows.
  :param order_by: Comma-separated list of fields that
@@ -234,7 +234,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Execution:
  """
- Creates a new execution using the latest revision of the given workflow.
+ Create a new execution using the latest revision of the given workflow.

  :param execution: Required. Input parameters of the execution represented as a dictionary.
  :param workflow_id: Required. The ID of the workflow.
@@ -269,7 +269,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Execution:
  """
- Returns an execution for the given ``workflow_id`` and ``execution_id``.
+ Return an execution for the given ``workflow_id`` and ``execution_id``.

  :param workflow_id: Required. The ID of the workflow.
  :param execution_id: Required. The ID of the execution.
@@ -298,7 +298,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> Execution:
  """
- Cancels an execution using the given ``workflow_id`` and ``execution_id``.
+ Cancel an execution using the given ``workflow_id`` and ``execution_id``.

  :param workflow_id: Required. The ID of the workflow.
  :param execution_id: Required. The ID of the execution.
@@ -328,7 +328,7 @@ class WorkflowsHook(GoogleBaseHook):
  metadata: Sequence[tuple[str, str]] = (),
  ) -> ListExecutionsPager:
  """
- Returns a list of executions which belong to the workflow with the given name.
+ Return a list of executions which belong to the workflow with the given name.

  The method returns executions of all workflow revisions. Returned
  executions are ordered by their start time (newest first).
@@ -15,7 +15,7 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- """This module contains Google Compute Engine links."""
+ """This module contains Google Data Fusion links."""
  from __future__ import annotations

  from typing import TYPE_CHECKING, ClassVar
@@ -30,8 +30,8 @@ if TYPE_CHECKING:

  BASE_LINK = "https://console.cloud.google.com/data-fusion"
  DATAFUSION_INSTANCE_LINK = BASE_LINK + "/locations/{region}/instances/{instance_name}?project={project_id}"
- DATAFUSION_PIPELINES_LINK = "{uri}/cdap/ns/default/pipelines"
- DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/default/view/{pipeline_name}"
+ DATAFUSION_PIPELINES_LINK = "{uri}/cdap/ns/{namespace}/pipelines"
+ DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/{namespace}/view/{pipeline_name}"


  class BaseGoogleLink(BaseOperatorLink):
@@ -52,10 +52,13 @@ class BaseGoogleLink(BaseOperatorLink):
  ti_key: TaskInstanceKey,
  ) -> str:
  conf = XCom.get_value(key=self.key, ti_key=ti_key)
+
  if not conf:
  return ""
- if self.format_str.startswith("http"):
- return self.format_str.format(**conf)
+
+ # Add a default value for the 'namespace' parameter for backward compatibility.
+ conf.setdefault("namespace", "default")
+
  return self.format_str.format(**conf)


@@ -98,6 +101,7 @@ class DataFusionPipelineLink(BaseGoogleLink):
  task_instance: BaseOperator,
  uri: str,
  pipeline_name: str,
+ namespace: str,
  ):
  task_instance.xcom_push(
  context=context,
@@ -105,6 +109,7 @@ class DataFusionPipelineLink(BaseGoogleLink):
  value={
  "uri": uri,
  "pipeline_name": pipeline_name,
+ "namespace": namespace,
  },
  )

@@ -121,11 +126,13 @@ class DataFusionPipelinesLink(BaseGoogleLink):
  context: Context,
  task_instance: BaseOperator,
  uri: str,
+ namespace: str,
  ):
  task_instance.xcom_push(
  context=context,
  key=DataFusionPipelinesLink.key,
  value={
  "uri": uri,
+ "namespace": namespace,
  },
  )
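The Data Fusion link changes above add a namespace field to the persisted XCom value and to the link templates, with BaseGoogleLink falling back to "default" for values written by older provider versions. A small self-contained sketch of how the updated template resolves; the URI and names are placeholders.

DATAFUSION_PIPELINE_LINK = "{uri}/pipelines/ns/{namespace}/view/{pipeline_name}"

conf = {
    "uri": "https://my-instance.datafusion.googleusercontent.com",  # placeholder
    "pipeline_name": "my_pipeline",
    # "namespace" may be absent when the XCom was persisted by an older provider version.
}
conf.setdefault("namespace", "default")  # same fallback BaseGoogleLink now applies
print(DATAFUSION_PIPELINE_LINK.format(**conf))
# https://my-instance.datafusion.googleusercontent.com/pipelines/ns/default/view/my_pipeline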
@@ -25,6 +25,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any, Iterable, Sequence, SupportsAbs

  import attr
+ from deprecated import deprecated
  from google.api_core.exceptions import Conflict
  from google.cloud.bigquery import DEFAULT_RETRY, CopyJob, ExtractJob, LoadJob, QueryJob
  from google.cloud.bigquery.table import RowIterator
@@ -1069,6 +1070,7 @@ class BigQueryGetDataOperator(GoogleCloudBaseOperator):
  project_id=self.job_project_id or hook.project_id,
  poll_interval=self.poll_interval,
  as_dict=self.as_dict,
+ impersonation_chain=self.impersonation_chain,
  ),
  method_name="execute_complete",
  )
@@ -1086,6 +1088,10 @@ class BigQueryGetDataOperator(GoogleCloudBaseOperator):
  return event["records"]


+ @deprecated(
+ reason="This operator is deprecated. Please use `BigQueryInsertJobOperator`.",
+ category=AirflowProviderDeprecationWarning,
+ )
  class BigQueryExecuteQueryOperator(GoogleCloudBaseOperator):
  """Executes BigQuery SQL queries in a specific BigQuery database.

@@ -1210,12 +1216,6 @@ class BigQueryExecuteQueryOperator(GoogleCloudBaseOperator):
  **kwargs,
  ) -> None:
  super().__init__(**kwargs)
- warnings.warn(
- "This operator is deprecated. Please use `BigQueryInsertJobOperator`.",
- AirflowProviderDeprecationWarning,
- stacklevel=2,
- )
-
  self.sql = sql
  self.destination_dataset_table = destination_dataset_table
  self.write_disposition = write_disposition
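BigQueryExecuteQueryOperator is now deprecated via the @deprecated decorator instead of a runtime warnings.warn call in __init__, and the deprecation message points at BigQueryInsertJobOperator. A minimal replacement sketch follows; the task id and SQL are placeholders and not part of this diff.

from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

run_query = BigQueryInsertJobOperator(
    task_id="run_query",          # placeholder
    configuration={
        "query": {
            "query": "SELECT 1",  # placeholder SQL
            "useLegacySql": False,
        }
    },
)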
@@ -2170,6 +2170,10 @@ class BigQueryGetDatasetTablesOperator(GoogleCloudBaseOperator):
  )


+ @deprecated(
+ reason="This operator is deprecated. Please use BigQueryUpdateDatasetOperator.",
+ category=AirflowProviderDeprecationWarning,
+ )
  class BigQueryPatchDatasetOperator(GoogleCloudBaseOperator):
  """Patch a dataset for your Project in BigQuery.

@@ -2214,11 +2218,6 @@ class BigQueryPatchDatasetOperator(GoogleCloudBaseOperator):
  impersonation_chain: str | Sequence[str] | None = None,
  **kwargs,
  ) -> None:
- warnings.warn(
- "This operator is deprecated. Please use BigQueryUpdateDatasetOperator.",
- AirflowProviderDeprecationWarning,
- stacklevel=2,
- )
  self.dataset_id = dataset_id
  self.project_id = project_id
  self.gcp_conn_id = gcp_conn_id
@@ -2878,6 +2877,7 @@ class BigQueryInsertJobOperator(GoogleCloudBaseOperator, _BigQueryOpenLineageMix
  job_id=self.job_id,
  project_id=self.project_id,
  poll_interval=self.poll_interval,
+ impersonation_chain=self.impersonation_chain,
  ),
  method_name="execute_complete",
  )
@@ -236,7 +239,9 @@ class CloudDataTransferServiceCreateJobOperator(GoogleCloudBaseOperator):
  **kwargs,
  ) -> None:
  super().__init__(**kwargs)
- self.body = deepcopy(body)
+ self.body = body
+ if isinstance(self.body, dict):
+ self.body = deepcopy(body)
  self.aws_conn_id = aws_conn_id
  self.gcp_conn_id = gcp_conn_id
  self.api_version = api_version
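The last hunk makes CloudDataTransferServiceCreateJobOperator deep-copy the body argument only when it is a plain dict, storing any other body type as-is. A minimal sketch of that guarded-copy behaviour, using a hypothetical store_body helper that is not part of the operator.

from copy import deepcopy
from typing import Any


def store_body(body: Any) -> Any:
    # Mirror of the guarded copy above: only plain dicts are copied defensively.
    if isinstance(body, dict):
        return deepcopy(body)
    return body  # e.g. a non-dict or templated value is stored unchanged


template = {"description": "transfer", "status": "ENABLED"}
stored = store_body(template)
stored["status"] = "DISABLED"
print(template["status"])  # ENABLED -- the caller's dict is not mutated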