apache-airflow-providers-google 10.14.0rc1__py3-none-any.whl → 10.15.0rc1__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +1 -2
  3. airflow/providers/google/cloud/hooks/automl.py +13 -13
  4. airflow/providers/google/cloud/hooks/bigquery.py +208 -256
  5. airflow/providers/google/cloud/hooks/bigquery_dts.py +6 -6
  6. airflow/providers/google/cloud/hooks/bigtable.py +8 -8
  7. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  8. airflow/providers/google/cloud/hooks/cloud_build.py +19 -20
  9. airflow/providers/google/cloud/hooks/cloud_composer.py +4 -4
  10. airflow/providers/google/cloud/hooks/cloud_memorystore.py +10 -10
  11. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  12. airflow/providers/google/cloud/hooks/cloud_sql.py +18 -19
  13. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +3 -3
  14. airflow/providers/google/cloud/hooks/compute.py +16 -16
  15. airflow/providers/google/cloud/hooks/compute_ssh.py +1 -1
  16. airflow/providers/google/cloud/hooks/datacatalog.py +22 -22
  17. airflow/providers/google/cloud/hooks/dataflow.py +48 -49
  18. airflow/providers/google/cloud/hooks/dataform.py +16 -16
  19. airflow/providers/google/cloud/hooks/datafusion.py +15 -15
  20. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  21. airflow/providers/google/cloud/hooks/dataplex.py +19 -19
  22. airflow/providers/google/cloud/hooks/dataprep.py +10 -10
  23. airflow/providers/google/cloud/hooks/dataproc.py +132 -14
  24. airflow/providers/google/cloud/hooks/dataproc_metastore.py +13 -13
  25. airflow/providers/google/cloud/hooks/datastore.py +3 -3
  26. airflow/providers/google/cloud/hooks/dlp.py +25 -25
  27. airflow/providers/google/cloud/hooks/gcs.py +39 -27
  28. airflow/providers/google/cloud/hooks/gdm.py +3 -3
  29. airflow/providers/google/cloud/hooks/kms.py +3 -3
  30. airflow/providers/google/cloud/hooks/kubernetes_engine.py +63 -48
  31. airflow/providers/google/cloud/hooks/life_sciences.py +13 -12
  32. airflow/providers/google/cloud/hooks/looker.py +8 -9
  33. airflow/providers/google/cloud/hooks/mlengine.py +12 -12
  34. airflow/providers/google/cloud/hooks/natural_language.py +2 -2
  35. airflow/providers/google/cloud/hooks/os_login.py +1 -1
  36. airflow/providers/google/cloud/hooks/pubsub.py +9 -9
  37. airflow/providers/google/cloud/hooks/secret_manager.py +1 -1
  38. airflow/providers/google/cloud/hooks/spanner.py +11 -11
  39. airflow/providers/google/cloud/hooks/speech_to_text.py +1 -1
  40. airflow/providers/google/cloud/hooks/stackdriver.py +7 -7
  41. airflow/providers/google/cloud/hooks/tasks.py +11 -11
  42. airflow/providers/google/cloud/hooks/text_to_speech.py +1 -1
  43. airflow/providers/google/cloud/hooks/translate.py +1 -1
  44. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +13 -13
  45. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +6 -6
  46. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +45 -50
  47. airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +13 -13
  48. airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +9 -9
  49. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +128 -11
  50. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +10 -10
  51. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +8 -8
  52. airflow/providers/google/cloud/hooks/video_intelligence.py +2 -2
  53. airflow/providers/google/cloud/hooks/vision.py +1 -1
  54. airflow/providers/google/cloud/hooks/workflows.py +10 -10
  55. airflow/providers/google/cloud/links/datafusion.py +12 -5
  56. airflow/providers/google/cloud/operators/bigquery.py +11 -11
  57. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +3 -1
  58. airflow/providers/google/cloud/operators/dataflow.py +16 -16
  59. airflow/providers/google/cloud/operators/datafusion.py +9 -1
  60. airflow/providers/google/cloud/operators/dataproc.py +444 -69
  61. airflow/providers/google/cloud/operators/kubernetes_engine.py +6 -6
  62. airflow/providers/google/cloud/operators/life_sciences.py +10 -9
  63. airflow/providers/google/cloud/operators/mlengine.py +96 -96
  64. airflow/providers/google/cloud/operators/pubsub.py +2 -0
  65. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +33 -3
  66. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +59 -2
  67. airflow/providers/google/cloud/secrets/secret_manager.py +8 -7
  68. airflow/providers/google/cloud/sensors/bigquery.py +20 -16
  69. airflow/providers/google/cloud/sensors/cloud_composer.py +11 -8
  70. airflow/providers/google/cloud/sensors/dataproc_metastore.py +12 -2
  71. airflow/providers/google/cloud/sensors/gcs.py +8 -7
  72. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +1 -0
  73. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +4 -4
  74. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -0
  75. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  76. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  77. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +1 -1
  78. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +1 -1
  79. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +1 -1
  80. airflow/providers/google/cloud/transfers/presto_to_gcs.py +1 -1
  81. airflow/providers/google/cloud/transfers/s3_to_gcs.py +3 -3
  82. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  83. airflow/providers/google/cloud/transfers/sql_to_gcs.py +3 -3
  84. airflow/providers/google/cloud/transfers/trino_to_gcs.py +1 -1
  85. airflow/providers/google/cloud/triggers/bigquery.py +12 -12
  86. airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -1
  87. airflow/providers/google/cloud/triggers/cloud_batch.py +3 -1
  88. airflow/providers/google/cloud/triggers/cloud_build.py +2 -2
  89. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  90. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +6 -6
  91. airflow/providers/google/cloud/triggers/dataflow.py +3 -1
  92. airflow/providers/google/cloud/triggers/datafusion.py +2 -2
  93. airflow/providers/google/cloud/triggers/dataplex.py +2 -2
  94. airflow/providers/google/cloud/triggers/dataproc.py +34 -14
  95. airflow/providers/google/cloud/triggers/gcs.py +12 -8
  96. airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -2
  97. airflow/providers/google/cloud/triggers/mlengine.py +2 -2
  98. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  99. airflow/providers/google/cloud/triggers/vertex_ai.py +99 -0
  100. airflow/providers/google/cloud/utils/bigquery.py +2 -2
  101. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  102. airflow/providers/google/cloud/utils/dataform.py +1 -1
  103. airflow/providers/google/cloud/utils/dataproc.py +25 -0
  104. airflow/providers/google/cloud/utils/field_validator.py +2 -2
  105. airflow/providers/google/cloud/utils/helpers.py +2 -2
  106. airflow/providers/google/cloud/utils/mlengine_operator_utils.py +1 -1
  107. airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +1 -1
  108. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  109. airflow/providers/google/common/hooks/base_google.py +87 -23
  110. airflow/providers/google/common/hooks/discovery_api.py +2 -2
  111. airflow/providers/google/common/utils/id_token_credentials.py +5 -5
  112. airflow/providers/google/firebase/hooks/firestore.py +3 -3
  113. airflow/providers/google/get_provider_info.py +7 -2
  114. airflow/providers/google/leveldb/hooks/leveldb.py +4 -4
  115. airflow/providers/google/marketing_platform/hooks/analytics.py +11 -14
  116. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +11 -11
  117. airflow/providers/google/marketing_platform/hooks/display_video.py +13 -13
  118. airflow/providers/google/marketing_platform/hooks/search_ads.py +4 -4
  119. airflow/providers/google/marketing_platform/operators/analytics.py +37 -32
  120. airflow/providers/google/suite/hooks/calendar.py +2 -2
  121. airflow/providers/google/suite/hooks/drive.py +7 -7
  122. airflow/providers/google/suite/hooks/sheets.py +8 -8
  123. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/METADATA +11 -11
  124. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/RECORD +126 -124
  125. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/WHEEL +0 -0
  126. {apache_airflow_providers_google-10.14.0rc1.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/entry_points.txt +0 -0
@@ -43,7 +43,7 @@ if TYPE_CHECKING:
 
 
 def get_object_id(obj: dict) -> str:
-    """Returns unique id of the object."""
+    """Return unique id of the object."""
     return obj["name"].rpartition("/")[-1]
 
 
@@ -101,7 +101,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
 
     def get_conn(self) -> DataTransferServiceClient:
         """
-        Retrieves connection to Google Bigquery.
+        Retrieve connection to Google Bigquery.
 
         :return: Google Bigquery API client
         """
@@ -122,7 +122,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> TransferConfig:
         """
-        Creates a new data transfer configuration.
+        Create a new data transfer configuration.
 
         :param transfer_config: Data transfer configuration to create.
         :param project_id: The BigQuery project id where the transfer configuration should be
@@ -164,7 +164,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> None:
         """
-        Deletes transfer configuration.
+        Delete transfer configuration.
 
         :param transfer_config_id: Id of transfer config to be used.
         :param project_id: The BigQuery project id where the transfer configuration should be
@@ -252,7 +252,7 @@ class BiqQueryDataTransferServiceHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> TransferRun:
         """
-        Returns information about the particular transfer run.
+        Return information about the particular transfer run.
 
         :param run_id: ID of the transfer run.
         :param transfer_config_id: ID of transfer config to be used.
@@ -327,7 +327,7 @@ class AsyncBiqQueryDataTransferServiceHook(GoogleBaseAsyncHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Returns information about the particular transfer run.
+        Return information about the particular transfer run.
 
         :param run_id: ID of the transfer run.
         :param config_id: ID of transfer config to be used.
@@ -72,7 +72,7 @@ class BigtableHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def get_instance(self, instance_id: str, project_id: str) -> Instance | None:
         """
-        Retrieves and returns the specified Cloud Bigtable instance if it exists, otherwise returns None.
+        Retrieve and returns the specified Cloud Bigtable instance if it exists, otherwise returns None.
 
         :param instance_id: The ID of the Cloud Bigtable instance.
         :param project_id: Optional, Google Cloud project ID where the
@@ -87,7 +87,7 @@ class BigtableHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def delete_instance(self, instance_id: str, project_id: str) -> None:
         """
-        Deletes the specified Cloud Bigtable instance.
+        Delete the specified Cloud Bigtable instance.
 
         Raises google.api_core.exceptions.NotFound if the Cloud Bigtable instance does
         not exist.
@@ -121,7 +121,7 @@ class BigtableHook(GoogleBaseHook):
         timeout: float | None = None,
     ) -> Instance:
         """
-        Creates new instance.
+        Create new instance.
 
         :param instance_id: The ID for the new instance.
         :param main_cluster_id: The ID for main cluster for the new instance.
@@ -219,7 +219,7 @@ class BigtableHook(GoogleBaseHook):
         column_families: dict[str, GarbageCollectionRule] | None = None,
     ) -> None:
         """
-        Creates the specified Cloud Bigtable table.
+        Create the specified Cloud Bigtable table.
 
         Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists.
 
@@ -241,7 +241,7 @@ class BigtableHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def delete_table(self, instance_id: str, table_id: str, project_id: str) -> None:
         """
-        Deletes the specified table in Cloud Bigtable.
+        Delete the specified table in Cloud Bigtable.
 
         Raises google.api_core.exceptions.NotFound if the table does not exist.
 
@@ -260,7 +260,7 @@ class BigtableHook(GoogleBaseHook):
     @staticmethod
     def update_cluster(instance: Instance, cluster_id: str, nodes: int) -> None:
         """
-        Updates number of nodes in the specified Cloud Bigtable cluster.
+        Update number of nodes in the specified Cloud Bigtable cluster.
 
         Raises google.api_core.exceptions.NotFound if the cluster does not exist.
 
@@ -277,7 +277,7 @@ class BigtableHook(GoogleBaseHook):
     @staticmethod
     def get_column_families_for_table(instance: Instance, table_id: str) -> dict[str, ColumnFamily]:
         """
-        Fetches Column Families for the specified table in Cloud Bigtable.
+        Fetch Column Families for the specified table in Cloud Bigtable.
 
         :param instance: The Cloud Bigtable instance that owns the table.
         :param table_id: The ID of the table in Cloud Bigtable to fetch Column Families
@@ -289,7 +289,7 @@ class BigtableHook(GoogleBaseHook):
     @staticmethod
     def get_cluster_states_for_table(instance: Instance, table_id: str) -> dict[str, ClusterState]:
         """
-        Fetches Cluster States for the specified table in Cloud Bigtable.
+        Fetch Cluster States for the specified table in Cloud Bigtable.
 
         Raises google.api_core.exceptions.NotFound if the table does not exist.
 
@@ -66,7 +66,7 @@ class CloudBatchHook(GoogleBaseHook):
 
     def get_conn(self):
         """
-        Retrieves connection to GCE Batch.
+        Retrieve connection to GCE Batch.
 
         :return: Google Batch Service client object.
         """
@@ -18,9 +18,9 @@
 """Hook for Google Cloud Build service."""
 from __future__ import annotations
 
-import warnings
 from typing import TYPE_CHECKING, Sequence
 
+from deprecated import deprecated
 from google.api_core.client_options import ClientOptions
 from google.api_core.exceptions import AlreadyExists
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
@@ -84,7 +84,7 @@ class CloudBuildHook(GoogleBaseHook):
             raise AirflowException("Could not retrieve Build ID from Operation.")
 
     def wait_for_operation(self, operation: Operation, timeout: float | None = None):
-        """Waits for long-lasting operation to complete."""
+        """Wait for long-lasting operation to complete."""
        try:
             return operation.result(timeout=timeout)
        except Exception:
@@ -93,7 +93,7 @@ class CloudBuildHook(GoogleBaseHook):
 
     def get_conn(self, location: str = "global") -> CloudBuildClient:
         """
-        Retrieves the connection to Google Cloud Build.
+        Retrieve the connection to Google Cloud Build.
 
         :param location: The location of the project.
 
@@ -121,7 +121,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> Build:
         """
-        Cancels a build in progress.
+        Cancel a build in progress.
 
         :param id_: The ID of the build.
         :param project_id: Optional, Google Cloud Project project_id where the function belongs.
@@ -158,7 +158,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> tuple[Operation, str]:
         """
-        Starts a build with the specified configuration without waiting for it to finish.
+        Start a build with the specified configuration without waiting for it to finish.
 
         :param build: The build resource to create. If a dict is provided, it must be of the same form
             as the protobuf message `google.cloud.devtools.cloudbuild_v1.types.Build`
@@ -189,6 +189,10 @@ class CloudBuildHook(GoogleBaseHook):
         return operation, id_
 
     @GoogleBaseHook.fallback_to_default_project_id
+    @deprecated(
+        reason="Please use `create_build_without_waiting_for_result`",
+        category=AirflowProviderDeprecationWarning,
+    )
     def create_build(
         self,
         build: dict | Build,
@@ -199,7 +203,7 @@ class CloudBuildHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Build:
         """
-        Starts a build with the specified configuration.
+        Start a build with the specified configuration.
 
         :param build: The build resource to create. If a dict is provided, it must be of the same form
             as the protobuf message `google.cloud.devtools.cloudbuild_v1.types.Build`
@@ -213,11 +217,6 @@ class CloudBuildHook(GoogleBaseHook):
         :param metadata: Optional, additional metadata that is provided to the method.
 
         """
-        warnings.warn(
-            "This method is deprecated. Please use `create_build_without_waiting_for_result`.",
-            AirflowProviderDeprecationWarning,
-            stacklevel=2,
-        )
         client = self.get_conn()
 
         self.log.info("Start creating build...")
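The two hunks above show the second recurring pattern in this release: the inline `warnings.warn(...)` call in `create_build` is removed and replaced by the `@deprecated` decorator from the `deprecated` package, parameterized with Airflow's `AirflowProviderDeprecationWarning`. A minimal, self-contained sketch of that pattern, assuming the `Deprecated` and `apache-airflow` packages are installed (the hook class and method names below are illustrative, not provider code):

```python
from deprecated import deprecated
from airflow.exceptions import AirflowProviderDeprecationWarning


class ExampleHook:
    """Hypothetical hook used only to illustrate the deprecation pattern."""

    def create_thing_without_waiting_for_result(self) -> str:
        """Start the operation and return immediately."""
        return "operation-started"

    @deprecated(
        reason="Please use `create_thing_without_waiting_for_result`",
        category=AirflowProviderDeprecationWarning,
    )
    def create_thing(self) -> str:
        """Start the operation the old way (deprecated)."""
        # The decorator emits AirflowProviderDeprecationWarning on every call,
        # replacing a hand-written warnings.warn(...) inside the method body.
        return self.create_thing_without_waiting_for_result()
```

Declaring the deprecation as a decorator keeps it visible next to the method signature and removes warning boilerplate from the method body.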
@@ -251,7 +250,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> BuildTrigger:
         """
-        Creates a new BuildTrigger.
+        Create a new BuildTrigger.
 
         :param trigger: The BuildTrigger to create. If a dict is provided, it must be of the same form
             as the protobuf message `google.cloud.devtools.cloudbuild_v1.types.BuildTrigger`
@@ -293,7 +292,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> None:
         """
-        Deletes a BuildTrigger by its project ID and trigger ID.
+        Delete a BuildTrigger by its project ID and trigger ID.
 
         :param trigger_id: The ID of the BuildTrigger to delete.
         :param project_id: Optional, Google Cloud Project project_id where the function belongs.
@@ -329,7 +328,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> Build:
         """
-        Returns information about a previously requested build.
+        Return information about a previously requested build.
 
         :param id_: The ID of the build.
         :param project_id: Optional, Google Cloud Project project_id where the function belongs.
@@ -367,7 +366,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> BuildTrigger:
         """
-        Returns information about a BuildTrigger.
+        Return information about a BuildTrigger.
 
         :param trigger_id: The ID of the BuildTrigger to get.
         :param project_id: Optional, Google Cloud Project project_id where the function belongs.
@@ -406,7 +405,7 @@ class CloudBuildHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> list[BuildTrigger]:
         """
-        Lists existing BuildTriggers.
+        List existing BuildTriggers.
 
         :param project_id: Google Cloud Project project_id where the function belongs.
             If set to None or missing, the default project_id from the GCP connection is used.
@@ -455,7 +454,7 @@ class CloudBuildHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ) -> list[Build]:
         """
-        Lists previously requested builds.
+        List previously requested builds.
 
         :param project_id: Google Cloud Project project_id where the function belongs.
             If set to None or missing, the default project_id from the Google Cloud connection is used.
@@ -552,7 +551,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> Build:
         """
-        Runs a BuildTrigger at a particular source revision.
+        Run a BuildTrigger at a particular source revision.
 
         :param trigger_id: The ID of the trigger.
         :param source: Source to build against this trigger. If a dict is provided, it must be of the
@@ -600,7 +599,7 @@ class CloudBuildHook(GoogleBaseHook):
         location: str = "global",
     ) -> BuildTrigger:
         """
-        Updates a BuildTrigger by its project ID and trigger ID.
+        Update a BuildTrigger by its project ID and trigger ID.
 
         :param trigger_id: The ID of the trigger.
         :param trigger: The BuildTrigger to create. If a dict is provided, it must be of the same form
@@ -651,7 +650,7 @@ class CloudBuildAsyncHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
         location: str = "global",
     ) -> Build:
-        """Retrieves a Cloud Build with a specified id."""
+        """Retrieve a Cloud Build with a specified id."""
         if not id_:
             raise AirflowException("Google Cloud Build id is required.")
 
@@ -60,7 +60,7 @@ class CloudComposerHook(GoogleBaseHook):
         super().__init__(**kwargs)
 
     def get_environment_client(self) -> EnvironmentsClient:
-        """Retrieves client library object that allow access Environments service."""
+        """Retrieve client library object that allow access Environments service."""
         return EnvironmentsClient(
             credentials=self.get_credentials(),
             client_info=CLIENT_INFO,
@@ -68,7 +68,7 @@ class CloudComposerHook(GoogleBaseHook):
         )
 
     def get_image_versions_client(self) -> ImageVersionsClient:
-        """Retrieves client library object that allow access Image Versions service."""
+        """Retrieve client library object that allow access Image Versions service."""
         return ImageVersionsClient(
             credentials=self.get_credentials(),
             client_info=CLIENT_INFO,
@@ -76,7 +76,7 @@ class CloudComposerHook(GoogleBaseHook):
         )
 
     def wait_for_operation(self, operation: Operation, timeout: float | None = None):
-        """Waits for long-lasting operation to complete."""
+        """Wait for long-lasting operation to complete."""
        try:
             return operation.result(timeout=timeout)
        except Exception:
@@ -309,7 +309,7 @@ class CloudComposerAsyncHook(GoogleBaseHook):
     client_options = ClientOptions(api_endpoint="composer.googleapis.com:443")
 
     def get_environment_client(self) -> EnvironmentsAsyncClient:
-        """Retrieves client library object that allow access Environments service."""
+        """Retrieve client library object that allow access Environments service."""
         return EnvironmentsAsyncClient(
             credentials=self.get_credentials(),
             client_info=CLIENT_INFO,
@@ -87,7 +87,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         self._client: CloudRedisClient | None = None
 
     def get_conn(self) -> CloudRedisClient:
-        """Retrieves client library object that allow access to Cloud Memorystore service."""
+        """Retrieve client library object that allow access to Cloud Memorystore service."""
         if not self._client:
             self._client = CloudRedisClient(credentials=self.get_credentials())
         return self._client
@@ -122,7 +122,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Creates a Redis instance based on the specified tier and memory size.
+        Create a Redis instance based on the specified tier and memory size.
 
         By default, the instance is accessible from the project's `default network
         <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__.
@@ -191,7 +191,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Deletes a specific Redis instance. Instance stops serving and data is deleted.
+        Delete a specific Redis instance. Instance stops serving and data is deleted.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
         :param instance: The logical name of the Redis instance in the customer project.
@@ -322,7 +322,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Gets the details of a specific Redis instance.
+        Get the details of a specific Redis instance.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
         :param instance: The logical name of the Redis instance in the customer project.
@@ -440,7 +440,7 @@ class CloudMemorystoreHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Updates the metadata and configuration of a specific Redis instance.
+        Update the metadata and configuration of a specific Redis instance.
 
         :param update_mask: Required. Mask of fields to update. At least one path must be supplied in this
             field. The elements of the repeated paths field may only include these fields from ``Instance``:
@@ -519,7 +519,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         self._client: CloudMemcacheClient | None = None
 
     def get_conn(self):
-        """Retrieves client library object that allow access to Cloud Memorystore Memcached service."""
+        """Retrieve client library object that allow access to Cloud Memorystore Memcached service."""
         if not self._client:
             self._client = CloudMemcacheClient(credentials=self.get_credentials())
         return self._client
@@ -599,7 +599,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Creates a Memcached instance based on the specified tier and memory size.
+        Create a Memcached instance based on the specified tier and memory size.
 
         By default, the instance is accessible from the project's `default network
         <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`__.
@@ -675,7 +675,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Deletes a specific Memcached instance. Instance stops serving and data is deleted.
+        Delete a specific Memcached instance. Instance stops serving and data is deleted.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
         :param instance: The logical name of the Memcached instance in the customer project.
@@ -722,7 +722,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Gets the details of a specific Memcached instance.
+        Get the details of a specific Memcached instance.
 
         :param location: The location of the Cloud Memorystore instance (for example europe-west1)
         :param instance: The logical name of the Memcached instance in the customer project.
@@ -792,7 +792,7 @@ class CloudMemorystoreMemcachedHook(GoogleBaseHook):
         metadata: Sequence[tuple[str, str]] = (),
     ):
         """
-        Updates the metadata and configuration of a specific Memcached instance.
+        Update the metadata and configuration of a specific Memcached instance.
 
         :param update_mask: Required. Mask of fields to update. At least one path must be supplied in this
             field. The elements of the repeated paths field may only include these fields from ``Instance``:
@@ -67,7 +67,7 @@ class CloudRunHook(GoogleBaseHook):
 
     def get_conn(self):
         """
-        Retrieves connection to Cloud Run.
+        Retrieve connection to Cloud Run.
 
         :return: Cloud Run Jobs client object.
         """
@@ -112,7 +112,7 @@ class CloudSQLHook(GoogleBaseHook):
 
     def get_conn(self) -> Resource:
         """
-        Retrieves connection to Cloud SQL.
+        Retrieve connection to Cloud SQL.
 
         :return: Google Cloud SQL services object.
         """
@@ -124,7 +124,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def get_instance(self, instance: str, project_id: str) -> dict:
         """
-        Retrieves a resource containing information about a Cloud SQL instance.
+        Retrieve a resource containing information about a Cloud SQL instance.
 
         :param instance: Database instance ID. This does not include the project ID.
         :param project_id: Project ID of the project that contains the instance. If set
@@ -142,7 +142,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.operation_in_progress_retry()
     def create_instance(self, body: dict, project_id: str) -> None:
         """
-        Creates a new Cloud SQL instance.
+        Create a new Cloud SQL instance.
 
         :param body: Body required by the Cloud SQL insert API, as described in
             https://cloud.google.com/sql/docs/mysql/admin-api/v1beta4/instances/insert#request-body.
@@ -163,7 +163,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.operation_in_progress_retry()
    def patch_instance(self, body: dict, instance: str, project_id: str) -> None:
         """
-        Updates settings of a Cloud SQL instance.
+        Update settings of a Cloud SQL instance.
 
         Caution: This is not a partial update, so you must include values for
         all the settings that you want to retain.
@@ -188,7 +188,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.operation_in_progress_retry()
     def delete_instance(self, instance: str, project_id: str) -> None:
         """
-        Deletes a Cloud SQL instance.
+        Delete a Cloud SQL instance.
 
         :param project_id: Project ID of the project that contains the instance. If set
             to None or missing, the default project_id from the Google Cloud connection is used.
@@ -211,7 +211,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def get_database(self, instance: str, database: str, project_id: str) -> dict:
         """
-        Retrieves a database resource from a Cloud SQL instance.
+        Retrieve a database resource from a Cloud SQL instance.
 
         :param instance: Database instance ID. This does not include the project ID.
         :param database: Name of the database in the instance.
@@ -231,7 +231,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.operation_in_progress_retry()
     def create_database(self, instance: str, body: dict, project_id: str) -> None:
         """
-        Creates a new database inside a Cloud SQL instance.
+        Create a new database inside a Cloud SQL instance.
 
         :param instance: Database instance ID. This does not include the project ID.
         :param body: The request body, as described in
@@ -259,7 +259,7 @@ class CloudSQLHook(GoogleBaseHook):
         project_id: str,
     ) -> None:
         """
-        Updates a database resource inside a Cloud SQL instance.
+        Update a database resource inside a Cloud SQL instance.
 
         This method supports patch semantics.
         See https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch.
@@ -285,7 +285,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.operation_in_progress_retry()
     def delete_database(self, instance: str, database: str, project_id: str) -> None:
         """
-        Deletes a database from a Cloud SQL instance.
+        Delete a database from a Cloud SQL instance.
 
         :param instance: Database instance ID. This does not include the project ID.
         :param database: Name of the database to be deleted in the instance.
@@ -305,7 +305,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def export_instance(self, instance: str, body: dict, project_id: str):
         """
-        Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump or CSV file.
+        Export data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump or CSV file.
 
         :param instance: Database instance ID of the Cloud SQL instance. This does not include the
             project ID.
@@ -327,7 +327,7 @@ class CloudSQLHook(GoogleBaseHook):
     @GoogleBaseHook.fallback_to_default_project_id
     def import_instance(self, instance: str, body: dict, project_id: str) -> None:
         """
-        Imports data into a Cloud SQL instance from a SQL dump or CSV file in Cloud Storage.
+        Import data into a Cloud SQL instance from a SQL dump or CSV file in Cloud Storage.
 
         :param instance: Database instance ID. This does not include the
             project ID.
@@ -381,7 +381,7 @@ class CloudSQLHook(GoogleBaseHook):
         self, project_id: str, operation_name: str, time_to_sleep: int = TIME_TO_SLEEP_IN_SECONDS
     ) -> None:
         """
-        Waits for the named operation to complete - checks status of the asynchronous call.
+        Wait for the named operation to complete - checks status of the asynchronous call.
 
         :param project_id: Project ID of the project that contains the instance.
         :param operation_name: Name of the operation.
@@ -593,7 +593,7 @@ class CloudSqlProxyRunner(LoggingMixin):
 
     def start_proxy(self) -> None:
         """
-        Starts Cloud SQL Proxy.
+        Start Cloud SQL Proxy.
 
         You have to remember to stop the proxy if you started it!
         """
@@ -632,7 +632,7 @@ class CloudSqlProxyRunner(LoggingMixin):
 
     def stop_proxy(self) -> None:
         """
-        Stops running proxy.
+        Stop running proxy.
 
         You should stop the proxy after you stop using it.
         """
@@ -661,7 +661,7 @@ class CloudSqlProxyRunner(LoggingMixin):
             os.remove(self.credentials_path)
 
     def get_proxy_version(self) -> str | None:
-        """Returns version of the Cloud SQL Proxy."""
+        """Return version of the Cloud SQL Proxy."""
         self._download_sql_proxy_if_needed()
         command_to_run = [self.sql_proxy_path]
         command_to_run.extend(["--version"])
@@ -675,7 +675,7 @@ class CloudSqlProxyRunner(LoggingMixin):
 
     def get_socket_path(self) -> str:
         """
-        Retrieves UNIX socket path used by Cloud SQL Proxy.
+        Retrieve UNIX socket path used by Cloud SQL Proxy.
 
         :return: The dynamically generated path for the socket created by the proxy.
         """
@@ -771,9 +771,8 @@ class CloudSQLDatabaseHook(BaseHook):
         gcp_conn_id: str = "google_cloud_default",
         default_gcp_project_id: str | None = None,
         sql_proxy_binary_path: str | None = None,
-        **kwargs,
     ) -> None:
-        super().__init__(**kwargs)
+        super().__init__()
         self.gcp_conn_id = gcp_conn_id
         self.gcp_cloudsql_conn_id = gcp_cloudsql_conn_id
         self.cloudsql_connection = self.get_connection(self.gcp_cloudsql_conn_id)
@@ -850,7 +849,7 @@ class CloudSQLDatabaseHook(BaseHook):
 
     def validate_socket_path_length(self) -> None:
         """
-        Validates sockets path length.
+        Validate sockets path length.
 
         :return: None or rises AirflowException
         """
@@ -510,7 +510,7 @@ class CloudDataTransferServiceAsyncHook(GoogleBaseAsyncHook):
 
     def get_conn(self) -> StorageTransferServiceAsyncClient:
         """
-        Returns async connection to the Storage Transfer Service.
+        Return async connection to the Storage Transfer Service.
 
         :return: Google Storage Transfer asynchronous client.
         """
@@ -520,7 +520,7 @@ class CloudDataTransferServiceAsyncHook(GoogleBaseAsyncHook):
 
     async def get_jobs(self, job_names: list[str]) -> ListTransferJobsAsyncPager:
         """
-        Gets the latest state of a long-running operations in Google Storage Transfer Service.
+        Get the latest state of a long-running operations in Google Storage Transfer Service.
 
         :param job_names: (Required) List of names of the jobs to be fetched.
         :return: Object that yields Transfer jobs.
@@ -533,7 +533,7 @@ class CloudDataTransferServiceAsyncHook(GoogleBaseAsyncHook):
 
     async def get_latest_operation(self, job: TransferJob) -> Message | None:
         """
-        Gets the latest operation of the given TransferJob instance.
+        Get the latest operation of the given TransferJob instance.
 
         :param job: Transfer job instance.
         :return: The latest job operation.