apache-airflow-providers-google 10.14.0rc2__py3-none-any.whl → 10.15.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121) hide show
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/cloud/hooks/automl.py +13 -13
  3. airflow/providers/google/cloud/hooks/bigquery.py +193 -246
  4. airflow/providers/google/cloud/hooks/bigquery_dts.py +6 -6
  5. airflow/providers/google/cloud/hooks/bigtable.py +8 -8
  6. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  7. airflow/providers/google/cloud/hooks/cloud_build.py +19 -20
  8. airflow/providers/google/cloud/hooks/cloud_composer.py +4 -4
  9. airflow/providers/google/cloud/hooks/cloud_memorystore.py +10 -10
  10. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  11. airflow/providers/google/cloud/hooks/cloud_sql.py +17 -17
  12. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +3 -3
  13. airflow/providers/google/cloud/hooks/compute.py +16 -16
  14. airflow/providers/google/cloud/hooks/compute_ssh.py +1 -1
  15. airflow/providers/google/cloud/hooks/datacatalog.py +22 -22
  16. airflow/providers/google/cloud/hooks/dataflow.py +48 -49
  17. airflow/providers/google/cloud/hooks/dataform.py +16 -16
  18. airflow/providers/google/cloud/hooks/datafusion.py +15 -15
  19. airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
  20. airflow/providers/google/cloud/hooks/dataplex.py +19 -19
  21. airflow/providers/google/cloud/hooks/dataprep.py +8 -8
  22. airflow/providers/google/cloud/hooks/dataproc.py +88 -0
  23. airflow/providers/google/cloud/hooks/dataproc_metastore.py +13 -13
  24. airflow/providers/google/cloud/hooks/datastore.py +3 -3
  25. airflow/providers/google/cloud/hooks/dlp.py +25 -25
  26. airflow/providers/google/cloud/hooks/gcs.py +25 -23
  27. airflow/providers/google/cloud/hooks/gdm.py +3 -3
  28. airflow/providers/google/cloud/hooks/kms.py +3 -3
  29. airflow/providers/google/cloud/hooks/kubernetes_engine.py +63 -48
  30. airflow/providers/google/cloud/hooks/life_sciences.py +13 -12
  31. airflow/providers/google/cloud/hooks/looker.py +7 -7
  32. airflow/providers/google/cloud/hooks/mlengine.py +12 -12
  33. airflow/providers/google/cloud/hooks/natural_language.py +2 -2
  34. airflow/providers/google/cloud/hooks/os_login.py +1 -1
  35. airflow/providers/google/cloud/hooks/pubsub.py +9 -9
  36. airflow/providers/google/cloud/hooks/secret_manager.py +1 -1
  37. airflow/providers/google/cloud/hooks/spanner.py +11 -11
  38. airflow/providers/google/cloud/hooks/speech_to_text.py +1 -1
  39. airflow/providers/google/cloud/hooks/stackdriver.py +7 -7
  40. airflow/providers/google/cloud/hooks/tasks.py +11 -11
  41. airflow/providers/google/cloud/hooks/text_to_speech.py +1 -1
  42. airflow/providers/google/cloud/hooks/translate.py +1 -1
  43. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +13 -13
  44. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +6 -6
  45. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +45 -50
  46. airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +13 -13
  47. airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +9 -9
  48. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +128 -11
  49. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +10 -10
  50. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +8 -8
  51. airflow/providers/google/cloud/hooks/video_intelligence.py +2 -2
  52. airflow/providers/google/cloud/hooks/vision.py +1 -1
  53. airflow/providers/google/cloud/hooks/workflows.py +10 -10
  54. airflow/providers/google/cloud/links/datafusion.py +12 -5
  55. airflow/providers/google/cloud/operators/bigquery.py +9 -11
  56. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +3 -1
  57. airflow/providers/google/cloud/operators/dataflow.py +16 -16
  58. airflow/providers/google/cloud/operators/datafusion.py +9 -1
  59. airflow/providers/google/cloud/operators/dataproc.py +298 -65
  60. airflow/providers/google/cloud/operators/kubernetes_engine.py +6 -6
  61. airflow/providers/google/cloud/operators/life_sciences.py +10 -9
  62. airflow/providers/google/cloud/operators/mlengine.py +96 -96
  63. airflow/providers/google/cloud/operators/pubsub.py +2 -0
  64. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +33 -3
  65. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +59 -2
  66. airflow/providers/google/cloud/secrets/secret_manager.py +8 -7
  67. airflow/providers/google/cloud/sensors/bigquery.py +20 -16
  68. airflow/providers/google/cloud/sensors/cloud_composer.py +11 -8
  69. airflow/providers/google/cloud/sensors/gcs.py +8 -7
  70. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +4 -4
  71. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +1 -1
  72. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  73. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +1 -1
  74. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +1 -1
  75. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +1 -1
  76. airflow/providers/google/cloud/transfers/presto_to_gcs.py +1 -1
  77. airflow/providers/google/cloud/transfers/s3_to_gcs.py +3 -3
  78. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +1 -1
  79. airflow/providers/google/cloud/transfers/sql_to_gcs.py +3 -3
  80. airflow/providers/google/cloud/transfers/trino_to_gcs.py +1 -1
  81. airflow/providers/google/cloud/triggers/bigquery.py +12 -12
  82. airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -1
  83. airflow/providers/google/cloud/triggers/cloud_batch.py +3 -1
  84. airflow/providers/google/cloud/triggers/cloud_build.py +2 -2
  85. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  86. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +6 -6
  87. airflow/providers/google/cloud/triggers/dataflow.py +3 -1
  88. airflow/providers/google/cloud/triggers/datafusion.py +2 -2
  89. airflow/providers/google/cloud/triggers/dataplex.py +2 -2
  90. airflow/providers/google/cloud/triggers/dataproc.py +2 -2
  91. airflow/providers/google/cloud/triggers/gcs.py +12 -8
  92. airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -2
  93. airflow/providers/google/cloud/triggers/mlengine.py +2 -2
  94. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  95. airflow/providers/google/cloud/triggers/vertex_ai.py +99 -0
  96. airflow/providers/google/cloud/utils/bigquery.py +2 -2
  97. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  98. airflow/providers/google/cloud/utils/dataform.py +1 -1
  99. airflow/providers/google/cloud/utils/field_validator.py +2 -2
  100. airflow/providers/google/cloud/utils/helpers.py +2 -2
  101. airflow/providers/google/cloud/utils/mlengine_operator_utils.py +1 -1
  102. airflow/providers/google/cloud/utils/mlengine_prediction_summary.py +1 -1
  103. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  104. airflow/providers/google/common/hooks/base_google.py +29 -22
  105. airflow/providers/google/common/hooks/discovery_api.py +2 -2
  106. airflow/providers/google/common/utils/id_token_credentials.py +5 -5
  107. airflow/providers/google/firebase/hooks/firestore.py +3 -3
  108. airflow/providers/google/get_provider_info.py +7 -2
  109. airflow/providers/google/leveldb/hooks/leveldb.py +2 -2
  110. airflow/providers/google/marketing_platform/hooks/analytics.py +11 -14
  111. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +11 -11
  112. airflow/providers/google/marketing_platform/hooks/display_video.py +13 -13
  113. airflow/providers/google/marketing_platform/hooks/search_ads.py +4 -4
  114. airflow/providers/google/marketing_platform/operators/analytics.py +37 -32
  115. airflow/providers/google/suite/hooks/calendar.py +2 -2
  116. airflow/providers/google/suite/hooks/drive.py +7 -7
  117. airflow/providers/google/suite/hooks/sheets.py +8 -8
  118. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/METADATA +11 -11
  119. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/RECORD +121 -120
  120. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/WHEEL +0 -0
  121. {apache_airflow_providers_google-10.14.0rc2.dist-info → apache_airflow_providers_google-10.15.0rc1.dist-info}/entry_points.txt +0 -0
@@ -50,7 +50,7 @@ class DataformHook(GoogleBaseHook):
50
50
  super().__init__(**kwargs)
51
51
 
52
52
  def get_dataform_client(self) -> DataformClient:
53
- """Retrieves client library object that allow access to Cloud Dataform service."""
53
+ """Retrieve client library object that allows access to Cloud Dataform service."""
54
54
  return DataformClient(credentials=self.get_credentials())
55
55
 
56
56
  @GoogleBaseHook.fallback_to_default_project_id
@@ -64,7 +64,7 @@ class DataformHook(GoogleBaseHook):
64
64
  timeout: int | None = None,
65
65
  ) -> None:
66
66
  """
67
- Helper method which polls a job to check if it finishes.
67
+ Poll a job to check if it finishes.
68
68
 
69
69
  :param workflow_invocation_id: Id of the Workflow Invocation
70
70
  :param repository_id: Id of the Dataform repository
@@ -117,7 +117,7 @@ class DataformHook(GoogleBaseHook):
117
117
  metadata: Sequence[tuple[str, str]] = (),
118
118
  ) -> CompilationResult:
119
119
  """
120
- Creates a new CompilationResult in a given project and location.
120
+ Create a new CompilationResult in a given project and location.
121
121
 
122
122
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
123
123
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -151,7 +151,7 @@ class DataformHook(GoogleBaseHook):
151
151
  metadata: Sequence[tuple[str, str]] = (),
152
152
  ) -> CompilationResult:
153
153
  """
154
- Fetches a single CompilationResult.
154
+ Fetch a single CompilationResult.
155
155
 
156
156
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
157
157
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -182,7 +182,7 @@ class DataformHook(GoogleBaseHook):
182
182
  metadata: Sequence[tuple[str, str]] = (),
183
183
  ) -> WorkflowInvocation:
184
184
  """
185
- Creates a new WorkflowInvocation in a given Repository.
185
+ Create a new WorkflowInvocation in a given Repository.
186
186
 
187
187
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
188
188
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -213,7 +213,7 @@ class DataformHook(GoogleBaseHook):
213
213
  metadata: Sequence[tuple[str, str]] = (),
214
214
  ) -> WorkflowInvocation:
215
215
  """
216
- Fetches a single WorkflowInvocation.
216
+ Fetch a single WorkflowInvocation.
217
217
 
218
218
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
219
219
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -249,7 +249,7 @@ class DataformHook(GoogleBaseHook):
249
249
  metadata: Sequence[tuple[str, str]] = (),
250
250
  ) -> QueryWorkflowInvocationActionsPager:
251
251
  """
252
- Fetches WorkflowInvocation actions.
252
+ Fetch WorkflowInvocation actions.
253
253
 
254
254
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
255
255
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -286,7 +286,7 @@ class DataformHook(GoogleBaseHook):
286
286
  metadata: Sequence[tuple[str, str]] = (),
287
287
  ):
288
288
  """
289
- Requests cancellation of a running WorkflowInvocation.
289
+ Request cancellation of a running WorkflowInvocation.
290
290
 
291
291
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
292
292
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -336,7 +336,7 @@ class DataformHook(GoogleBaseHook):
336
336
  metadata: Sequence[tuple[str, str]] = (),
337
337
  ) -> Repository:
338
338
  """
339
- Creates repository.
339
+ Create repository.
340
340
 
341
341
  :param project_id: Required. The ID of the Google Cloud project where repository should be.
342
342
  :param region: Required. The ID of the Google Cloud region where repository should be.
@@ -374,7 +374,7 @@ class DataformHook(GoogleBaseHook):
374
374
  metadata: Sequence[tuple[str, str]] = (),
375
375
  ) -> None:
376
376
  """
377
- Deletes repository.
377
+ Delete repository.
378
378
 
379
379
  :param project_id: Required. The ID of the Google Cloud project where repository located.
380
380
  :param region: Required. The ID of the Google Cloud region where repository located.
@@ -411,7 +411,7 @@ class DataformHook(GoogleBaseHook):
411
411
  metadata: Sequence[tuple[str, str]] = (),
412
412
  ) -> Workspace:
413
413
  """
414
- Creates workspace.
414
+ Create workspace.
415
415
 
416
416
  :param project_id: Required. The ID of the Google Cloud project where workspace should be.
417
417
  :param region: Required. The ID of the Google Cloud region where workspace should be.
@@ -448,7 +448,7 @@ class DataformHook(GoogleBaseHook):
448
448
  metadata: Sequence[tuple[str, str]] = (),
449
449
  ):
450
450
  """
451
- Deletes workspace.
451
+ Delete workspace.
452
452
 
453
453
  :param project_id: Required. The ID of the Google Cloud project where workspace located.
454
454
  :param region: Required. The ID of the Google Cloud region where workspace located.
@@ -489,7 +489,7 @@ class DataformHook(GoogleBaseHook):
489
489
  metadata: Sequence[tuple[str, str]] = (),
490
490
  ) -> WriteFileResponse:
491
491
  """
492
- Writes a new file to the specified workspace.
492
+ Write a new file to the specified workspace.
493
493
 
494
494
  :param project_id: Required. The ID of the Google Cloud project where workspace located.
495
495
  :param region: Required. The ID of the Google Cloud region where workspace located.
@@ -535,7 +535,7 @@ class DataformHook(GoogleBaseHook):
535
535
  metadata: Sequence[tuple[str, str]] = (),
536
536
  ) -> dict:
537
537
  """
538
- Makes new directory in specified workspace.
538
+ Make new directory in specified workspace.
539
539
 
540
540
  :param project_id: Required. The ID of the Google Cloud project where workspace located.
541
541
  :param region: Required. The ID of the Google Cloud region where workspace located.
@@ -580,7 +580,7 @@ class DataformHook(GoogleBaseHook):
580
580
  metadata: Sequence[tuple[str, str]] = (),
581
581
  ):
582
582
  """
583
- Removes directory in specified workspace.
583
+ Remove directory in specified workspace.
584
584
 
585
585
  :param project_id: Required. The ID of the Google Cloud project where workspace located.
586
586
  :param region: Required. The ID of the Google Cloud region where workspace located.
@@ -623,7 +623,7 @@ class DataformHook(GoogleBaseHook):
623
623
  metadata: Sequence[tuple[str, str]] = (),
624
624
  ):
625
625
  """
626
- Removes file in specified workspace.
626
+ Remove file in specified workspace.
627
627
 
628
628
  :param project_id: Required. The ID of the Google Cloud project where workspace located.
629
629
  :param region: Required. The ID of the Google Cloud region where workspace located.
@@ -89,7 +89,7 @@ class DataFusionHook(GoogleBaseHook):
89
89
  self.api_version = api_version
90
90
 
91
91
  def wait_for_operation(self, operation: dict[str, Any]) -> dict[str, Any]:
92
- """Waits for long-lasting operation to complete."""
92
+ """Wait for long-lasting operation to complete."""
93
93
  for time_to_wait in exponential_sleep_generator(initial=10, maximum=120):
94
94
  time.sleep(time_to_wait)
95
95
  operation = (
@@ -112,7 +112,7 @@ class DataFusionHook(GoogleBaseHook):
112
112
  failure_states: list[str] | None = None,
113
113
  timeout: int = 5 * 60,
114
114
  ) -> None:
115
- """Polls pipeline state and raises an exception if the state fails or times out."""
115
+ """Poll for pipeline state and raise an exception if the state fails or times out."""
116
116
  failure_states = failure_states or FAILURE_STATES
117
117
  success_states = success_states or SUCCESS_STATES
118
118
  start_time = time.monotonic()
@@ -184,7 +184,7 @@ class DataFusionHook(GoogleBaseHook):
184
184
  )
185
185
 
186
186
  def get_conn(self) -> Resource:
187
- """Retrieves connection to DataFusion."""
187
+ """Retrieve connection to DataFusion."""
188
188
  if not self._conn:
189
189
  http_authorized = self._authorize()
190
190
  self._conn = build(
@@ -219,7 +219,7 @@ class DataFusionHook(GoogleBaseHook):
219
219
  @GoogleBaseHook.fallback_to_default_project_id
220
220
  def delete_instance(self, instance_name: str, location: str, project_id: str) -> Operation:
221
221
  """
222
- Deletes a single Date Fusion instance.
222
+ Delete a single Data Fusion instance.
223
223
 
224
224
  :param instance_name: The name of the instance to delete.
225
225
  :param location: The Cloud Data Fusion location in which to handle the request.
@@ -244,7 +244,7 @@ class DataFusionHook(GoogleBaseHook):
244
244
  project_id: str = PROVIDE_PROJECT_ID,
245
245
  ) -> Operation:
246
246
  """
247
- Creates a new Data Fusion instance in the specified project and location.
247
+ Create a new Data Fusion instance in the specified project and location.
248
248
 
249
249
  :param instance_name: The name of the instance to create.
250
250
  :param instance: An instance of Instance.
@@ -269,7 +269,7 @@ class DataFusionHook(GoogleBaseHook):
269
269
  @GoogleBaseHook.fallback_to_default_project_id
270
270
  def get_instance(self, instance_name: str, location: str, project_id: str) -> dict[str, Any]:
271
271
  """
272
- Gets details of a single Data Fusion instance.
272
+ Get details of a single Data Fusion instance.
273
273
 
274
274
  :param instance_name: The name of the instance.
275
275
  :param location: The Cloud Data Fusion location in which to handle the request.
@@ -312,7 +312,7 @@ class DataFusionHook(GoogleBaseHook):
312
312
  project_id: str = PROVIDE_PROJECT_ID,
313
313
  ) -> Operation:
314
314
  """
315
- Updates a single Data Fusion instance.
315
+ Update a single Data Fusion instance.
316
316
 
317
317
  :param instance_name: The name of the instance to create.
318
318
  :param instance: An instance of Instance.
@@ -348,7 +348,7 @@ class DataFusionHook(GoogleBaseHook):
348
348
  namespace: str = "default",
349
349
  ) -> None:
350
350
  """
351
- Creates a batch Cloud Data Fusion pipeline.
351
+ Create a batch Cloud Data Fusion pipeline.
352
352
 
353
353
  :param pipeline_name: Your pipeline name.
354
354
  :param pipeline: The pipeline definition. For more information check:
@@ -372,7 +372,7 @@ class DataFusionHook(GoogleBaseHook):
372
372
  namespace: str = "default",
373
373
  ) -> None:
374
374
  """
375
- Deletes a batch Cloud Data Fusion pipeline.
375
+ Delete a batch Cloud Data Fusion pipeline.
376
376
 
377
377
  :param pipeline_name: Your pipeline name.
378
378
  :param version_id: Version of pipeline to delete
@@ -406,7 +406,7 @@ class DataFusionHook(GoogleBaseHook):
406
406
  namespace: str = "default",
407
407
  ) -> dict:
408
408
  """
409
- Lists Cloud Data Fusion pipelines.
409
+ List Cloud Data Fusion pipelines.
410
410
 
411
411
  :param artifact_version: Artifact version to filter instances
412
412
  :param artifact_name: Artifact name to filter instances
@@ -462,7 +462,7 @@ class DataFusionHook(GoogleBaseHook):
462
462
  runtime_args: dict[str, Any] | None = None,
463
463
  ) -> str:
464
464
  """
465
- Starts a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.
465
+ Start a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.
466
466
 
467
467
  :param pipeline_name: Your pipeline name.
468
468
  :param pipeline_type: Optional pipeline type (BATCH by default).
@@ -500,7 +500,7 @@ class DataFusionHook(GoogleBaseHook):
500
500
 
501
501
  def stop_pipeline(self, pipeline_name: str, instance_url: str, namespace: str = "default") -> None:
502
502
  """
503
- Stops a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.
503
+ Stop a Cloud Data Fusion pipeline. Works for both batch and stream pipelines.
504
504
 
505
505
  :param pipeline_name: Your pipeline name.
506
506
  :param instance_url: Endpoint on which the REST APIs is accessible for the instance.
@@ -522,7 +522,7 @@ class DataFusionHook(GoogleBaseHook):
522
522
 
523
523
  @staticmethod
524
524
  def cdap_program_type(pipeline_type: DataFusionPipelineType) -> str:
525
- """Retrieves CDAP Program type depending on the pipeline type.
525
+ """Retrieve CDAP Program type depending on the pipeline type.
526
526
 
527
527
  :param pipeline_type: Pipeline type.
528
528
  """
@@ -534,7 +534,7 @@ class DataFusionHook(GoogleBaseHook):
534
534
 
535
535
  @staticmethod
536
536
  def cdap_program_id(pipeline_type: DataFusionPipelineType) -> str:
537
- """Retrieves CDAP Program id depending on the pipeline type.
537
+ """Retrieve CDAP Program id depending on the pipeline type.
538
538
 
539
539
  :param pipeline_type: Pipeline type.
540
540
  """
@@ -611,7 +611,7 @@ class DataFusionAsyncHook(GoogleBaseAsyncHook):
611
611
  success_states: list[str] | None = None,
612
612
  ) -> str:
613
613
  """
614
- Gets a Cloud Data Fusion pipeline status asynchronously.
614
+ Get a Cloud Data Fusion pipeline status asynchronously.
615
615
 
616
616
  :param pipeline_name: Your pipeline name.
617
617
  :param instance_url: Endpoint on which the REST APIs is accessible for the instance.
@@ -49,7 +49,7 @@ class DataPipelineHook(GoogleBaseHook):
49
49
  )
50
50
 
51
51
  def get_conn(self) -> build:
52
- """Returns a Google Cloud Data Pipelines service object."""
52
+ """Return a Google Cloud Data Pipelines service object."""
53
53
  http_authorized = self._authorize()
54
54
  return build("datapipelines", "v1", http=http_authorized, cache_discovery=False)
55
55
 
@@ -61,7 +61,7 @@ class DataPipelineHook(GoogleBaseHook):
61
61
  location: str = DEFAULT_DATAPIPELINE_LOCATION,
62
62
  ) -> None:
63
63
  """
64
- Creates a new Data Pipelines instance from the Data Pipelines API.
64
+ Create a new Data Pipelines instance from the Data Pipelines API.
65
65
 
66
66
  :param body: The request body (contains instance of Pipeline). See:
67
67
  https://cloud.google.com/dataflow/docs/reference/data-pipelines/rest/v1/projects.locations.pipelines/create#request-body
@@ -93,7 +93,7 @@ class DataPipelineHook(GoogleBaseHook):
93
93
  location: str = DEFAULT_DATAPIPELINE_LOCATION,
94
94
  ) -> None:
95
95
  """
96
- Runs a Data Pipelines Instance using the Data Pipelines API.
96
+ Run a Data Pipelines Instance using the Data Pipelines API.
97
97
 
98
98
  :param data_pipeline_name: The display name of the pipeline. In example
99
99
  projects/PROJECT_ID/locations/LOCATION_ID/pipelines/PIPELINE_ID it would be the PIPELINE_ID.
@@ -93,7 +93,7 @@ class DataplexHook(GoogleBaseHook):
93
93
  self.location = location
94
94
 
95
95
  def get_dataplex_client(self) -> DataplexServiceClient:
96
- """Returns DataplexServiceClient."""
96
+ """Return DataplexServiceClient."""
97
97
  client_options = ClientOptions(api_endpoint="dataplex.googleapis.com:443")
98
98
 
99
99
  return DataplexServiceClient(
@@ -101,7 +101,7 @@ class DataplexHook(GoogleBaseHook):
101
101
  )
102
102
 
103
103
  def get_dataplex_data_scan_client(self) -> DataScanServiceClient:
104
- """Returns DataScanServiceClient."""
104
+ """Return DataScanServiceClient."""
105
105
  client_options = ClientOptions(api_endpoint="dataplex.googleapis.com:443")
106
106
 
107
107
  return DataScanServiceClient(
@@ -109,7 +109,7 @@ class DataplexHook(GoogleBaseHook):
109
109
  )
110
110
 
111
111
  def wait_for_operation(self, timeout: float | None, operation: Operation):
112
- """Waits for long-lasting operation to complete."""
112
+ """Wait for long-lasting operation to complete."""
113
113
  try:
114
114
  return operation.result(timeout=timeout)
115
115
  except Exception:
@@ -130,7 +130,7 @@ class DataplexHook(GoogleBaseHook):
130
130
  metadata: Sequence[tuple[str, str]] = (),
131
131
  ) -> Any:
132
132
  """
133
- Creates a task resource within a lake.
133
+ Create a task resource within a lake.
134
134
 
135
135
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
136
136
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -212,7 +212,7 @@ class DataplexHook(GoogleBaseHook):
212
212
  metadata: Sequence[tuple[str, str]] = (),
213
213
  ) -> Any:
214
214
  """
215
- Lists tasks under the given lake.
215
+ List tasks under the given lake.
216
216
 
217
217
  :param project_id: Required. The ID of the Google Cloud project that the task belongs to.
218
218
  :param region: Required. The ID of the Google Cloud region that the task belongs to.
@@ -332,7 +332,7 @@ class DataplexHook(GoogleBaseHook):
332
332
  metadata: Sequence[tuple[str, str]] = (),
333
333
  ) -> Any:
334
334
  """
335
- Creates a lake resource.
335
+ Create a lake resource.
336
336
 
337
337
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
338
338
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -407,7 +407,7 @@ class DataplexHook(GoogleBaseHook):
407
407
  metadata: Sequence[tuple[str, str]] = (),
408
408
  ) -> Any:
409
409
  """
410
- Creates a zone resource within a lake.
410
+ Create a zone resource within a lake.
411
411
 
412
412
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
413
413
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -447,7 +447,7 @@ class DataplexHook(GoogleBaseHook):
447
447
  metadata: Sequence[tuple[str, str]] = (),
448
448
  ) -> Any:
449
449
  """
450
- Deletes a zone resource. All assets within a zone must be deleted before the zone can be deleted.
450
+ Delete a zone resource. All assets within a zone must be deleted before the zone can be deleted.
451
451
 
452
452
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
453
453
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -484,7 +484,7 @@ class DataplexHook(GoogleBaseHook):
484
484
  metadata: Sequence[tuple[str, str]] = (),
485
485
  ) -> Any:
486
486
  """
487
- Creates an asset resource.
487
+ Create an asset resource.
488
488
 
489
489
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
490
490
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -526,7 +526,7 @@ class DataplexHook(GoogleBaseHook):
526
526
  metadata: Sequence[tuple[str, str]] = (),
527
527
  ) -> Any:
528
528
  """
529
- Deletes an asset resource.
529
+ Delete an asset resource.
530
530
 
531
531
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
532
532
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -562,7 +562,7 @@ class DataplexHook(GoogleBaseHook):
562
562
  metadata: Sequence[tuple[str, str]] = (),
563
563
  ) -> Any:
564
564
  """
565
- Creates a DataScan resource.
565
+ Create a DataScan resource.
566
566
 
567
567
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
568
568
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -600,7 +600,7 @@ class DataplexHook(GoogleBaseHook):
600
600
  metadata: Sequence[tuple[str, str]] = (),
601
601
  ) -> Any:
602
602
  """
603
- Runs an on-demand execution of a DataScan.
603
+ Run an on-demand execution of a DataScan.
604
604
 
605
605
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
606
606
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -636,7 +636,7 @@ class DataplexHook(GoogleBaseHook):
636
636
  metadata: Sequence[tuple[str, str]] = (),
637
637
  ) -> Any:
638
638
  """
639
- Gets a DataScan Job resource.
639
+ Get a DataScan Job resource.
640
640
 
641
641
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
642
642
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -715,7 +715,7 @@ class DataplexHook(GoogleBaseHook):
715
715
  metadata: Sequence[tuple[str, str]] = (),
716
716
  ) -> Any:
717
717
  """
718
- Gets a DataScan resource.
718
+ Get a DataScan resource.
719
719
 
720
720
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
721
721
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -750,7 +750,7 @@ class DataplexHook(GoogleBaseHook):
750
750
  metadata: Sequence[tuple[str, str]] = (),
751
751
  ) -> Any:
752
752
  """
753
- Updates a DataScan resource.
753
+ Update a DataScan resource.
754
754
 
755
755
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
756
756
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -802,7 +802,7 @@ class DataplexHook(GoogleBaseHook):
802
802
  metadata: Sequence[tuple[str, str]] = (),
803
803
  ) -> Any:
804
804
  """
805
- Deletes a DataScan resource.
805
+ Delete a DataScan resource.
806
806
 
807
807
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
808
808
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -837,7 +837,7 @@ class DataplexHook(GoogleBaseHook):
837
837
  metadata: Sequence[tuple[str, str]] = (),
838
838
  ) -> Any:
839
839
  """
840
- Lists DataScanJobs under the given DataScan.
840
+ List DataScanJobs under the given DataScan.
841
841
 
842
842
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
843
843
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -881,7 +881,7 @@ class DataplexAsyncHook(GoogleBaseAsyncHook):
881
881
  super().__init__(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain)
882
882
 
883
883
  async def get_dataplex_data_scan_client(self) -> DataScanServiceAsyncClient:
884
- """Returns DataScanServiceAsyncClient."""
884
+ """Return DataScanServiceAsyncClient."""
885
885
  client_options = ClientOptions(api_endpoint="dataplex.googleapis.com:443")
886
886
 
887
887
  return DataScanServiceAsyncClient(
@@ -902,7 +902,7 @@ class DataplexAsyncHook(GoogleBaseAsyncHook):
902
902
  metadata: Sequence[tuple[str, str]] = (),
903
903
  ) -> Any:
904
904
  """
905
- Gets a DataScan Job resource.
905
+ Get a DataScan Job resource.
906
906
 
907
907
  :param project_id: Required. The ID of the Google Cloud project that the lake belongs to.
908
908
  :param region: Required. The ID of the Google Cloud region that the lake belongs to.
@@ -123,7 +123,7 @@ class GoogleDataprepHook(BaseHook):
123
123
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
124
124
  def run_job_group(self, body_request: dict) -> dict[str, Any]:
125
125
  """
126
- Creates a ``jobGroup``, which launches the specified job as the authenticated user.
126
+ Create a ``jobGroup``, which launches the specified job as the authenticated user.
127
127
 
128
128
  This performs the same action as clicking on the Run Job button in the application.
129
129
 
@@ -141,7 +141,7 @@ class GoogleDataprepHook(BaseHook):
141
141
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
142
142
  def create_flow(self, *, body_request: dict) -> dict:
143
143
  """
144
- Creates flow.
144
+ Create flow.
145
145
 
146
146
  :param body_request: Body of the POST request to be sent.
147
147
  For more details check https://clouddataprep.com/documentation/api#operation/createFlow
@@ -190,7 +190,7 @@ class GoogleDataprepHook(BaseHook):
190
190
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
191
191
  def run_flow(self, *, flow_id: int, body_request: dict) -> dict:
192
192
  """
193
- Runs the flow with the provided id copy of the provided flow id.
193
+ Run a copy of the flow with the provided flow id.
194
194
 
195
195
  :param flow_id: ID of the flow to be copied
196
196
  :param body_request: Body of the POST request to be sent.
@@ -224,7 +224,7 @@ class GoogleDataprepHook(BaseHook):
224
224
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
225
225
  def create_imported_dataset(self, *, body_request: dict) -> dict:
226
226
  """
227
- Creates imported dataset.
227
+ Create imported dataset.
228
228
 
229
229
  :param body_request: Body of the POST request to be sent.
230
230
  For more details check https://clouddataprep.com/documentation/api#operation/createImportedDataset
@@ -238,7 +238,7 @@ class GoogleDataprepHook(BaseHook):
238
238
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
239
239
  def create_wrangled_dataset(self, *, body_request: dict) -> dict:
240
240
  """
241
- Creates wrangled dataset.
241
+ Create wrangled dataset.
242
242
 
243
243
  :param body_request: Body of the POST request to be sent.
244
244
  For more details check
@@ -253,7 +253,7 @@ class GoogleDataprepHook(BaseHook):
253
253
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
254
254
  def create_output_object(self, *, body_request: dict) -> dict:
255
255
  """
256
- Creates output.
256
+ Create output.
257
257
 
258
258
  :param body_request: Body of the POST request to be sent.
259
259
  For more details check
@@ -268,7 +268,7 @@ class GoogleDataprepHook(BaseHook):
268
268
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
269
269
  def create_write_settings(self, *, body_request: dict) -> dict:
270
270
  """
271
- Creates write settings.
271
+ Create write settings.
272
272
 
273
273
  :param body_request: Body of the POST request to be sent.
274
274
  For more details check
@@ -283,7 +283,7 @@ class GoogleDataprepHook(BaseHook):
283
283
  @retry(stop=stop_after_attempt(5), wait=wait_exponential(multiplier=1, max=10))
284
284
  def delete_imported_dataset(self, *, dataset_id: int) -> None:
285
285
  """
286
- Deletes imported dataset.
286
+ Delete imported dataset.
287
287
 
288
288
  :param dataset_id: ID of the imported dataset for removal.
289
289
  """
@@ -583,6 +583,94 @@ class DataprocHook(GoogleBaseHook):
583
583
  )
584
584
  return operation
585
585
 
586
+ @GoogleBaseHook.fallback_to_default_project_id
587
+ def start_cluster(
588
+ self,
589
+ region: str,
590
+ project_id: str,
591
+ cluster_name: str,
592
+ cluster_uuid: str | None = None,
593
+ request_id: str | None = None,
594
+ retry: Retry | _MethodDefault = DEFAULT,
595
+ timeout: float | None = None,
596
+ metadata: Sequence[tuple[str, str]] = (),
597
+ ) -> Operation:
598
+ """Start a cluster in a project.
599
+
600
+ :param region: Cloud Dataproc region to handle the request.
601
+ :param project_id: Google Cloud project ID that the cluster belongs to.
602
+ :param cluster_name: The cluster name.
603
+ :param cluster_uuid: The cluster UUID
604
+ :param request_id: A unique id used to identify the request. If the
605
+ server receives two *StartClusterRequest* requests with the same
606
+ ID, the second request will be ignored, and an operation created
607
+ for the first one and stored in the backend is returned.
608
+ :param retry: A retry object used to retry requests. If *None*, requests
609
+ will not be retried.
610
+ :param timeout: The amount of time, in seconds, to wait for the request
611
+ to complete. If *retry* is specified, the timeout applies to each
612
+ individual attempt.
613
+ :param metadata: Additional metadata that is provided to the method.
614
+ :return: An instance of ``google.api_core.operation.Operation``
615
+ """
616
+ client = self.get_cluster_client(region=region)
617
+ return client.start_cluster(
618
+ request={
619
+ "project_id": project_id,
620
+ "region": region,
621
+ "cluster_name": cluster_name,
622
+ "cluster_uuid": cluster_uuid,
623
+ "request_id": request_id,
624
+ },
625
+ retry=retry,
626
+ timeout=timeout,
627
+ metadata=metadata,
628
+ )
629
+
630
+ @GoogleBaseHook.fallback_to_default_project_id
631
+ def stop_cluster(
632
+ self,
633
+ region: str,
634
+ project_id: str,
635
+ cluster_name: str,
636
+ cluster_uuid: str | None = None,
637
+ request_id: str | None = None,
638
+ retry: Retry | _MethodDefault = DEFAULT,
639
+ timeout: float | None = None,
640
+ metadata: Sequence[tuple[str, str]] = (),
641
+ ) -> Operation:
642
+ """Stop a cluster in a project.
643
+
644
+ :param region: Cloud Dataproc region to handle the request.
645
+ :param project_id: Google Cloud project ID that the cluster belongs to.
646
+ :param cluster_name: The cluster name.
647
+ :param cluster_uuid: The cluster UUID
648
+ :param request_id: A unique id used to identify the request. If the
649
+ server receives two *StopClusterRequest* requests with the same
650
+ ID, the second request will be ignored, and an operation created
651
+ for the first one and stored in the backend is returned.
652
+ :param retry: A retry object used to retry requests. If *None*, requests
653
+ will not be retried.
654
+ :param timeout: The amount of time, in seconds, to wait for the request
655
+ to complete. If *retry* is specified, the timeout applies to each
656
+ individual attempt.
657
+ :param metadata: Additional metadata that is provided to the method.
658
+ :return: An instance of ``google.api_core.operation.Operation``
659
+ """
660
+ client = self.get_cluster_client(region=region)
661
+ return client.stop_cluster(
662
+ request={
663
+ "project_id": project_id,
664
+ "region": region,
665
+ "cluster_name": cluster_name,
666
+ "cluster_uuid": cluster_uuid,
667
+ "request_id": request_id,
668
+ },
669
+ retry=retry,
670
+ timeout=timeout,
671
+ metadata=metadata,
672
+ )
673
+
586
674
  @GoogleBaseHook.fallback_to_default_project_id
587
675
  def create_workflow_template(
588
676
  self,