apache-airflow-providers-google 10.20.0rc1__py3-none-any.whl → 10.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +16 -8
  3. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -1
  4. airflow/providers/google/cloud/_internal_client/secret_manager_client.py +6 -3
  5. airflow/providers/google/cloud/hooks/bigquery.py +158 -79
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +12 -6
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +34 -17
  8. airflow/providers/google/cloud/hooks/dataflow.py +30 -26
  9. airflow/providers/google/cloud/hooks/dataform.py +2 -1
  10. airflow/providers/google/cloud/hooks/datafusion.py +4 -2
  11. airflow/providers/google/cloud/hooks/dataproc.py +102 -51
  12. airflow/providers/google/cloud/hooks/functions.py +20 -10
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -11
  14. airflow/providers/google/cloud/hooks/os_login.py +2 -1
  15. airflow/providers/google/cloud/hooks/secret_manager.py +18 -9
  16. airflow/providers/google/cloud/hooks/translate.py +2 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -1
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +141 -0
  19. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +2 -1
  20. airflow/providers/google/cloud/links/base.py +2 -1
  21. airflow/providers/google/cloud/links/datafusion.py +2 -1
  22. airflow/providers/google/cloud/log/stackdriver_task_handler.py +4 -2
  23. airflow/providers/google/cloud/openlineage/mixins.py +10 -0
  24. airflow/providers/google/cloud/openlineage/utils.py +4 -2
  25. airflow/providers/google/cloud/operators/bigquery.py +55 -21
  26. airflow/providers/google/cloud/operators/cloud_batch.py +3 -1
  27. airflow/providers/google/cloud/operators/cloud_sql.py +22 -11
  28. airflow/providers/google/cloud/operators/dataform.py +2 -1
  29. airflow/providers/google/cloud/operators/dataproc.py +75 -34
  30. airflow/providers/google/cloud/operators/dataproc_metastore.py +24 -12
  31. airflow/providers/google/cloud/operators/gcs.py +2 -1
  32. airflow/providers/google/cloud/operators/pubsub.py +10 -5
  33. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +3 -3
  34. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +12 -9
  35. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +243 -0
  36. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +2 -1
  37. airflow/providers/google/cloud/operators/vision.py +36 -18
  38. airflow/providers/google/cloud/sensors/gcs.py +11 -2
  39. airflow/providers/google/cloud/sensors/pubsub.py +2 -1
  40. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +21 -12
  41. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +1 -1
  42. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -1
  43. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +17 -5
  44. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +12 -6
  45. airflow/providers/google/cloud/transfers/local_to_gcs.py +5 -1
  46. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +2 -1
  47. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +2 -1
  48. airflow/providers/google/cloud/transfers/presto_to_gcs.py +2 -1
  49. airflow/providers/google/cloud/transfers/s3_to_gcs.py +2 -1
  50. airflow/providers/google/cloud/transfers/trino_to_gcs.py +2 -1
  51. airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
  52. airflow/providers/google/cloud/triggers/cloud_run.py +2 -1
  53. airflow/providers/google/cloud/triggers/dataflow.py +2 -1
  54. airflow/providers/google/cloud/triggers/vertex_ai.py +2 -1
  55. airflow/providers/google/cloud/utils/external_token_supplier.py +4 -2
  56. airflow/providers/google/cloud/utils/field_sanitizer.py +4 -2
  57. airflow/providers/google/cloud/utils/field_validator.py +6 -3
  58. airflow/providers/google/cloud/utils/helpers.py +2 -1
  59. airflow/providers/google/common/hooks/base_google.py +2 -1
  60. airflow/providers/google/common/utils/id_token_credentials.py +2 -1
  61. airflow/providers/google/get_provider_info.py +3 -2
  62. airflow/providers/google/go_module_utils.py +4 -2
  63. airflow/providers/google/marketing_platform/hooks/analytics_admin.py +12 -6
  64. airflow/providers/google/marketing_platform/links/analytics_admin.py +2 -1
  65. airflow/providers/google/suite/transfers/local_to_drive.py +2 -1
  66. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/METADATA +14 -14
  67. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/RECORD +69 -69
  68. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/WHEEL +0 -0
  69. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/entry_points.txt +0 -0
@@ -84,7 +84,8 @@ class DataProcJobBuilder:
84
84
  self.job["job"][job_type]["properties"] = properties
85
85
 
86
86
  def add_labels(self, labels: dict | None = None) -> None:
87
- """Set labels for Dataproc job.
87
+ """
88
+ Set labels for Dataproc job.
88
89
 
89
90
  :param labels: Labels for the job query.
90
91
  """
@@ -92,7 +93,8 @@ class DataProcJobBuilder:
92
93
  self.job["job"]["labels"].update(labels)
93
94
 
94
95
  def add_variables(self, variables: dict | None = None) -> None:
95
- """Set variables for Dataproc job.
96
+ """
97
+ Set variables for Dataproc job.
96
98
 
97
99
  :param variables: Variables for the job query.
98
100
  """
@@ -100,7 +102,8 @@ class DataProcJobBuilder:
100
102
  self.job["job"][self.job_type]["script_variables"] = variables
101
103
 
102
104
  def add_args(self, args: list[str] | None = None) -> None:
103
- """Set args for Dataproc job.
105
+ """
106
+ Set args for Dataproc job.
104
107
 
105
108
  :param args: Args for the job query.
106
109
  """
@@ -108,21 +111,24 @@ class DataProcJobBuilder:
108
111
  self.job["job"][self.job_type]["args"] = args
109
112
 
110
113
  def add_query(self, query: str) -> None:
111
- """Set query for Dataproc job.
114
+ """
115
+ Set query for Dataproc job.
112
116
 
113
117
  :param query: query for the job.
114
118
  """
115
119
  self.job["job"][self.job_type]["query_list"] = {"queries": [query]}
116
120
 
117
121
  def add_query_uri(self, query_uri: str) -> None:
118
- """Set query uri for Dataproc job.
122
+ """
123
+ Set query uri for Dataproc job.
119
124
 
120
125
  :param query_uri: URI for the job query.
121
126
  """
122
127
  self.job["job"][self.job_type]["query_file_uri"] = query_uri
123
128
 
124
129
  def add_jar_file_uris(self, jars: list[str] | None = None) -> None:
125
- """Set jars uris for Dataproc job.
130
+ """
131
+ Set jars uris for Dataproc job.
126
132
 
127
133
  :param jars: List of jars URIs
128
134
  """
@@ -130,7 +136,8 @@ class DataProcJobBuilder:
130
136
  self.job["job"][self.job_type]["jar_file_uris"] = jars
131
137
 
132
138
  def add_archive_uris(self, archives: list[str] | None = None) -> None:
133
- """Set archives uris for Dataproc job.
139
+ """
140
+ Set archives uris for Dataproc job.
134
141
 
135
142
  :param archives: List of archives URIs
136
143
  """
@@ -138,7 +145,8 @@ class DataProcJobBuilder:
138
145
  self.job["job"][self.job_type]["archive_uris"] = archives
139
146
 
140
147
  def add_file_uris(self, files: list[str] | None = None) -> None:
141
- """Set file uris for Dataproc job.
148
+ """
149
+ Set file uris for Dataproc job.
142
150
 
143
151
  :param files: List of files URIs
144
152
  """
@@ -146,7 +154,8 @@ class DataProcJobBuilder:
146
154
  self.job["job"][self.job_type]["file_uris"] = files
147
155
 
148
156
  def add_python_file_uris(self, pyfiles: list[str] | None = None) -> None:
149
- """Set python file uris for Dataproc job.
157
+ """
158
+ Set python file uris for Dataproc job.
150
159
 
151
160
  :param pyfiles: List of python files URIs
152
161
  """
@@ -154,7 +163,8 @@ class DataProcJobBuilder:
154
163
  self.job["job"][self.job_type]["python_file_uris"] = pyfiles
155
164
 
156
165
  def set_main(self, main_jar: str | None = None, main_class: str | None = None) -> None:
157
- """Set Dataproc main class.
166
+ """
167
+ Set Dataproc main class.
158
168
 
159
169
  :param main_jar: URI for the main file.
160
170
  :param main_class: Name of the main class.
@@ -168,14 +178,16 @@ class DataProcJobBuilder:
168
178
  self.job["job"][self.job_type]["main_class"] = main_class
169
179
 
170
180
  def set_python_main(self, main: str) -> None:
171
- """Set Dataproc main python file uri.
181
+ """
182
+ Set Dataproc main python file uri.
172
183
 
173
184
  :param main: URI for the python main file.
174
185
  """
175
186
  self.job["job"][self.job_type]["main_python_file_uri"] = main
176
187
 
177
188
  def set_job_name(self, name: str) -> None:
178
- """Set Dataproc job name.
189
+ """
190
+ Set Dataproc job name.
179
191
 
180
192
  Job name is sanitized, replacing dots by underscores.
181
193
 
@@ -185,7 +197,8 @@ class DataProcJobBuilder:
185
197
  self.job["job"]["reference"]["job_id"] = sanitized_name
186
198
 
187
199
  def build(self) -> dict:
188
- """Return Dataproc job.
200
+ """
201
+ Return Dataproc job.
189
202
 
190
203
  :return: Dataproc job
191
204
  """
@@ -193,7 +206,8 @@ class DataProcJobBuilder:
193
206
 
194
207
 
195
208
  class DataprocHook(GoogleBaseHook):
196
- """Google Cloud Dataproc APIs.
209
+ """
210
+ Google Cloud Dataproc APIs.
197
211
 
198
212
  All the methods in the hook where project_id is used must be called with
199
213
  keyword arguments rather than positional.
@@ -283,7 +297,8 @@ class DataprocHook(GoogleBaseHook):
283
297
  timeout: float | None = None,
284
298
  metadata: Sequence[tuple[str, str]] = (),
285
299
  ) -> Operation:
286
- """Create a cluster in a specified project.
300
+ """
301
+ Create a cluster in a specified project.
287
302
 
288
303
  :param project_id: Google Cloud project ID that the cluster belongs to.
289
304
  :param region: Cloud Dataproc region in which to handle the request.
@@ -350,7 +365,8 @@ class DataprocHook(GoogleBaseHook):
350
365
  timeout: float | None = None,
351
366
  metadata: Sequence[tuple[str, str]] = (),
352
367
  ) -> Operation:
353
- """Delete a cluster in a project.
368
+ """
369
+ Delete a cluster in a project.
354
370
 
355
371
  :param project_id: Google Cloud project ID that the cluster belongs to.
356
372
  :param region: Cloud Dataproc region in which to handle the request.
@@ -397,7 +413,8 @@ class DataprocHook(GoogleBaseHook):
397
413
  timeout: float | None = None,
398
414
  metadata: Sequence[tuple[str, str]] = (),
399
415
  ) -> Operation:
400
- """Get cluster diagnostic information.
416
+ """
417
+ Get cluster diagnostic information.
401
418
 
402
419
  After the operation completes, the response contains the Cloud Storage URI of the diagnostic output report containing a summary of collected diagnostics.
403
420
 
@@ -442,7 +459,8 @@ class DataprocHook(GoogleBaseHook):
442
459
  timeout: float | None = None,
443
460
  metadata: Sequence[tuple[str, str]] = (),
444
461
  ) -> Cluster:
445
- """Get the resource representation for a cluster in a project.
462
+ """
463
+ Get the resource representation for a cluster in a project.
446
464
 
447
465
  :param project_id: Google Cloud project ID that the cluster belongs to.
448
466
  :param region: Cloud Dataproc region to handle the request.
@@ -474,7 +492,8 @@ class DataprocHook(GoogleBaseHook):
474
492
  timeout: float | None = None,
475
493
  metadata: Sequence[tuple[str, str]] = (),
476
494
  ):
477
- """List all regions/{region}/clusters in a project.
495
+ """
496
+ List all regions/{region}/clusters in a project.
478
497
 
479
498
  :param project_id: Google Cloud project ID that the cluster belongs to.
480
499
  :param region: Cloud Dataproc region to handle the request.
@@ -514,7 +533,8 @@ class DataprocHook(GoogleBaseHook):
514
533
  timeout: float | None = None,
515
534
  metadata: Sequence[tuple[str, str]] = (),
516
535
  ) -> Operation:
517
- """Update a cluster in a project.
536
+ """
537
+ Update a cluster in a project.
518
538
 
519
539
  :param project_id: Google Cloud project ID that the cluster belongs to.
520
540
  :param region: Cloud Dataproc region to handle the request.
@@ -596,7 +616,8 @@ class DataprocHook(GoogleBaseHook):
596
616
  timeout: float | None = None,
597
617
  metadata: Sequence[tuple[str, str]] = (),
598
618
  ) -> Operation:
599
- """Start a cluster in a project.
619
+ """
620
+ Start a cluster in a project.
600
621
 
601
622
  :param region: Cloud Dataproc region to handle the request.
602
623
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -640,7 +661,8 @@ class DataprocHook(GoogleBaseHook):
640
661
  timeout: float | None = None,
641
662
  metadata: Sequence[tuple[str, str]] = (),
642
663
  ) -> Operation:
643
- """Start a cluster in a project.
664
+ """
665
+ Start a cluster in a project.
644
666
 
645
667
  :param region: Cloud Dataproc region to handle the request.
646
668
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -682,7 +704,8 @@ class DataprocHook(GoogleBaseHook):
682
704
  timeout: float | None = None,
683
705
  metadata: Sequence[tuple[str, str]] = (),
684
706
  ) -> WorkflowTemplate:
685
- """Create a new workflow template.
707
+ """
708
+ Create a new workflow template.
686
709
 
687
710
  :param project_id: Google Cloud project ID that the cluster belongs to.
688
711
  :param region: Cloud Dataproc region to handle the request.
@@ -718,7 +741,8 @@ class DataprocHook(GoogleBaseHook):
718
741
  timeout: float | None = None,
719
742
  metadata: Sequence[tuple[str, str]] = (),
720
743
  ) -> Operation:
721
- """Instantiate a template and begins execution.
744
+ """
745
+ Instantiate a template and begins execution.
722
746
 
723
747
  :param template_name: Name of template to instantiate.
724
748
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -764,7 +788,8 @@ class DataprocHook(GoogleBaseHook):
764
788
  timeout: float | None = None,
765
789
  metadata: Sequence[tuple[str, str]] = (),
766
790
  ) -> Operation:
767
- """Instantiate a template and begin execution.
791
+ """
792
+ Instantiate a template and begin execution.
768
793
 
769
794
  :param template: The workflow template to instantiate. If a dict is
770
795
  provided, it must be of the same form as the protobuf message
@@ -803,7 +828,8 @@ class DataprocHook(GoogleBaseHook):
803
828
  wait_time: int = 10,
804
829
  timeout: int | None = None,
805
830
  ) -> None:
806
- """Poll a job to check if it has finished.
831
+ """
832
+ Poll a job to check if it has finished.
807
833
 
808
834
  :param job_id: Dataproc job ID.
809
835
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -840,7 +866,8 @@ class DataprocHook(GoogleBaseHook):
840
866
  timeout: float | None = None,
841
867
  metadata: Sequence[tuple[str, str]] = (),
842
868
  ) -> Job:
843
- """Get the resource representation for a job in a project.
869
+ """
870
+ Get the resource representation for a job in a project.
844
871
 
845
872
  :param job_id: Dataproc job ID.
846
873
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -874,7 +901,8 @@ class DataprocHook(GoogleBaseHook):
874
901
  timeout: float | None = None,
875
902
  metadata: Sequence[tuple[str, str]] = (),
876
903
  ) -> Job:
877
- """Submit a job to a cluster.
904
+ """
905
+ Submit a job to a cluster.
878
906
 
879
907
  :param job: The job resource. If a dict is provided, it must be of the
880
908
  same form as the protobuf message Job.
@@ -910,7 +938,8 @@ class DataprocHook(GoogleBaseHook):
910
938
  timeout: float | None = None,
911
939
  metadata: Sequence[tuple[str, str]] = (),
912
940
  ) -> Job:
913
- """Start a job cancellation request.
941
+ """
942
+ Start a job cancellation request.
914
943
 
915
944
  :param project_id: Google Cloud project ID that the cluster belongs to.
916
945
  :param region: Cloud Dataproc region to handle the request.
@@ -944,7 +973,8 @@ class DataprocHook(GoogleBaseHook):
944
973
  timeout: float | None = None,
945
974
  metadata: Sequence[tuple[str, str]] = (),
946
975
  ) -> Operation:
947
- """Create a batch workload.
976
+ """
977
+ Create a batch workload.
948
978
 
949
979
  :param project_id: Google Cloud project ID that the cluster belongs to.
950
980
  :param region: Cloud Dataproc region to handle the request.
@@ -989,7 +1019,8 @@ class DataprocHook(GoogleBaseHook):
989
1019
  timeout: float | None = None,
990
1020
  metadata: Sequence[tuple[str, str]] = (),
991
1021
  ) -> None:
992
- """Delete the batch workload resource.
1022
+ """
1023
+ Delete the batch workload resource.
993
1024
 
994
1025
  :param batch_id: The batch ID.
995
1026
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1023,7 +1054,8 @@ class DataprocHook(GoogleBaseHook):
1023
1054
  timeout: float | None = None,
1024
1055
  metadata: Sequence[tuple[str, str]] = (),
1025
1056
  ) -> Batch:
1026
- """Get the batch workload resource representation.
1057
+ """
1058
+ Get the batch workload resource representation.
1027
1059
 
1028
1060
  :param batch_id: The batch ID.
1029
1061
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1061,7 +1093,8 @@ class DataprocHook(GoogleBaseHook):
1061
1093
  filter: str | None = None,
1062
1094
  order_by: str | None = None,
1063
1095
  ):
1064
- """List batch workloads.
1096
+ """
1097
+ List batch workloads.
1065
1098
 
1066
1099
  :param project_id: Google Cloud project ID that the cluster belongs to.
1067
1100
  :param region: Cloud Dataproc region to handle the request.
@@ -1107,7 +1140,8 @@ class DataprocHook(GoogleBaseHook):
1107
1140
  timeout: float | None = None,
1108
1141
  metadata: Sequence[tuple[str, str]] = (),
1109
1142
  ) -> Batch:
1110
- """Wait for a batch job to complete.
1143
+ """
1144
+ Wait for a batch job to complete.
1111
1145
 
1112
1146
  After submission of a batch job, the operator waits for the job to
1113
1147
  complete. This hook is, however, useful in the case when Airflow is
@@ -1160,7 +1194,8 @@ class DataprocHook(GoogleBaseHook):
1160
1194
 
1161
1195
 
1162
1196
  class DataprocAsyncHook(GoogleBaseHook):
1163
- """Asynchronous interaction with Google Cloud Dataproc APIs.
1197
+ """
1198
+ Asynchronous interaction with Google Cloud Dataproc APIs.
1164
1199
 
1165
1200
  All the methods in the hook where project_id is used must be called with
1166
1201
  keyword arguments rather than positional.
@@ -1242,7 +1277,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1242
1277
  timeout: float | None = None,
1243
1278
  metadata: Sequence[tuple[str, str]] = (),
1244
1279
  ) -> AsyncOperation:
1245
- """Create a cluster in a project.
1280
+ """
1281
+ Create a cluster in a project.
1246
1282
 
1247
1283
  :param project_id: Google Cloud project ID that the cluster belongs to.
1248
1284
  :param region: Cloud Dataproc region in which to handle the request.
@@ -1309,7 +1345,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1309
1345
  timeout: float | None = None,
1310
1346
  metadata: Sequence[tuple[str, str]] = (),
1311
1347
  ) -> AsyncOperation:
1312
- """Delete a cluster in a project.
1348
+ """
1349
+ Delete a cluster in a project.
1313
1350
 
1314
1351
  :param project_id: Google Cloud project ID that the cluster belongs to.
1315
1352
  :param region: Cloud Dataproc region in which to handle the request.
@@ -1356,7 +1393,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1356
1393
  timeout: float | None = None,
1357
1394
  metadata: Sequence[tuple[str, str]] = (),
1358
1395
  ) -> AsyncOperation:
1359
- """Get cluster diagnostic information.
1396
+ """
1397
+ Get cluster diagnostic information.
1360
1398
 
1361
1399
  After the operation completes, the response contains the Cloud Storage URI of the diagnostic output report containing a summary of collected diagnostics.
1362
1400
 
@@ -1401,7 +1439,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1401
1439
  timeout: float | None = None,
1402
1440
  metadata: Sequence[tuple[str, str]] = (),
1403
1441
  ) -> Cluster:
1404
- """Get the resource representation for a cluster in a project.
1442
+ """
1443
+ Get the resource representation for a cluster in a project.
1405
1444
 
1406
1445
  :param project_id: Google Cloud project ID that the cluster belongs to.
1407
1446
  :param region: Cloud Dataproc region to handle the request.
@@ -1433,7 +1472,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1433
1472
  timeout: float | None = None,
1434
1473
  metadata: Sequence[tuple[str, str]] = (),
1435
1474
  ):
1436
- """List all regions/{region}/clusters in a project.
1475
+ """
1476
+ List all regions/{region}/clusters in a project.
1437
1477
 
1438
1478
  :param project_id: Google Cloud project ID that the cluster belongs to.
1439
1479
  :param region: Cloud Dataproc region to handle the request.
@@ -1473,7 +1513,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1473
1513
  timeout: float | None = None,
1474
1514
  metadata: Sequence[tuple[str, str]] = (),
1475
1515
  ) -> AsyncOperation:
1476
- """Update a cluster in a project.
1516
+ """
1517
+ Update a cluster in a project.
1477
1518
 
1478
1519
  :param project_id: Google Cloud project ID that the cluster belongs to.
1479
1520
  :param region: Cloud Dataproc region to handle the request.
@@ -1553,7 +1594,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1553
1594
  timeout: float | None = None,
1554
1595
  metadata: Sequence[tuple[str, str]] = (),
1555
1596
  ) -> WorkflowTemplate:
1556
- """Create a new workflow template.
1597
+ """
1598
+ Create a new workflow template.
1557
1599
 
1558
1600
  :param project_id: Google Cloud project ID that the cluster belongs to.
1559
1601
  :param region: Cloud Dataproc region to handle the request.
@@ -1589,7 +1631,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1589
1631
  timeout: float | None = None,
1590
1632
  metadata: Sequence[tuple[str, str]] = (),
1591
1633
  ) -> AsyncOperation:
1592
- """Instantiate a template and begins execution.
1634
+ """
1635
+ Instantiate a template and begins execution.
1593
1636
 
1594
1637
  :param template_name: Name of template to instantiate.
1595
1638
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1635,7 +1678,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1635
1678
  timeout: float | None = None,
1636
1679
  metadata: Sequence[tuple[str, str]] = (),
1637
1680
  ) -> AsyncOperation:
1638
- """Instantiate a template and begin execution.
1681
+ """
1682
+ Instantiate a template and begin execution.
1639
1683
 
1640
1684
  :param template: The workflow template to instantiate. If a dict is
1641
1685
  provided, it must be of the same form as the protobuf message
@@ -1678,7 +1722,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1678
1722
  timeout: float | None = None,
1679
1723
  metadata: Sequence[tuple[str, str]] = (),
1680
1724
  ) -> Job:
1681
- """Get the resource representation for a job in a project.
1725
+ """
1726
+ Get the resource representation for a job in a project.
1682
1727
 
1683
1728
  :param job_id: Dataproc job ID.
1684
1729
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1712,7 +1757,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1712
1757
  timeout: float | None = None,
1713
1758
  metadata: Sequence[tuple[str, str]] = (),
1714
1759
  ) -> Job:
1715
- """Submit a job to a cluster.
1760
+ """
1761
+ Submit a job to a cluster.
1716
1762
 
1717
1763
  :param job: The job resource. If a dict is provided, it must be of the
1718
1764
  same form as the protobuf message Job.
@@ -1748,7 +1794,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1748
1794
  timeout: float | None = None,
1749
1795
  metadata: Sequence[tuple[str, str]] = (),
1750
1796
  ) -> Job:
1751
- """Start a job cancellation request.
1797
+ """
1798
+ Start a job cancellation request.
1752
1799
 
1753
1800
  :param project_id: Google Cloud project ID that the cluster belongs to.
1754
1801
  :param region: Cloud Dataproc region to handle the request.
@@ -1782,7 +1829,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1782
1829
  timeout: float | None = None,
1783
1830
  metadata: Sequence[tuple[str, str]] = (),
1784
1831
  ) -> AsyncOperation:
1785
- """Create a batch workload.
1832
+ """
1833
+ Create a batch workload.
1786
1834
 
1787
1835
  :param project_id: Google Cloud project ID that the cluster belongs to.
1788
1836
  :param region: Cloud Dataproc region to handle the request.
@@ -1827,7 +1875,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1827
1875
  timeout: float | None = None,
1828
1876
  metadata: Sequence[tuple[str, str]] = (),
1829
1877
  ) -> None:
1830
- """Delete the batch workload resource.
1878
+ """
1879
+ Delete the batch workload resource.
1831
1880
 
1832
1881
  :param batch_id: The batch ID.
1833
1882
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1861,7 +1910,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1861
1910
  timeout: float | None = None,
1862
1911
  metadata: Sequence[tuple[str, str]] = (),
1863
1912
  ) -> Batch:
1864
- """Get the batch workload resource representation.
1913
+ """
1914
+ Get the batch workload resource representation.
1865
1915
 
1866
1916
  :param batch_id: The batch ID.
1867
1917
  :param project_id: Google Cloud project ID that the cluster belongs to.
@@ -1899,7 +1949,8 @@ class DataprocAsyncHook(GoogleBaseHook):
1899
1949
  filter: str | None = None,
1900
1950
  order_by: str | None = None,
1901
1951
  ):
1902
- """List batch workloads.
1952
+ """
1953
+ List batch workloads.
1903
1954
 
1904
1955
  :param project_id: Google Cloud project ID that the cluster belongs to.
1905
1956
  :param region: Cloud Dataproc region to handle the request.
@@ -33,7 +33,8 @@ TIME_TO_SLEEP_IN_SECONDS = 1
33
33
 
34
34
 
35
35
  class CloudFunctionsHook(GoogleBaseHook):
36
- """Google Cloud Functions APIs.
36
+ """
37
+ Google Cloud Functions APIs.
37
38
 
38
39
  All the methods in the hook where project_id is used must be called with
39
40
  keyword arguments rather than positional.
@@ -61,7 +62,8 @@ class CloudFunctionsHook(GoogleBaseHook):
61
62
 
62
63
  @staticmethod
63
64
  def _full_location(project_id: str, location: str) -> str:
64
- """Retrieve full location of the function.
65
+ """
66
+ Retrieve full location of the function.
65
67
 
66
68
  :param project_id: Google Cloud Project ID where the function belongs.
67
69
  :param location: The location where the function is created.
@@ -71,7 +73,8 @@ class CloudFunctionsHook(GoogleBaseHook):
71
73
  return f"projects/{project_id}/locations/{location}"
72
74
 
73
75
  def get_conn(self) -> build:
74
- """Retrieve the connection to Cloud Functions.
76
+ """
77
+ Retrieve the connection to Cloud Functions.
75
78
 
76
79
  :return: Google Cloud Function services object.
77
80
  """
@@ -83,7 +86,8 @@ class CloudFunctionsHook(GoogleBaseHook):
83
86
  return self._conn
84
87
 
85
88
  def get_function(self, name: str) -> dict:
86
- """Get the Cloud Function with given name.
89
+ """
90
+ Get the Cloud Function with given name.
87
91
 
88
92
  :param name: Name of the function.
89
93
  :return: A Cloud Functions object representing the function.
@@ -93,7 +97,8 @@ class CloudFunctionsHook(GoogleBaseHook):
93
97
 
94
98
  @GoogleBaseHook.fallback_to_default_project_id
95
99
  def create_new_function(self, location: str, body: dict, project_id: str) -> None:
96
- """Create a new function at the location specified in the body.
100
+ """
101
+ Create a new function at the location specified in the body.
97
102
 
98
103
  :param location: The location of the function.
99
104
  :param body: The body required by the Cloud Functions insert API.
@@ -113,7 +118,8 @@ class CloudFunctionsHook(GoogleBaseHook):
113
118
  self._wait_for_operation_to_complete(operation_name=operation_name)
114
119
 
115
120
  def update_function(self, name: str, body: dict, update_mask: list[str]) -> None:
116
- """Update Cloud Functions according to the specified update mask.
121
+ """
122
+ Update Cloud Functions according to the specified update mask.
117
123
 
118
124
  :param name: The name of the function.
119
125
  :param body: The body required by the cloud function patch API.
@@ -132,7 +138,8 @@ class CloudFunctionsHook(GoogleBaseHook):
132
138
 
133
139
  @GoogleBaseHook.fallback_to_default_project_id
134
140
  def upload_function_zip(self, location: str, zip_path: str, project_id: str) -> str:
135
- """Upload ZIP file with sources.
141
+ """
142
+ Upload ZIP file with sources.
136
143
 
137
144
  :param location: The location where the function is created.
138
145
  :param zip_path: The path of the valid .zip file to upload.
@@ -165,7 +172,8 @@ class CloudFunctionsHook(GoogleBaseHook):
165
172
  return upload_url
166
173
 
167
174
  def delete_function(self, name: str) -> None:
168
- """Delete the specified Cloud Function.
175
+ """
176
+ Delete the specified Cloud Function.
169
177
 
170
178
  :param name: The name of the function.
171
179
  """
@@ -182,7 +190,8 @@ class CloudFunctionsHook(GoogleBaseHook):
182
190
  location: str,
183
191
  project_id: str = PROVIDE_PROJECT_ID,
184
192
  ) -> dict:
185
- """Invoke a deployed Cloud Function.
193
+ """
194
+ Invoke a deployed Cloud Function.
186
195
 
187
196
  This is done synchronously and should only be used for testing purposes,
188
197
  as very limited traffic is allowed.
@@ -202,7 +211,8 @@ class CloudFunctionsHook(GoogleBaseHook):
202
211
  return response
203
212
 
204
213
  def _wait_for_operation_to_complete(self, operation_name: str) -> dict:
205
- """Wait for the named operation to complete.
214
+ """
215
+ Wait for the named operation to complete.
206
216
 
207
217
  This is used to check the status of an asynchronous call.
208
218