apache-airflow-providers-google 10.20.0rc1__py3-none-any.whl → 10.21.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +16 -8
  3. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -1
  4. airflow/providers/google/cloud/_internal_client/secret_manager_client.py +6 -3
  5. airflow/providers/google/cloud/hooks/bigquery.py +158 -79
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +12 -6
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +34 -17
  8. airflow/providers/google/cloud/hooks/dataflow.py +30 -26
  9. airflow/providers/google/cloud/hooks/dataform.py +2 -1
  10. airflow/providers/google/cloud/hooks/datafusion.py +4 -2
  11. airflow/providers/google/cloud/hooks/dataproc.py +102 -51
  12. airflow/providers/google/cloud/hooks/functions.py +20 -10
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -11
  14. airflow/providers/google/cloud/hooks/os_login.py +2 -1
  15. airflow/providers/google/cloud/hooks/secret_manager.py +18 -9
  16. airflow/providers/google/cloud/hooks/translate.py +2 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -1
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +141 -0
  19. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +2 -1
  20. airflow/providers/google/cloud/links/base.py +2 -1
  21. airflow/providers/google/cloud/links/datafusion.py +2 -1
  22. airflow/providers/google/cloud/log/stackdriver_task_handler.py +4 -2
  23. airflow/providers/google/cloud/openlineage/mixins.py +10 -0
  24. airflow/providers/google/cloud/openlineage/utils.py +4 -2
  25. airflow/providers/google/cloud/operators/bigquery.py +55 -21
  26. airflow/providers/google/cloud/operators/cloud_batch.py +3 -1
  27. airflow/providers/google/cloud/operators/cloud_sql.py +22 -11
  28. airflow/providers/google/cloud/operators/dataform.py +2 -1
  29. airflow/providers/google/cloud/operators/dataproc.py +75 -34
  30. airflow/providers/google/cloud/operators/dataproc_metastore.py +24 -12
  31. airflow/providers/google/cloud/operators/gcs.py +2 -1
  32. airflow/providers/google/cloud/operators/pubsub.py +10 -5
  33. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +3 -3
  34. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +12 -9
  35. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +243 -0
  36. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +2 -1
  37. airflow/providers/google/cloud/operators/vision.py +36 -18
  38. airflow/providers/google/cloud/sensors/gcs.py +11 -2
  39. airflow/providers/google/cloud/sensors/pubsub.py +2 -1
  40. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +21 -12
  41. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +1 -1
  42. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -1
  43. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +17 -5
  44. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +12 -6
  45. airflow/providers/google/cloud/transfers/local_to_gcs.py +5 -1
  46. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +2 -1
  47. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +2 -1
  48. airflow/providers/google/cloud/transfers/presto_to_gcs.py +2 -1
  49. airflow/providers/google/cloud/transfers/s3_to_gcs.py +2 -1
  50. airflow/providers/google/cloud/transfers/trino_to_gcs.py +2 -1
  51. airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
  52. airflow/providers/google/cloud/triggers/cloud_run.py +2 -1
  53. airflow/providers/google/cloud/triggers/dataflow.py +2 -1
  54. airflow/providers/google/cloud/triggers/vertex_ai.py +2 -1
  55. airflow/providers/google/cloud/utils/external_token_supplier.py +4 -2
  56. airflow/providers/google/cloud/utils/field_sanitizer.py +4 -2
  57. airflow/providers/google/cloud/utils/field_validator.py +6 -3
  58. airflow/providers/google/cloud/utils/helpers.py +2 -1
  59. airflow/providers/google/common/hooks/base_google.py +2 -1
  60. airflow/providers/google/common/utils/id_token_credentials.py +2 -1
  61. airflow/providers/google/get_provider_info.py +3 -2
  62. airflow/providers/google/go_module_utils.py +4 -2
  63. airflow/providers/google/marketing_platform/hooks/analytics_admin.py +12 -6
  64. airflow/providers/google/marketing_platform/links/analytics_admin.py +2 -1
  65. airflow/providers/google/suite/transfers/local_to_drive.py +2 -1
  66. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/METADATA +8 -8
  67. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/RECORD +69 -69
  68. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/WHEEL +0 -0
  69. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/entry_points.txt +0 -0
@@ -87,7 +87,8 @@ class PreemptibilityType(Enum):
87
87
 
88
88
  @dataclass
89
89
  class InstanceSelection:
90
- """Defines machine types and a rank to which the machine types belong.
90
+ """
91
+ Defines machine types and a rank to which the machine types belong.
91
92
 
92
93
  Representation for
93
94
  google.cloud.dataproc.v1#google.cloud.dataproc.v1.InstanceFlexibilityPolicy.InstanceSelection.
@@ -118,7 +119,8 @@ class InstanceFlexibilityPolicy:
118
119
 
119
120
 
120
121
  class ClusterGenerator:
121
- """Create a new Dataproc Cluster.
122
+ """
123
+ Create a new Dataproc Cluster.
122
124
 
123
125
  :param cluster_name: The name of the DataProc cluster to create. (templated)
124
126
  :param project_id: The ID of the google cloud project in which
@@ -551,7 +553,8 @@ class ClusterGenerator:
551
553
 
552
554
 
553
555
  class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
554
- """Create a new cluster on Google Cloud Dataproc.
556
+ """
557
+ Create a new cluster on Google Cloud Dataproc.
555
558
 
556
559
  The operator will wait until the creation is successful or an error occurs
557
560
  in the creation process.
@@ -882,7 +885,8 @@ class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
882
885
  category=AirflowProviderDeprecationWarning,
883
886
  )
884
887
  class DataprocScaleClusterOperator(GoogleCloudBaseOperator):
885
- """Scale, up or down, a cluster on Google Cloud Dataproc.
888
+ """
889
+ Scale, up or down, a cluster on Google Cloud Dataproc.
886
890
 
887
891
  The operator will wait until the cluster is re-scaled.
888
892
 
@@ -1014,7 +1018,8 @@ class DataprocScaleClusterOperator(GoogleCloudBaseOperator):
1014
1018
 
1015
1019
 
1016
1020
  class DataprocDeleteClusterOperator(GoogleCloudBaseOperator):
1017
- """Delete a cluster in a project.
1021
+ """
1022
+ Delete a cluster in a project.
1018
1023
 
1019
1024
  :param region: Required. The Cloud Dataproc region in which to handle the request (templated).
1020
1025
  :param cluster_name: Required. The cluster name (templated).
@@ -1136,7 +1141,8 @@ class DataprocDeleteClusterOperator(GoogleCloudBaseOperator):
1136
1141
 
1137
1142
 
1138
1143
  class _DataprocStartStopClusterBaseOperator(GoogleCloudBaseOperator):
1139
- """Base class to start or stop a cluster in a project.
1144
+ """
1145
+ Base class to start or stop a cluster in a project.
1140
1146
 
1141
1147
  :param cluster_name: Required. Name of the cluster to create
1142
1148
  :param region: Required. The specified region where the dataproc cluster is created.
@@ -1211,7 +1217,8 @@ class _DataprocStartStopClusterBaseOperator(GoogleCloudBaseOperator):
1211
1217
  return self.project_id or self.hook.project_id
1212
1218
 
1213
1219
  def _get_cluster(self) -> Cluster:
1214
- """Retrieve the cluster information.
1220
+ """
1221
+ Retrieve the cluster information.
1215
1222
 
1216
1223
  :return: Instance of ``google.cloud.dataproc_v1.Cluster`` class
1217
1224
  """
@@ -1225,7 +1232,8 @@ class _DataprocStartStopClusterBaseOperator(GoogleCloudBaseOperator):
1225
1232
  )
1226
1233
 
1227
1234
  def _check_desired_cluster_state(self, cluster: Cluster) -> tuple[bool, str | None]:
1228
- """Implement this method in child class to return whether the cluster is in desired state or not.
1235
+ """
1236
+ Implement this method in child class to return whether the cluster is in desired state or not.
1229
1237
 
1230
1238
  If the cluster is in the desired state you can return a log message content as a second value
1231
1239
  for the return tuple.
@@ -1239,7 +1247,8 @@ class _DataprocStartStopClusterBaseOperator(GoogleCloudBaseOperator):
1239
1247
  raise NotImplementedError
1240
1248
 
1241
1249
  def _get_operation(self) -> operation.Operation:
1242
- """Implement this method in child class to call the related hook method and return its result.
1250
+ """
1251
+ Implement this method in child class to call the related hook method and return its result.
1243
1252
 
1244
1253
  :return: ``google.api_core.operation.Operation`` value whether the cluster is in desired state or not
1245
1254
  """
@@ -1319,7 +1328,8 @@ class DataprocStopClusterOperator(_DataprocStartStopClusterBaseOperator):
1319
1328
 
1320
1329
 
1321
1330
  class DataprocJobBaseOperator(GoogleCloudBaseOperator):
1322
- """Base class for operators that launch job on DataProc.
1331
+ """
1332
+ Base class for operators that launch job on DataProc.
1323
1333
 
1324
1334
  :param region: The specified region where the dataproc cluster is created.
1325
1335
  :param job_name: The job name used in the DataProc cluster. This name by default
@@ -1502,7 +1512,8 @@ class DataprocJobBaseOperator(GoogleCloudBaseOperator):
1502
1512
  category=AirflowProviderDeprecationWarning,
1503
1513
  )
1504
1514
  class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
1505
- """Start a Pig query Job on a Cloud DataProc cluster.
1515
+ """
1516
+ Start a Pig query Job on a Cloud DataProc cluster.
1506
1517
 
1507
1518
  .. seealso::
1508
1519
  This operator is deprecated, please use
@@ -1628,7 +1639,8 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
1628
1639
  category=AirflowProviderDeprecationWarning,
1629
1640
  )
1630
1641
  class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
1631
- """Start a Hive query Job on a Cloud DataProc cluster.
1642
+ """
1643
+ Start a Hive query Job on a Cloud DataProc cluster.
1632
1644
 
1633
1645
  .. seealso::
1634
1646
  This operator is deprecated, please use
@@ -1720,7 +1732,8 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
1720
1732
  category=AirflowProviderDeprecationWarning,
1721
1733
  )
1722
1734
  class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
1723
- """Start a Spark SQL query Job on a Cloud DataProc cluster.
1735
+ """
1736
+ Start a Spark SQL query Job on a Cloud DataProc cluster.
1724
1737
 
1725
1738
  .. seealso::
1726
1739
  This operator is deprecated, please use
@@ -1811,7 +1824,8 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
1811
1824
  category=AirflowProviderDeprecationWarning,
1812
1825
  )
1813
1826
  class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
1814
- """Start a Spark Job on a Cloud DataProc cluster.
1827
+ """
1828
+ Start a Spark Job on a Cloud DataProc cluster.
1815
1829
 
1816
1830
  .. seealso::
1817
1831
  This operator is deprecated, please use
@@ -1902,7 +1916,8 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
1902
1916
  category=AirflowProviderDeprecationWarning,
1903
1917
  )
1904
1918
  class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
1905
- """Start a Hadoop Job on a Cloud DataProc cluster.
1919
+ """
1920
+ Start a Hadoop Job on a Cloud DataProc cluster.
1906
1921
 
1907
1922
  .. seealso::
1908
1923
  This operator is deprecated, please use
@@ -1962,7 +1977,8 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
1962
1977
  self.files = files
1963
1978
 
1964
1979
  def generate_job(self):
1965
- """Act as a helper method for easier migration to `DataprocSubmitJobOperator`.
1980
+ """
1981
+ Act as a helper method for easier migration to `DataprocSubmitJobOperator`.
1966
1982
 
1967
1983
  :return: Dict representing Dataproc job
1968
1984
  """
@@ -1992,7 +2008,8 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
1992
2008
  category=AirflowProviderDeprecationWarning,
1993
2009
  )
1994
2010
  class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
1995
- """Start a PySpark Job on a Cloud DataProc cluster.
2011
+ """
2012
+ Start a PySpark Job on a Cloud DataProc cluster.
1996
2013
 
1997
2014
  .. seealso::
1998
2015
  This operator is deprecated, please use
@@ -2076,7 +2093,8 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
2076
2093
  self.pyfiles = pyfiles
2077
2094
 
2078
2095
  def generate_job(self):
2079
- """Act as a helper method for easier migration to :class:`DataprocSubmitJobOperator`.
2096
+ """
2097
+ Act as a helper method for easier migration to :class:`DataprocSubmitJobOperator`.
2080
2098
 
2081
2099
  :return: Dict representing Dataproc job
2082
2100
  """
@@ -2115,7 +2133,8 @@ class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
2115
2133
 
2116
2134
 
2117
2135
  class DataprocCreateWorkflowTemplateOperator(GoogleCloudBaseOperator):
2118
- """Creates new workflow template.
2136
+ """
2137
+ Creates new workflow template.
2119
2138
 
2120
2139
  :param project_id: Optional. The ID of the Google Cloud project the cluster belongs to.
2121
2140
  :param region: Required. The Cloud Dataproc region in which to handle the request.
@@ -2182,7 +2201,8 @@ class DataprocCreateWorkflowTemplateOperator(GoogleCloudBaseOperator):
2182
2201
 
2183
2202
 
2184
2203
  class DataprocInstantiateWorkflowTemplateOperator(GoogleCloudBaseOperator):
2185
- """Instantiate a WorkflowTemplate on Google Cloud Dataproc.
2204
+ """
2205
+ Instantiate a WorkflowTemplate on Google Cloud Dataproc.
2186
2206
 
2187
2207
  The operator will wait until the WorkflowTemplate is finished executing.
2188
2208
 
@@ -2308,7 +2328,8 @@ class DataprocInstantiateWorkflowTemplateOperator(GoogleCloudBaseOperator):
2308
2328
  )
2309
2329
 
2310
2330
  def execute_complete(self, context, event=None) -> None:
2311
- """Act as a callback for when the trigger fires.
2331
+ """
2332
+ Act as a callback for when the trigger fires.
2312
2333
 
2313
2334
  This returns immediately. It relies on trigger to throw an exception,
2314
2335
  otherwise it assumes execution was successful.
@@ -2326,7 +2347,8 @@ class DataprocInstantiateWorkflowTemplateOperator(GoogleCloudBaseOperator):
2326
2347
 
2327
2348
 
2328
2349
  class DataprocInstantiateInlineWorkflowTemplateOperator(GoogleCloudBaseOperator):
2329
- """Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc.
2350
+ """
2351
+ Instantiate a WorkflowTemplate Inline on Google Cloud Dataproc.
2330
2352
 
2331
2353
  The operator will wait until the WorkflowTemplate is finished executing.
2332
2354
 
@@ -2450,7 +2472,8 @@ class DataprocInstantiateInlineWorkflowTemplateOperator(GoogleCloudBaseOperator)
2450
2472
  )
2451
2473
 
2452
2474
  def execute_complete(self, context, event=None) -> None:
2453
- """Act as a callback for when the trigger fires.
2475
+ """
2476
+ Act as a callback for when the trigger fires.
2454
2477
 
2455
2478
  This returns immediately. It relies on trigger to throw an exception,
2456
2479
  otherwise it assumes execution was successful.
@@ -2468,7 +2491,8 @@ class DataprocInstantiateInlineWorkflowTemplateOperator(GoogleCloudBaseOperator)
2468
2491
 
2469
2492
 
2470
2493
  class DataprocSubmitJobOperator(GoogleCloudBaseOperator):
2471
- """Submit a job to a cluster.
2494
+ """
2495
+ Submit a job to a cluster.
2472
2496
 
2473
2497
  :param project_id: Optional. The ID of the Google Cloud project that the job belongs to.
2474
2498
  :param region: Required. The Cloud Dataproc region in which to handle the request.
@@ -2605,7 +2629,8 @@ class DataprocSubmitJobOperator(GoogleCloudBaseOperator):
2605
2629
  return self.job_id
2606
2630
 
2607
2631
  def execute_complete(self, context, event=None) -> None:
2608
- """Act as a callback for when the trigger fires.
2632
+ """
2633
+ Act as a callback for when the trigger fires.
2609
2634
 
2610
2635
  This returns immediately. It relies on trigger to throw an exception,
2611
2636
  otherwise it assumes execution was successful.
@@ -2626,7 +2651,8 @@ class DataprocSubmitJobOperator(GoogleCloudBaseOperator):
2626
2651
 
2627
2652
 
2628
2653
  class DataprocUpdateClusterOperator(GoogleCloudBaseOperator):
2629
- """Update a cluster in a project.
2654
+ """
2655
+ Update a cluster in a project.
2630
2656
 
2631
2657
  :param region: Required. The Cloud Dataproc region in which to handle the request.
2632
2658
  :param project_id: Optional. The ID of the Google Cloud project the cluster belongs to.
@@ -2774,7 +2800,8 @@ class DataprocUpdateClusterOperator(GoogleCloudBaseOperator):
2774
2800
 
2775
2801
 
2776
2802
  class DataprocDiagnoseClusterOperator(GoogleCloudBaseOperator):
2777
- """Diagnose a cluster in a project.
2803
+ """
2804
+ Diagnose a cluster in a project.
2778
2805
 
2779
2806
  After the operation completes, the response contains the Cloud Storage URI of the diagnostic output report containing a summary of collected diagnostics.
2780
2807
 
@@ -2891,7 +2918,8 @@ class DataprocDiagnoseClusterOperator(GoogleCloudBaseOperator):
2891
2918
  )
2892
2919
 
2893
2920
  def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> None:
2894
- """Act as a callback for when the trigger fires.
2921
+ """
2922
+ Act as a callback for when the trigger fires.
2895
2923
 
2896
2924
  This returns immediately. It relies on trigger to throw an exception,
2897
2925
  otherwise it assumes execution was successful.
@@ -2910,7 +2938,8 @@ class DataprocDiagnoseClusterOperator(GoogleCloudBaseOperator):
2910
2938
 
2911
2939
 
2912
2940
  class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
2913
- """Create a batch workload.
2941
+ """
2942
+ Create a batch workload.
2914
2943
 
2915
2944
  :param project_id: Optional. The ID of the Google Cloud project that the cluster belongs to. (templated)
2916
2945
  :param region: Required. The Cloud Dataproc region in which to handle the request. (templated)
@@ -3026,6 +3055,13 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
3026
3055
  self.log.info("Batch %s created", self.batch_id)
3027
3056
 
3028
3057
  else:
3058
+ DataprocBatchLink.persist(
3059
+ context=context,
3060
+ operator=self,
3061
+ project_id=self.project_id,
3062
+ region=self.region,
3063
+ batch_id=self.batch_id,
3064
+ )
3029
3065
  return self.operation.operation.name
3030
3066
 
3031
3067
  else:
@@ -3089,7 +3125,8 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
3089
3125
  return Batch.to_dict(result)
3090
3126
 
3091
3127
  def execute_complete(self, context, event=None) -> None:
3092
- """Act as a callback for when the trigger fires.
3128
+ """
3129
+ Act as a callback for when the trigger fires.
3093
3130
 
3094
3131
  This returns immediately. It relies on trigger to throw an exception,
3095
3132
  otherwise it assumes execution was successful.
@@ -3119,7 +3156,8 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
3119
3156
 
3120
3157
 
3121
3158
  class DataprocDeleteBatchOperator(GoogleCloudBaseOperator):
3122
- """Delete the batch workload resource.
3159
+ """
3160
+ Delete the batch workload resource.
3123
3161
 
3124
3162
  :param batch_id: Required. The ID to use for the batch, which will become the final component
3125
3163
  of the batch's resource name.
@@ -3182,7 +3220,8 @@ class DataprocDeleteBatchOperator(GoogleCloudBaseOperator):
3182
3220
 
3183
3221
 
3184
3222
  class DataprocGetBatchOperator(GoogleCloudBaseOperator):
3185
- """Get the batch workload resource representation.
3223
+ """
3224
+ Get the batch workload resource representation.
3186
3225
 
3187
3226
  :param batch_id: Required. The ID to use for the batch, which will become the final component
3188
3227
  of the batch's resource name.
@@ -3255,7 +3294,8 @@ class DataprocGetBatchOperator(GoogleCloudBaseOperator):
3255
3294
 
3256
3295
 
3257
3296
  class DataprocListBatchesOperator(GoogleCloudBaseOperator):
3258
- """List batch workloads.
3297
+ """
3298
+ List batch workloads.
3259
3299
 
3260
3300
  :param region: Required. The Cloud Dataproc region in which to handle the request.
3261
3301
  :param project_id: Optional. The ID of the Google Cloud project that the cluster belongs to.
@@ -3333,7 +3373,8 @@ class DataprocListBatchesOperator(GoogleCloudBaseOperator):
3333
3373
 
3334
3374
 
3335
3375
  class DataprocCancelOperationOperator(GoogleCloudBaseOperator):
3336
- """Cancel the batch workload resource.
3376
+ """
3377
+ Cancel the batch workload resource.
3337
3378
 
3338
3379
  :param operation_name: Required. The name of the operation resource to be cancelled.
3339
3380
  :param region: Required. The Cloud Dataproc region in which to handle the request.
@@ -147,7 +147,8 @@ class DataprocMetastoreDetailedLink(BaseOperatorLink):
147
147
 
148
148
 
149
149
  class DataprocMetastoreCreateBackupOperator(GoogleCloudBaseOperator):
150
- """Create a new backup in a given project and location.
150
+ """
151
+ Create a new backup in a given project and location.
151
152
 
152
153
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
153
154
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -261,7 +262,8 @@ class DataprocMetastoreCreateBackupOperator(GoogleCloudBaseOperator):
261
262
 
262
263
 
263
264
  class DataprocMetastoreCreateMetadataImportOperator(GoogleCloudBaseOperator):
264
- """Create a new MetadataImport in a given project and location.
265
+ """
266
+ Create a new MetadataImport in a given project and location.
265
267
 
266
268
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
267
269
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -361,7 +363,8 @@ class DataprocMetastoreCreateMetadataImportOperator(GoogleCloudBaseOperator):
361
363
 
362
364
 
363
365
  class DataprocMetastoreCreateServiceOperator(GoogleCloudBaseOperator):
364
- """Create a metastore service in a project and location.
366
+ """
367
+ Create a metastore service in a project and location.
365
368
 
366
369
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
367
370
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
@@ -462,7 +465,8 @@ class DataprocMetastoreCreateServiceOperator(GoogleCloudBaseOperator):
462
465
 
463
466
 
464
467
  class DataprocMetastoreDeleteBackupOperator(GoogleCloudBaseOperator):
465
- """Delete a single backup.
468
+ """
469
+ Delete a single backup.
466
470
 
467
471
  :param project_id: Required. The ID of the Google Cloud project that the backup belongs to.
468
472
  :param region: Required. The ID of the Google Cloud region that the backup belongs to.
@@ -546,7 +550,8 @@ class DataprocMetastoreDeleteBackupOperator(GoogleCloudBaseOperator):
546
550
 
547
551
 
548
552
  class DataprocMetastoreDeleteServiceOperator(GoogleCloudBaseOperator):
549
- """Delete a single service.
553
+ """
554
+ Delete a single service.
550
555
 
551
556
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
552
557
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
@@ -614,7 +619,8 @@ class DataprocMetastoreDeleteServiceOperator(GoogleCloudBaseOperator):
614
619
 
615
620
 
616
621
  class DataprocMetastoreExportMetadataOperator(GoogleCloudBaseOperator):
617
- """Export metadata from a service.
622
+ """
623
+ Export metadata from a service.
618
624
 
619
625
  :param destination_gcs_folder: A Cloud Storage URI of a folder, in the format
620
626
  ``gs://<bucket_name>/<path_inside_bucket>``. A sub-folder
@@ -706,7 +712,8 @@ class DataprocMetastoreExportMetadataOperator(GoogleCloudBaseOperator):
706
712
  return destination_uri[5:] if destination_uri.startswith("gs://") else destination_uri
707
713
 
708
714
  def _wait_for_export_metadata(self, hook: DataprocMetastoreHook):
709
- """Check that export was created successfully.
715
+ """
716
+ Check that export was created successfully.
710
717
 
711
718
  This is a workaround to an issue parsing result to MetadataExport inside
712
719
  the SDK.
@@ -732,7 +739,8 @@ class DataprocMetastoreExportMetadataOperator(GoogleCloudBaseOperator):
732
739
 
733
740
 
734
741
  class DataprocMetastoreGetServiceOperator(GoogleCloudBaseOperator):
735
- """Get the details of a single service.
742
+ """
743
+ Get the details of a single service.
736
744
 
737
745
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
738
746
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
@@ -804,7 +812,8 @@ class DataprocMetastoreGetServiceOperator(GoogleCloudBaseOperator):
804
812
 
805
813
 
806
814
  class DataprocMetastoreListBackupsOperator(GoogleCloudBaseOperator):
807
- """List backups in a service.
815
+ """
816
+ List backups in a service.
808
817
 
809
818
  :param project_id: Required. The ID of the Google Cloud project that the backup belongs to.
810
819
  :param region: Required. The ID of the Google Cloud region that the backup belongs to.
@@ -888,7 +897,8 @@ class DataprocMetastoreListBackupsOperator(GoogleCloudBaseOperator):
888
897
 
889
898
 
890
899
  class DataprocMetastoreRestoreServiceOperator(GoogleCloudBaseOperator):
891
- """Restore a service from a backup.
900
+ """
901
+ Restore a service from a backup.
892
902
 
893
903
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
894
904
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -992,7 +1002,8 @@ class DataprocMetastoreRestoreServiceOperator(GoogleCloudBaseOperator):
992
1002
  DataprocMetastoreLink.persist(context=context, task_instance=self, url=METASTORE_SERVICE_LINK)
993
1003
 
994
1004
  def _wait_for_restore_service(self, hook: DataprocMetastoreHook):
995
- """Check that export was created successfully.
1005
+ """
1006
+ Check that export was created successfully.
996
1007
 
997
1008
  This is a workaround to an issue parsing result to MetadataExport inside
998
1009
  the SDK.
@@ -1016,7 +1027,8 @@ class DataprocMetastoreRestoreServiceOperator(GoogleCloudBaseOperator):
1016
1027
 
1017
1028
 
1018
1029
  class DataprocMetastoreUpdateServiceOperator(GoogleCloudBaseOperator):
1019
- """Update the parameters of a single service.
1030
+ """
1031
+ Update the parameters of a single service.
1020
1032
 
1021
1033
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
1022
1034
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -739,7 +739,8 @@ class GCSTimeSpanFileTransformOperator(GoogleCloudBaseOperator):
739
739
 
740
740
  @staticmethod
741
741
  def interpolate_prefix(prefix: str, dt: datetime.datetime) -> str | None:
742
- """Interpolate prefix with datetime.
742
+ """
743
+ Interpolate prefix with datetime.
743
744
 
744
745
  :param prefix: The prefix to interpolate
745
746
  :param dt: The datetime to interpolate
@@ -51,7 +51,8 @@ if TYPE_CHECKING:
51
51
 
52
52
 
53
53
  class PubSubCreateTopicOperator(GoogleCloudBaseOperator):
54
- """Create a PubSub topic.
54
+ """
55
+ Create a PubSub topic.
55
56
 
56
57
  .. seealso::
57
58
  For more information on how to use this operator, take a look at the guide:
@@ -184,7 +185,8 @@ class PubSubCreateTopicOperator(GoogleCloudBaseOperator):
184
185
 
185
186
 
186
187
  class PubSubCreateSubscriptionOperator(GoogleCloudBaseOperator):
187
- """Create a PubSub subscription.
188
+ """
189
+ Create a PubSub subscription.
188
190
 
189
191
  .. seealso::
190
192
  For more information on how to use this operator, take a look at the guide:
@@ -393,7 +395,8 @@ class PubSubCreateSubscriptionOperator(GoogleCloudBaseOperator):
393
395
 
394
396
 
395
397
  class PubSubDeleteTopicOperator(GoogleCloudBaseOperator):
396
- """Delete a PubSub topic.
398
+ """
399
+ Delete a PubSub topic.
397
400
 
398
401
  .. seealso::
399
402
  For more information on how to use this operator, take a look at the guide:
@@ -491,7 +494,8 @@ class PubSubDeleteTopicOperator(GoogleCloudBaseOperator):
491
494
 
492
495
 
493
496
  class PubSubDeleteSubscriptionOperator(GoogleCloudBaseOperator):
494
- """Delete a PubSub subscription.
497
+ """
498
+ Delete a PubSub subscription.
495
499
 
496
500
  .. seealso::
497
501
  For more information on how to use this operator, take a look at the guide:
@@ -591,7 +595,8 @@ class PubSubDeleteSubscriptionOperator(GoogleCloudBaseOperator):
591
595
 
592
596
 
593
597
  class PubSubPublishMessageOperator(GoogleCloudBaseOperator):
594
- """Publish messages to a PubSub topic.
598
+ """
599
+ Publish messages to a PubSub topic.
595
600
 
596
601
  .. seealso::
597
602
  For more information on how to use this operator, take a look at the guide:
@@ -656,9 +656,9 @@ class DeleteAutoMLTrainingJobOperator(GoogleCloudBaseOperator):
656
656
  impersonation_chain=self.impersonation_chain,
657
657
  )
658
658
  try:
659
- self.log.info("Deleting Auto ML training pipeline: %s", self.training_pipeline)
659
+ self.log.info("Deleting Auto ML training pipeline: %s", self.training_pipeline_id)
660
660
  training_pipeline_operation = hook.delete_training_pipeline(
661
- training_pipeline=self.training_pipeline,
661
+ training_pipeline=self.training_pipeline_id,
662
662
  region=self.region,
663
663
  project_id=self.project_id,
664
664
  retry=self.retry,
@@ -668,7 +668,7 @@ class DeleteAutoMLTrainingJobOperator(GoogleCloudBaseOperator):
668
668
  hook.wait_for_operation(timeout=self.timeout, operation=training_pipeline_operation)
669
669
  self.log.info("Training pipeline was deleted.")
670
670
  except NotFound:
671
- self.log.info("The Training Pipeline ID %s does not exist.", self.training_pipeline)
671
+ self.log.info("The Training Pipeline ID %s does not exist.", self.training_pipeline_id)
672
672
 
673
673
 
674
674
  class ListAutoMLTrainingJobOperator(GoogleCloudBaseOperator):
@@ -182,7 +182,8 @@ class CustomTrainingJobBaseOperator(GoogleCloudBaseOperator):
182
182
 
183
183
 
184
184
  class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
185
- """Create Custom Container Training job.
185
+ """
186
+ Create Custom Container Training job.
186
187
 
187
188
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
188
189
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -659,7 +660,8 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
659
660
 
660
661
 
661
662
  class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
662
- """Create Custom Python Package Training job.
663
+ """
664
+ Create Custom Python Package Training job.
663
665
 
664
666
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
665
667
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -1136,7 +1138,8 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator
1136
1138
 
1137
1139
 
1138
1140
  class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
1139
- """Create a Custom Training Job pipeline.
1141
+ """
1142
+ Create a Custom Training Job pipeline.
1140
1143
 
1141
1144
  :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
1142
1145
  :param region: Required. The ID of the Google Cloud region that the service belongs to.
@@ -1692,9 +1695,9 @@ class DeleteCustomTrainingJobOperator(GoogleCloudBaseOperator):
1692
1695
  impersonation_chain=self.impersonation_chain,
1693
1696
  )
1694
1697
  try:
1695
- self.log.info("Deleting custom training pipeline: %s", self.training_pipeline)
1698
+ self.log.info("Deleting custom training pipeline: %s", self.training_pipeline_id)
1696
1699
  training_pipeline_operation = hook.delete_training_pipeline(
1697
- training_pipeline=self.training_pipeline,
1700
+ training_pipeline=self.training_pipeline_id,
1698
1701
  region=self.region,
1699
1702
  project_id=self.project_id,
1700
1703
  retry=self.retry,
@@ -1704,11 +1707,11 @@ class DeleteCustomTrainingJobOperator(GoogleCloudBaseOperator):
1704
1707
  hook.wait_for_operation(timeout=self.timeout, operation=training_pipeline_operation)
1705
1708
  self.log.info("Training pipeline was deleted.")
1706
1709
  except NotFound:
1707
- self.log.info("The Training Pipeline ID %s does not exist.", self.training_pipeline)
1710
+ self.log.info("The Training Pipeline ID %s does not exist.", self.training_pipeline_id)
1708
1711
  try:
1709
- self.log.info("Deleting custom job: %s", self.custom_job)
1712
+ self.log.info("Deleting custom job: %s", self.custom_job_id)
1710
1713
  custom_job_operation = hook.delete_custom_job(
1711
- custom_job=self.custom_job,
1714
+ custom_job=self.custom_job_id,
1712
1715
  region=self.region,
1713
1716
  project_id=self.project_id,
1714
1717
  retry=self.retry,
@@ -1718,7 +1721,7 @@ class DeleteCustomTrainingJobOperator(GoogleCloudBaseOperator):
1718
1721
  hook.wait_for_operation(timeout=self.timeout, operation=custom_job_operation)
1719
1722
  self.log.info("Custom job was deleted.")
1720
1723
  except NotFound:
1721
- self.log.info("The Custom Job ID %s does not exist.", self.custom_job)
1724
+ self.log.info("The Custom Job ID %s does not exist.", self.custom_job_id)
1722
1725
 
1723
1726
 
1724
1727
  class ListCustomTrainingJobOperator(GoogleCloudBaseOperator):