apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.3.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (257)
  1. airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
  2. airflow/providers/google/__init__.py +3 -3
  3. airflow/providers/google/ads/hooks/ads.py +39 -6
  4. airflow/providers/google/ads/operators/ads.py +2 -2
  5. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
  6. airflow/providers/google/assets/gcs.py +1 -11
  7. airflow/providers/google/cloud/bundles/__init__.py +16 -0
  8. airflow/providers/google/cloud/bundles/gcs.py +161 -0
  9. airflow/providers/google/cloud/hooks/alloy_db.py +1 -1
  10. airflow/providers/google/cloud/hooks/bigquery.py +176 -293
  11. airflow/providers/google/cloud/hooks/cloud_batch.py +1 -1
  12. airflow/providers/google/cloud/hooks/cloud_build.py +1 -1
  13. airflow/providers/google/cloud/hooks/cloud_composer.py +288 -15
  14. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  15. airflow/providers/google/cloud/hooks/cloud_memorystore.py +1 -1
  16. airflow/providers/google/cloud/hooks/cloud_run.py +18 -10
  17. airflow/providers/google/cloud/hooks/cloud_sql.py +102 -23
  18. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +29 -7
  19. airflow/providers/google/cloud/hooks/compute.py +1 -1
  20. airflow/providers/google/cloud/hooks/compute_ssh.py +6 -2
  21. airflow/providers/google/cloud/hooks/datacatalog.py +10 -1
  22. airflow/providers/google/cloud/hooks/dataflow.py +72 -95
  23. airflow/providers/google/cloud/hooks/dataform.py +1 -1
  24. airflow/providers/google/cloud/hooks/datafusion.py +21 -19
  25. airflow/providers/google/cloud/hooks/dataplex.py +2 -2
  26. airflow/providers/google/cloud/hooks/dataprep.py +1 -1
  27. airflow/providers/google/cloud/hooks/dataproc.py +73 -72
  28. airflow/providers/google/cloud/hooks/dataproc_metastore.py +1 -1
  29. airflow/providers/google/cloud/hooks/dlp.py +1 -1
  30. airflow/providers/google/cloud/hooks/functions.py +1 -1
  31. airflow/providers/google/cloud/hooks/gcs.py +112 -15
  32. airflow/providers/google/cloud/hooks/gdm.py +1 -1
  33. airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
  34. airflow/providers/google/cloud/hooks/kubernetes_engine.py +3 -3
  35. airflow/providers/google/cloud/hooks/looker.py +6 -2
  36. airflow/providers/google/cloud/hooks/managed_kafka.py +1 -1
  37. airflow/providers/google/cloud/hooks/mlengine.py +4 -3
  38. airflow/providers/google/cloud/hooks/pubsub.py +3 -0
  39. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  40. airflow/providers/google/cloud/hooks/spanner.py +74 -9
  41. airflow/providers/google/cloud/hooks/stackdriver.py +11 -9
  42. airflow/providers/google/cloud/hooks/tasks.py +1 -1
  43. airflow/providers/google/cloud/hooks/translate.py +2 -2
  44. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +2 -210
  45. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +3 -3
  46. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +28 -2
  47. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  48. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +308 -8
  49. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
  50. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +1 -1
  51. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +1 -1
  52. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +1 -1
  53. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
  54. airflow/providers/google/cloud/hooks/vision.py +3 -3
  55. airflow/providers/google/cloud/hooks/workflows.py +1 -1
  56. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  57. airflow/providers/google/cloud/links/base.py +77 -13
  58. airflow/providers/google/cloud/links/bigquery.py +0 -47
  59. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  60. airflow/providers/google/cloud/links/bigtable.py +0 -48
  61. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  62. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  63. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  64. airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
  65. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  66. airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
  67. airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
  68. airflow/providers/google/cloud/links/compute.py +0 -58
  69. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  70. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  71. airflow/providers/google/cloud/links/dataflow.py +0 -34
  72. airflow/providers/google/cloud/links/dataform.py +0 -64
  73. airflow/providers/google/cloud/links/datafusion.py +1 -96
  74. airflow/providers/google/cloud/links/dataplex.py +0 -154
  75. airflow/providers/google/cloud/links/dataprep.py +0 -24
  76. airflow/providers/google/cloud/links/dataproc.py +11 -95
  77. airflow/providers/google/cloud/links/datastore.py +0 -31
  78. airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
  79. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  80. airflow/providers/google/cloud/links/mlengine.py +0 -70
  81. airflow/providers/google/cloud/links/pubsub.py +0 -32
  82. airflow/providers/google/cloud/links/spanner.py +0 -33
  83. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  84. airflow/providers/google/cloud/links/translate.py +17 -187
  85. airflow/providers/google/cloud/links/vertex_ai.py +28 -195
  86. airflow/providers/google/cloud/links/workflows.py +0 -52
  87. airflow/providers/google/cloud/log/gcs_task_handler.py +58 -22
  88. airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
  89. airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
  90. airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
  91. airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
  92. airflow/providers/google/cloud/openlineage/facets.py +102 -1
  93. airflow/providers/google/cloud/openlineage/mixins.py +10 -8
  94. airflow/providers/google/cloud/openlineage/utils.py +15 -1
  95. airflow/providers/google/cloud/operators/alloy_db.py +71 -56
  96. airflow/providers/google/cloud/operators/bigquery.py +73 -636
  97. airflow/providers/google/cloud/operators/bigquery_dts.py +4 -6
  98. airflow/providers/google/cloud/operators/bigtable.py +37 -8
  99. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  100. airflow/providers/google/cloud/operators/cloud_batch.py +3 -3
  101. airflow/providers/google/cloud/operators/cloud_build.py +76 -33
  102. airflow/providers/google/cloud/operators/cloud_composer.py +129 -41
  103. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  104. airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
  105. airflow/providers/google/cloud/operators/cloud_run.py +24 -6
  106. airflow/providers/google/cloud/operators/cloud_sql.py +8 -17
  107. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +93 -12
  108. airflow/providers/google/cloud/operators/compute.py +9 -41
  109. airflow/providers/google/cloud/operators/datacatalog.py +157 -21
  110. airflow/providers/google/cloud/operators/dataflow.py +40 -16
  111. airflow/providers/google/cloud/operators/dataform.py +15 -5
  112. airflow/providers/google/cloud/operators/datafusion.py +42 -21
  113. airflow/providers/google/cloud/operators/dataplex.py +194 -110
  114. airflow/providers/google/cloud/operators/dataprep.py +1 -5
  115. airflow/providers/google/cloud/operators/dataproc.py +80 -36
  116. airflow/providers/google/cloud/operators/dataproc_metastore.py +97 -89
  117. airflow/providers/google/cloud/operators/datastore.py +23 -7
  118. airflow/providers/google/cloud/operators/dlp.py +6 -29
  119. airflow/providers/google/cloud/operators/functions.py +17 -8
  120. airflow/providers/google/cloud/operators/gcs.py +12 -9
  121. airflow/providers/google/cloud/operators/gen_ai.py +389 -0
  122. airflow/providers/google/cloud/operators/kubernetes_engine.py +62 -100
  123. airflow/providers/google/cloud/operators/looker.py +2 -2
  124. airflow/providers/google/cloud/operators/managed_kafka.py +108 -53
  125. airflow/providers/google/cloud/operators/natural_language.py +1 -1
  126. airflow/providers/google/cloud/operators/pubsub.py +68 -15
  127. airflow/providers/google/cloud/operators/spanner.py +26 -13
  128. airflow/providers/google/cloud/operators/speech_to_text.py +2 -3
  129. airflow/providers/google/cloud/operators/stackdriver.py +1 -9
  130. airflow/providers/google/cloud/operators/tasks.py +1 -12
  131. airflow/providers/google/cloud/operators/text_to_speech.py +2 -3
  132. airflow/providers/google/cloud/operators/translate.py +41 -17
  133. airflow/providers/google/cloud/operators/translate_speech.py +2 -3
  134. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
  135. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +30 -10
  136. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +55 -27
  137. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
  138. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
  139. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  140. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
  141. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -115
  142. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +12 -10
  143. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
  144. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +31 -8
  145. airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
  146. airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
  147. airflow/providers/google/cloud/operators/vision.py +2 -2
  148. airflow/providers/google/cloud/operators/workflows.py +18 -15
  149. airflow/providers/google/cloud/secrets/secret_manager.py +3 -2
  150. airflow/providers/google/cloud/sensors/bigquery.py +3 -3
  151. airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -3
  152. airflow/providers/google/cloud/sensors/bigtable.py +11 -4
  153. airflow/providers/google/cloud/sensors/cloud_composer.py +533 -30
  154. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -3
  155. airflow/providers/google/cloud/sensors/dataflow.py +26 -10
  156. airflow/providers/google/cloud/sensors/dataform.py +2 -3
  157. airflow/providers/google/cloud/sensors/datafusion.py +4 -5
  158. airflow/providers/google/cloud/sensors/dataplex.py +2 -3
  159. airflow/providers/google/cloud/sensors/dataprep.py +2 -2
  160. airflow/providers/google/cloud/sensors/dataproc.py +2 -3
  161. airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -3
  162. airflow/providers/google/cloud/sensors/gcs.py +4 -5
  163. airflow/providers/google/cloud/sensors/looker.py +2 -3
  164. airflow/providers/google/cloud/sensors/pubsub.py +4 -5
  165. airflow/providers/google/cloud/sensors/tasks.py +2 -2
  166. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -3
  167. airflow/providers/google/cloud/sensors/workflows.py +2 -3
  168. airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
  169. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
  170. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +4 -3
  171. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
  172. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +10 -5
  173. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
  174. airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
  175. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
  176. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
  177. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  178. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +3 -3
  179. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +4 -4
  180. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +21 -13
  181. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +4 -3
  182. airflow/providers/google/cloud/transfers/gcs_to_local.py +6 -4
  183. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +11 -5
  184. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
  185. airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
  186. airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
  187. airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
  188. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  189. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
  190. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
  191. airflow/providers/google/cloud/transfers/s3_to_gcs.py +13 -7
  192. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
  193. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +14 -5
  194. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
  195. airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
  196. airflow/providers/google/cloud/triggers/bigquery.py +76 -35
  197. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  198. airflow/providers/google/cloud/triggers/cloud_composer.py +303 -47
  199. airflow/providers/google/cloud/triggers/cloud_run.py +3 -3
  200. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +92 -2
  201. airflow/providers/google/cloud/triggers/dataflow.py +122 -0
  202. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  203. airflow/providers/google/cloud/triggers/dataplex.py +14 -2
  204. airflow/providers/google/cloud/triggers/dataproc.py +123 -53
  205. airflow/providers/google/cloud/triggers/kubernetes_engine.py +47 -28
  206. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  207. airflow/providers/google/cloud/triggers/pubsub.py +15 -19
  208. airflow/providers/google/cloud/triggers/vertex_ai.py +1 -1
  209. airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
  210. airflow/providers/google/cloud/utils/credentials_provider.py +2 -2
  211. airflow/providers/google/cloud/utils/field_sanitizer.py +1 -1
  212. airflow/providers/google/cloud/utils/field_validator.py +2 -3
  213. airflow/providers/google/common/auth_backend/google_openid.py +4 -4
  214. airflow/providers/google/common/deprecated.py +2 -1
  215. airflow/providers/google/common/hooks/base_google.py +27 -9
  216. airflow/providers/google/common/hooks/operation_helpers.py +1 -1
  217. airflow/providers/google/common/links/storage.py +0 -22
  218. airflow/providers/google/common/utils/get_secret.py +31 -0
  219. airflow/providers/google/common/utils/id_token_credentials.py +3 -4
  220. airflow/providers/google/firebase/hooks/firestore.py +1 -1
  221. airflow/providers/google/firebase/operators/firestore.py +3 -3
  222. airflow/providers/google/get_provider_info.py +56 -52
  223. airflow/providers/google/go_module_utils.py +35 -3
  224. airflow/providers/google/leveldb/hooks/leveldb.py +27 -2
  225. airflow/providers/google/leveldb/operators/leveldb.py +2 -2
  226. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +1 -1
  227. airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
  228. airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
  229. airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
  230. airflow/providers/google/marketing_platform/operators/analytics_admin.py +2 -3
  231. airflow/providers/google/marketing_platform/operators/campaign_manager.py +6 -6
  232. airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
  233. airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
  234. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
  235. airflow/providers/google/marketing_platform/sensors/display_video.py +3 -64
  236. airflow/providers/google/suite/hooks/calendar.py +2 -2
  237. airflow/providers/google/suite/hooks/sheets.py +16 -2
  238. airflow/providers/google/suite/operators/sheets.py +8 -3
  239. airflow/providers/google/suite/sensors/drive.py +2 -2
  240. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +3 -3
  241. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  242. airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
  243. airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
  244. airflow/providers/google/version_compat.py +15 -1
  245. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/METADATA +90 -46
  246. apache_airflow_providers_google-19.3.0.dist-info/RECORD +331 -0
  247. apache_airflow_providers_google-19.3.0.dist-info/licenses/NOTICE +5 -0
  248. airflow/providers/google/cloud/hooks/automl.py +0 -673
  249. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  250. airflow/providers/google/cloud/links/automl.py +0 -193
  251. airflow/providers/google/cloud/operators/automl.py +0 -1362
  252. airflow/providers/google/cloud/operators/life_sciences.py +0 -119
  253. airflow/providers/google/cloud/operators/mlengine.py +0 -112
  254. apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
  255. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/WHEEL +0 -0
  256. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.3.0.dist-info}/entry_points.txt +0 -0
  257. {airflow/providers/google → apache_airflow_providers_google-19.3.0.dist-info/licenses}/LICENSE +0 -0
@@ -31,7 +31,7 @@ from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel
  from google.protobuf.field_mask_pb2 import FieldMask
  from googleapiclient.errors import HttpError

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook

  if TYPE_CHECKING:
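
The import swap above recurs across the hooks in this release: AirflowException is now taken from the airflow.providers.common.compat.sdk shim instead of airflow.exceptions, which lets the provider resolve the class the same way on newer (Task SDK) and older Airflow installations. A minimal sketch of the fallback idiom such a shim stands in for; the try/except below is illustrative only, since the provider itself simply imports from the shim:

# Hedged sketch: prefer the common.compat shim, fall back to core Airflow if the
# apache-airflow-providers-common-compat package is not installed (assumption).
try:
    from airflow.providers.common.compat.sdk import AirflowException
except ImportError:
    from airflow.exceptions import AirflowException


def require(condition: bool, message: str) -> None:
    """Raise the resolved AirflowException when a precondition is not met."""
    if not condition:
        raise AirflowException(message)
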
@@ -261,8 +261,9 @@ class StackdriverHook(GoogleBaseHook):
  channel_name_map = {}

  for channel in channels:
+ # This field is immutable, illegal to specifying non-default UNVERIFIED or VERIFIED, so setting default
  channel.verification_status = (
- monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
+ monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED # type: ignore[assignment]
  )

  if channel.name in existing_channels:
@@ -274,7 +275,7 @@ class StackdriverHook(GoogleBaseHook):
  )
  else:
  old_name = channel.name
- channel.name = None
+ del channel.name
  new_channel = channel_client.create_notification_channel(
  request={"name": f"projects/{project_id}", "notification_channel": channel},
  retry=retry,
@@ -284,8 +285,8 @@ class StackdriverHook(GoogleBaseHook):
  channel_name_map[old_name] = new_channel.name

  for policy in policies_:
- policy.creation_record = None
- policy.mutation_record = None
+ del policy.creation_record
+ del policy.mutation_record

  for i, channel in enumerate(policy.notification_channels):
  new_channel = channel_name_map.get(channel)
@@ -301,9 +302,9 @@ class StackdriverHook(GoogleBaseHook):
  metadata=metadata,
  )
  else:
- policy.name = None
+ del policy.name
  for condition in policy.conditions:
- condition.name = None
+ del condition.name
  policy_client.create_alert_policy(
  request={"name": f"projects/{project_id}", "alert_policy": policy},
  retry=retry,
@@ -531,8 +532,9 @@ class StackdriverHook(GoogleBaseHook):
  channels_list.append(NotificationChannel(**channel))

  for channel in channels_list:
+ # This field is immutable, illegal to specifying non-default UNVERIFIED or VERIFIED, so setting default
  channel.verification_status = (
- monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED
+ monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED # type: ignore[assignment]
  )

  if channel.name in existing_channels:
@@ -544,7 +546,7 @@ class StackdriverHook(GoogleBaseHook):
  )
  else:
  old_name = channel.name
- channel.name = None
+ del channel.name
  new_channel = channel_client.create_notification_channel(
  request={"name": f"projects/{project_id}", "notification_channel": channel},
  retry=retry,
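
The repeated switch from assignments like channel.name = None to del channel.name (and likewise for policy and condition names above) follows how the proto-plus message wrappers used by google-cloud-monitoring are cleared: deleting the attribute resets the field to its default, while assigning None to a scalar field is rejected by the underlying protobuf layer. A minimal standalone sketch of the idiom, with placeholder resource names:

from google.cloud import monitoring_v3

# A NotificationChannel proto-plus message with a placeholder server-assigned name.
channel = monitoring_v3.NotificationChannel(
    name="projects/my-project/notificationChannels/123",
)

# `del` clears the field back to its default value (an empty string for `name`),
# which is what the hook now does before re-creating channels and policies.
del channel.name
assert channel.name == ""

# Assigning None to a scalar proto field raises instead of clearing it,
# which is why the old `channel.name = None` pattern was replaced.
try:
    channel.name = None  # type: ignore[assignment]
except TypeError:
    pass
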
@@ -26,7 +26,7 @@ from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
  from google.cloud.tasks_v2 import CloudTasksClient
  from google.cloud.tasks_v2.types import Queue, Task

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.consts import CLIENT_INFO
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook

@@ -32,7 +32,7 @@ from google.cloud.translate_v2 import Client
  from google.cloud.translate_v3 import TranslationServiceClient
  from google.cloud.translate_v3.types.translation_service import GlossaryInputConfig

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.consts import CLIENT_INFO
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
  from airflow.providers.google.common.hooks.operation_helpers import OperationHelper
@@ -429,7 +429,7 @@ class TranslateHook(GoogleBaseHook, OperationHelper):
  project_id: str,
  location: str,
  retry: Retry | _MethodDefault = DEFAULT,
- timeout: float | _MethodDefault = DEFAULT,
+ timeout: float | None | _MethodDefault = DEFAULT,
  metadata: Sequence[tuple[str, str]] = (),
  ) -> automl_translation.Dataset:
  """
@@ -36,9 +36,9 @@ from google.cloud.aiplatform import (
  )
  from google.cloud.aiplatform_v1 import JobServiceClient, PipelineServiceClient

- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+ from airflow.exceptions import AirflowProviderDeprecationWarning
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.consts import CLIENT_INFO
- from airflow.providers.google.common.deprecated import deprecated
  from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
  from airflow.providers.google.common.hooks.operation_helpers import OperationHelper

@@ -185,42 +185,6 @@ class AutoMLHook(GoogleBaseHook, OperationHelper):
  model_encryption_spec_key_name=model_encryption_spec_key_name,
  )

- @deprecated(
- planned_removal_date="June 15, 2025",
- category=AirflowProviderDeprecationWarning,
- reason="Deprecation of AutoMLText API",
- )
- def get_auto_ml_text_training_job(
- self,
- display_name: str,
- prediction_type: str,
- multi_label: bool = False,
- sentiment_max: int = 10,
- project: str | None = None,
- location: str | None = None,
- labels: dict[str, str] | None = None,
- training_encryption_spec_key_name: str | None = None,
- model_encryption_spec_key_name: str | None = None,
- ) -> AutoMLTextTrainingJob:
- """
- Return AutoMLTextTrainingJob object.
-
- WARNING: Text creation API is deprecated since September 15, 2024
- (https://cloud.google.com/vertex-ai/docs/tutorials/text-classification-automl/overview).
- """
- return AutoMLTextTrainingJob(
- display_name=display_name,
- prediction_type=prediction_type,
- multi_label=multi_label,
- sentiment_max=sentiment_max,
- project=project,
- location=location,
- credentials=self.get_credentials(),
- labels=labels,
- training_encryption_spec_key_name=training_encryption_spec_key_name,
- model_encryption_spec_key_name=model_encryption_spec_key_name,
- )
-
  def get_auto_ml_video_training_job(
  self,
  display_name: str,
@@ -987,178 +951,6 @@ class AutoMLHook(GoogleBaseHook, OperationHelper):
  )
  return model, training_id

- @GoogleBaseHook.fallback_to_default_project_id
- @deprecated(
- planned_removal_date="September 15, 2025",
- category=AirflowProviderDeprecationWarning,
- reason="Deprecation of AutoMLText API",
- )
- def create_auto_ml_text_training_job(
- self,
- project_id: str,
- region: str,
- display_name: str,
- dataset: datasets.TextDataset,
- prediction_type: str,
- multi_label: bool = False,
- sentiment_max: int = 10,
- labels: dict[str, str] | None = None,
- training_encryption_spec_key_name: str | None = None,
- model_encryption_spec_key_name: str | None = None,
- training_fraction_split: float | None = None,
- validation_fraction_split: float | None = None,
- test_fraction_split: float | None = None,
- training_filter_split: str | None = None,
- validation_filter_split: str | None = None,
- test_filter_split: str | None = None,
- model_display_name: str | None = None,
- model_labels: dict[str, str] | None = None,
- sync: bool = True,
- parent_model: str | None = None,
- is_default_version: bool | None = None,
- model_version_aliases: list[str] | None = None,
- model_version_description: str | None = None,
- ) -> tuple[models.Model | None, str]:
- """
- Create an AutoML Text Training Job.
-
- WARNING: Text creation API is deprecated since September 15, 2024
- (https://cloud.google.com/vertex-ai/docs/tutorials/text-classification-automl/overview).
-
- :param project_id: Required. Project to run training in.
- :param region: Required. Location to run training in.
- :param display_name: Required. The user-defined name of this TrainingPipeline.
- :param dataset: Required. The dataset within the same Project from which data will be used to train
- the Model. The Dataset must use schema compatible with Model being trained, and what is
- compatible should be described in the used TrainingPipeline's [training_task_definition]
- [google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition].
- :param prediction_type: The type of prediction the Model is to produce, one of:
- "classification" - A classification model analyzes text data and returns a list of categories
- that apply to the text found in the data. Vertex AI offers both single-label and multi-label text
- classification models.
- "extraction" - An entity extraction model inspects text data for known entities referenced in the
- data and labels those entities in the text.
- "sentiment" - A sentiment analysis model inspects text data and identifies the prevailing
- emotional opinion within it, especially to determine a writer's attitude as positive, negative,
- or neutral.
- :param parent_model: Optional. The resource name or model ID of an existing model.
- The new model uploaded by this job will be a version of `parent_model`.
- Only set this field when training a new version of an existing model.
- :param is_default_version: Optional. When set to True, the newly uploaded model version will
- automatically have alias "default" included. Subsequent uses of
- the model produced by this job without a version specified will
- use this "default" version.
- When set to False, the "default" alias will not be moved.
- Actions targeting the model version produced by this job will need
- to specifically reference this version by ID or alias.
- New model uploads, i.e. version 1, will always be "default" aliased.
- :param model_version_aliases: Optional. User provided version aliases so that the model version
- uploaded by this job can be referenced via alias instead of
- auto-generated version ID. A default version alias will be created
- for the first version of the model.
- The format is [a-z][a-zA-Z0-9-]{0,126}[a-z0-9]
- :param model_version_description: Optional. The description of the model version
- being uploaded by this job.
- :param multi_label: Required and only applicable for text classification task. If false, a
- single-label (multi-class) Model will be trained (i.e. assuming that for each text snippet just
- up to one annotation may be applicable). If true, a multi-label Model will be trained (i.e.
- assuming that for each text snippet multiple annotations may be applicable).
- :param sentiment_max: Required and only applicable for sentiment task. A sentiment is expressed as an
- integer ordinal, where higher value means a more positive sentiment. The range of sentiments that
- will be used is between 0 and sentimentMax (inclusive on both ends), and all the values in the
- range must be represented in the dataset before a model can be created. Only the Annotations with
- this sentimentMax will be used for training. sentimentMax value must be between 1 and 10
- (inclusive).
- :param labels: Optional. The labels with user-defined metadata to organize TrainingPipelines. Label
- keys and values can be no longer than 64 characters (Unicode codepoints), can only contain
- lowercase letters, numeric characters, underscores and dashes. International characters are
- allowed. See https://goo.gl/xmQnxf for more information and examples of labels.
- :param training_encryption_spec_key_name: Optional. The Cloud KMS resource identifier of the customer
- managed encryption key used to protect the training pipeline. Has the form:
- ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
- The key needs to be in the same region as where the compute resource is created.
- If set, this TrainingPipeline will be secured by this key.
- Note: Model trained by this TrainingPipeline is also secured by this key if ``model_to_upload``
- is not set separately.
- :param model_encryption_spec_key_name: Optional. The Cloud KMS resource identifier of the customer
- managed encryption key used to protect the model. Has the form:
- ``projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key``.
- The key needs to be in the same region as where the compute resource is created.
- If set, the trained Model will be secured by this key.
- :param training_fraction_split: Optional. The fraction of the input data that is to be used to train
- the Model. This is ignored if Dataset is not provided.
- :param validation_fraction_split: Optional. The fraction of the input data that is to be used to
- validate the Model. This is ignored if Dataset is not provided.
- :param test_fraction_split: Optional. The fraction of the input data that is to be used to evaluate
- the Model. This is ignored if Dataset is not provided.
- :param training_filter_split: Optional. A filter on DataItems of the Dataset. DataItems that match
- this filter are used to train the Model. A filter with same syntax as the one used in
- DatasetService.ListDataItems may be used. If a single DataItem is matched by more than one of the
- FilterSplit filters, then it is assigned to the first set that applies to it in the training,
- validation, test order. This is ignored if Dataset is not provided.
- :param validation_filter_split: Optional. A filter on DataItems of the Dataset. DataItems that match
- this filter are used to validate the Model. A filter with same syntax as the one used in
- DatasetService.ListDataItems may be used. If a single DataItem is matched by more than one of the
- FilterSplit filters, then it is assigned to the first set that applies to it in the training,
- validation, test order. This is ignored if Dataset is not provided.
- :param test_filter_split: Optional. A filter on DataItems of the Dataset. DataItems that match this
- filter are used to test the Model. A filter with same syntax as the one used in
- DatasetService.ListDataItems may be used. If a single DataItem is matched by more than one of the
- FilterSplit filters, then it is assigned to the first set that applies to it in the training,
- validation, test order. This is ignored if Dataset is not provided.
- :param model_display_name: Optional. The display name of the managed Vertex AI Model. The name can be
- up to 128 characters long and can consist of any UTF-8 characters.
- If not provided upon creation, the job's display_name is used.
- :param model_labels: Optional. The labels with user-defined metadata to organize your Models. Label
- keys and values can be no longer than 64 characters (Unicode codepoints), can only contain
- lowercase letters, numeric characters, underscores and dashes. International characters are
- allowed. See https://goo.gl/xmQnxf for more information and examples of labels.
- :param sync: Whether to execute this method synchronously. If False, this method will be executed in
- concurrent Future and any downstream object will be immediately returned and synced when the
- Future has completed.
- """
- self._job = AutoMLTextTrainingJob(
- display_name=display_name,
- prediction_type=prediction_type,
- multi_label=multi_label,
- sentiment_max=sentiment_max,
- project=project_id,
- location=region,
- credentials=self.get_credentials(),
- labels=labels,
- training_encryption_spec_key_name=training_encryption_spec_key_name,
- model_encryption_spec_key_name=model_encryption_spec_key_name,
- )
-
- if not self._job:
- raise AirflowException("AutoMLTextTrainingJob was not created")
-
- model = self._job.run(
- dataset=dataset, # type: ignore[arg-type]
- training_fraction_split=training_fraction_split, # type: ignore[call-arg]
- validation_fraction_split=validation_fraction_split, # type: ignore[call-arg]
- test_fraction_split=test_fraction_split,
- training_filter_split=training_filter_split,
- validation_filter_split=validation_filter_split,
- test_filter_split=test_filter_split, # type: ignore[call-arg]
- model_display_name=model_display_name,
- model_labels=model_labels,
- sync=sync,
- parent_model=parent_model,
- is_default_version=is_default_version,
- model_version_aliases=model_version_aliases,
- model_version_description=model_version_description,
- )
- training_id = self.extract_training_id(self._job.resource_name)
- if model:
- model.wait()
- else:
- self.log.warning(
- "Training did not produce a Managed Model returning None. AutoML Text Training "
- "Pipeline is not configured to upload a Model."
- )
- return model, training_id
-
  @GoogleBaseHook.fallback_to_default_project_id
  def create_auto_ml_video_training_job(
  self,
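
Both removals above delete AutoML text-training helpers that were already wrapped in the provider's deprecated decorator (visible in the removed lines) and whose planned removal dates have passed; per the removed docstrings, the upstream AutoMLText API has been deprecated since September 15, 2024. For reference, a minimal sketch of how that decorator marks a method for removal, modeled only on the usage shown in the removed code (ExampleHook and its method are hypothetical):

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.common.deprecated import deprecated


class ExampleHook:
    @deprecated(
        planned_removal_date="September 15, 2025",
        category=AirflowProviderDeprecationWarning,
        reason="Deprecation of AutoMLText API",
    )
    def create_text_training_job(self) -> None:
        """Emit AirflowProviderDeprecationWarning when called, then run the legacy code path."""
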
@@ -28,7 +28,7 @@ from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
  from google.cloud.aiplatform import BatchPredictionJob, Model, explain
  from google.cloud.aiplatform_v1 import JobServiceAsyncClient, JobServiceClient, JobState, types

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.consts import CLIENT_INFO
  from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
  from airflow.providers.google.common.hooks.operation_helpers import OperationHelper
@@ -110,7 +110,7 @@ class BatchPredictionJobHook(GoogleBaseHook, OperationHelper):
  :param project_id: Required. Project to run training in.
  :param region: Required. Location to run training in.
  :param job_display_name: Required. The user-defined name of the BatchPredictionJob. The name can be
- up to 128 characters long and can be consist of any UTF-8 characters.
+ up to 128 characters long and can consist of any UTF-8 characters.
  :param model_name: Required. A fully-qualified model resource name or model ID.
  :param instances_format: Required. The format in which instances are provided. Must be one of the
  formats listed in `Model.supported_input_storage_formats`. Default is "jsonl" when using
@@ -267,7 +267,7 @@ class BatchPredictionJobHook(GoogleBaseHook, OperationHelper):
  :param project_id: Required. Project to run training in.
  :param region: Required. Location to run training in.
  :param job_display_name: Required. The user-defined name of the BatchPredictionJob. The name can be
- up to 128 characters long and can be consist of any UTF-8 characters.
+ up to 128 characters long and can consist of any UTF-8 characters.
  :param model_name: Required. A fully-qualified model resource name or model ID.
  :param instances_format: Required. The format in which instances are provided. Must be one of the
  formats listed in `Model.supported_input_storage_formats`. Default is "jsonl" when using
@@ -42,7 +42,7 @@ from google.cloud.aiplatform_v1 import (
  types,
  )

- from airflow.exceptions import AirflowException
+ from airflow.providers.common.compat.sdk import AirflowException
  from airflow.providers.google.common.consts import CLIENT_INFO
  from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook, GoogleBaseHook
  from airflow.providers.google.common.hooks.operation_helpers import OperationHelper
@@ -55,7 +55,7 @@ if TYPE_CHECKING:
  from google.cloud.aiplatform_v1.services.pipeline_service.pagers import (
  ListTrainingPipelinesPager,
  )
- from google.cloud.aiplatform_v1.types import CustomJob, TrainingPipeline
+ from google.cloud.aiplatform_v1.types import CustomJob, PscInterfaceConfig, TrainingPipeline


  class CustomJobHook(GoogleBaseHook, OperationHelper):
@@ -317,6 +317,7 @@
  is_default_version: bool | None = None,
  model_version_aliases: list[str] | None = None,
  model_version_description: str | None = None,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> tuple[models.Model | None, str, str]:
  """Run a training pipeline job and wait until its completion."""
  model = job.run(
@@ -350,6 +351,7 @@
  is_default_version=is_default_version,
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
+ psc_interface_config=psc_interface_config,
  )
  training_id = self.extract_training_id(job.resource_name)
  custom_job_id = self.extract_custom_job_id(
@@ -574,6 +576,7 @@
  timestamp_split_column_name: str | None = None,
  tensorboard: str | None = None,
  sync=True,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> tuple[models.Model | None, str, str]:
  """
  Create Custom Container Training Job.
@@ -837,6 +840,8 @@
  :param sync: Whether to execute the AI Platform job synchronously. If False, this method
  will be executed in concurrent Future and any downstream object will
  be immediately returned and synced when the Future has completed.
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_container_training_job(
  project=project_id,
@@ -896,6 +901,7 @@
  is_default_version=is_default_version,
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
+ psc_interface_config=psc_interface_config,
  )

  return model, training_id, custom_job_id
@@ -958,6 +964,7 @@
  model_version_aliases: list[str] | None = None,
  model_version_description: str | None = None,
  sync=True,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> tuple[models.Model | None, str, str]:
  """
  Create Custom Python Package Training Job.
@@ -1220,6 +1227,8 @@
  :param sync: Whether to execute the AI Platform job synchronously. If False, this method
  will be executed in concurrent Future and any downstream object will
  be immediately returned and synced when the Future has completed.
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_python_package_training_job(
  project=project_id,
@@ -1280,6 +1289,7 @@
  is_default_version=is_default_version,
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
+ psc_interface_config=psc_interface_config,
  )

  return model, training_id, custom_job_id
@@ -1342,6 +1352,7 @@
  timestamp_split_column_name: str | None = None,
  tensorboard: str | None = None,
  sync=True,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> tuple[models.Model | None, str, str]:
  """
  Create Custom Training Job.
@@ -1604,6 +1615,8 @@
  :param sync: Whether to execute the AI Platform job synchronously. If False, this method
  will be executed in concurrent Future and any downstream object will
  be immediately returned and synced when the Future has completed.
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_training_job(
  project=project_id,
@@ -1664,6 +1677,7 @@
  is_default_version=is_default_version,
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
+ psc_interface_config=psc_interface_config,
  )

  return model, training_id, custom_job_id
@@ -1725,6 +1739,7 @@
  predefined_split_column_name: str | None = None,
  timestamp_split_column_name: str | None = None,
  tensorboard: str | None = None,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> CustomContainerTrainingJob:
  """
  Create and submit a Custom Container Training Job pipeline, then exit without waiting for it to complete.
@@ -1985,6 +2000,8 @@
  ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
  For more information on configuring your service account please visit:
  https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_container_training_job(
  project=project_id,
@@ -2043,6 +2060,7 @@
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
  sync=False,
+ psc_interface_config=psc_interface_config,
  )
  return self._job

@@ -2104,6 +2122,7 @@
  is_default_version: bool | None = None,
  model_version_aliases: list[str] | None = None,
  model_version_description: str | None = None,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> CustomPythonPackageTrainingJob:
  """
  Create and submit a Custom Python Package Training Job pipeline, then exit without waiting for it to complete.
@@ -2363,6 +2382,8 @@
  ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
  For more information on configuring your service account please visit:
  https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_python_package_training_job(
  project=project_id,
@@ -2422,6 +2443,7 @@
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
  sync=False,
+ psc_interface_config=psc_interface_config,
  )

  return self._job
@@ -2484,6 +2506,7 @@
  predefined_split_column_name: str | None = None,
  timestamp_split_column_name: str | None = None,
  tensorboard: str | None = None,
+ psc_interface_config: PscInterfaceConfig | None = None,
  ) -> CustomTrainingJob:
  """
  Create and submit a Custom Training Job pipeline, then exit without waiting for it to complete.
@@ -2747,6 +2770,8 @@
  ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
  For more information on configuring your service account please visit:
  https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+ :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+ training.
  """
  self._job = self.get_custom_training_job(
  project=project_id,
@@ -2806,6 +2831,7 @@
  model_version_aliases=model_version_aliases,
  model_version_description=model_version_description,
  sync=False,
+ psc_interface_config=psc_interface_config,
  )
  return self._job
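
The remaining custom_job.py hunks all thread a single new optional argument, psc_interface_config, from the CustomJobHook convenience methods down to the underlying google-cloud-aiplatform training job calls, so Vertex AI custom training can use a Private Service Connect interface. A hedged usage sketch; project, region, image URI, bucket, and network attachment values are placeholders, and only a few of the many available arguments are shown:

from google.cloud.aiplatform_v1.types import PscInterfaceConfig

from airflow.providers.google.cloud.hooks.vertex_ai.custom_job import CustomJobHook

hook = CustomJobHook(gcp_conn_id="google_cloud_default")

# Route training traffic through a Private Service Connect interface backed by a
# network attachment in the training project (placeholder resource name).
psc_config = PscInterfaceConfig(
    network_attachment="projects/my-project/regions/us-central1/networkAttachments/my-attachment",
)

# Per the diff, the hook returns (model, training_id, custom_job_id).
model, training_id, custom_job_id = hook.create_custom_container_training_job(
    project_id="my-project",
    region="us-central1",
    display_name="example-custom-training",
    container_uri="us-docker.pkg.dev/my-project/training/trainer:latest",
    staging_bucket="gs://my-staging-bucket",
    psc_interface_config=psc_config,
)
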