apache-airflow-providers-google 15.1.0rc1-py3-none-any.whl → 19.1.0rc1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their respective public registries, and is provided for informational purposes only.
Files changed (234)
  1. airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
  2. airflow/providers/google/__init__.py +3 -3
  3. airflow/providers/google/ads/hooks/ads.py +39 -5
  4. airflow/providers/google/ads/operators/ads.py +2 -2
  5. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
  6. airflow/providers/google/assets/gcs.py +1 -11
  7. airflow/providers/google/cloud/bundles/__init__.py +16 -0
  8. airflow/providers/google/cloud/bundles/gcs.py +161 -0
  9. airflow/providers/google/cloud/hooks/bigquery.py +166 -281
  10. airflow/providers/google/cloud/hooks/cloud_composer.py +287 -14
  11. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  12. airflow/providers/google/cloud/hooks/cloud_run.py +17 -9
  13. airflow/providers/google/cloud/hooks/cloud_sql.py +101 -22
  14. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +27 -6
  15. airflow/providers/google/cloud/hooks/compute_ssh.py +5 -1
  16. airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
  17. airflow/providers/google/cloud/hooks/dataflow.py +71 -94
  18. airflow/providers/google/cloud/hooks/datafusion.py +1 -1
  19. airflow/providers/google/cloud/hooks/dataplex.py +1 -1
  20. airflow/providers/google/cloud/hooks/dataprep.py +1 -1
  21. airflow/providers/google/cloud/hooks/dataproc.py +72 -71
  22. airflow/providers/google/cloud/hooks/gcs.py +111 -14
  23. airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
  24. airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
  25. airflow/providers/google/cloud/hooks/looker.py +6 -1
  26. airflow/providers/google/cloud/hooks/mlengine.py +3 -2
  27. airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
  28. airflow/providers/google/cloud/hooks/spanner.py +73 -8
  29. airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
  30. airflow/providers/google/cloud/hooks/translate.py +1 -1
  31. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -209
  32. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -2
  33. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +27 -1
  34. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  35. airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
  36. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
  37. airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
  38. airflow/providers/google/cloud/hooks/vision.py +2 -2
  39. airflow/providers/google/cloud/hooks/workflows.py +1 -1
  40. airflow/providers/google/cloud/links/alloy_db.py +0 -46
  41. airflow/providers/google/cloud/links/base.py +77 -13
  42. airflow/providers/google/cloud/links/bigquery.py +0 -47
  43. airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
  44. airflow/providers/google/cloud/links/bigtable.py +0 -48
  45. airflow/providers/google/cloud/links/cloud_build.py +0 -73
  46. airflow/providers/google/cloud/links/cloud_functions.py +0 -33
  47. airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
  48. airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
  49. airflow/providers/google/cloud/links/cloud_sql.py +0 -33
  50. airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
  51. airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
  52. airflow/providers/google/cloud/links/compute.py +0 -58
  53. airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
  54. airflow/providers/google/cloud/links/datacatalog.py +23 -54
  55. airflow/providers/google/cloud/links/dataflow.py +0 -34
  56. airflow/providers/google/cloud/links/dataform.py +0 -64
  57. airflow/providers/google/cloud/links/datafusion.py +1 -96
  58. airflow/providers/google/cloud/links/dataplex.py +0 -154
  59. airflow/providers/google/cloud/links/dataprep.py +0 -24
  60. airflow/providers/google/cloud/links/dataproc.py +11 -95
  61. airflow/providers/google/cloud/links/datastore.py +0 -31
  62. airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
  63. airflow/providers/google/cloud/links/managed_kafka.py +0 -70
  64. airflow/providers/google/cloud/links/mlengine.py +0 -70
  65. airflow/providers/google/cloud/links/pubsub.py +0 -32
  66. airflow/providers/google/cloud/links/spanner.py +0 -33
  67. airflow/providers/google/cloud/links/stackdriver.py +0 -30
  68. airflow/providers/google/cloud/links/translate.py +17 -187
  69. airflow/providers/google/cloud/links/vertex_ai.py +28 -195
  70. airflow/providers/google/cloud/links/workflows.py +0 -52
  71. airflow/providers/google/cloud/log/gcs_task_handler.py +17 -9
  72. airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
  73. airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
  74. airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
  75. airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
  76. airflow/providers/google/cloud/openlineage/facets.py +102 -1
  77. airflow/providers/google/cloud/openlineage/mixins.py +10 -8
  78. airflow/providers/google/cloud/openlineage/utils.py +15 -1
  79. airflow/providers/google/cloud/operators/alloy_db.py +70 -55
  80. airflow/providers/google/cloud/operators/bigquery.py +73 -636
  81. airflow/providers/google/cloud/operators/bigquery_dts.py +3 -5
  82. airflow/providers/google/cloud/operators/bigtable.py +36 -7
  83. airflow/providers/google/cloud/operators/cloud_base.py +21 -1
  84. airflow/providers/google/cloud/operators/cloud_batch.py +2 -2
  85. airflow/providers/google/cloud/operators/cloud_build.py +75 -32
  86. airflow/providers/google/cloud/operators/cloud_composer.py +128 -40
  87. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  88. airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
  89. airflow/providers/google/cloud/operators/cloud_run.py +23 -5
  90. airflow/providers/google/cloud/operators/cloud_sql.py +8 -16
  91. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -11
  92. airflow/providers/google/cloud/operators/compute.py +8 -40
  93. airflow/providers/google/cloud/operators/datacatalog.py +157 -21
  94. airflow/providers/google/cloud/operators/dataflow.py +38 -15
  95. airflow/providers/google/cloud/operators/dataform.py +15 -5
  96. airflow/providers/google/cloud/operators/datafusion.py +41 -20
  97. airflow/providers/google/cloud/operators/dataplex.py +193 -109
  98. airflow/providers/google/cloud/operators/dataprep.py +1 -5
  99. airflow/providers/google/cloud/operators/dataproc.py +78 -35
  100. airflow/providers/google/cloud/operators/dataproc_metastore.py +96 -88
  101. airflow/providers/google/cloud/operators/datastore.py +22 -6
  102. airflow/providers/google/cloud/operators/dlp.py +6 -29
  103. airflow/providers/google/cloud/operators/functions.py +16 -7
  104. airflow/providers/google/cloud/operators/gcs.py +10 -8
  105. airflow/providers/google/cloud/operators/gen_ai.py +389 -0
  106. airflow/providers/google/cloud/operators/kubernetes_engine.py +60 -99
  107. airflow/providers/google/cloud/operators/looker.py +1 -1
  108. airflow/providers/google/cloud/operators/managed_kafka.py +107 -52
  109. airflow/providers/google/cloud/operators/natural_language.py +1 -1
  110. airflow/providers/google/cloud/operators/pubsub.py +60 -14
  111. airflow/providers/google/cloud/operators/spanner.py +25 -12
  112. airflow/providers/google/cloud/operators/speech_to_text.py +1 -2
  113. airflow/providers/google/cloud/operators/stackdriver.py +1 -9
  114. airflow/providers/google/cloud/operators/tasks.py +1 -12
  115. airflow/providers/google/cloud/operators/text_to_speech.py +1 -2
  116. airflow/providers/google/cloud/operators/translate.py +40 -16
  117. airflow/providers/google/cloud/operators/translate_speech.py +1 -2
  118. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
  119. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +29 -9
  120. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +54 -26
  121. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
  122. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
  123. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  124. airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
  125. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
  126. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +11 -9
  127. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
  128. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +30 -7
  129. airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
  130. airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
  131. airflow/providers/google/cloud/operators/vision.py +2 -2
  132. airflow/providers/google/cloud/operators/workflows.py +18 -15
  133. airflow/providers/google/cloud/sensors/bigquery.py +2 -2
  134. airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -2
  135. airflow/providers/google/cloud/sensors/bigtable.py +11 -4
  136. airflow/providers/google/cloud/sensors/cloud_composer.py +533 -29
  137. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -2
  138. airflow/providers/google/cloud/sensors/dataflow.py +26 -9
  139. airflow/providers/google/cloud/sensors/dataform.py +2 -2
  140. airflow/providers/google/cloud/sensors/datafusion.py +4 -4
  141. airflow/providers/google/cloud/sensors/dataplex.py +2 -2
  142. airflow/providers/google/cloud/sensors/dataprep.py +2 -2
  143. airflow/providers/google/cloud/sensors/dataproc.py +2 -2
  144. airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -2
  145. airflow/providers/google/cloud/sensors/gcs.py +4 -4
  146. airflow/providers/google/cloud/sensors/looker.py +2 -2
  147. airflow/providers/google/cloud/sensors/pubsub.py +4 -4
  148. airflow/providers/google/cloud/sensors/tasks.py +2 -2
  149. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
  150. airflow/providers/google/cloud/sensors/workflows.py +2 -2
  151. airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
  152. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
  153. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
  154. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
  155. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -4
  156. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
  157. airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
  158. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
  159. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
  160. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
  161. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -2
  162. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +3 -3
  163. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +20 -12
  164. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
  165. airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
  166. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
  167. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
  168. airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
  169. airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
  170. airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
  171. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  172. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
  173. airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
  174. airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
  175. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
  176. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +13 -4
  177. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
  178. airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
  179. airflow/providers/google/cloud/triggers/bigquery.py +75 -34
  180. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  181. airflow/providers/google/cloud/triggers/cloud_composer.py +302 -46
  182. airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
  183. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +91 -1
  184. airflow/providers/google/cloud/triggers/dataflow.py +122 -0
  185. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  186. airflow/providers/google/cloud/triggers/dataplex.py +14 -2
  187. airflow/providers/google/cloud/triggers/dataproc.py +122 -52
  188. airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
  189. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  190. airflow/providers/google/cloud/triggers/pubsub.py +15 -19
  191. airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
  192. airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
  193. airflow/providers/google/cloud/utils/field_validator.py +1 -2
  194. airflow/providers/google/common/auth_backend/google_openid.py +4 -4
  195. airflow/providers/google/common/deprecated.py +2 -1
  196. airflow/providers/google/common/hooks/base_google.py +27 -8
  197. airflow/providers/google/common/links/storage.py +0 -22
  198. airflow/providers/google/common/utils/get_secret.py +31 -0
  199. airflow/providers/google/common/utils/id_token_credentials.py +3 -4
  200. airflow/providers/google/firebase/operators/firestore.py +2 -2
  201. airflow/providers/google/get_provider_info.py +56 -52
  202. airflow/providers/google/go_module_utils.py +35 -3
  203. airflow/providers/google/leveldb/hooks/leveldb.py +26 -1
  204. airflow/providers/google/leveldb/operators/leveldb.py +2 -2
  205. airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
  206. airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
  207. airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -2
  208. airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
  209. airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
  210. airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
  211. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
  212. airflow/providers/google/marketing_platform/sensors/display_video.py +3 -63
  213. airflow/providers/google/suite/hooks/calendar.py +1 -1
  214. airflow/providers/google/suite/hooks/sheets.py +15 -1
  215. airflow/providers/google/suite/operators/sheets.py +8 -3
  216. airflow/providers/google/suite/sensors/drive.py +2 -2
  217. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
  218. airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
  219. airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
  220. airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
  221. airflow/providers/google/version_compat.py +15 -1
  222. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +92 -48
  223. apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
  224. apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
  225. airflow/providers/google/cloud/hooks/automl.py +0 -673
  226. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  227. airflow/providers/google/cloud/links/automl.py +0 -193
  228. airflow/providers/google/cloud/operators/automl.py +0 -1362
  229. airflow/providers/google/cloud/operators/life_sciences.py +0 -119
  230. airflow/providers/google/cloud/operators/mlengine.py +0 -112
  231. apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
  232. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +0 -0
  233. {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
  234. {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0
airflow/providers/google/cloud/hooks/vertex_ai/ray.py
@@ -0,0 +1,223 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains a Google Cloud Vertex AI hook."""
+
+from __future__ import annotations
+
+import dataclasses
+from collections.abc import MutableMapping
+from typing import Any
+
+import vertex_ray
+from google.cloud import aiplatform
+from google.cloud.aiplatform.vertex_ray.util import resources
+from google.cloud.aiplatform_v1 import (
+    PersistentResourceServiceClient,
+)
+from proto.marshal.collections.repeated import Repeated
+
+from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
+
+
+class RayHook(GoogleBaseHook):
+    """Hook for Google Cloud Vertex AI Ray APIs."""
+
+    def extract_cluster_id(self, cluster_path) -> str:
+        """Extract cluster_id from cluster_path."""
+        cluster_id = PersistentResourceServiceClient.parse_persistent_resource_path(cluster_path)[
+            "persistent_resource"
+        ]
+        return cluster_id
+
+    def serialize_cluster_obj(self, cluster_obj: resources.Cluster) -> dict:
+        """Serialize Cluster dataclass to dict."""
+
+        def __encode_value(value: Any) -> Any:
+            if isinstance(value, (list, Repeated)):
+                return [__encode_value(nested_value) for nested_value in value]
+            if not isinstance(value, dict) and isinstance(value, MutableMapping):
+                return {key: __encode_value(nested_value) for key, nested_value in dict(value).items()}
+            if dataclasses.is_dataclass(value):
+                return dataclasses.asdict(value)
+            return value
+
+        return {
+            field.name: __encode_value(getattr(cluster_obj, field.name))
+            for field in dataclasses.fields(cluster_obj)
+        }
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def create_ray_cluster(
+        self,
+        project_id: str,
+        location: str,
+        head_node_type: resources.Resources = resources.Resources(),
+        python_version: str = "3.10",
+        ray_version: str = "2.33",
+        network: str | None = None,
+        service_account: str | None = None,
+        cluster_name: str | None = None,
+        worker_node_types: list[resources.Resources] | None = None,
+        custom_images: resources.NodeImages | None = None,
+        enable_metrics_collection: bool = True,
+        enable_logging: bool = True,
+        psc_interface_config: resources.PscIConfig | None = None,
+        reserved_ip_ranges: list[str] | None = None,
+        labels: dict[str, str] | None = None,
+    ) -> str:
+        """
+        Create a Ray cluster on the Vertex AI.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param head_node_type: The head node resource. Resources.node_count must be 1. If not set, default
+            value of Resources() class will be used.
+        :param python_version: Python version for the ray cluster.
+        :param ray_version: Ray version for the ray cluster. Default is 2.33.0.
+        :param network: Virtual private cloud (VPC) network. For Ray Client, VPC peering is required to
+            connect to the Ray Cluster managed in the Vertex API service. For Ray Job API, VPC network is not
+            required because Ray Cluster connection can be accessed through dashboard address.
+        :param service_account: Service account to be used for running Ray programs on the cluster.
+        :param cluster_name: This value may be up to 63 characters, and valid characters are `[a-z0-9_-]`.
+            The first character cannot be a number or hyphen.
+        :param worker_node_types: The list of Resources of the worker nodes. The same Resources object should
+            not appear multiple times in the list.
+        :param custom_images: The NodeImages which specifies head node and worker nodes images. All the
+            workers will share the same image. If each Resource has a specific custom image, use
+            `Resources.custom_image` for head/worker_node_type(s). Note that configuring
+            `Resources.custom_image` will override `custom_images` here. Allowlist only.
+        :param enable_metrics_collection: Enable Ray metrics collection for visualization.
+        :param enable_logging: Enable exporting Ray logs to Cloud Logging.
+        :param psc_interface_config: PSC-I config.
+        :param reserved_ip_ranges: A list of names for the reserved IP ranges under the VPC network that can
+            be used for this cluster. If set, we will deploy the cluster within the provided IP ranges.
+            Otherwise, the cluster is deployed to any IP ranges under the provided VPC network.
+            Example: ["vertex-ai-ip-range"].
+        :param labels: The labels with user-defined metadata to organize Ray cluster.
+            Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain
+            lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
+            See https://goo.gl/xmQnxf for more information and examples of labels.
+        """
+        aiplatform.init(project=project_id, location=location, credentials=self.get_credentials())
+        cluster_path = vertex_ray.create_ray_cluster(
+            head_node_type=head_node_type,
+            python_version=python_version,
+            ray_version=ray_version,
+            network=network,
+            service_account=service_account,
+            cluster_name=cluster_name,
+            worker_node_types=worker_node_types,
+            custom_images=custom_images,
+            enable_metrics_collection=enable_metrics_collection,
+            enable_logging=enable_logging,
+            psc_interface_config=psc_interface_config,
+            reserved_ip_ranges=reserved_ip_ranges,
+            labels=labels,
+        )
+        return cluster_path
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def list_ray_clusters(
+        self,
+        project_id: str,
+        location: str,
+    ) -> list[resources.Cluster]:
+        """
+        List Ray clusters under the currently authenticated project.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        """
+        aiplatform.init(project=project_id, location=location, credentials=self.get_credentials())
+        ray_clusters = vertex_ray.list_ray_clusters()
+        return ray_clusters
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def get_ray_cluster(
+        self,
+        project_id: str,
+        location: str,
+        cluster_id: str,
+    ) -> resources.Cluster:
+        """
+        Get Ray cluster.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param cluster_id: Cluster resource ID.
+        """
+        aiplatform.init(project=project_id, location=location, credentials=self.get_credentials())
+        ray_cluster_name = PersistentResourceServiceClient.persistent_resource_path(
+            project=project_id,
+            location=location,
+            persistent_resource=cluster_id,
+        )
+        ray_cluster = vertex_ray.get_ray_cluster(
+            cluster_resource_name=ray_cluster_name,
+        )
+        return ray_cluster
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def update_ray_cluster(
+        self,
+        project_id: str,
+        location: str,
+        cluster_id: str,
+        worker_node_types: list[resources.Resources],
+    ) -> str:
+        """
+        Update Ray cluster (currently support resizing node counts for worker nodes).
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param cluster_id: Cluster resource ID.
+        :param worker_node_types: The list of Resources of the resized worker nodes. The same Resources
+            object should not appear multiple times in the list.
+        """
+        aiplatform.init(project=project_id, location=location, credentials=self.get_credentials())
+        ray_cluster_name = PersistentResourceServiceClient.persistent_resource_path(
+            project=project_id,
+            location=location,
+            persistent_resource=cluster_id,
+        )
+        updated_ray_cluster_name = vertex_ray.update_ray_cluster(
+            cluster_resource_name=ray_cluster_name, worker_node_types=worker_node_types
+        )
+        return updated_ray_cluster_name
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def delete_ray_cluster(
+        self,
+        project_id: str,
+        location: str,
+        cluster_id: str,
+    ) -> None:
+        """
+        Delete Ray cluster.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param cluster_id: Cluster resource ID.
+        """
+        aiplatform.init(project=project_id, location=location, credentials=self.get_credentials())
+        ray_cluster_name = PersistentResourceServiceClient.persistent_resource_path(
+            project=project_id,
+            location=location,
+            persistent_resource=cluster_id,
+        )
+        vertex_ray.delete_ray_cluster(cluster_resource_name=ray_cluster_name)
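
For orientation, a minimal, hypothetical sketch of driving the new RayHook directly follows (the packaged entry points are the operators added in airflow/providers/google/cloud/operators/vertex_ai/ray.py, item 129 above). The connection id and the project/region literals are illustrative assumptions; the method names and signatures are the ones introduced in the hunk above.

from google.cloud.aiplatform.vertex_ray.util import resources

from airflow.providers.google.cloud.hooks.vertex_ai.ray import RayHook

# Assumed connection id; any Google Cloud connection with Vertex AI permissions would do.
hook = RayHook(gcp_conn_id="google_cloud_default")

# create_ray_cluster() returns the full persistent-resource path of the new cluster.
cluster_path = hook.create_ray_cluster(
    project_id="example-project",  # illustrative value
    location="us-central1",        # illustrative value
    worker_node_types=[resources.Resources()],  # one default-sized worker pool
)

# The path can be reduced to a plain cluster id and reused with the other methods.
cluster_id = hook.extract_cluster_id(cluster_path)
cluster = hook.get_ray_cluster(project_id="example-project", location="us-central1", cluster_id=cluster_id)
print(hook.serialize_cluster_obj(cluster))  # JSON-friendly dict, e.g. for pushing to XCom

hook.delete_ray_cluster(project_id="example-project", location="us-central1", cluster_id=cluster_id)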
airflow/providers/google/cloud/hooks/vision.py
@@ -19,10 +19,10 @@
 
 from __future__ import annotations
 
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 from copy import deepcopy
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Callable
+from typing import TYPE_CHECKING, Any
 
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.vision_v1 import (
airflow/providers/google/cloud/hooks/workflows.py
@@ -65,7 +65,7 @@ class WorkflowsHook(GoogleBaseHook):
         Create a new workflow.
 
         If a workflow with the specified name already exists in the
-        specified project and location, the long running operation will
+        specified project and location, the long-running operation will
         return [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error.
 
         :param workflow: Required. Workflow to be created.
airflow/providers/google/cloud/links/alloy_db.py
@@ -19,14 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
-if TYPE_CHECKING:
-    from airflow.models import BaseOperator
-    from airflow.utils.context import Context
-
 ALLOY_DB_BASE_LINK = "/alloydb"
 ALLOY_DB_CLUSTER_LINK = (
     ALLOY_DB_BASE_LINK + "/locations/{location_id}/clusters/{cluster_id}?project={project_id}"
@@ -44,20 +38,6 @@ class AlloyDBClusterLink(BaseGoogleLink):
     key = "alloy_db_cluster"
     format_str = ALLOY_DB_CLUSTER_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        location_id: str,
-        cluster_id: str,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=AlloyDBClusterLink.key,
-            value={"location_id": location_id, "cluster_id": cluster_id, "project_id": project_id},
-        )
-
 
 class AlloyDBUsersLink(BaseGoogleLink):
     """Helper class for constructing AlloyDB users Link."""
@@ -66,20 +46,6 @@ class AlloyDBUsersLink(BaseGoogleLink):
     key = "alloy_db_users"
     format_str = ALLOY_DB_USERS_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        location_id: str,
-        cluster_id: str,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=AlloyDBUsersLink.key,
-            value={"location_id": location_id, "cluster_id": cluster_id, "project_id": project_id},
-        )
-
 
 class AlloyDBBackupsLink(BaseGoogleLink):
     """Helper class for constructing AlloyDB backups Link."""
@@ -87,15 +53,3 @@ class AlloyDBBackupsLink(BaseGoogleLink):
     name = "AlloyDB Backups"
     key = "alloy_db_backups"
     format_str = ALLOY_DB_BACKUPS_LINK
-
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        project_id: str | None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=AlloyDBBackupsLink.key,
-            value={"project_id": project_id},
-        )
airflow/providers/google/cloud/links/base.py
@@ -18,19 +18,15 @@
 from __future__ import annotations
 
 from typing import TYPE_CHECKING, ClassVar
+from urllib.parse import urlparse
 
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
+from airflow.providers.common.compat.sdk import BaseOperatorLink, BaseSensorOperator, XCom
+from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
 
 if TYPE_CHECKING:
-    from airflow.models import BaseOperator
     from airflow.models.taskinstancekey import TaskInstanceKey
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperatorLink
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-    from airflow.models.xcom import XCom  # type: ignore[no-redef]
+    from airflow.providers.common.compat.sdk import Context
+    from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
 
 BASE_LINK = "https://console.cloud.google.com"
 
@@ -39,6 +35,12 @@ class BaseGoogleLink(BaseOperatorLink):
     """
     Base class for all Google links.
 
+    When you inherit this class in a Link class;
+    - You can call the persist method to push data to the XCom to use it later in the get_link method.
+    - If you have an operator which inherit the GoogleCloudBaseOperator or BaseSensorOperator
+      You can define extra_links_params method in the operator to pass the operator properties
+      to the get_link method.
+
     :meta private:
     """
 
@@ -46,15 +48,77 @@ class BaseGoogleLink(BaseOperatorLink):
     key: ClassVar[str]
     format_str: ClassVar[str]
 
+    @property
+    def xcom_key(self) -> str:
+        # NOTE: in Airflow 3 we need to have xcom_key property in the Link class.
+        # Since we have the key property already, this is just a proxy property method to use same
+        # key as in Airflow 2.
+        return self.key
+
+    @classmethod
+    def persist(cls, context: Context, **value):
+        """
+        Push arguments to the XCom to use later for link formatting at the `get_link` method.
+
+        Note: for Airflow 2 we need to call this function with context variable only
+        where we have the extra_links_params property method defined
+        """
+        params = {}
+        # TODO: remove after Airflow v2 support dropped
+        if not AIRFLOW_V_3_0_PLUS:
+            common_params = getattr(context["task"], "extra_links_params", None)
+            if common_params:
+                params.update(common_params)
+
+        context["ti"].xcom_push(
+            key=cls.key,
+            value={
+                **params,
+                **value,
+            },
+        )
+
+    def get_config(self, operator, ti_key):
+        conf = {}
+        conf.update(getattr(operator, "extra_links_params", {}))
+        conf.update(XCom.get_value(key=self.key, ti_key=ti_key) or {})
+
+        # if the config did not define, return None to stop URL formatting
+        if not conf:
+            return None
+
+        # Add a default value for the 'namespace' parameter for backward compatibility.
+        # This is for datafusion
+        conf.setdefault("namespace", "default")
+        return conf
+
     def get_link(
         self,
         operator: BaseOperator,
         *,
         ti_key: TaskInstanceKey,
     ) -> str:
-        conf = XCom.get_value(key=self.key, ti_key=ti_key)
+        if TYPE_CHECKING:
+            assert isinstance(operator, (GoogleCloudBaseOperator, BaseSensorOperator))
+
+        # In cases when worker passes execution to trigger, the value that is put to XCom
+        # already contains link to the object in string format. In this case we don't want to execute
+        # get_config() again. Instead we can leave this value without any changes
+        link_value = XCom.get_value(key=self.key, ti_key=ti_key)
+        if link_value and isinstance(link_value, str):
+            if urlparse(link_value).scheme in ("http", "https"):
+                return link_value
+
+        conf = self.get_config(operator, ti_key)
         if not conf:
             return ""
-        if self.format_str.startswith("http"):
-            return self.format_str.format(**conf)
-        return BASE_LINK + self.format_str.format(**conf)
+        return self._format_link(**conf)
+
+    def _format_link(self, **kwargs):
+        try:
+            formatted_str = self.format_str.format(**kwargs)
+            if formatted_str.startswith("http"):
+                return formatted_str
+            return BASE_LINK + formatted_str
+        except KeyError:
+            return ""
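
The new persist()/get_config() machinery above implies a usage pattern for link classes and their operators. The following is a hedged sketch of that pattern built only from the behaviour shown in this hunk; ExampleResourceLink, ExampleOperator and the URL template are hypothetical names, not part of the provider.

from airflow.providers.google.cloud.links.base import BaseGoogleLink
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator


class ExampleResourceLink(BaseGoogleLink):
    """Hypothetical console link; BASE_LINK is prepended because the template is relative."""

    name = "Example Resource"
    key = "example_resource"
    format_str = "/example/{resource_id}?project={project_id}"


class ExampleOperator(GoogleCloudBaseOperator):
    """Hypothetical operator showing the extra_links_params / persist() split."""

    operator_extra_links = (ExampleResourceLink(),)

    def __init__(self, *, resource_id: str, project_id: str, **kwargs):
        super().__init__(**kwargs)
        self.resource_id = resource_id
        self.project_id = project_id

    @property
    def extra_links_params(self) -> dict:
        # Picked up by BaseGoogleLink.get_config() (and merged by persist() on Airflow 2).
        return {"project_id": self.project_id}

    def execute(self, context):
        # Only the values not already covered by extra_links_params need to be pushed.
        ExampleResourceLink.persist(context=context, resource_id=self.resource_id)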
airflow/providers/google/cloud/links/bigquery.py
@@ -19,14 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
-if TYPE_CHECKING:
-    from airflow.models import BaseOperator
-    from airflow.utils.context import Context
-
 BIGQUERY_BASE_LINK = "/bigquery"
 BIGQUERY_DATASET_LINK = (
     BIGQUERY_BASE_LINK + "?referrer=search&project={project_id}&d={dataset_id}&p={project_id}&page=dataset"
@@ -47,19 +41,6 @@ class BigQueryDatasetLink(BaseGoogleLink):
     key = "bigquery_dataset"
     format_str = BIGQUERY_DATASET_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        dataset_id: str,
-        project_id: str,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=BigQueryDatasetLink.key,
-            value={"dataset_id": dataset_id, "project_id": project_id},
-        )
-
 
 class BigQueryTableLink(BaseGoogleLink):
     """Helper class for constructing BigQuery Table Link."""
@@ -68,20 +49,6 @@ class BigQueryTableLink(BaseGoogleLink):
     key = "bigquery_table"
     format_str = BIGQUERY_TABLE_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        project_id: str,
-        table_id: str,
-        dataset_id: str | None = None,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=BigQueryTableLink.key,
-            value={"dataset_id": dataset_id, "project_id": project_id, "table_id": table_id},
-        )
-
 
 class BigQueryJobDetailLink(BaseGoogleLink):
     """Helper class for constructing BigQuery Job Detail Link."""
@@ -89,17 +56,3 @@ class BigQueryJobDetailLink(BaseGoogleLink):
     name = "BigQuery Job Detail"
     key = "bigquery_job_detail"
    format_str = BIGQUERY_JOB_DETAIL_LINK
-
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        project_id: str,
-        location: str,
-        job_id: str,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=BigQueryJobDetailLink.key,
-            value={"project_id": project_id, "location": location, "job_id": job_id},
-        )
airflow/providers/google/cloud/links/bigquery_dts.py
@@ -19,14 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
-if TYPE_CHECKING:
-    from airflow.models import BaseOperator
-    from airflow.utils.context import Context
-
 BIGQUERY_BASE_LINK = "/bigquery/transfers"
 BIGQUERY_DTS_LINK = BIGQUERY_BASE_LINK + "/locations/{region}/configs/{config_id}/runs?project={project_id}"
 
@@ -37,17 +31,3 @@ class BigQueryDataTransferConfigLink(BaseGoogleLink):
     name = "BigQuery Data Transfer Config"
     key = "bigquery_dts_config"
     format_str = BIGQUERY_DTS_LINK
-
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance: BaseOperator,
-        region: str,
-        config_id: str,
-        project_id: str,
-    ):
-        task_instance.xcom_push(
-            context,
-            key=BigQueryDataTransferConfigLink.key,
-            value={"project_id": project_id, "region": region, "config_id": config_id},
-        )
airflow/providers/google/cloud/links/bigtable.py
@@ -16,13 +16,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 
-if TYPE_CHECKING:
-    from airflow.utils.context import Context
-
 BIGTABLE_BASE_LINK = "/bigtable"
 BIGTABLE_INSTANCE_LINK = BIGTABLE_BASE_LINK + "/instances/{instance_id}/overview?project={project_id}"
 BIGTABLE_CLUSTER_LINK = (
@@ -38,20 +33,6 @@ class BigtableInstanceLink(BaseGoogleLink):
     key = "instance_key"
     format_str = BIGTABLE_INSTANCE_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=BigtableInstanceLink.key,
-            value={
-                "instance_id": task_instance.instance_id,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class BigtableClusterLink(BaseGoogleLink):
     """Helper class for constructing Bigtable Cluster link."""
@@ -60,21 +41,6 @@ class BigtableClusterLink(BaseGoogleLink):
     key = "cluster_key"
     format_str = BIGTABLE_CLUSTER_LINK
 
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=BigtableClusterLink.key,
-            value={
-                "instance_id": task_instance.instance_id,
-                "cluster_id": task_instance.cluster_id,
-                "project_id": task_instance.project_id,
-            },
-        )
-
 
 class BigtableTablesLink(BaseGoogleLink):
     """Helper class for constructing Bigtable Tables link."""
@@ -82,17 +48,3 @@ class BigtableTablesLink(BaseGoogleLink):
     name = "Bigtable Tables"
     key = "tables_key"
     format_str = BIGTABLE_TABLES_LINK
-
-    @staticmethod
-    def persist(
-        context: Context,
-        task_instance,
-    ):
-        task_instance.xcom_push(
-            context=context,
-            key=BigtableTablesLink.key,
-            value={
-                "instance_id": task_instance.instance_id,
-                "project_id": task_instance.project_id,
-            },
-        )
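
Across the link modules above (alloy_db, bigquery, bigquery_dts, bigtable) the per-class persist() staticmethods are removed, so operator call sites change shape. Below is a hedged illustration of that migration inside a hypothetical operator; only the two persist() signatures are taken from this diff.

from airflow.providers.google.cloud.links.bigquery import BigQueryDatasetLink
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator


class ExampleDatasetOperator(GoogleCloudBaseOperator):
    """Hypothetical operator, shown only to contrast the old and new call sites."""

    operator_extra_links = (BigQueryDatasetLink(),)

    def __init__(self, *, dataset_id: str, project_id: str, **kwargs):
        super().__init__(**kwargs)
        self.dataset_id = dataset_id
        self.project_id = project_id

    def execute(self, context):
        # Provider 15.x style (removed in this diff): the task instance was passed explicitly
        # and the staticmethod pushed the values to XCom itself.
        # BigQueryDatasetLink.persist(
        #     context=context,
        #     task_instance=self,
        #     dataset_id=self.dataset_id,
        #     project_id=self.project_id,
        # )

        # Provider 19.x style: keyword values only; the shared BaseGoogleLink.persist()
        # pushes them to XCom for get_link() to format later.
        BigQueryDatasetLink.persist(context=context, dataset_id=self.dataset_id, project_id=self.project_id)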