apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 16.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,388 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+ """This module contains Google Vertex AI Ray operators."""
19
+
20
+ from __future__ import annotations
21
+
22
+ from collections.abc import Sequence
23
+ from functools import cached_property
24
+ from typing import TYPE_CHECKING
25
+
26
+ from google.api_core.exceptions import NotFound
27
+ from google.cloud.aiplatform.vertex_ray.util import resources
28
+
29
+ from airflow.exceptions import AirflowException
30
+ from airflow.providers.google.cloud.hooks.vertex_ai.ray import RayHook
31
+ from airflow.providers.google.cloud.links.vertex_ai import (
32
+ VertexAIRayClusterLink,
33
+ VertexAIRayClusterListLink,
34
+ )
35
+ from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
36
+
37
+ if TYPE_CHECKING:
38
+ from airflow.utils.context import Context
39
+
40
+
41
class RayBaseOperator(GoogleCloudBaseOperator):
    """
    Common base for the Vertex AI Ray operators in this module.

    Holds the shared project/location/connection parameters and exposes a
    lazily created :class:`RayHook` through the ``hook`` property.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "location",
        "gcp_conn_id",
        "project_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        project_id: str,
        location: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.location = location
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    @cached_property
    def hook(self) -> RayHook:
        # One hook per operator instance; cached_property defers creation until
        # first use, so constructing the operator stays cheap.
        return RayHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
86
+
87
+
88
class CreateRayClusterOperator(RayBaseOperator):
    """
    Create a Ray cluster on the Vertex AI.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param head_node_type: The head node resource. Resources.node_count must be 1. If not set, default
        value of Resources() class will be used.
    :param python_version: Python version for the ray cluster.
    :param ray_version: Ray version for the ray cluster. Default is 2.33.0.
    :param network: Virtual private cloud (VPC) network. For Ray Client, VPC peering is required to
        connect to the Ray Cluster managed in the Vertex API service. For Ray Job API, VPC network is not
        required because Ray Cluster connection can be accessed through dashboard address.
    :param service_account: Service account to be used for running Ray programs on the cluster.
    :param cluster_name: This value may be up to 63 characters, and valid characters are `[a-z0-9_-]`.
        The first character cannot be a number or hyphen.
    :param worker_node_types: The list of Resources of the worker nodes. The same Resources object should
        not appear multiple times in the list.
    :param custom_images: The NodeImages which specifies head node and worker nodes images. All the
        workers will share the same image. If each Resource has a specific custom image, use
        `Resources.custom_image` for head/worker_node_type(s). Note that configuring
        `Resources.custom_image` will override `custom_images` here. Allowlist only.
    :param enable_metrics_collection: Enable Ray metrics collection for visualization.
    :param enable_logging: Enable exporting Ray logs to Cloud Logging.
    :param psc_interface_config: PSC-I config.
    :param reserved_ip_ranges: A list of names for the reserved IP ranges under the VPC network that can
        be used for this cluster. If set, we will deploy the cluster within the provided IP ranges.
        Otherwise, the cluster is deployed to any IP ranges under the provided VPC network.
        Example: ["vertex-ai-ip-range"].
    :param labels: The labels with user-defined metadata to organize Ray cluster.
        Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain
        lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
        See https://goo.gl/xmQnxf for more information and examples of labels.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple(
        {"head_node_type", "worker_node_types"} | set(RayBaseOperator.template_fields)
    )
    operator_extra_links = (VertexAIRayClusterLink(),)

    def __init__(
        self,
        head_node_type: resources.Resources | None = None,
        python_version: str = "3.10",
        ray_version: str = "2.33",
        network: str | None = None,
        service_account: str | None = None,
        cluster_name: str | None = None,
        worker_node_types: list[resources.Resources] | None = None,
        custom_images: resources.NodeImages | None = None,
        enable_metrics_collection: bool = True,
        enable_logging: bool = True,
        psc_interface_config: resources.PscIConfig | None = None,
        reserved_ip_ranges: list[str] | None = None,
        labels: dict[str, str] | None = None,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        # Create a fresh Resources() per operator instance instead of using a
        # mutable default argument, which would be a single object shared by
        # every operator that relied on the default.
        self.head_node_type = head_node_type if head_node_type is not None else resources.Resources()
        self.python_version = python_version
        self.ray_version = ray_version
        self.network = network
        self.service_account = service_account
        self.cluster_name = cluster_name
        self.worker_node_types = worker_node_types
        self.custom_images = custom_images
        self.enable_metrics_collection = enable_metrics_collection
        self.enable_logging = enable_logging
        self.psc_interface_config = psc_interface_config
        self.reserved_ip_ranges = reserved_ip_ranges
        self.labels = labels

    def execute(self, context: Context):
        """Create the cluster, push its ID to XCom, and return the full cluster path."""
        self.log.info("Creating a Ray cluster.")
        try:
            cluster_path = self.hook.create_ray_cluster(
                project_id=self.project_id,
                location=self.location,
                head_node_type=self.head_node_type,
                python_version=self.python_version,
                ray_version=self.ray_version,
                network=self.network,
                service_account=self.service_account,
                cluster_name=self.cluster_name,
                worker_node_types=self.worker_node_types,
                custom_images=self.custom_images,
                enable_metrics_collection=self.enable_metrics_collection,
                enable_logging=self.enable_logging,
                psc_interface_config=self.psc_interface_config,
                reserved_ip_ranges=self.reserved_ip_ranges,
                labels=self.labels,
            )
            cluster_id = self.hook.extract_cluster_id(cluster_path)
            self.xcom_push(
                context=context,
                key="cluster_id",
                value=cluster_id,
            )
            VertexAIRayClusterLink.persist(context=context, task_instance=self, cluster_id=cluster_id)
            self.log.info("Ray cluster was created.")
        except Exception as error:
            # Chain the original exception so the root cause stays visible in the traceback.
            raise AirflowException(error) from error
        return cluster_path
201
+
202
+
203
class ListRayClustersOperator(RayBaseOperator):
    """
    List Ray clusters under the currently authenticated project.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    operator_extra_links = (VertexAIRayClusterListLink(),)

    def execute(self, context: Context):
        """Return the clusters in ``self.location`` as a list of serialized dicts."""
        VertexAIRayClusterListLink.persist(context=context, task_instance=self)
        self.log.info("Listing Clusters from location %s.", self.location)
        try:
            ray_cluster_list = self.hook.list_ray_clusters(
                project_id=self.project_id,
                location=self.location,
            )
            # Serialize each cluster object so the result is XCom-friendly.
            ray_cluster_dict_list = [
                self.hook.serialize_cluster_obj(ray_cluster) for ray_cluster in ray_cluster_list
            ]
        except Exception as error:
            # Chain the original exception so the root cause stays visible in the traceback.
            raise AirflowException(error) from error
        return ray_cluster_dict_list
236
+
237
+
238
class GetRayClusterOperator(RayBaseOperator):
    """
    Fetch a single Vertex AI Ray cluster and return it in serialized form.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Cluster resource ID.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple({"cluster_id"} | set(RayBaseOperator.template_fields))
    operator_extra_links = (VertexAIRayClusterLink(),)

    def __init__(self, cluster_id: str, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id

    def execute(self, context: Context):
        VertexAIRayClusterLink.persist(context=context, task_instance=self, cluster_id=self.cluster_id)
        self.log.info("Getting Cluster: %s", self.cluster_id)
        try:
            cluster = self.hook.get_ray_cluster(
                project_id=self.project_id,
                location=self.location,
                cluster_id=self.cluster_id,
            )
            self.log.info("Cluster was gotten.")
            return self.hook.serialize_cluster_obj(cluster)
        except NotFound as not_found_err:
            # A missing cluster is surfaced as a task failure rather than silently ignored.
            self.log.info("The Cluster %s does not exist.", self.cluster_id)
            raise AirflowException(not_found_err)
287
+
288
+
289
class UpdateRayClusterOperator(RayBaseOperator):
    """
    Update Ray cluster (currently support resizing node counts for worker nodes).

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Cluster resource ID.
    :param worker_node_types: The list of Resources of the resized worker nodes. The same Resources
        object should not appear multiple times in the list.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple(
        {"cluster_id", "worker_node_types"} | set(RayBaseOperator.template_fields)
    )
    operator_extra_links = (VertexAIRayClusterLink(),)

    def __init__(
        self,
        cluster_id: str,
        worker_node_types: list[resources.Resources],
        *args,
        **kwargs,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id
        self.worker_node_types = worker_node_types

    def execute(self, context: Context):
        """Resize the cluster's worker nodes and return the updated cluster path."""
        VertexAIRayClusterLink.persist(
            context=context,
            task_instance=self,
            cluster_id=self.cluster_id,
        )
        self.log.info("Updating a Ray cluster.")
        try:
            cluster_path = self.hook.update_ray_cluster(
                project_id=self.project_id,
                location=self.location,
                cluster_id=self.cluster_id,
                worker_node_types=self.worker_node_types,
            )
            self.log.info("Ray cluster %s was updated.", self.cluster_id)
            return cluster_path
        except NotFound as not_found_err:
            # A missing cluster gets its own log line before failing the task.
            self.log.info("The Cluster %s does not exist.", self.cluster_id)
            raise AirflowException(not_found_err) from not_found_err
        except Exception as error:
            # Chain the original exception so the root cause stays visible in the traceback.
            raise AirflowException(error) from error
346
+
347
+
348
class DeleteRayClusterOperator(RayBaseOperator):
    """
    Remove an existing Vertex AI Ray cluster.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Cluster resource ID.
    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple({"cluster_id"} | set(RayBaseOperator.template_fields))

    def __init__(self, cluster_id: str, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id

    def execute(self, context: Context):
        try:
            self.log.info("Deleting Ray cluster: %s", self.cluster_id)
            self.hook.delete_ray_cluster(
                project_id=self.project_id, location=self.location, cluster_id=self.cluster_id
            )
            self.log.info("Ray cluster was deleted.")
        except NotFound as err:
            # A missing cluster is still treated as a task failure.
            self.log.info("The Ray cluster ID %s does not exist.", self.cluster_id)
            raise AirflowException(err)
@@ -547,6 +547,7 @@ class GCSToBigQueryOperator(BaseOperator):
547
547
  "quote",
548
548
  "encoding",
549
549
  "preserveAsciiControlCharacters",
550
+ "columnNameCharacterMap",
550
551
  ],
551
552
  "googleSheetsOptions": ["skipLeadingRows"],
552
553
  }
@@ -672,6 +673,7 @@ class GCSToBigQueryOperator(BaseOperator):
672
673
  "quote",
673
674
  "encoding",
674
675
  "preserveAsciiControlCharacters",
676
+ "columnNameCharacterMap",
675
677
  ],
676
678
  "DATASTORE_BACKUP": ["projectionFields"],
677
679
  "NEWLINE_DELIMITED_JSON": ["autodetect", "ignoreUnknownValues"],
@@ -788,3 +788,125 @@ class DataflowJobMessagesTrigger(BaseTrigger):
788
788
  poll_sleep=self.poll_sleep,
789
789
  impersonation_chain=self.impersonation_chain,
790
790
  )
791
+
792
+
793
class DataflowJobStateCompleteTrigger(BaseTrigger):
    """
    Trigger that monitors whether a Dataflow job has reached a successful terminal state for its job type.

    :param job_id: Required. ID of the job.
    :param project_id: Required. The Google Cloud project ID in which the job was started.
    :param location: Optional. The location where the job is executed. If set to None then
        the value of DEFAULT_DATAFLOW_LOCATION will be used.
    :param wait_until_finished: Optional. Dataflow option to block pipeline until completion.
    :param gcp_conn_id: The connection ID to use for connecting to Google Cloud.
    :param poll_sleep: Time (seconds) to wait between two consecutive calls to check the job.
    :param impersonation_chain: Optional. Service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    def __init__(
        self,
        job_id: str,
        project_id: str | None,
        location: str = DEFAULT_DATAFLOW_LOCATION,
        wait_until_finished: bool | None = None,
        gcp_conn_id: str = "google_cloud_default",
        poll_sleep: int = 10,
        impersonation_chain: str | Sequence[str] | None = None,
    ):
        super().__init__()
        self.job_id = job_id
        self.project_id = project_id
        self.location = location
        self.wait_until_finished = wait_until_finished
        self.gcp_conn_id = gcp_conn_id
        self.poll_sleep = poll_sleep
        self.impersonation_chain = impersonation_chain

    def serialize(self) -> tuple[str, dict[str, Any]]:
        """Serialize class arguments and classpath."""
        return (
            "airflow.providers.google.cloud.triggers.dataflow.DataflowJobStateCompleteTrigger",
            {
                "job_id": self.job_id,
                "project_id": self.project_id,
                "location": self.location,
                "wait_until_finished": self.wait_until_finished,
                "gcp_conn_id": self.gcp_conn_id,
                "poll_sleep": self.poll_sleep,
                "impersonation_chain": self.impersonation_chain,
            },
        )

    async def run(self):
        """
        Loop until the job reaches successful final or error state.

        Yields a TriggerEvent with success status, if the job reaches successful state for own type.

        Yields a TriggerEvent with error status, if the client returns an unexpected terminal
        job status or any exception is raised while looping.

        In any other case the Trigger will wait for a specified amount of time
        stored in self.poll_sleep variable.
        """
        # States that terminate the job unsuccessfully for this trigger.
        # Loop-invariant, so built once instead of on every polling iteration.
        failed_states = DataflowJobStatus.FAILED_END_STATES | {DataflowJobStatus.JOB_STATE_DRAINED}
        try:
            while True:
                job = await self.async_hook.get_job(
                    project_id=self.project_id,
                    job_id=self.job_id,
                    location=self.location,
                )
                job_state = job.current_state.name
                job_type_name = job.type_.name

                if job_state in failed_states:
                    yield TriggerEvent(
                        {
                            "status": "error",
                            "message": (
                                f"Job with id '{self.job_id}' is in failed terminal state: {job_state}"
                            ),
                        }
                    )
                    return

                if self.async_hook.job_reached_terminal_state(
                    job={"id": self.job_id, "currentState": job_state, "type": job_type_name},
                    wait_until_finished=self.wait_until_finished,
                ):
                    yield TriggerEvent(
                        {
                            "status": "success",
                            "message": (
                                f"Job with id '{self.job_id}' has reached successful final state: {job_state}"
                            ),
                        }
                    )
                    return
                self.log.info("Sleeping for %s seconds.", self.poll_sleep)
                await asyncio.sleep(self.poll_sleep)
        except Exception as e:
            # Triggers must not raise; failures are reported back as error events.
            self.log.error("Exception occurred while checking for job state!")
            yield TriggerEvent(
                {
                    "status": "error",
                    "message": str(e),
                }
            )

    @cached_property
    def async_hook(self) -> AsyncDataflowHook:
        # Deferred hook construction; one hook per trigger instance.
        return AsyncDataflowHook(
            gcp_conn_id=self.gcp_conn_id,
            poll_sleep=self.poll_sleep,
            impersonation_chain=self.impersonation_chain,
        )
@@ -675,6 +675,7 @@ def get_provider_info():
675
675
  "airflow.providers.google.cloud.operators.vertex_ai.pipeline_job",
676
676
  "airflow.providers.google.cloud.operators.vertex_ai.generative_model",
677
677
  "airflow.providers.google.cloud.operators.vertex_ai.feature_store",
678
+ "airflow.providers.google.cloud.operators.vertex_ai.ray",
678
679
  ],
679
680
  },
680
681
  {
@@ -1041,6 +1042,7 @@ def get_provider_info():
1041
1042
  "airflow.providers.google.cloud.hooks.vertex_ai.generative_model",
1042
1043
  "airflow.providers.google.cloud.hooks.vertex_ai.prediction_service",
1043
1044
  "airflow.providers.google.cloud.hooks.vertex_ai.feature_store",
1045
+ "airflow.providers.google.cloud.hooks.vertex_ai.ray",
1044
1046
  ],
1045
1047
  },
1046
1048
  {
@@ -1427,6 +1429,8 @@ def get_provider_info():
1427
1429
  "airflow.providers.google.cloud.links.vertex_ai.VertexAIEndpointListLink",
1428
1430
  "airflow.providers.google.cloud.links.vertex_ai.VertexAIPipelineJobLink",
1429
1431
  "airflow.providers.google.cloud.links.vertex_ai.VertexAIPipelineJobListLink",
1432
+ "airflow.providers.google.cloud.links.vertex_ai.VertexAIRayClusterLink",
1433
+ "airflow.providers.google.cloud.links.vertex_ai.VertexAIRayClusterListLink",
1430
1434
  "airflow.providers.google.cloud.links.workflows.WorkflowsWorkflowDetailsLink",
1431
1435
  "airflow.providers.google.cloud.links.workflows.WorkflowsListOfWorkflowsLink",
1432
1436
  "airflow.providers.google.cloud.links.workflows.WorkflowsExecutionLink",
@@ -32,5 +32,4 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
32
32
  return airflow_version.major, airflow_version.minor, airflow_version.micro
33
33
 
34
34
 
35
- AIRFLOW_V_2_10_PLUS = get_base_airflow_version_tuple() >= (2, 10, 0)
36
35
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)