apache-airflow-providers-google 11.0.0rc1__py3-none-any.whl → 12.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/assets/gcs.py +1 -7
- airflow/providers/google/cloud/hooks/alloy_db.py +289 -0
- airflow/providers/google/cloud/hooks/cloud_batch.py +13 -5
- airflow/providers/google/cloud/hooks/dataproc.py +7 -3
- airflow/providers/google/cloud/hooks/dataproc_metastore.py +41 -22
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +7 -38
- airflow/providers/google/cloud/hooks/translate.py +355 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +147 -0
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +10 -0
- airflow/providers/google/cloud/links/alloy_db.py +55 -0
- airflow/providers/google/cloud/links/translate.py +98 -0
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +1 -5
- airflow/providers/google/cloud/openlineage/mixins.py +4 -12
- airflow/providers/google/cloud/openlineage/utils.py +200 -22
- airflow/providers/google/cloud/operators/alloy_db.py +459 -0
- airflow/providers/google/cloud/operators/automl.py +55 -44
- airflow/providers/google/cloud/operators/bigquery.py +60 -15
- airflow/providers/google/cloud/operators/dataproc.py +12 -0
- airflow/providers/google/cloud/operators/gcs.py +5 -14
- airflow/providers/google/cloud/operators/kubernetes_engine.py +377 -705
- airflow/providers/google/cloud/operators/mlengine.py +41 -31
- airflow/providers/google/cloud/operators/translate.py +586 -1
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +163 -0
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +5 -0
- airflow/providers/google/cloud/sensors/dataproc.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/__init__.py +16 -0
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +112 -0
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +6 -11
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +3 -0
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +3 -0
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +5 -10
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +3 -15
- airflow/providers/google/cloud/transfers/gcs_to_local.py +9 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +41 -6
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +15 -0
- airflow/providers/google/get_provider_info.py +30 -18
- airflow/providers/google/version_compat.py +36 -0
- {apache_airflow_providers_google-11.0.0rc1.dist-info → apache_airflow_providers_google-12.0.0.dist-info}/METADATA +20 -22
- {apache_airflow_providers_google-11.0.0rc1.dist-info → apache_airflow_providers_google-12.0.0.dist-info}/RECORD +42 -37
- airflow/providers/google/cloud/hooks/datapipeline.py +0 -71
- airflow/providers/google/cloud/openlineage/BigQueryErrorRunFacet.json +0 -30
- airflow/providers/google/cloud/operators/datapipeline.py +0 -63
- {apache_airflow_providers_google-11.0.0rc1.dist-info → apache_airflow_providers_google-12.0.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-11.0.0rc1.dist-info → apache_airflow_providers_google-12.0.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,459 @@
|
|
1
|
+
#
|
2
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
3
|
+
# or more contributor license agreements. See the NOTICE file
|
4
|
+
# distributed with this work for additional information
|
5
|
+
# regarding copyright ownership. The ASF licenses this file
|
6
|
+
# to you under the Apache License, Version 2.0 (the
|
7
|
+
# "License"); you may not use this file except in compliance
|
8
|
+
# with the License. You may obtain a copy of the License at
|
9
|
+
#
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11
|
+
#
|
12
|
+
# Unless required by applicable law or agreed to in writing,
|
13
|
+
# software distributed under the License is distributed on an
|
14
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
15
|
+
# KIND, either express or implied. See the License for the
|
16
|
+
# specific language governing permissions and limitations
|
17
|
+
# under the License.
|
18
|
+
"""This module contains Google Cloud Alloy DB operators."""
|
19
|
+
|
20
|
+
from __future__ import annotations
|
21
|
+
|
22
|
+
from collections.abc import Sequence
|
23
|
+
from functools import cached_property
|
24
|
+
from typing import TYPE_CHECKING, Any
|
25
|
+
|
26
|
+
from google.api_core.exceptions import AlreadyExists, InvalidArgument
|
27
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
28
|
+
from google.cloud import alloydb_v1
|
29
|
+
|
30
|
+
from airflow.exceptions import AirflowException
|
31
|
+
from airflow.providers.google.cloud.hooks.alloy_db import AlloyDbHook
|
32
|
+
from airflow.providers.google.cloud.links.alloy_db import AlloyDBClusterLink
|
33
|
+
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
|
34
|
+
|
35
|
+
if TYPE_CHECKING:
|
36
|
+
import proto
|
37
|
+
from google.api_core.operation import Operation
|
38
|
+
from google.api_core.retry import Retry
|
39
|
+
from google.protobuf.field_mask_pb2 import FieldMask
|
40
|
+
|
41
|
+
from airflow.utils.context import Context
|
42
|
+
|
43
|
+
|
44
|
+
class AlloyDBBaseOperator(GoogleCloudBaseOperator):
    """
    Common base for every AlloyDB operator in this module.

    Stores the shared request parameters and exposes a lazily created hook.

    :param project_id: Required. The ID of the Google Cloud project where the service is used.
    :param location: Required. The ID of the Google Cloud region where the service is used.
    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
        be retried.
    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Optional. Additional metadata that is provided to the method.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("project_id", "location", "gcp_conn_id")

    def __init__(
        self,
        project_id: str,
        location: str,
        gcp_conn_id: str = "google_cloud_default",
        retry: Retry | _MethodDefault = DEFAULT,
        timeout: float | None = None,
        metadata: Sequence[tuple[str, str]] = (),
        impersonation_chain: str | Sequence[str] | None = None,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        # Resource coordinates.
        self.project_id = project_id
        self.location = location
        # Connection / credential settings.
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain
        # Per-request API options.
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata

    @cached_property
    def hook(self) -> AlloyDbHook:
        """Return a cached AlloyDbHook built from this operator's connection settings."""
        return AlloyDbHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
|
99
|
+
|
100
|
+
|
101
|
+
class AlloyDBWriteBaseOperator(AlloyDBBaseOperator):
    """
    Common base for AlloyDB operators that mutate resources.

    These operators perform create, update, or delete operations on the AlloyDB
    objects themselves (not on data inside a database).

    :param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
        so that if you must retry your request, the server ignores the request if it has already been
        completed. The server guarantees that for at least 60 minutes since the first request.
        For example, consider a situation where you make an initial request and the request times out.
        If you make the request again with the same request ID, the server can check if the original operation
        with the same request ID was received, and if so, ignores the second request.
        This prevents clients from accidentally creating duplicate commitments.
        The request ID must be a valid UUID with the exception that zero UUID is not supported
        (00000000-0000-0000-0000-000000000000).
    :param validate_request: Optional. If set, performs request validation, but does not actually
        execute the request.
    """

    template_fields: Sequence[str] = tuple(
        {"request_id", "validate_request"} | set(AlloyDBBaseOperator.template_fields)
    )

    def __init__(
        self,
        request_id: str | None = None,
        validate_request: bool = False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.request_id = request_id
        self.validate_request = validate_request

    def get_operation_result(self, operation: Operation) -> proto.Message | None:
        """
        Retrieve operation result as a proto.Message.

        If the `validate_request` parameter is set, then no operation is performed and thus nothing to wait.
        """
        # Validation-only runs never start a long-running operation, so there is
        # nothing to wait for; report success and return None.
        if self.validate_request:
            self.log.info("The request validation has been passed successfully!")
            return None
        return self.hook.wait_for_operation(timeout=self.timeout, operation=operation)
|
146
|
+
|
147
|
+
|
148
|
+
class AlloyDBCreateClusterOperator(AlloyDBWriteBaseOperator):
    """
    Create an Alloy DB cluster.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:AlloyDBCreateClusterOperator`

    :param cluster_id: Required. ID of the cluster to create.
    :param cluster_configuration: Required. Cluster to create. For more details please see API documentation:
        https://cloud.google.com/python/docs/reference/alloydb/latest/google.cloud.alloydb_v1.types.Cluster
    :param is_secondary: Required. Specifies if the Cluster to be created is Primary or Secondary.
        Please note, if set True, then specify the `secondary_config` field in the cluster so the created
        secondary cluster points to the primary cluster.
    :param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
        so that if you must retry your request, the server ignores the request if it has already been
        completed. The server guarantees that for at least 60 minutes since the first request.
        For example, consider a situation where you make an initial request and the request times out.
        If you make the request again with the same request ID, the server can check if the original operation
        with the same request ID was received, and if so, ignores the second request.
        This prevents clients from accidentally creating duplicate commitments.
        The request ID must be a valid UUID with the exception that zero UUID is not supported
        (00000000-0000-0000-0000-000000000000).
    :param validate_request: Optional. If set, performs request validation, but does not actually
        execute the request.
    :param project_id: Required. The ID of the Google Cloud project where the service is used.
    :param location: Required. The ID of the Google Cloud region where the service is used.
    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
        be retried.
    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Optional. Additional metadata that is provided to the method.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple(
        {"cluster_id", "is_secondary"} | set(AlloyDBWriteBaseOperator.template_fields)
    )
    operator_extra_links = (AlloyDBClusterLink(),)

    def __init__(
        self,
        cluster_id: str,
        cluster_configuration: alloydb_v1.Cluster | dict,
        is_secondary: bool = False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id
        self.cluster_configuration = cluster_configuration
        self.is_secondary = is_secondary

    def _get_existing_cluster_dict(self) -> dict:
        """Fetch the already-existing cluster and return it converted to a plain dict."""
        cluster = self.hook.get_cluster(
            cluster_id=self.cluster_id,
            location=self.location,
            project_id=self.project_id,
        )
        return alloydb_v1.Cluster.to_dict(cluster)

    def execute(self, context: Context) -> Any:
        message = (
            "Validating a Create AlloyDB cluster request."
            if self.validate_request
            else "Creating an AlloyDB cluster."
        )
        self.log.info(message)

        try:
            create_method = (
                self.hook.create_secondary_cluster if self.is_secondary else self.hook.create_cluster
            )
            operation = create_method(
                cluster_id=self.cluster_id,
                cluster=self.cluster_configuration,
                location=self.location,
                project_id=self.project_id,
                request_id=self.request_id,
                validate_only=self.validate_request,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            # Idempotent behavior: a pre-existing cluster is not an error — return it instead.
            self.log.info("AlloyDB cluster %s already exists.", self.cluster_id)
            result = self._get_existing_cluster_dict()
        except InvalidArgument as ex:
            # Creating a second secondary cluster for the same primary surfaces as
            # InvalidArgument; treat that specific case as "already exists" too.
            if "cannot create more than one secondary cluster per primary cluster" in ex.message:
                result = self._get_existing_cluster_dict()
                # Guard against a missing "name" key — result.get("name") could be None.
                self.log.info(
                    "AlloyDB cluster %s already exists.", result.get("name", "").split("/")[-1]
                )
            else:
                # Chain the cause so the original API error is preserved in the traceback.
                raise AirflowException(ex.message) from ex
        except Exception as ex:
            # Consistent with the Update/Delete operators: wrap and chain the cause.
            raise AirflowException(ex) from ex
        else:
            operation_result = self.get_operation_result(operation)
            result = alloydb_v1.Cluster.to_dict(operation_result) if operation_result else None

        if result:
            AlloyDBClusterLink.persist(
                context=context,
                task_instance=self,
                location_id=self.location,
                cluster_id=self.cluster_id,
                project_id=self.project_id,
            )

        return result
|
267
|
+
|
268
|
+
|
269
|
+
class AlloyDBUpdateClusterOperator(AlloyDBWriteBaseOperator):
    """
    Update an Alloy DB cluster.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:AlloyDBUpdateClusterOperator`

    :param cluster_id: Required. ID of the cluster to update.
    :param cluster_configuration: Required. Cluster to update. For more details please see API documentation:
        https://cloud.google.com/python/docs/reference/alloydb/latest/google.cloud.alloydb_v1.types.Cluster
    :param update_mask: Optional. Field mask is used to specify the fields to be overwritten in the
        Cluster resource by the update.
    :param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
        so that if you must retry your request, the server ignores the request if it has already been
        completed. The server guarantees that for at least 60 minutes since the first request.
        For example, consider a situation where you make an initial request and the request times out.
        If you make the request again with the same request ID, the server can check if the original operation
        with the same request ID was received, and if so, ignores the second request.
        This prevents clients from accidentally creating duplicate commitments.
        The request ID must be a valid UUID with the exception that zero UUID is not supported
        (00000000-0000-0000-0000-000000000000).
    :param validate_request: Optional. If set, performs request validation, but does not actually
        execute the request.
    :param allow_missing: Optional. If set to true, update succeeds even if cluster is not found.
        In that case, a new cluster is created and update_mask is ignored.
    :param project_id: Required. The ID of the Google Cloud project where the service is used.
    :param location: Required. The ID of the Google Cloud region where the service is used.
    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
        be retried.
    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Optional. Additional metadata that is provided to the method.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple(
        {"cluster_id", "allow_missing"} | set(AlloyDBWriteBaseOperator.template_fields)
    )
    operator_extra_links = (AlloyDBClusterLink(),)

    def __init__(
        self,
        cluster_id: str,
        cluster_configuration: alloydb_v1.Cluster | dict,
        update_mask: FieldMask | dict | None = None,
        allow_missing: bool = False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id
        self.cluster_configuration = cluster_configuration
        self.update_mask = update_mask
        self.allow_missing = allow_missing

    def execute(self, context: Context) -> Any:
        if self.validate_request:
            self.log.info("Validating an Update AlloyDB cluster request.")
        else:
            self.log.info("Updating an AlloyDB cluster.")

        try:
            operation = self.hook.update_cluster(
                cluster_id=self.cluster_id,
                project_id=self.project_id,
                location=self.location,
                cluster=self.cluster_configuration,
                update_mask=self.update_mask,
                allow_missing=self.allow_missing,
                request_id=self.request_id,
                validate_only=self.validate_request,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except Exception as ex:
            raise AirflowException(ex) from ex

        # The except branch always raises, so reaching this point means the call succeeded.
        operation_result = self.get_operation_result(operation)
        result = alloydb_v1.Cluster.to_dict(operation_result) if operation_result else None

        AlloyDBClusterLink.persist(
            context=context,
            task_instance=self,
            location_id=self.location,
            cluster_id=self.cluster_id,
            project_id=self.project_id,
        )

        if not self.validate_request:
            self.log.info("AlloyDB cluster %s was successfully updated.", self.cluster_id)
        return result
|
372
|
+
|
373
|
+
|
374
|
+
class AlloyDBDeleteClusterOperator(AlloyDBWriteBaseOperator):
    """
    Delete an Alloy DB cluster.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:AlloyDBDeleteClusterOperator`

    :param cluster_id: Required. ID of the cluster to delete.
    :param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
        so that if you must retry your request, the server ignores the request if it has already been
        completed. The server guarantees that for at least 60 minutes since the first request.
        For example, consider a situation where you make an initial request and the request times out.
        If you make the request again with the same request ID, the server can check if the original operation
        with the same request ID was received, and if so, ignores the second request.
        This prevents clients from accidentally creating duplicate commitments.
        The request ID must be a valid UUID with the exception that zero UUID is not supported
        (00000000-0000-0000-0000-000000000000).
    :param validate_request: Optional. If set, performs request validation, but does not actually
        execute the request.
    :param etag: Optional. The current etag of the Cluster. If an etag is provided and does not match the
        current etag of the Cluster, deletion will be blocked and an ABORTED error will be returned.
    :param force: Optional. Whether to cascade delete child instances for given cluster.
    :param project_id: Required. The ID of the Google Cloud project where the service is used.
    :param location: Required. The ID of the Google Cloud region where the service is used.
    :param gcp_conn_id: Optional. The connection ID to use to connect to Google Cloud.
    :param retry: Optional. A retry object used to retry requests. If `None` is specified, requests will not
        be retried.
    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
        Note that if `retry` is specified, the timeout applies to each individual attempt.
    :param metadata: Optional. Additional metadata that is provided to the method.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = tuple(
        {"cluster_id", "etag", "force"} | set(AlloyDBWriteBaseOperator.template_fields)
    )

    def __init__(
        self,
        cluster_id: str,
        etag: str | None = None,
        force: bool = False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.cluster_id = cluster_id
        self.etag = etag
        self.force = force

    def execute(self, context: Context) -> Any:
        if self.validate_request:
            self.log.info("Validating a Delete AlloyDB cluster request.")
        else:
            self.log.info("Deleting an AlloyDB cluster.")

        try:
            operation = self.hook.delete_cluster(
                cluster_id=self.cluster_id,
                project_id=self.project_id,
                location=self.location,
                etag=self.etag,
                force=self.force,
                request_id=self.request_id,
                validate_only=self.validate_request,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except Exception as ex:
            raise AirflowException(ex) from ex

        # The except branch always raises, so this runs only on a successful call.
        self.get_operation_result(operation)

        if not self.validate_request:
            self.log.info("AlloyDB cluster %s was successfully removed.", self.cluster_id)
|
@@ -81,13 +81,14 @@ class AutoMLTrainModelOperator(GoogleCloudBaseOperator):
|
|
81
81
|
"""
|
82
82
|
Creates Google Cloud AutoML model.
|
83
83
|
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
84
|
+
.. warning::
|
85
|
+
AutoMLTrainModelOperator for tables, video intelligence, vision and natural language has been deprecated
|
86
|
+
and is no longer available. Please use
|
87
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLTabularTrainingJobOperator`,
|
88
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLVideoTrainingJobOperator`,
|
89
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.auto_ml.CreateAutoMLImageTrainingJobOperator`,
|
90
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.generative_model.SupervisedFineTuningTrainOperator`,
|
91
|
+
instead.
|
91
92
|
|
92
93
|
.. seealso::
|
93
94
|
For more information on how to use this operator, take a look at the guide:
|
@@ -198,8 +199,9 @@ class AutoMLPredictOperator(GoogleCloudBaseOperator):
|
|
198
199
|
"""
|
199
200
|
Runs prediction operation on Google Cloud AutoML.
|
200
201
|
|
201
|
-
|
202
|
-
|
202
|
+
.. warning::
|
203
|
+
AutoMLPredictOperator for text, image, and video prediction has been deprecated.
|
204
|
+
Please use endpoint_id param instead of model_id param.
|
203
205
|
|
204
206
|
.. seealso::
|
205
207
|
For more information on how to use this operator, take a look at the guide:
|
@@ -356,13 +358,14 @@ class AutoMLBatchPredictOperator(GoogleCloudBaseOperator):
|
|
356
358
|
"""
|
357
359
|
Perform a batch prediction on Google Cloud AutoML.
|
358
360
|
|
359
|
-
|
360
|
-
|
361
|
-
|
362
|
-
|
363
|
-
|
364
|
-
|
365
|
-
|
361
|
+
.. warning::
|
362
|
+
AutoMLBatchPredictOperator for tables, video intelligence, vision and natural language has been deprecated
|
363
|
+
and is no longer available. Please use
|
364
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.CreateBatchPredictionJobOperator`,
|
365
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.GetBatchPredictionJobOperator`,
|
366
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.ListBatchPredictionJobsOperator`,
|
367
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job.DeleteBatchPredictionJobOperator`,
|
368
|
+
instead.
|
366
369
|
|
367
370
|
.. seealso::
|
368
371
|
For more information on how to use this operator, take a look at the guide:
|
@@ -597,9 +600,10 @@ class AutoMLImportDataOperator(GoogleCloudBaseOperator):
|
|
597
600
|
"""
|
598
601
|
Imports data to a Google Cloud AutoML dataset.
|
599
602
|
|
600
|
-
|
601
|
-
|
602
|
-
|
603
|
+
.. warning::
|
604
|
+
AutoMLImportDataOperator for tables, video intelligence, vision and natural language has been deprecated
|
605
|
+
and no longer available. Please use
|
606
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.ImportDataOperator` instead.
|
603
607
|
|
604
608
|
.. seealso::
|
605
609
|
For more information on how to use this operator, take a look at the guide:
|
@@ -704,9 +708,10 @@ class AutoMLTablesListColumnSpecsOperator(GoogleCloudBaseOperator):
|
|
704
708
|
"""
|
705
709
|
Lists column specs in a table.
|
706
710
|
|
707
|
-
|
708
|
-
|
709
|
-
|
711
|
+
.. warning::
|
712
|
+
Operator AutoMLTablesListColumnSpecsOperator has been deprecated due to shutdown of
|
713
|
+
a legacy version of AutoML Tables on March 31, 2024. For additional information
|
714
|
+
see: https://cloud.google.com/automl-tables/docs/deprecations.
|
710
715
|
|
711
716
|
.. seealso::
|
712
717
|
For more information on how to use this operator, take a look at the guide:
|
@@ -823,11 +828,12 @@ class AutoMLTablesUpdateDatasetOperator(GoogleCloudBaseOperator):
|
|
823
828
|
"""
|
824
829
|
Updates a dataset.
|
825
830
|
|
826
|
-
|
827
|
-
|
828
|
-
|
829
|
-
|
830
|
-
|
831
|
+
.. warning::
|
832
|
+
Operator AutoMLTablesUpdateDatasetOperator has been deprecated due to shutdown of
|
833
|
+
a legacy version of AutoML Tables on March 31, 2024. For additional information
|
834
|
+
see: https://cloud.google.com/automl-tables/docs/deprecations.
|
835
|
+
Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.UpdateDatasetOperator`
|
836
|
+
instead.
|
831
837
|
|
832
838
|
.. seealso::
|
833
839
|
For more information on how to use this operator, take a look at the guide:
|
@@ -922,9 +928,10 @@ class AutoMLGetModelOperator(GoogleCloudBaseOperator):
|
|
922
928
|
"""
|
923
929
|
Get Google Cloud AutoML model.
|
924
930
|
|
925
|
-
|
926
|
-
|
927
|
-
|
931
|
+
.. warning::
|
932
|
+
AutoMLGetModelOperator for tables, video intelligence, vision and natural language has been deprecated
|
933
|
+
and no longer available. Please use
|
934
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.GetModelOperator` instead.
|
928
935
|
|
929
936
|
.. seealso::
|
930
937
|
For more information on how to use this operator, take a look at the guide:
|
@@ -1015,9 +1022,10 @@ class AutoMLDeleteModelOperator(GoogleCloudBaseOperator):
|
|
1015
1022
|
"""
|
1016
1023
|
Delete Google Cloud AutoML model.
|
1017
1024
|
|
1018
|
-
|
1019
|
-
|
1020
|
-
|
1025
|
+
.. warning::
|
1026
|
+
AutoMLDeleteModelOperator for tables, video intelligence, vision and natural language has been deprecated
|
1027
|
+
and no longer available. Please use
|
1028
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.model_service.DeleteModelOperator` instead.
|
1021
1029
|
|
1022
1030
|
.. seealso::
|
1023
1031
|
For more information on how to use this operator, take a look at the guide:
|
@@ -1112,11 +1120,12 @@ class AutoMLDeployModelOperator(GoogleCloudBaseOperator):
|
|
1112
1120
|
Only applicable for Text Classification, Image Object Detection and Tables; all other
|
1113
1121
|
domains manage deployment automatically.
|
1114
1122
|
|
1115
|
-
|
1116
|
-
|
1117
|
-
|
1118
|
-
|
1119
|
-
|
1123
|
+
.. warning::
|
1124
|
+
Operator AutoMLDeployModelOperator has been deprecated due to shutdown of a legacy version
|
1125
|
+
of AutoML Natural Language, Vision, Video Intelligence on March 31, 2024.
|
1126
|
+
For additional information see: https://cloud.google.com/vision/automl/docs/deprecations .
|
1127
|
+
Please use :class:`airflow.providers.google.cloud.operators.vertex_ai.endpoint_service.DeployModelOperator`
|
1128
|
+
instead.
|
1120
1129
|
|
1121
1130
|
.. seealso::
|
1122
1131
|
For more information on how to use this operator, take a look at the guide:
|
@@ -1209,9 +1218,10 @@ class AutoMLTablesListTableSpecsOperator(GoogleCloudBaseOperator):
|
|
1209
1218
|
"""
|
1210
1219
|
Lists table specs in a dataset.
|
1211
1220
|
|
1212
|
-
|
1213
|
-
|
1214
|
-
|
1221
|
+
.. warning::
|
1222
|
+
Operator AutoMLTablesListTableSpecsOperator has been deprecated due to shutdown of
|
1223
|
+
a legacy version of AutoML Tables on March 31, 2024. For additional information
|
1224
|
+
see: https://cloud.google.com/automl-tables/docs/deprecations.
|
1215
1225
|
|
1216
1226
|
.. seealso::
|
1217
1227
|
For more information on how to use this operator, take a look at the guide:
|
@@ -1318,9 +1328,10 @@ class AutoMLListDatasetOperator(GoogleCloudBaseOperator):
|
|
1318
1328
|
"""
|
1319
1329
|
Lists AutoML Datasets in project.
|
1320
1330
|
|
1321
|
-
|
1322
|
-
|
1323
|
-
|
1331
|
+
.. warning::
|
1332
|
+
AutoMLListDatasetOperator for tables, video intelligence, vision and natural language has been deprecated
|
1333
|
+
and no longer available. Please use
|
1334
|
+
:class:`airflow.providers.google.cloud.operators.vertex_ai.dataset.ListDatasetsOperator` instead.
|
1324
1335
|
|
1325
1336
|
.. seealso::
|
1326
1337
|
For more information on how to use this operator, take a look at the guide:
|