apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 16.0.0__py3-none-any.whl
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/ads/hooks/ads.py +34 -0
- airflow/providers/google/cloud/hooks/bigquery.py +63 -76
- airflow/providers/google/cloud/hooks/dataflow.py +67 -5
- airflow/providers/google/cloud/hooks/gcs.py +3 -3
- airflow/providers/google/cloud/hooks/looker.py +5 -0
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -36
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +1 -66
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/links/cloud_run.py +59 -0
- airflow/providers/google/cloud/links/vertex_ai.py +49 -0
- airflow/providers/google/cloud/log/gcs_task_handler.py +7 -5
- airflow/providers/google/cloud/operators/bigquery.py +49 -10
- airflow/providers/google/cloud/operators/cloud_run.py +20 -2
- airflow/providers/google/cloud/operators/gcs.py +1 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +4 -86
- airflow/providers/google/cloud/operators/pubsub.py +2 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +0 -92
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +4 -0
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +388 -0
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +9 -5
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +2 -0
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +11 -5
- airflow/providers/google/cloud/triggers/bigquery.py +32 -5
- airflow/providers/google/cloud/triggers/dataflow.py +122 -0
- airflow/providers/google/cloud/triggers/dataproc.py +62 -10
- airflow/providers/google/get_provider_info.py +18 -5
- airflow/providers/google/leveldb/hooks/leveldb.py +25 -0
- airflow/providers/google/version_compat.py +0 -1
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-16.0.0.dist-info}/METADATA +92 -85
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-16.0.0.dist-info}/RECORD +35 -32
- airflow/providers/google/cloud/links/automl.py +0 -193
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-16.0.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-16.0.0.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/operators/vertex_ai/ray.py (new file):

```diff
@@ -0,0 +1,388 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains Google Vertex AI Ray operators."""
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from functools import cached_property
+from typing import TYPE_CHECKING
+
+from google.api_core.exceptions import NotFound
+from google.cloud.aiplatform.vertex_ray.util import resources
+
+from airflow.exceptions import AirflowException
+from airflow.providers.google.cloud.hooks.vertex_ai.ray import RayHook
+from airflow.providers.google.cloud.links.vertex_ai import (
+    VertexAIRayClusterLink,
+    VertexAIRayClusterListLink,
+)
+from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class RayBaseOperator(GoogleCloudBaseOperator):
+    """
+    Base class for Ray operators.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = (
+        "location",
+        "gcp_conn_id",
+        "project_id",
+        "impersonation_chain",
+    )
+
+    def __init__(
+        self,
+        project_id: str,
+        location: str,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.location = location
+        self.project_id = project_id
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    @cached_property
+    def hook(self) -> RayHook:
+        return RayHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+
+class CreateRayClusterOperator(RayBaseOperator):
+    """
+    Create a Ray cluster on the Vertex AI.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param head_node_type: The head node resource. Resources.node_count must be 1. If not set, default
+        value of Resources() class will be used.
+    :param python_version: Python version for the ray cluster.
+    :param ray_version: Ray version for the ray cluster. Default is 2.33.0.
+    :param network: Virtual private cloud (VPC) network. For Ray Client, VPC peering is required to
+        connect to the Ray Cluster managed in the Vertex API service. For Ray Job API, VPC network is not
+        required because Ray Cluster connection can be accessed through dashboard address.
+    :param service_account: Service account to be used for running Ray programs on the cluster.
+    :param cluster_name: This value may be up to 63 characters, and valid characters are `[a-z0-9_-]`.
+        The first character cannot be a number or hyphen.
+    :param worker_node_types: The list of Resources of the worker nodes. The same Resources object should
+        not appear multiple times in the list.
+    :param custom_images: The NodeImages which specifies head node and worker nodes images. All the
+        workers will share the same image. If each Resource has a specific custom image, use
+        `Resources.custom_image` for head/worker_node_type(s). Note that configuring
+        `Resources.custom_image` will override `custom_images` here. Allowlist only.
+    :param enable_metrics_collection: Enable Ray metrics collection for visualization.
+    :param enable_logging: Enable exporting Ray logs to Cloud Logging.
+    :param psc_interface_config: PSC-I config.
+    :param reserved_ip_ranges: A list of names for the reserved IP ranges under the VPC network that can
+        be used for this cluster. If set, we will deploy the cluster within the provided IP ranges.
+        Otherwise, the cluster is deployed to any IP ranges under the provided VPC network.
+        Example: ["vertex-ai-ip-range"].
+    :param labels: The labels with user-defined metadata to organize Ray cluster.
+        Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain
+        lowercase letters, numeric characters, underscores and dashes. International characters are allowed.
+        See https://goo.gl/xmQnxf for more information and examples of labels.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"head_node_type", "worker_node_types"} | set(RayBaseOperator.template_fields)
+    )
+    operator_extra_links = (VertexAIRayClusterLink(),)
+
+    def __init__(
+        self,
+        head_node_type: resources.Resources = resources.Resources(),
+        python_version: str = "3.10",
+        ray_version: str = "2.33",
+        network: str | None = None,
+        service_account: str | None = None,
+        cluster_name: str | None = None,
+        worker_node_types: list[resources.Resources] | None = None,
+        custom_images: resources.NodeImages | None = None,
+        enable_metrics_collection: bool = True,
+        enable_logging: bool = True,
+        psc_interface_config: resources.PscIConfig | None = None,
+        reserved_ip_ranges: list[str] | None = None,
+        labels: dict[str, str] | None = None,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.head_node_type = head_node_type
+        self.python_version = python_version
+        self.ray_version = ray_version
+        self.network = network
+        self.service_account = service_account
+        self.cluster_name = cluster_name
+        self.worker_node_types = worker_node_types
+        self.custom_images = custom_images
+        self.enable_metrics_collection = enable_metrics_collection
+        self.enable_logging = enable_logging
+        self.psc_interface_config = psc_interface_config
+        self.reserved_ip_ranges = reserved_ip_ranges
+        self.labels = labels
+
+    def execute(self, context: Context):
+        self.log.info("Creating a Ray cluster.")
+        try:
+            cluster_path = self.hook.create_ray_cluster(
+                project_id=self.project_id,
+                location=self.location,
+                head_node_type=self.head_node_type,
+                python_version=self.python_version,
+                ray_version=self.ray_version,
+                network=self.network,
+                service_account=self.service_account,
+                cluster_name=self.cluster_name,
+                worker_node_types=self.worker_node_types,
+                custom_images=self.custom_images,
+                enable_metrics_collection=self.enable_metrics_collection,
+                enable_logging=self.enable_logging,
+                psc_interface_config=self.psc_interface_config,
+                reserved_ip_ranges=self.reserved_ip_ranges,
+                labels=self.labels,
+            )
+            cluster_id = self.hook.extract_cluster_id(cluster_path)
+            self.xcom_push(
+                context=context,
+                key="cluster_id",
+                value=cluster_id,
+            )
+            VertexAIRayClusterLink.persist(context=context, task_instance=self, cluster_id=cluster_id)
+            self.log.info("Ray cluster was created.")
+        except Exception as error:
+            raise AirflowException(error)
+        return cluster_path
+
+
+class ListRayClustersOperator(RayBaseOperator):
+    """
+    List Ray clusters under the currently authenticated project.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    operator_extra_links = (VertexAIRayClusterListLink(),)
+
+    def execute(self, context: Context):
+        VertexAIRayClusterListLink.persist(context=context, task_instance=self)
+        self.log.info("Listing Clusters from location %s.", self.location)
+        try:
+            ray_cluster_list = self.hook.list_ray_clusters(
+                project_id=self.project_id,
+                location=self.location,
+            )
+            ray_cluster_dict_list = [
+                self.hook.serialize_cluster_obj(ray_cluster) for ray_cluster in ray_cluster_list
+            ]
+        except Exception as error:
+            raise AirflowException(error)
+        return ray_cluster_dict_list
+
+
+class GetRayClusterOperator(RayBaseOperator):
+    """
+    Get Ray cluster.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param cluster_id: Cluster resource ID.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple({"cluster_id"} | set(RayBaseOperator.template_fields))
+    operator_extra_links = (VertexAIRayClusterLink(),)
+
+    def __init__(
+        self,
+        cluster_id: str,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.cluster_id = cluster_id
+
+    def execute(self, context: Context):
+        VertexAIRayClusterLink.persist(
+            context=context,
+            task_instance=self,
+            cluster_id=self.cluster_id,
+        )
+        self.log.info("Getting Cluster: %s", self.cluster_id)
+        try:
+            ray_cluster = self.hook.get_ray_cluster(
+                project_id=self.project_id,
+                location=self.location,
+                cluster_id=self.cluster_id,
+            )
+            self.log.info("Cluster was gotten.")
+            ray_cluster_dict = self.hook.serialize_cluster_obj(ray_cluster)
+            return ray_cluster_dict
+        except NotFound as not_found_err:
+            self.log.info("The Cluster %s does not exist.", self.cluster_id)
+            raise AirflowException(not_found_err)
+
+
+class UpdateRayClusterOperator(RayBaseOperator):
+    """
+    Update Ray cluster (currently support resizing node counts for worker nodes).
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param cluster_id: Cluster resource ID.
+    :param worker_node_types: The list of Resources of the resized worker nodes. The same Resources
+        object should not appear multiple times in the list.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple(
+        {"cluster_id", "worker_node_types"} | set(RayBaseOperator.template_fields)
+    )
+    operator_extra_links = (VertexAIRayClusterLink(),)
+
+    def __init__(
+        self,
+        cluster_id: str,
+        worker_node_types: list[resources.Resources],
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.cluster_id = cluster_id
+        self.worker_node_types = worker_node_types
+
+    def execute(self, context: Context):
+        VertexAIRayClusterLink.persist(
+            context=context,
+            task_instance=self,
+            cluster_id=self.cluster_id,
+        )
+        self.log.info("Updating a Ray cluster.")
+        try:
+            cluster_path = self.hook.update_ray_cluster(
+                project_id=self.project_id,
+                location=self.location,
+                cluster_id=self.cluster_id,
+                worker_node_types=self.worker_node_types,
+            )
+            self.log.info("Ray cluster %s was updated.", self.cluster_id)
+            return cluster_path
+        except NotFound as not_found_err:
+            self.log.info("The Cluster %s does not exist.", self.cluster_id)
+            raise AirflowException(not_found_err)
+        except Exception as error:
+            raise AirflowException(error)
+
+
+class DeleteRayClusterOperator(RayBaseOperator):
+    """
+    Delete Ray cluster.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud region that the service belongs to.
+    :param cluster_id: Cluster resource ID.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = tuple({"cluster_id"} | set(RayBaseOperator.template_fields))
+
+    def __init__(
+        self,
+        cluster_id: str,
+        *args,
+        **kwargs,
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.cluster_id = cluster_id
+
+    def execute(self, context: Context):
+        try:
+            self.log.info("Deleting Ray cluster: %s", self.cluster_id)
+            self.hook.delete_ray_cluster(
+                project_id=self.project_id,
+                location=self.location,
+                cluster_id=self.cluster_id,
+            )
+            self.log.info("Ray cluster was deleted.")
+        except NotFound as not_found_err:
+            self.log.info("The Ray cluster ID %s does not exist.", self.cluster_id)
+            raise AirflowException(not_found_err)
```
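For orientation, here is a minimal, hypothetical DAG sketch using the new operators; the project ID, region, and node counts are placeholder values and are not part of the diff. The XCom key `cluster_id` is the one pushed by `CreateRayClusterOperator` above, and `cluster_id` is a templated field on the other operators.

```python
# Hypothetical usage sketch of the Vertex AI Ray operators added in 16.0.0.
# "my-project", "us-central1", and the node shapes are illustrative placeholders.
from google.cloud.aiplatform.vertex_ray.util import resources

from airflow import DAG
from airflow.providers.google.cloud.operators.vertex_ai.ray import (
    CreateRayClusterOperator,
    DeleteRayClusterOperator,
)

with DAG(dag_id="vertex_ai_ray_example", schedule=None) as dag:
    create_cluster = CreateRayClusterOperator(
        task_id="create_ray_cluster",
        project_id="my-project",  # placeholder
        location="us-central1",  # placeholder
        worker_node_types=[resources.Resources(node_count=2)],
    )
    # The create operator pushes the generated cluster ID to XCom under
    # "cluster_id"; pull it back via a template to tear the cluster down.
    delete_cluster = DeleteRayClusterOperator(
        task_id="delete_ray_cluster",
        project_id="my-project",
        location="us-central1",
        cluster_id="{{ ti.xcom_pull(task_ids='create_ray_cluster', key='cluster_id') }}",
    )
    create_cluster >> delete_cluster
```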
airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py:

```diff
@@ -25,6 +25,7 @@ from typing import TYPE_CHECKING
 from airflow.models import BaseOperator
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.cloud.links.bigquery import BigQueryTableLink
+from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID

 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -73,6 +74,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
+    :param project_id: Google Cloud Project where the job is running
     """

     template_fields: Sequence[str] = (
@@ -93,6 +95,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         write_disposition: str = "WRITE_EMPTY",
         create_disposition: str = "CREATE_IF_NEEDED",
         gcp_conn_id: str = "google_cloud_default",
+        project_id: str = PROVIDE_PROJECT_ID,
         labels: dict | None = None,
         encryption_configuration: dict | None = None,
         location: str | None = None,
@@ -112,6 +115,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         self.impersonation_chain = impersonation_chain
         self.hook: BigQueryHook | None = None
         self._job_conf: dict = {}
+        self.project_id = project_id

     def _prepare_job_configuration(self):
         self.source_project_dataset_tables = (
@@ -124,7 +128,7 @@ class BigQueryToBigQueryOperator(BaseOperator):
         for source_project_dataset_table in self.source_project_dataset_tables:
             source_project, source_dataset, source_table = self.hook.split_tablename(
                 table_input=source_project_dataset_table,
-                default_project_id=self.
+                default_project_id=self.project_id,
                 var_name="source_project_dataset_table",
             )
             source_project_dataset_tables_fixup.append(
@@ -133,7 +137,7 @@ class BigQueryToBigQueryOperator(BaseOperator):

         destination_project, destination_dataset, destination_table = self.hook.split_tablename(
             table_input=self.destination_project_dataset_table,
-            default_project_id=self.
+            default_project_id=self.project_id,
         )
         configuration = {
             "copy": {
@@ -168,12 +172,12 @@ class BigQueryToBigQueryOperator(BaseOperator):
             impersonation_chain=self.impersonation_chain,
         )

-        if not self.
-
+        if not self.project_id:
+            self.project_id = self.hook.project_id

         configuration = self._prepare_job_configuration()
         self._job_conf = self.hook.insert_job(
-            configuration=configuration, project_id=self.
+            configuration=configuration, project_id=self.project_id
         ).to_api_repr()

         dest_table_info = self._job_conf["configuration"]["copy"]["destinationTable"]
```
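The net effect of these hunks is that the project used for the copy job can now be passed explicitly instead of always being taken from the hook's connection. A minimal sketch, with placeholder project and table names:

```python
# Sketch of the new project_id parameter; all names here are placeholders.
from airflow.providers.google.cloud.transfers.bigquery_to_bigquery import (
    BigQueryToBigQueryOperator,
)

copy_table = BigQueryToBigQueryOperator(
    task_id="copy_table",
    source_project_dataset_tables="src-project.dataset.table",
    destination_project_dataset_table="dst-project.dataset.table_copy",
    # New in 16.0.0; when unset, it falls back to the hook's project_id.
    project_id="billing-project",
)
```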
airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py:

```diff
@@ -208,7 +208,7 @@ class FacebookAdsReportToGcsOperator(BaseOperator):

     def _flush_rows(self, converted_rows: list[Any] | None, object_name: str):
         if converted_rows:
-            headers =
+            headers = self.fields
             with tempfile.NamedTemporaryFile("w", suffix=".csv") as csvfile:
                 writer = csv.DictWriter(csvfile, fieldnames=headers)
                 writer.writeheader()
```
airflow/providers/google/cloud/transfers/gcs_to_bigquery.py:

```diff
@@ -547,6 +547,7 @@ class GCSToBigQueryOperator(BaseOperator):
                 "quote",
                 "encoding",
                 "preserveAsciiControlCharacters",
+                "columnNameCharacterMap",
             ],
             "googleSheetsOptions": ["skipLeadingRows"],
         }
@@ -672,6 +673,7 @@ class GCSToBigQueryOperator(BaseOperator):
                 "quote",
                 "encoding",
                 "preserveAsciiControlCharacters",
+                "columnNameCharacterMap",
             ],
             "DATASTORE_BACKUP": ["projectionFields"],
             "NEWLINE_DELIMITED_JSON": ["autodetect", "ignoreUnknownValues"],
```
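These hunks add `columnNameCharacterMap` to the allowed `csvOptions` keys, so the option is now forwarded to the BigQuery load job via `src_fmt_configs` instead of being filtered out. A minimal sketch, assuming placeholder bucket/table names and BigQuery's documented `"V2"` character-map mode:

```python
# Sketch: passing the newly allowed columnNameCharacterMap key for a CSV load.
# Bucket, object, and table names are placeholders; "V2" is the BigQuery load
# option value for flexible column names.
from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator

load_csv = GCSToBigQueryOperator(
    task_id="load_csv",
    bucket="my-bucket",
    source_objects=["data/input.csv"],
    destination_project_dataset_table="my-project.dataset.table",
    source_format="CSV",
    src_fmt_configs={"columnNameCharacterMap": "V2"},
)
```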
airflow/providers/google/cloud/transfers/http_to_gcs.py (new file):

```diff
@@ -0,0 +1,193 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains operator to move data from HTTP endpoint to GCS."""
+
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING, Any
+
+from airflow.models import BaseOperator
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.http.hooks.http import HttpHook
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+    from requests.auth import AuthBase
+
+    from airflow.utils.context import Context
+
+
+class HttpToGCSOperator(BaseOperator):
+    """
+    Calls an endpoint on an HTTP system to execute an action and store the result in GCS.
+
+    :param http_conn_id: The :ref:`http connection<howto/connection:http>` to run
+        the operator against
+    :param endpoint: The relative part of the full url. (templated)
+    :param method: The HTTP method to use, default = "POST"
+    :param data: The data to pass. POST-data in POST/PUT and params
+        in the URL for a GET request. (templated)
+    :param headers: The HTTP headers to be added to the GET request
+    :param response_check: A check against the 'requests' response object.
+        The callable takes the response object as the first positional argument
+        and optionally any number of keyword arguments available in the context dictionary.
+        It should return True for 'pass' and False otherwise.
+    :param response_filter: A function allowing you to manipulate the response
+        text. e.g. response_filter=lambda response: json.loads(response.text).
+        The callable takes the response object as the first positional argument
+        and optionally any number of keyword arguments available in the context dictionary.
+    :param extra_options: Extra options for the 'requests' library, see the
+        'requests' documentation (options to modify timeout, ssl, etc.)
+    :param log_response: Log the response (default: False)
+    :param auth_type: The auth type for the service
+    :param tcp_keep_alive: Enable TCP Keep Alive for the connection.
+    :param tcp_keep_alive_idle: The TCP Keep Alive Idle parameter (corresponds to ``socket.TCP_KEEPIDLE``).
+    :param tcp_keep_alive_count: The TCP Keep Alive count parameter (corresponds to ``socket.TCP_KEEPCNT``)
+    :param tcp_keep_alive_interval: The TCP Keep Alive interval parameter (corresponds to
+        ``socket.TCP_KEEPINTVL``)
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using short-term credentials,
+        or chained list of accounts required to get the access_token of the last account in the list,
+        which will be impersonated in the request. If set as a string,
+        the account must grant the originating account the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant Service Account Token Creator IAM role to the directly preceding identity,
+        with first account from the list granting this role to the originating account.
+    :param bucket_name: The bucket to upload to.
+    :param object_name: The object name to set when uploading the file.
+    :param mime_type: The file mime type set when uploading the file.
+    :param gzip: Option to compress local file or file data for upload
+    :param encoding: bytes encoding for file data if provided as string
+    :param chunk_size: Blob chunk size.
+    :param timeout: Request timeout in seconds.
+    :param num_max_attempts: Number of attempts to try to upload the file.
+    :param metadata: The metadata to be uploaded with the file.
+    :param cache_control: Cache-Control metadata field.
+    :param user_project: The identifier of the Google Cloud project to bill for the request. Required for Requester Pays buckets.
+    """
+
+    template_fields: Sequence[str] = (
+        "http_conn_id",
+        "endpoint",
+        "data",
+        "headers",
+        "gcp_conn_id",
+        "bucket_name",
+        "object_name",
+    )
+    template_fields_renderers = {"headers": "json", "data": "py"}
+    template_ext: Sequence[str] = ()
+    ui_color = "#f4a460"
+
+    def __init__(
+        self,
+        *,
+        endpoint: str | None = None,
+        method: str = "GET",
+        data: Any = None,
+        headers: dict[str, str] | None = None,
+        extra_options: dict[str, Any] | None = None,
+        http_conn_id: str = "http_default",
+        log_response: bool = False,
+        auth_type: type[AuthBase] | None = None,
+        tcp_keep_alive: bool = True,
+        tcp_keep_alive_idle: int = 120,
+        tcp_keep_alive_count: int = 20,
+        tcp_keep_alive_interval: int = 30,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        bucket_name: str,
+        object_name: str,
+        mime_type: str | None = None,
+        gzip: bool = False,
+        encoding: str | None = None,
+        chunk_size: int | None = None,
+        timeout: int | None = None,
+        num_max_attempts: int = 3,
+        metadata: dict | None = None,
+        cache_control: str | None = None,
+        user_project: str | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.http_conn_id = http_conn_id
+        self.method = method
+        self.endpoint = endpoint
+        self.headers = headers or {}
+        self.data = data or {}
+        self.extra_options = extra_options or {}
+        self.log_response = log_response
+        self.auth_type = auth_type
+        self.tcp_keep_alive = tcp_keep_alive
+        self.tcp_keep_alive_idle = tcp_keep_alive_idle
+        self.tcp_keep_alive_count = tcp_keep_alive_count
+        self.tcp_keep_alive_interval = tcp_keep_alive_interval
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.bucket_name = bucket_name
+        self.object_name = object_name
+        self.mime_type = mime_type
+        self.gzip = gzip
+        self.encoding = encoding
+        self.chunk_size = chunk_size
+        self.timeout = timeout
+        self.num_max_attempts = num_max_attempts
+        self.metadata = metadata
+        self.cache_control = cache_control
+        self.user_project = user_project
+
+    @cached_property
+    def http_hook(self) -> HttpHook:
+        """Create and return an HttpHook."""
+        return HttpHook(
+            self.method,
+            http_conn_id=self.http_conn_id,
+            auth_type=self.auth_type,
+            tcp_keep_alive=self.tcp_keep_alive,
+            tcp_keep_alive_idle=self.tcp_keep_alive_idle,
+            tcp_keep_alive_count=self.tcp_keep_alive_count,
+            tcp_keep_alive_interval=self.tcp_keep_alive_interval,
+        )
+
+    @cached_property
+    def gcs_hook(self) -> GCSHook:
+        """Create and return a GCSHook."""
+        return GCSHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+    def execute(self, context: Context):
+        self.log.info("Calling HTTP method")
+        response = self.http_hook.run(
+            endpoint=self.endpoint, data=self.data, headers=self.headers, extra_options=self.extra_options
+        )
+
+        self.log.info("Uploading to GCS")
+        self.gcs_hook.upload(
+            data=response.content,
+            bucket_name=self.bucket_name,
+            object_name=self.object_name,
+            mime_type=self.mime_type,
+            gzip=self.gzip,
+            encoding=self.encoding or response.encoding,
+            chunk_size=self.chunk_size,
+            timeout=self.timeout,
+            num_max_attempts=self.num_max_attempts,
+            metadata=self.metadata,
+            cache_control=self.cache_control,
+            user_project=self.user_project,
+        )
```
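A minimal, hypothetical usage sketch of the new transfer; the endpoint, connection IDs, and bucket below are placeholders, not values from the diff:

```python
# Sketch of HttpToGCSOperator (new in 16.0.0); endpoint and bucket are placeholders.
# bucket_name and object_name are required keyword-only arguments, and
# object_name is templated, so Jinja like {{ ds }} works.
from airflow.providers.google.cloud.transfers.http_to_gcs import HttpToGCSOperator

fetch_report = HttpToGCSOperator(
    task_id="fetch_report",
    http_conn_id="http_default",
    method="GET",
    endpoint="/api/v1/report",
    gcp_conn_id="google_cloud_default",
    bucket_name="my-bucket",
    object_name="reports/{{ ds }}/report.json",
    mime_type="application/json",
)
```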