apache-airflow-providers-google 10.21.1rc2__py3-none-any.whl → 10.22.0rc1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/cloud/hooks/cloud_run.py +124 -0
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +2 -0
- airflow/providers/google/cloud/operators/cloud_run.py +143 -2
- airflow/providers/google/cloud/operators/dataproc.py +71 -89
- airflow/providers/google/cloud/triggers/dataproc.py +4 -1
- airflow/providers/google/common/hooks/base_google.py +31 -6
- airflow/providers/google/get_provider_info.py +4 -3
- {apache_airflow_providers_google-10.21.1rc2.dist-info → apache_airflow_providers_google-10.22.0rc1.dist-info}/METADATA +10 -10
- {apache_airflow_providers_google-10.21.1rc2.dist-info → apache_airflow_providers_google-10.22.0rc1.dist-info}/RECORD +12 -12
- {apache_airflow_providers_google-10.21.1rc2.dist-info → apache_airflow_providers_google-10.22.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-10.21.1rc2.dist-info → apache_airflow_providers_google-10.22.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.
+__version__ = "10.22.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.8.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-google:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-google:{__version__}` needs Apache Airflow 2.8.0+"
     )

airflow/providers/google/cloud/hooks/cloud_run.py
@@ -22,13 +22,19 @@ from typing import TYPE_CHECKING, Any, Iterable, Sequence
 
 from google.cloud.run_v2 import (
     CreateJobRequest,
+    CreateServiceRequest,
     DeleteJobRequest,
+    DeleteServiceRequest,
     GetJobRequest,
+    GetServiceRequest,
     Job,
     JobsAsyncClient,
     JobsClient,
     ListJobsRequest,
     RunJobRequest,
+    Service,
+    ServicesAsyncClient,
+    ServicesClient,
     UpdateJobRequest,
 )
 from google.longrunning import operations_pb2  # type: ignore[attr-defined]
@@ -39,6 +45,7 @@ from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 
 if TYPE_CHECKING:
     from google.api_core import operation
+    from google.api_core.operation_async import AsyncOperation
     from google.cloud.run_v2.services.jobs import pagers
 
 
@@ -183,3 +190,120 @@ class CloudRunAsyncHook(GoogleBaseHook):
         return await self.get_conn().get_operation(
             operations_pb2.GetOperationRequest(name=operation_name), timeout=120
         )
+
+
+class CloudRunServiceHook(GoogleBaseHook):
+    """
+    Hook for the Google Cloud Run services.
+
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account.
+    """
+
+    def __init__(
+        self,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        self._client: ServicesClient | None = None
+        super().__init__(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain)
+
+    def get_conn(self):
+        if self._client is None:
+            self._client = ServicesClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)
+
+        return self._client
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def get_service(self, service_name: str, region: str, project_id: str = PROVIDE_PROJECT_ID):
+        get_service_request = GetServiceRequest(
+            name=f"projects/{project_id}/locations/{region}/services/{service_name}"
+        )
+        return self.get_conn().get_service(get_service_request)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def create_service(
+        self, service_name: str, service: Service | dict, region: str, project_id: str = PROVIDE_PROJECT_ID
+    ) -> Service:
+        if isinstance(service, dict):
+            service = Service(service)
+
+        create_request = CreateServiceRequest(
+            parent=f"projects/{project_id}/locations/{region}",
+            service=service,
+            service_id=service_name,
+        )
+
+        operation = self.get_conn().create_service(create_request)
+        return operation.result()
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    def delete_service(self, service_name: str, region: str, project_id: str = PROVIDE_PROJECT_ID) -> Service:
+        delete_request = DeleteServiceRequest(
+            name=f"projects/{project_id}/locations/{region}/services/{service_name}"
+        )
+
+        operation = self.get_conn().delete_service(delete_request)
+        return operation.result()
+
+
+class CloudRunServiceAsyncHook(GoogleBaseHook):
+    """
+    Async hook for the Google Cloud Run services.
+
+    :param gcp_conn_id: The connection ID to use when fetching connection info.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account.
+    """
+
+    def __init__(
+        self,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        self._client: ServicesClient | None = None
+        super().__init__(gcp_conn_id=gcp_conn_id, impersonation_chain=impersonation_chain)
+
+    def get_conn(self):
+        if self._client is None:
+            self._client = ServicesAsyncClient(credentials=self.get_credentials(), client_info=CLIENT_INFO)
+
+        return self._client
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    async def create_service(
+        self, service_name: str, service: Service | dict, region: str, project_id: str = PROVIDE_PROJECT_ID
+    ) -> AsyncOperation:
+        if isinstance(service, dict):
+            service = Service(service)
+
+        create_request = CreateServiceRequest(
+            parent=f"projects/{project_id}/locations/{region}",
+            service=service,
+            service_id=service_name,
+        )
+
+        return await self.get_conn().create_service(create_request)
+
+    @GoogleBaseHook.fallback_to_default_project_id
+    async def delete_service(
+        self, service_name: str, region: str, project_id: str = PROVIDE_PROJECT_ID
+    ) -> AsyncOperation:
+        delete_request = DeleteServiceRequest(
+            name=f"projects/{project_id}/locations/{region}/services/{service_name}"
+        )
+
+        return await self.get_conn().delete_service(delete_request)
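
Note: the new hooks mirror the existing job hooks: a lazily created `ServicesClient`/`ServicesAsyncClient`, `fallback_to_default_project_id` for the project, and blocking `operation.result()` calls on the sync side. A minimal, hypothetical usage sketch follows; the project, region, service name, and container image are placeholders, not values from this release::

    from google.cloud.run_v2 import Service

    from airflow.providers.google.cloud.hooks.cloud_run import CloudRunServiceHook

    hook = CloudRunServiceHook(gcp_conn_id="google_cloud_default")

    # Build a Service proto; a plain dict of the same shape is also accepted,
    # since create_service wraps dicts with Service(service).
    service = Service()
    service.template.containers.append({"image": "us-docker.pkg.dev/cloudrun/container/hello"})

    created = hook.create_service(
        service_name="my-service",   # placeholder
        service=service,
        region="us-central1",        # placeholder
        project_id="my-project",     # optional: falls back to the connection's project
    )
    print(created.uri)

    hook.delete_service(service_name="my-service", region="us-central1", project_id="my-project")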

airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py
@@ -552,6 +552,8 @@ class CloudDataTransferServiceHook(GoogleBaseHook):
 class CloudDataTransferServiceAsyncHook(GoogleBaseAsyncHook):
     """Asynchronous hook for Google Storage Transfer Service."""
 
+    sync_hook_class = CloudDataTransferServiceHook
+
     def __init__(self, project_id: str = PROVIDE_PROJECT_ID, **kwargs: Any) -> None:
         super().__init__(**kwargs)
         self.project_id = project_id
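
The added `sync_hook_class` matters because `GoogleBaseAsyncHook` builds its blocking counterpart from that attribute; an async hook that leaves it unset cannot construct a sync hook on the triggerer. A paraphrased sketch of the mechanism, not a verbatim copy of `base_google.py`::

    from asgiref.sync import sync_to_async

    class GoogleBaseAsyncHookSketch:
        """Illustrative stand-in for GoogleBaseAsyncHook."""

        sync_hook_class = None  # subclasses must point this at their sync hook

        def __init__(self, **kwargs):
            self._hook_kwargs = kwargs
            self._sync_hook = None

        async def get_sync_hook(self):
            # Construct the blocking hook off the event loop on first use.
            if not self._sync_hook:
                self._sync_hook = await sync_to_async(self.sync_hook_class)(**self._hook_kwargs)
            return self._sync_hook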

airflow/providers/google/cloud/operators/cloud_run.py
@@ -19,11 +19,13 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any, Sequence
 
-
+import google.cloud.exceptions
+from google.api_core.exceptions import AlreadyExists
+from google.cloud.run_v2 import Job, Service
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.providers.google.cloud.hooks.cloud_run import CloudRunHook
+from airflow.providers.google.cloud.hooks.cloud_run import CloudRunHook, CloudRunServiceHook
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
 from airflow.providers.google.cloud.triggers.cloud_run import CloudRunJobFinishedTrigger, RunJobStatus
 
@@ -353,3 +355,142 @@ class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
         except Exception:
             error = operation.exception(timeout=self.timeout_seconds)
             raise AirflowException(error)
+
+
+class CloudRunCreateServiceOperator(GoogleCloudBaseOperator):
+    """
+    Creates a Service without executing it. Pushes the created service to xcom.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param region: Required. The ID of the Google Cloud region that the service belongs to.
+    :param service_name: Required. The name of the service to create.
+    :param service: The service descriptor containing the configuration of the service to submit.
+    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields = ("project_id", "region", "gcp_conn_id", "impersonation_chain", "service_name")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        service_name: str,
+        service: dict | Service,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.service = service
+        self.service_name = service_name
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self._validate_inputs()
+
+    def _validate_inputs(self):
+        missing_fields = [k for k in ["project_id", "region", "service_name"] if not getattr(self, k)]
+        if not self.project_id or not self.region or not self.service_name:
+            raise AirflowException(
+                f"Required parameters are missing: {missing_fields}. These parameters be passed either as "
+                "keyword parameter or as extra field in Airflow connection definition. Both are not set!"
+            )
+
+    def execute(self, context: Context):
+        hook: CloudRunServiceHook = CloudRunServiceHook(
+            gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
+        )
+
+        try:
+            service = hook.create_service(
+                service=self.service,
+                service_name=self.service_name,
+                region=self.region,
+                project_id=self.project_id,
+            )
+        except AlreadyExists:
+            self.log.info(
+                "Already existed Cloud run service, service_name=%s, region=%s",
+                self.service_name,
+                self.region,
+            )
+            return hook.get_service(
+                service_name=self.service_name, region=self.region, project_id=self.project_id
+            )
+        except google.cloud.exceptions.GoogleCloudError as e:
+            self.log.error("An error occurred. Exiting.")
+            raise e
+
+        return Service.to_dict(service)
+
+
+class CloudRunDeleteServiceOperator(GoogleCloudBaseOperator):
+    """
+    Deletes a Service without executing it. Pushes the deleted service to xcom.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param region: Required. The ID of the Google Cloud region that the service belongs to.
+    :param service_name: Required. The name of the service to create.
+    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields = ("project_id", "region", "gcp_conn_id", "impersonation_chain", "service_name")
+
+    def __init__(
+        self,
+        project_id: str,
+        region: str,
+        service_name: str,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.project_id = project_id
+        self.region = region
+        self.service_name = service_name
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self._validate_inputs()
+
+    def _validate_inputs(self):
+        missing_fields = [k for k in ["project_id", "region", "service_name"] if not getattr(self, k)]
+        if not self.project_id or not self.region or not self.service_name:
+            raise AirflowException(
+                f"Required parameters are missing: {missing_fields}. These parameters be passed either as "
+                "keyword parameter or as extra field in Airflow connection definition. Both are not set!"
+            )
+
+    def execute(self, context: Context):
+        hook: CloudRunServiceHook = CloudRunServiceHook(
+            gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
+        )
+
+        try:
+            service = hook.delete_service(
+                service_name=self.service_name,
+                region=self.region,
+                project_id=self.project_id,
+            )
+        except google.cloud.exceptions.NotFound as e:
+            self.log.error("An error occurred. Not Found.")
+            raise e
+
+        return Service.to_dict(service)
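
Both operators validate `project_id`, `region`, and `service_name` up front, return the service as a dict via `Service.to_dict` so it lands in XCom, and the create operator falls back to `get_service` when the service already exists. A hypothetical DAG snippet wiring the two together; the DAG ID, project, region, service name, and image are placeholders, not values from this diff::

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.cloud_run import (
        CloudRunCreateServiceOperator,
        CloudRunDeleteServiceOperator,
    )

    # Plain dict form of a Service; the hook wraps it with Service(service).
    SERVICE = {"template": {"containers": [{"image": "us-docker.pkg.dev/cloudrun/container/hello"}]}}

    with DAG("cloud_run_service_demo", start_date=datetime(2024, 1, 1), schedule=None) as dag:
        create_service = CloudRunCreateServiceOperator(
            task_id="create_service",
            project_id="my-project",   # placeholder
            region="us-central1",      # placeholder
            service_name="my-service", # placeholder
            service=SERVICE,
        )
        delete_service = CloudRunDeleteServiceOperator(
            task_id="delete_service",
            project_id="my-project",
            region="us-central1",
            service_name="my-service",
        )
        create_service >> delete_service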

airflow/providers/google/cloud/operators/dataproc.py
@@ -30,6 +30,7 @@ from collections.abc import MutableSequence
 from dataclasses import dataclass
 from datetime import datetime, timedelta
 from enum import Enum
+from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
 from deprecated import deprecated
@@ -638,7 +639,7 @@ class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
         request_id: str | None = None,
         delete_on_error: bool = True,
         use_if_exists: bool = True,
-        retry: AsyncRetry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
         timeout: float = 1 * 60 * 60,
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
@@ -1184,7 +1185,7 @@ class _DataprocStartStopClusterBaseOperator(GoogleCloudBaseOperator):
         project_id: str = PROVIDE_PROJECT_ID,
         cluster_uuid: str | None = None,
         request_id: str | None = None,
-        retry: AsyncRetry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
         timeout: float = 1 * 60 * 60,
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
@@ -2712,7 +2713,7 @@ class DataprocUpdateClusterOperator(GoogleCloudBaseOperator):
         region: str,
         request_id: str | None = None,
         project_id: str = PROVIDE_PROJECT_ID,
-        retry: AsyncRetry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
@@ -2985,10 +2986,10 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
     def __init__(
         self,
         *,
-        region: str
+        region: str,
         project_id: str = PROVIDE_PROJECT_ID,
         batch: dict | Batch,
-        batch_id: str,
+        batch_id: str | None = None,
         request_id: str | None = None,
         retry: Retry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
@@ -3021,20 +3022,20 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
         self.polling_interval_seconds = polling_interval_seconds
 
     def execute(self, context: Context):
-
-
-
-        link = DATAPROC_BATCH_LINK.format(
-            region=self.region, project_id=self.project_id, batch_id=self.batch_id
+        if self.asynchronous and self.deferrable:
+            raise AirflowException(
+                "Both asynchronous and deferrable parameters were passed. Please, provide only one."
             )
-
-
+
+        batch_id: str = ""
+        if self.batch_id:
+            batch_id = self.batch_id
+            self.log.info("Starting batch %s", batch_id)
         else:
-            self.log.info("Starting batch
-
-            raise AirflowException("Region should be set here")
+            self.log.info("Starting batch. The batch ID will be generated since it was not provided.")
+
         try:
-            self.operation = hook.create_batch(
+            self.operation = self.hook.create_batch(
                 region=self.region,
                 project_id=self.project_id,
                 batch=self.batch,
@@ -3044,85 +3045,62 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
                 timeout=self.timeout,
                 metadata=self.metadata,
             )
-            if self.operation is None:
-                raise RuntimeError("The operation should be set here!")
-
-            if not self.deferrable:
-                if not self.asynchronous:
-                    result = hook.wait_for_operation(
-                        timeout=self.timeout, result_retry=self.result_retry, operation=self.operation
-                    )
-                    self.log.info("Batch %s created", self.batch_id)
-
-                else:
-                    DataprocBatchLink.persist(
-                        context=context,
-                        operator=self,
-                        project_id=self.project_id,
-                        region=self.region,
-                        batch_id=self.batch_id,
-                    )
-                    return self.operation.operation.name
-
-            else:
-                # processing ends in execute_complete
-                self.defer(
-                    trigger=DataprocBatchTrigger(
-                        batch_id=self.batch_id,
-                        project_id=self.project_id,
-                        region=self.region,
-                        gcp_conn_id=self.gcp_conn_id,
-                        impersonation_chain=self.impersonation_chain,
-                        polling_interval_seconds=self.polling_interval_seconds,
-                    ),
-                    method_name="execute_complete",
-                )
-
         except AlreadyExists:
-            self.log.info("Batch with given id already exists")
-
-
-
-
-            self.log.info("Attaching to the job %s if it is still running.", self.batch_id)
+            self.log.info("Batch with given id already exists.")
+            self.log.info("Attaching to the job %s if it is still running.", batch_id)
+        else:
+            batch_id = self.operation.metadata.batch.split("/")[-1]
+            self.log.info("The batch %s was created.", batch_id)
 
-
-
-
-
-
-
-
-                    gcp_conn_id=self.gcp_conn_id,
-                    impersonation_chain=self.impersonation_chain,
-                    polling_interval_seconds=self.polling_interval_seconds,
-                ),
-                method_name="execute_complete",
-            )
+        DataprocBatchLink.persist(
+            context=context,
+            operator=self,
+            project_id=self.project_id,
+            region=self.region,
+            batch_id=batch_id,
+        )
 
-
-
-            batch_id=
+        if self.asynchronous:
+            batch = self.hook.get_batch(
+                batch_id=batch_id,
                 region=self.region,
                 project_id=self.project_id,
                 retry=self.retry,
                 timeout=self.timeout,
                 metadata=self.metadata,
-                wait_check_interval=self.polling_interval_seconds,
             )
-
+            self.log.info("The batch %s was created asynchronously. Exiting.", batch_id)
+            return Batch.to_dict(batch)
 
-            self.
-
-
-
-
-
-
-
+        if self.deferrable:
+            self.defer(
+                trigger=DataprocBatchTrigger(
+                    batch_id=batch_id,
+                    project_id=self.project_id,
+                    region=self.region,
+                    gcp_conn_id=self.gcp_conn_id,
+                    impersonation_chain=self.impersonation_chain,
+                    polling_interval_seconds=self.polling_interval_seconds,
+                ),
+                method_name="execute_complete",
             )
-
+
+        self.log.info("Waiting for the completion of batch job %s", batch_id)
+        batch = self.hook.wait_for_batch(
+            batch_id=batch_id,
+            region=self.region,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+
+        self.handle_batch_status(context, batch.state, batch_id, batch.state_message)
+        return Batch.to_dict(batch)
+
+    @cached_property
+    def hook(self) -> DataprocHook:
+        return DataprocHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
 
     def execute_complete(self, context, event=None) -> None:
         """
@@ -3135,23 +3113,27 @@ class DataprocCreateBatchOperator(GoogleCloudBaseOperator):
             raise AirflowException("Batch failed.")
         state = event["batch_state"]
         batch_id = event["batch_id"]
-        self.handle_batch_status(context, state, batch_id)
+        self.handle_batch_status(context, state, batch_id, state_message=event["batch_state_message"])
 
     def on_kill(self):
         if self.operation:
             self.operation.cancel()
 
-    def handle_batch_status(
+    def handle_batch_status(
+        self, context: Context, state: Batch.State, batch_id: str, state_message: str | None = None
+    ) -> None:
         # The existing batch may be a number of states other than 'SUCCEEDED'\
         # wait_for_operation doesn't fail if the job is cancelled, so we will check for it here which also
        # finds a cancelling|canceled|unspecified job from wait_for_batch or the deferred trigger
         link = DATAPROC_BATCH_LINK.format(region=self.region, project_id=self.project_id, batch_id=batch_id)
         if state == Batch.State.FAILED:
-            raise AirflowException(
+            raise AirflowException(
+                f"Batch job {batch_id} failed with error: {state_message}\nDriver Logs: {link}"
+            )
         if state in (Batch.State.CANCELLED, Batch.State.CANCELLING):
-            raise AirflowException("Batch job
+            raise AirflowException(f"Batch job {batch_id} was cancelled. Driver logs: {link}")
         if state == Batch.State.STATE_UNSPECIFIED:
-            raise AirflowException("Batch job
+            raise AirflowException(f"Batch job {batch_id} unspecified. Driver logs: {link}")
         self.log.info("Batch job %s completed. Driver logs: %s", batch_id, link)
 
 
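
Taken together, these hunks make `batch_id` optional, route client calls through a `cached_property` hook, resolve the server-generated ID from `operation.metadata.batch`, and surface `state_message` in failure messages. A hypothetical task definition relying on the now-optional `batch_id`; the project, region, and batch spec are placeholders::

    from airflow.providers.google.cloud.operators.dataproc import DataprocCreateBatchOperator

    create_batch = DataprocCreateBatchOperator(
        task_id="create_batch",
        project_id="my-project",  # placeholder
        region="us-central1",     # placeholder
        batch={
            "spark_batch": {
                "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
                "main_class": "org.apache.spark.examples.SparkPi",
            }
        },
        # batch_id omitted: Dataproc generates one and the operator reads it
        # back from the create operation's metadata.
    )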

airflow/providers/google/cloud/triggers/dataproc.py
@@ -371,7 +371,10 @@ class DataprocBatchTrigger(DataprocBaseTrigger):
             self.log.info("Current state is %s", state)
             self.log.info("Sleeping for %s seconds.", self.polling_interval_seconds)
             await asyncio.sleep(self.polling_interval_seconds)
-
+
+        yield TriggerEvent(
+            {"batch_id": self.batch_id, "batch_state": state, "batch_state_message": batch.state_message}
+        )
 
 
 class DataprocDeleteClusterTrigger(DataprocBaseTrigger):
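
Note: the `batch_state_message` key added to this `TriggerEvent` payload is what the operator's deferrable path consumes. `execute_complete` above reads `event["batch_state_message"]` and forwards it to `handle_batch_status`, so driver-level error text survives the trigger round-trip.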

airflow/providers/google/common/hooks/base_google.py
@@ -19,6 +19,7 @@
 
 from __future__ import annotations
 
+import asyncio
 import datetime
 import functools
 import json
@@ -36,7 +37,7 @@ import requests
 import tenacity
 from asgiref.sync import sync_to_async
 from deprecated import deprecated
-from gcloud.aio.auth.token import Token
+from gcloud.aio.auth.token import Token, TokenResponse
 from google.api_core.exceptions import Forbidden, ResourceExhausted, TooManyRequests
 from google.auth import _cloud_sdk, compute_engine  # type: ignore[attr-defined]
 from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS
@@ -745,17 +746,41 @@ class _CredentialsToken(Token):
     async def get_project(self) -> str | None:
         return self.project
 
-    async def
+    async def refresh(self, *, timeout: int) -> TokenResponse:
         await sync_to_async(self.credentials.refresh)(google.auth.transport.requests.Request())
 
         self.access_token = cast(str, self.credentials.token)
         self.access_token_duration = 3600
-
-
-
-
+        self.access_token_acquired_at = self._now()
+        return TokenResponse(value=self.access_token, expires_in=self.access_token_duration)
+
+    async def acquire_access_token(self, timeout: int = 10) -> None:
+        await self.refresh(timeout=timeout)
         self.acquiring = None
 
+    async def ensure_token(self) -> None:
+        if self.acquiring and not self.acquiring.done():
+            await self.acquiring
+            return
+
+        if self.access_token:
+            delta = (self._now() - self.access_token_acquired_at).total_seconds()
+            if delta <= self.access_token_duration / 2:
+                return
+
+        self.acquiring = asyncio.ensure_future(  # pylint: disable=used-before-assignment
+            self.acquire_access_token()
+        )
+        await self.acquiring
+
+    @staticmethod
+    def _now():
+        # access_token_acquired_at is specific to gcloud-aio's Token.
+        # On subsequent calls of `get` it will be used with `datetime.datetime.utcnow()`.
+        # Therefore we have to use an offset-naive datetime.
+        # https://github.com/talkiq/gcloud-aio/blob/f1132b005ba35d8059229a9ca88b90f31f77456d/auth/gcloud/aio/auth/token.py#L204
+        return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
+
 
 class GoogleBaseAsyncHook(BaseHook):
     """GoogleBaseAsyncHook inherits from BaseHook class, run on the trigger worker."""
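
The overridden `refresh`/`acquire_access_token`/`ensure_token` trio keeps `_CredentialsToken` compatible with gcloud-aio's `Token` contract (hence the new `TokenResponse` import and the `gcloud-aio-auth>=5.2.0` floor below) while refreshing through google-auth credentials. A hypothetical sketch of exercising it; `from_hook` is defined elsewhere in `base_google.py` (not shown in this diff) and the connection ID is a placeholder::

    import asyncio

    from airflow.providers.google.common.hooks.base_google import GoogleBaseHook, _CredentialsToken

    async def main() -> None:
        hook = GoogleBaseHook(gcp_conn_id="google_cloud_default")  # placeholder connection
        token = await _CredentialsToken.from_hook(hook)

        # ensure_token refreshes only once the cached token passes half of its
        # hard-coded 3600 s lifetime; Token.get() calls it before returning.
        await token.ensure_token()
        print(await token.get())

    asyncio.run(main())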

airflow/providers/google/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n  - `Google Ads <https://ads.google.com/>`__\n  - `Google Cloud (GCP) <https://cloud.google.com/>`__\n  - `Google Firebase <https://firebase.google.com/>`__\n  - `Google LevelDB <https://github.com/google/leveldb/>`__\n  - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n  - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1723970253,
         "versions": [
+            "10.22.0",
             "10.21.1",
             "10.21.0",
             "10.20.0",
@@ -91,12 +92,12 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.
+            "apache-airflow>=2.8.0",
             "apache-airflow-providers-common-compat>=1.1.0",
             "apache-airflow-providers-common-sql>=1.7.2",
             "asgiref>=3.5.2",
             "dill>=0.2.3",
-            "gcloud-aio-auth>=
+            "gcloud-aio-auth>=5.2.0",
             "gcloud-aio-bigquery>=6.1.2",
             "gcloud-aio-storage>=9.0.0",
             "gcsfs>=2023.10.0",

apache_airflow_providers_google-10.22.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.
+Version: 10.22.0rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -24,10 +24,10 @@ Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyOpenSSL>=23.0.0
 Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
 Requires-Dist: apache-airflow-providers-common-sql>=1.7.2rc0
-Requires-Dist: apache-airflow>=2.
+Requires-Dist: apache-airflow>=2.8.0rc0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: dill>=0.2.3
-Requires-Dist: gcloud-aio-auth>=
+Requires-Dist: gcloud-aio-auth>=5.2.0
 Requires-Dist: gcloud-aio-bigquery>=6.1.2
 Requires-Dist: gcloud-aio-storage>=9.0.0
 Requires-Dist: gcsfs>=2023.10.0
@@ -107,8 +107,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.22.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.22.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -177,7 +177,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.
+Release: ``10.22.0.rc1``
 
 
 Google services including:
@@ -197,7 +197,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.22.0/>`_.
 
 Installation
 ------------
@@ -214,12 +214,12 @@ Requirements
 ==========================================  =========================================
 PIP package                                 Version required
 ==========================================  =========================================
-``apache-airflow``                          ``>=2.
+``apache-airflow``                          ``>=2.8.0``
 ``apache-airflow-providers-common-compat``  ``>=1.1.0``
 ``apache-airflow-providers-common-sql``     ``>=1.7.2``
 ``asgiref``                                 ``>=3.5.2``
 ``dill``                                    ``>=0.2.3``
-``gcloud-aio-auth``                         ``>=
+``gcloud-aio-auth``                         ``>=5.2.0``
 ``gcloud-aio-bigquery``                     ``>=6.1.2``
 ``gcloud-aio-storage``                      ``>=9.0.0``
 ``gcsfs``                                   ``>=2023.10.0``
@@ -318,4 +318,4 @@ Dependent package
 ========================================================================================================================  ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.22.0/changelog.html>`_.

apache_airflow_providers_google-10.22.0rc1.dist-info/RECORD
@@ -1,6 +1,6 @@
 airflow/providers/google/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/google/__init__.py,sha256=
-airflow/providers/google/get_provider_info.py,sha256=
+airflow/providers/google/__init__.py,sha256=PhPTVkzX5GfOr8JhhgFot6m9omJHWWwVtvCzjGchv-E,1495
+airflow/providers/google/get_provider_info.py,sha256=I7lLiO0j0cHpbSM5oUVi2s_Rmqio5FYpPF0NP3XZZow,82091
 airflow/providers/google/go_module_utils.py,sha256=XVM-IGME6CPgJA8fgDgkusFc4fz3lEghZaZ4elBkv7s,1780
 airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
 airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -30,9 +30,9 @@ airflow/providers/google/cloud/hooks/cloud_batch.py,sha256=FjpR_Av7z8oMnB4Q7S-aP
 airflow/providers/google/cloud/hooks/cloud_build.py,sha256=jqjvBxP1T4u-h7lDcmADgluYzH85Y-Kz6QhV2NzKmi8,28324
 airflow/providers/google/cloud/hooks/cloud_composer.py,sha256=_GRmI4CHry6_J8-ABccsSy-xpQJ5hmHxzu9je4UGCSg,28716
 airflow/providers/google/cloud/hooks/cloud_memorystore.py,sha256=4hUbVhN-AphoOjQ5NB2Ge4Z2Ay6vd5gnyvlxE3ffzWQ,40442
-airflow/providers/google/cloud/hooks/cloud_run.py,sha256=
+airflow/providers/google/cloud/hooks/cloud_run.py,sha256=EEHZ84yRcX8QBfXyPqerC73fhWXqdAFV3VCNUSaLdto,12449
 airflow/providers/google/cloud/hooks/cloud_sql.py,sha256=H0tWvU3KOCKwJ2whdxU7hOO8OMgPwysKirsQUooU1jQ,53442
-airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py,sha256=
+airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py,sha256=BI1HEa0P5_7tDUfoMtE0swREsCO5TTyIXJShVhgmtkM,23015
 airflow/providers/google/cloud/hooks/compute.py,sha256=sBpi7oZjJwV4JPkWm3iKw23l7cAjHDET2MU9S8FOc58,40673
 airflow/providers/google/cloud/hooks/compute_ssh.py,sha256=GTSiuhDFpeN-7n8ggrF-XBaPQ2hfk80tWChGImYGpTo,15689
 airflow/providers/google/cloud/hooks/datacatalog.py,sha256=KoOcXUE-YhCR81wgHNJQ_YwTr5LYYyoDXTFJWZz_mds,54292
@@ -127,7 +127,7 @@ airflow/providers/google/cloud/operators/cloud_batch.py,sha256=uH4II9f-dUtC7LskX
 airflow/providers/google/cloud/operators/cloud_build.py,sha256=7L3mBxls1r628o1XyC1vMOnRThWgfRx1x1z5zWXjx70,48481
 airflow/providers/google/cloud/operators/cloud_composer.py,sha256=MAmqS0FkbKfSk3-1lUyC5F3-76H5eVUBugmMeioEFFk,32679
 airflow/providers/google/cloud/operators/cloud_memorystore.py,sha256=0I4nmUIRsA2lcJlKh2uTnty9ONH40sTxAshsrVN81dY,70989
-airflow/providers/google/cloud/operators/cloud_run.py,sha256=
+airflow/providers/google/cloud/operators/cloud_run.py,sha256=SXpCgomERnXdyKymRC-djbTfY_Mscv-94h12uT8RRBQ,21872
 airflow/providers/google/cloud/operators/cloud_sql.py,sha256=VA_RRg_Zv3zo4cKmpEfhCIpMo5c6IIdDdkj7YkeiZLs,53374
 airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py,sha256=YQsVg8pDegEDvsWsZCbGrSqCln3iQyLQErZS_XZTnBo,48066
 airflow/providers/google/cloud/operators/compute.py,sha256=lFGCacevkKJvAszJhfSLAOfetlsbYrCoImTeWXS5bqw,74607
@@ -138,7 +138,7 @@ airflow/providers/google/cloud/operators/datafusion.py,sha256=NZoR65aChdkPUG8bxE
 airflow/providers/google/cloud/operators/datapipeline.py,sha256=3Zk_v_wlCKRuqx5yeajNWGIJN09xGNWJAN0qwsWE56o,2403
 airflow/providers/google/cloud/operators/dataplex.py,sha256=IgGwt95uW72IeLi1oHpGk8V0fKyt9apsc4kUpBz_7YQ,91195
 airflow/providers/google/cloud/operators/dataprep.py,sha256=jTDDgRccd2zIUqGzJebZpbNTJsFdRi5RnMtldXHqiMs,10477
-airflow/providers/google/cloud/operators/dataproc.py,sha256=
+airflow/providers/google/cloud/operators/dataproc.py,sha256=JtfSWwNeK4xnKP3QlieiR_nP3LEs-ospZFiSB8YLJ-8,151792
 airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=mJOqDv4GEqQ7tx32ar-mMsPhIjYC_B1AZyiVDZBKOio,50402
 airflow/providers/google/cloud/operators/datastore.py,sha256=di00jFy3Z1v0GcmcQ0df8NJ32yxcseOqWuojC4TKdmY,24927
 airflow/providers/google/cloud/operators/dlp.py,sha256=SQCGml0RIKl0UrvXHIUiOskg5ayTj4F5_4k4rztClvM,120742
@@ -233,7 +233,7 @@ airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py,sha256
 airflow/providers/google/cloud/triggers/dataflow.py,sha256=evg6qbg-LNTYI4714xCzdsp6aLEFUqn9-8Svmxv6JwQ,28640
 airflow/providers/google/cloud/triggers/datafusion.py,sha256=blqNx4hLHRrLp-FQMCNR3yWmAZ2hCTfql2eyf5XY0P0,5985
 airflow/providers/google/cloud/triggers/dataplex.py,sha256=fEHbvNYgP-6htNkp9b7nmvea_r3Z6CIMfIh57NaWxFA,8334
-airflow/providers/google/cloud/triggers/dataproc.py,sha256=
+airflow/providers/google/cloud/triggers/dataproc.py,sha256=xK2tZdS2gZTAqgQEQ9kQ9Uq571uV7OmfKBoF8Q46a48,24371
 airflow/providers/google/cloud/triggers/gcs.py,sha256=pMjeNOkWHkOyiAxeK-JoyDInUf2VNtefOZxp8K-aNjw,18973
 airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=OH0KLkSGAJRKYgJ0n_6KNBUgh0oMO1T0RAs3jbhw1rQ,12440
 airflow/providers/google/cloud/triggers/mlengine.py,sha256=qpOa9Gz8FmHDxXvPWrXO3M7snGbRTq92gy6kGafCUiY,5265
@@ -257,7 +257,7 @@ airflow/providers/google/common/consts.py,sha256=KKjQX0FO6HP4MuzS8DDfWPoQNk0mhdO
 airflow/providers/google/common/auth_backend/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/common/auth_backend/google_openid.py,sha256=HvUAoHfpuK8gZiss0DCgB7R3wk2c1aDjAhwc7GknV4c,4525
 airflow/providers/google/common/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/common/hooks/base_google.py,sha256=
+airflow/providers/google/common/hooks/base_google.py,sha256=87Pge2hmzCPxfxp6NCFM3MJFM-D1AJ-duJRoipuPvRA,34122
 airflow/providers/google/common/hooks/discovery_api.py,sha256=dHA3AHwsY7ETWM1fRoNknZZgz4cMnWw4q0PTQt0TiNA,6767
 airflow/providers/google/common/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/common/links/storage.py,sha256=24tb1xQ00gbGyOu26wryNkTYXlk_9H-ieUra9WmuPIY,2265
@@ -310,7 +310,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=CxtVhp3wlEOBtjR
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=4nwXWkTySeBXNuThPxzO7uww_hH6PthpppTeuShn27Q,4363
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=ZSK0b1Rd6x_xsP2DVcUzeYu3qoo9Bsp3VmnKyBsFRH8,6105
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
-apache_airflow_providers_google-10.
-apache_airflow_providers_google-10.
-apache_airflow_providers_google-10.
-apache_airflow_providers_google-10.
+apache_airflow_providers_google-10.22.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-10.22.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_google-10.22.0rc1.dist-info/METADATA,sha256=mNFv94dJnExaIHI-wLhXlM5tfV8KQTKxlYRiyCrU-m8,17123
+apache_airflow_providers_google-10.22.0rc1.dist-info/RECORD,,