apache-airflow-providers-google 10.12.0rc1__py3-none-any.whl → 10.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/cloud/fs/gcs.py +16 -13
- airflow/providers/google/cloud/hooks/bigquery_dts.py +2 -1
- airflow/providers/google/cloud/hooks/cloud_build.py +2 -1
- airflow/providers/google/cloud/hooks/cloud_composer.py +4 -3
- airflow/providers/google/cloud/hooks/compute_ssh.py +18 -6
- airflow/providers/google/cloud/hooks/dataflow.py +61 -3
- airflow/providers/google/cloud/hooks/dataplex.py +2 -1
- airflow/providers/google/cloud/hooks/dataproc.py +19 -18
- airflow/providers/google/cloud/hooks/gcs.py +10 -6
- airflow/providers/google/cloud/hooks/pubsub.py +3 -2
- airflow/providers/google/cloud/log/gcs_task_handler.py +2 -39
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +2 -11
- airflow/providers/google/cloud/operators/bigquery.py +47 -47
- airflow/providers/google/cloud/operators/cloud_composer.py +1 -1
- airflow/providers/google/cloud/operators/cloud_run.py +3 -3
- airflow/providers/google/cloud/operators/dataflow.py +6 -0
- airflow/providers/google/cloud/operators/dataplex.py +530 -1
- airflow/providers/google/cloud/operators/dataproc.py +11 -11
- airflow/providers/google/cloud/operators/gcs.py +90 -15
- airflow/providers/google/cloud/operators/kubernetes_engine.py +2 -3
- airflow/providers/google/cloud/operators/pubsub.py +47 -55
- airflow/providers/google/cloud/secrets/secret_manager.py +22 -1
- airflow/providers/google/cloud/sensors/cloud_composer.py +14 -1
- airflow/providers/google/cloud/sensors/dataplex.py +118 -0
- airflow/providers/google/cloud/sensors/gcs.py +10 -1
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +5 -5
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +42 -42
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +9 -9
- airflow/providers/google/cloud/triggers/cloud_run.py +7 -7
- airflow/providers/google/cloud/triggers/dataplex.py +82 -0
- airflow/providers/google/cloud/triggers/dataproc.py +2 -5
- airflow/providers/google/cloud/triggers/gcs.py +13 -3
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +3 -1
- airflow/providers/google/common/hooks/base_google.py +6 -4
- airflow/providers/google/get_provider_info.py +14 -13
- {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0.dist-info}/METADATA +31 -31
- {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0.dist-info}/RECORD +40 -40
- {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0.dist-info}/entry_points.txt +0 -0
@@ -101,13 +101,13 @@ class GCSToGCSOperator(BaseOperator):
     ``copied_sales/2017/january-backup.avro`` in the ``data_backup`` bucket ::
 
         copy_single_file = GCSToGCSOperator(
-            task_id='copy_single_file',
-            source_bucket='data',
-            source_objects=['sales/sales-2017/january.avro'],
-            destination_bucket='data_backup',
-            destination_object='copied_sales/2017/january-backup.avro',
+            task_id="copy_single_file",
+            source_bucket="data",
+            source_objects=["sales/sales-2017/january.avro"],
+            destination_bucket="data_backup",
+            destination_object="copied_sales/2017/january-backup.avro",
             exact_match=True,
-            gcp_conn_id=google_cloud_conn_id
+            gcp_conn_id=google_cloud_conn_id,
         )
 
     The following Operator would copy all the Avro files from ``sales/sales-2017``
@@ -141,12 +141,12 @@ class GCSToGCSOperator(BaseOperator):
     process. ::
 
         move_files = GCSToGCSOperator(
-            task_id='move_files',
-            source_bucket='data',
-            source_object='sales/sales-2017/*.avro',
-            destination_bucket='data_backup',
+            task_id="move_files",
+            source_bucket="data",
+            source_object="sales/sales-2017/*.avro",
+            destination_bucket="data_backup",
             move_object=True,
-            gcp_conn_id=google_cloud_conn_id
+            gcp_conn_id=google_cloud_conn_id,
         )
 
     The following Operator would move all the Avro files from ``sales/sales-2019``
@@ -154,13 +154,13 @@ class GCSToGCSOperator(BaseOperator):
     ``data_backup`` bucket, deleting the original files in the process. ::
 
         move_files = GCSToGCSOperator(
-            task_id='move_files',
-            source_bucket='data',
-            source_objects=['sales/sales-2019/*.avro', 'sales/sales-2020'],
-            destination_bucket='data_backup',
-            delimiter='.avro',
+            task_id="move_files",
+            source_bucket="data",
+            source_objects=["sales/sales-2019/*.avro", "sales/sales-2020"],
+            destination_bucket="data_backup",
+            delimiter=".avro",
             move_object=True,
-            gcp_conn_id=google_cloud_conn_id
+            gcp_conn_id=google_cloud_conn_id,
         )
 
     """
@@ -329,12 +329,12 @@ class GCSToGCSOperator(BaseOperator):
         the ``data_backup`` bucket (b/a.csv, b/b.csv, b/c.csv) ::
 
             copy_files = GCSToGCSOperator(
-                task_id='copy_files_without_wildcard',
-                source_bucket='data',
-                source_objects=['a/'],
-                destination_bucket='data_backup',
-                destination_object='b/',
-                gcp_conn_id=google_cloud_conn_id
+                task_id="copy_files_without_wildcard",
+                source_bucket="data",
+                source_objects=["a/"],
+                destination_bucket="data_backup",
+                destination_object="b/",
+                gcp_conn_id=google_cloud_conn_id,
             )
 
         Example 2:
@@ -345,13 +345,13 @@ class GCSToGCSOperator(BaseOperator):
         the ``data_backup`` bucket (b/a.avro, b/b.avro, b/c.avro) ::
 
            copy_files = GCSToGCSOperator(
-                task_id='copy_files_without_wildcard',
-                source_bucket='data',
-                source_objects=['a/'],
-                destination_bucket='data_backup',
-                destination_object='b/',
-                delimiter='.avro',
-                gcp_conn_id=google_cloud_conn_id
+                task_id="copy_files_without_wildcard",
+                source_bucket="data",
+                source_objects=["a/"],
+                destination_bucket="data_backup",
+                destination_object="b/",
+                delimiter=".avro",
+                gcp_conn_id=google_cloud_conn_id,
             )
 
         Example 3:
@@ -362,12 +362,12 @@ class GCSToGCSOperator(BaseOperator):
         the ``data_backup`` bucket (b/file_1.txt, b/file_2.csv, b/file_3.avro) ::
 
             copy_files = GCSToGCSOperator(
-                task_id='copy_files_without_wildcard',
-                source_bucket='data',
-                source_objects=['a/file_1.txt', 'a/file_2.csv', 'a/file_3.avro'],
-                destination_bucket='data_backup',
-                destination_object='b/',
-                gcp_conn_id=google_cloud_conn_id
+                task_id="copy_files_without_wildcard",
+                source_bucket="data",
+                source_objects=["a/file_1.txt", "a/file_2.csv", "a/file_3.avro"],
+                destination_bucket="data_backup",
+                destination_object="b/",
+                gcp_conn_id=google_cloud_conn_id,
             )
 
         Example 4:
@@ -378,12 +378,12 @@ class GCSToGCSOperator(BaseOperator):
         (b/foo.txt, b/foo.txt.abc, b/foo.txt/subfolder/file.txt) ::
 
             copy_files = GCSToGCSOperator(
-                task_id='copy_files_without_wildcard',
-                source_bucket='data',
-                source_object='a/foo.txt',
-                destination_bucket='data_backup',
-                destination_object='b/',
-                gcp_conn_id=google_cloud_conn_id
+                task_id="copy_files_without_wildcard",
+                source_bucket="data",
+                source_object="a/foo.txt",
+                destination_bucket="data_backup",
+                destination_object="b/",
+                gcp_conn_id=google_cloud_conn_id,
             )
         """
         objects = hook.list(
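The seven hunks above only reformat the docstring examples (double quotes and trailing commas, in blacken-docs style); the operator's behavior is unchanged. A minimal runnable sketch of the first documented copy, with an assumed DAG id and start date (bucket and object names are the docstring's own placeholders):

    from __future__ import annotations

    import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator

    with DAG(
        dag_id="example_gcs_to_gcs",  # assumed DAG id
        start_date=datetime.datetime(2024, 1, 1),
        schedule=None,
    ) as dag:
        # Mirrors the docstring example: copy one object under a new name,
        # matching the source key exactly rather than treating it as a prefix.
        copy_single_file = GCSToGCSOperator(
            task_id="copy_single_file",
            source_bucket="data",
            source_objects=["sales/sales-2017/january.avro"],
            destination_bucket="data_backup",
            destination_object="copied_sales/2017/january-backup.avro",
            exact_match=True,
        )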
@@ -42,15 +42,15 @@ class MSSQLToGCSOperator(BaseSQLToGCSOperator):
    'mssql-export' GCS bucket (along with a schema file). ::
 
        export_customers = MSSQLToGCSOperator(
-            task_id='export_customers',
-            sql='SELECT * FROM dbo.Customers;',
-            bit_fields=['some_bit_field', 'another_bit_field'],
-            bucket='mssql-export',
-            filename='data/customers/export.json',
-            schema_filename='schemas/export.json',
-            mssql_conn_id='mssql_default',
-            gcp_conn_id='google_cloud_default',
-            dag=dag
+            task_id="export_customers",
+            sql="SELECT * FROM dbo.Customers;",
+            bit_fields=["some_bit_field", "another_bit_field"],
+            bucket="mssql-export",
+            filename="data/customers/export.json",
+            schema_filename="schemas/export.json",
+            mssql_conn_id="mssql_default",
+            gcp_conn_id="google_cloud_default",
+            dag=dag,
        )
 
    .. seealso::
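As with the GCS hunks, this is quote-and-comma reformatting only. A hedged standalone sketch of the documented export (connection ids are the provider defaults; the table, bucket, and BIT column names are the docstring's placeholders):

    from airflow.providers.google.cloud.transfers.mssql_to_gcs import MSSQLToGCSOperator

    # Dump a SQL Server table to JSON files in GCS and write a schema file
    # alongside; bit_fields lists BIT columns to interpret as booleans.
    export_customers = MSSQLToGCSOperator(
        task_id="export_customers",
        sql="SELECT * FROM dbo.Customers;",
        bit_fields=["some_bit_field", "another_bit_field"],
        bucket="mssql-export",
        filename="data/customers/export.json",
        schema_filename="schemas/export.json",
        mssql_conn_id="mssql_default",
        gcp_conn_id="google_cloud_default",
    )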
@@ -102,21 +102,21 @@ class CloudRunJobFinishedTrigger(BaseTrigger):
         while timeout is None or timeout > 0:
             operation: operations_pb2.Operation = await hook.get_operation(self.operation_name)
             if operation.done:
-                # An operation can only have one of those two combinations: if it is …
-                # the …
-                if operation.…
+                # An operation can only have one of those two combinations: if it is failed, then
+                # the error field will be populated, else, then the response field will be.
+                if operation.error.SerializeToString():
                     yield TriggerEvent(
                         {
-                            "status": RunJobStatus.…
+                            "status": RunJobStatus.FAIL.value,
+                            "operation_error_code": operation.error.code,
+                            "operation_error_message": operation.error.message,
                             "job_name": self.job_name,
                         }
                     )
                 else:
                     yield TriggerEvent(
                         {
-                            "status": RunJobStatus.…
-                            "operation_error_code": operation.error.code,
-                            "operation_error_message": operation.error.message,
+                            "status": RunJobStatus.SUCCESS.value,
                             "job_name": self.job_name,
                         }
                     )
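The fix swaps the branch logic so that failure is detected by a populated error field (SerializeToString() on an empty protobuf message returns b"", which is falsy) and serializes the enum with .value so the event payload is JSON-safe. A hedged sketch of how operator-side completion logic might consume these events (the function name is illustrative, not a confirmed provider API):

    from airflow.exceptions import AirflowException
    from airflow.providers.google.cloud.triggers.cloud_run import RunJobStatus

    def handle_cloud_run_event(event: dict) -> str:
        # FAIL events now carry the operation's error code and message.
        if event["status"] == RunJobStatus.FAIL.value:
            raise AirflowException(
                f"Cloud Run job {event['job_name']} failed: "
                f"{event['operation_error_code']}: {event['operation_error_message']}"
            )
        return event["job_name"]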
@@ -107,3 +107,85 @@ class DataplexDataQualityJobTrigger(BaseTrigger):
     def _convert_to_dict(self, job: DataScanJob) -> dict:
         """Returns a representation of a DataScanJob instance as a dict."""
         return DataScanJob.to_dict(job)
+
+
+class DataplexDataProfileJobTrigger(BaseTrigger):
+    """
+    DataplexDataProfileJobTrigger runs on the trigger worker and waits for the job to be `SUCCEEDED` state.
+
+    :param job_id: Optional. The ID of a Dataplex job.
+    :param data_scan_id: Required. DataScan identifier.
+    :param project_id: Google Cloud Project where the job is running.
+    :param region: The ID of the Google Cloud region that the job belongs to.
+    :param gcp_conn_id: Optional, the connection ID used to connect to Google Cloud Platform.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :param polling_interval_seconds: polling period in seconds to check for the status.
+    """
+
+    def __init__(
+        self,
+        job_id: str | None,
+        data_scan_id: str,
+        project_id: str | None,
+        region: str,
+        gcp_conn_id: str = "google_cloud_default",
+        polling_interval_seconds: int = 10,
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.job_id = job_id
+        self.data_scan_id = data_scan_id
+        self.project_id = project_id
+        self.region = region
+        self.gcp_conn_id = gcp_conn_id
+        self.polling_interval_seconds = polling_interval_seconds
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self):
+        return (
+            "airflow.providers.google.cloud.triggers.dataplex.DataplexDataProfileJobTrigger",
+            {
+                "job_id": self.job_id,
+                "data_scan_id": self.data_scan_id,
+                "project_id": self.project_id,
+                "region": self.region,
+                "gcp_conn_id": self.gcp_conn_id,
+                "impersonation_chain": self.impersonation_chain,
+                "polling_interval_seconds": self.polling_interval_seconds,
+            },
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        hook = DataplexAsyncHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+        while True:
+            job = await hook.get_data_scan_job(
+                project_id=self.project_id,
+                region=self.region,
+                job_id=self.job_id,
+                data_scan_id=self.data_scan_id,
+            )
+            state = job.state
+            if state in (DataScanJob.State.FAILED, DataScanJob.State.SUCCEEDED, DataScanJob.State.CANCELLED):
+                break
+            self.log.info(
+                "Current state is: %s, sleeping for %s seconds.",
+                DataScanJob.State(state).name,
+                self.polling_interval_seconds,
+            )
+            await asyncio.sleep(self.polling_interval_seconds)
+        yield TriggerEvent({"job_id": self.job_id, "job_state": state, "job": self._convert_to_dict(job)})
+
+    def _convert_to_dict(self, job: DataScanJob) -> dict:
+        """Returns a representation of a DataScanJob instance as a dict."""
+        return DataScanJob.to_dict(job)
@@ -25,7 +25,6 @@ from typing import Any, AsyncIterator, Sequence
 from google.api_core.exceptions import NotFound
 from google.cloud.dataproc_v1 import Batch, ClusterStatus, JobStatus
 
-from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.dataproc import DataprocAsyncHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
@@ -98,12 +97,10 @@ class DataprocSubmitTrigger(DataprocBaseTrigger):
             )
             state = job.status.state
             self.log.info("Dataproc job: %s is in state: %s", self.job_id, state)
-            if state in (JobStatus.State.DONE, JobStatus.State.CANCELLED):
+            if state in (JobStatus.State.DONE, JobStatus.State.CANCELLED, JobStatus.State.ERROR):
                 break
-            elif state == JobStatus.State.ERROR:
-                raise AirflowException(f"Dataproc job execution failed {self.job_id}")
             await asyncio.sleep(self.polling_interval_seconds)
-        yield TriggerEvent({"job_id": self.job_id, "job_state": state})
+        yield TriggerEvent({"job_id": self.job_id, "job_state": state, "job": job})
 
 
 class DataprocClusterTrigger(DataprocBaseTrigger):
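With this change the trigger no longer raises on the triggerer when a job fails: ERROR now ends the polling loop like DONE and CANCELLED, and the terminal state travels to the worker inside the event, along with the job payload. Callers are expected to inspect job_state themselves; a hedged sketch of that operator-side check (the function name is illustrative):

    from google.cloud.dataproc_v1 import JobStatus

    from airflow.exceptions import AirflowException

    def handle_dataproc_event(event: dict):
        # The exception previously raised inside the trigger now has to be
        # raised here, on the worker, where it can fail the task cleanly.
        if event["job_state"] == JobStatus.State.ERROR:
            raise AirflowException(f"Dataproc job execution failed {event['job_id']}")
        return event["job"]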
@@ -35,6 +35,7 @@ class GCSBlobTrigger(BaseTrigger):
 
     :param bucket: the bucket in the google cloud storage where the objects are residing.
     :param object_name: the file or folder present in the bucket
+    :param use_glob: if true object_name is interpreted as glob
     :param google_cloud_conn_id: reference to the Google Connection
     :param poke_interval: polling period in seconds to check for file/folder
     :param hook_params: Extra config params to be passed to the underlying hook.
@@ -45,6 +46,7 @@ class GCSBlobTrigger(BaseTrigger):
         self,
         bucket: str,
         object_name: str,
+        use_glob: bool,
         poke_interval: float,
         google_cloud_conn_id: str,
         hook_params: dict[str, Any],
@@ -52,6 +54,7 @@ class GCSBlobTrigger(BaseTrigger):
         super().__init__()
         self.bucket = bucket
         self.object_name = object_name
+        self.use_glob = use_glob
         self.poke_interval = poke_interval
         self.google_cloud_conn_id: str = google_cloud_conn_id
         self.hook_params = hook_params
@@ -63,6 +66,7 @@ class GCSBlobTrigger(BaseTrigger):
             {
                 "bucket": self.bucket,
                 "object_name": self.object_name,
+                "use_glob": self.use_glob,
                 "poke_interval": self.poke_interval,
                 "google_cloud_conn_id": self.google_cloud_conn_id,
                 "hook_params": self.hook_params,
@@ -98,9 +102,14 @@ class GCSBlobTrigger(BaseTrigger):
         async with ClientSession() as s:
             client = await hook.get_storage_client(s)
             bucket = client.get_bucket(bucket_name)
-            object_response = await bucket.blob_exists(blob_name=object_name)
-            if object_response:
-                return "success"
+            if self.use_glob:
+                list_blobs_response = await bucket.list_blobs(match_glob=object_name)
+                if len(list_blobs_response) > 0:
+                    return "success"
+            else:
+                blob_exists_response = await bucket.blob_exists(blob_name=object_name)
+                if blob_exists_response:
+                    return "success"
             return "pending"
 
 
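Taken together, the use_glob hunks thread one flag from the constructor through serialize() into the polling loop, where a glob match on any object counts as success. A sketch of instantiating the trigger directly in its new glob mode (bucket and pattern are assumed values):

    from airflow.providers.google.cloud.triggers.gcs import GCSBlobTrigger

    trigger = GCSBlobTrigger(
        bucket="data",
        object_name="sales/sales-2017/*.avro",  # interpreted as a glob now
        use_glob=True,
        poke_interval=10.0,
        google_cloud_conn_id="google_cloud_default",
        hook_params={},
    )

Note that use_glob has no default, so any code constructing GCSBlobTrigger directly must pass it; GCSPrefixBlobTrigger is patched accordingly in the next hunk.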
@@ -234,6 +243,7 @@ class GCSPrefixBlobTrigger(GCSBlobTrigger):
             poke_interval=poke_interval,
             google_cloud_conn_id=google_cloud_conn_id,
             hook_params=hook_params,
+            use_glob=False,
         )
         self.prefix = prefix
 
@@ -19,6 +19,7 @@ from __future__ import annotations
 
 import asyncio
 import warnings
+from functools import cached_property
 from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence
 
 from google.cloud.container_v1.types import Operation
@@ -137,7 +138,8 @@ class GKEStartPodTrigger(KubernetesPodTrigger):
         },
     )
 
-    def hook(self) -> GKEPodAsyncHook:
+    @cached_property
+    def hook(self) -> GKEPodAsyncHook:  # type: ignore[override]
         return GKEPodAsyncHook(
             cluster_url=self._cluster_url,
             ssl_ca_cert=self._ssl_ca_cert,
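Restoring @cached_property matters because the inherited KubernetesPodTrigger code reads self.hook as an attribute; without the decorator, hook is a bound method and every self.hook.<call> fails. In miniature (a standalone sketch, not provider code):

    from functools import cached_property

    class Plain:
        def hook(self):
            return object()

    class Cached:
        @cached_property
        def hook(self):
            return object()

    assert callable(Plain().hook)  # a bound method, not a hook instance
    c = Cached()
    assert c.hook is c.hook        # built once on first access, then reused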
@@ -188,8 +188,8 @@ class GoogleBaseHook(BaseHook):
     conn_type = "google_cloud_platform"
     hook_name = "Google Cloud"
 
-    @staticmethod
-    def get_connection_form_widgets() -> dict[str, Any]:
+    @classmethod
+    def get_connection_form_widgets(cls) -> dict[str, Any]:
         """Returns connection widgets to add to connection form."""
         from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget
         from flask_babel import lazy_gettext
@@ -221,8 +221,8 @@ class GoogleBaseHook(BaseHook):
         ),
     }
 
-    @staticmethod
-    def get_ui_field_behaviour() -> dict[str, Any]:
+    @classmethod
+    def get_ui_field_behaviour(cls) -> dict[str, Any]:
         """Returns custom field behaviour."""
         return {
             "hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
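Switching both hooks from @staticmethod to @classmethod keeps existing call sites such as GoogleBaseHook.get_ui_field_behaviour() working while letting subclasses cooperate via super(). A hedged sketch of a hypothetical subclass doing exactly that:

    from typing import Any

    from airflow.providers.google.common.hooks.base_google import GoogleBaseHook

    class MyGoogleHook(GoogleBaseHook):  # hypothetical subclass
        @classmethod
        def get_ui_field_behaviour(cls) -> dict[str, Any]:
            # super() works here because the base method is now a classmethod.
            behaviour = dict(super().get_ui_field_behaviour())
            behaviour.setdefault("hidden_fields", []).append("extra__custom")
            return behaviour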
@@ -267,6 +267,8 @@ class GoogleBaseHook(BaseHook):
 
         if not self.impersonation_chain:
             self.impersonation_chain = self._get_field("impersonation_chain", None)
+            if isinstance(self.impersonation_chain, str) and "," in self.impersonation_chain:
+                self.impersonation_chain = [s.strip() for s in self.impersonation_chain.split(",")]
 
         target_principal, delegates = _get_target_principal_and_delegates(self.impersonation_chain)
 
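The new branch lets the connection's impersonation_chain field hold either a single account or a comma-separated list. The parsing rule in isolation (account names are placeholders):

    chain = "sa-first@proj.iam.gserviceaccount.com, sa-last@proj.iam.gserviceaccount.com"
    if isinstance(chain, str) and "," in chain:
        chain = [s.strip() for s in chain.split(",")]
    assert chain == [
        "sa-first@proj.iam.gserviceaccount.com",
        "sa-last@proj.iam.gserviceaccount.com",
    ]

A plain string without commas still passes through unchanged, preserving the old single-account behavior.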
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n  - `Google Ads <https://ads.google.com/>`__\n  - `Google Cloud (GCP) <https://cloud.google.com/>`__\n  - `Google Firebase <https://firebase.google.com/>`__\n  - `Google LevelDB <https://github.com/google/leveldb/>`__\n  - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n  - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "suspended": False,
-        "source-date-epoch": …,
+        "source-date-epoch": 1703288137,
         "versions": [
+            "10.13.0",
             "10.12.0",
             "10.11.1",
             "10.11.0",
@@ -80,40 +81,40 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.5.0",
+            "apache-airflow>=2.6.0",
             "apache-airflow-providers-common-sql>=1.7.2",
             "asgiref>=3.5.2",
             "gcloud-aio-auth>=4.0.0,<5.0.0",
             "gcloud-aio-bigquery>=6.1.2",
-            "gcloud-aio-storage",
-            "gcsfs>=2023.…",
+            "gcloud-aio-storage>=9.0.0",
+            "gcsfs>=2023.10.0",
             "google-ads>=22.1.0",
             "google-api-core>=2.11.0",
             "google-api-python-client>=1.6.0",
             "google-auth>=1.0.0",
             "google-auth-httplib2>=0.0.1",
             "google-cloud-aiplatform>=1.22.1",
-            "google-cloud-automl>=2.…",
-            "google-cloud-bigquery-datatransfer>=3.…",
+            "google-cloud-automl>=2.12.0",
+            "google-cloud-bigquery-datatransfer>=3.13.0",
             "google-cloud-bigtable>=2.17.0",
-            "google-cloud-build>=3.…",
+            "google-cloud-build>=3.22.0",
             "google-cloud-compute>=1.10.0",
             "google-cloud-container>=2.17.4",
             "google-cloud-datacatalog>=3.11.1",
-            "google-cloud-dataflow-client>=0.8.…",
+            "google-cloud-dataflow-client>=0.8.6",
             "google-cloud-dataform>=0.5.0",
-            "google-cloud-dataplex>=1.…",
-            "google-cloud-dataproc>=5.…",
+            "google-cloud-dataplex>=1.10.0",
+            "google-cloud-dataproc>=5.8.0",
             "google-cloud-dataproc-metastore>=1.12.0",
             "google-cloud-dlp>=3.12.0",
             "google-cloud-kms>=2.15.0",
             "google-cloud-language>=2.9.0",
             "google-cloud-logging>=3.5.0",
             "google-cloud-memcache>=1.7.0",
-            "google-cloud-monitoring>=2.…",
-            "google-cloud-orchestration-airflow>=1.…",
+            "google-cloud-monitoring>=2.18.0",
+            "google-cloud-orchestration-airflow>=1.10.0",
             "google-cloud-os-login>=2.9.1",
-            "google-cloud-pubsub>=2.…",
+            "google-cloud-pubsub>=2.19.0",
             "google-cloud-redis>=2.12.0",
             "google-cloud-secret-manager>=2.16.0",
             "google-cloud-spanner>=3.11.1",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.12.0rc1
+Version: 10.13.0
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
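A quick way to check which side of this diff is installed in a given environment (assumes the distribution is present):

    from importlib.metadata import version

    print(version("apache-airflow-providers-google"))  # "10.12.0rc1" or "10.13.0"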
@@ -21,41 +21,41 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyOpenSSL
-Requires-Dist: apache-airflow-providers-common-sql>=1.7.2…
-Requires-Dist: apache-airflow>=2.…
+Requires-Dist: apache-airflow-providers-common-sql>=1.7.2
+Requires-Dist: apache-airflow>=2.6.0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: gcloud-aio-auth>=4.0.0,<5.0.0
 Requires-Dist: gcloud-aio-bigquery>=6.1.2
-Requires-Dist: gcloud-aio-storage
-Requires-Dist: gcsfs>=2023.…
+Requires-Dist: gcloud-aio-storage>=9.0.0
+Requires-Dist: gcsfs>=2023.10.0
 Requires-Dist: google-ads>=22.1.0
 Requires-Dist: google-api-core>=2.11.0
 Requires-Dist: google-api-python-client>=1.6.0
 Requires-Dist: google-auth-httplib2>=0.0.1
 Requires-Dist: google-auth>=1.0.0
 Requires-Dist: google-cloud-aiplatform>=1.22.1
-Requires-Dist: google-cloud-automl>=2.…
+Requires-Dist: google-cloud-automl>=2.12.0
 Requires-Dist: google-cloud-batch>=0.13.0
-Requires-Dist: google-cloud-bigquery-datatransfer>=3.…
+Requires-Dist: google-cloud-bigquery-datatransfer>=3.13.0
 Requires-Dist: google-cloud-bigtable>=2.17.0
-Requires-Dist: google-cloud-build>=3.…
+Requires-Dist: google-cloud-build>=3.22.0
 Requires-Dist: google-cloud-compute>=1.10.0
 Requires-Dist: google-cloud-container>=2.17.4
 Requires-Dist: google-cloud-datacatalog>=3.11.1
-Requires-Dist: google-cloud-dataflow-client>=0.8.…
+Requires-Dist: google-cloud-dataflow-client>=0.8.6
 Requires-Dist: google-cloud-dataform>=0.5.0
-Requires-Dist: google-cloud-dataplex>=1.…
+Requires-Dist: google-cloud-dataplex>=1.10.0
 Requires-Dist: google-cloud-dataproc-metastore>=1.12.0
-Requires-Dist: google-cloud-dataproc>=5.…
+Requires-Dist: google-cloud-dataproc>=5.8.0
 Requires-Dist: google-cloud-dlp>=3.12.0
 Requires-Dist: google-cloud-kms>=2.15.0
 Requires-Dist: google-cloud-language>=2.9.0
 Requires-Dist: google-cloud-logging>=3.5.0
 Requires-Dist: google-cloud-memcache>=1.7.0
-Requires-Dist: google-cloud-monitoring>=2.…
-Requires-Dist: google-cloud-orchestration-airflow>=1.…
+Requires-Dist: google-cloud-monitoring>=2.18.0
+Requires-Dist: google-cloud-orchestration-airflow>=1.10.0
 Requires-Dist: google-cloud-os-login>=2.9.1
-Requires-Dist: google-cloud-pubsub>=2.…
+Requires-Dist: google-cloud-pubsub>=2.19.0
 Requires-Dist: google-cloud-redis>=2.12.0
 Requires-Dist: google-cloud-run>=0.9.0
 Requires-Dist: google-cloud-secret-manager>=2.16.0
@@ -98,8 +98,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.…
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.…
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.13.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.13.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -167,7 +167,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.12.0rc1``
+Release: ``10.13.0``
 
 
 Google services including:
@@ -187,7 +187,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.…>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.13.0/>`_.
 
 Installation
 ------------
@@ -204,40 +204,40 @@ Requirements
 ======================================= ==================
 PIP package                             Version required
 ======================================= ==================
-``apache-airflow``                      ``>=2.5.0``
+``apache-airflow``                      ``>=2.6.0``
 ``apache-airflow-providers-common-sql`` ``>=1.7.2``
 ``asgiref``                             ``>=3.5.2``
 ``gcloud-aio-auth``                     ``>=4.0.0,<5.0.0``
 ``gcloud-aio-bigquery``                 ``>=6.1.2``
-``gcloud-aio-storage``
-``gcsfs``                               ``>=2023.…``
+``gcloud-aio-storage``                  ``>=9.0.0``
+``gcsfs``                               ``>=2023.10.0``
 ``google-ads``                          ``>=22.1.0``
 ``google-api-core``                     ``>=2.11.0``
 ``google-api-python-client``            ``>=1.6.0``
 ``google-auth``                         ``>=1.0.0``
 ``google-auth-httplib2``                ``>=0.0.1``
 ``google-cloud-aiplatform``             ``>=1.22.1``
-``google-cloud-automl``                 ``>=2.…``
-``google-cloud-bigquery-datatransfer``  ``>=3.…``
+``google-cloud-automl``                 ``>=2.12.0``
+``google-cloud-bigquery-datatransfer``  ``>=3.13.0``
 ``google-cloud-bigtable``               ``>=2.17.0``
-``google-cloud-build``                  ``>=3.…``
+``google-cloud-build``                  ``>=3.22.0``
 ``google-cloud-compute``                ``>=1.10.0``
 ``google-cloud-container``              ``>=2.17.4``
 ``google-cloud-datacatalog``            ``>=3.11.1``
-``google-cloud-dataflow-client``        ``>=0.8.…``
+``google-cloud-dataflow-client``        ``>=0.8.6``
 ``google-cloud-dataform``               ``>=0.5.0``
-``google-cloud-dataplex``               ``>=1.…``
-``google-cloud-dataproc``               ``>=5.…``
+``google-cloud-dataplex``               ``>=1.10.0``
+``google-cloud-dataproc``               ``>=5.8.0``
 ``google-cloud-dataproc-metastore``     ``>=1.12.0``
 ``google-cloud-dlp``                    ``>=3.12.0``
 ``google-cloud-kms``                    ``>=2.15.0``
 ``google-cloud-language``               ``>=2.9.0``
 ``google-cloud-logging``                ``>=3.5.0``
 ``google-cloud-memcache``               ``>=1.7.0``
-``google-cloud-monitoring``             ``>=2.…``
-``google-cloud-orchestration-airflow``  ``>=1.…``
+``google-cloud-monitoring``             ``>=2.18.0``
+``google-cloud-orchestration-airflow``  ``>=1.10.0``
 ``google-cloud-os-login``               ``>=2.9.1``
-``google-cloud-pubsub``                 ``>=2.…``
+``google-cloud-pubsub``                 ``>=2.19.0``
 ``google-cloud-redis``                  ``>=2.12.0``
 ``google-cloud-secret-manager``         ``>=2.16.0``
 ``google-cloud-spanner``                ``>=3.11.1``
@@ -300,4 +300,4 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.…>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.13.0/changelog.html>`_.
|