apache_airflow_providers_google-16.1.0-py3-none-any.whl → apache_airflow_providers_google-17.0.0rc1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/ads/hooks/ads.py +1 -5
- airflow/providers/google/cloud/hooks/bigquery.py +1 -130
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
- airflow/providers/google/cloud/hooks/cloud_sql.py +5 -5
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +1 -1
- airflow/providers/google/cloud/hooks/dataflow.py +0 -85
- airflow/providers/google/cloud/hooks/datafusion.py +1 -1
- airflow/providers/google/cloud/hooks/dataprep.py +1 -4
- airflow/providers/google/cloud/hooks/dataproc.py +68 -70
- airflow/providers/google/cloud/hooks/gcs.py +3 -5
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/hooks/looker.py +1 -5
- airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +4 -4
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +7 -0
- airflow/providers/google/cloud/links/kubernetes_engine.py +3 -0
- airflow/providers/google/cloud/log/gcs_task_handler.py +2 -2
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +1 -1
- airflow/providers/google/cloud/openlineage/mixins.py +7 -7
- airflow/providers/google/cloud/operators/automl.py +1 -1
- airflow/providers/google/cloud/operators/bigquery.py +8 -609
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_sql.py +1 -5
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +2 -2
- airflow/providers/google/cloud/operators/dataproc.py +1 -1
- airflow/providers/google/cloud/operators/dlp.py +2 -2
- airflow/providers/google/cloud/operators/kubernetes_engine.py +4 -4
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +7 -1
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +7 -5
- airflow/providers/google/cloud/operators/vision.py +1 -1
- airflow/providers/google/cloud/sensors/dataflow.py +23 -6
- airflow/providers/google/cloud/sensors/datafusion.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +3 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +9 -9
- airflow/providers/google/cloud/triggers/bigquery.py +11 -13
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
- airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +1 -1
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataproc.py +10 -9
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
- airflow/providers/google/common/auth_backend/google_openid.py +2 -2
- airflow/providers/google/common/hooks/base_google.py +2 -6
- airflow/providers/google/common/utils/id_token_credentials.py +2 -2
- airflow/providers/google/get_provider_info.py +19 -16
- airflow/providers/google/leveldb/hooks/leveldb.py +1 -5
- airflow/providers/google/marketing_platform/hooks/display_video.py +47 -3
- airflow/providers/google/marketing_platform/links/analytics_admin.py +1 -1
- airflow/providers/google/marketing_platform/operators/display_video.py +64 -15
- airflow/providers/google/marketing_platform/sensors/display_video.py +9 -2
- airflow/providers/google/version_compat.py +10 -3
- {apache_airflow_providers_google-16.1.0.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/METADATA +106 -100
- {apache_airflow_providers_google-16.1.0.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/RECORD +63 -62
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/life_sciences.py +0 -30
- airflow/providers/google/cloud/operators/life_sciences.py +0 -118
- {apache_airflow_providers_google-16.1.0.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-16.1.0.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/triggers/bigquery.py

@@ -154,7 +154,7 @@ class BigQueryInsertJobTrigger(BaseTrigger):
         task_state = task_instance.state
         return task_state != TaskInstanceState.DEFERRED

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:

@@ -192,9 +192,7 @@ class BigQueryInsertJobTrigger(BaseTrigger):
                         self.location,
                         self.job_id,
                     )
-                    await hook.cancel_job(
-                        job_id=self.job_id, project_id=self.project_id, location=self.location
-                    )
+                    await hook.cancel_job(job_id=self.job_id, project_id=self.project_id, location=self.location)
                 else:
                     self.log.info(
                         "Trigger may have shutdown. Skipping to cancel job because the airflow "
@@ -231,7 +229,7 @@ class BigQueryCheckTrigger(BigQueryInsertJobTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:

@@ -308,7 +306,7 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent with response data."""
         hook = self._get_async_hook()
         try:

@@ -433,7 +431,7 @@ class BigQueryIntervalCheckTrigger(BigQueryInsertJobTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:

@@ -581,7 +579,7 @@ class BigQueryValueCheckTrigger(BigQueryInsertJobTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:
@@ -591,9 +589,9 @@ class BigQueryValueCheckTrigger(BigQueryInsertJobTrigger):
             if response_from_hook["status"] == "success":
                 query_results = await hook.get_job_output(job_id=self.job_id, project_id=self.project_id)
                 records = hook.get_records(query_results)
-
-                hook.value_check(self.sql, self.pass_value,
-                yield TriggerEvent({"status": "success", "message": "Job completed", "records":
+                _records = records.pop(0) if records else None
+                hook.value_check(self.sql, self.pass_value, _records, self.tolerance)
+                yield TriggerEvent({"status": "success", "message": "Job completed", "records": _records})
                 return
             elif response_from_hook["status"] == "pending":
                 self.log.info("Query is still running...")
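The restored lines pop the first result row and feed it to the hook's value check. For reference, a minimal runnable sketch of that flow; the SQL, pass value, and row contents below are illustrative stand-ins, not values from the package:

from airflow.providers.google.cloud.hooks.bigquery import BigQueryAsyncHook

hook = BigQueryAsyncHook(gcp_conn_id="google_cloud_default")
records = [[42]]  # stand-in for hook.get_records(query_results)
first_row = records.pop(0) if records else None
# Raises AirflowException when the row deviates from pass_value by more
# than the given tolerance (here: 40 +/- 10%).
hook.value_check("SELECT COUNT(*) FROM t", 40, first_row, 0.1)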
@@ -667,7 +665,7 @@ class BigQueryTableExistenceTrigger(BaseTrigger):
             gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Will run until the table exists in the Google Big Query."""
         try:
             while True:

@@ -750,7 +748,7 @@ class BigQueryTablePartitionExistenceTrigger(BigQueryTableExistenceTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Will run until the table exists in the Google Big Query."""
         hook = BigQueryAsyncHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
         job_id = None
airflow/providers/google/cloud/triggers/cloud_build.py

@@ -76,7 +76,7 @@ class CloudBuildCreateBuildTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current build execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:

airflow/providers/google/cloud/triggers/cloud_run.py

@@ -26,7 +26,7 @@ from airflow.providers.google.cloud.hooks.cloud_run import CloudRunAsyncHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent

 if TYPE_CHECKING:
-    from google.longrunning import operations_pb2
+    from google.longrunning import operations_pb2

 DEFAULT_BATCH_LOCATION = "us-central1"

airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py

@@ -68,7 +68,7 @@ class CloudStorageTransferServiceCreateJobsTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current data storage transfer jobs and yields a TriggerEvent."""
         async_hook: CloudDataTransferServiceAsyncHook = self.get_async_hook()

airflow/providers/google/cloud/triggers/datafusion.py

@@ -86,7 +86,7 @@ class DataFusionStartPipelineTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current pipeline status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:
airflow/providers/google/cloud/triggers/dataproc.py

@@ -316,8 +316,8 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
                     yield TriggerEvent(
                         {
                             "cluster_name": self.cluster_name,
-                            "cluster_state": ClusterStatus.State.DELETING,
-                            "cluster": cluster,
+                            "cluster_state": ClusterStatus.State(ClusterStatus.State.DELETING).name,
+                            "cluster": Cluster.to_dict(cluster),
                         }
                     )
                     return

@@ -325,14 +325,15 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
                     yield TriggerEvent(
                         {
                             "cluster_name": self.cluster_name,
-                            "cluster_state": state,
-                            "cluster": cluster,
+                            "cluster_state": ClusterStatus.State(state).name,
+                            "cluster": Cluster.to_dict(cluster),
                         }
                     )
                     return
-
-
-
+                else:
+                    self.log.info("Current state is %s", state)
+                    self.log.info("Sleeping for %s seconds.", self.polling_interval_seconds)
+                    await asyncio.sleep(self.polling_interval_seconds)
         except asyncio.CancelledError:
             try:
                 if self.delete_on_error and await self.safe_to_cancel():
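Both hunks replace raw proto-plus objects with JSON-safe values before they enter the TriggerEvent payload. A minimal sketch of the two conversions, using the same google-cloud-dataproc types (the field values are illustrative):

from google.cloud.dataproc_v1 import Cluster, ClusterStatus

cluster = Cluster(
    cluster_name="demo-cluster",
    status=ClusterStatus(state=ClusterStatus.State.RUNNING),
)
state_name = ClusterStatus.State(cluster.status.state).name  # enum -> "RUNNING"
payload = Cluster.to_dict(cluster)  # proto-plus message -> plain dict
print(state_name, payload["cluster_name"])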
@@ -484,9 +485,9 @@ class DataprocDeleteClusterTrigger(DataprocBaseTrigger):
         try:
             while self.end_time > time.time():
                 cluster = await self.get_async_hook().get_cluster(
-                    region=self.region,
+                    region=self.region,
                     cluster_name=self.cluster_name,
-                    project_id=self.project_id,
+                    project_id=self.project_id,
                     metadata=self.metadata,
                 )
                 self.log.info(
airflow/providers/google/cloud/triggers/kubernetes_engine.py

@@ -153,7 +153,7 @@ class GKEStartPodTrigger(KubernetesPodTrigger):
         )

     @cached_property
-    def hook(self) -> GKEKubernetesAsyncHook:
+    def hook(self) -> GKEKubernetesAsyncHook:
         return GKEKubernetesAsyncHook(
             cluster_url=self._cluster_url,
             ssl_ca_cert=self._ssl_ca_cert,

@@ -200,7 +200,7 @@ class GKEOperationTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get operation status and yields corresponding event."""
         hook = self._get_hook()
         try:

@@ -260,9 +260,10 @@ class GKEJobTrigger(BaseTrigger):
         ssl_ca_cert: str,
         job_name: str,
         job_namespace: str,
-
+        pod_names: list[str],
         pod_namespace: str,
         base_container_name: str,
+        pod_name: str | None = None,
         gcp_conn_id: str = "google_cloud_default",
         poll_interval: float = 2,
         impersonation_chain: str | Sequence[str] | None = None,
@@ -274,7 +275,13 @@ class GKEJobTrigger(BaseTrigger):
         self.ssl_ca_cert = ssl_ca_cert
         self.job_name = job_name
         self.job_namespace = job_namespace
-
+        if pod_name is not None:
+            self._pod_name = pod_name
+            self.pod_names = [
+                self.pod_name,
+            ]
+        else:
+            self.pod_names = pod_names
         self.pod_namespace = pod_namespace
         self.base_container_name = base_container_name
         self.gcp_conn_id = gcp_conn_id

@@ -283,6 +290,15 @@ class GKEJobTrigger(BaseTrigger):
         self.get_logs = get_logs
         self.do_xcom_push = do_xcom_push

+    @property
+    def pod_name(self):
+        warnings.warn(
+            "`pod_name` parameter is deprecated, please use `pod_names`",
+            AirflowProviderDeprecationWarning,
+            stacklevel=2,
+        )
+        return self._pod_name
+
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize KubernetesCreateJobTrigger arguments and classpath."""
         return (
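The same keep-the-old-name pattern, reduced to a self-contained sketch (a hypothetical Widget class, not provider code): the legacy singular argument is still accepted, folded into the new plural field, and re-exposed through a property that warns.

from __future__ import annotations

import warnings

class Widget:
    def __init__(self, names: list[str] | None = None, name: str | None = None):
        if name is not None:
            self._name = name
            self.names = [name]  # fold the legacy value into the new field
        else:
            self.names = names or []

    @property
    def name(self) -> str:
        warnings.warn("`name` is deprecated, please use `names`", DeprecationWarning, stacklevel=2)
        return self._name

w = Widget(name="legacy")     # old call style still works...
assert w.names == ["legacy"]  # ...and surfaces through the new field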
@@ -292,7 +308,7 @@ class GKEJobTrigger(BaseTrigger):
                 "ssl_ca_cert": self.ssl_ca_cert,
                 "job_name": self.job_name,
                 "job_namespace": self.job_namespace,
-                "
+                "pod_names": self.pod_names,
                 "pod_namespace": self.pod_namespace,
                 "base_container_name": self.base_container_name,
                 "gcp_conn_id": self.gcp_conn_id,

@@ -303,10 +319,8 @@ class GKEJobTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job status and yield a TriggerEvent."""
-        if self.get_logs or self.do_xcom_push:
-            pod = await self.hook.get_pod(name=self.pod_name, namespace=self.pod_namespace)
         if self.do_xcom_push:
             kubernetes_provider = ProvidersManager().providers["apache-airflow-providers-cncf-kubernetes"]
             kubernetes_provider_name = kubernetes_provider.data["package-name"]
@@ -318,22 +332,26 @@ class GKEJobTrigger(BaseTrigger):
                     f"package {kubernetes_provider_name}=={kubernetes_provider_version} which doesn't "
                     f"support this feature. Please upgrade it to version higher than or equal to {min_version}."
                 )
-
-
-            namespace=self.pod_namespace
-
-
-
-
-
-
-
-
-
-
-
-
+            xcom_results = []
+            for pod_name in self.pod_names:
+                pod = await self.hook.get_pod(name=pod_name, namespace=self.pod_namespace)
+                await self.hook.wait_until_container_complete(
+                    name=pod_name,
+                    namespace=self.pod_namespace,
+                    container_name=self.base_container_name,
+                    poll_interval=self.poll_interval,
+                )
+                self.log.info("Checking if xcom sidecar container is started.")
+                await self.hook.wait_until_container_started(
+                    name=pod_name,
+                    namespace=self.pod_namespace,
+                    container_name=PodDefaults.SIDECAR_CONTAINER_NAME,
+                    poll_interval=self.poll_interval,
+                )
+                self.log.info("Extracting result from xcom sidecar container.")
+                loop = asyncio.get_running_loop()
+                xcom_result = await loop.run_in_executor(None, self.pod_manager.extract_xcom, pod)
+                xcom_results.append(xcom_result)
         job: V1Job = await self.hook.wait_until_job_complete(
             name=self.job_name, namespace=self.job_namespace, poll_interval=self.poll_interval
         )
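Worth noting in the new loop: extract_xcom is synchronous, so it is handed to the default executor to keep the trigger's event loop free. The pattern, reduced to a runnable sketch (blocking_extract is a stand-in for pod_manager.extract_xcom):

import asyncio

def blocking_extract(pod_name: str) -> str:
    # stand-in for the synchronous pod_manager.extract_xcom call
    return f"xcom-from-{pod_name}"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # None selects the default ThreadPoolExecutor; awaiting the future
    # lets other triggers on the same event loop keep running.
    result = await loop.run_in_executor(None, blocking_extract, "demo-pod")
    print(result)

asyncio.run(main())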
@@ -345,12 +363,12 @@ class GKEJobTrigger(BaseTrigger):
             {
                 "name": job.metadata.name,
                 "namespace": job.metadata.namespace,
-                "
-                "pod_namespace":
+                "pod_names": [pod_name for pod_name in self.pod_names] if self.get_logs else None,
+                "pod_namespace": self.pod_namespace if self.get_logs else None,
                 "status": status,
                 "message": message,
                 "job": job_dict,
-                "xcom_result":
+                "xcom_result": xcom_results if self.do_xcom_push else None,
             }
         )

airflow/providers/google/cloud/triggers/mlengine.py

@@ -90,7 +90,7 @@ class MLEngineStartTrainingJobTrigger(BaseTrigger):
             },
         )

-    async def run(self) -> AsyncIterator[TriggerEvent]:
+    async def run(self) -> AsyncIterator[TriggerEvent]:
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
         try:

airflow/providers/google/cloud/utils/credentials_provider.py

@@ -29,7 +29,7 @@ from urllib.parse import urlencode

 import google.auth
 import google.oauth2.service_account
-from google.auth import impersonated_credentials
+from google.auth import impersonated_credentials
 from google.auth.credentials import AnonymousCredentials, Credentials
 from google.auth.environment_vars import CREDENTIALS, LEGACY_PROJECT, PROJECT

airflow/providers/google/common/auth_backend/google_openid.py

@@ -29,7 +29,7 @@ import google.auth.transport.requests
 import google.oauth2.id_token

 try:
-    from flask import Response, current_app, request as flask_request
+    from flask import Response, current_app, request as flask_request
 except ImportError:
     raise ImportError(
         "Google requires FAB provider to be installed in order to use this auth backend. "

@@ -122,7 +122,7 @@ def _lookup_user(user_email: str):


 def _set_current_user(user):
-    current_app.appbuilder.sm.lm._update_request_context_with_user(user=user)
+    current_app.appbuilder.sm.lm._update_request_context_with_user(user=user)


 T = TypeVar("T", bound=Callable)
airflow/providers/google/common/hooks/base_google.py

@@ -39,7 +39,7 @@ import tenacity
 from asgiref.sync import sync_to_async
 from gcloud.aio.auth.token import Token, TokenResponse
 from google.api_core.exceptions import Forbidden, ResourceExhausted, TooManyRequests
-from google.auth import _cloud_sdk, compute_engine
+from google.auth import _cloud_sdk, compute_engine
 from google.auth.environment_vars import CLOUD_SDK_CONFIG_DIR, CREDENTIALS
 from google.auth.exceptions import RefreshError
 from google.auth.transport import _http_client

@@ -55,11 +55,7 @@ from airflow.providers.google.cloud.utils.credentials_provider import (
     _get_target_principal_and_delegates,
     get_credentials_and_project_id,
 )
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.google.version_compat import BaseHook
 from airflow.utils.process_utils import patch_environ

 if TYPE_CHECKING:
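The deleted try/except is the pattern that version_compat.py (whose +10/-3 diff is not shown here) now centralizes for this hook and the leveldb hook below. Presumably the shim looks roughly like this sketch, mirroring the removed lines; this is an assumption, not the actual file contents:

try:
    from airflow.sdk import BaseHook  # Airflow 3 Task SDK
except ImportError:  # Airflow 2 fallback
    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]

__all__ = ["BaseHook"]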
airflow/providers/google/common/utils/id_token_credentials.py

@@ -37,7 +37,7 @@ from typing import TYPE_CHECKING

 import google.auth.transport
 from google.auth import credentials as google_auth_credentials, environment_vars, exceptions
-from google.oauth2 import credentials as oauth2_credentials, service_account
+from google.oauth2 import credentials as oauth2_credentials, service_account

 if TYPE_CHECKING:
     import google.oauth2

@@ -147,7 +147,7 @@ def _get_gcloud_sdk_credentials(
     target_audience: str | None,
 ) -> google_auth_credentials.Credentials | None:
     """Get the credentials and project ID from the Cloud SDK."""
-    from google.auth import _cloud_sdk
+    from google.auth import _cloud_sdk

     # Check if application default credentials exist.
     credentials_filename = _cloud_sdk.get_application_default_credentials_path()
airflow/providers/google/get_provider_info.py

@@ -143,13 +143,6 @@ def get_provider_info():
             "logo": "/docs/integration-logos/Key-Management-Service.png",
             "tags": ["gcp"],
         },
-        {
-            "integration-name": "Google Cloud Life Sciences",
-            "external-doc-url": "https://cloud.google.com/life-sciences/",
-            "how-to-guide": ["/docs/apache-airflow-providers-google/operators/cloud/life_sciences.rst"],
-            "logo": "/docs/integration-logos/Google-Cloud-Life-Sciences.png",
-            "tags": ["gcp"],
-        },
         {
             "integration-name": "Google Cloud Managed Service for Apache Kafka",
             "external-doc-url": "https://cloud.google.com/managed-service-for-apache-kafka/docs/",

@@ -470,6 +463,15 @@ def get_provider_info():
             "how-to-guide": ["/docs/apache-airflow-providers-google/operators/cloud/looker.rst"],
             "tags": ["gcp"],
         },
+        {
+            "integration-name": "Google Cloud Logging Sink",
+            "external-doc-url": "https://cloud.google.com/logging",
+            "logo": "/docs/integration-logos/Cloud-Logging-Sink.png",
+            "how-to-guide": [
+                "/docs/apache-airflow-providers-google/operators/cloud/cloud_logging_sink.rst"
+            ],
+            "tags": ["gcp"],
+        },
     ],
     "operators": [
         {
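The cloud_logging_sink operators and cloud_logging hook registered in these entries are new in 17.0.0 and presumably wrap the Cloud Logging configuration API. A sketch of the underlying client call, with hypothetical project, bucket, and sink names:

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import LogSink

client = ConfigServiceV2Client()
sink = LogSink(
    name="airflow-error-sink",  # hypothetical sink name
    destination="storage.googleapis.com/my-log-bucket",  # hypothetical bucket
    filter="severity>=ERROR",
)
client.create_sink(parent="projects/my-project", sink=sink)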
@@ -572,10 +574,6 @@ def get_provider_info():
             "integration-name": "Google Kubernetes Engine",
             "python-modules": ["airflow.providers.google.cloud.operators.kubernetes_engine"],
         },
-        {
-            "integration-name": "Google Cloud Life Sciences",
-            "python-modules": ["airflow.providers.google.cloud.operators.life_sciences"],
-        },
         {
             "integration-name": "Google Machine Learning Engine",
             "python-modules": ["airflow.providers.google.cloud.operators.mlengine"],

@@ -666,6 +664,7 @@ def get_provider_info():
                 "airflow.providers.google.cloud.operators.vertex_ai.auto_ml",
                 "airflow.providers.google.cloud.operators.vertex_ai.batch_prediction_job",
                 "airflow.providers.google.cloud.operators.vertex_ai.endpoint_service",
+                "airflow.providers.google.cloud.operators.vertex_ai.experiment_service",
                 "airflow.providers.google.cloud.operators.vertex_ai.hyperparameter_tuning_job",
                 "airflow.providers.google.cloud.operators.vertex_ai.model_service",
                 "airflow.providers.google.cloud.operators.vertex_ai.pipeline_job",

@@ -690,6 +689,10 @@ def get_provider_info():
             "integration-name": "Google Cloud Managed Service for Apache Kafka",
             "python-modules": ["airflow.providers.google.cloud.operators.managed_kafka"],
         },
+        {
+            "integration-name": "Google Cloud Logging Sink",
+            "python-modules": ["airflow.providers.google.cloud.operators.cloud_logging_sink"],
+        },
     ],
     "sensors": [
         {

@@ -913,10 +916,6 @@ def get_provider_info():
             "integration-name": "Google Kubernetes Engine",
             "python-modules": ["airflow.providers.google.cloud.hooks.kubernetes_engine"],
         },
-        {
-            "integration-name": "Google Cloud Life Sciences",
-            "python-modules": ["airflow.providers.google.cloud.hooks.life_sciences"],
-        },
         {
             "integration-name": "Google Machine Learning Engine",
             "python-modules": ["airflow.providers.google.cloud.hooks.mlengine"],

@@ -1028,6 +1027,7 @@ def get_provider_info():
                 "airflow.providers.google.cloud.hooks.vertex_ai.auto_ml",
                 "airflow.providers.google.cloud.hooks.vertex_ai.batch_prediction_job",
                 "airflow.providers.google.cloud.hooks.vertex_ai.endpoint_service",
+                "airflow.providers.google.cloud.hooks.vertex_ai.experiment_service",
                 "airflow.providers.google.cloud.hooks.vertex_ai.hyperparameter_tuning_job",
                 "airflow.providers.google.cloud.hooks.vertex_ai.model_service",
                 "airflow.providers.google.cloud.hooks.vertex_ai.pipeline_job",

@@ -1053,6 +1053,10 @@ def get_provider_info():
             "integration-name": "Google Cloud Managed Service for Apache Kafka",
             "python-modules": ["airflow.providers.google.cloud.hooks.managed_kafka"],
         },
+        {
+            "integration-name": "Google Cloud Logging",
+            "python-modules": ["airflow.providers.google.cloud.hooks.cloud_logging"],
+        },
     ],
     "triggers": [
         {

@@ -1464,7 +1468,6 @@ def get_provider_info():
         "airflow.providers.google.cloud.links.cloud_build.CloudBuildListLink",
         "airflow.providers.google.cloud.links.cloud_build.CloudBuildTriggersListLink",
         "airflow.providers.google.cloud.links.cloud_build.CloudBuildTriggerDetailsLink",
-        "airflow.providers.google.cloud.links.life_sciences.LifeSciencesLink",
         "airflow.providers.google.cloud.links.cloud_functions.CloudFunctionsDetailsLink",
         "airflow.providers.google.cloud.links.cloud_functions.CloudFunctionsListLink",
         "airflow.providers.google.cloud.links.cloud_storage_transfer.CloudStorageTransferListLink",
airflow/providers/google/leveldb/hooks/leveldb.py

@@ -21,11 +21,7 @@ from __future__ import annotations
 from typing import Any

 from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
-
-try:
-    from airflow.sdk import BaseHook
-except ImportError:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+from airflow.providers.google.version_compat import BaseHook

 try:
     import plyvel
airflow/providers/google/marketing_platform/hooks/display_video.py

@@ -24,6 +24,8 @@ from typing import Any

 from googleapiclient.discovery import Resource, build

+from airflow.exceptions import AirflowProviderDeprecationWarning
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import GoogleBaseHook


@@ -34,7 +36,7 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):

     def __init__(
         self,
-        api_version: str = "
+        api_version: str = "v4",
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
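Because the hook's default API version changes here (the previous default is truncated in this diff), DAGs that depend on a particular Display & Video 360 API version can pin it explicitly; "v2" below is a hypothetical example value, not the removed default:

from airflow.providers.google.marketing_platform.hooks.display_video import (
    GoogleDisplayVideo360Hook,
)

# "v2" is an assumed value; check your previous deployment for the real one.
hook = GoogleDisplayVideo360Hook(api_version="v2", gcp_conn_id="google_cloud_default")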
@@ -46,6 +48,11 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         )
         self.api_version = api_version

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.get_conn_to_display_video",
+        category=AirflowProviderDeprecationWarning,
+    )
     def get_conn(self) -> Resource:
         """Retrieve connection to DisplayVideo."""
         if not self._conn:

@@ -89,6 +96,11 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         """
         return [f"gdbm-{partner_id}/entity/{{{{ ds_nodash }}}}.*.{entity_type}.json"]

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.create_sdf_download_operation",
+        category=AirflowProviderDeprecationWarning,
+    )
     def create_query(self, query: dict[str, Any]) -> dict:
         """
         Create a query.

@@ -98,6 +110,10 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         response = self.get_conn().queries().create(body=query).execute(num_retries=self.num_retries)
         return response

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        category=AirflowProviderDeprecationWarning,
+    )
     def delete_query(self, query_id: str) -> None:
         """
         Delete a stored query as well as the associated stored reports.

@@ -106,6 +122,11 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         """
         self.get_conn().queries().delete(queryId=query_id).execute(num_retries=self.num_retries)

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.get_sdf_download_operation",
+        category=AirflowProviderDeprecationWarning,
+    )
     def get_query(self, query_id: str) -> dict:
         """
         Retrieve a stored query.
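These deprecation decorators (continued in the hunks below) make each method warn at call time. A self-contained sketch of the provider's decorator applied to a toy function, and the warning a caller should expect (new_helper is a hypothetical replacement name):

import warnings

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.common.deprecated import deprecated

@deprecated(
    planned_removal_date="September 01, 2025",
    use_instead="new_helper",  # hypothetical replacement
    category=AirflowProviderDeprecationWarning,
)
def old_helper() -> None:
    """Toy stand-in for the deprecated hook methods above."""

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    old_helper()
assert any(issubclass(w.category, AirflowProviderDeprecationWarning) for w in caught)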
@@ -115,11 +136,20 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         response = self.get_conn().queries().get(queryId=query_id).execute(num_retries=self.num_retries)
         return response

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        category=AirflowProviderDeprecationWarning,
+    )
     def list_queries(self) -> list[dict]:
         """Retrieve stored queries."""
         response = self.get_conn().queries().list().execute(num_retries=self.num_retries)
         return response.get("queries", [])

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.create_sdf_download_operation",
+        category=AirflowProviderDeprecationWarning,
+    )
     def run_query(self, query_id: str, params: dict[str, Any] | None) -> dict:
         """
         Run a stored query to generate a report.

@@ -131,6 +161,10 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
             self.get_conn().queries().run(queryId=query_id, body=params).execute(num_retries=self.num_retries)
         )

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        category=AirflowProviderDeprecationWarning,
+    )
     def get_report(self, query_id: str, report_id: str) -> dict:
         """
         Retrieve a report.

@@ -146,6 +180,11 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
             .execute(num_retries=self.num_retries)
         )

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.create_sdf_download_operation",
+        category=AirflowProviderDeprecationWarning,
+    )
     def upload_line_items(self, line_items: Any) -> list[dict[str, Any]]:
         """
         Upload line items in CSV format.

@@ -167,6 +206,11 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         )
         return response

+    @deprecated(
+        planned_removal_date="September 01, 2025",
+        use_instead="airflow.providers.google.marketing_platform.hooks.display_video.download_media",
+        category=AirflowProviderDeprecationWarning,
+    )
     def download_line_items(self, request_body: dict[str, Any]) -> list[Any]:
         """
         Retrieve line items in CSV format.

@@ -189,7 +233,7 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):

         :param body_request: Body request.

-        More information about body request
+        More information about body request can be found here:
         https://developers.google.com/display-video/api/reference/rest/v1/sdfdownloadtasks/create
         """
         result = (
@@ -219,7 +263,7 @@ class GoogleDisplayVideo360Hook(GoogleBaseHook):
         """
         Download media.

-        :param resource_name: of the media that is being downloaded.
+        :param resource_name: The resource name of the media that is being downloaded.
         """
         request = self.get_conn_to_display_video().media().download_media(resourceName=resource_name)
         return request
airflow/providers/google/marketing_platform/links/analytics_admin.py

@@ -28,7 +28,7 @@ if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk import BaseOperatorLink
     from airflow.sdk.execution_time.xcom import XCom
 else:
-    from airflow.models import XCom
+    from airflow.models import XCom
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
