apache-airflow-providers-google 12.0.0rc1__py3-none-any.whl → 13.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/LICENSE +0 -52
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/ads/hooks/ads.py +27 -13
- airflow/providers/google/ads/transfers/ads_to_gcs.py +18 -4
- airflow/providers/google/assets/bigquery.py +17 -0
- airflow/providers/google/cloud/_internal_client/secret_manager_client.py +2 -3
- airflow/providers/google/cloud/hooks/alloy_db.py +736 -8
- airflow/providers/google/cloud/hooks/automl.py +10 -4
- airflow/providers/google/cloud/hooks/bigquery.py +125 -22
- airflow/providers/google/cloud/hooks/bigquery_dts.py +8 -8
- airflow/providers/google/cloud/hooks/bigtable.py +2 -3
- airflow/providers/google/cloud/hooks/cloud_batch.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_build.py +4 -5
- airflow/providers/google/cloud/hooks/cloud_composer.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_memorystore.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_run.py +3 -4
- airflow/providers/google/cloud/hooks/cloud_sql.py +7 -3
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +119 -7
- airflow/providers/google/cloud/hooks/compute.py +3 -3
- airflow/providers/google/cloud/hooks/datacatalog.py +3 -4
- airflow/providers/google/cloud/hooks/dataflow.py +12 -12
- airflow/providers/google/cloud/hooks/dataform.py +2 -3
- airflow/providers/google/cloud/hooks/datafusion.py +2 -2
- airflow/providers/google/cloud/hooks/dataplex.py +1032 -11
- airflow/providers/google/cloud/hooks/dataproc.py +4 -5
- airflow/providers/google/cloud/hooks/dataproc_metastore.py +3 -4
- airflow/providers/google/cloud/hooks/dlp.py +3 -4
- airflow/providers/google/cloud/hooks/gcs.py +7 -6
- airflow/providers/google/cloud/hooks/kms.py +2 -3
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +8 -8
- airflow/providers/google/cloud/hooks/life_sciences.py +1 -1
- airflow/providers/google/cloud/hooks/managed_kafka.py +482 -0
- airflow/providers/google/cloud/hooks/natural_language.py +2 -3
- airflow/providers/google/cloud/hooks/os_login.py +2 -3
- airflow/providers/google/cloud/hooks/pubsub.py +6 -6
- airflow/providers/google/cloud/hooks/secret_manager.py +2 -3
- airflow/providers/google/cloud/hooks/spanner.py +2 -2
- airflow/providers/google/cloud/hooks/speech_to_text.py +2 -3
- airflow/providers/google/cloud/hooks/stackdriver.py +4 -4
- airflow/providers/google/cloud/hooks/tasks.py +3 -4
- airflow/providers/google/cloud/hooks/text_to_speech.py +2 -3
- airflow/providers/google/cloud/hooks/translate.py +236 -5
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +9 -4
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +4 -5
- airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +2 -3
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +1 -181
- airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +2 -3
- airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +3 -4
- airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +2 -3
- airflow/providers/google/cloud/hooks/video_intelligence.py +2 -3
- airflow/providers/google/cloud/hooks/vision.py +3 -4
- airflow/providers/google/cloud/hooks/workflows.py +2 -3
- airflow/providers/google/cloud/links/alloy_db.py +46 -0
- airflow/providers/google/cloud/links/bigquery.py +25 -0
- airflow/providers/google/cloud/links/dataplex.py +172 -2
- airflow/providers/google/cloud/links/kubernetes_engine.py +1 -2
- airflow/providers/google/cloud/links/managed_kafka.py +104 -0
- airflow/providers/google/cloud/links/translate.py +28 -0
- airflow/providers/google/cloud/log/gcs_task_handler.py +3 -3
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +11 -10
- airflow/providers/google/cloud/openlineage/facets.py +67 -0
- airflow/providers/google/cloud/openlineage/mixins.py +438 -173
- airflow/providers/google/cloud/openlineage/utils.py +394 -61
- airflow/providers/google/cloud/operators/alloy_db.py +980 -69
- airflow/providers/google/cloud/operators/automl.py +83 -245
- airflow/providers/google/cloud/operators/bigquery.py +377 -74
- airflow/providers/google/cloud/operators/bigquery_dts.py +126 -13
- airflow/providers/google/cloud/operators/bigtable.py +1 -3
- airflow/providers/google/cloud/operators/cloud_base.py +1 -2
- airflow/providers/google/cloud/operators/cloud_batch.py +2 -4
- airflow/providers/google/cloud/operators/cloud_build.py +3 -5
- airflow/providers/google/cloud/operators/cloud_composer.py +5 -7
- airflow/providers/google/cloud/operators/cloud_memorystore.py +4 -6
- airflow/providers/google/cloud/operators/cloud_run.py +6 -5
- airflow/providers/google/cloud/operators/cloud_sql.py +20 -8
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +62 -8
- airflow/providers/google/cloud/operators/compute.py +3 -4
- airflow/providers/google/cloud/operators/datacatalog.py +9 -11
- airflow/providers/google/cloud/operators/dataflow.py +1 -112
- airflow/providers/google/cloud/operators/dataform.py +3 -5
- airflow/providers/google/cloud/operators/datafusion.py +1 -1
- airflow/providers/google/cloud/operators/dataplex.py +2046 -7
- airflow/providers/google/cloud/operators/dataproc.py +102 -17
- airflow/providers/google/cloud/operators/dataproc_metastore.py +7 -9
- airflow/providers/google/cloud/operators/dlp.py +17 -19
- airflow/providers/google/cloud/operators/gcs.py +14 -17
- airflow/providers/google/cloud/operators/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/operators/managed_kafka.py +788 -0
- airflow/providers/google/cloud/operators/natural_language.py +3 -5
- airflow/providers/google/cloud/operators/pubsub.py +39 -7
- airflow/providers/google/cloud/operators/speech_to_text.py +3 -5
- airflow/providers/google/cloud/operators/stackdriver.py +3 -5
- airflow/providers/google/cloud/operators/tasks.py +4 -6
- airflow/providers/google/cloud/operators/text_to_speech.py +2 -4
- airflow/providers/google/cloud/operators/translate.py +414 -5
- airflow/providers/google/cloud/operators/translate_speech.py +2 -4
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +9 -8
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +6 -8
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +0 -322
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +4 -6
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +4 -6
- airflow/providers/google/cloud/operators/video_intelligence.py +3 -5
- airflow/providers/google/cloud/operators/vision.py +4 -6
- airflow/providers/google/cloud/operators/workflows.py +5 -7
- airflow/providers/google/cloud/secrets/secret_manager.py +1 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +3 -5
- airflow/providers/google/cloud/sensors/bigtable.py +2 -3
- airflow/providers/google/cloud/sensors/cloud_composer.py +32 -8
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +39 -1
- airflow/providers/google/cloud/sensors/dataplex.py +4 -6
- airflow/providers/google/cloud/sensors/dataproc.py +2 -3
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -2
- airflow/providers/google/cloud/sensors/gcs.py +2 -4
- airflow/providers/google/cloud/sensors/pubsub.py +2 -3
- airflow/providers/google/cloud/sensors/workflows.py +3 -5
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +5 -5
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +10 -12
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +36 -4
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/mysql_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +27 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +34 -5
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +15 -0
- airflow/providers/google/cloud/transfers/trino_to_gcs.py +25 -2
- airflow/providers/google/cloud/triggers/bigquery_dts.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_batch.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -2
- airflow/providers/google/cloud/triggers/cloud_composer.py +13 -3
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +102 -4
- airflow/providers/google/cloud/triggers/dataflow.py +2 -3
- airflow/providers/google/cloud/triggers/dataplex.py +1 -2
- airflow/providers/google/cloud/triggers/dataproc.py +2 -3
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +1 -2
- airflow/providers/google/cloud/triggers/vertex_ai.py +7 -8
- airflow/providers/google/cloud/utils/credentials_provider.py +15 -8
- airflow/providers/google/cloud/utils/external_token_supplier.py +1 -0
- airflow/providers/google/common/auth_backend/google_openid.py +4 -4
- airflow/providers/google/common/consts.py +1 -2
- airflow/providers/google/common/hooks/base_google.py +8 -7
- airflow/providers/google/get_provider_info.py +186 -134
- airflow/providers/google/marketing_platform/hooks/analytics_admin.py +2 -3
- airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +5 -7
- {apache_airflow_providers_google-12.0.0rc1.dist-info → apache_airflow_providers_google-13.0.0.dist-info}/METADATA +41 -58
- {apache_airflow_providers_google-12.0.0rc1.dist-info → apache_airflow_providers_google-13.0.0.dist-info}/RECORD +157 -159
- airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py +0 -141
- airflow/providers/google/cloud/example_dags/example_looker.py +0 -64
- airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py +0 -194
- airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py +0 -129
- airflow/providers/google/marketing_platform/example_dags/__init__.py +0 -16
- airflow/providers/google/marketing_platform/example_dags/example_display_video.py +0 -213
- {apache_airflow_providers_google-12.0.0rc1.dist-info → apache_airflow_providers_google-13.0.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-12.0.0rc1.dist-info → apache_airflow_providers_google-13.0.0.dist-info}/entry_points.txt +0 -0
@@ -26,6 +26,10 @@ import uuid
|
|
26
26
|
from collections.abc import MutableSequence, Sequence
|
27
27
|
from typing import TYPE_CHECKING, Any
|
28
28
|
|
29
|
+
from airflow.exceptions import AirflowException
|
30
|
+
from airflow.providers.google.common.consts import CLIENT_INFO
|
31
|
+
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
32
|
+
from airflow.version import version as airflow_version
|
29
33
|
from google.api_core.client_options import ClientOptions
|
30
34
|
from google.api_core.exceptions import ServerError
|
31
35
|
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
@@ -45,11 +49,6 @@ from google.cloud.dataproc_v1 import (
|
|
45
49
|
WorkflowTemplateServiceClient,
|
46
50
|
)
|
47
51
|
|
48
|
-
from airflow.exceptions import AirflowException
|
49
|
-
from airflow.providers.google.common.consts import CLIENT_INFO
|
50
|
-
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
51
|
-
from airflow.version import version as airflow_version
|
52
|
-
|
53
52
|
if TYPE_CHECKING:
|
54
53
|
from google.api_core.operation import Operation
|
55
54
|
from google.api_core.operation_async import AsyncOperation
|
@@ -22,13 +22,12 @@ from __future__ import annotations
|
|
22
22
|
from collections.abc import Sequence
|
23
23
|
from typing import TYPE_CHECKING, Any
|
24
24
|
|
25
|
-
from google.api_core.client_options import ClientOptions
|
26
|
-
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
27
|
-
from google.cloud.metastore_v1 import DataprocMetastoreClient
|
28
|
-
|
29
25
|
from airflow.exceptions import AirflowException
|
30
26
|
from airflow.providers.google.common.consts import CLIENT_INFO
|
31
27
|
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
28
|
+
from google.api_core.client_options import ClientOptions
|
29
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
30
|
+
from google.cloud.metastore_v1 import DataprocMetastoreClient
|
32
31
|
|
33
32
|
if TYPE_CHECKING:
|
34
33
|
from google.api_core.operation import Operation
|
@@ -31,6 +31,9 @@ import time
|
|
31
31
|
from collections.abc import Sequence
|
32
32
|
from typing import TYPE_CHECKING
|
33
33
|
|
34
|
+
from airflow.exceptions import AirflowException
|
35
|
+
from airflow.providers.google.common.consts import CLIENT_INFO
|
36
|
+
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
|
34
37
|
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
35
38
|
from google.cloud.dlp import DlpServiceClient
|
36
39
|
from google.cloud.dlp_v2.types import (
|
@@ -55,10 +58,6 @@ from google.cloud.dlp_v2.types import (
|
|
55
58
|
)
|
56
59
|
from google.protobuf.field_mask_pb2 import FieldMask
|
57
60
|
|
58
|
-
from airflow.exceptions import AirflowException
|
59
|
-
from airflow.providers.google.common.consts import CLIENT_INFO
|
60
|
-
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
|
61
|
-
|
62
61
|
if TYPE_CHECKING:
|
63
62
|
from google.api_core.retry import Retry
|
64
63
|
|
@@ -35,12 +35,6 @@ from typing import IO, TYPE_CHECKING, Any, Callable, TypeVar, cast, overload
|
|
35
35
|
from urllib.parse import urlsplit
|
36
36
|
|
37
37
|
from gcloud.aio.storage import Storage
|
38
|
-
from google.api_core.exceptions import GoogleAPICallError, NotFound
|
39
|
-
|
40
|
-
# not sure why but mypy complains on missing `storage` but it is clearly there and is importable
|
41
|
-
from google.cloud import storage # type: ignore[attr-defined]
|
42
|
-
from google.cloud.exceptions import GoogleCloudError
|
43
|
-
from google.cloud.storage.retry import DEFAULT_RETRY
|
44
38
|
from requests import Session
|
45
39
|
|
46
40
|
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
|
@@ -55,11 +49,18 @@ from airflow.providers.google.common.hooks.base_google import (
|
|
55
49
|
from airflow.typing_compat import ParamSpec
|
56
50
|
from airflow.utils import timezone
|
57
51
|
from airflow.version import version
|
52
|
+
from google.api_core.exceptions import GoogleAPICallError, NotFound
|
53
|
+
|
54
|
+
# not sure why but mypy complains on missing `storage` but it is clearly there and is importable
|
55
|
+
from google.cloud import storage # type: ignore[attr-defined]
|
56
|
+
from google.cloud.exceptions import GoogleCloudError
|
57
|
+
from google.cloud.storage.retry import DEFAULT_RETRY
|
58
58
|
|
59
59
|
if TYPE_CHECKING:
|
60
60
|
from datetime import datetime
|
61
61
|
|
62
62
|
from aiohttp import ClientSession
|
63
|
+
|
63
64
|
from google.api_core.retry import Retry
|
64
65
|
from google.cloud.storage.blob import Blob
|
65
66
|
|
@@ -23,11 +23,10 @@ import base64
|
|
23
23
|
from collections.abc import Sequence
|
24
24
|
from typing import TYPE_CHECKING
|
25
25
|
|
26
|
-
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
27
|
-
from google.cloud.kms_v1 import KeyManagementServiceClient
|
28
|
-
|
29
26
|
from airflow.providers.google.common.consts import CLIENT_INFO
|
30
27
|
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
28
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
29
|
+
from google.cloud.kms_v1 import KeyManagementServiceClient
|
31
30
|
|
32
31
|
if TYPE_CHECKING:
|
33
32
|
from google.api_core.retry import Retry
|
@@ -25,14 +25,6 @@ import time
|
|
25
25
|
from collections.abc import Sequence
|
26
26
|
from typing import TYPE_CHECKING, Any
|
27
27
|
|
28
|
-
from google.api_core.exceptions import NotFound
|
29
|
-
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
30
|
-
from google.auth.transport import requests as google_requests
|
31
|
-
|
32
|
-
# not sure why but mypy complains on missing `container_v1` but it is clearly there and is importable
|
33
|
-
from google.cloud import exceptions # type: ignore[attr-defined]
|
34
|
-
from google.cloud.container_v1 import ClusterManagerAsyncClient, ClusterManagerClient
|
35
|
-
from google.cloud.container_v1.types import Cluster, Operation
|
36
28
|
from kubernetes import client
|
37
29
|
from kubernetes_asyncio import client as async_client
|
38
30
|
from kubernetes_asyncio.config.kube_config import FileOrData
|
@@ -47,6 +39,14 @@ from airflow.providers.google.common.hooks.base_google import (
|
|
47
39
|
GoogleBaseAsyncHook,
|
48
40
|
GoogleBaseHook,
|
49
41
|
)
|
42
|
+
from google.api_core.exceptions import NotFound
|
43
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
44
|
+
from google.auth.transport import requests as google_requests
|
45
|
+
|
46
|
+
# not sure why but mypy complains on missing `container_v1` but it is clearly there and is importable
|
47
|
+
from google.cloud import exceptions # type: ignore[attr-defined]
|
48
|
+
from google.cloud.container_v1 import ClusterManagerAsyncClient, ClusterManagerClient
|
49
|
+
from google.cloud.container_v1.types import Cluster, Operation
|
50
50
|
|
51
51
|
if TYPE_CHECKING:
|
52
52
|
import google.auth.credentials
|
@@ -22,9 +22,9 @@ from __future__ import annotations
|
|
22
22
|
import time
|
23
23
|
from collections.abc import Sequence
|
24
24
|
|
25
|
-
import google.api_core.path_template
|
26
25
|
from googleapiclient.discovery import build
|
27
26
|
|
27
|
+
import google.api_core.path_template
|
28
28
|
from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
|
29
29
|
from airflow.providers.google.common.deprecated import deprecated
|
30
30
|
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
@@ -0,0 +1,482 @@
|
|
1
|
+
#
|
2
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
3
|
+
# or more contributor license agreements. See the NOTICE file
|
4
|
+
# distributed with this work for additional information
|
5
|
+
# regarding copyright ownership. The ASF licenses this file
|
6
|
+
# to you under the Apache License, Version 2.0 (the
|
7
|
+
# "License"); you may not use this file except in compliance
|
8
|
+
# with the License. You may obtain a copy of the License at
|
9
|
+
#
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11
|
+
#
|
12
|
+
# Unless required by applicable law or agreed to in writing,
|
13
|
+
# software distributed under the License is distributed on an
|
14
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
15
|
+
# KIND, either express or implied. See the License for the
|
16
|
+
# specific language governing permissions and limitations
|
17
|
+
# under the License.
|
18
|
+
"""This module contains a Managed Service for Apache Kafka hook."""
|
19
|
+
|
20
|
+
from __future__ import annotations
|
21
|
+
|
22
|
+
from collections.abc import Sequence
|
23
|
+
from copy import deepcopy
|
24
|
+
from typing import TYPE_CHECKING
|
25
|
+
|
26
|
+
from airflow.exceptions import AirflowException
|
27
|
+
from airflow.providers.google.common.consts import CLIENT_INFO
|
28
|
+
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
29
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
30
|
+
from google.cloud.managedkafka_v1 import Cluster, ManagedKafkaClient, Topic, types
|
31
|
+
|
32
|
+
if TYPE_CHECKING:
|
33
|
+
from google.api_core.operation import Operation
|
34
|
+
from google.api_core.retry import Retry
|
35
|
+
from google.cloud.managedkafka_v1.services.managed_kafka.pagers import ListClustersPager, ListTopicsPager
|
36
|
+
from google.protobuf.field_mask_pb2 import FieldMask
|
37
|
+
|
38
|
+
|
39
|
+
class ManagedKafkaHook(GoogleBaseHook):
|
40
|
+
"""Hook for Managed Service for Apache Kafka APIs."""
|
41
|
+
|
42
|
+
def __init__(
|
43
|
+
self,
|
44
|
+
gcp_conn_id: str = "google_cloud_default",
|
45
|
+
impersonation_chain: str | Sequence[str] | None = None,
|
46
|
+
**kwargs,
|
47
|
+
) -> None:
|
48
|
+
super().__init__(gcp_conn_id, impersonation_chain, **kwargs)
|
49
|
+
|
50
|
+
def get_managed_kafka_client(self) -> ManagedKafkaClient:
|
51
|
+
"""Return ManagedKafkaClient object."""
|
52
|
+
return ManagedKafkaClient(
|
53
|
+
credentials=self.get_credentials(),
|
54
|
+
client_info=CLIENT_INFO,
|
55
|
+
)
|
56
|
+
|
57
|
+
def wait_for_operation(self, operation: Operation, timeout: float | None = None):
|
58
|
+
"""Wait for long-lasting operation to complete."""
|
59
|
+
try:
|
60
|
+
return operation.result(timeout=timeout)
|
61
|
+
except Exception:
|
62
|
+
error = operation.exception(timeout=timeout)
|
63
|
+
raise AirflowException(error)
|
64
|
+
|
65
|
+
@GoogleBaseHook.fallback_to_default_project_id
|
66
|
+
def create_cluster(
|
67
|
+
self,
|
68
|
+
project_id: str,
|
69
|
+
location: str,
|
70
|
+
cluster: types.Cluster | dict,
|
71
|
+
cluster_id: str,
|
72
|
+
request_id: str | None = None,
|
73
|
+
retry: Retry | _MethodDefault = DEFAULT,
|
74
|
+
timeout: float | None = None,
|
75
|
+
metadata: Sequence[tuple[str, str]] = (),
|
76
|
+
) -> Operation:
|
77
|
+
"""
|
78
|
+
Create a new Apache Kafka cluster.
|
79
|
+
|
80
|
+
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
|
81
|
+
:param location: Required. The ID of the Google Cloud region that the service belongs to.
|
82
|
+
:param cluster: Required. Configuration of the cluster to create. Its ``name`` field is ignored.
|
83
|
+
:param cluster_id: Required. The ID to use for the cluster, which will become the final component of
|
84
|
+
the cluster's name. The ID must be 1-63 characters long, and match the regular expression
|
85
|
+
``[a-z]([-a-z0-9]*[a-z0-9])?`` to comply with RFC 1035. This value is structured like: ``my-cluster-id``.
|
86
|
+
:param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
|
87
|
+
to avoid duplication of requests. If a request times out or fails, retrying with the same ID
|
88
|
+
allows the server to recognize the previous attempt. For at least 60 minutes, the server ignores
|
89
|
+
duplicate requests bearing the same ID. For example, consider a situation where you make an
|
90
|
+
initial request and the request times out. If you make the request again with the same request ID
|
91
|
+
within 60 minutes of the last request, the server checks if an original operation with the same
|
92
|
+
request ID was received. If so, the server ignores the second request. The request ID must be a
|
93
|
+
valid UUID. A zero UUID is not supported (00000000-0000-0000-0000-000000000000).
|
94
|
+
:param retry: Designation of what errors, if any, should be retried.
|
95
|
+
:param timeout: The timeout for this request.
|
96
|
+
:param metadata: Strings which should be sent along with the request as metadata.
|
97
|
+
"""
|
98
|
+
client = self.get_managed_kafka_client()
|
99
|
+
parent = client.common_location_path(project_id, location)
|
100
|
+
|
101
|
+
operation = client.create_cluster(
|
102
|
+
request={
|
103
|
+
"parent": parent,
|
104
|
+
"cluster_id": cluster_id,
|
105
|
+
"cluster": cluster,
|
106
|
+
"request_id": request_id,
|
107
|
+
},
|
108
|
+
retry=retry,
|
109
|
+
timeout=timeout,
|
110
|
+
metadata=metadata,
|
111
|
+
)
|
112
|
+
return operation
|
113
|
+
|
114
|
+
@GoogleBaseHook.fallback_to_default_project_id
|
115
|
+
def list_clusters(
|
116
|
+
self,
|
117
|
+
project_id: str,
|
118
|
+
location: str,
|
119
|
+
page_size: int | None = None,
|
120
|
+
page_token: str | None = None,
|
121
|
+
filter: str | None = None,
|
122
|
+
order_by: str | None = None,
|
123
|
+
retry: Retry | _MethodDefault = DEFAULT,
|
124
|
+
timeout: float | None = None,
|
125
|
+
metadata: Sequence[tuple[str, str]] = (),
|
126
|
+
) -> ListClustersPager:
|
127
|
+
"""
|
128
|
+
List the clusters in a given project and location.
|
129
|
+
|
130
|
+
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
|
131
|
+
:param location: Required. The ID of the Google Cloud region that the service belongs to.
|
132
|
+
:param page_size: Optional. The maximum number of clusters to return. The service may return fewer
|
133
|
+
than this value. If unspecified, server will pick an appropriate default.
|
134
|
+
:param page_token: Optional. A page token, received from a previous ``ListClusters`` call. Provide
|
135
|
+
this to retrieve the subsequent page.
|
136
|
+
When paginating, all other parameters provided to ``ListClusters`` must match the call that
|
137
|
+
provided the page token.
|
138
|
+
:param filter: Optional. Filter expression for the result.
|
139
|
+
:param order_by: Optional. Order by fields for the result.
|
140
|
+
:param retry: Designation of what errors, if any, should be retried.
|
141
|
+
:param timeout: The timeout for this request.
|
142
|
+
:param metadata: Strings which should be sent along with the request as metadata.
|
143
|
+
"""
|
144
|
+
client = self.get_managed_kafka_client()
|
145
|
+
parent = client.common_location_path(project_id, location)
|
146
|
+
|
147
|
+
result = client.list_clusters(
|
148
|
+
request={
|
149
|
+
"parent": parent,
|
150
|
+
"page_size": page_size,
|
151
|
+
"page_token": page_token,
|
152
|
+
"filter": filter,
|
153
|
+
"order_by": order_by,
|
154
|
+
},
|
155
|
+
retry=retry,
|
156
|
+
timeout=timeout,
|
157
|
+
metadata=metadata,
|
158
|
+
)
|
159
|
+
return result
|
160
|
+
|
161
|
+
@GoogleBaseHook.fallback_to_default_project_id
|
162
|
+
def get_cluster(
|
163
|
+
self,
|
164
|
+
project_id: str,
|
165
|
+
location: str,
|
166
|
+
cluster_id: str,
|
167
|
+
retry: Retry | _MethodDefault = DEFAULT,
|
168
|
+
timeout: float | None = None,
|
169
|
+
metadata: Sequence[tuple[str, str]] = (),
|
170
|
+
) -> types.Cluster:
|
171
|
+
"""
|
172
|
+
Return the properties of a single cluster.
|
173
|
+
|
174
|
+
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
|
175
|
+
:param location: Required. The ID of the Google Cloud region that the service belongs to.
|
176
|
+
:param cluster_id: Required. The ID of the cluster whose configuration to return.
|
177
|
+
:param retry: Designation of what errors, if any, should be retried.
|
178
|
+
:param timeout: The timeout for this request.
|
179
|
+
:param metadata: Strings which should be sent along with the request as metadata.
|
180
|
+
"""
|
181
|
+
client = self.get_managed_kafka_client()
|
182
|
+
name = client.cluster_path(project_id, location, cluster_id)
|
183
|
+
|
184
|
+
result = client.get_cluster(
|
185
|
+
request={
|
186
|
+
"name": name,
|
187
|
+
},
|
188
|
+
retry=retry,
|
189
|
+
timeout=timeout,
|
190
|
+
metadata=metadata,
|
191
|
+
)
|
192
|
+
return result
|
193
|
+
|
194
|
+
@GoogleBaseHook.fallback_to_default_project_id
|
195
|
+
def update_cluster(
|
196
|
+
self,
|
197
|
+
project_id: str,
|
198
|
+
location: str,
|
199
|
+
cluster_id: str,
|
200
|
+
cluster: types.Cluster | dict,
|
201
|
+
update_mask: FieldMask | dict,
|
202
|
+
request_id: str | None = None,
|
203
|
+
retry: Retry | _MethodDefault = DEFAULT,
|
204
|
+
timeout: float | None = None,
|
205
|
+
metadata: Sequence[tuple[str, str]] = (),
|
206
|
+
) -> Operation:
|
207
|
+
"""
|
208
|
+
Update the properties of a single cluster.
|
209
|
+
|
210
|
+
:param project_id: Required. The ID of the Google Cloud project that the service belongs to.
|
211
|
+
:param location: Required. The ID of the Google Cloud region that the service belongs to.
|
212
|
+
:param cluster_id: Required. The ID of the cluster whose configuration to update.
|
213
|
+
:param cluster: Required. The cluster to update.
|
214
|
+
:param update_mask: Required. Field mask is used to specify the fields to be overwritten in the
|
215
|
+
cluster resource by the update. The fields specified in the update_mask are relative to the
|
216
|
+
resource, not the full request. A field will be overwritten if it is in the mask.
|
217
|
+
:param request_id: Optional. An optional request ID to identify requests. Specify a unique request ID
|
218
|
+
to avoid duplication of requests. If a request times out or fails, retrying with the same ID
|
219
|
+
allows the server to recognize the previous attempt. For at least 60 minutes, the server ignores
|
220
|
+
duplicate requests bearing the same ID.
|
221
|
+
For example, consider a situation where you make an initial request and the request times out. If
|
222
|
+
you make the request again with the same request ID within 60 minutes of the last request, the
|
223
|
+
server checks if an original operation with the same request ID was received. If so, the server
|
224
|
+
ignores the second request.
|
225
|
+
The request ID must be a valid UUID. A zero UUID is not supported (00000000-0000-0000-0000-000000000000).
|
226
|
+
:param retry: Designation of what errors, if any, should be retried.
|
227
|
+
:param timeout: The timeout for this request.
|
228
|
+
:param metadata: Strings which should be sent along with the request as metadata.
|
229
|
+
"""
|
230
|
+
client = self.get_managed_kafka_client()
|
231
|
+
_cluster = deepcopy(cluster) if isinstance(cluster, dict) else Cluster.to_dict(cluster)
|
232
|
+
_cluster["name"] = client.cluster_path(project_id, location, cluster_id)
|
233
|
+
|
234
|
+
operation = client.update_cluster(
|
235
|
+
request={
|
236
|
+
"update_mask": update_mask,
|
237
|
+
"cluster": _cluster,
|
238
|
+
"request_id": request_id,
|
239
|
+
},
|
240
|
+
retry=retry,
|
241
|
+
timeout=timeout,
|
242
|
+
metadata=metadata,
|
243
|
+
)
|
244
|
+
return operation
|
245
|
+
|
246
|
+
@GoogleBaseHook.fallback_to_default_project_id
def delete_cluster(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    request_id: str | None = None,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> Operation:
    """
    Remove an existing Apache Kafka cluster.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster to delete.
    :param request_id: Optional. A unique request ID used for deduplication: if the call is retried
        with the same ID within 60 minutes, the server recognizes the earlier attempt and ignores
        the duplicate. Must be a valid, non-zero UUID
        (00000000-0000-0000-0000-000000000000 is not supported).
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    # Build the fully-qualified resource name expected by the API.
    cluster_name = client.cluster_path(project_id, location, cluster_id)
    delete_request = {
        "name": cluster_name,
        "request_id": request_id,
    }
    return client.delete_cluster(
        request=delete_request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
289
|
+
|
290
|
+
@GoogleBaseHook.fallback_to_default_project_id
def create_topic(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    topic_id: str,
    topic: types.Topic | dict,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> types.Topic:
    """
    Create a topic inside the given Apache Kafka cluster.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster in which to create the topic.
    :param topic_id: Required. The ID to use for the topic; it becomes the final component of the
        topic's resource name.
    :param topic: Required. Configuration of the topic to create (proto message or dict).
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    # The cluster path acts as the parent resource for the new topic.
    parent_path = client.cluster_path(project_id, location, cluster_id)
    create_request = {
        "parent": parent_path,
        "topic_id": topic_id,
        "topic": topic,
    }
    return client.create_topic(
        request=create_request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
329
|
+
|
330
|
+
@GoogleBaseHook.fallback_to_default_project_id
def list_topics(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    page_size: int | None = None,
    page_token: str | None = None,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> ListTopicsPager:
    """
    Return a pager over the topics of the given cluster.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster whose topics are to be listed.
    :param page_size: Optional. Maximum number of topics per page; the service may return fewer.
        If unset or zero, all topics for the parent are returned.
    :param page_token: Optional. A page token from a previous ``ListTopics`` call, used to fetch
        the next page. All other parameters must match the call that produced the token.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    parent_path = client.cluster_path(project_id, location, cluster_id)
    list_request = {
        "parent": parent_path,
        "page_size": page_size,
        "page_token": page_token,
    }
    return client.list_topics(
        request=list_request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
371
|
+
|
372
|
+
@GoogleBaseHook.fallback_to_default_project_id
def get_topic(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    topic_id: str,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> types.Topic:
    """
    Fetch the configuration of a single topic.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster whose topic is to be returned.
    :param topic_id: Required. The ID of the topic whose configuration to return.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    topic_name = client.topic_path(project_id, location, cluster_id, topic_id)
    return client.get_topic(
        request={
            "name": topic_name,
        },
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
406
|
+
|
407
|
+
@GoogleBaseHook.fallback_to_default_project_id
def update_topic(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    topic_id: str,
    topic: types.Topic | dict,
    update_mask: FieldMask | dict,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> types.Topic:
    """
    Apply a partial update to a single topic.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster whose topic is to be updated.
    :param topic_id: Required. The ID of the topic whose configuration to update.
    :param topic: Required. The topic to update. Its ``name`` field must be populated.
    :param update_mask: Required. Field mask naming the fields of the Topic resource to overwrite.
        Paths are relative to the resource, not the full request; a field is overwritten only if
        it appears in the mask.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    # Normalize to a plain dict (copying caller input) so the resource name can be injected
    # without mutating the caller's object.
    if isinstance(topic, dict):
        topic_payload = deepcopy(topic)
    else:
        topic_payload = Topic.to_dict(topic)
    topic_payload["name"] = client.topic_path(project_id, location, cluster_id, topic_id)

    return client.update_topic(
        request={
            "update_mask": update_mask,
            "topic": topic_payload,
        },
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
449
|
+
|
450
|
+
@GoogleBaseHook.fallback_to_default_project_id
def delete_topic(
    self,
    project_id: str,
    location: str,
    cluster_id: str,
    topic_id: str,
    retry: Retry | _MethodDefault = DEFAULT,
    timeout: float | None = None,
    metadata: Sequence[tuple[str, str]] = (),
) -> None:
    """
    Remove a single topic from a cluster.

    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
    :param location: Required. The ID of the Google Cloud region that the service belongs to.
    :param cluster_id: Required. The ID of the cluster whose topic is to be deleted.
    :param topic_id: Required. The ID of the topic to delete.
    :param retry: Designation of what errors, if any, should be retried.
    :param timeout: The timeout for this request.
    :param metadata: Strings which should be sent along with the request as metadata.
    """
    client = self.get_managed_kafka_client()
    topic_name = client.topic_path(project_id, location, cluster_id, topic_id)
    delete_request = {
        "name": topic_name,
    }
    client.delete_topic(
        request=delete_request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
|
@@ -22,6 +22,8 @@ from __future__ import annotations
|
|
22
22
|
from collections.abc import Sequence
|
23
23
|
from typing import TYPE_CHECKING
|
24
24
|
|
25
|
+
from airflow.providers.google.common.consts import CLIENT_INFO
|
26
|
+
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
25
27
|
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
26
28
|
from google.cloud.language_v1 import EncodingType, LanguageServiceClient
|
27
29
|
from google.cloud.language_v1.types import (
|
@@ -35,9 +37,6 @@ from google.cloud.language_v1.types import (
|
|
35
37
|
Document,
|
36
38
|
)
|
37
39
|
|
38
|
-
from airflow.providers.google.common.consts import CLIENT_INFO
|
39
|
-
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
|
40
|
-
|
41
40
|
if TYPE_CHECKING:
|
42
41
|
from google.api_core.retry import Retry
|
43
42
|
|
@@ -27,11 +27,10 @@ from __future__ import annotations
|
|
27
27
|
from collections.abc import Sequence
|
28
28
|
from typing import TYPE_CHECKING
|
29
29
|
|
30
|
-
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
31
|
-
from google.cloud.oslogin_v1 import ImportSshPublicKeyResponse, OsLoginServiceClient
|
32
|
-
|
33
30
|
from airflow.providers.google.common.consts import CLIENT_INFO
|
34
31
|
from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
|
32
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
33
|
+
from google.cloud.oslogin_v1 import ImportSshPublicKeyResponse, OsLoginServiceClient
|
35
34
|
|
36
35
|
if TYPE_CHECKING:
|
37
36
|
from google.api_core.retry import Retry
|