apache-airflow-providers-google 14.0.0__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/3rd-party-licenses/LICENSES.txt +14 -0
- airflow/providers/google/3rd-party-licenses/NOTICE +5 -0
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/_vendor/__init__.py +0 -0
- airflow/providers/google/_vendor/json_merge_patch.py +91 -0
- airflow/providers/google/ads/hooks/ads.py +52 -43
- airflow/providers/google/ads/operators/ads.py +2 -2
- airflow/providers/google/ads/transfers/ads_to_gcs.py +3 -19
- airflow/providers/google/assets/gcs.py +1 -11
- airflow/providers/google/cloud/_internal_client/secret_manager_client.py +3 -2
- airflow/providers/google/cloud/bundles/gcs.py +161 -0
- airflow/providers/google/cloud/hooks/alloy_db.py +2 -3
- airflow/providers/google/cloud/hooks/bigquery.py +195 -318
- airflow/providers/google/cloud/hooks/bigquery_dts.py +8 -8
- airflow/providers/google/cloud/hooks/bigtable.py +3 -2
- airflow/providers/google/cloud/hooks/cloud_batch.py +8 -9
- airflow/providers/google/cloud/hooks/cloud_build.py +6 -65
- airflow/providers/google/cloud/hooks/cloud_composer.py +292 -24
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_memorystore.py +4 -3
- airflow/providers/google/cloud/hooks/cloud_run.py +20 -11
- airflow/providers/google/cloud/hooks/cloud_sql.py +136 -64
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +35 -15
- airflow/providers/google/cloud/hooks/compute.py +7 -6
- airflow/providers/google/cloud/hooks/compute_ssh.py +7 -4
- airflow/providers/google/cloud/hooks/datacatalog.py +12 -3
- airflow/providers/google/cloud/hooks/dataflow.py +87 -242
- airflow/providers/google/cloud/hooks/dataform.py +9 -14
- airflow/providers/google/cloud/hooks/datafusion.py +7 -9
- airflow/providers/google/cloud/hooks/dataplex.py +13 -12
- airflow/providers/google/cloud/hooks/dataprep.py +2 -2
- airflow/providers/google/cloud/hooks/dataproc.py +76 -74
- airflow/providers/google/cloud/hooks/dataproc_metastore.py +4 -3
- airflow/providers/google/cloud/hooks/dlp.py +5 -4
- airflow/providers/google/cloud/hooks/gcs.py +144 -33
- airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
- airflow/providers/google/cloud/hooks/kms.py +3 -2
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -17
- airflow/providers/google/cloud/hooks/looker.py +6 -1
- airflow/providers/google/cloud/hooks/managed_kafka.py +227 -3
- airflow/providers/google/cloud/hooks/mlengine.py +7 -8
- airflow/providers/google/cloud/hooks/natural_language.py +3 -2
- airflow/providers/google/cloud/hooks/os_login.py +3 -2
- airflow/providers/google/cloud/hooks/pubsub.py +6 -6
- airflow/providers/google/cloud/hooks/secret_manager.py +105 -12
- airflow/providers/google/cloud/hooks/spanner.py +75 -10
- airflow/providers/google/cloud/hooks/speech_to_text.py +3 -2
- airflow/providers/google/cloud/hooks/stackdriver.py +18 -18
- airflow/providers/google/cloud/hooks/tasks.py +4 -3
- airflow/providers/google/cloud/hooks/text_to_speech.py +3 -2
- airflow/providers/google/cloud/hooks/translate.py +8 -17
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +8 -222
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +9 -15
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +33 -283
- airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +5 -12
- airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +6 -12
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +311 -10
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
- airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +7 -13
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +8 -12
- airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +6 -12
- airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +3 -2
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/hooks/video_intelligence.py +3 -2
- airflow/providers/google/cloud/hooks/vision.py +7 -7
- airflow/providers/google/cloud/hooks/workflows.py +4 -3
- airflow/providers/google/cloud/links/alloy_db.py +0 -46
- airflow/providers/google/cloud/links/base.py +77 -7
- airflow/providers/google/cloud/links/bigquery.py +0 -47
- airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
- airflow/providers/google/cloud/links/bigtable.py +0 -48
- airflow/providers/google/cloud/links/cloud_build.py +0 -73
- airflow/providers/google/cloud/links/cloud_functions.py +0 -33
- airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
- airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
- airflow/providers/google/cloud/links/cloud_sql.py +0 -33
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -46
- airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
- airflow/providers/google/cloud/links/compute.py +0 -58
- airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
- airflow/providers/google/cloud/links/datacatalog.py +23 -54
- airflow/providers/google/cloud/links/dataflow.py +0 -34
- airflow/providers/google/cloud/links/dataform.py +0 -64
- airflow/providers/google/cloud/links/datafusion.py +1 -90
- airflow/providers/google/cloud/links/dataplex.py +0 -154
- airflow/providers/google/cloud/links/dataprep.py +0 -24
- airflow/providers/google/cloud/links/dataproc.py +11 -89
- airflow/providers/google/cloud/links/datastore.py +0 -31
- airflow/providers/google/cloud/links/kubernetes_engine.py +11 -61
- airflow/providers/google/cloud/links/managed_kafka.py +11 -51
- airflow/providers/google/cloud/links/mlengine.py +0 -70
- airflow/providers/google/cloud/links/pubsub.py +0 -32
- airflow/providers/google/cloud/links/spanner.py +0 -33
- airflow/providers/google/cloud/links/stackdriver.py +0 -30
- airflow/providers/google/cloud/links/translate.py +17 -187
- airflow/providers/google/cloud/links/vertex_ai.py +28 -195
- airflow/providers/google/cloud/links/workflows.py +0 -52
- airflow/providers/google/cloud/log/gcs_task_handler.py +166 -118
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +14 -9
- airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
- airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
- airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
- airflow/providers/google/cloud/openlineage/facets.py +141 -40
- airflow/providers/google/cloud/openlineage/mixins.py +14 -13
- airflow/providers/google/cloud/openlineage/utils.py +19 -3
- airflow/providers/google/cloud/operators/alloy_db.py +76 -61
- airflow/providers/google/cloud/operators/bigquery.py +104 -667
- airflow/providers/google/cloud/operators/bigquery_dts.py +12 -12
- airflow/providers/google/cloud/operators/bigtable.py +38 -7
- airflow/providers/google/cloud/operators/cloud_base.py +22 -1
- airflow/providers/google/cloud/operators/cloud_batch.py +18 -18
- airflow/providers/google/cloud/operators/cloud_build.py +80 -36
- airflow/providers/google/cloud/operators/cloud_composer.py +157 -71
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_memorystore.py +74 -46
- airflow/providers/google/cloud/operators/cloud_run.py +39 -20
- airflow/providers/google/cloud/operators/cloud_sql.py +46 -61
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -14
- airflow/providers/google/cloud/operators/compute.py +18 -50
- airflow/providers/google/cloud/operators/datacatalog.py +167 -29
- airflow/providers/google/cloud/operators/dataflow.py +38 -15
- airflow/providers/google/cloud/operators/dataform.py +19 -7
- airflow/providers/google/cloud/operators/datafusion.py +43 -43
- airflow/providers/google/cloud/operators/dataplex.py +212 -126
- airflow/providers/google/cloud/operators/dataprep.py +1 -5
- airflow/providers/google/cloud/operators/dataproc.py +134 -207
- airflow/providers/google/cloud/operators/dataproc_metastore.py +102 -84
- airflow/providers/google/cloud/operators/datastore.py +22 -6
- airflow/providers/google/cloud/operators/dlp.py +24 -45
- airflow/providers/google/cloud/operators/functions.py +21 -14
- airflow/providers/google/cloud/operators/gcs.py +15 -12
- airflow/providers/google/cloud/operators/gen_ai.py +389 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +115 -106
- airflow/providers/google/cloud/operators/looker.py +1 -1
- airflow/providers/google/cloud/operators/managed_kafka.py +362 -40
- airflow/providers/google/cloud/operators/natural_language.py +5 -3
- airflow/providers/google/cloud/operators/pubsub.py +69 -21
- airflow/providers/google/cloud/operators/spanner.py +53 -45
- airflow/providers/google/cloud/operators/speech_to_text.py +5 -4
- airflow/providers/google/cloud/operators/stackdriver.py +5 -11
- airflow/providers/google/cloud/operators/tasks.py +6 -15
- airflow/providers/google/cloud/operators/text_to_speech.py +4 -3
- airflow/providers/google/cloud/operators/translate.py +46 -20
- airflow/providers/google/cloud/operators/translate_speech.py +4 -3
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +44 -34
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +34 -12
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +62 -53
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +75 -11
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +48 -12
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +16 -12
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +62 -14
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +35 -10
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
- airflow/providers/google/cloud/operators/video_intelligence.py +5 -3
- airflow/providers/google/cloud/operators/vision.py +7 -5
- airflow/providers/google/cloud/operators/workflows.py +24 -19
- airflow/providers/google/cloud/secrets/secret_manager.py +2 -1
- airflow/providers/google/cloud/sensors/bigquery.py +2 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +6 -4
- airflow/providers/google/cloud/sensors/bigtable.py +14 -6
- airflow/providers/google/cloud/sensors/cloud_composer.py +535 -33
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +6 -5
- airflow/providers/google/cloud/sensors/dataflow.py +27 -10
- airflow/providers/google/cloud/sensors/dataform.py +2 -2
- airflow/providers/google/cloud/sensors/datafusion.py +4 -4
- airflow/providers/google/cloud/sensors/dataplex.py +7 -5
- airflow/providers/google/cloud/sensors/dataprep.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc.py +10 -9
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +4 -3
- airflow/providers/google/cloud/sensors/gcs.py +22 -21
- airflow/providers/google/cloud/sensors/looker.py +5 -5
- airflow/providers/google/cloud/sensors/pubsub.py +20 -20
- airflow/providers/google/cloud/sensors/tasks.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
- airflow/providers/google/cloud/sensors/workflows.py +6 -4
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +14 -13
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
- airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
- airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
- airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +18 -22
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +4 -5
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +45 -38
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
- airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
- airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +44 -12
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
- airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +36 -14
- airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
- airflow/providers/google/cloud/triggers/bigquery.py +75 -34
- airflow/providers/google/cloud/triggers/bigquery_dts.py +2 -1
- airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
- airflow/providers/google/cloud/triggers/cloud_build.py +3 -2
- airflow/providers/google/cloud/triggers/cloud_composer.py +303 -47
- airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +96 -5
- airflow/providers/google/cloud/triggers/dataflow.py +125 -2
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataplex.py +16 -3
- airflow/providers/google/cloud/triggers/dataproc.py +124 -53
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +46 -28
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +17 -20
- airflow/providers/google/cloud/triggers/vertex_ai.py +8 -7
- airflow/providers/google/cloud/utils/bigquery.py +5 -7
- airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +4 -3
- airflow/providers/google/cloud/utils/dataform.py +1 -1
- airflow/providers/google/cloud/utils/external_token_supplier.py +0 -1
- airflow/providers/google/cloud/utils/field_validator.py +1 -2
- airflow/providers/google/cloud/utils/validators.py +43 -0
- airflow/providers/google/common/auth_backend/google_openid.py +26 -9
- airflow/providers/google/common/consts.py +2 -1
- airflow/providers/google/common/deprecated.py +2 -1
- airflow/providers/google/common/hooks/base_google.py +40 -43
- airflow/providers/google/common/hooks/operation_helpers.py +78 -0
- airflow/providers/google/common/links/storage.py +0 -22
- airflow/providers/google/common/utils/get_secret.py +31 -0
- airflow/providers/google/common/utils/id_token_credentials.py +4 -5
- airflow/providers/google/firebase/operators/firestore.py +2 -2
- airflow/providers/google/get_provider_info.py +61 -216
- airflow/providers/google/go_module_utils.py +35 -3
- airflow/providers/google/leveldb/hooks/leveldb.py +30 -6
- airflow/providers/google/leveldb/operators/leveldb.py +2 -2
- airflow/providers/google/marketing_platform/hooks/analytics_admin.py +3 -2
- airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
- airflow/providers/google/marketing_platform/hooks/search_ads.py +1 -1
- airflow/providers/google/marketing_platform/links/analytics_admin.py +4 -5
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +7 -6
- airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
- airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
- airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
- airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
- airflow/providers/google/marketing_platform/sensors/display_video.py +4 -64
- airflow/providers/google/suite/hooks/calendar.py +1 -1
- airflow/providers/google/suite/hooks/drive.py +2 -2
- airflow/providers/google/suite/hooks/sheets.py +15 -1
- airflow/providers/google/suite/operators/sheets.py +8 -3
- airflow/providers/google/suite/sensors/drive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
- airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
- airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
- airflow/providers/google/version_compat.py +15 -1
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +117 -72
- apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +1 -1
- apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
- airflow/providers/google/cloud/example_dags/example_cloud_task.py +0 -54
- airflow/providers/google/cloud/hooks/automl.py +0 -679
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/automl.py +0 -193
- airflow/providers/google/cloud/operators/automl.py +0 -1360
- airflow/providers/google/cloud/operators/life_sciences.py +0 -119
- airflow/providers/google/cloud/operators/mlengine.py +0 -1515
- airflow/providers/google/cloud/utils/mlengine_operator_utils.py +0 -273
- apache_airflow_providers_google-14.0.0.dist-info/RECORD +0 -318
- /airflow/providers/google/cloud/{example_dags → bundles}/__init__.py +0 -0
- {apache_airflow_providers_google-14.0.0.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0
|
@@ -19,7 +19,12 @@ from __future__ import annotations
|
|
|
19
19
|
|
|
20
20
|
import shlex
|
|
21
21
|
from collections.abc import Sequence
|
|
22
|
-
from typing import TYPE_CHECKING
|
|
22
|
+
from typing import TYPE_CHECKING, Any
|
|
23
|
+
|
|
24
|
+
from google.api_core.exceptions import AlreadyExists, NotFound
|
|
25
|
+
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
|
26
|
+
from google.cloud.orchestration.airflow.service_v1 import ImageVersion
|
|
27
|
+
from google.cloud.orchestration.airflow.service_v1.types import Environment, ExecuteAirflowCommandResponse
|
|
23
28
|
|
|
24
29
|
from airflow.configuration import conf
|
|
25
30
|
from airflow.exceptions import AirflowException
|
|
@@ -31,16 +36,13 @@ from airflow.providers.google.cloud.triggers.cloud_composer import (
|
|
|
31
36
|
CloudComposerExecutionTrigger,
|
|
32
37
|
)
|
|
33
38
|
from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
|
|
34
|
-
from google.api_core.exceptions import AlreadyExists
|
|
35
|
-
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
|
36
|
-
from google.cloud.orchestration.airflow.service_v1 import ImageVersion
|
|
37
|
-
from google.cloud.orchestration.airflow.service_v1.types import Environment, ExecuteAirflowCommandResponse
|
|
38
39
|
|
|
39
40
|
if TYPE_CHECKING:
|
|
40
|
-
from airflow.utils.context import Context
|
|
41
41
|
from google.api_core.retry import Retry
|
|
42
42
|
from google.protobuf.field_mask_pb2 import FieldMask
|
|
43
43
|
|
|
44
|
+
from airflow.providers.common.compat.sdk import Context
|
|
45
|
+
|
|
44
46
|
CLOUD_COMPOSER_BASE_LINK = "https://console.cloud.google.com/composer/environments"
|
|
45
47
|
CLOUD_COMPOSER_DETAILS_LINK = (
|
|
46
48
|
CLOUD_COMPOSER_BASE_LINK + "/detail/{region}/{environment_id}/monitoring?project={project_id}"
|
|
@@ -55,25 +57,6 @@ class CloudComposerEnvironmentLink(BaseGoogleLink):
|
|
|
55
57
|
key = "composer_conf"
|
|
56
58
|
format_str = CLOUD_COMPOSER_DETAILS_LINK
|
|
57
59
|
|
|
58
|
-
@staticmethod
|
|
59
|
-
def persist(
|
|
60
|
-
operator_instance: (
|
|
61
|
-
CloudComposerCreateEnvironmentOperator
|
|
62
|
-
| CloudComposerUpdateEnvironmentOperator
|
|
63
|
-
| CloudComposerGetEnvironmentOperator
|
|
64
|
-
),
|
|
65
|
-
context: Context,
|
|
66
|
-
) -> None:
|
|
67
|
-
operator_instance.xcom_push(
|
|
68
|
-
context,
|
|
69
|
-
key=CloudComposerEnvironmentLink.key,
|
|
70
|
-
value={
|
|
71
|
-
"project_id": operator_instance.project_id,
|
|
72
|
-
"region": operator_instance.region,
|
|
73
|
-
"environment_id": operator_instance.environment_id,
|
|
74
|
-
},
|
|
75
|
-
)
|
|
76
|
-
|
|
77
60
|
|
|
78
61
|
class CloudComposerEnvironmentsLink(BaseGoogleLink):
|
|
79
62
|
"""Helper class for constructing Cloud Composer Environment Link."""
|
|
@@ -82,16 +65,6 @@ class CloudComposerEnvironmentsLink(BaseGoogleLink):
|
|
|
82
65
|
key = "composer_conf"
|
|
83
66
|
format_str = CLOUD_COMPOSER_ENVIRONMENTS_LINK
|
|
84
67
|
|
|
85
|
-
@staticmethod
|
|
86
|
-
def persist(operator_instance: CloudComposerListEnvironmentsOperator, context: Context) -> None:
|
|
87
|
-
operator_instance.xcom_push(
|
|
88
|
-
context,
|
|
89
|
-
key=CloudComposerEnvironmentsLink.key,
|
|
90
|
-
value={
|
|
91
|
-
"project_id": operator_instance.project_id,
|
|
92
|
-
},
|
|
93
|
-
)
|
|
94
|
-
|
|
95
68
|
|
|
96
69
|
class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
97
70
|
"""
|
|
@@ -157,6 +130,14 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
157
130
|
self.deferrable = deferrable
|
|
158
131
|
self.pooling_period_seconds = pooling_period_seconds
|
|
159
132
|
|
|
133
|
+
@property
|
|
134
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
135
|
+
return {
|
|
136
|
+
"project_id": self.project_id,
|
|
137
|
+
"region": self.region,
|
|
138
|
+
"environment_id": self.environment_id,
|
|
139
|
+
}
|
|
140
|
+
|
|
160
141
|
def execute(self, context: Context):
|
|
161
142
|
hook = CloudComposerHook(
|
|
162
143
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -169,7 +150,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
169
150
|
else:
|
|
170
151
|
self.environment["name"] = name
|
|
171
152
|
|
|
172
|
-
CloudComposerEnvironmentLink.persist(
|
|
153
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
173
154
|
try:
|
|
174
155
|
result = hook.create_environment(
|
|
175
156
|
project_id=self.project_id,
|
|
@@ -184,18 +165,17 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
184
165
|
if not self.deferrable:
|
|
185
166
|
environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
|
|
186
167
|
return Environment.to_dict(environment)
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
)
|
|
168
|
+
self.defer(
|
|
169
|
+
trigger=CloudComposerExecutionTrigger(
|
|
170
|
+
project_id=self.project_id,
|
|
171
|
+
region=self.region,
|
|
172
|
+
operation_name=result.operation.name,
|
|
173
|
+
gcp_conn_id=self.gcp_conn_id,
|
|
174
|
+
impersonation_chain=self.impersonation_chain,
|
|
175
|
+
pooling_period_seconds=self.pooling_period_seconds,
|
|
176
|
+
),
|
|
177
|
+
method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
|
|
178
|
+
)
|
|
199
179
|
except AlreadyExists:
|
|
200
180
|
environment = hook.get_environment(
|
|
201
181
|
project_id=self.project_id,
|
|
@@ -223,8 +203,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
223
203
|
metadata=self.metadata,
|
|
224
204
|
)
|
|
225
205
|
return Environment.to_dict(env)
|
|
226
|
-
|
|
227
|
-
raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
|
|
206
|
+
raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
|
|
228
207
|
|
|
229
208
|
|
|
230
209
|
class CloudComposerDeleteEnvironmentOperator(GoogleCloudBaseOperator):
|
|
@@ -370,6 +349,14 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
370
349
|
self.gcp_conn_id = gcp_conn_id
|
|
371
350
|
self.impersonation_chain = impersonation_chain
|
|
372
351
|
|
|
352
|
+
@property
|
|
353
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
354
|
+
return {
|
|
355
|
+
"project_id": self.project_id,
|
|
356
|
+
"region": self.region,
|
|
357
|
+
"environment_id": self.environment_id,
|
|
358
|
+
}
|
|
359
|
+
|
|
373
360
|
def execute(self, context: Context):
|
|
374
361
|
hook = CloudComposerHook(
|
|
375
362
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -384,8 +371,7 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
384
371
|
timeout=self.timeout,
|
|
385
372
|
metadata=self.metadata,
|
|
386
373
|
)
|
|
387
|
-
|
|
388
|
-
CloudComposerEnvironmentLink.persist(operator_instance=self, context=context)
|
|
374
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
389
375
|
return Environment.to_dict(result)
|
|
390
376
|
|
|
391
377
|
|
|
@@ -445,12 +431,17 @@ class CloudComposerListEnvironmentsOperator(GoogleCloudBaseOperator):
|
|
|
445
431
|
self.gcp_conn_id = gcp_conn_id
|
|
446
432
|
self.impersonation_chain = impersonation_chain
|
|
447
433
|
|
|
434
|
+
@property
|
|
435
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
436
|
+
return {
|
|
437
|
+
"project_id": self.project_id,
|
|
438
|
+
}
|
|
439
|
+
|
|
448
440
|
def execute(self, context: Context):
|
|
449
441
|
hook = CloudComposerHook(
|
|
450
442
|
gcp_conn_id=self.gcp_conn_id,
|
|
451
443
|
impersonation_chain=self.impersonation_chain,
|
|
452
444
|
)
|
|
453
|
-
CloudComposerEnvironmentsLink.persist(operator_instance=self, context=context)
|
|
454
445
|
result = hook.list_environments(
|
|
455
446
|
project_id=self.project_id,
|
|
456
447
|
region=self.region,
|
|
@@ -532,6 +523,14 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
532
523
|
self.deferrable = deferrable
|
|
533
524
|
self.pooling_period_seconds = pooling_period_seconds
|
|
534
525
|
|
|
526
|
+
@property
|
|
527
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
528
|
+
return {
|
|
529
|
+
"project_id": self.project_id,
|
|
530
|
+
"region": self.region,
|
|
531
|
+
"environment_id": self.environment_id,
|
|
532
|
+
}
|
|
533
|
+
|
|
535
534
|
def execute(self, context: Context):
|
|
536
535
|
hook = CloudComposerHook(
|
|
537
536
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -549,22 +548,21 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
549
548
|
metadata=self.metadata,
|
|
550
549
|
)
|
|
551
550
|
|
|
552
|
-
CloudComposerEnvironmentLink.persist(
|
|
551
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
553
552
|
if not self.deferrable:
|
|
554
553
|
environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
|
|
555
554
|
return Environment.to_dict(environment)
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
)
|
|
555
|
+
self.defer(
|
|
556
|
+
trigger=CloudComposerExecutionTrigger(
|
|
557
|
+
project_id=self.project_id,
|
|
558
|
+
region=self.region,
|
|
559
|
+
operation_name=result.operation.name,
|
|
560
|
+
gcp_conn_id=self.gcp_conn_id,
|
|
561
|
+
impersonation_chain=self.impersonation_chain,
|
|
562
|
+
pooling_period_seconds=self.pooling_period_seconds,
|
|
563
|
+
),
|
|
564
|
+
method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
|
|
565
|
+
)
|
|
568
566
|
|
|
569
567
|
def execute_complete(self, context: Context, event: dict):
|
|
570
568
|
if event["operation_done"]:
|
|
@@ -582,8 +580,7 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
582
580
|
metadata=self.metadata,
|
|
583
581
|
)
|
|
584
582
|
return Environment.to_dict(env)
|
|
585
|
-
|
|
586
|
-
raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
|
|
583
|
+
raise AirflowException(f"Unexpected error in the operation: {event['operation_name']}")
|
|
587
584
|
|
|
588
585
|
|
|
589
586
|
class CloudComposerListImageVersionsOperator(GoogleCloudBaseOperator):
|
|
@@ -767,9 +764,15 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
|
|
|
767
764
|
metadata=self.metadata,
|
|
768
765
|
poll_interval=self.poll_interval,
|
|
769
766
|
)
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
767
|
+
exit_code = result.get("exit_info", {}).get("exit_code")
|
|
768
|
+
if exit_code == 0:
|
|
769
|
+
result_str = self._merge_cmd_output_result(result)
|
|
770
|
+
self.log.info("Command execution result:\n%s", result_str)
|
|
771
|
+
return result
|
|
772
|
+
|
|
773
|
+
error_output = "".join(line["content"] for line in result.get("error", []))
|
|
774
|
+
message = f"Airflow CLI command failed with exit code {exit_code}.\nError output:\n{error_output}"
|
|
775
|
+
raise AirflowException(message)
|
|
773
776
|
|
|
774
777
|
def execute_complete(self, context: Context, event: dict) -> dict:
|
|
775
778
|
if event and event["status"] == "error":
|
|
@@ -795,3 +798,86 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
|
|
|
795
798
|
"""Merge output to one string."""
|
|
796
799
|
result_str = "\n".join(line_dict["content"] for line_dict in result["output"])
|
|
797
800
|
return result_str
|
|
801
|
+
|
|
802
|
+
|
|
803
|
+
class CloudComposerTriggerDAGRunOperator(GoogleCloudBaseOperator):
    """
    Trigger DAG run for provided Composer environment.

    :param project_id: The ID of the Google Cloud project that the service belongs to.
    :param region: The ID of the Google Cloud region that the service belongs to.
    :param environment_id: The ID of the Google Cloud environment that the service belongs to.
    :param composer_dag_id: The ID of DAG which will be triggered.
    :param composer_dag_conf: Configuration parameters for the DAG run.
    :param timeout: The timeout for this request.
    :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields = (
        "project_id",
        "region",
        "environment_id",
        "composer_dag_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        *,
        project_id: str,
        region: str,
        environment_id: str,
        composer_dag_id: str,
        composer_dag_conf: dict | None = None,
        timeout: float | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.project_id = project_id
        self.region = region
        self.environment_id = environment_id
        self.composer_dag_id = composer_dag_id
        # Normalize to an empty dict so a conf payload is always sent.
        self.composer_dag_conf = composer_dag_conf or {}
        self.timeout = timeout
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context):
        """Resolve the environment's Airflow URI and trigger the DAG run; returns the DAG run payload."""
        hook = CloudComposerHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )
        try:
            # The environment lookup is needed to obtain the Airflow web server
            # URI through which the Composer Airflow REST API is reached.
            environment = hook.get_environment(
                project_id=self.project_id,
                region=self.region,
                environment_id=self.environment_id,
                timeout=self.timeout,
            )
        except NotFound as not_found_err:
            self.log.info("The Composer environment %s does not exist.", self.environment_id)
            # Chain explicitly so the original NotFound is preserved as __cause__.
            raise AirflowException(not_found_err) from not_found_err
        composer_airflow_uri = environment.config.airflow_uri

        self.log.info(
            "Triggering the DAG %s on the %s environment...", self.composer_dag_id, self.environment_id
        )
        dag_run = hook.trigger_dag_run(
            composer_airflow_uri=composer_airflow_uri,
            composer_dag_id=self.composer_dag_id,
            composer_dag_conf=self.composer_dag_conf,
            timeout=self.timeout,
        )
        self.log.info("The DAG %s was triggered with Run ID: %s", self.composer_dag_id, dag_run["dag_run_id"])

        return dag_run
|
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
|
3
|
+
# or more contributor license agreements. See the NOTICE file
|
|
4
|
+
# distributed with this work for additional information
|
|
5
|
+
# regarding copyright ownership. The ASF licenses this file
|
|
6
|
+
# to you under the Apache License, Version 2.0 (the
|
|
7
|
+
# "License"); you may not use this file except in compliance
|
|
8
|
+
# with the License. You may obtain a copy of the License at
|
|
9
|
+
#
|
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
11
|
+
#
|
|
12
|
+
# Unless required by applicable law or agreed to in writing,
|
|
13
|
+
# software distributed under the License is distributed on an
|
|
14
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
15
|
+
# KIND, either express or implied. See the License for the
|
|
16
|
+
# specific language governing permissions and limitations
|
|
17
|
+
# under the License.
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from collections.abc import Sequence
|
|
22
|
+
from typing import TYPE_CHECKING, Any
|
|
23
|
+
|
|
24
|
+
import google.cloud.exceptions
|
|
25
|
+
from google.api_core.exceptions import AlreadyExists
|
|
26
|
+
from google.cloud.logging_v2.types import LogSink
|
|
27
|
+
|
|
28
|
+
from airflow.exceptions import AirflowException
|
|
29
|
+
from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
|
|
30
|
+
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
|
|
31
|
+
|
|
32
|
+
if TYPE_CHECKING:
|
|
33
|
+
from google.protobuf.field_mask_pb2 import FieldMask
|
|
34
|
+
|
|
35
|
+
from airflow.providers.common.compat.sdk import Context
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _validate_inputs(obj, required_fields: list[str]) -> None:
|
|
39
|
+
"""Validate that all required fields are present on self."""
|
|
40
|
+
missing = [field for field in required_fields if not getattr(obj, field, None)]
|
|
41
|
+
if missing:
|
|
42
|
+
raise AirflowException(
|
|
43
|
+
f"Required parameters are missing: {missing}. These must be passed as keyword parameters."
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _get_field(obj, field_name):
|
|
48
|
+
"""Supports both dict and protobuf-like objects."""
|
|
49
|
+
if isinstance(obj, dict):
|
|
50
|
+
return obj.get(field_name)
|
|
51
|
+
return getattr(obj, field_name, None)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class CloudLoggingCreateSinkOperator(GoogleCloudBaseOperator):
    """
    Creates a Cloud Logging export sink in a GCP project.

    This operator creates a sink that exports log entries from Cloud Logging
    to destinations like Cloud Storage, BigQuery, or Pub/Sub.

    :param project_id: Required. ID of the Google Cloud project where the sink will be created.
    :param sink_config: Required. The full sink configuration as a dictionary or a LogSink object.
        See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
    :param unique_writer_identity: If True, creates a unique service account for the sink.
        If False, uses the default Google-managed service account.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud. Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "project_id",
        "sink_config",
        "gcp_conn_id",
        "impersonation_chain",
        "unique_writer_identity",
    )

    def __init__(
        self,
        project_id: str,
        sink_config: dict | LogSink,
        unique_writer_identity: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.sink_config = sink_config
        self.unique_writer_identity = unique_writer_identity
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict[str, Any]:
        """Create the sink and return it as a dict; if it already exists, return the existing sink."""
        _validate_inputs(self, required_fields=["project_id", "sink_config"])
        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

        try:
            self.log.info(
                "Creating log sink '%s' in project '%s'",
                _get_field(self.sink_config, "name"),
                self.project_id,
            )
            self.log.info("Destination: %s", _get_field(self.sink_config, "destination"))

            response = hook.create_sink(
                sink=self.sink_config,
                unique_writer_identity=self.unique_writer_identity,
                project_id=self.project_id,
            )

            self.log.info("Log sink created successfully: %s", response.name)

            if self.unique_writer_identity and hasattr(response, "writer_identity"):
                self.log.info("Writer identity: %s", response.writer_identity)
                self.log.info("Remember to grant appropriate permissions to the writer identity")

            # Convert the protobuf LogSink to a plain dict so it is XCom/JSON serializable.
            return LogSink.to_dict(response)

        except AlreadyExists:
            # Idempotent behavior: a pre-existing sink is fetched and returned
            # instead of failing the task.
            self.log.info(
                "Already existed log sink, sink_name=%s, project_id=%s",
                _get_field(self.sink_config, "name"),
                self.project_id,
            )
            existing_sink = hook.get_sink(
                sink_name=_get_field(self.sink_config, "name"), project_id=self.project_id
            )
            return LogSink.to_dict(existing_sink)

        except google.cloud.exceptions.GoogleCloudError:
            self.log.error("An error occurred. Exiting.")
            # Bare raise re-raises the active exception with its original traceback.
            raise
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class CloudLoggingDeleteSinkOperator(GoogleCloudBaseOperator):
    """
    Deletes a Cloud Logging export sink from a GCP project.

    :param sink_name: Required. Name of the sink to delete.
    :param project_id: Required. The ID of the Google Cloud project.
    :param gcp_conn_id: Optional. The connection ID to use for connecting to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("sink_name", "project_id", "gcp_conn_id", "impersonation_chain")

    def __init__(
        self,
        sink_name: str,
        project_id: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.sink_name = sink_name
        self.project_id = project_id
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Delete the named sink; raises NotFound if it does not exist."""
        _validate_inputs(self, ["sink_name", "project_id"])
        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

        try:
            self.log.info("Deleting log sink '%s' from project '%s'", self.sink_name, self.project_id)
            hook.delete_sink(sink_name=self.sink_name, project_id=self.project_id)
            self.log.info("Log sink '%s' deleted successfully", self.sink_name)

        except google.cloud.exceptions.NotFound:
            self.log.error("An error occurred. Not Found.")
            # Bare raise keeps the original traceback intact.
            raise
        except google.cloud.exceptions.GoogleCloudError:
            self.log.error("An error occurred. Exiting.")
            raise
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
class CloudLoggingUpdateSinkOperator(GoogleCloudBaseOperator):
    """
    Updates an existing Cloud Logging export sink.

    :param project_id: Required. The ID of the Google Cloud project that contains the sink.
    :param sink_name: Required. The name of the sink to update.
    :param sink_config: Required. The updated sink configuration. Can be a dictionary or a
        `google.cloud.logging_v2.types.LogSink` object. Refer to:
        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
    :param update_mask: Required. A FieldMask or dictionary specifying which fields of the sink
        should be updated. For example, to update the destination and filter, use:
        `{"paths": ["destination", "filter"]}`.
    :param unique_writer_identity: Optional. When set to True, a new unique service account
        will be created for the sink. Defaults to False.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "sink_name",
        "project_id",
        "update_mask",
        "sink_config",
        "unique_writer_identity",
        "gcp_conn_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        project_id: str,
        sink_name: str,
        sink_config: dict | LogSink,
        update_mask: FieldMask | dict,
        unique_writer_identity: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.sink_name = sink_name
        self.sink_config = sink_config
        self.update_mask = update_mask
        self.unique_writer_identity = unique_writer_identity
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict[str, Any]:
        """Apply the masked update to the sink and return the updated sink as a dict."""
        _validate_inputs(self, ["sink_name", "project_id", "sink_config", "update_mask"])
        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

        try:
            current_sink = hook.get_sink(sink_name=self.sink_name, project_id=self.project_id)
            self.log.info("Current log sink configuration: '%s'.", LogSink.to_dict(current_sink))

            self.log.info("Updating log sink '%s' in project '%s'", self.sink_name, self.project_id)
            if isinstance(self.update_mask, dict) and "paths" in self.update_mask:
                paths = self.update_mask["paths"]
            elif hasattr(self.update_mask, "paths"):
                paths = self.update_mask.paths
            else:
                # Defensive default: without it, an unexpected mask shape would
                # raise UnboundLocalError on the join below instead of letting
                # the hook report a meaningful validation error.
                paths = []

            self.log.info("Updating fields: %s", ", ".join(paths))

            response = hook.update_sink(
                sink_name=self.sink_name,
                sink=self.sink_config,
                unique_writer_identity=self.unique_writer_identity,
                project_id=self.project_id,
                update_mask=self.update_mask,
            )
            self.log.info("Log sink updated successfully: %s", response.name)
            return LogSink.to_dict(response)

        except google.cloud.exceptions.NotFound:
            self.log.error("An error occurred. Not Found.")
            # Bare raise keeps the original traceback intact.
            raise
        except google.cloud.exceptions.GoogleCloudError:
            self.log.error("An error occurred. Exiting.")
            raise
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
class CloudLoggingListSinksOperator(GoogleCloudBaseOperator):
    """
    Lists Cloud Logging export sinks in a Google Cloud project.

    :param project_id: Required. The ID of the Google Cloud project to list sinks from.
    :param page_size: Optional. The maximum number of sinks to return per page. Must be greater than 0.
        If None, the server will use a default value.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional. Service account or chained list of accounts to impersonate.
        If a string, the service account must grant the originating account the
        'Service Account Token Creator' IAM role.

        If a sequence, each account in the chain must grant this role to the next.
        The first account must grant it to the originating account (templated).
    """

    template_fields: Sequence[str] = ("project_id", "gcp_conn_id", "impersonation_chain", "page_size")

    def __init__(
        self,
        project_id: str,
        page_size: int | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.page_size = page_size
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> list[dict[str, Any]]:
        """Return all sinks in the project as a list of plain dicts."""
        _validate_inputs(self, ["project_id"])

        # Validate before creating the hook: the API rejects non-positive page sizes.
        if self.page_size is not None and self.page_size < 1:
            raise AirflowException("The page_size for the list sinks request must be greater than zero")

        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

        try:
            self.log.info("Listing log sinks in project '%s'", self.project_id)

            sinks = hook.list_sinks(project_id=self.project_id, page_size=self.page_size)

            result = [LogSink.to_dict(sink) for sink in sinks]
            self.log.info("Found %d log sinks", len(result))

            return result

        except google.cloud.exceptions.GoogleCloudError:
            self.log.error("An error occurred. Exiting.")
            # Bare raise re-raises with the original traceback.
            raise
|