apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/ads/hooks/ads.py +39 -5
- airflow/providers/google/ads/operators/ads.py +2 -2
- airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
- airflow/providers/google/assets/gcs.py +1 -11
- airflow/providers/google/cloud/bundles/__init__.py +16 -0
- airflow/providers/google/cloud/bundles/gcs.py +161 -0
- airflow/providers/google/cloud/hooks/bigquery.py +166 -281
- airflow/providers/google/cloud/hooks/cloud_composer.py +287 -14
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_run.py +17 -9
- airflow/providers/google/cloud/hooks/cloud_sql.py +101 -22
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +27 -6
- airflow/providers/google/cloud/hooks/compute_ssh.py +5 -1
- airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
- airflow/providers/google/cloud/hooks/dataflow.py +71 -94
- airflow/providers/google/cloud/hooks/datafusion.py +1 -1
- airflow/providers/google/cloud/hooks/dataplex.py +1 -1
- airflow/providers/google/cloud/hooks/dataprep.py +1 -1
- airflow/providers/google/cloud/hooks/dataproc.py +72 -71
- airflow/providers/google/cloud/hooks/gcs.py +111 -14
- airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/hooks/looker.py +6 -1
- airflow/providers/google/cloud/hooks/mlengine.py +3 -2
- airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
- airflow/providers/google/cloud/hooks/spanner.py +73 -8
- airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
- airflow/providers/google/cloud/hooks/translate.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -209
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -2
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +27 -1
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/hooks/vision.py +2 -2
- airflow/providers/google/cloud/hooks/workflows.py +1 -1
- airflow/providers/google/cloud/links/alloy_db.py +0 -46
- airflow/providers/google/cloud/links/base.py +77 -13
- airflow/providers/google/cloud/links/bigquery.py +0 -47
- airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
- airflow/providers/google/cloud/links/bigtable.py +0 -48
- airflow/providers/google/cloud/links/cloud_build.py +0 -73
- airflow/providers/google/cloud/links/cloud_functions.py +0 -33
- airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
- airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
- airflow/providers/google/cloud/links/cloud_sql.py +0 -33
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
- airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
- airflow/providers/google/cloud/links/compute.py +0 -58
- airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
- airflow/providers/google/cloud/links/datacatalog.py +23 -54
- airflow/providers/google/cloud/links/dataflow.py +0 -34
- airflow/providers/google/cloud/links/dataform.py +0 -64
- airflow/providers/google/cloud/links/datafusion.py +1 -96
- airflow/providers/google/cloud/links/dataplex.py +0 -154
- airflow/providers/google/cloud/links/dataprep.py +0 -24
- airflow/providers/google/cloud/links/dataproc.py +11 -95
- airflow/providers/google/cloud/links/datastore.py +0 -31
- airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
- airflow/providers/google/cloud/links/managed_kafka.py +0 -70
- airflow/providers/google/cloud/links/mlengine.py +0 -70
- airflow/providers/google/cloud/links/pubsub.py +0 -32
- airflow/providers/google/cloud/links/spanner.py +0 -33
- airflow/providers/google/cloud/links/stackdriver.py +0 -30
- airflow/providers/google/cloud/links/translate.py +17 -187
- airflow/providers/google/cloud/links/vertex_ai.py +28 -195
- airflow/providers/google/cloud/links/workflows.py +0 -52
- airflow/providers/google/cloud/log/gcs_task_handler.py +17 -9
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
- airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
- airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
- airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
- airflow/providers/google/cloud/openlineage/facets.py +102 -1
- airflow/providers/google/cloud/openlineage/mixins.py +10 -8
- airflow/providers/google/cloud/openlineage/utils.py +15 -1
- airflow/providers/google/cloud/operators/alloy_db.py +70 -55
- airflow/providers/google/cloud/operators/bigquery.py +73 -636
- airflow/providers/google/cloud/operators/bigquery_dts.py +3 -5
- airflow/providers/google/cloud/operators/bigtable.py +36 -7
- airflow/providers/google/cloud/operators/cloud_base.py +21 -1
- airflow/providers/google/cloud/operators/cloud_batch.py +2 -2
- airflow/providers/google/cloud/operators/cloud_build.py +75 -32
- airflow/providers/google/cloud/operators/cloud_composer.py +128 -40
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
- airflow/providers/google/cloud/operators/cloud_run.py +23 -5
- airflow/providers/google/cloud/operators/cloud_sql.py +8 -16
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -11
- airflow/providers/google/cloud/operators/compute.py +8 -40
- airflow/providers/google/cloud/operators/datacatalog.py +157 -21
- airflow/providers/google/cloud/operators/dataflow.py +38 -15
- airflow/providers/google/cloud/operators/dataform.py +15 -5
- airflow/providers/google/cloud/operators/datafusion.py +41 -20
- airflow/providers/google/cloud/operators/dataplex.py +193 -109
- airflow/providers/google/cloud/operators/dataprep.py +1 -5
- airflow/providers/google/cloud/operators/dataproc.py +78 -35
- airflow/providers/google/cloud/operators/dataproc_metastore.py +96 -88
- airflow/providers/google/cloud/operators/datastore.py +22 -6
- airflow/providers/google/cloud/operators/dlp.py +6 -29
- airflow/providers/google/cloud/operators/functions.py +16 -7
- airflow/providers/google/cloud/operators/gcs.py +10 -8
- airflow/providers/google/cloud/operators/gen_ai.py +389 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +60 -99
- airflow/providers/google/cloud/operators/looker.py +1 -1
- airflow/providers/google/cloud/operators/managed_kafka.py +107 -52
- airflow/providers/google/cloud/operators/natural_language.py +1 -1
- airflow/providers/google/cloud/operators/pubsub.py +60 -14
- airflow/providers/google/cloud/operators/spanner.py +25 -12
- airflow/providers/google/cloud/operators/speech_to_text.py +1 -2
- airflow/providers/google/cloud/operators/stackdriver.py +1 -9
- airflow/providers/google/cloud/operators/tasks.py +1 -12
- airflow/providers/google/cloud/operators/text_to_speech.py +1 -2
- airflow/providers/google/cloud/operators/translate.py +40 -16
- airflow/providers/google/cloud/operators/translate_speech.py +1 -2
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +29 -9
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +54 -26
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +11 -9
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +30 -7
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
- airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
- airflow/providers/google/cloud/operators/vision.py +2 -2
- airflow/providers/google/cloud/operators/workflows.py +18 -15
- airflow/providers/google/cloud/sensors/bigquery.py +2 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -2
- airflow/providers/google/cloud/sensors/bigtable.py +11 -4
- airflow/providers/google/cloud/sensors/cloud_composer.py +533 -29
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -2
- airflow/providers/google/cloud/sensors/dataflow.py +26 -9
- airflow/providers/google/cloud/sensors/dataform.py +2 -2
- airflow/providers/google/cloud/sensors/datafusion.py +4 -4
- airflow/providers/google/cloud/sensors/dataplex.py +2 -2
- airflow/providers/google/cloud/sensors/dataprep.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -2
- airflow/providers/google/cloud/sensors/gcs.py +4 -4
- airflow/providers/google/cloud/sensors/looker.py +2 -2
- airflow/providers/google/cloud/sensors/pubsub.py +4 -4
- airflow/providers/google/cloud/sensors/tasks.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
- airflow/providers/google/cloud/sensors/workflows.py +2 -2
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -4
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
- airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
- airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
- airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +20 -12
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
- airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
- airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
- airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +13 -4
- airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
- airflow/providers/google/cloud/triggers/bigquery.py +75 -34
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
- airflow/providers/google/cloud/triggers/cloud_composer.py +302 -46
- airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +91 -1
- airflow/providers/google/cloud/triggers/dataflow.py +122 -0
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataplex.py +14 -2
- airflow/providers/google/cloud/triggers/dataproc.py +122 -52
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +15 -19
- airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
- airflow/providers/google/cloud/utils/field_validator.py +1 -2
- airflow/providers/google/common/auth_backend/google_openid.py +4 -4
- airflow/providers/google/common/deprecated.py +2 -1
- airflow/providers/google/common/hooks/base_google.py +27 -8
- airflow/providers/google/common/links/storage.py +0 -22
- airflow/providers/google/common/utils/get_secret.py +31 -0
- airflow/providers/google/common/utils/id_token_credentials.py +3 -4
- airflow/providers/google/firebase/operators/firestore.py +2 -2
- airflow/providers/google/get_provider_info.py +56 -52
- airflow/providers/google/go_module_utils.py +35 -3
- airflow/providers/google/leveldb/hooks/leveldb.py +26 -1
- airflow/providers/google/leveldb/operators/leveldb.py +2 -2
- airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
- airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -2
- airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
- airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
- airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
- airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
- airflow/providers/google/marketing_platform/sensors/display_video.py +3 -63
- airflow/providers/google/suite/hooks/calendar.py +1 -1
- airflow/providers/google/suite/hooks/sheets.py +15 -1
- airflow/providers/google/suite/operators/sheets.py +8 -3
- airflow/providers/google/suite/sensors/drive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
- airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
- airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
- airflow/providers/google/version_compat.py +15 -1
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +92 -48
- apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
- apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
- airflow/providers/google/cloud/hooks/automl.py +0 -673
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/automl.py +0 -193
- airflow/providers/google/cloud/operators/automl.py +0 -1362
- airflow/providers/google/cloud/operators/life_sciences.py +0 -119
- airflow/providers/google/cloud/operators/mlengine.py +0 -112
- apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0
|
@@ -19,9 +19,9 @@ from __future__ import annotations
|
|
|
19
19
|
|
|
20
20
|
import shlex
|
|
21
21
|
from collections.abc import Sequence
|
|
22
|
-
from typing import TYPE_CHECKING
|
|
22
|
+
from typing import TYPE_CHECKING, Any
|
|
23
23
|
|
|
24
|
-
from google.api_core.exceptions import AlreadyExists
|
|
24
|
+
from google.api_core.exceptions import AlreadyExists, NotFound
|
|
25
25
|
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
|
|
26
26
|
from google.cloud.orchestration.airflow.service_v1 import ImageVersion
|
|
27
27
|
from google.cloud.orchestration.airflow.service_v1.types import Environment, ExecuteAirflowCommandResponse
|
|
@@ -41,7 +41,7 @@ if TYPE_CHECKING:
|
|
|
41
41
|
from google.api_core.retry import Retry
|
|
42
42
|
from google.protobuf.field_mask_pb2 import FieldMask
|
|
43
43
|
|
|
44
|
-
from airflow.
|
|
44
|
+
from airflow.providers.common.compat.sdk import Context
|
|
45
45
|
|
|
46
46
|
CLOUD_COMPOSER_BASE_LINK = "https://console.cloud.google.com/composer/environments"
|
|
47
47
|
CLOUD_COMPOSER_DETAILS_LINK = (
|
|
@@ -57,25 +57,6 @@ class CloudComposerEnvironmentLink(BaseGoogleLink):
|
|
|
57
57
|
key = "composer_conf"
|
|
58
58
|
format_str = CLOUD_COMPOSER_DETAILS_LINK
|
|
59
59
|
|
|
60
|
-
@staticmethod
|
|
61
|
-
def persist(
|
|
62
|
-
operator_instance: (
|
|
63
|
-
CloudComposerCreateEnvironmentOperator
|
|
64
|
-
| CloudComposerUpdateEnvironmentOperator
|
|
65
|
-
| CloudComposerGetEnvironmentOperator
|
|
66
|
-
),
|
|
67
|
-
context: Context,
|
|
68
|
-
) -> None:
|
|
69
|
-
operator_instance.xcom_push(
|
|
70
|
-
context,
|
|
71
|
-
key=CloudComposerEnvironmentLink.key,
|
|
72
|
-
value={
|
|
73
|
-
"project_id": operator_instance.project_id,
|
|
74
|
-
"region": operator_instance.region,
|
|
75
|
-
"environment_id": operator_instance.environment_id,
|
|
76
|
-
},
|
|
77
|
-
)
|
|
78
|
-
|
|
79
60
|
|
|
80
61
|
class CloudComposerEnvironmentsLink(BaseGoogleLink):
|
|
81
62
|
"""Helper class for constructing Cloud Composer Environment Link."""
|
|
@@ -84,16 +65,6 @@ class CloudComposerEnvironmentsLink(BaseGoogleLink):
|
|
|
84
65
|
key = "composer_conf"
|
|
85
66
|
format_str = CLOUD_COMPOSER_ENVIRONMENTS_LINK
|
|
86
67
|
|
|
87
|
-
@staticmethod
|
|
88
|
-
def persist(operator_instance: CloudComposerListEnvironmentsOperator, context: Context) -> None:
|
|
89
|
-
operator_instance.xcom_push(
|
|
90
|
-
context,
|
|
91
|
-
key=CloudComposerEnvironmentsLink.key,
|
|
92
|
-
value={
|
|
93
|
-
"project_id": operator_instance.project_id,
|
|
94
|
-
},
|
|
95
|
-
)
|
|
96
|
-
|
|
97
68
|
|
|
98
69
|
class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
99
70
|
"""
|
|
@@ -159,6 +130,14 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
159
130
|
self.deferrable = deferrable
|
|
160
131
|
self.pooling_period_seconds = pooling_period_seconds
|
|
161
132
|
|
|
133
|
+
@property
|
|
134
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
135
|
+
return {
|
|
136
|
+
"project_id": self.project_id,
|
|
137
|
+
"region": self.region,
|
|
138
|
+
"environment_id": self.environment_id,
|
|
139
|
+
}
|
|
140
|
+
|
|
162
141
|
def execute(self, context: Context):
|
|
163
142
|
hook = CloudComposerHook(
|
|
164
143
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -171,7 +150,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
171
150
|
else:
|
|
172
151
|
self.environment["name"] = name
|
|
173
152
|
|
|
174
|
-
CloudComposerEnvironmentLink.persist(
|
|
153
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
175
154
|
try:
|
|
176
155
|
result = hook.create_environment(
|
|
177
156
|
project_id=self.project_id,
|
|
@@ -370,6 +349,14 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
370
349
|
self.gcp_conn_id = gcp_conn_id
|
|
371
350
|
self.impersonation_chain = impersonation_chain
|
|
372
351
|
|
|
352
|
+
@property
|
|
353
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
354
|
+
return {
|
|
355
|
+
"project_id": self.project_id,
|
|
356
|
+
"region": self.region,
|
|
357
|
+
"environment_id": self.environment_id,
|
|
358
|
+
}
|
|
359
|
+
|
|
373
360
|
def execute(self, context: Context):
|
|
374
361
|
hook = CloudComposerHook(
|
|
375
362
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -384,8 +371,7 @@ class CloudComposerGetEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
384
371
|
timeout=self.timeout,
|
|
385
372
|
metadata=self.metadata,
|
|
386
373
|
)
|
|
387
|
-
|
|
388
|
-
CloudComposerEnvironmentLink.persist(operator_instance=self, context=context)
|
|
374
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
389
375
|
return Environment.to_dict(result)
|
|
390
376
|
|
|
391
377
|
|
|
@@ -445,12 +431,17 @@ class CloudComposerListEnvironmentsOperator(GoogleCloudBaseOperator):
|
|
|
445
431
|
self.gcp_conn_id = gcp_conn_id
|
|
446
432
|
self.impersonation_chain = impersonation_chain
|
|
447
433
|
|
|
434
|
+
@property
|
|
435
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
436
|
+
return {
|
|
437
|
+
"project_id": self.project_id,
|
|
438
|
+
}
|
|
439
|
+
|
|
448
440
|
def execute(self, context: Context):
|
|
449
441
|
hook = CloudComposerHook(
|
|
450
442
|
gcp_conn_id=self.gcp_conn_id,
|
|
451
443
|
impersonation_chain=self.impersonation_chain,
|
|
452
444
|
)
|
|
453
|
-
CloudComposerEnvironmentsLink.persist(operator_instance=self, context=context)
|
|
454
445
|
result = hook.list_environments(
|
|
455
446
|
project_id=self.project_id,
|
|
456
447
|
region=self.region,
|
|
@@ -532,6 +523,14 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
532
523
|
self.deferrable = deferrable
|
|
533
524
|
self.pooling_period_seconds = pooling_period_seconds
|
|
534
525
|
|
|
526
|
+
@property
|
|
527
|
+
def extra_links_params(self) -> dict[str, Any]:
|
|
528
|
+
return {
|
|
529
|
+
"project_id": self.project_id,
|
|
530
|
+
"region": self.region,
|
|
531
|
+
"environment_id": self.environment_id,
|
|
532
|
+
}
|
|
533
|
+
|
|
535
534
|
def execute(self, context: Context):
|
|
536
535
|
hook = CloudComposerHook(
|
|
537
536
|
gcp_conn_id=self.gcp_conn_id,
|
|
@@ -549,7 +548,7 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
|
|
|
549
548
|
metadata=self.metadata,
|
|
550
549
|
)
|
|
551
550
|
|
|
552
|
-
CloudComposerEnvironmentLink.persist(
|
|
551
|
+
CloudComposerEnvironmentLink.persist(context=context)
|
|
553
552
|
if not self.deferrable:
|
|
554
553
|
environment = hook.wait_for_operation(timeout=self.timeout, operation=result)
|
|
555
554
|
return Environment.to_dict(environment)
|
|
@@ -765,9 +764,15 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
|
|
|
765
764
|
metadata=self.metadata,
|
|
766
765
|
poll_interval=self.poll_interval,
|
|
767
766
|
)
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
767
|
+
exit_code = result.get("exit_info", {}).get("exit_code")
|
|
768
|
+
if exit_code == 0:
|
|
769
|
+
result_str = self._merge_cmd_output_result(result)
|
|
770
|
+
self.log.info("Command execution result:\n%s", result_str)
|
|
771
|
+
return result
|
|
772
|
+
|
|
773
|
+
error_output = "".join(line["content"] for line in result.get("error", []))
|
|
774
|
+
message = f"Airflow CLI command failed with exit code {exit_code}.\nError output:\n{error_output}"
|
|
775
|
+
raise AirflowException(message)
|
|
771
776
|
|
|
772
777
|
def execute_complete(self, context: Context, event: dict) -> dict:
|
|
773
778
|
if event and event["status"] == "error":
|
|
@@ -793,3 +798,86 @@ class CloudComposerRunAirflowCLICommandOperator(GoogleCloudBaseOperator):
|
|
|
793
798
|
"""Merge output to one string."""
|
|
794
799
|
result_str = "\n".join(line_dict["content"] for line_dict in result["output"])
|
|
795
800
|
return result_str
|
|
801
|
+
|
|
802
|
+
|
|
803
|
+
class CloudComposerTriggerDAGRunOperator(GoogleCloudBaseOperator):
|
|
804
|
+
"""
|
|
805
|
+
Trigger DAG run for provided Composer environment.
|
|
806
|
+
|
|
807
|
+
:param project_id: The ID of the Google Cloud project that the service belongs to.
|
|
808
|
+
:param region: The ID of the Google Cloud region that the service belongs to.
|
|
809
|
+
:param environment_id: The ID of the Google Cloud environment that the service belongs to.
|
|
810
|
+
:param composer_dag_id: The ID of DAG which will be triggered.
|
|
811
|
+
:param composer_dag_conf: Configuration parameters for the DAG run.
|
|
812
|
+
:param timeout: The timeout for this request.
|
|
813
|
+
:param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
|
|
814
|
+
:param impersonation_chain: Optional service account to impersonate using short-term
|
|
815
|
+
credentials, or chained list of accounts required to get the access_token
|
|
816
|
+
of the last account in the list, which will be impersonated in the request.
|
|
817
|
+
If set as a string, the account must grant the originating account
|
|
818
|
+
the Service Account Token Creator IAM role.
|
|
819
|
+
If set as a sequence, the identities from the list must grant
|
|
820
|
+
Service Account Token Creator IAM role to the directly preceding identity, with first
|
|
821
|
+
account from the list granting this role to the originating account (templated).
|
|
822
|
+
"""
|
|
823
|
+
|
|
824
|
+
template_fields = (
|
|
825
|
+
"project_id",
|
|
826
|
+
"region",
|
|
827
|
+
"environment_id",
|
|
828
|
+
"composer_dag_id",
|
|
829
|
+
"impersonation_chain",
|
|
830
|
+
)
|
|
831
|
+
|
|
832
|
+
def __init__(
|
|
833
|
+
self,
|
|
834
|
+
*,
|
|
835
|
+
project_id: str,
|
|
836
|
+
region: str,
|
|
837
|
+
environment_id: str,
|
|
838
|
+
composer_dag_id: str,
|
|
839
|
+
composer_dag_conf: dict | None = None,
|
|
840
|
+
timeout: float | None = None,
|
|
841
|
+
gcp_conn_id: str = "google_cloud_default",
|
|
842
|
+
impersonation_chain: str | Sequence[str] | None = None,
|
|
843
|
+
**kwargs,
|
|
844
|
+
) -> None:
|
|
845
|
+
super().__init__(**kwargs)
|
|
846
|
+
self.project_id = project_id
|
|
847
|
+
self.region = region
|
|
848
|
+
self.environment_id = environment_id
|
|
849
|
+
self.composer_dag_id = composer_dag_id
|
|
850
|
+
self.composer_dag_conf = composer_dag_conf or {}
|
|
851
|
+
self.timeout = timeout
|
|
852
|
+
self.gcp_conn_id = gcp_conn_id
|
|
853
|
+
self.impersonation_chain = impersonation_chain
|
|
854
|
+
|
|
855
|
+
def execute(self, context: Context):
|
|
856
|
+
hook = CloudComposerHook(
|
|
857
|
+
gcp_conn_id=self.gcp_conn_id,
|
|
858
|
+
impersonation_chain=self.impersonation_chain,
|
|
859
|
+
)
|
|
860
|
+
try:
|
|
861
|
+
environment = hook.get_environment(
|
|
862
|
+
project_id=self.project_id,
|
|
863
|
+
region=self.region,
|
|
864
|
+
environment_id=self.environment_id,
|
|
865
|
+
timeout=self.timeout,
|
|
866
|
+
)
|
|
867
|
+
except NotFound as not_found_err:
|
|
868
|
+
self.log.info("The Composer environment %s does not exist.", self.environment_id)
|
|
869
|
+
raise AirflowException(not_found_err)
|
|
870
|
+
composer_airflow_uri = environment.config.airflow_uri
|
|
871
|
+
|
|
872
|
+
self.log.info(
|
|
873
|
+
"Triggering the DAG %s on the %s environment...", self.composer_dag_id, self.environment_id
|
|
874
|
+
)
|
|
875
|
+
dag_run = hook.trigger_dag_run(
|
|
876
|
+
composer_airflow_uri=composer_airflow_uri,
|
|
877
|
+
composer_dag_id=self.composer_dag_id,
|
|
878
|
+
composer_dag_conf=self.composer_dag_conf,
|
|
879
|
+
timeout=self.timeout,
|
|
880
|
+
)
|
|
881
|
+
self.log.info("The DAG %s was triggered with Run ID: %s", self.composer_dag_id, dag_run["dag_run_id"])
|
|
882
|
+
|
|
883
|
+
return dag_run
|
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
|
3
|
+
# or more contributor license agreements. See the NOTICE file
|
|
4
|
+
# distributed with this work for additional information
|
|
5
|
+
# regarding copyright ownership. The ASF licenses this file
|
|
6
|
+
# to you under the Apache License, Version 2.0 (the
|
|
7
|
+
# "License"); you may not use this file except in compliance
|
|
8
|
+
# with the License. You may obtain a copy of the License at
|
|
9
|
+
#
|
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
11
|
+
#
|
|
12
|
+
# Unless required by applicable law or agreed to in writing,
|
|
13
|
+
# software distributed under the License is distributed on an
|
|
14
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
15
|
+
# KIND, either express or implied. See the License for the
|
|
16
|
+
# specific language governing permissions and limitations
|
|
17
|
+
# under the License.
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from collections.abc import Sequence
|
|
22
|
+
from typing import TYPE_CHECKING, Any
|
|
23
|
+
|
|
24
|
+
import google.cloud.exceptions
|
|
25
|
+
from google.api_core.exceptions import AlreadyExists
|
|
26
|
+
from google.cloud.logging_v2.types import LogSink
|
|
27
|
+
|
|
28
|
+
from airflow.exceptions import AirflowException
|
|
29
|
+
from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
|
|
30
|
+
from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
|
|
31
|
+
|
|
32
|
+
if TYPE_CHECKING:
|
|
33
|
+
from google.protobuf.field_mask_pb2 import FieldMask
|
|
34
|
+
|
|
35
|
+
from airflow.providers.common.compat.sdk import Context
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _validate_inputs(obj, required_fields: list[str]) -> None:
|
|
39
|
+
"""Validate that all required fields are present on self."""
|
|
40
|
+
missing = [field for field in required_fields if not getattr(obj, field, None)]
|
|
41
|
+
if missing:
|
|
42
|
+
raise AirflowException(
|
|
43
|
+
f"Required parameters are missing: {missing}. These must be passed as keyword parameters."
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _get_field(obj, field_name):
|
|
48
|
+
"""Supports both dict and protobuf-like objects."""
|
|
49
|
+
if isinstance(obj, dict):
|
|
50
|
+
return obj.get(field_name)
|
|
51
|
+
return getattr(obj, field_name, None)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class CloudLoggingCreateSinkOperator(GoogleCloudBaseOperator):
    """
    Create a Cloud Logging export sink in a GCP project.

    The sink routes log entries from Cloud Logging to a destination such as
    Cloud Storage, BigQuery, or Pub/Sub. If a sink with the same name already
    exists, the existing sink is fetched and returned instead of failing.

    :param project_id: Required. ID of the Google Cloud project where the sink will be created.
    :param sink_config: Required. The full sink configuration as a dictionary or a LogSink object.
        See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
    :param unique_writer_identity: If True, creates a unique service account for the sink.
        If False, uses the default Google-managed service account.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud. Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "project_id",
        "sink_config",
        "gcp_conn_id",
        "impersonation_chain",
        "unique_writer_identity",
    )

    def __init__(
        self,
        project_id: str,
        sink_config: dict | LogSink,
        unique_writer_identity: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.sink_config = sink_config
        self.unique_writer_identity = unique_writer_identity
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict[str, Any]:
        """Create the sink and return it as a dict; on name collision, return the existing sink."""
        _validate_inputs(self, required_fields=["project_id", "sink_config"])
        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
        # sink_config may be a dict or a LogSink proto; _get_field handles both.
        sink_name = _get_field(self.sink_config, "name")

        try:
            self.log.info(
                "Creating log sink '%s' in project '%s'",
                sink_name,
                self.project_id,
            )
            self.log.info("Destination: %s", _get_field(self.sink_config, "destination"))

            created_sink = hook.create_sink(
                sink=self.sink_config,
                unique_writer_identity=self.unique_writer_identity,
                project_id=self.project_id,
            )
            self.log.info("Log sink created successfully: %s", created_sink.name)

            if self.unique_writer_identity and hasattr(created_sink, "writer_identity"):
                # The caller must grant the new service account write access to the destination.
                self.log.info("Writer identity: %s", created_sink.writer_identity)
                self.log.info("Remember to grant appropriate permissions to the writer identity")

            return LogSink.to_dict(created_sink)

        except AlreadyExists:
            # Idempotent create: report and return the pre-existing sink.
            self.log.info(
                "Already existed log sink, sink_name=%s, project_id=%s",
                sink_name,
                self.project_id,
            )
            existing_sink = hook.get_sink(sink_name=sink_name, project_id=self.project_id)
            return LogSink.to_dict(existing_sink)

        except google.cloud.exceptions.GoogleCloudError as e:
            self.log.error("An error occurred. Exiting.")
            raise e
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class CloudLoggingDeleteSinkOperator(GoogleCloudBaseOperator):
    """
    Delete a Cloud Logging export sink from a GCP project.

    :param sink_name: Required. Name of the sink to delete.
    :param project_id: Required. The ID of the Google Cloud project.
    :param gcp_conn_id: Optional. The connection ID to use for connecting to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = ("sink_name", "project_id", "gcp_conn_id", "impersonation_chain")

    def __init__(
        self,
        sink_name: str,
        project_id: str,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.sink_name = sink_name
        self.project_id = project_id
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> None:
        """Delete the named sink; raises NotFound if it does not exist."""
        _validate_inputs(self, ["sink_name", "project_id"])
        hook = CloudLoggingHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )

        try:
            self.log.info("Deleting log sink '%s' from project '%s'", self.sink_name, self.project_id)
            hook.delete_sink(sink_name=self.sink_name, project_id=self.project_id)
            self.log.info("Log sink '%s' deleted successfully", self.sink_name)
        except google.cloud.exceptions.NotFound as missing_err:
            # Deleting a sink that does not exist is an error, not a no-op.
            self.log.error("An error occurred. Not Found.")
            raise missing_err
        except google.cloud.exceptions.GoogleCloudError as gcp_err:
            self.log.error("An error occurred. Exiting.")
            raise gcp_err
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
class CloudLoggingUpdateSinkOperator(GoogleCloudBaseOperator):
    """
    Updates an existing Cloud Logging export sink.

    :param project_id: Required. The ID of the Google Cloud project that contains the sink.
    :param sink_name: Required. The name of the sink to update.
    :param sink_config: Required. The updated sink configuration. Can be a dictionary or a
        `google.cloud.logging_v2.types.LogSink` object. Refer to:
        https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
    :param update_mask: Required. A FieldMask or dictionary specifying which fields of the sink
        should be updated. For example, to update the destination and filter, use:
        `{"paths": ["destination", "filter"]}`.
    :param unique_writer_identity: Optional. When set to True, a new unique service account
        will be created for the sink. Defaults to False.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional service account to impersonate using short-term
        credentials, or chained list of accounts required to get the access_token
        of the last account in the list, which will be impersonated in the request.
        If set as a string, the account must grant the originating account
        the Service Account Token Creator IAM role.
        If set as a sequence, the identities from the list must grant
        Service Account Token Creator IAM role to the directly preceding identity, with first
        account from the list granting this role to the originating account (templated).
    """

    template_fields: Sequence[str] = (
        "sink_name",
        "project_id",
        "update_mask",
        "sink_config",
        "unique_writer_identity",
        "gcp_conn_id",
        "impersonation_chain",
    )

    def __init__(
        self,
        project_id: str,
        sink_name: str,
        sink_config: dict | LogSink,
        update_mask: FieldMask | dict,
        unique_writer_identity: bool = False,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.sink_name = sink_name
        self.sink_config = sink_config
        self.update_mask = update_mask
        self.unique_writer_identity = unique_writer_identity
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> dict[str, Any]:
        """Update the sink and return its new configuration as a dict."""
        _validate_inputs(self, ["sink_name", "project_id", "sink_config", "update_mask"])
        hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)

        try:
            # Log the pre-update state so a failed/partial update can be diagnosed.
            current_sink = hook.get_sink(sink_name=self.sink_name, project_id=self.project_id)
            self.log.info("Current log sink configuration: '%s'.", LogSink.to_dict(current_sink))

            self.log.info("Updating log sink '%s' in project '%s'", self.sink_name, self.project_id)
            # update_mask may be a plain dict or a FieldMask-like object; extract
            # the field paths for logging. Default to an empty list so an
            # unexpected mask shape (e.g. a dict without "paths") cannot raise
            # UnboundLocalError at the join below.
            if isinstance(self.update_mask, dict) and "paths" in self.update_mask:
                paths = self.update_mask["paths"]
            elif hasattr(self.update_mask, "paths"):
                paths = self.update_mask.paths
            else:
                paths = []

            self.log.info("Updating fields: %s", ", ".join(paths))

            response = hook.update_sink(
                sink_name=self.sink_name,
                sink=self.sink_config,
                unique_writer_identity=self.unique_writer_identity,
                project_id=self.project_id,
                update_mask=self.update_mask,
            )
            self.log.info("Log sink updated successfully: %s", response.name)
            return LogSink.to_dict(response)

        except google.cloud.exceptions.NotFound:
            self.log.error("An error occurred. Not Found.")
            raise  # bare raise preserves the original traceback
        except google.cloud.exceptions.GoogleCloudError:
            self.log.error("An error occurred. Exiting.")
            raise
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
class CloudLoggingListSinksOperator(GoogleCloudBaseOperator):
    """
    List Cloud Logging export sinks in a Google Cloud project.

    :param project_id: Required. The ID of the Google Cloud project to list sinks from.
    :param page_size: Optional. The maximum number of sinks to return per page. Must be greater than 0.
        If None, the server will use a default value.
    :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
        Defaults to "google_cloud_default".
    :param impersonation_chain: Optional. Service account or chained list of accounts to impersonate.
        If a string, the service account must grant the originating account the
        'Service Account Token Creator' IAM role.

        If a sequence, each account in the chain must grant this role to the next.
        The first account must grant it to the originating account (templated).
    """

    template_fields: Sequence[str] = ("project_id", "gcp_conn_id", "impersonation_chain", "page_size")

    def __init__(
        self,
        project_id: str,
        page_size: int | None = None,
        gcp_conn_id: str = "google_cloud_default",
        impersonation_chain: str | Sequence[str] | None = None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.project_id = project_id
        self.page_size = page_size
        self.gcp_conn_id = gcp_conn_id
        self.impersonation_chain = impersonation_chain

    def execute(self, context: Context) -> list[dict[str, Any]]:
        """Return all sinks in the project as a list of dicts."""
        _validate_inputs(self, ["project_id"])

        # Fail fast on an invalid page size before creating the hook.
        if self.page_size is not None and self.page_size < 1:
            raise AirflowException("The page_size for the list sinks request must be greater than zero")

        hook = CloudLoggingHook(
            gcp_conn_id=self.gcp_conn_id,
            impersonation_chain=self.impersonation_chain,
        )

        try:
            self.log.info("Listing log sinks in project '%s'", self.project_id)
            sink_pager = hook.list_sinks(project_id=self.project_id, page_size=self.page_size)
            # Materialize the pager into JSON-serializable dicts for XCom.
            sink_dicts = [LogSink.to_dict(sink) for sink in sink_pager]
            self.log.info("Found %d log sinks", len(sink_dicts))
            return sink_dicts
        except google.cloud.exceptions.GoogleCloudError as gcp_err:
            self.log.error("An error occurred. Exiting.")
            raise gcp_err
|