apache-airflow-providers-google 15.1.0rc1__py3-none-any.whl → 19.1.0rc1__py3-none-any.whl
This diff shows the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- airflow/providers/google/3rd-party-licenses/NOTICE +2 -12
- airflow/providers/google/__init__.py +3 -3
- airflow/providers/google/ads/hooks/ads.py +39 -5
- airflow/providers/google/ads/operators/ads.py +2 -2
- airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -2
- airflow/providers/google/assets/gcs.py +1 -11
- airflow/providers/google/cloud/bundles/__init__.py +16 -0
- airflow/providers/google/cloud/bundles/gcs.py +161 -0
- airflow/providers/google/cloud/hooks/bigquery.py +166 -281
- airflow/providers/google/cloud/hooks/cloud_composer.py +287 -14
- airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
- airflow/providers/google/cloud/hooks/cloud_run.py +17 -9
- airflow/providers/google/cloud/hooks/cloud_sql.py +101 -22
- airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +27 -6
- airflow/providers/google/cloud/hooks/compute_ssh.py +5 -1
- airflow/providers/google/cloud/hooks/datacatalog.py +9 -1
- airflow/providers/google/cloud/hooks/dataflow.py +71 -94
- airflow/providers/google/cloud/hooks/datafusion.py +1 -1
- airflow/providers/google/cloud/hooks/dataplex.py +1 -1
- airflow/providers/google/cloud/hooks/dataprep.py +1 -1
- airflow/providers/google/cloud/hooks/dataproc.py +72 -71
- airflow/providers/google/cloud/hooks/gcs.py +111 -14
- airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
- airflow/providers/google/cloud/hooks/looker.py +6 -1
- airflow/providers/google/cloud/hooks/mlengine.py +3 -2
- airflow/providers/google/cloud/hooks/secret_manager.py +102 -10
- airflow/providers/google/cloud/hooks/spanner.py +73 -8
- airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
- airflow/providers/google/cloud/hooks/translate.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +0 -209
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -2
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +27 -1
- airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
- airflow/providers/google/cloud/hooks/vertex_ai/feature_store.py +307 -7
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -75
- airflow/providers/google/cloud/hooks/vertex_ai/ray.py +223 -0
- airflow/providers/google/cloud/hooks/vision.py +2 -2
- airflow/providers/google/cloud/hooks/workflows.py +1 -1
- airflow/providers/google/cloud/links/alloy_db.py +0 -46
- airflow/providers/google/cloud/links/base.py +77 -13
- airflow/providers/google/cloud/links/bigquery.py +0 -47
- airflow/providers/google/cloud/links/bigquery_dts.py +0 -20
- airflow/providers/google/cloud/links/bigtable.py +0 -48
- airflow/providers/google/cloud/links/cloud_build.py +0 -73
- airflow/providers/google/cloud/links/cloud_functions.py +0 -33
- airflow/providers/google/cloud/links/cloud_memorystore.py +0 -58
- airflow/providers/google/cloud/links/{life_sciences.py → cloud_run.py} +5 -27
- airflow/providers/google/cloud/links/cloud_sql.py +0 -33
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +17 -44
- airflow/providers/google/cloud/links/cloud_tasks.py +7 -26
- airflow/providers/google/cloud/links/compute.py +0 -58
- airflow/providers/google/cloud/links/data_loss_prevention.py +0 -169
- airflow/providers/google/cloud/links/datacatalog.py +23 -54
- airflow/providers/google/cloud/links/dataflow.py +0 -34
- airflow/providers/google/cloud/links/dataform.py +0 -64
- airflow/providers/google/cloud/links/datafusion.py +1 -96
- airflow/providers/google/cloud/links/dataplex.py +0 -154
- airflow/providers/google/cloud/links/dataprep.py +0 -24
- airflow/providers/google/cloud/links/dataproc.py +11 -95
- airflow/providers/google/cloud/links/datastore.py +0 -31
- airflow/providers/google/cloud/links/kubernetes_engine.py +9 -60
- airflow/providers/google/cloud/links/managed_kafka.py +0 -70
- airflow/providers/google/cloud/links/mlengine.py +0 -70
- airflow/providers/google/cloud/links/pubsub.py +0 -32
- airflow/providers/google/cloud/links/spanner.py +0 -33
- airflow/providers/google/cloud/links/stackdriver.py +0 -30
- airflow/providers/google/cloud/links/translate.py +17 -187
- airflow/providers/google/cloud/links/vertex_ai.py +28 -195
- airflow/providers/google/cloud/links/workflows.py +0 -52
- airflow/providers/google/cloud/log/gcs_task_handler.py +17 -9
- airflow/providers/google/cloud/log/stackdriver_task_handler.py +9 -6
- airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
- airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
- airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
- airflow/providers/google/cloud/openlineage/facets.py +102 -1
- airflow/providers/google/cloud/openlineage/mixins.py +10 -8
- airflow/providers/google/cloud/openlineage/utils.py +15 -1
- airflow/providers/google/cloud/operators/alloy_db.py +70 -55
- airflow/providers/google/cloud/operators/bigquery.py +73 -636
- airflow/providers/google/cloud/operators/bigquery_dts.py +3 -5
- airflow/providers/google/cloud/operators/bigtable.py +36 -7
- airflow/providers/google/cloud/operators/cloud_base.py +21 -1
- airflow/providers/google/cloud/operators/cloud_batch.py +2 -2
- airflow/providers/google/cloud/operators/cloud_build.py +75 -32
- airflow/providers/google/cloud/operators/cloud_composer.py +128 -40
- airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
- airflow/providers/google/cloud/operators/cloud_memorystore.py +69 -43
- airflow/providers/google/cloud/operators/cloud_run.py +23 -5
- airflow/providers/google/cloud/operators/cloud_sql.py +8 -16
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +92 -11
- airflow/providers/google/cloud/operators/compute.py +8 -40
- airflow/providers/google/cloud/operators/datacatalog.py +157 -21
- airflow/providers/google/cloud/operators/dataflow.py +38 -15
- airflow/providers/google/cloud/operators/dataform.py +15 -5
- airflow/providers/google/cloud/operators/datafusion.py +41 -20
- airflow/providers/google/cloud/operators/dataplex.py +193 -109
- airflow/providers/google/cloud/operators/dataprep.py +1 -5
- airflow/providers/google/cloud/operators/dataproc.py +78 -35
- airflow/providers/google/cloud/operators/dataproc_metastore.py +96 -88
- airflow/providers/google/cloud/operators/datastore.py +22 -6
- airflow/providers/google/cloud/operators/dlp.py +6 -29
- airflow/providers/google/cloud/operators/functions.py +16 -7
- airflow/providers/google/cloud/operators/gcs.py +10 -8
- airflow/providers/google/cloud/operators/gen_ai.py +389 -0
- airflow/providers/google/cloud/operators/kubernetes_engine.py +60 -99
- airflow/providers/google/cloud/operators/looker.py +1 -1
- airflow/providers/google/cloud/operators/managed_kafka.py +107 -52
- airflow/providers/google/cloud/operators/natural_language.py +1 -1
- airflow/providers/google/cloud/operators/pubsub.py +60 -14
- airflow/providers/google/cloud/operators/spanner.py +25 -12
- airflow/providers/google/cloud/operators/speech_to_text.py +1 -2
- airflow/providers/google/cloud/operators/stackdriver.py +1 -9
- airflow/providers/google/cloud/operators/tasks.py +1 -12
- airflow/providers/google/cloud/operators/text_to_speech.py +1 -2
- airflow/providers/google/cloud/operators/translate.py +40 -16
- airflow/providers/google/cloud/operators/translate_speech.py +1 -2
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +39 -19
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +29 -9
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +54 -26
- airflow/providers/google/cloud/operators/vertex_ai/dataset.py +70 -8
- airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +43 -9
- airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
- airflow/providers/google/cloud/operators/vertex_ai/feature_store.py +532 -1
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +135 -116
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +11 -9
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +57 -11
- airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +30 -7
- airflow/providers/google/cloud/operators/vertex_ai/ray.py +393 -0
- airflow/providers/google/cloud/operators/video_intelligence.py +1 -1
- airflow/providers/google/cloud/operators/vision.py +2 -2
- airflow/providers/google/cloud/operators/workflows.py +18 -15
- airflow/providers/google/cloud/sensors/bigquery.py +2 -2
- airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -2
- airflow/providers/google/cloud/sensors/bigtable.py +11 -4
- airflow/providers/google/cloud/sensors/cloud_composer.py +533 -29
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -2
- airflow/providers/google/cloud/sensors/dataflow.py +26 -9
- airflow/providers/google/cloud/sensors/dataform.py +2 -2
- airflow/providers/google/cloud/sensors/datafusion.py +4 -4
- airflow/providers/google/cloud/sensors/dataplex.py +2 -2
- airflow/providers/google/cloud/sensors/dataprep.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc.py +2 -2
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -2
- airflow/providers/google/cloud/sensors/gcs.py +4 -4
- airflow/providers/google/cloud/sensors/looker.py +2 -2
- airflow/providers/google/cloud/sensors/pubsub.py +4 -4
- airflow/providers/google/cloud/sensors/tasks.py +2 -2
- airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +2 -2
- airflow/providers/google/cloud/sensors/workflows.py +2 -2
- airflow/providers/google/cloud/transfers/adls_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +11 -8
- airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +4 -4
- airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +7 -3
- airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +12 -1
- airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +24 -10
- airflow/providers/google/cloud/transfers/bigquery_to_sql.py +104 -5
- airflow/providers/google/cloud/transfers/calendar_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +20 -12
- airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/gcs_to_local.py +5 -3
- airflow/providers/google/cloud/transfers/gcs_to_sftp.py +10 -4
- airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +6 -2
- airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -2
- airflow/providers/google/cloud/transfers/http_to_gcs.py +193 -0
- airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
- airflow/providers/google/cloud/transfers/oracle_to_gcs.py +36 -11
- airflow/providers/google/cloud/transfers/postgres_to_gcs.py +42 -9
- airflow/providers/google/cloud/transfers/s3_to_gcs.py +12 -6
- airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +13 -4
- airflow/providers/google/cloud/transfers/sheets_to_gcs.py +3 -3
- airflow/providers/google/cloud/transfers/sql_to_gcs.py +10 -10
- airflow/providers/google/cloud/triggers/bigquery.py +75 -34
- airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
- airflow/providers/google/cloud/triggers/cloud_composer.py +302 -46
- airflow/providers/google/cloud/triggers/cloud_run.py +2 -2
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +91 -1
- airflow/providers/google/cloud/triggers/dataflow.py +122 -0
- airflow/providers/google/cloud/triggers/datafusion.py +1 -1
- airflow/providers/google/cloud/triggers/dataplex.py +14 -2
- airflow/providers/google/cloud/triggers/dataproc.py +122 -52
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
- airflow/providers/google/cloud/triggers/mlengine.py +1 -1
- airflow/providers/google/cloud/triggers/pubsub.py +15 -19
- airflow/providers/google/cloud/utils/bigquery_get_data.py +1 -1
- airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
- airflow/providers/google/cloud/utils/field_validator.py +1 -2
- airflow/providers/google/common/auth_backend/google_openid.py +4 -4
- airflow/providers/google/common/deprecated.py +2 -1
- airflow/providers/google/common/hooks/base_google.py +27 -8
- airflow/providers/google/common/links/storage.py +0 -22
- airflow/providers/google/common/utils/get_secret.py +31 -0
- airflow/providers/google/common/utils/id_token_credentials.py +3 -4
- airflow/providers/google/firebase/operators/firestore.py +2 -2
- airflow/providers/google/get_provider_info.py +56 -52
- airflow/providers/google/go_module_utils.py +35 -3
- airflow/providers/google/leveldb/hooks/leveldb.py +26 -1
- airflow/providers/google/leveldb/operators/leveldb.py +2 -2
- airflow/providers/google/marketing_platform/hooks/display_video.py +3 -109
- airflow/providers/google/marketing_platform/links/analytics_admin.py +5 -14
- airflow/providers/google/marketing_platform/operators/analytics_admin.py +1 -2
- airflow/providers/google/marketing_platform/operators/campaign_manager.py +5 -5
- airflow/providers/google/marketing_platform/operators/display_video.py +28 -489
- airflow/providers/google/marketing_platform/operators/search_ads.py +2 -2
- airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -2
- airflow/providers/google/marketing_platform/sensors/display_video.py +3 -63
- airflow/providers/google/suite/hooks/calendar.py +1 -1
- airflow/providers/google/suite/hooks/sheets.py +15 -1
- airflow/providers/google/suite/operators/sheets.py +8 -3
- airflow/providers/google/suite/sensors/drive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -2
- airflow/providers/google/suite/transfers/gcs_to_sheets.py +1 -1
- airflow/providers/google/suite/transfers/local_to_drive.py +3 -3
- airflow/providers/google/suite/transfers/sql_to_sheets.py +5 -4
- airflow/providers/google/version_compat.py +15 -1
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/METADATA +92 -48
- apache_airflow_providers_google-19.1.0rc1.dist-info/RECORD +331 -0
- apache_airflow_providers_google-19.1.0rc1.dist-info/licenses/NOTICE +5 -0
- airflow/providers/google/cloud/hooks/automl.py +0 -673
- airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
- airflow/providers/google/cloud/links/automl.py +0 -193
- airflow/providers/google/cloud/operators/automl.py +0 -1362
- airflow/providers/google/cloud/operators/life_sciences.py +0 -119
- airflow/providers/google/cloud/operators/mlengine.py +0 -112
- apache_airflow_providers_google-15.1.0rc1.dist-info/RECORD +0 -321
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-15.1.0rc1.dist-info → apache_airflow_providers_google-19.1.0rc1.dist-info}/entry_points.txt +0 -0
- {airflow/providers/google → apache_airflow_providers_google-19.1.0rc1.dist-info/licenses}/LICENSE +0 -0
airflow/providers/google/cloud/operators/kubernetes_engine.py

@@ -57,7 +57,6 @@ from airflow.providers.google.cloud.triggers.kubernetes_engine import (
     GKEOperationTrigger,
     GKEStartPodTrigger,
 )
-from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.timezone import utcnow

@@ -77,7 +76,7 @@ if TYPE_CHECKING:
     from kubernetes.client.models import V1Job
     from pendulum import DateTime

-    from airflow.
+    from airflow.providers.common.compat.sdk import Context

 KUBE_CONFIG_ENV_VAR = "KUBECONFIG"

@@ -222,7 +221,6 @@ class GKEDeleteClusterOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
-    :param name: (Deprecated) The name of the resource to delete, in this case cluster name
     :param api_version: The api version to use
     :param deferrable: Run operator in the deferrable mode.
     :param poll_interval: Interval size which defines how often operation status is checked.

@@ -241,7 +239,6 @@ class GKEDeleteClusterOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
         cluster_name: str | None = None,
-        name: str | None = None,
         api_version: str = "v2",
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         poll_interval: int = 10,

@@ -251,36 +248,17 @@ class GKEDeleteClusterOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         super().__init__(*args, **kwargs)

         self.location = location
-        self.cluster_name = cluster_name
+        self.cluster_name = cluster_name
         self.use_internal_ip = use_internal_ip
         self.use_dns_endpoint = use_dns_endpoint
         self.project_id = project_id
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
-        self._name = name
         self.api_version = api_version
         self.deferrable = deferrable
         self.poll_interval = poll_interval
         self._check_input()

-    @property
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        use_instead="cluster_name",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def name(self) -> str | None:
-        return self._name
-
-    @name.setter
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        use_instead="cluster_name",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def name(self, name: str) -> None:
-        self._name = name
-
     def _check_input(self) -> None:
         if not all([self.project_id, self.cluster_name, self.location]):
             self.log.error("One of (project_id, cluster_name, location) is missing or incorrect")
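The hunks above remove the deprecated `name` alias from `GKEDeleteClusterOperator`, leaving `cluster_name` as the only way to identify the cluster. A minimal migration sketch follows; the task id, project, location, and cluster values are illustrative placeholders, not taken from this diff:

```python
from airflow.providers.google.cloud.operators.kubernetes_engine import GKEDeleteClusterOperator

# 15.x still accepted name="..." through a deprecated property; 19.x accepts cluster_name only.
delete_cluster = GKEDeleteClusterOperator(
    task_id="delete_cluster",      # placeholder task id
    project_id="my-project",       # placeholder GCP project
    location="europe-west1",       # placeholder location
    cluster_name="my-cluster",     # replaces the removed `name` parameter
)
```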
@@ -475,8 +453,15 @@ class GKECreateClusterOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
                 stacklevel=2,
             )

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "project_id": self.project_id,
+            "location": self.location,
+        }
+
     def execute(self, context: Context) -> str:
-        KubernetesEngineClusterLink.persist(context=context,
+        KubernetesEngineClusterLink.persist(context=context, cluster=self.body)

         try:
             operation = self.cluster_hook.create_cluster(

@@ -575,9 +560,16 @@ class GKEStartKueueInsideClusterOperator(GKEOperatorMixin, KubernetesInstallKueu
         self.use_dns_endpoint = use_dns_endpoint
         self.impersonation_chain = impersonation_chain

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "project_id": self.project_id,
+            "location": self.location,
+        }
+
     def execute(self, context: Context):
         cluster = self.cluster_hook.get_cluster(name=self.cluster_name, project_id=self.project_id)
-        KubernetesEngineClusterLink.persist(context=context,
+        KubernetesEngineClusterLink.persist(context=context, cluster=cluster)

         if self.cluster_hook.check_cluster_autoscaling_ability(cluster=cluster):
             super().execute(context)
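These hunks (and the Managed Kafka ones further down) introduce an `extra_links_params` property so that the extra-link classes are persisted with `persist(context=context)` alone instead of receiving every identifier as a keyword argument. A rough sketch of the pattern, using a hypothetical stand-in class rather than the provider's real base classes:

```python
from typing import Any


# Illustrative only: a hypothetical stand-in, not a provider class. In the real
# operators the extra-link machinery reads these values from the operator instead
# of receiving them in every persist() call.
class _ExampleGKEOperator:
    def __init__(self, project_id: str, location: str) -> None:
        self.project_id = project_id
        self.location = location

    @property
    def extra_links_params(self) -> dict[str, Any]:
        # Same keys the GKE operators expose in the hunks above.
        return {"project_id": self.project_id, "location": self.location}


op = _ExampleGKEOperator(project_id="my-project", location="europe-west1")
print(op.extra_links_params)  # {'project_id': 'my-project', 'location': 'europe-west1'}
```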
@@ -622,21 +614,15 @@ class GKEStartPodOperator(GKEOperatorMixin, KubernetesPodOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
-    :param regional: (Deprecated) The location param is region name.
     :param on_finish_action: What to do when the pod reaches its final state, or the execution is interrupted.
         If "delete_pod", the pod will be deleted regardless its state; if "delete_succeeded_pod",
         only succeeded pod will be deleted. You can set to "keep_pod" to keep the pod.
-        Current default is `
-    :param is_delete_operator_pod: (Deprecated) What to do when the pod reaches its final
-        state, or the execution is interrupted. If True, delete the
-        pod; if False, leave the pod. Current default is False, but this will be
-        changed in the next major release of this provider.
-        Deprecated - use `on_finish_action` instead.
+        Current default is `delete_pod`, but this will be changed in the next major release of this provider.
     :param deferrable: Run operator in the deferrable mode.
     """

     template_fields: Sequence[str] = tuple(
-        {"
+        {"deferrable"}
         | (set(KubernetesPodOperator.template_fields) - {"is_delete_operator_pod", "regional"})
         | set(GKEOperatorMixin.template_fields)
     )

@@ -651,30 +637,15 @@ class GKEStartPodOperator(GKEOperatorMixin, KubernetesPodOperator):
         project_id: str = PROVIDE_PROJECT_ID,
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
-        regional: bool | None = None,
         on_finish_action: str | None = None,
-        is_delete_operator_pod: bool | None = None,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         *args,
         **kwargs,
     ) -> None:
-        if
-            kwargs["on_finish_action"] = (
-                OnFinishAction.DELETE_POD if is_delete_operator_pod else OnFinishAction.KEEP_POD
-            )
-        elif on_finish_action is not None:
+        if on_finish_action is not None:
             kwargs["on_finish_action"] = OnFinishAction(on_finish_action)
         else:
-
-                f"You have not set parameter `on_finish_action` in class {self.__class__.__name__}. "
-                "Currently the default for this parameter is `keep_pod` but in a future release"
-                " the default will be changed to `delete_pod`. To ensure pods are not deleted in"
-                " the future you will need to set `on_finish_action=keep_pod` explicitly.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-            kwargs["on_finish_action"] = OnFinishAction.KEEP_POD
-
+            kwargs["on_finish_action"] = OnFinishAction.DELETE_POD
         super().__init__(*args, **kwargs)
         self.project_id = project_id
         self.location = location

@@ -683,9 +654,6 @@ class GKEStartPodOperator(GKEOperatorMixin, KubernetesPodOperator):
         self.use_internal_ip = use_internal_ip
         self.use_dns_endpoint = use_dns_endpoint
         self.impersonation_chain = impersonation_chain
-        self._regional = regional
-        if is_delete_operator_pod is not None:
-            self.is_delete_operator_pod = is_delete_operator_pod
         self.deferrable = deferrable

         # There is no need to manage the kube_config file, as it will be generated automatically.

@@ -693,42 +661,6 @@ class GKEStartPodOperator(GKEOperatorMixin, KubernetesPodOperator):
         if self.config_file:
             raise AirflowException("config_file is not an allowed parameter for the GKEStartPodOperator.")

-    @property
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        use_instead="on_finish_action",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def is_delete_operator_pod(self) -> bool | None:
-        return self._is_delete_operator_pod
-
-    @is_delete_operator_pod.setter
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        use_instead="on_finish_action",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def is_delete_operator_pod(self, is_delete_operator_pod) -> None:
-        self._is_delete_operator_pod = is_delete_operator_pod
-
-    @property
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        reason="The parameter is not in actual use.",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def regional(self) -> bool | None:
-        return self._regional
-
-    @regional.setter
-    @deprecated(
-        planned_removal_date="May 01, 2025",
-        reason="The parameter is not in actual use.",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def regional(self, regional) -> None:
-        self._regional = regional
-
     def invoke_defer_method(self, last_log_time: DateTime | None = None):
         """Redefine triggers which are being used in child classes."""
         trigger_start_time = utcnow()
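With `is_delete_operator_pod` and `regional` removed, pod cleanup on `GKEStartPodOperator` is now controlled solely by `on_finish_action`, and the fallback when it is not set becomes `delete_pod`. A minimal sketch of keeping the previous keep-the-pod behaviour; all identifiers are placeholders:

```python
from airflow.providers.google.cloud.operators.kubernetes_engine import GKEStartPodOperator

start_pod = GKEStartPodOperator(
    task_id="start_pod",            # placeholder task id
    project_id="my-project",        # placeholder GCP project
    location="europe-west1",        # placeholder location
    cluster_name="my-cluster",      # placeholder cluster
    name="example-pod",             # pod name (inherited KubernetesPodOperator argument)
    namespace="default",
    image="busybox",
    on_finish_action="keep_pod",    # explicit; omitting it now means delete_pod
)
```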
@@ -855,10 +787,10 @@ class GKEStartJobOperator(GKEOperatorMixin, KubernetesJobOperator):
             trigger=GKEJobTrigger(
                 cluster_url=self.cluster_url,
                 ssl_ca_cert=self.ssl_ca_cert,
-                job_name=self.job.metadata.name,
-                job_namespace=self.job.metadata.namespace,
-
-                pod_namespace=self.
+                job_name=self.job.metadata.name,
+                job_namespace=self.job.metadata.namespace,
+                pod_names=[pod.metadata.name for pod in self.pods],
+                pod_namespace=self.pods[0].metadata.namespace,
                 base_container_name=self.base_container_name,
                 gcp_conn_id=self.gcp_conn_id,
                 poll_interval=self.job_poll_interval,
@@ -936,7 +868,14 @@ class GKEDescribeJobOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
             self.cluster_name,
             self.job,
         )
-        KubernetesEngineJobLink.persist(
+        KubernetesEngineJobLink.persist(
+            context=context,
+            location=self.location,
+            cluster_name=self.cluster_name,
+            namespace=self.job.metadata.namespace,
+            job_name=self.job.metadata.name,
+            project_id=self.project_id,
+        )
         return None


@@ -1000,6 +939,15 @@ class GKEListJobsOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         self.namespace = namespace
         self.do_xcom_push = do_xcom_push

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_name": self.cluster_name,
+            "namespace": self.namespace,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context) -> dict:
         if self.namespace:
             jobs = self.hook.list_jobs_from_namespace(namespace=self.namespace)

@@ -1010,7 +958,7 @@ class GKEListJobsOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
         if self.do_xcom_push:
             ti = context["ti"]
             ti.xcom_push(key="jobs_list", value=V1JobList.to_dict(jobs))
-        KubernetesEngineWorkloadsLink.persist(context=context
+        KubernetesEngineWorkloadsLink.persist(context=context)
         return V1JobList.to_dict(jobs)

@@ -1352,8 +1300,14 @@ class GKESuspendJobOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
             self.name,
             self.cluster_name,
         )
-        KubernetesEngineJobLink.persist(
-
+        KubernetesEngineJobLink.persist(
+            context=context,
+            location=self.location,
+            cluster_name=self.cluster_name,
+            namespace=self.job.metadata.namespace,
+            job_name=self.job.metadata.name,
+            project_id=self.project_id,
+        )
         return k8s.V1Job.to_dict(self.job)


@@ -1426,6 +1380,13 @@ class GKEResumeJobOperator(GKEOperatorMixin, GoogleCloudBaseOperator):
             self.name,
             self.cluster_name,
         )
-        KubernetesEngineJobLink.persist(
+        KubernetesEngineJobLink.persist(
+            context=context,
+            location=self.location,
+            cluster_name=self.cluster_name,
+            namespace=self.job.metadata.namespace,
+            job_name=self.job.metadata.name,
+            project_id=self.project_id,
+        )

         return k8s.V1Job.to_dict(self.job)
airflow/providers/google/cloud/operators/looker.py

@@ -26,7 +26,7 @@ from airflow.providers.google.cloud.hooks.looker import LookerHook
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator

 if TYPE_CHECKING:
-    from airflow.
+    from airflow.providers.common.compat.sdk import Context


 class LookerStartPdtBuildOperator(GoogleCloudBaseOperator):
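Across these files the `TYPE_CHECKING`-only `Context` import now comes from `airflow.providers.common.compat.sdk`. Custom operators can follow the same style; a small sketch, where the operator class itself is hypothetical:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

from airflow.models import BaseOperator

if TYPE_CHECKING:
    from airflow.providers.common.compat.sdk import Context  # the import this diff standardizes on


class ExampleContextOperator(BaseOperator):  # hypothetical operator, not part of the provider
    def execute(self, context: Context) -> None:
        # The annotation is type-checking only; at runtime `context` is the usual Airflow context mapping.
        self.log.info("run_id=%s", context["run_id"])
```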
airflow/providers/google/cloud/operators/managed_kafka.py

@@ -21,7 +21,7 @@ from __future__ import annotations

 from collections.abc import Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any

 from google.api_core.exceptions import AlreadyExists, NotFound
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault

@@ -41,7 +41,7 @@ if TYPE_CHECKING:
     from google.api_core.retry import Retry
     from google.protobuf.field_mask_pb2 import FieldMask

-    from airflow.
+    from airflow.providers.common.compat.sdk import Context


 class ManagedKafkaBaseOperator(GoogleCloudBaseOperator):
@@ -150,9 +150,17 @@ class ManagedKafkaCreateClusterOperator(ManagedKafkaBaseOperator):
         self.cluster_id = cluster_id
         self.request_id = request_id

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
         self.log.info("Creating an Apache Kafka cluster.")
-        ApacheKafkaClusterLink.persist(context=context
+        ApacheKafkaClusterLink.persist(context=context)
         try:
             operation = self.hook.create_cluster(
                 project_id=self.project_id,

@@ -227,8 +235,14 @@ class ManagedKafkaListClustersOperator(ManagedKafkaBaseOperator):
         self.filter = filter
         self.order_by = order_by

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaClusterListLink.persist(context=context
+        ApacheKafkaClusterListLink.persist(context=context)
         self.log.info("Listing Clusters from location %s.", self.location)
         try:
             cluster_list_pager = self.hook.list_clusters(

@@ -242,8 +256,7 @@ class ManagedKafkaListClustersOperator(ManagedKafkaBaseOperator):
                 timeout=self.timeout,
                 metadata=self.metadata,
             )
-
-            context=context,
+            context["ti"].xcom_push(
                 key="cluster_page",
                 value=types.ListClustersResponse.to_dict(cluster_list_pager._response),
             )
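The listing operators now push their page payloads through `context["ti"].xcom_push(...)` under keys such as `cluster_page`. A downstream task can read the page back with a plain `xcom_pull`; a hedged sketch in which the task id is a placeholder and the `"clusters"` key is assumed from the `ListClustersResponse.to_dict()` payload:

```python
from airflow.decorators import task


@task
def read_cluster_page(ti=None):
    # "list_clusters" is a placeholder task_id for a ManagedKafkaListClustersOperator task;
    # that operator pushes the serialized ListClustersResponse under key="cluster_page".
    page = ti.xcom_pull(task_ids="list_clusters", key="cluster_page")
    # Assumes the to_dict() payload exposes the response's "clusters" field.
    return (page or {}).get("clusters", [])
```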
@@ -285,12 +298,16 @@ class ManagedKafkaGetClusterOperator(ManagedKafkaBaseOperator):
         super().__init__(*args, **kwargs)
         self.cluster_id = cluster_id

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaClusterLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-        )
+        ApacheKafkaClusterLink.persist(context=context)
         self.log.info("Getting Cluster: %s", self.cluster_id)
         try:
             cluster = self.hook.get_cluster(

@@ -362,12 +379,16 @@ class ManagedKafkaUpdateClusterOperator(ManagedKafkaBaseOperator):
         self.update_mask = update_mask
         self.request_id = request_id

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaClusterLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-        )
+        ApacheKafkaClusterLink.persist(context=context)
         self.log.info("Updating an Apache Kafka cluster.")
         try:
             operation = self.hook.update_cluster(
@@ -497,14 +518,18 @@ class ManagedKafkaCreateTopicOperator(ManagedKafkaBaseOperator):
         self.topic_id = topic_id
         self.topic = topic

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "topic_id": self.topic_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
         self.log.info("Creating an Apache Kafka topic.")
-        ApacheKafkaTopicLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-            topic_id=self.topic_id,
-        )
+        ApacheKafkaTopicLink.persist(context=context)
         try:
             topic_obj = self.hook.create_topic(
                 project_id=self.project_id,

@@ -574,8 +599,16 @@ class ManagedKafkaListTopicsOperator(ManagedKafkaBaseOperator):
         self.page_size = page_size
         self.page_token = page_token

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaClusterLink.persist(context=context
+        ApacheKafkaClusterLink.persist(context=context)
         self.log.info("Listing Topics for cluster %s.", self.cluster_id)
         try:
             topic_list_pager = self.hook.list_topics(

@@ -588,8 +621,7 @@ class ManagedKafkaListTopicsOperator(ManagedKafkaBaseOperator):
                 timeout=self.timeout,
                 metadata=self.metadata,
             )
-
-            context=context,
+            context["ti"].xcom_push(
                 key="topic_page",
                 value=types.ListTopicsResponse.to_dict(topic_list_pager._response),
             )
@@ -636,13 +668,17 @@ class ManagedKafkaGetTopicOperator(ManagedKafkaBaseOperator):
         self.cluster_id = cluster_id
         self.topic_id = topic_id

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "topic_id": self.topic_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaTopicLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-            topic_id=self.topic_id,
-        )
+        ApacheKafkaTopicLink.persist(context=context)
         self.log.info("Getting Topic: %s", self.topic_id)
         try:
             topic = self.hook.get_topic(

@@ -707,13 +743,17 @@ class ManagedKafkaUpdateTopicOperator(ManagedKafkaBaseOperator):
         self.topic = topic
         self.update_mask = update_mask

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "topic_id": self.topic_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaTopicLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-            topic_id=self.topic_id,
-        )
+        ApacheKafkaTopicLink.persist(context=context)
         self.log.info("Updating an Apache Kafka topic.")
         try:
             topic_obj = self.hook.update_topic(
@@ -833,8 +873,16 @@ class ManagedKafkaListConsumerGroupsOperator(ManagedKafkaBaseOperator):
         self.page_size = page_size
         self.page_token = page_token

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaClusterLink.persist(context=context
+        ApacheKafkaClusterLink.persist(context=context)
         self.log.info("Listing Consumer Groups for cluster %s.", self.cluster_id)
         try:
             consumer_group_list_pager = self.hook.list_consumer_groups(

@@ -847,8 +895,7 @@ class ManagedKafkaListConsumerGroupsOperator(ManagedKafkaBaseOperator):
                 timeout=self.timeout,
                 metadata=self.metadata,
             )
-
-            context=context,
+            context["ti"].xcom_push(
                 key="consumer_group_page",
                 value=types.ListConsumerGroupsResponse.to_dict(consumer_group_list_pager._response),
             )
@@ -895,13 +942,17 @@ class ManagedKafkaGetConsumerGroupOperator(ManagedKafkaBaseOperator):
         self.cluster_id = cluster_id
         self.consumer_group_id = consumer_group_id

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "consumer_group_id": self.consumer_group_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaConsumerGroupLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-            consumer_group_id=self.consumer_group_id,
-        )
+        ApacheKafkaConsumerGroupLink.persist(context=context)
         self.log.info("Getting Consumer Group: %s", self.consumer_group_id)
         try:
             consumer_group = self.hook.get_consumer_group(

@@ -971,13 +1022,17 @@ class ManagedKafkaUpdateConsumerGroupOperator(ManagedKafkaBaseOperator):
         self.consumer_group = consumer_group
         self.update_mask = update_mask

+    @property
+    def extra_links_params(self) -> dict[str, Any]:
+        return {
+            "location": self.location,
+            "cluster_id": self.cluster_id,
+            "consumer_group_id": self.consumer_group_id,
+            "project_id": self.project_id,
+        }
+
     def execute(self, context: Context):
-        ApacheKafkaConsumerGroupLink.persist(
-            context=context,
-            task_instance=self,
-            cluster_id=self.cluster_id,
-            consumer_group_id=self.consumer_group_id,
-        )
+        ApacheKafkaConsumerGroupLink.persist(context=context)
         self.log.info("Updating an Apache Kafka consumer group.")
         try:
             consumer_group_obj = self.hook.update_consumer_group(
airflow/providers/google/cloud/operators/natural_language.py

@@ -32,7 +32,7 @@ if TYPE_CHECKING:
     from google.api_core.retry import Retry
     from google.cloud.language_v1.types import Document, EncodingType

-    from airflow.
+    from airflow.providers.common.compat.sdk import Context


 MetaData = Sequence[tuple[str, str]]