apache-airflow-providers-google 16.1.0rc1__py3-none-any.whl → 17.0.0rc1__py3-none-any.whl

This diff shows the changes between these publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (66)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +1 -5
  3. airflow/providers/google/cloud/hooks/bigquery.py +1 -130
  4. airflow/providers/google/cloud/hooks/cloud_logging.py +109 -0
  5. airflow/providers/google/cloud/hooks/cloud_run.py +1 -1
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +5 -5
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +1 -1
  8. airflow/providers/google/cloud/hooks/dataflow.py +0 -85
  9. airflow/providers/google/cloud/hooks/datafusion.py +1 -1
  10. airflow/providers/google/cloud/hooks/dataprep.py +1 -4
  11. airflow/providers/google/cloud/hooks/dataproc.py +68 -70
  12. airflow/providers/google/cloud/hooks/gcs.py +3 -5
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +2 -2
  14. airflow/providers/google/cloud/hooks/looker.py +1 -5
  15. airflow/providers/google/cloud/hooks/stackdriver.py +10 -8
  16. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +4 -4
  17. airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py +202 -0
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +7 -0
  19. airflow/providers/google/cloud/links/kubernetes_engine.py +3 -0
  20. airflow/providers/google/cloud/log/gcs_task_handler.py +2 -2
  21. airflow/providers/google/cloud/log/stackdriver_task_handler.py +1 -1
  22. airflow/providers/google/cloud/openlineage/mixins.py +7 -7
  23. airflow/providers/google/cloud/operators/automl.py +1 -1
  24. airflow/providers/google/cloud/operators/bigquery.py +8 -609
  25. airflow/providers/google/cloud/operators/cloud_logging_sink.py +341 -0
  26. airflow/providers/google/cloud/operators/cloud_sql.py +1 -5
  27. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +2 -2
  28. airflow/providers/google/cloud/operators/dataproc.py +1 -1
  29. airflow/providers/google/cloud/operators/dlp.py +2 -2
  30. airflow/providers/google/cloud/operators/kubernetes_engine.py +4 -4
  31. airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py +435 -0
  32. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +7 -1
  33. airflow/providers/google/cloud/operators/vertex_ai/ray.py +7 -5
  34. airflow/providers/google/cloud/operators/vision.py +1 -1
  35. airflow/providers/google/cloud/sensors/dataflow.py +23 -6
  36. airflow/providers/google/cloud/sensors/datafusion.py +2 -2
  37. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
  38. airflow/providers/google/cloud/transfers/gcs_to_local.py +3 -1
  39. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +9 -9
  40. airflow/providers/google/cloud/triggers/bigquery.py +11 -13
  41. airflow/providers/google/cloud/triggers/cloud_build.py +1 -1
  42. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  43. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +1 -1
  44. airflow/providers/google/cloud/triggers/datafusion.py +1 -1
  45. airflow/providers/google/cloud/triggers/dataproc.py +10 -9
  46. airflow/providers/google/cloud/triggers/kubernetes_engine.py +45 -27
  47. airflow/providers/google/cloud/triggers/mlengine.py +1 -1
  48. airflow/providers/google/cloud/triggers/pubsub.py +1 -1
  49. airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
  50. airflow/providers/google/common/auth_backend/google_openid.py +2 -2
  51. airflow/providers/google/common/hooks/base_google.py +2 -6
  52. airflow/providers/google/common/utils/id_token_credentials.py +2 -2
  53. airflow/providers/google/get_provider_info.py +19 -16
  54. airflow/providers/google/leveldb/hooks/leveldb.py +1 -5
  55. airflow/providers/google/marketing_platform/hooks/display_video.py +47 -3
  56. airflow/providers/google/marketing_platform/links/analytics_admin.py +1 -1
  57. airflow/providers/google/marketing_platform/operators/display_video.py +64 -15
  58. airflow/providers/google/marketing_platform/sensors/display_video.py +9 -2
  59. airflow/providers/google/version_compat.py +10 -3
  60. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/METADATA +99 -93
  61. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/RECORD +63 -62
  62. airflow/providers/google/cloud/hooks/life_sciences.py +0 -159
  63. airflow/providers/google/cloud/links/life_sciences.py +0 -30
  64. airflow/providers/google/cloud/operators/life_sciences.py +0 -118
  65. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/WHEEL +0 -0
  66. {apache_airflow_providers_google-16.1.0rc1.dist-info → apache_airflow_providers_google-17.0.0rc1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,341 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ from collections.abc import Sequence
+ from typing import TYPE_CHECKING, Any
+
+ import google.cloud.exceptions
+ from google.api_core.exceptions import AlreadyExists
+ from google.cloud.logging_v2.types import LogSink
+
+ from airflow.exceptions import AirflowException
+ from airflow.providers.google.cloud.hooks.cloud_logging import CloudLoggingHook
+ from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+
+ if TYPE_CHECKING:
+     from google.protobuf.field_mask_pb2 import FieldMask
+
+     from airflow.utils.context import Context
+
+
+ def _validate_inputs(obj, required_fields: list[str]) -> None:
+     """Validate that all required fields are present on self."""
+     missing = [field for field in required_fields if not getattr(obj, field, None)]
+     if missing:
+         raise AirflowException(
+             f"Required parameters are missing: {missing}. These must be passed as keyword parameters."
+         )
+
+
+ def _get_field(obj, field_name):
+     """Supports both dict and protobuf-like objects."""
+     if isinstance(obj, dict):
+         return obj.get(field_name)
+     return getattr(obj, field_name, None)
+
+
+ class CloudLoggingCreateSinkOperator(GoogleCloudBaseOperator):
+     """
+     Creates a Cloud Logging export sink in a GCP project.
+
+     This operator creates a sink that exports log entries from Cloud Logging
+     to destinations like Cloud Storage, BigQuery, or Pub/Sub.
+
+     :param project_id: Required. ID of the Google Cloud project where the sink will be created.
+     :param sink_config: Required. The full sink configuration as a dictionary or a LogSink object.
+         See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+     :param unique_writer_identity: If True, creates a unique service account for the sink.
+         If False, uses the default Google-managed service account.
+     :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud. Defaults to "google_cloud_default".
+     :param impersonation_chain: Optional service account to impersonate using short-term
+         credentials, or chained list of accounts required to get the access_token
+         of the last account in the list, which will be impersonated in the request.
+         If set as a string, the account must grant the originating account
+         the Service Account Token Creator IAM role.
+         If set as a sequence, the identities from the list must grant
+         Service Account Token Creator IAM role to the directly preceding identity, with first
+         account from the list granting this role to the originating account (templated).
+     """
+
+     template_fields: Sequence[str] = (
+         "project_id",
+         "sink_config",
+         "gcp_conn_id",
+         "impersonation_chain",
+         "unique_writer_identity",
+     )
+
+     def __init__(
+         self,
+         project_id: str,
+         sink_config: dict | LogSink,
+         unique_writer_identity: bool = False,
+         gcp_conn_id: str = "google_cloud_default",
+         impersonation_chain: str | Sequence[str] | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.project_id = project_id
+         self.sink_config = sink_config
+         self.unique_writer_identity = unique_writer_identity
+         self.gcp_conn_id = gcp_conn_id
+         self.impersonation_chain = impersonation_chain
+
+     def execute(self, context: Context) -> dict[str, Any]:
+         """Execute the operator."""
+         _validate_inputs(self, required_fields=["project_id", "sink_config"])
+         hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+         try:
+             self.log.info(
+                 "Creating log sink '%s' in project '%s'",
+                 _get_field(self.sink_config, "name"),
+                 self.project_id,
+             )
+             self.log.info("Destination: %s", _get_field(self.sink_config, "destination"))
+
+             response = hook.create_sink(
+                 sink=self.sink_config,
+                 unique_writer_identity=self.unique_writer_identity,
+                 project_id=self.project_id,
+             )
+
+             self.log.info("Log sink created successfully: %s", response.name)
+
+             if self.unique_writer_identity and hasattr(response, "writer_identity"):
+                 self.log.info("Writer identity: %s", response.writer_identity)
+                 self.log.info("Remember to grant appropriate permissions to the writer identity")
+
+             return LogSink.to_dict(response)
+
+         except AlreadyExists:
+             self.log.info(
+                 "Already existed log sink, sink_name=%s, project_id=%s",
+                 _get_field(self.sink_config, "name"),
+                 self.project_id,
+             )
+             existing_sink = hook.get_sink(
+                 sink_name=_get_field(self.sink_config, "name"), project_id=self.project_id
+             )
+             return LogSink.to_dict(existing_sink)
+
+         except google.cloud.exceptions.GoogleCloudError as e:
+             self.log.error("An error occurred. Exiting.")
+             raise e
+
+
+ class CloudLoggingDeleteSinkOperator(GoogleCloudBaseOperator):
+     """
+     Deletes a Cloud Logging export sink from a GCP project.
+
+     :param sink_name: Required. Name of the sink to delete.
+     :param project_id: Required. The ID of the Google Cloud project.
+     :param gcp_conn_id: Optional. The connection ID to use for connecting to Google Cloud.
+         Defaults to "google_cloud_default".
+     :param impersonation_chain: Optional service account to impersonate using short-term
+         credentials, or chained list of accounts required to get the access_token
+         of the last account in the list, which will be impersonated in the request.
+         If set as a string, the account must grant the originating account
+         the Service Account Token Creator IAM role.
+         If set as a sequence, the identities from the list must grant
+         Service Account Token Creator IAM role to the directly preceding identity, with first
+         account from the list granting this role to the originating account (templated).
+     """
+
+     template_fields: Sequence[str] = ("sink_name", "project_id", "gcp_conn_id", "impersonation_chain")
+
+     def __init__(
+         self,
+         sink_name: str,
+         project_id: str,
+         gcp_conn_id: str = "google_cloud_default",
+         impersonation_chain: str | Sequence[str] | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.sink_name = sink_name
+         self.project_id = project_id
+         self.gcp_conn_id = gcp_conn_id
+         self.impersonation_chain = impersonation_chain
+
+     def execute(self, context: Context) -> None:
+         """Execute the operator."""
+         _validate_inputs(self, ["sink_name", "project_id"])
+         hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+         try:
+             self.log.info("Deleting log sink '%s' from project '%s'", self.sink_name, self.project_id)
+             hook.delete_sink(sink_name=self.sink_name, project_id=self.project_id)
+             self.log.info("Log sink '%s' deleted successfully", self.sink_name)
+
+         except google.cloud.exceptions.NotFound as e:
+             self.log.error("An error occurred. Not Found.")
+             raise e
+         except google.cloud.exceptions.GoogleCloudError as e:
+             self.log.error("An error occurred. Exiting.")
+             raise e
+
+
+ class CloudLoggingUpdateSinkOperator(GoogleCloudBaseOperator):
+     """
+     Updates an existing Cloud Logging export sink.
+
+     :param project_id: Required. The ID of the Google Cloud project that contains the sink.
+     :param sink_name: Required. The name of the sink to update.
+     :param sink_config: Required. The updated sink configuration. Can be a dictionary or a
+         `google.cloud.logging_v2.types.LogSink` object. Refer to:
+         https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.sinks
+     :param update_mask: Required. A FieldMask or dictionary specifying which fields of the sink
+         should be updated. For example, to update the destination and filter, use:
+         `{"paths": ["destination", "filter"]}`.
+     :param unique_writer_identity: Optional. When set to True, a new unique service account
+         will be created for the sink. Defaults to False.
+     :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
+         Defaults to "google_cloud_default".
+     :param impersonation_chain: Optional service account to impersonate using short-term
+         credentials, or chained list of accounts required to get the access_token
+         of the last account in the list, which will be impersonated in the request.
+         If set as a string, the account must grant the originating account
+         the Service Account Token Creator IAM role.
+         If set as a sequence, the identities from the list must grant
+         Service Account Token Creator IAM role to the directly preceding identity, with first
+         account from the list granting this role to the originating account (templated).
+     """
+
+     template_fields: Sequence[str] = (
+         "sink_name",
+         "project_id",
+         "update_mask",
+         "sink_config",
+         "unique_writer_identity",
+         "gcp_conn_id",
+         "impersonation_chain",
+     )
+
+     def __init__(
+         self,
+         project_id: str,
+         sink_name: str,
+         sink_config: dict | LogSink,
+         update_mask: FieldMask | dict,
+         unique_writer_identity: bool = False,
+         gcp_conn_id: str = "google_cloud_default",
+         impersonation_chain: str | Sequence[str] | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.project_id = project_id
+         self.sink_name = sink_name
+         self.sink_config = sink_config
+         self.update_mask = update_mask
+         self.unique_writer_identity = unique_writer_identity
+         self.gcp_conn_id = gcp_conn_id
+         self.impersonation_chain = impersonation_chain
+
+     def execute(self, context: Context) -> dict[str, Any]:
+         """Execute the operator."""
+         _validate_inputs(self, ["sink_name", "project_id", "sink_config", "update_mask"])
+         hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+         try:
+             current_sink = hook.get_sink(sink_name=self.sink_name, project_id=self.project_id)
+             self.log.info("Current log sink configuration: '%s'.", LogSink.to_dict(current_sink))
+
+             self.log.info("Updating log sink '%s' in project '%s'", self.sink_name, self.project_id)
+             if isinstance(self.update_mask, dict) and "paths" in self.update_mask:
+                 paths = self.update_mask["paths"]
+             elif hasattr(self.update_mask, "paths"):
+                 paths = self.update_mask.paths
+
+             self.log.info("Updating fields: %s", ", ".join(paths))
+
+             response = hook.update_sink(
+                 sink_name=self.sink_name,
+                 sink=self.sink_config,
+                 unique_writer_identity=self.unique_writer_identity,
+                 project_id=self.project_id,
+                 update_mask=self.update_mask,
+             )
+             self.log.info("Log sink updated successfully: %s", response.name)
+             return LogSink.to_dict(response)
+
+         except google.cloud.exceptions.NotFound as e:
+             self.log.error("An error occurred. Not Found.")
+             raise e
+         except google.cloud.exceptions.GoogleCloudError as e:
+             self.log.error("An error occurred. Exiting.")
+             raise e
+
+
+ class CloudLoggingListSinksOperator(GoogleCloudBaseOperator):
+     """
+     Lists Cloud Logging export sinks in a Google Cloud project.
+
+     :param project_id: Required. The ID of the Google Cloud project to list sinks from.
+     :param page_size: Optional. The maximum number of sinks to return per page. Must be greater than 0.
+         If None, the server will use a default value.
+     :param gcp_conn_id: Optional. The connection ID used to connect to Google Cloud.
+         Defaults to "google_cloud_default".
+     :param impersonation_chain: Optional. Service account or chained list of accounts to impersonate.
+         If a string, the service account must grant the originating account the
+         'Service Account Token Creator' IAM role.
+
+         If a sequence, each account in the chain must grant this role to the next.
+         The first account must grant it to the originating account (templated).
+     """
+
+     template_fields: Sequence[str] = ("project_id", "gcp_conn_id", "impersonation_chain", "page_size")
+
+     def __init__(
+         self,
+         project_id: str,
+         page_size: int | None = None,
+         gcp_conn_id: str = "google_cloud_default",
+         impersonation_chain: str | Sequence[str] | None = None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+         self.project_id = project_id
+         self.page_size = page_size
+         self.gcp_conn_id = gcp_conn_id
+         self.impersonation_chain = impersonation_chain
+
+     def execute(self, context: Context) -> list[dict[str, Any]]:
+         """Execute the operator."""
+         _validate_inputs(self, ["project_id"])
+
+         if self.page_size is not None and self.page_size < 1:
+             raise AirflowException("The page_size for the list sinks request must be greater than zero")
+
+         hook = CloudLoggingHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+         try:
+             self.log.info("Listing log sinks in project '%s'", self.project_id)
+
+             sinks = hook.list_sinks(project_id=self.project_id, page_size=self.page_size)
+
+             result = [LogSink.to_dict(sink) for sink in sinks]
+             self.log.info("Found %d log sinks", len(result))
+
+             return result
+
+         except google.cloud.exceptions.GoogleCloudError as e:
+             self.log.error("An error occurred. Exiting.")
+             raise e
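
For orientation, a minimal sketch of how the new sink operators might be wired into a DAG. The import path matches the new module listed above; the DAG id, sink name, destination bucket, and filter are illustrative values, not part of the release:

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.google.cloud.operators.cloud_logging_sink import (
        CloudLoggingCreateSinkOperator,
        CloudLoggingDeleteSinkOperator,
        CloudLoggingListSinksOperator,
    )

    PROJECT_ID = "example-project"  # illustrative
    SINK_NAME = "example-error-sink"  # illustrative

    with DAG(dag_id="example_cloud_logging_sink", start_date=datetime(2025, 1, 1), schedule=None) as dag:
        create_sink = CloudLoggingCreateSinkOperator(
            task_id="create_sink",
            project_id=PROJECT_ID,
            sink_config={
                "name": SINK_NAME,
                # Cloud Storage destinations use the storage.googleapis.com/<bucket> form.
                "destination": "storage.googleapis.com/example-log-bucket",
                "filter": "severity>=ERROR",
            },
            unique_writer_identity=True,  # the returned writer identity still needs bucket permissions
        )
        list_sinks = CloudLoggingListSinksOperator(task_id="list_sinks", project_id=PROJECT_ID)
        delete_sink = CloudLoggingDeleteSinkOperator(
            task_id="delete_sink", sink_name=SINK_NAME, project_id=PROJECT_ID
        )
        create_sink >> list_sinks >> delete_sink
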
@@ -35,11 +35,7 @@ from airflow.providers.google.cloud.triggers.cloud_sql import CloudSQLExportTrig
  from airflow.providers.google.cloud.utils.field_validator import GcpBodyFieldValidator
  from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, get_field
  from airflow.providers.google.common.links.storage import FileDetailsLink
-
- try:
-     from airflow.sdk import BaseHook
- except ImportError:
-     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+ from airflow.providers.google.version_compat import BaseHook

  if TYPE_CHECKING:
      from airflow.models import Connection
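
This hunk replaces the inlined try/except with the shared shim in version_compat.py (file 59 above, +10 -3). A sketch of what that shim plausibly contains, simply mirroring the lines removed here:

    from __future__ import annotations

    try:
        # Airflow 3: the hooks base class is exposed via the task SDK.
        from airflow.sdk import BaseHook
    except ImportError:
        # Airflow 2 fallback.
        from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
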
@@ -96,8 +96,8 @@ class TransferJobPreprocessor:

          aws_hook = AwsBaseHook(self.aws_conn_id, resource_type="s3")
          aws_credentials = aws_hook.get_credentials()
-         aws_access_key_id = aws_credentials.access_key  # type: ignore[attr-defined]
-         aws_secret_access_key = aws_credentials.secret_key  # type: ignore[attr-defined]
+         aws_access_key_id = aws_credentials.access_key
+         aws_secret_access_key = aws_credentials.secret_key
          self.body[TRANSFER_SPEC][AWS_S3_DATA_SOURCE][AWS_ACCESS_KEY] = {
              ACCESS_KEY_ID: aws_access_key_id,
              SECRET_ACCESS_KEY: aws_secret_access_key,
@@ -907,7 +907,7 @@ class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
          cluster_state = event["cluster_state"]
          cluster_name = event["cluster_name"]

-         if cluster_state == ClusterStatus.State.ERROR:
+         if cluster_state == ClusterStatus.State(ClusterStatus.State.DELETING).name:
              raise AirflowException(f"Cluster is in ERROR state:\n{cluster_name}")

          self.log.info("%s completed successfully.", self.task_id)
@@ -1834,7 +1834,7 @@ class CloudDLPListDeidentifyTemplatesOperator(GoogleCloudBaseOperator):
              project_id=project_id,
          )

-         return [DeidentifyTemplate.to_dict(template) for template in templates]  # type: ignore[arg-type]
+         return [DeidentifyTemplate.to_dict(template) for template in templates]


  class CloudDLPListDLPJobsOperator(GoogleCloudBaseOperator):
@@ -1930,7 +1930,7 @@ class CloudDLPListDLPJobsOperator(GoogleCloudBaseOperator):
          )

          # the DlpJob.to_dict does not have the right type defined as possible to pass in constructor
-         return [DlpJob.to_dict(job) for job in jobs]  # type: ignore[arg-type]
+         return [DlpJob.to_dict(job) for job in jobs]


  class CloudDLPListInfoTypesOperator(GoogleCloudBaseOperator):
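
Both DLP hunks drop a # type: ignore[arg-type] on proto-plus to_dict calls, presumably because newer type stubs accept the message instance directly. A minimal sketch of the conversion pattern (the job name is illustrative):

    from google.cloud.dlp_v2.types import DlpJob

    # proto-plus conversion is a class-level helper, called with the instance:
    job = DlpJob(name="projects/example-project/dlpJobs/example-job")
    job_dict = DlpJob.to_dict(job)
    print(job_dict["name"])
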
@@ -787,10 +787,10 @@ class GKEStartJobOperator(GKEOperatorMixin, KubernetesJobOperator):
              trigger=GKEJobTrigger(
                  cluster_url=self.cluster_url,
                  ssl_ca_cert=self.ssl_ca_cert,
-                 job_name=self.job.metadata.name,  # type: ignore[union-attr]
-                 job_namespace=self.job.metadata.namespace,  # type: ignore[union-attr]
-                 pod_name=self.pod.metadata.name,  # type: ignore[union-attr]
-                 pod_namespace=self.pod.metadata.namespace,  # type: ignore[union-attr]
+                 job_name=self.job.metadata.name,
+                 job_namespace=self.job.metadata.namespace,
+                 pod_names=[pod.metadata.name for pod in self.pods],
+                 pod_namespace=self.pods[0].metadata.namespace,
                  base_container_name=self.base_container_name,
                  gcp_conn_id=self.gcp_conn_id,
                  poll_interval=self.job_poll_interval,
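
The GKE hunk widens the deferral from a single pod to every pod spawned by the job (file 46 above shows the matching GKEJobTrigger change, which is not excerpted here). A minimal sketch of the pattern the new call site relies on, using the kubernetes client's models; the pod names are illustrative:

    from kubernetes.client import V1ObjectMeta, V1Pod

    # e.g. a Job with parallelism=2 spawns several pods
    pods = [
        V1Pod(metadata=V1ObjectMeta(name=f"example-job-pod-{i}", namespace="default"))
        for i in range(2)
    ]

    pod_names = [pod.metadata.name for pod in pods]  # every pod, not just the first
    pod_namespace = pods[0].metadata.namespace       # call site assumes one shared namespace
    print(pod_names, pod_namespace)                  # ['example-job-pod-0', 'example-job-pod-1'] default
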