apache-airflow-providers-google 10.12.0rc1__py3-none-any.whl → 10.13.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. airflow/providers/google/__init__.py +3 -3
  2. airflow/providers/google/cloud/fs/gcs.py +16 -13
  3. airflow/providers/google/cloud/hooks/bigquery_dts.py +2 -1
  4. airflow/providers/google/cloud/hooks/cloud_build.py +2 -1
  5. airflow/providers/google/cloud/hooks/cloud_composer.py +4 -3
  6. airflow/providers/google/cloud/hooks/compute_ssh.py +18 -6
  7. airflow/providers/google/cloud/hooks/dataflow.py +1 -1
  8. airflow/providers/google/cloud/hooks/dataplex.py +2 -1
  9. airflow/providers/google/cloud/hooks/dataproc.py +19 -18
  10. airflow/providers/google/cloud/hooks/gcs.py +2 -0
  11. airflow/providers/google/cloud/operators/cloud_composer.py +1 -1
  12. airflow/providers/google/cloud/operators/cloud_run.py +3 -3
  13. airflow/providers/google/cloud/operators/dataplex.py +530 -1
  14. airflow/providers/google/cloud/operators/dataproc.py +10 -8
  15. airflow/providers/google/cloud/operators/gcs.py +85 -10
  16. airflow/providers/google/cloud/secrets/secret_manager.py +22 -1
  17. airflow/providers/google/cloud/sensors/cloud_composer.py +14 -1
  18. airflow/providers/google/cloud/sensors/dataplex.py +118 -0
  19. airflow/providers/google/cloud/triggers/cloud_run.py +7 -7
  20. airflow/providers/google/cloud/triggers/dataplex.py +82 -0
  21. airflow/providers/google/cloud/triggers/dataproc.py +2 -5
  22. airflow/providers/google/common/hooks/base_google.py +6 -4
  23. airflow/providers/google/get_provider_info.py +11 -10
  24. {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0rc2.dist-info}/METADATA +24 -24
  25. {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0rc2.dist-info}/RECORD +27 -27
  26. {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0rc2.dist-info}/WHEEL +0 -0
  27. {apache_airflow_providers_google-10.12.0rc1.dist-info → apache_airflow_providers_google-10.13.0rc2.dist-info}/entry_points.txt +0 -0
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "10.12.0"
+__version__ = "10.13.0"
 
 try:
     from airflow import __version__ as airflow_version
@@ -35,8 +35,8 @@ except ImportError:
     from airflow.version import version as airflow_version
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.5.0"
+    "2.6.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-google:{__version__}` needs Apache Airflow 2.5.0+"
+        f"The package `apache-airflow-providers-google:{__version__}` needs Apache Airflow 2.6.0+"
     )
@@ -39,7 +39,7 @@ GCS_VERSION_AWARE = "gcs.version-aware"
 schemes = ["gs", "gcs"]
 
 
-def get_fs(conn_id: str | None) -> AbstractFileSystem:
+def get_fs(conn_id: str | None, storage_options: dict[str, str] | None = None) -> AbstractFileSystem:
     # https://gcsfs.readthedocs.io/en/latest/api.html#gcsfs.core.GCSFileSystem
     from gcsfs import GCSFileSystem
 
@@ -49,15 +49,18 @@ def get_fs(conn_id: str | None) -> AbstractFileSystem:
     g = GoogleBaseHook(gcp_conn_id=conn_id)
     creds = g.get_credentials()
 
-    return GCSFileSystem(
-        project=g.project_id,
-        access=g.extras.get(GCS_ACCESS, "full_control"),
-        token=creds.token,
-        consistency=g.extras.get(GCS_CONSISTENCY, "none"),
-        cache_timeout=g.extras.get(GCS_CACHE_TIMEOUT),
-        requester_pays=g.extras.get(GCS_REQUESTER_PAYS, False),
-        session_kwargs=g.extras.get(GCS_SESSION_KWARGS, {}),
-        endpoint_url=g.extras.get(GCS_ENDPOINT),
-        default_location=g.extras.get(GCS_DEFAULT_LOCATION),
-        version_aware=g.extras.get(GCS_VERSION_AWARE, "false").lower() == "true",
-    )
+    options = {
+        "project": g.project_id,
+        "access": g.extras.get(GCS_ACCESS, "full_control"),
+        "token": creds.token,
+        "consistency": g.extras.get(GCS_CONSISTENCY, "none"),
+        "cache_timeout": g.extras.get(GCS_CACHE_TIMEOUT),
+        "requester_pays": g.extras.get(GCS_REQUESTER_PAYS, False),
+        "session_kwargs": g.extras.get(GCS_SESSION_KWARGS, {}),
+        "endpoint_url": g.extras.get(GCS_ENDPOINT),
+        "default_location": g.extras.get(GCS_DEFAULT_LOCATION),
+        "version_aware": g.extras.get(GCS_VERSION_AWARE, "false").lower() == "true",
+    }
+    options.update(storage_options or {})
+
+    return GCSFileSystem(**options)
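The new storage_options argument lets callers override or extend the connection-derived defaults, because it is merged into the option dict last via options.update(). A minimal usage sketch, assuming a connection id of google_cloud_default; the endpoint and location values are illustrative, not taken from this diff:

    from airflow.providers.google.cloud.fs.gcs import get_fs

    # storage_options wins on conflicts because it is applied after the defaults
    # built from the Airflow connection extras.
    fs = get_fs(
        conn_id="google_cloud_default",
        storage_options={
            "endpoint_url": "http://localhost:4443",  # e.g. a fake-GCS emulator endpoint
            "default_location": "EU",
        },
    )
    files = fs.ls("my-bucket/path")  # hypothetical bucket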
@@ -38,6 +38,7 @@ from airflow.providers.google.common.hooks.base_google import (
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
+    from google.api_core.retry_async import AsyncRetry
     from googleapiclient.discovery import Resource
 
 
@@ -321,7 +322,7 @@ class AsyncBiqQueryDataTransferServiceHook(GoogleBaseAsyncHook):
         run_id: str,
         project_id: str | None,
         location: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ):
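This is part of a broader change in this release: the async hooks (also in the cloud_build, cloud_composer, dataplex, and dataproc hunks below) now annotate their retry parameters as google.api_core.retry_async.AsyncRetry instead of the synchronous Retry. A short sketch of building such an object to pass as the retry= argument of those methods; the backoff values are illustrative, not provider defaults:

    from google.api_core.retry_async import AsyncRetry

    # Standard AsyncRetry knobs for async client calls (illustrative values).
    custom_retry = AsyncRetry(initial=1.0, maximum=30.0, multiplier=2.0, timeout=300.0)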
@@ -33,6 +33,7 @@ from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
+    from google.api_core.retry_async import AsyncRetry
     from google.cloud.devtools.cloudbuild_v1.types import Build, BuildTrigger, RepoSource
 
 # Time to sleep between active checks of the operation results
@@ -645,7 +646,7 @@ class CloudBuildAsyncHook(GoogleBaseHook):
         self,
         id_: str,
         project_id: str = PROVIDE_PROJECT_ID,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
         location: str = "global",
@@ -35,6 +35,7 @@ if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.operation_async import AsyncOperation
     from google.api_core.retry import Retry
+    from google.api_core.retry_async import AsyncRetry
     from google.cloud.orchestration.airflow.service_v1.services.environments.pagers import (
         ListEnvironmentsPager,
     )
@@ -332,7 +333,7 @@ class CloudComposerAsyncHook(GoogleBaseHook):
         project_id: str,
         region: str,
         environment: Environment | dict,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -361,7 +362,7 @@ class CloudComposerAsyncHook(GoogleBaseHook):
         project_id: str,
         region: str,
         environment_id: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -389,7 +390,7 @@ class CloudComposerAsyncHook(GoogleBaseHook):
         environment_id: str,
         environment: Environment | dict,
         update_mask: dict | FieldMask,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -86,6 +86,9 @@ class ComputeEngineSSHHook(SSHHook):
     :param gcp_conn_id: The connection id to use when fetching connection information
     :param max_retries: Maximum number of retries the process will try to establish connection to instance.
         Could be decreased/increased by user based on the amount of parallel SSH connections to the instance.
+    :param impersonation_chain: Optional. The service account email to impersonate using short-term
+        credentials. The provided service account must grant the originating account
+        the Service Account Token Creator IAM role and have the sufficient rights to perform the request
     """
 
     conn_name_attr = "gcp_conn_id"
@@ -93,8 +96,8 @@ class ComputeEngineSSHHook(SSHHook):
     conn_type = "gcpssh"
     hook_name = "Google Cloud SSH"
 
-    @staticmethod
-    def get_ui_field_behaviour() -> dict[str, Any]:
+    @classmethod
+    def get_ui_field_behaviour(cls) -> dict[str, Any]:
         return {
             "hidden_fields": ["host", "schema", "login", "password", "port", "extra"],
             "relabeling": {},
@@ -114,15 +117,17 @@ class ComputeEngineSSHHook(SSHHook):
         expire_time: int = 300,
         cmd_timeout: int | ArgNotSet = NOTSET,
         max_retries: int = 10,
+        impersonation_chain: str | None = None,
         **kwargs,
     ) -> None:
        if kwargs.get("delegate_to") is not None:
            raise RuntimeError(
                "The `delegate_to` parameter has been deprecated before and finally removed in this version"
-                " of Google Provider. You MUST convert it to `impersonate_chain`"
+                " of Google Provider. You MUST convert it to `impersonation_chain`"
            )
        # Ignore original constructor
        # super().__init__()
+        self.gcp_conn_id = gcp_conn_id
        self.instance_name = instance_name
        self.zone = zone
        self.user = user
@@ -132,9 +137,9 @@ class ComputeEngineSSHHook(SSHHook):
        self.use_iap_tunnel = use_iap_tunnel
        self.use_oslogin = use_oslogin
        self.expire_time = expire_time
-        self.gcp_conn_id = gcp_conn_id
        self.cmd_timeout = cmd_timeout
        self.max_retries = max_retries
+        self.impersonation_chain = impersonation_chain
        self._conn: Any | None = None
 
    @cached_property
@@ -143,7 +148,12 @@ class ComputeEngineSSHHook(SSHHook):
 
    @cached_property
    def _compute_hook(self) -> ComputeEngineHook:
-        return ComputeEngineHook(gcp_conn_id=self.gcp_conn_id)
+        if self.impersonation_chain:
+            return ComputeEngineHook(
+                gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain
+            )
+        else:
+            return ComputeEngineHook(gcp_conn_id=self.gcp_conn_id)
 
    def _load_connection_config(self):
        def _boolify(value):
@@ -254,6 +264,8 @@ class ComputeEngineSSHHook(SSHHook):
                f"--zone={self.zone}",
                "--verbosity=warning",
            ]
+            if self.impersonation_chain:
+                proxy_command_args.append(f"--impersonate-service-account={self.impersonation_chain}")
            proxy_command = " ".join(shlex.quote(arg) for arg in proxy_command_args)
            sshclient = self._connect_to_instance(user, hostname, privkey, proxy_command)
            break
@@ -283,7 +295,7 @@ class ComputeEngineSSHHook(SSHHook):
        client = _GCloudAuthorizedSSHClient(self._compute_hook)
        # Default is RejectPolicy
        # No known host checking since we are not storing privatekey
-        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())  # nosec B507
        client.connect(
            hostname=hostname,
            username=user,
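Taken together, the compute_ssh changes thread the new impersonation_chain through the hook: it is stored in the constructor, forwarded to the underlying ComputeEngineHook, and appended to the gcloud IAP proxy command as --impersonate-service-account. A minimal usage sketch; the instance name, zone, and service account email are illustrative:

    from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook

    hook = ComputeEngineSSHHook(
        gcp_conn_id="google_cloud_default",
        instance_name="my-vm",
        zone="europe-west1-b",
        use_iap_tunnel=True,
        impersonation_chain="workload-sa@my-project.iam.gserviceaccount.com",
    )
    # Both the Compute Engine API calls and the IAP tunnel then run under the
    # impersonated service account.
    ssh_client = hook.get_conn()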
@@ -55,7 +55,7 @@ T = TypeVar("T", bound=Callable)
 
 
 def process_line_and_extract_dataflow_job_id_callback(
-    on_new_job_id_callback: Callable[[str], None] | None
+    on_new_job_id_callback: Callable[[str], None] | None,
 ) -> Callable[[str], None]:
     """Build callback that triggers the specified function.
 
@@ -40,6 +40,7 @@ from airflow.providers.google.common.hooks.base_google import GoogleBaseAsyncHook
 if TYPE_CHECKING:
     from google.api_core.operation import Operation
     from google.api_core.retry import Retry
+    from google.api_core.retry_async import AsyncRetry
     from googleapiclient.discovery import Resource
 
 PATH_DATA_SCAN = "projects/{project_id}/locations/{region}/dataScans/{data_scan_id}"
@@ -896,7 +897,7 @@ class DataplexAsyncHook(GoogleBaseAsyncHook):
         region: str,
         data_scan_id: str | None = None,
         job_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Any:
@@ -51,6 +51,7 @@ if TYPE_CHECKING:
     from google.api_core.operation_async import AsyncOperation
     from google.api_core.operations_v1.operations_client import OperationsClient
     from google.api_core.retry import Retry
+    from google.api_core.retry_async import AsyncRetry
     from google.protobuf.duration_pb2 import Duration
     from google.protobuf.field_mask_pb2 import FieldMask
 
@@ -256,7 +257,7 @@ class DataprocHook(GoogleBaseHook):
         self,
         operation: Operation,
         timeout: float | None = None,
-        result_retry: Retry | _MethodDefault = DEFAULT,
+        result_retry: AsyncRetry | _MethodDefault = DEFAULT,
     ) -> Any:
         """Wait for a long-lasting operation to complete."""
         try:
@@ -997,7 +998,7 @@ class DataprocHook(GoogleBaseHook):
         region: str,
         project_id: str,
         wait_check_interval: int = 10,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Batch:
@@ -1132,7 +1133,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         virtual_cluster_config: dict | None = None,
         labels: dict[str, str] | None = None,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1199,7 +1200,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         project_id: str,
         cluster_uuid: str | None = None,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1242,7 +1243,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         region: str,
         cluster_name: str,
         project_id: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> str:
@@ -1277,7 +1278,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         region: str,
         cluster_name: str,
         project_id: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Cluster:
@@ -1309,7 +1310,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         filter_: str,
         project_id: str,
         page_size: int | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ):
@@ -1349,7 +1350,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         region: str,
         graceful_decommission_timeout: dict | Duration | None = None,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1429,7 +1430,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         template: dict | WorkflowTemplate,
         project_id: str,
         region: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> WorkflowTemplate:
@@ -1465,7 +1466,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         version: int | None = None,
         request_id: str | None = None,
         parameters: dict[str, str] | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1511,7 +1512,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         project_id: str,
         region: str,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1554,7 +1555,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         job_id: str,
         project_id: str,
         region: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Job:
@@ -1588,7 +1589,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         project_id: str,
         region: str,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Job:
@@ -1624,7 +1625,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         job_id: str,
         project_id: str,
         region: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Job:
@@ -1658,7 +1659,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         batch: dict | Batch,
         batch_id: str | None = None,
         request_id: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> AsyncOperation:
@@ -1703,7 +1704,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         batch_id: str,
         region: str,
         project_id: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> None:
@@ -1737,7 +1738,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         batch_id: str,
         region: str,
         project_id: str,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
     ) -> Batch:
@@ -1773,7 +1774,7 @@ class DataprocAsyncHook(GoogleBaseHook):
         project_id: str,
         page_size: int | None = None,
         page_token: str | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
+        retry: AsyncRetry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
         filter: str | None = None,
@@ -821,6 +821,7 @@ class GCSHook(GoogleBaseHook):
                delimiter=delimiter,
                versions=versions,
            )
+            list(blobs)
 
            if blobs.prefixes:
                ids.extend(blobs.prefixes)
@@ -932,6 +933,7 @@ class GCSHook(GoogleBaseHook):
                delimiter=delimiter,
                versions=versions,
            )
+            list(blobs)
 
            if blobs.prefixes:
                ids.extend(blobs.prefixes)
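The added list(blobs) consumes the iterator returned by the storage client before blobs.prefixes is read; with google-cloud-storage, the prefixes set of the returned iterator is only populated while its pages are iterated. A small sketch of that underlying behaviour, with an illustrative bucket and prefix:

    from google.cloud import storage

    client = storage.Client()
    iterator = client.list_blobs("my-bucket", prefix="data/", delimiter="/")
    blobs = list(iterator)          # iterating the pages fills in iterator.prefixes
    subdirectories = iterator.prefixes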
@@ -112,7 +112,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
     :param metadata: Strings which should be sent along with the request as metadata.
     :param deferrable: Run operator in the deferrable mode
     :param pooling_period_seconds: Optional: Control the rate of the poll for the result of deferrable run.
-        By default the trigger will poll every 30 seconds.
+        By default, the trigger will poll every 30 seconds.
     """
 
     template_fields = (
@@ -264,7 +264,7 @@ class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
     :param deferrable: Run operator in the deferrable mode
     """
 
-    template_fields = ("project_id", "region", "gcp_conn_id", "impersonation_chain", "job_name")
+    template_fields = ("project_id", "region", "gcp_conn_id", "impersonation_chain", "job_name", "overrides")
 
     def __init__(
         self,
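Adding overrides to template_fields means the job overrides passed to CloudRunExecuteJobOperator are now rendered with Jinja. A hedged sketch; the dict follows the Cloud Run v2 job-override shape and all names and values are illustrative:

    from airflow.providers.google.cloud.operators.cloud_run import CloudRunExecuteJobOperator

    execute_job = CloudRunExecuteJobOperator(
        task_id="execute_job",
        project_id="my-project",
        region="europe-west1",
        job_name="my-job",
        overrides={
            # Rendered at runtime now that "overrides" is a template field.
            "container_overrides": [{"args": ["--run-date", "{{ ds }}"]}],
            "task_count": 1,
        },
    )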
@@ -321,10 +321,10 @@ class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
     def execute_complete(self, context: Context, event: dict):
         status = event["status"]
 
-        if status == RunJobStatus.TIMEOUT:
+        if status == RunJobStatus.TIMEOUT.value:
             raise AirflowException("Operation timed out")
 
-        if status == RunJobStatus.FAIL:
+        if status == RunJobStatus.FAIL.value:
             error_code = event["operation_error_code"]
             error_message = event["operation_error_message"]
             raise AirflowException(