apache-airflow-providers-google 18.0.0rc1__py3-none-any.whl → 18.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (72)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +5 -5
  3. airflow/providers/google/assets/gcs.py +1 -11
  4. airflow/providers/google/cloud/bundles/__init__.py +16 -0
  5. airflow/providers/google/cloud/bundles/gcs.py +161 -0
  6. airflow/providers/google/cloud/hooks/bigquery.py +45 -42
  7. airflow/providers/google/cloud/hooks/cloud_composer.py +131 -1
  8. airflow/providers/google/cloud/hooks/cloud_sql.py +88 -13
  9. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +16 -0
  10. airflow/providers/google/cloud/hooks/dataflow.py +1 -1
  11. airflow/providers/google/cloud/hooks/dataprep.py +1 -1
  12. airflow/providers/google/cloud/hooks/dataproc.py +3 -0
  13. airflow/providers/google/cloud/hooks/gcs.py +107 -3
  14. airflow/providers/google/cloud/hooks/gen_ai.py +196 -0
  15. airflow/providers/google/cloud/hooks/looker.py +1 -1
  16. airflow/providers/google/cloud/hooks/spanner.py +45 -0
  17. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +30 -0
  18. airflow/providers/google/cloud/links/base.py +11 -11
  19. airflow/providers/google/cloud/links/dataproc.py +2 -10
  20. airflow/providers/google/cloud/openlineage/CloudStorageTransferJobFacet.json +68 -0
  21. airflow/providers/google/cloud/openlineage/CloudStorageTransferRunFacet.json +60 -0
  22. airflow/providers/google/cloud/openlineage/DataFusionRunFacet.json +32 -0
  23. airflow/providers/google/cloud/openlineage/facets.py +102 -1
  24. airflow/providers/google/cloud/openlineage/mixins.py +3 -1
  25. airflow/providers/google/cloud/operators/bigquery.py +2 -9
  26. airflow/providers/google/cloud/operators/cloud_run.py +2 -1
  27. airflow/providers/google/cloud/operators/cloud_sql.py +1 -1
  28. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +89 -6
  29. airflow/providers/google/cloud/operators/datafusion.py +36 -7
  30. airflow/providers/google/cloud/operators/gen_ai.py +389 -0
  31. airflow/providers/google/cloud/operators/spanner.py +22 -6
  32. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +7 -0
  33. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +30 -0
  34. airflow/providers/google/cloud/operators/workflows.py +17 -6
  35. airflow/providers/google/cloud/sensors/bigquery.py +1 -1
  36. airflow/providers/google/cloud/sensors/bigquery_dts.py +1 -6
  37. airflow/providers/google/cloud/sensors/bigtable.py +1 -6
  38. airflow/providers/google/cloud/sensors/cloud_composer.py +65 -31
  39. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +1 -6
  40. airflow/providers/google/cloud/sensors/dataflow.py +1 -1
  41. airflow/providers/google/cloud/sensors/dataform.py +1 -6
  42. airflow/providers/google/cloud/sensors/datafusion.py +1 -6
  43. airflow/providers/google/cloud/sensors/dataplex.py +1 -6
  44. airflow/providers/google/cloud/sensors/dataprep.py +1 -6
  45. airflow/providers/google/cloud/sensors/dataproc.py +1 -6
  46. airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -6
  47. airflow/providers/google/cloud/sensors/gcs.py +1 -7
  48. airflow/providers/google/cloud/sensors/looker.py +1 -6
  49. airflow/providers/google/cloud/sensors/pubsub.py +1 -6
  50. airflow/providers/google/cloud/sensors/tasks.py +1 -6
  51. airflow/providers/google/cloud/sensors/vertex_ai/feature_store.py +1 -6
  52. airflow/providers/google/cloud/sensors/workflows.py +1 -6
  53. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +2 -1
  54. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +2 -1
  55. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +11 -2
  56. airflow/providers/google/cloud/triggers/bigquery.py +15 -3
  57. airflow/providers/google/cloud/triggers/cloud_composer.py +51 -21
  58. airflow/providers/google/cloud/triggers/cloud_run.py +1 -1
  59. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +90 -0
  60. airflow/providers/google/cloud/triggers/pubsub.py +14 -18
  61. airflow/providers/google/common/hooks/base_google.py +1 -1
  62. airflow/providers/google/get_provider_info.py +15 -0
  63. airflow/providers/google/leveldb/hooks/leveldb.py +1 -1
  64. airflow/providers/google/marketing_platform/links/analytics_admin.py +2 -8
  65. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +1 -6
  66. airflow/providers/google/marketing_platform/sensors/display_video.py +1 -6
  67. airflow/providers/google/suite/sensors/drive.py +1 -6
  68. airflow/providers/google/version_compat.py +0 -20
  69. {apache_airflow_providers_google-18.0.0rc1.dist-info → apache_airflow_providers_google-18.1.0.dist-info}/METADATA +15 -15
  70. {apache_airflow_providers_google-18.0.0rc1.dist-info → apache_airflow_providers_google-18.1.0.dist-info}/RECORD +72 -65
  71. {apache_airflow_providers_google-18.0.0rc1.dist-info → apache_airflow_providers_google-18.1.0.dist-info}/WHEEL +0 -0
  72. {apache_airflow_providers_google-18.0.0rc1.dist-info → apache_airflow_providers_google-18.1.0.dist-info}/entry_points.txt +0 -0

airflow/providers/google/cloud/triggers/cloud_composer.py
@@ -25,6 +25,7 @@ from datetime import datetime
 from typing import Any
 
 from dateutil import parser
+from google.api_core.exceptions import NotFound
 from google.cloud.orchestration.airflow.service_v1.types import ExecuteAirflowCommandResponse
 
 from airflow.exceptions import AirflowException
@@ -188,6 +189,7 @@ class CloudComposerDAGRunTrigger(BaseTrigger):
         impersonation_chain: str | Sequence[str] | None = None,
         poll_interval: int = 10,
         composer_airflow_version: int = 2,
+        use_rest_api: bool = False,
     ):
         super().__init__()
         self.project_id = project_id
@@ -202,6 +204,7 @@ class CloudComposerDAGRunTrigger(BaseTrigger):
         self.impersonation_chain = impersonation_chain
         self.poll_interval = poll_interval
         self.composer_airflow_version = composer_airflow_version
+        self.use_rest_api = use_rest_api
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         return (
@@ -219,31 +222,55 @@ class CloudComposerDAGRunTrigger(BaseTrigger):
                 "impersonation_chain": self.impersonation_chain,
                 "poll_interval": self.poll_interval,
                 "composer_airflow_version": self.composer_airflow_version,
+                "use_rest_api": self.use_rest_api,
             },
         )
 
     async def _pull_dag_runs(self) -> list[dict]:
         """Pull the list of dag runs."""
-        cmd_parameters = (
-            ["-d", self.composer_dag_id, "-o", "json"]
-            if self.composer_airflow_version < 3
-            else [self.composer_dag_id, "-o", "json"]
-        )
-        dag_runs_cmd = await self.gcp_hook.execute_airflow_command(
-            project_id=self.project_id,
-            region=self.region,
-            environment_id=self.environment_id,
-            command="dags",
-            subcommand="list-runs",
-            parameters=cmd_parameters,
-        )
-        cmd_result = await self.gcp_hook.wait_command_execution_result(
-            project_id=self.project_id,
-            region=self.region,
-            environment_id=self.environment_id,
-            execution_cmd_info=ExecuteAirflowCommandResponse.to_dict(dag_runs_cmd),
-        )
-        dag_runs = json.loads(cmd_result["output"][0]["content"])
+        if self.use_rest_api:
+            try:
+                environment = await self.gcp_hook.get_environment(
+                    project_id=self.project_id,
+                    region=self.region,
+                    environment_id=self.environment_id,
+                )
+            except NotFound as not_found_err:
+                self.log.info("The Composer environment %s does not exist.", self.environment_id)
+                raise AirflowException(not_found_err)
+            composer_airflow_uri = environment.config.airflow_uri
+
+            self.log.info(
+                "Pulling the DAG %s runs from the %s environment...",
+                self.composer_dag_id,
+                self.environment_id,
+            )
+            dag_runs_response = await self.gcp_hook.get_dag_runs(
+                composer_airflow_uri=composer_airflow_uri,
+                composer_dag_id=self.composer_dag_id,
+            )
+            dag_runs = dag_runs_response["dag_runs"]
+        else:
+            cmd_parameters = (
+                ["-d", self.composer_dag_id, "-o", "json"]
+                if self.composer_airflow_version < 3
+                else [self.composer_dag_id, "-o", "json"]
+            )
+            dag_runs_cmd = await self.gcp_hook.execute_airflow_command(
+                project_id=self.project_id,
+                region=self.region,
+                environment_id=self.environment_id,
+                command="dags",
+                subcommand="list-runs",
+                parameters=cmd_parameters,
+            )
+            cmd_result = await self.gcp_hook.wait_command_execution_result(
+                project_id=self.project_id,
+                region=self.region,
+                environment_id=self.environment_id,
+                execution_cmd_info=ExecuteAirflowCommandResponse.to_dict(dag_runs_cmd),
+            )
+            dag_runs = json.loads(cmd_result["output"][0]["content"])
         return dag_runs
 
     def _check_dag_runs_states(
@@ -271,7 +298,10 @@ class CloudComposerDAGRunTrigger(BaseTrigger):
 
     def _check_composer_dag_run_id_states(self, dag_runs: list[dict]) -> bool:
         for dag_run in dag_runs:
-            if dag_run["run_id"] == self.composer_dag_run_id and dag_run["state"] in self.allowed_states:
+            if (
+                dag_run["dag_run_id" if self.use_rest_api else "run_id"] == self.composer_dag_run_id
+                and dag_run["state"] in self.allowed_states
+            ):
                 return True
         return False
 
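
The hunks above make REST-based polling opt-in for CloudComposerDAGRunTrigger: with use_rest_api=True the trigger resolves the environment's airflow_uri via get_environment() and reads runs through get_dag_runs(), and the final hunk switches the run-id key accordingly, since the REST API labels it dag_run_id while the CLI's JSON output uses run_id. A self-contained sketch of that key selection; the helper name and sample payloads below are illustrative, not part of the provider's API:

# Illustrative sketch only: mirrors the key selection added in the hunk above.
def matching_run(dag_runs: list[dict], run_id: str, allowed_states: list[str], use_rest_api: bool) -> bool:
    key = "dag_run_id" if use_rest_api else "run_id"  # REST API vs. `dags list-runs -o json`
    return any(run[key] == run_id and run["state"] in allowed_states for run in dag_runs)

# CLI-shaped payload (use_rest_api=False) vs. REST-shaped payload (use_rest_api=True).
cli_runs = [{"run_id": "manual__2024-01-01T00:00:00", "state": "success"}]
rest_runs = [{"dag_run_id": "manual__2024-01-01T00:00:00", "state": "success"}]
assert matching_run(cli_runs, "manual__2024-01-01T00:00:00", ["success"], use_rest_api=False)
assert matching_run(rest_runs, "manual__2024-01-01T00:00:00", ["success"], use_rest_api=True)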

airflow/providers/google/cloud/triggers/cloud_run.py
@@ -134,7 +134,7 @@ class CloudRunJobFinishedTrigger(BaseTrigger):
 
         yield TriggerEvent(
             {
-                "status": RunJobStatus.TIMEOUT,
+                "status": RunJobStatus.TIMEOUT.value,
                 "job_name": self.job_name,
             }
         )
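
This one-line change matters because trigger event payloads are serialized when handed back from the triggerer: a raw Enum member is not JSON-encodable, while its .value is. A minimal standalone illustration; the RunJobStatus definition below is a stand-in, not the provider's enum, and its member value is assumed:

import json
from enum import Enum

class RunJobStatus(Enum):  # stand-in for the provider's enum; the real member values may differ
    TIMEOUT = "TIMEOUT"

try:
    json.dumps({"status": RunJobStatus.TIMEOUT, "job_name": "job"})
except TypeError as err:
    print(f"Enum member is not JSON-serializable: {err}")

print(json.dumps({"status": RunJobStatus.TIMEOUT.value, "job_name": "job"}))  # {"status": "TIMEOUT", ...}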

airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
@@ -23,6 +23,7 @@ from typing import Any
 
 from google.api_core.exceptions import GoogleAPIError
 from google.cloud.storage_transfer_v1.types import TransferOperation
+from google.protobuf.json_format import MessageToDict
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.cloud_storage_transfer_service import (
@@ -231,3 +232,92 @@ class CloudStorageTransferServiceCheckJobStatusTrigger(BaseTrigger):
         except Exception as e:
             self.log.exception("Exception occurred while checking for query completion")
             yield TriggerEvent({"status": "error", "message": str(e)})
+
+
+class CloudDataTransferServiceRunJobTrigger(BaseTrigger):
+    """
+    CloudDataTransferServiceRunJobTrigger run on the trigger worker to run Cloud Storage Transfer job.
+
+    :param job_name: The name of the transfer job
+    :param project_id: The ID of the project that owns the Transfer Job.
+    :param poke_interval: Polling period in seconds to check for the status
+    :param gcp_conn_id: The connection ID used to connect to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    def __init__(
+        self,
+        job_name: str,
+        project_id: str = PROVIDE_PROJECT_ID,
+        poke_interval: float = 10.0,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+    ):
+        super().__init__()
+        self.job_name = job_name
+        self.project_id = project_id
+        self.poke_interval = poke_interval
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def serialize(self) -> tuple[str, dict[str, Any]]:
+        """Serialize CloudDataTransferServiceRunJobTrigger arguments and classpath."""
+        return (
+            f"{self.__class__.__module__}.{self.__class__.__qualname__}",
+            {
+                "job_name": self.job_name,
+                "project_id": self.project_id,
+                "poke_interval": self.poke_interval,
+                "gcp_conn_id": self.gcp_conn_id,
+                "impersonation_chain": self.impersonation_chain,
+            },
+        )
+
+    def _get_async_hook(self) -> CloudDataTransferServiceAsyncHook:
+        return CloudDataTransferServiceAsyncHook(
+            project_id=self.project_id,
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+    async def run(self) -> AsyncIterator[TriggerEvent]:
+        """Run the transfer job and yield a TriggerEvent."""
+        hook = self._get_async_hook()
+
+        try:
+            job_operation = await hook.run_transfer_job(self.job_name)
+            while True:
+                job_completed = await job_operation.done()
+                if job_completed:
+                    yield TriggerEvent(
+                        {
+                            "status": "success",
+                            "message": "Transfer operation run completed successfully",
+                            "job_result": {
+                                "name": job_operation.operation.name,
+                                "metadata": MessageToDict(
+                                    job_operation.operation.metadata, preserving_proto_field_name=True
+                                ),
+                                "response": MessageToDict(
+                                    job_operation.operation.response, preserving_proto_field_name=True
+                                ),
+                            },
+                        }
+                    )
+                    return
+
+                self.log.info(
+                    "Sleeping for %s seconds.",
+                    self.poke_interval,
+                )
+                await asyncio.sleep(self.poke_interval)
+        except Exception as e:
+            self.log.exception("Exception occurred while running transfer job")
+            yield TriggerEvent({"status": "error", "message": str(e)})
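
For consumers of the new trigger, the success event carries the long-running operation's name plus its metadata and response rendered with MessageToDict(..., preserving_proto_field_name=True). A small sketch of handling that payload in a completion callback; the function name and sample values are illustrative, only the payload shape comes from the run() method above:

def handle_transfer_event(event: dict) -> dict:
    """Illustrative handler for events yielded by CloudDataTransferServiceRunJobTrigger."""
    if event["status"] == "error":
        raise RuntimeError(event["message"])
    # "job_result" holds the operation name and its proto metadata/response as plain dicts.
    return event["job_result"]

result = handle_transfer_event(
    {
        "status": "success",
        "message": "Transfer operation run completed successfully",
        "job_result": {"name": "transferOperations/sample-123", "metadata": {}, "response": {}},
    }
)
print(result["name"])  # transferOperations/sample-123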

airflow/providers/google/cloud/triggers/pubsub.py
@@ -86,26 +86,22 @@ class PubsubPullTrigger(BaseTrigger):
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-        try:
-            while True:
-                if pulled_messages := await self.hook.pull(
-                    project_id=self.project_id,
-                    subscription=self.subscription,
-                    max_messages=self.max_messages,
-                    return_immediately=True,
-                ):
-                    if self.ack_messages:
-                        await self.message_acknowledgement(pulled_messages)
+        while True:
+            if pulled_messages := await self.hook.pull(
+                project_id=self.project_id,
+                subscription=self.subscription,
+                max_messages=self.max_messages,
+                return_immediately=True,
+            ):
+                if self.ack_messages:
+                    await self.message_acknowledgement(pulled_messages)
 
-                    messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages]
+                messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages]
 
-                    yield TriggerEvent({"status": "success", "message": messages_json})
-                    return
-                self.log.info("Sleeping for %s seconds.", self.poke_interval)
-                await asyncio.sleep(self.poke_interval)
-        except Exception as e:
-            yield TriggerEvent({"status": "error", "message": str(e)})
-            return
+                yield TriggerEvent({"status": "success", "message": messages_json})
+                return
+            self.log.info("Sleeping for %s seconds.", self.poke_interval)
+            await asyncio.sleep(self.poke_interval)
 
     async def message_acknowledgement(self, pulled_messages):
         await self.hook.acknowledge(
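
Beyond the dedent, this rewrite drops the blanket try/except, so exceptions now propagate out of PubsubPullTrigger.run() instead of being converted into an error event. The success event is unchanged: "message" is a list of ReceivedMessage.to_dict() payloads. A small sketch of unpacking it; the helper is illustrative and the snake_case "ack_id" key is an assumption about the to_dict() output:

def ack_ids_from_event(event: dict) -> list[str]:
    """Illustrative: collect ack IDs from a PubsubPullTrigger success event."""
    if event["status"] != "success":
        raise ValueError(event.get("message", "unexpected event"))
    # Each entry is a ReceivedMessage rendered as a dict; "ack_id" is assumed to be the
    # snake_case field name produced by ReceivedMessage.to_dict().
    return [received["ack_id"] for received in event["message"]]

print(ack_ids_from_event({"status": "success", "message": [{"ack_id": "abc123", "message": {}}]}))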

airflow/providers/google/common/hooks/base_google.py
@@ -50,12 +50,12 @@ from requests import Session
 
 from airflow import version
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers.google.cloud.utils.credentials_provider import (
     _get_scopes,
     _get_target_principal_and_delegates,
     get_credentials_and_project_id,
 )
-from airflow.providers.google.version_compat import BaseHook
 from airflow.utils.process_utils import patch_environ
 
 if TYPE_CHECKING:

airflow/providers/google/get_provider_info.py
@@ -472,6 +472,12 @@ def get_provider_info():
                 ],
                 "tags": ["gcp"],
             },
+            {
+                "integration-name": "Google Cloud Generative AI",
+                "external-doc-url": "https://cloud.google.com/generative-ai-studio",
+                "how-to-guide": ["/docs/apache-airflow-providers-google/operators/cloud/gen_ai.rst"],
+                "tags": ["gcp"],
+            },
         ],
         "operators": [
             {
@@ -693,6 +699,10 @@ def get_provider_info():
                 "integration-name": "Google Cloud Logging Sink",
                 "python-modules": ["airflow.providers.google.cloud.operators.cloud_logging_sink"],
             },
+            {
+                "integration-name": "Google Cloud Generative AI",
+                "python-modules": ["airflow.providers.google.cloud.operators.gen_ai"],
+            },
         ],
         "sensors": [
             {
@@ -1057,6 +1067,10 @@ def get_provider_info():
                 "integration-name": "Google Cloud Logging",
                 "python-modules": ["airflow.providers.google.cloud.hooks.cloud_logging"],
             },
+            {
+                "integration-name": "Google Cloud Generative AI",
+                "python-modules": ["airflow.providers.google.cloud.hooks.gen_ai"],
+            },
         ],
         "triggers": [
             {
@@ -1408,6 +1422,7 @@ def get_provider_info():
             "airflow.providers.google.cloud.links.compute.ComputeInstanceDetailsLink",
             "airflow.providers.google.cloud.links.compute.ComputeInstanceTemplateDetailsLink",
             "airflow.providers.google.cloud.links.compute.ComputeInstanceGroupManagerDetailsLink",
+            "airflow.providers.google.cloud.links.cloud_run.CloudRunJobLoggingLink",
             "airflow.providers.google.cloud.links.cloud_tasks.CloudTasksQueueLink",
             "airflow.providers.google.cloud.links.cloud_tasks.CloudTasksLink",
             "airflow.providers.google.cloud.links.dataproc.DataprocLink",

airflow/providers/google/leveldb/hooks/leveldb.py
@@ -21,7 +21,7 @@ from __future__ import annotations
 from typing import Any
 
 from airflow.exceptions import AirflowException, AirflowOptionalProviderFeatureException
-from airflow.providers.google.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 
 try:
     import plyvel

airflow/providers/google/marketing_platform/links/analytics_admin.py
@@ -18,19 +18,13 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, ClassVar
 
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom
 
 if TYPE_CHECKING:
     from airflow.models.taskinstancekey import TaskInstanceKey
+    from airflow.providers.google.version_compat import BaseOperator
     from airflow.utils.context import Context
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperatorLink
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models import XCom
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-
 
 
 BASE_LINK = "https://analytics.google.com/analytics/web/"

airflow/providers/google/marketing_platform/sensors/campaign_manager.py
@@ -22,13 +22,8 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.marketing_platform.hooks.campaign_manager import GoogleCampaignManagerHook
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

airflow/providers/google/marketing_platform/sensors/display_video.py
@@ -22,13 +22,8 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.marketing_platform.hooks.display_video import GoogleDisplayVideo360Hook
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

airflow/providers/google/suite/sensors/drive.py
@@ -22,13 +22,8 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.google.suite.hooks.drive import GoogleDriveHook
-from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context

airflow/providers/google/version_compat.py
@@ -47,30 +47,10 @@ else:
     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
     from airflow.models import BaseOperator
 
-# Other SDK components: Available since 3.0+
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import (
-        BaseOperatorLink,
-        BaseSensorOperator,
-        PokeReturnValue,
-    )
-else:
-    from airflow.models import BaseOperatorLink
-    from airflow.sensors.base import BaseSensorOperator, PokeReturnValue  # type: ignore[no-redef]
-
-try:
-    from airflow.sdk.execution_time.timeout import timeout
-except ImportError:
-    from airflow.utils.timeout import timeout  # type: ignore[assignment,attr-defined,no-redef]
-
 # Explicitly export these imports to protect them from being removed by linters
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
     "BaseHook",
     "BaseOperator",
-    "BaseSensorOperator",
-    "BaseOperatorLink",
-    "PokeReturnValue",
-    "timeout",
 ]
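
version_compat drops BaseSensorOperator, BaseOperatorLink, PokeReturnValue, and timeout from its exports, and the modules that needed them now import from the common-compat provider, the pattern visible in the hooks, links, and sensors hunks above. A consolidated migration sketch limited to the symbols this diff actually imports from the new location, assuming apache-airflow-providers-common-compat>=1.8.0 as raised in METADATA below:

# Old pattern removed throughout this diff:
#     from airflow.providers.google.version_compat import AIRFLOW_V_3_0_PLUS
#     if AIRFLOW_V_3_0_PLUS:
#         from airflow.sdk import BaseSensorOperator
#     else:
#         from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
# New single import, as used in the hooks, links, and sensors hunks above:
from airflow.providers.common.compat.sdk import (
    BaseHook,            # e.g. common/hooks/base_google.py, leveldb/hooks/leveldb.py
    BaseOperatorLink,    # e.g. marketing_platform/links/analytics_admin.py
    BaseSensorOperator,  # e.g. marketing_platform and suite sensors
    XCom,                # imported alongside BaseOperatorLink in analytics_admin.py
)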

{apache_airflow_providers_google-18.0.0rc1.dist-info → apache_airflow_providers_google-18.1.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-google
-Version: 18.0.0rc1
+Version: 18.1.0
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0rc1
-Requires-Dist: apache-airflow-providers-common-compat>=1.4.0rc1
-Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
+Requires-Dist: apache-airflow>=2.10.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: dill>=0.2.3
 Requires-Dist: gcloud-aio-auth>=5.2.0
@@ -92,20 +92,20 @@ Requires-Dist: sqlalchemy-spanner>=1.6.2
 Requires-Dist: tenacity>=8.3.0
 Requires-Dist: immutabledict>=4.2.0
 Requires-Dist: types-protobuf!=5.29.1.20250402,>=5.27.0
-Requires-Dist: apache-airflow-providers-amazon>=2.6.0rc1 ; extra == "amazon"
+Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
 Requires-Dist: apache-beam[gcp]>=2.53.0 ; extra == "apache-beam" and ( python_version < "3.12")
 Requires-Dist: apache-beam[gcp]>=2.57.0 ; extra == "apache-beam" and ( python_version >= "3.12" and python_version < "3.13")
 Requires-Dist: apache-airflow-providers-apache-cassandra ; extra == "apache-cassandra"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=10.1.0rc1 ; extra == "cncf-kubernetes"
-Requires-Dist: apache-airflow-providers-fab>=2.0.0rc1 ; extra == "fab" and ( python_version < '3.13')
-Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc1 ; extra == "facebook"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=10.1.0 ; extra == "cncf-kubernetes"
+Requires-Dist: apache-airflow-providers-fab>=2.0.0 ; extra == "fab" and ( python_version < '3.13')
+Requires-Dist: apache-airflow-providers-facebook>=2.2.0 ; extra == "facebook"
 Requires-Dist: apache-airflow-providers-http ; extra == "http"
 Requires-Dist: plyvel>=1.5.1 ; extra == "leveldb" and ( python_version < '3.13')
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
-Requires-Dist: apache-airflow-providers-oracle>=3.1.0rc1 ; extra == "oracle"
+Requires-Dist: apache-airflow-providers-oracle>=3.1.0 ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-postgres ; extra == "postgres"
 Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
@@ -113,8 +113,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-google/18.0.0/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-google/18.0.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/18.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/18.1.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -164,7 +164,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``18.0.0``
+Release: ``18.1.0``
 
 
 Google services including:
@@ -184,7 +184,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/18.0.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/18.1.0/>`_.
 
 Installation
 ------------
@@ -202,7 +202,7 @@ Requirements
 PIP package                                Version required
 ========================================== ======================================
 ``apache-airflow``                         ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.4.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-common-sql``    ``>=1.27.0``
 ``asgiref``                                ``>=3.5.2``
 ``dill``                                   ``>=0.2.3``
@@ -340,5 +340,5 @@ Extra Dependencies
 ==================== =========================================================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/18.0.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/18.1.0/changelog.html>`_.
 