apache-airflow-providers-google 17.0.0__py3-none-any.whl → 17.1.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "17.0.0"
+__version__ = "17.1.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -55,7 +55,7 @@ if TYPE_CHECKING:
     from google.cloud.aiplatform_v1.services.pipeline_service.pagers import (
         ListTrainingPipelinesPager,
     )
-    from google.cloud.aiplatform_v1.types import CustomJob, TrainingPipeline
+    from google.cloud.aiplatform_v1.types import CustomJob, PscInterfaceConfig, TrainingPipeline
 
 
 class CustomJobHook(GoogleBaseHook, OperationHelper):
@@ -317,6 +317,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         is_default_version: bool | None = None,
         model_version_aliases: list[str] | None = None,
         model_version_description: str | None = None,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> tuple[models.Model | None, str, str]:
         """Run a training pipeline job and wait until its completion."""
         model = job.run(
@@ -350,6 +351,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             is_default_version=is_default_version,
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
+            psc_interface_config=psc_interface_config,
         )
         training_id = self.extract_training_id(job.resource_name)
         custom_job_id = self.extract_custom_job_id(
@@ -574,6 +576,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         timestamp_split_column_name: str | None = None,
         tensorboard: str | None = None,
         sync=True,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> tuple[models.Model | None, str, str]:
         """
         Create Custom Container Training Job.
@@ -837,6 +840,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         :param sync: Whether to execute the AI Platform job synchronously. If False, this method
             will be executed in concurrent Future and any downstream object will
             be immediately returned and synced when the Future has completed.
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_container_training_job(
             project=project_id,
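
The hunks in this file thread the new ``psc_interface_config`` argument from the hook's public methods down into aiplatform's ``job.run()``. A rough usage sketch against the hook, assuming ``PscInterfaceConfig`` exposes a ``network_attachment`` field (the field name comes from the ``aiplatform_v1`` message, not from this diff) and using placeholder project and resource names throughout:

    from google.cloud.aiplatform_v1.types import PscInterfaceConfig

    from airflow.providers.google.cloud.hooks.vertex_ai.custom_job import CustomJobHook

    # All resource names below are hypothetical placeholders.
    psc_config = PscInterfaceConfig(
        network_attachment="projects/my-project/regions/us-central1/networkAttachments/my-attachment",
    )
    hook = CustomJobHook(gcp_conn_id="google_cloud_default")
    model, training_id, custom_job_id = hook.create_custom_container_training_job(
        project_id="my-project",
        region="us-central1",
        display_name="psc-container-training",
        container_uri="us-docker.pkg.dev/my-project/my-repo/trainer:latest",
        psc_interface_config=psc_config,
    )
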
@@ -896,6 +901,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             is_default_version=is_default_version,
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
+            psc_interface_config=psc_interface_config,
         )
 
         return model, training_id, custom_job_id
@@ -958,6 +964,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         model_version_aliases: list[str] | None = None,
         model_version_description: str | None = None,
         sync=True,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> tuple[models.Model | None, str, str]:
         """
         Create Custom Python Package Training Job.
@@ -1220,6 +1227,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         :param sync: Whether to execute the AI Platform job synchronously. If False, this method
             will be executed in concurrent Future and any downstream object will
             be immediately returned and synced when the Future has completed.
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_python_package_training_job(
             project=project_id,
@@ -1280,6 +1289,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             is_default_version=is_default_version,
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
+            psc_interface_config=psc_interface_config,
         )
 
         return model, training_id, custom_job_id
@@ -1342,6 +1352,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         timestamp_split_column_name: str | None = None,
         tensorboard: str | None = None,
         sync=True,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> tuple[models.Model | None, str, str]:
         """
         Create Custom Training Job.
@@ -1604,6 +1615,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         :param sync: Whether to execute the AI Platform job synchronously. If False, this method
             will be executed in concurrent Future and any downstream object will
             be immediately returned and synced when the Future has completed.
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_training_job(
             project=project_id,
@@ -1664,6 +1677,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             is_default_version=is_default_version,
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
+            psc_interface_config=psc_interface_config,
         )
 
         return model, training_id, custom_job_id
@@ -1725,6 +1739,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         predefined_split_column_name: str | None = None,
         timestamp_split_column_name: str | None = None,
         tensorboard: str | None = None,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> CustomContainerTrainingJob:
         """
         Create and submit a Custom Container Training Job pipeline, then exit without waiting for it to complete.
@@ -1985,6 +2000,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
             For more information on configuring your service account please visit:
             https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_container_training_job(
             project=project_id,
@@ -2043,6 +2060,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
             sync=False,
+            psc_interface_config=psc_interface_config,
         )
         return self._job
 
@@ -2104,6 +2122,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         is_default_version: bool | None = None,
         model_version_aliases: list[str] | None = None,
         model_version_description: str | None = None,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> CustomPythonPackageTrainingJob:
         """
         Create and submit a Custom Python Package Training Job pipeline, then exit without waiting for it to complete.
@@ -2363,6 +2382,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
             For more information on configuring your service account please visit:
             https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_python_package_training_job(
             project=project_id,
@@ -2422,6 +2443,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
             sync=False,
+            psc_interface_config=psc_interface_config,
         )
 
         return self._job
@@ -2484,6 +2506,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
         predefined_split_column_name: str | None = None,
         timestamp_split_column_name: str | None = None,
         tensorboard: str | None = None,
+        psc_interface_config: PscInterfaceConfig | None = None,
     ) -> CustomTrainingJob:
         """
         Create and submit a Custom Training Job pipeline, then exit without waiting for it to complete.
@@ -2747,6 +2770,8 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
             For more information on configuring your service account please visit:
             https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+        :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+            training.
         """
         self._job = self.get_custom_training_job(
             project=project_id,
@@ -2806,6 +2831,7 @@ class CustomJobHook(GoogleBaseHook, OperationHelper):
             model_version_aliases=model_version_aliases,
             model_version_description=model_version_description,
             sync=False,
+            psc_interface_config=psc_interface_config,
         )
         return self._job
 
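
The ``submit_*`` variants above forward the same argument with ``sync=False`` and return the job object immediately. Continuing the hook sketch from earlier (same placeholder values; the ``submit_custom_container_training_job`` method name is an assumption consistent with the return types and docstrings in these hunks):

    # Non-blocking: returns a CustomContainerTrainingJob right away.
    job = hook.submit_custom_container_training_job(
        project_id="my-project",
        region="us-central1",
        display_name="psc-container-training",
        container_uri="us-docker.pkg.dev/my-project/my-repo/trainer:latest",
        psc_interface_config=psc_config,
    )
    job.wait_for_resource_creation()  # the same pattern the operators use below
    training_pipeline_id = job.name
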
@@ -20,19 +20,10 @@
 from __future__ import annotations
 
 import dataclasses
+from collections.abc import MutableMapping
 from typing import Any
 
-from airflow.exceptions import AirflowOptionalProviderFeatureException
-
-try:
-    import vertex_ray
-    from google._upb._message import ScalarMapContainer  # type: ignore[attr-defined]
-except ImportError:
-    # Fallback for environments where the upb module is not available.
-    raise AirflowOptionalProviderFeatureException(
-        "google._upb._message.ScalarMapContainer is not available. "
-        "Please install the ray package to use this feature."
-    )
+import vertex_ray
 from google.cloud import aiplatform
 from google.cloud.aiplatform.vertex_ray.util import resources
 from google.cloud.aiplatform_v1 import (
@@ -59,7 +50,7 @@ class RayHook(GoogleBaseHook):
         def __encode_value(value: Any) -> Any:
             if isinstance(value, (list, Repeated)):
                 return [__encode_value(nested_value) for nested_value in value]
-            if isinstance(value, ScalarMapContainer):
+            if not isinstance(value, dict) and isinstance(value, MutableMapping):
                 return {key: __encode_value(nested_value) for key, nested_value in dict(value).items()}
             if dataclasses.is_dataclass(value):
                 return dataclasses.asdict(value)
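
The rewritten check drops the import of the private ``google._upb._message.ScalarMapContainer`` in favor of duck typing: protobuf map containers behave like ``MutableMapping`` without being plain ``dict``s, so they can be detected without touching protobuf internals. A self-contained sketch of the idea, using a stand-in class rather than a real proto map:

    from collections.abc import MutableMapping

    class FakeMapContainer(MutableMapping):
        """Stand-in for a protobuf map container; illustration only."""

        def __init__(self, data):
            self._data = dict(data)

        def __getitem__(self, key):
            return self._data[key]

        def __setitem__(self, key, value):
            self._data[key] = value

        def __delitem__(self, key):
            del self._data[key]

        def __iter__(self):
            return iter(self._data)

        def __len__(self):
            return len(self._data)

    def encode(value):
        # Mirrors the new check: map-like but not a plain dict.
        if not isinstance(value, dict) and isinstance(value, MutableMapping):
            return {key: encode(nested) for key, nested in dict(value).items()}
        return value

    print(encode(FakeMapContainer({"head_node_type": "n1-standard-4"})))  # becomes a plain dict
    print(encode({"plain": "dict"}))  # falls through the map branch unchanged
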
@@ -51,6 +51,7 @@ if TYPE_CHECKING:
         CustomPythonPackageTrainingJob,
         CustomTrainingJob,
     )
+    from google.cloud.aiplatform_v1.types import PscInterfaceConfig
 
     from airflow.utils.context import Context
 
@@ -110,6 +111,7 @@ class CustomTrainingJobBaseOperator(GoogleCloudBaseOperator):
         predefined_split_column_name: str | None = None,
         timestamp_split_column_name: str | None = None,
         tensorboard: str | None = None,
+        psc_interface_config: PscInterfaceConfig | None = None,
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
         **kwargs,
@@ -166,6 +168,7 @@ class CustomTrainingJobBaseOperator(GoogleCloudBaseOperator):
         self.predefined_split_column_name = predefined_split_column_name
         self.timestamp_split_column_name = timestamp_split_column_name
         self.tensorboard = tensorboard
+        self.psc_interface_config = psc_interface_config
         # END Run param
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
@@ -473,6 +476,8 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
         ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
         For more information on configuring your service account please visit:
         https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+    :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+        training.
     :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
     :param impersonation_chain: Optional service account to impersonate using short-term
         credentials, or chained list of accounts required to get the access_token
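
For DAG authors, the new operator argument is passed straight through to the hook, as the execute hunks below show. A sketch of wiring it into a task; every resource name here is a placeholder, and the ``network_attachment`` field is assumed from the ``aiplatform_v1`` message definition rather than taken from this diff:

    from google.cloud.aiplatform_v1.types import PscInterfaceConfig

    from airflow.providers.google.cloud.operators.vertex_ai.custom_job import (
        CreateCustomContainerTrainingJobOperator,
    )

    create_training_job = CreateCustomContainerTrainingJobOperator(
        task_id="custom_container_training_with_psc",
        project_id="my-project",
        region="us-central1",
        display_name="psc-container-training",
        container_uri="us-docker.pkg.dev/my-project/my-repo/trainer:latest",
        staging_bucket="gs://my-staging-bucket",
        psc_interface_config=PscInterfaceConfig(
            network_attachment="projects/my-project/regions/us-central1/networkAttachments/my-attachment",
        ),
    )
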
@@ -586,6 +591,7 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
             sync=True,
+            psc_interface_config=self.psc_interface_config,
         )
 
         if model:
@@ -652,6 +658,7 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
             predefined_split_column_name=self.predefined_split_column_name,
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
+            psc_interface_config=self.psc_interface_config,
         )
         custom_container_training_job_obj.wait_for_resource_creation()
         training_pipeline_id: str = custom_container_training_job_obj.name
@@ -931,6 +938,8 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator
         ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
         For more information on configuring your service account please visit:
         https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+    :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+        training.
     :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
     :param impersonation_chain: Optional service account to impersonate using short-term
         credentials, or chained list of accounts required to get the access_token
@@ -1043,6 +1052,7 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
             sync=True,
+            psc_interface_config=self.psc_interface_config,
         )
 
         if model:
@@ -1110,6 +1120,7 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator
             predefined_split_column_name=self.predefined_split_column_name,
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
+            psc_interface_config=self.psc_interface_config,
         )
         custom_python_training_job_obj.wait_for_resource_creation()
         training_pipeline_id: str = custom_python_training_job_obj.name
@@ -1389,6 +1400,8 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
         ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
         For more information on configuring your service account please visit:
         https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-training
+    :param psc_interface_config: Optional. Configuration for Private Service Connect interface used for
+        training.
     :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
     :param impersonation_chain: Optional service account to impersonate using short-term
         credentials, or chained list of accounts required to get the access_token
@@ -1506,6 +1519,7 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
             sync=True,
+            psc_interface_config=None,
         )
 
         if model:
@@ -1573,6 +1587,7 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
             predefined_split_column_name=self.predefined_split_column_name,
             timestamp_split_column_name=self.timestamp_split_column_name,
             tensorboard=self.tensorboard,
+            psc_interface_config=self.psc_interface_config,
         )
         custom_training_job_obj.wait_for_resource_creation()
         training_pipeline_id: str = custom_training_job_obj.name
@@ -410,7 +410,7 @@ class ImportDataOperator(GoogleCloudBaseOperator, DatasetImportDataResultsCheckH
         )
         initial_dataset_size = self._get_number_of_ds_items(
             dataset=hook.get_dataset(
-                dataset_id=self.dataset_id,
+                dataset=self.dataset_id,
                 project_id=self.project_id,
                 region=self.region,
                 retry=self.retry,
@@ -432,7 +432,7 @@ class ImportDataOperator(GoogleCloudBaseOperator, DatasetImportDataResultsCheckH
         hook.wait_for_operation(timeout=self.timeout, operation=operation)
         result_dataset_size = self._get_number_of_ds_items(
             dataset=hook.get_dataset(
-                dataset_id=self.dataset_id,
+                dataset=self.dataset_id,
                 project_id=self.project_id,
                 region=self.region,
                 retry=self.retry,
@@ -282,7 +282,7 @@ class GetRayClusterOperator(RayBaseOperator):
                 location=self.location,
                 cluster_id=self.cluster_id,
             )
-            self.log.info("Cluster was gotten.")
+            self.log.info("Cluster data has been retrieved.")
            ray_cluster_dict = self.hook.serialize_cluster_obj(ray_cluster)
             return ray_cluster_dict
         except NotFound as not_found_err:
@@ -21,6 +21,9 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
+from psycopg2.extensions import register_adapter
+from psycopg2.extras import Json
+
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook
 from airflow.providers.google.cloud.transfers.bigquery_to_sql import BigQueryToSqlBaseOperator
 from airflow.providers.google.cloud.utils.bigquery_get_data import bigquery_get_data
@@ -76,6 +79,8 @@ class BigQueryToPostgresOperator(BigQueryToSqlBaseOperator):
         self.replace_index = replace_index
 
     def get_sql_hook(self) -> PostgresHook:
+        register_adapter(list, Json)
+        register_adapter(dict, Json)
         return PostgresHook(database=self.database, postgres_conn_id=self.postgres_conn_id)
 
     def execute(self, context: Context) -> None:
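
Registering ``Json`` adapters inside ``get_sql_hook`` makes psycopg2 render Python ``list`` and ``dict`` values (for example, BigQuery ARRAY and STRUCT columns) as JSON literals instead of failing on ``dict`` or falling back to Postgres array syntax for ``list``. A minimal standalone demonstration of the effect:

    from psycopg2.extensions import adapt, register_adapter
    from psycopg2.extras import Json

    register_adapter(list, Json)
    register_adapter(dict, Json)

    # Both containers now quote as JSON literals suitable for json/jsonb columns.
    print(adapt({"a": 1}).getquoted())   # e.g. b'\'{"a": 1}\''
    print(adapt([1, 2, 3]).getquoted())  # e.g. b"'[1, 2, 3]'"
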
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-google
-Version: 17.0.0
+Version: 17.1.0rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.4.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.27.0
+Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.4.0rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.27.0rc1
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: dill>=0.2.3
 Requires-Dist: gcloud-aio-auth>=5.2.0
@@ -35,7 +35,7 @@ Requires-Dist: google-api-core>=2.11.0,!=2.16.0,!=2.18.0
 Requires-Dist: google-api-python-client>=2.0.2
 Requires-Dist: google-auth>=2.29.0
 Requires-Dist: google-auth-httplib2>=0.0.1
-Requires-Dist: google-cloud-aiplatform[evaluation]>=1.73.0
+Requires-Dist: google-cloud-aiplatform[evaluation]>=1.98.0
 Requires-Dist: ray[default]>=2.42.0 ; python_version < '3.13'
 Requires-Dist: google-cloud-bigquery-storage>=2.31.0 ; python_version < '3.13'
 Requires-Dist: google-cloud-alloydb>=0.4.0
@@ -92,20 +92,20 @@ Requires-Dist: sqlalchemy-spanner>=1.6.2
 Requires-Dist: tenacity>=8.3.0
 Requires-Dist: immutabledict>=4.2.0
 Requires-Dist: types-protobuf!=5.29.1.20250402,>=5.27.0
-Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
+Requires-Dist: apache-airflow-providers-amazon>=2.6.0rc1 ; extra == "amazon"
 Requires-Dist: apache-beam[gcp]>=2.53.0 ; extra == "apache-beam" and ( python_version < "3.12")
 Requires-Dist: apache-beam[gcp]>=2.57.0 ; extra == "apache-beam" and ( python_version >= "3.12" and python_version < "3.13")
 Requires-Dist: apache-airflow-providers-apache-cassandra ; extra == "apache-cassandra"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=10.1.0 ; extra == "cncf-kubernetes"
-Requires-Dist: apache-airflow-providers-fab>=2.0.0 ; extra == "fab" and ( python_version < '3.13')
-Requires-Dist: apache-airflow-providers-facebook>=2.2.0 ; extra == "facebook"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=10.1.0rc1 ; extra == "cncf-kubernetes"
+Requires-Dist: apache-airflow-providers-fab>=2.0.0rc1 ; extra == "fab" and ( python_version < '3.13')
+Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc1 ; extra == "facebook"
 Requires-Dist: apache-airflow-providers-http ; extra == "http"
 Requires-Dist: plyvel>=1.5.1 ; extra == "leveldb" and ( python_version < '3.13')
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
-Requires-Dist: apache-airflow-providers-oracle>=3.1.0 ; extra == "oracle"
+Requires-Dist: apache-airflow-providers-oracle>=3.1.0rc1 ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-postgres ; extra == "postgres"
 Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
@@ -113,8 +113,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/17.0.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/17.0.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-google/17.1.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-google/17.1.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -164,7 +164,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``17.0.0``
+Release: ``17.1.0``
 
 Release Date: ``|PypiReleaseDate|``
 
@@ -185,7 +185,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/17.0.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/17.1.0/>`_.
 
 Installation
 ------------
@@ -217,7 +217,7 @@ PIP package Version required
 ``google-api-python-client``            ``>=2.0.2``
 ``google-auth``                         ``>=2.29.0``
 ``google-auth-httplib2``                ``>=0.0.1``
-``google-cloud-aiplatform[evaluation]`` ``>=1.73.0``
+``google-cloud-aiplatform[evaluation]`` ``>=1.98.0``
 ``ray[default]``                        ``>=2.42.0; python_version < "3.13"``
 ``google-cloud-bigquery-storage``       ``>=2.31.0; python_version < "3.13"``
 ``google-cloud-alloydb``                ``>=0.4.0``
@@ -314,5 +314,5 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/17.0.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/17.1.0/changelog.html>`_.
 
@@ -1,5 +1,5 @@
 airflow/providers/google/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/google/__init__.py,sha256=m8OVv_o6mVRhNl2LhAn9xyU3SWVLGauY-USz9G_Y44Y,1496
+airflow/providers/google/__init__.py,sha256=bku6vesjfs5dKk3IipXVR8ULWLcQshfovQdD8-unKjk,1496
 airflow/providers/google/get_provider_info.py,sha256=gwx-vFIrbTcnHl0DFKUe6c_m6FbjXN_X2SZNCIVIHiw,80760
 airflow/providers/google/go_module_utils.py,sha256=XVM-IGME6CPgJA8fgDgkusFc4fz3lEghZaZ4elBkv7s,1780
 airflow/providers/google/version_compat.py,sha256=bxao2eaOV79xdn4hiwpHxd_NcjOiPjz--l8LvkA-t_E,2594
@@ -73,7 +73,7 @@ airflow/providers/google/cloud/hooks/workflows.py,sha256=KT3TAJG7qkjUFh-SmNs35hc
 airflow/providers/google/cloud/hooks/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py,sha256=YdOPdM7fkWy54H28JcxsAzrYo3ekOuhD918nIJMxEzo,86733
 airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py,sha256=KIjdycRIx08qL-U7wqiETAzU0cPM-_wGpUQkZYhwTHA,34280
-airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py,sha256=SWpJRvCVObt7lP0trSqf-YXOzxAxYIM5ZgRkNGg025Q,184903
+airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py,sha256=EL7TOJGi9_hbs20j9Ogf7VFBa04Ua04-v-3Gd-N7Ge8,186536
 airflow/providers/google/cloud/hooks/vertex_ai/dataset.py,sha256=IJ7LkZ9sGBM0g_mYns7hS5CTvqIJ2luxfYrmvInftvk,18079
 airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py,sha256=2RTXqIYooW_kmAjVLACyh0Qnyn4NstOuJouiB67s-TU,15553
 airflow/providers/google/cloud/hooks/vertex_ai/experiment_service.py,sha256=fOvcx_ivtLafa27a6NNhgd-ar5Sc9H3mQ9wnCt8SY04,9012
@@ -83,7 +83,7 @@ airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py,sha2
 airflow/providers/google/cloud/hooks/vertex_ai/model_service.py,sha256=u8smUqa1rhTfKWyOt2Xq4X3W7T2uvnfbXc97mybKbRQ,17867
 airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py,sha256=72uuTgF_k84ebYB5U73SKwYlX4jhlFaLss_V0oMHC8U,28837
 airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py,sha256=U4hTr1dJZ7PB2aV-lq75jooy8wNO4LxjLdNNqU9qgSg,4290
-airflow/providers/google/cloud/hooks/vertex_ai/ray.py,sha256=mSdMlvC6-Fch2k7jQ3mvHuZDp4-ckjlR07a6ngUl-JI,10738
+airflow/providers/google/cloud/hooks/vertex_ai/ray.py,sha256=lfnEuyz2GoqZVaHL0pP-FOA5d-iebOgoJwlLjm2VZUg,10363
 airflow/providers/google/cloud/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/links/alloy_db.py,sha256=VckfK0CwZkbVhluhpB_Z-YS2DtwLBjBburVpOkegcWI,1910
 airflow/providers/google/cloud/links/base.py,sha256=1IyAHmQ72lKi7Fim-CIwlQCGs59nkTf5yeqO4tq2wcw,4217
@@ -171,8 +171,8 @@ airflow/providers/google/cloud/operators/workflows.py,sha256=jkwuHUOxy9oVA2yvlYB
 airflow/providers/google/cloud/operators/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=_2Hl_FKU5_HA_acSUY5nq3feTxuuAQk_F0u14li6JOQ,28524
 airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=tqAAT72v6982yi_HI9XBT80uZCyWiU_i_2toErEe5-M,28756
-airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=uIVcjl14_8ehD4tvyveckYH8QE8t_oyJT8RsUUdTInY,98355
-airflow/providers/google/cloud/operators/vertex_ai/dataset.py,sha256=iFK_TjJa4H81He3jD8B4ti_IGlsCR9dMfFOecpIosr0,25422
+airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=ISXNy7Nq1eDrahlkBzes-pQmOxhUvbMbaoOxhr28jlk,99249
+airflow/providers/google/cloud/operators/vertex_ai/dataset.py,sha256=wpFkThoIKH8jD-1I-v9VNWDTnUt2PTobJTJFysxvxBU,25416
 airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py,sha256=eXqMonQgATxw7feS3CFYWjILmHgP7L1niNL8HgHRgrU,27496
 airflow/providers/google/cloud/operators/vertex_ai/experiment_service.py,sha256=3Axeoc3UaxQacl6jBXspym79BI9frsBFuLchSCHGbXU,18297
 airflow/providers/google/cloud/operators/vertex_ai/feature_store.py,sha256=3mS__ygqgCBLSytHJmsiNkPqlWU5ALbiCaB85iiHX3E,31509
@@ -180,7 +180,7 @@ airflow/providers/google/cloud/operators/vertex_ai/generative_model.py,sha256=ZQ
 airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py,sha256=oikwiN_X_7PfEzYpMXI4ZvL_oRxCMkk5BU6_QxGhH5Q,25089
 airflow/providers/google/cloud/operators/vertex_ai/model_service.py,sha256=zzhTD-diafDs7OVJxvftV-1c_vopSiCTtsXCeCLH-l0,36344
 airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py,sha256=idza_9gybMjyvH9xL7LT8qxavPqTxnbMk1P05UV7RQI,23526
-airflow/providers/google/cloud/operators/vertex_ai/ray.py,sha256=66YVNSHapKDetvAmDCZUOSUCopkaF-vkQ9vNIw52FvU,18233
+airflow/providers/google/cloud/operators/vertex_ai/ray.py,sha256=d537sOtnLcBaCgm4UZ-mFMCTev_O3-vQFC23zqObno8,18246
 airflow/providers/google/cloud/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/secrets/secret_manager.py,sha256=u4JUHgLi662rLQAJ4hhL6kpLun0apew8uwZVLGhH0oU,8364
 airflow/providers/google/cloud/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -211,7 +211,7 @@ airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py,sha256=zXdiw8aV
 airflow/providers/google/cloud/transfers/bigquery_to_gcs.py,sha256=ngX8hNYCmlDtiY5UoyoGwH-iUJpDiY9O4ADKccVURb4,15183
 airflow/providers/google/cloud/transfers/bigquery_to_mssql.py,sha256=0_PpzLlXTw87Ab-0qr3fpiL4tkXhZShCBPQ0MuQ_G0I,4159
 airflow/providers/google/cloud/transfers/bigquery_to_mysql.py,sha256=Ay-lo6CYXkSQ8Tk4M4BNjtJcbzMNUG5pJEe8AplyDx8,3209
-airflow/providers/google/cloud/transfers/bigquery_to_postgres.py,sha256=8A3gJq-ZxO0Vw3UTyAIZU1dZC31bgFQAq5_-UUjbpP4,4661
+airflow/providers/google/cloud/transfers/bigquery_to_postgres.py,sha256=hUs2wTaV3GVbQat42A_Qwo81-9bmIsYXLAtgJtiFydk,4818
 airflow/providers/google/cloud/transfers/bigquery_to_sql.py,sha256=SXeHSxOBymEEaTCDmFjuNnxLX_8lD08zj5cFPFn2v_Q,5639
 airflow/providers/google/cloud/transfers/calendar_to_gcs.py,sha256=FMtma56F1KRhZ80rYTKj3sBv29t629wrG0jNJcOKW1U,8451
 airflow/providers/google/cloud/transfers/cassandra_to_gcs.py,sha256=-sjlYt9cXelUJfMaYPIFo1C5lPj8ZBDWpir1rC29Jus,15766
@@ -319,7 +319,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=7DhFVabt4vzWcgZ
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=6GUJY8Ul0kYVqzOQG5dsSPogU-ZKCvjr9I_7yrQBCi0,4028
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=eUkgH5-LRAtsAhymLxuRLd4lwpxJDFaPX1hLVg730Kk,5819
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=XFc95mXTGlAP9FbcYxOvjHWgiFN03skxXxAUGMqZXcE,4909
-apache_airflow_providers_google-17.0.0.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
-apache_airflow_providers_google-17.0.0.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_google-17.0.0.dist-info/METADATA,sha256=PPy0pJrEyUJ5VSbWkWogTdqM7KVJRnVDVDTqSj5XXAY,17393
-apache_airflow_providers_google-17.0.0.dist-info/RECORD,,
+apache_airflow_providers_google-17.1.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-17.1.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_google-17.1.0rc1.dist-info/METADATA,sha256=dgOvS9hG4aDZq9eXXQZo1wWY7kdhYZQyh7_ZVsJMzZU,17434
+apache_airflow_providers_google-17.1.0rc1.dist-info/RECORD,,