apache-airflow-providers-google 10.18.0__py3-none-any.whl → 10.18.0rc1__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
@@ -17,20 +17,13 @@
 from __future__ import annotations
 
 import asyncio
-from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence, SupportsAbs
+from typing import Any, AsyncIterator, Sequence, SupportsAbs
 
 from aiohttp import ClientSession
 from aiohttp.client_exceptions import ClientResponseError
 
-from airflow.exceptions import AirflowException
-from airflow.models.taskinstance import TaskInstance
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryAsyncHook, BigQueryTableAsyncHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
-from airflow.utils.session import provide_session
-from airflow.utils.state import TaskInstanceState
-
-if TYPE_CHECKING:
-    from sqlalchemy.orm.session import Session
 
 
 class BigQueryInsertJobTrigger(BaseTrigger):
@@ -96,36 +89,6 @@ class BigQueryInsertJobTrigger(BaseTrigger):
             },
         )
 
-    @provide_session
-    def get_task_instance(self, session: Session) -> TaskInstance:
-        query = session.query(TaskInstance).filter(
-            TaskInstance.dag_id == self.task_instance.dag_id,
-            TaskInstance.task_id == self.task_instance.task_id,
-            TaskInstance.run_id == self.task_instance.run_id,
-            TaskInstance.map_index == self.task_instance.map_index,
-        )
-        task_instance = query.one_or_none()
-        if task_instance is None:
-            raise AirflowException(
-                "TaskInstance with dag_id: %s, task_id: %s, run_id: %s and map_index: %s is not found",
-                self.task_instance.dag_id,
-                self.task_instance.task_id,
-                self.task_instance.run_id,
-                self.task_instance.map_index,
-            )
-        return task_instance
-
-    def safe_to_cancel(self) -> bool:
-        """
-        Whether it is safe to cancel the external job which is being executed by this trigger.
-
-        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
-        Because in those cases, we should NOT cancel the external job.
-        """
-        # Database query is needed to get the latest state of the task instance.
-        task_instance = self.get_task_instance()  # type: ignore[call-arg]
-        return task_instance.state != TaskInstanceState.DEFERRED
-
     async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Get current job execution status and yields a TriggerEvent."""
        hook = self._get_async_hook()
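
The pair of methods removed in this hunk is what lets the 10.18.0 trigger tell "the Airflow task was killed" apart from "the triggerer is merely restarting or rebalancing". A condensed sketch of the decision it feeds into on cancellation, using the names from the removed lines above (simplified, not the provider's exact code):

    # Condensed sketch of the removed guard; not the provider's exact code.
    from airflow.utils.state import TaskInstanceState

    def should_cancel_external_job(trigger) -> bool:
        # get_task_instance() re-reads the TaskInstance row via a SQLAlchemy session,
        # so the state reflects what the scheduler thinks now, not the snapshot the
        # trigger was created with.
        task_instance = trigger.get_task_instance()
        # While the task is still DEFERRED, the CancelledError came from the trigger
        # itself being stopped, so the BigQuery job must be left running.
        task_was_killed = task_instance.state != TaskInstanceState.DEFERRED
        return bool(trigger.job_id) and trigger.cancel_on_kill and task_was_killed
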
@@ -154,27 +117,13 @@ class BigQueryInsertJobTrigger(BaseTrigger):
                     )
                     await asyncio.sleep(self.poll_interval)
         except asyncio.CancelledError:
-            if self.job_id and self.cancel_on_kill and self.safe_to_cancel():
-                self.log.info(
-                    "The job is safe to cancel the as airflow TaskInstance is not in deferred state."
-                )
-                self.log.info(
-                    "Cancelling job. Project ID: %s, Location: %s, Job ID: %s",
-                    self.project_id,
-                    self.location,
-                    self.job_id,
-                )
+            self.log.info("Task was killed.")
+            if self.job_id and self.cancel_on_kill:
                 await hook.cancel_job(  # type: ignore[union-attr]
                     job_id=self.job_id, project_id=self.project_id, location=self.location
                 )
             else:
-                self.log.info(
-                    "Trigger may have shutdown. Skipping to cancel job because the airflow "
-                    "task is not cancelled yet: Project ID: %s, Location:%s, Job ID:%s",
-                    self.project_id,
-                    self.location,
-                    self.job_id,
-                )
+                self.log.info("Skipping to cancel job: %s:%s.%s", self.project_id, self.location, self.job_id)
         except Exception as e:
             self.log.exception("Exception occurred while checking for query completion")
             yield TriggerEvent({"status": "error", "message": str(e)})
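
Both sides of this hunk rely on the same asyncio mechanism: when the triggerer stops awaiting run(), the pending await inside the coroutine raises asyncio.CancelledError, and that except block is the only place cleanup can happen. A minimal standalone illustration of the mechanism (plain asyncio, not provider code):

    import asyncio

    async def run():
        try:
            while True:
                await asyncio.sleep(4.0)  # stands in for the poll interval
        except asyncio.CancelledError:
            # Raised inside the coroutine when whoever awaits it calls cancel(),
            # e.g. the triggerer shutting down or the task being killed.
            print("cancelled - cleanup would happen here")
            raise

    async def main():
        task = asyncio.create_task(run())
        await asyncio.sleep(0.1)
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    asyncio.run(main())
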
@@ -199,7 +148,6 @@ class BigQueryCheckTrigger(BigQueryInsertJobTrigger):
                 "table_id": self.table_id,
                 "poll_interval": self.poll_interval,
                 "impersonation_chain": self.impersonation_chain,
-                "cancel_on_kill": self.cancel_on_kill,
             },
         )
 
@@ -257,10 +205,9 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
         (default: False).
     """
 
-    def __init__(self, as_dict: bool = False, selected_fields: str | None = None, **kwargs):
+    def __init__(self, as_dict: bool = False, **kwargs):
         super().__init__(**kwargs)
         self.as_dict = as_dict
-        self.selected_fields = selected_fields
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize BigQueryInsertJobTrigger arguments and classpath."""
@@ -276,7 +223,6 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
                 "poll_interval": self.poll_interval,
                 "impersonation_chain": self.impersonation_chain,
                 "as_dict": self.as_dict,
-                "selected_fields": self.selected_fields,
             },
         )
 
@@ -289,11 +235,7 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
                 job_status = await hook.get_job_status(job_id=self.job_id, project_id=self.project_id)
                 if job_status["status"] == "success":
                     query_results = await hook.get_job_output(job_id=self.job_id, project_id=self.project_id)
-                    records = hook.get_records(
-                        query_results=query_results,
-                        as_dict=self.as_dict,
-                        selected_fields=self.selected_fields,
-                    )
+                    records = hook.get_records(query_results=query_results, as_dict=self.as_dict)
                     self.log.debug("Response from hook: %s", job_status["status"])
                     yield TriggerEvent(
                         {
@@ -22,22 +22,16 @@ from __future__ import annotations
 import asyncio
 import re
 import time
-from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence
+from typing import Any, AsyncIterator, Sequence
 
 from google.api_core.exceptions import NotFound
 from google.cloud.dataproc_v1 import Batch, Cluster, ClusterStatus, JobStatus
 
 from airflow.exceptions import AirflowException
-from airflow.models.taskinstance import TaskInstance
 from airflow.providers.google.cloud.hooks.dataproc import DataprocAsyncHook, DataprocHook
 from airflow.providers.google.cloud.utils.dataproc import DataprocOperationType
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.triggers.base import BaseTrigger, TriggerEvent
-from airflow.utils.session import provide_session
-from airflow.utils.state import TaskInstanceState
-
-if TYPE_CHECKING:
-    from sqlalchemy.orm.session import Session
 
 
 class DataprocBaseTrigger(BaseTrigger):
@@ -116,41 +110,6 @@ class DataprocSubmitTrigger(DataprocBaseTrigger):
             },
         )
 
-    @provide_session
-    def get_task_instance(self, session: Session) -> TaskInstance:
-        """
-        Get the task instance for the current task.
-
-        :param session: Sqlalchemy session
-        """
-        query = session.query(TaskInstance).filter(
-            TaskInstance.dag_id == self.task_instance.dag_id,
-            TaskInstance.task_id == self.task_instance.task_id,
-            TaskInstance.run_id == self.task_instance.run_id,
-            TaskInstance.map_index == self.task_instance.map_index,
-        )
-        task_instance = query.one_or_none()
-        if task_instance is None:
-            raise AirflowException(
-                "TaskInstance with dag_id: %s,task_id: %s, run_id: %s and map_index: %s is not found",
-                self.task_instance.dag_id,
-                self.task_instance.task_id,
-                self.task_instance.run_id,
-                self.task_instance.map_index,
-            )
-        return task_instance
-
-    def safe_to_cancel(self) -> bool:
-        """
-        Whether it is safe to cancel the external job which is being executed by this trigger.
-
-        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
-        Because in those cases, we should NOT cancel the external job.
-        """
-        # Database query is needed to get the latest state of the task instance.
-        task_instance = self.get_task_instance()  # type: ignore[call-arg]
-        return task_instance.state != TaskInstanceState.DEFERRED
-
     async def run(self):
         try:
             while True:
@@ -166,11 +125,7 @@ class DataprocSubmitTrigger(DataprocBaseTrigger):
         except asyncio.CancelledError:
             self.log.info("Task got cancelled.")
             try:
-                if self.job_id and self.cancel_on_kill and self.safe_to_cancel():
-                    self.log.info(
-                        "Cancelling the job as it is safe to do so. Note that the airflow TaskInstance is not"
-                        " in deferred state."
-                    )
+                if self.job_id and self.cancel_on_kill:
                     self.log.info("Cancelling the job: %s", self.job_id)
                     # The synchronous hook is utilized to delete the cluster when a task is cancelled. This
                     # is because the asynchronous hook deletion is not awaited when the trigger task is
@@ -223,36 +178,6 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
             },
         )
 
-    @provide_session
-    def get_task_instance(self, session: Session) -> TaskInstance:
-        query = session.query(TaskInstance).filter(
-            TaskInstance.dag_id == self.task_instance.dag_id,
-            TaskInstance.task_id == self.task_instance.task_id,
-            TaskInstance.run_id == self.task_instance.run_id,
-            TaskInstance.map_index == self.task_instance.map_index,
-        )
-        task_instance = query.one_or_none()
-        if task_instance is None:
-            raise AirflowException(
-                "TaskInstance with dag_id: %s,task_id: %s, run_id: %s and map_index: %s is not found.",
-                self.task_instance.dag_id,
-                self.task_instance.task_id,
-                self.task_instance.run_id,
-                self.task_instance.map_index,
-            )
-        return task_instance
-
-    def safe_to_cancel(self) -> bool:
-        """
-        Whether it is safe to cancel the external job which is being executed by this trigger.
-
-        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
-        Because in those cases, we should NOT cancel the external job.
-        """
-        # Database query is needed to get the latest state of the task instance.
-        task_instance = self.get_task_instance()  # type: ignore[call-arg]
-        return task_instance.state != TaskInstanceState.DEFERRED
-
     async def run(self) -> AsyncIterator[TriggerEvent]:
         try:
             while True:
@@ -282,11 +207,7 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
                 await asyncio.sleep(self.polling_interval_seconds)
         except asyncio.CancelledError:
             try:
-                if self.delete_on_error and self.safe_to_cancel():
-                    self.log.info(
-                        "Deleting the cluster as it is safe to delete as the airflow TaskInstance is not in "
-                        "deferred state."
-                    )
+                if self.delete_on_error:
                     self.log.info("Deleting cluster %s.", self.cluster_name)
                     # The synchronous hook is utilized to delete the cluster when a task is cancelled.
                     # This is because the asynchronous hook deletion is not awaited when the trigger task
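
The comment kept on both sides of this hunk points at a general asyncio pitfall: work that is merely scheduled, rather than awaited, while a task is being torn down may never run before the event loop shuts down, which is why the trigger falls back to a synchronous hook for cleanup. A standalone illustration of that pitfall (plain asyncio, not provider code):

    import asyncio

    async def cleanup():
        await asyncio.sleep(0.1)
        print("cleanup finished")  # never printed: the loop shuts down first

    async def worker():
        try:
            await asyncio.sleep(10)
        except asyncio.CancelledError:
            # Fire-and-forget: the cleanup is scheduled but never awaited.
            asyncio.create_task(cleanup())
            raise

    async def main():
        task = asyncio.create_task(worker())
        await asyncio.sleep(0.1)
        task.cancel()
        try:
            await task
        except asyncio.CancelledError:
            pass

    asyncio.run(main())
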
@@ -30,6 +30,7 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
 from airflow.providers.google.cloud.hooks.kubernetes_engine import (
     GKEAsyncHook,
     GKEKubernetesAsyncHook,
+    GKEPodAsyncHook,
 )
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
@@ -146,8 +147,8 @@ class GKEStartPodTrigger(KubernetesPodTrigger):
         )
 
     @cached_property
-    def hook(self) -> GKEKubernetesAsyncHook:  # type: ignore[override]
-        return GKEKubernetesAsyncHook(
+    def hook(self) -> GKEPodAsyncHook:  # type: ignore[override]
+        return GKEPodAsyncHook(
             cluster_url=self._cluster_url,
             ssl_ca_cert=self._ssl_ca_cert,
             gcp_conn_id=self.gcp_conn_id,
@@ -28,7 +28,7 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n - `Google Ads <https://ads.google.com/>`__\n - `Google Cloud (GCP) <https://cloud.google.com/>`__\n - `Google Firebase <https://firebase.google.com/>`__\n - `Google LevelDB <https://github.com/google/leveldb/>`__\n - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch": 1715384437,
+        "source-date-epoch": 1714476421,
         "versions": [
             "10.18.0",
             "10.17.0",
@@ -96,7 +96,7 @@ def get_provider_info():
             "gcsfs>=2023.10.0",
             "google-ads>=23.1.0",
             "google-analytics-admin",
-            "google-api-core>=2.11.0,!=2.16.0,!=2.18.0",
+            "google-api-core>=2.11.0,!=2.16.0",
             "google-api-python-client>=1.6.0",
             "google-auth>=1.0.0",
             "google-auth-httplib2>=0.0.1",
@@ -1147,7 +1147,6 @@ def get_provider_info():
                     "airflow.providers.google.cloud.hooks.vertex_ai.model_service",
                     "airflow.providers.google.cloud.hooks.vertex_ai.pipeline_job",
                     "airflow.providers.google.cloud.hooks.vertex_ai.generative_model",
-                    "airflow.providers.google.cloud.hooks.vertex_ai.prediction_service",
                 ],
             },
             {
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.18.0
+Version: 10.18.0rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyOpenSSL
-Requires-Dist: apache-airflow-providers-common-sql>=1.7.2
-Requires-Dist: apache-airflow>=2.7.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.7.2rc0
+Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: gcloud-aio-auth>=4.0.0,<5.0.0
 Requires-Dist: gcloud-aio-bigquery>=6.1.2
@@ -31,7 +31,7 @@ Requires-Dist: gcloud-aio-storage>=9.0.0
 Requires-Dist: gcsfs>=2023.10.0
 Requires-Dist: google-ads>=23.1.0
 Requires-Dist: google-analytics-admin
-Requires-Dist: google-api-core>=2.11.0,!=2.16.0,!=2.18.0
+Requires-Dist: google-api-core>=2.11.0,!=2.16.0
 Requires-Dist: google-api-python-client>=1.6.0
 Requires-Dist: google-auth-httplib2>=0.0.1
 Requires-Dist: google-auth>=1.0.0
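
Apart from the rc-suffixed pins that the release tooling adds in the surrounding hunks, the substantive dependency change in this diff is the google-api-core requirement: the 10.18.0 side also excludes 2.18.0. A quick way to compare what each pin accepts, assuming the third-party packaging library:

    # Compare the two google-api-core pins shown above
    # (assumes the third-party "packaging" library is installed).
    from packaging.specifiers import SpecifierSet

    pin_10_18_0 = SpecifierSet(">=2.11.0,!=2.16.0,!=2.18.0")
    pin_10_18_0rc1 = SpecifierSet(">=2.11.0,!=2.16.0")

    print("2.18.0" in pin_10_18_0)     # False: excluded by the final release
    print("2.18.0" in pin_10_18_0rc1)  # True: still allowed by the rc
    print("2.16.0" in pin_10_18_0rc1)  # False: excluded by both
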
@@ -82,19 +82,19 @@ Requires-Dist: proto-plus>=1.19.6
 Requires-Dist: python-slugify>=5.0
 Requires-Dist: sqlalchemy-bigquery>=1.2.1
 Requires-Dist: sqlalchemy-spanner>=1.6.2
-Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
+Requires-Dist: apache-airflow-providers-amazon>=2.6.0rc0 ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-apache-beam ; extra == "apache.beam"
 Requires-Dist: apache-beam[gcp] ; extra == "apache.beam"
 Requires-Dist: apache-airflow-providers-apache-cassandra ; extra == "apache.cassandra"
-Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
+Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
-Requires-Dist: apache-airflow-providers-facebook>=2.2.0 ; extra == "facebook"
+Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc0 ; extra == "facebook"
 Requires-Dist: plyvel ; extra == "leveldb"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft.mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
-Requires-Dist: apache-airflow-providers-oracle>=3.1.0 ; extra == "oracle"
+Requires-Dist: apache-airflow-providers-oracle>=3.1.0rc0 ; extra == "oracle"
 Requires-Dist: apache-airflow-providers-postgres ; extra == "postgres"
 Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
@@ -171,7 +171,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.18.0``
+Release: ``10.18.0.rc1``
 
 
 Google services including:
@@ -205,9 +205,9 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 Requirements
 ------------
 
-======================================= ==============================
+======================================= =====================
 PIP package                             Version required
-======================================= ==============================
+======================================= =====================
 ``apache-airflow``                      ``>=2.7.0``
 ``apache-airflow-providers-common-sql`` ``>=1.7.2``
 ``asgiref``                             ``>=3.5.2``
@@ -217,7 +217,7 @@ PIP package Version required
 ``gcsfs``                               ``>=2023.10.0``
 ``google-ads``                          ``>=23.1.0``
 ``google-analytics-admin``
-``google-api-core``                     ``>=2.11.0,!=2.16.0,!=2.18.0``
+``google-api-core``                     ``>=2.11.0,!=2.16.0``
 ``google-api-python-client``            ``>=1.6.0``
 ``google-auth``                         ``>=1.0.0``
 ``google-auth-httplib2``                ``>=0.0.1``
@@ -269,7 +269,7 @@ PIP package Version required
 ``sqlalchemy-bigquery``                 ``>=1.2.1``
 ``sqlalchemy-spanner``                  ``>=1.6.2``
 ``python-slugify``                      ``>=5.0``
-======================================= ==============================
+======================================= =====================
 
 Cross provider package dependencies
 -----------------------------------
@@ -1,6 +1,6 @@
1
1
  airflow/providers/google/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
2
- airflow/providers/google/__init__.py,sha256=-STVmWYvzAtQZziT5ntqECeACWw8HYuYxEWguvYiPuI,1495
3
- airflow/providers/google/get_provider_info.py,sha256=5EIhNOVNxiPbvIwu-Rqa-FoQV-Gzg_R3S2ZxXtSV9pk,81204
2
+ airflow/providers/google/__init__.py,sha256=Vrtzj_Z1J5KlkRA7i0jzg6B4BNHrONelk7-ffad7QE8,1583
3
+ airflow/providers/google/get_provider_info.py,sha256=1PN8UN5rY_3PjklabiSR4l04OrKqqMzhygK7ZMArE2s,81106
4
4
  airflow/providers/google/go_module_utils.py,sha256=QZcIY0BAsy_GlwJpaCDcSRWHueZvkqDCj2ecICn_-gY,1770
5
5
  airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
6
6
  airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -22,8 +22,8 @@ airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py,sha256=
 airflow/providers/google/cloud/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/fs/gcs.py,sha256=fJBGhHEE46_U5Rmbs1W0uenvGhECv13CtVSh3z7pM60,2457
 airflow/providers/google/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/hooks/automl.py,sha256=5fp8vZ96at8jH-a4yYipu4FI3J809b2E6XLCOQVhzmY,28959
-airflow/providers/google/cloud/hooks/bigquery.py,sha256=ivglJpV5jL2la6UhEXC_ABa1q-SyP2-Jbf3aSU8NimE,155228
+airflow/providers/google/cloud/hooks/automl.py,sha256=GD_bF_Ka5z7ZmNTMlSFuRSd4gIl2Y_U-lQRaO5XSDqg,27562
+airflow/providers/google/cloud/hooks/bigquery.py,sha256=5UrfnId7gVlir8V3KhiyDRQ4-qtXZgJVlAKhg3rZ-cg,153023
 airflow/providers/google/cloud/hooks/bigquery_dts.py,sha256=3wLKj-6tQwWphjwKBLGg1rjoXAAknv0WLh6T3MqsNWA,15228
 airflow/providers/google/cloud/hooks/bigtable.py,sha256=wReDIbDyQGP8oZIzg0vsfgD6zrLmY-oYghBNCPVPorw,12580
 airflow/providers/google/cloud/hooks/cloud_batch.py,sha256=FjpR_Av7z8oMnB4Q7S-aPTMO8HZMxAo_1akdHpE7iA8,7809
@@ -77,7 +77,6 @@ airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py,sha256=UqxLJt
 airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py,sha256=KSS2vJ3chTP4_ghrIU6qFDGKDONnhPddYn1ButnQxsI,25051
 airflow/providers/google/cloud/hooks/vertex_ai/model_service.py,sha256=KCqCyxjsxst_2FTAwPGanL3URL3vc50kKlVwmr2maDY,18115
 airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py,sha256=b4PlBV0MqrOUIwR6heuPwiFGFnEHaaduPTV3iibbs_Y,29023
-airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py,sha256=Q-EzmMtE4hjEg_tH-vnE-PcrXTvoQY2QFh77reVH840,4263
 airflow/providers/google/cloud/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/links/automl.py,sha256=tJ0tIYFicB4urFltg4uTp_Yk7ZeaD0SA8Q3GWyeMw4Y,4781
 airflow/providers/google/cloud/links/base.py,sha256=6Y96NArcm-f4Yw1YYqaoRdXfLLWwMH01XDAzIA-a3NA,1660
@@ -112,8 +111,8 @@ airflow/providers/google/cloud/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/google/cloud/log/gcs_task_handler.py,sha256=d2zTPtoQHwdFIXuWbHu0xniA2oor9BEBYBNgJaWeF-0,10386
 airflow/providers/google/cloud/log/stackdriver_task_handler.py,sha256=Z_aHF0_hjGwoDxk4VVtZ1sWTeqOdpSsRqg4yg6eOL80,15637
 airflow/providers/google/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/operators/automl.py,sha256=fBwkwLcKr9sAPCacjdHHQcQrh3KD65oxm1PUIQsbKuI,61727
-airflow/providers/google/cloud/operators/bigquery.py,sha256=dfTzs1ouMYEoC7SXfRIZMeHCoVut17OFl_GbNM2nO1U,129660
+airflow/providers/google/cloud/operators/automl.py,sha256=nWohxjYJHXWtzYN5XbSsd-7jKJhsOlaeYjlEWXmwCXM,52275
+airflow/providers/google/cloud/operators/bigquery.py,sha256=VFnp0a1q7h2_2Na5cJS-cygzC_mkaC2unXhwCvgtTcw,125468
 airflow/providers/google/cloud/operators/bigquery_dts.py,sha256=6VJISM4HoMBQ3EQ5nz3zxFk8tfluGA1d2vcUNUlYLPc,17695
 airflow/providers/google/cloud/operators/bigtable.py,sha256=BnWHnTEscyPbsKWFaSreLr62W68fmHu5loQVZex7LPs,26921
 airflow/providers/google/cloud/operators/cloud_base.py,sha256=Xysh4znvIQIxbQqmfKoaL6O09FikndHrQuKKUnEV7KU,1483
@@ -132,13 +131,13 @@ airflow/providers/google/cloud/operators/datafusion.py,sha256=NZoR65aChdkPUG8bxE
 airflow/providers/google/cloud/operators/datapipeline.py,sha256=UFIhzulL7d9JD3PmIbpYIu1dKVVJrJthVyA-8kkJq0A,6478
 airflow/providers/google/cloud/operators/dataplex.py,sha256=3Xq7ewGIQFH-cf_vbsWkIyuDOMD5ij5EBbTeuFtAtKs,91185
 airflow/providers/google/cloud/operators/dataprep.py,sha256=jTDDgRccd2zIUqGzJebZpbNTJsFdRi5RnMtldXHqiMs,10477
-airflow/providers/google/cloud/operators/dataproc.py,sha256=rN6L8Lybgjh9jmJ7guOSq05irzangEvYTBbvCD-2bJ0,152559
+airflow/providers/google/cloud/operators/dataproc.py,sha256=snjZS33ZMXP2mtvB5FO26ymdjG1qElSIcE3qheNFYSU,152547
 airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=MWkThX_mzef-VTRrxZFn5WPfenZ3F2DNXWWa827nLrw,49532
 airflow/providers/google/cloud/operators/datastore.py,sha256=di00jFy3Z1v0GcmcQ0df8NJ32yxcseOqWuojC4TKdmY,24927
 airflow/providers/google/cloud/operators/dlp.py,sha256=SQCGml0RIKl0UrvXHIUiOskg5ayTj4F5_4k4rztClvM,120742
 airflow/providers/google/cloud/operators/functions.py,sha256=dL5uaYtAWujwvAID_kLsyEsQ-ThFXGrEsg5Tk277FMs,20155
 airflow/providers/google/cloud/operators/gcs.py,sha256=tUNcseFxF2AC-wNjcrngvKqLqhkYNC4gaSFHiCq7eK0,46627
-airflow/providers/google/cloud/operators/kubernetes_engine.py,sha256=prrgsNZN0ZvtdzlnjmrPIPtvwJen7YYp1yHPU3QqVn0,69594
+airflow/providers/google/cloud/operators/kubernetes_engine.py,sha256=FazwWO8goImGQ9SFKC2msxBehL0ssjKiCATk3ABhOwg,69888
 airflow/providers/google/cloud/operators/life_sciences.py,sha256=cQzFWGdwh4yr44j7nfMXdGnPVRkeXwkrj_qdzlchD-w,4816
 airflow/providers/google/cloud/operators/looker.py,sha256=LCbN0vv8y0exwvfHbRXmUtNUZIOlSfljInNZK1zcfrs,4063
 airflow/providers/google/cloud/operators/mlengine.py,sha256=pPoLn7txuR5h2I9prSY26Sc7qfjWRXfAjHplGl3vNK8,62873
@@ -153,7 +152,7 @@ airflow/providers/google/cloud/operators/translate.py,sha256=yYN4IRcRHXllZsChMJd
 airflow/providers/google/cloud/operators/translate_speech.py,sha256=ALehMdOuSspEb-7h9Cr5ml8TYFsUnc8vvHEULpKZxa4,7817
 airflow/providers/google/cloud/operators/video_intelligence.py,sha256=NQvEueDegdpPBSqMkJF_qb9x3WeZT7XJL-yE8Sqlz_U,14165
 airflow/providers/google/cloud/operators/vision.py,sha256=RQrf9-dgRfgYxDceR--WMvc03d2a1zZaOQWzZxjrJYo,67630
-airflow/providers/google/cloud/operators/workflows.py,sha256=fnyWLqRHz0UYu6AnQKKZIMlfSIg_v5nNbZAt6ASe4fI,28977
+airflow/providers/google/cloud/operators/workflows.py,sha256=-YMlO23E2J9sBl9WUcgsHTW8FTySyIfkoG0HE5K1gKI,29108
 airflow/providers/google/cloud/operators/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=Txeaul_QFrAKvEBGUa-AwcDEDtxhKWLNhbXSaBldTQE,30987
 airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=e0oFWmCRH3aQHNckjEf_YO5zP9LqiLVTzB1QTgv3iUo,28828
@@ -216,7 +215,7 @@ airflow/providers/google/cloud/transfers/sheets_to_gcs.py,sha256=G6PdHaYKUEhL7ER
 airflow/providers/google/cloud/transfers/sql_to_gcs.py,sha256=BNVCKLMwxFkyrLU4q7xkD0fg1wdLxQJMLxXCAc05DRM,21771
 airflow/providers/google/cloud/transfers/trino_to_gcs.py,sha256=w8a9JBsxdqiVJePDHF-hpGW5eZWIyop-lbzBYZYabzA,7143
 airflow/providers/google/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/triggers/bigquery.py,sha256=dCw9ihWSBJoFLWb6kWB-cvh0cwsO1pLVAgdTRMhcXSs,34770
+airflow/providers/google/cloud/triggers/bigquery.py,sha256=0-YftqqySCZcRoUbAWHICXGTXKJ_Bup6iDXlHyVF9bk,32162
 airflow/providers/google/cloud/triggers/bigquery_dts.py,sha256=uHqkZPa32sIPBPmO8pqgTsebRS_D_wYIwd6wABwyJo4,6219
 airflow/providers/google/cloud/triggers/cloud_batch.py,sha256=AZDmqsJSaJT3X1Mp8F4uuC7oAgShUoiYIIqaIApqum0,6717
 airflow/providers/google/cloud/triggers/cloud_build.py,sha256=yLmims5zQ3RyIfdY1NOA0qAhwEkTwpvTyT2M3dPb8-M,5628
@@ -227,9 +226,9 @@ airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py,sha256=
 airflow/providers/google/cloud/triggers/dataflow.py,sha256=u6CzIncAU3rWsGKTyNHV4ibYSBTCqqOiFCa7dcWnqzE,28635
 airflow/providers/google/cloud/triggers/datafusion.py,sha256=blqNx4hLHRrLp-FQMCNR3yWmAZ2hCTfql2eyf5XY0P0,5985
 airflow/providers/google/cloud/triggers/dataplex.py,sha256=fEHbvNYgP-6htNkp9b7nmvea_r3Z6CIMfIh57NaWxFA,8334
-airflow/providers/google/cloud/triggers/dataproc.py,sha256=5eCSAkjg6FDL56P_7gvXu_fL0OWHnBrXQc5yg5agifg,24304
+airflow/providers/google/cloud/triggers/dataproc.py,sha256=DAERheg6jxHWBgy9A5K4cIbnL9FJpIyW53TwLneIAZI,20660
 airflow/providers/google/cloud/triggers/gcs.py,sha256=pMjeNOkWHkOyiAxeK-JoyDInUf2VNtefOZxp8K-aNjw,18973
-airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=XnCnGtHaZUEaYS1Yra0PTMovE06d2fcSUCNhIRsuCGc,12328
+airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=SGdJL6wZAkYj_o6KkqW2m_zOxO-J02nfEVkzkQE3lEU,12335
 airflow/providers/google/cloud/triggers/mlengine.py,sha256=qpOa9Gz8FmHDxXvPWrXO3M7snGbRTq92gy6kGafCUiY,5265
 airflow/providers/google/cloud/triggers/pubsub.py,sha256=LbKGL-g6WfeiRhmkItWwl8hRgI3Lr6B3Ib2GNChL4mg,5724
 airflow/providers/google/cloud/triggers/vertex_ai.py,sha256=99ah7Rcc_kYBihDXGcekZvkUz7ZNyXKv7j6R3vddcCE,9927
@@ -304,7 +303,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=CxtVhp3wlEOBtjR
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=4nwXWkTySeBXNuThPxzO7uww_hH6PthpppTeuShn27Q,4363
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=eYCJghA0Ou2vUUvN_wfBQvpt2yzR9RmmeNdPgh2Cbjo,6100
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
-apache_airflow_providers_google-10.18.0.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
-apache_airflow_providers_google-10.18.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_google-10.18.0.dist-info/METADATA,sha256=PbMDKyhE1fTFney0kv-nshS5q9paVfSQhxeCUNLJG2A,15954
-apache_airflow_providers_google-10.18.0.dist-info/RECORD,,
+apache_airflow_providers_google-10.18.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-10.18.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_google-10.18.0rc1.dist-info/METADATA,sha256=rdT5loJM7JcAWARv1m_-eVgQHKeONV4ysrGa3N5YQoQ,15934
+apache_airflow_providers_google-10.18.0rc1.dist-info/RECORD,,
@@ -1,91 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Sequence
-
-from google.api_core.client_options import ClientOptions
-from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
-from google.cloud.aiplatform_v1 import PredictionServiceClient
-
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook
-
-if TYPE_CHECKING:
-    from google.api_core.retry import Retry
-    from google.cloud.aiplatform_v1.types import PredictResponse
-
-
-class PredictionServiceHook(GoogleBaseHook):
-    """Hook for Google Cloud Vertex AI Prediction API."""
-
-    def get_prediction_service_client(self, region: str | None = None) -> PredictionServiceClient:
-        """
-        Return PredictionServiceClient object.
-
-        :param region: The ID of the Google Cloud region that the service belongs to. Default is None.
-
-        :return: `google.cloud.aiplatform_v1.services.prediction_service.client.PredictionServiceClient` instance.
-        """
-        if region and region != "global":
-            client_options = ClientOptions(api_endpoint=f"{region}-aiplatform.googleapis.com:443")
-        else:
-            client_options = ClientOptions()
-
-        return PredictionServiceClient(
-            credentials=self.get_credentials(), client_info=CLIENT_INFO, client_options=client_options
-        )
-
-    @GoogleBaseHook.fallback_to_default_project_id
-    def predict(
-        self,
-        endpoint_id: str,
-        instances: list[str],
-        location: str,
-        project_id: str = PROVIDE_PROJECT_ID,
-        parameters: dict[str, str] | None = None,
-        retry: Retry | _MethodDefault = DEFAULT,
-        timeout: float | None = None,
-        metadata: Sequence[tuple[str, str]] = (),
-    ) -> PredictResponse:
-        """
-        Perform an online prediction and returns the prediction result in the response.
-
-        :param endpoint_id: Name of the endpoint_id requested to serve the prediction.
-        :param instances: Required. The instances that are the input to the prediction call. A DeployedModel
-            may have an upper limit on the number of instances it supports per request, and when it is
-            exceeded the prediction call errors in case of AutoML Models, or, in case of customer created
-            Models, the behaviour is as documented by that Model.
-        :param parameters: Additional domain-specific parameters, any string must be up to 25000 characters long.
-        :param project_id: ID of the Google Cloud project where model is located if None then
-            default project_id is used.
-        :param location: The location of the project.
-        :param retry: A retry object used to retry requests. If `None` is specified, requests will not be
-            retried.
-        :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
-            `retry` is specified, the timeout applies to each individual attempt.
-        :param metadata: Additional metadata that is provided to the method.
-        """
-        client = self.get_prediction_service_client(location)
-        endpoint = f"projects/{project_id}/locations/{location}/endpoints/{endpoint_id}"
-        return client.predict(
-            request={"endpoint": endpoint, "instances": instances, "parameters": parameters},
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
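
The file removed above ships only in the 10.18.0 wheel. A hypothetical usage sketch of the hook it defines, with placeholder project and endpoint values and keyword names taken from the predict() signature shown above:

    # Hypothetical usage; only importable from the 10.18.0 side of this diff.
    from airflow.providers.google.cloud.hooks.vertex_ai.prediction_service import (
        PredictionServiceHook,
    )

    # Placeholder connection, endpoint, region and project values.
    hook = PredictionServiceHook(gcp_conn_id="google_cloud_default")
    response = hook.predict(
        endpoint_id="1234567890",
        instances=['{"prompt": "hello"}'],  # payload format is model-specific
        location="us-central1",
        project_id="example-project",
    )
    print(response.predictions)
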