apache_airflow_providers_google-10.18.0rc1-py3-none-any.whl → apache_airflow_providers_google-10.18.0rc2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +2 -5
- airflow/providers/google/cloud/hooks/automl.py +34 -0
- airflow/providers/google/cloud/hooks/bigquery.py +62 -8
- airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +91 -0
- airflow/providers/google/cloud/operators/automl.py +230 -25
- airflow/providers/google/cloud/operators/bigquery.py +128 -40
- airflow/providers/google/cloud/operators/dataproc.py +1 -1
- airflow/providers/google/cloud/operators/kubernetes_engine.py +24 -37
- airflow/providers/google/cloud/operators/workflows.py +2 -5
- airflow/providers/google/cloud/triggers/bigquery.py +64 -6
- airflow/providers/google/cloud/triggers/dataproc.py +82 -3
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -3
- airflow/providers/google/get_provider_info.py +3 -2
- {apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/METADATA +7 -7
- {apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/RECORD +17 -16
- {apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/triggers/bigquery.py
@@ -17,13 +17,20 @@
 from __future__ import annotations

 import asyncio
-from typing import Any, AsyncIterator, Sequence, SupportsAbs
+from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence, SupportsAbs

 from aiohttp import ClientSession
 from aiohttp.client_exceptions import ClientResponseError

+from airflow.exceptions import AirflowException
+from airflow.models.taskinstance import TaskInstance
 from airflow.providers.google.cloud.hooks.bigquery import BigQueryAsyncHook, BigQueryTableAsyncHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.utils.session import provide_session
+from airflow.utils.state import TaskInstanceState
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session


 class BigQueryInsertJobTrigger(BaseTrigger):
@@ -89,6 +96,36 @@ class BigQueryInsertJobTrigger(BaseTrigger):
             },
         )

+    @provide_session
+    def get_task_instance(self, session: Session) -> TaskInstance:
+        query = session.query(TaskInstance).filter(
+            TaskInstance.dag_id == self.task_instance.dag_id,
+            TaskInstance.task_id == self.task_instance.task_id,
+            TaskInstance.run_id == self.task_instance.run_id,
+            TaskInstance.map_index == self.task_instance.map_index,
+        )
+        task_instance = query.one_or_none()
+        if task_instance is None:
+            raise AirflowException(
+                "TaskInstance with dag_id: %s, task_id: %s, run_id: %s and map_index: %s is not found",
+                self.task_instance.dag_id,
+                self.task_instance.task_id,
+                self.task_instance.run_id,
+                self.task_instance.map_index,
+            )
+        return task_instance
+
+    def safe_to_cancel(self) -> bool:
+        """
+        Whether it is safe to cancel the external job which is being executed by this trigger.
+
+        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
+        Because in those cases, we should NOT cancel the external job.
+        """
+        # Database query is needed to get the latest state of the task instance.
+        task_instance = self.get_task_instance()  # type: ignore[call-arg]
+        return task_instance.state != TaskInstanceState.DEFERRED
+
     async def run(self) -> AsyncIterator[TriggerEvent]:  # type: ignore[override]
         """Get current job execution status and yields a TriggerEvent."""
         hook = self._get_async_hook()
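The `safe_to_cancel` check distinguishes a task that was actually killed from a triggerer that is merely shutting down: while the task instance is still DEFERRED, the `asyncio.CancelledError` comes from the trigger process stopping, and the BigQuery job should be left running. A minimal DAG sketch that exercises this path (query and IDs hypothetical):

```python
from datetime import datetime

from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

with DAG(dag_id="bq_deferrable_cancel", start_date=datetime(2024, 1, 1), schedule=None):
    # With deferrable=True, polling happens in BigQueryInsertJobTrigger.
    # cancel_on_kill=True cancels the BigQuery job when the *task* is killed;
    # after this change, a triggerer restart (task still DEFERRED) no longer does.
    BigQueryInsertJobOperator(
        task_id="run_query",
        configuration={"query": {"query": "SELECT 1", "useLegacySql": False}},
        deferrable=True,
        cancel_on_kill=True,
    )
```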
@@ -117,13 +154,27 @@ class BigQueryInsertJobTrigger(BaseTrigger):
                 )
                 await asyncio.sleep(self.poll_interval)
         except asyncio.CancelledError:
-            self.
-
+            if self.job_id and self.cancel_on_kill and self.safe_to_cancel():
+                self.log.info(
+                    "The job is safe to cancel the as airflow TaskInstance is not in deferred state."
+                )
+                self.log.info(
+                    "Cancelling job. Project ID: %s, Location: %s, Job ID: %s",
+                    self.project_id,
+                    self.location,
+                    self.job_id,
+                )
                 await hook.cancel_job(  # type: ignore[union-attr]
                     job_id=self.job_id, project_id=self.project_id, location=self.location
                 )
             else:
-                self.log.info(
+                self.log.info(
+                    "Trigger may have shutdown. Skipping to cancel job because the airflow "
+                    "task is not cancelled yet: Project ID: %s, Location:%s, Job ID:%s",
+                    self.project_id,
+                    self.location,
+                    self.job_id,
+                )
         except Exception as e:
             self.log.exception("Exception occurred while checking for query completion")
             yield TriggerEvent({"status": "error", "message": str(e)})
@@ -148,6 +199,7 @@ class BigQueryCheckTrigger(BigQueryInsertJobTrigger):
                 "table_id": self.table_id,
                 "poll_interval": self.poll_interval,
                 "impersonation_chain": self.impersonation_chain,
+                "cancel_on_kill": self.cancel_on_kill,
             },
         )

@@ -205,9 +257,10 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
         (default: False).
     """

-    def __init__(self, as_dict: bool = False, **kwargs):
+    def __init__(self, as_dict: bool = False, selected_fields: str | None = None, **kwargs):
         super().__init__(**kwargs)
         self.as_dict = as_dict
+        self.selected_fields = selected_fields

     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize BigQueryInsertJobTrigger arguments and classpath."""
@@ -223,6 +276,7 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
                 "poll_interval": self.poll_interval,
                 "impersonation_chain": self.impersonation_chain,
                 "as_dict": self.as_dict,
+                "selected_fields": self.selected_fields,
             },
         )

@@ -235,7 +289,11 @@ class BigQueryGetDataTrigger(BigQueryInsertJobTrigger):
                 job_status = await hook.get_job_status(job_id=self.job_id, project_id=self.project_id)
                 if job_status["status"] == "success":
                     query_results = await hook.get_job_output(job_id=self.job_id, project_id=self.project_id)
-                    records = hook.get_records(
+                    records = hook.get_records(
+                        query_results=query_results,
+                        as_dict=self.as_dict,
+                        selected_fields=self.selected_fields,
+                    )
                     self.log.debug("Response from hook: %s", job_status["status"])
                     yield TriggerEvent(
                         {
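Serializing `selected_fields` into the trigger lets the deferred path return only the requested columns instead of whole rows. A sketch of the operator-level usage this enables (dataset, table, and field names hypothetical):

```python
from airflow.providers.google.cloud.operators.bigquery import BigQueryGetDataOperator

get_rows = BigQueryGetDataOperator(
    task_id="get_rows",
    dataset_id="my_dataset",       # hypothetical
    table_id="my_table",           # hypothetical
    selected_fields="name,total",  # now forwarded to BigQueryGetDataTrigger
    as_dict=True,                  # rows come back as dicts keyed by column name
    deferrable=True,
)
```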
airflow/providers/google/cloud/triggers/dataproc.py
@@ -22,16 +22,22 @@ from __future__ import annotations
 import asyncio
 import re
 import time
-from typing import Any, AsyncIterator, Sequence
+from typing import TYPE_CHECKING, Any, AsyncIterator, Sequence

 from google.api_core.exceptions import NotFound
 from google.cloud.dataproc_v1 import Batch, Cluster, ClusterStatus, JobStatus

 from airflow.exceptions import AirflowException
+from airflow.models.taskinstance import TaskInstance
 from airflow.providers.google.cloud.hooks.dataproc import DataprocAsyncHook, DataprocHook
 from airflow.providers.google.cloud.utils.dataproc import DataprocOperationType
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.triggers.base import BaseTrigger, TriggerEvent
+from airflow.utils.session import provide_session
+from airflow.utils.state import TaskInstanceState
+
+if TYPE_CHECKING:
+    from sqlalchemy.orm.session import Session


 class DataprocBaseTrigger(BaseTrigger):
@@ -110,6 +116,41 @@ class DataprocSubmitTrigger(DataprocBaseTrigger):
             },
         )

+    @provide_session
+    def get_task_instance(self, session: Session) -> TaskInstance:
+        """
+        Get the task instance for the current task.
+
+        :param session: Sqlalchemy session
+        """
+        query = session.query(TaskInstance).filter(
+            TaskInstance.dag_id == self.task_instance.dag_id,
+            TaskInstance.task_id == self.task_instance.task_id,
+            TaskInstance.run_id == self.task_instance.run_id,
+            TaskInstance.map_index == self.task_instance.map_index,
+        )
+        task_instance = query.one_or_none()
+        if task_instance is None:
+            raise AirflowException(
+                "TaskInstance with dag_id: %s,task_id: %s, run_id: %s and map_index: %s is not found",
+                self.task_instance.dag_id,
+                self.task_instance.task_id,
+                self.task_instance.run_id,
+                self.task_instance.map_index,
+            )
+        return task_instance
+
+    def safe_to_cancel(self) -> bool:
+        """
+        Whether it is safe to cancel the external job which is being executed by this trigger.
+
+        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
+        Because in those cases, we should NOT cancel the external job.
+        """
+        # Database query is needed to get the latest state of the task instance.
+        task_instance = self.get_task_instance()  # type: ignore[call-arg]
+        return task_instance.state != TaskInstanceState.DEFERRED
+
     async def run(self):
         try:
             while True:
@@ -125,7 +166,11 @@ class DataprocSubmitTrigger(DataprocBaseTrigger):
         except asyncio.CancelledError:
             self.log.info("Task got cancelled.")
             try:
-                if self.job_id and self.cancel_on_kill:
+                if self.job_id and self.cancel_on_kill and self.safe_to_cancel():
+                    self.log.info(
+                        "Cancelling the job as it is safe to do so. Note that the airflow TaskInstance is not"
+                        " in deferred state."
+                    )
                     self.log.info("Cancelling the job: %s", self.job_id)
                     # The synchronous hook is utilized to delete the cluster when a task is cancelled. This
                     # is because the asynchronous hook deletion is not awaited when the trigger task is
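This is the same `safe_to_cancel` guard as in the BigQuery trigger, applied to Dataproc job submission. A sketch of the deferrable operator usage it protects (project, region, and job spec hypothetical):

```python
from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

submit = DataprocSubmitJobOperator(
    task_id="submit_job",
    project_id="my-project",  # hypothetical
    region="us-central1",     # hypothetical
    job={
        "placement": {"cluster_name": "my-cluster"},                  # hypothetical
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
    },
    deferrable=True,
    cancel_on_kill=True,  # only cancels the Dataproc job if the task was really killed
)
```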
@@ -178,6 +223,36 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
             },
         )

+    @provide_session
+    def get_task_instance(self, session: Session) -> TaskInstance:
+        query = session.query(TaskInstance).filter(
+            TaskInstance.dag_id == self.task_instance.dag_id,
+            TaskInstance.task_id == self.task_instance.task_id,
+            TaskInstance.run_id == self.task_instance.run_id,
+            TaskInstance.map_index == self.task_instance.map_index,
+        )
+        task_instance = query.one_or_none()
+        if task_instance is None:
+            raise AirflowException(
+                "TaskInstance with dag_id: %s,task_id: %s, run_id: %s and map_index: %s is not found.",
+                self.task_instance.dag_id,
+                self.task_instance.task_id,
+                self.task_instance.run_id,
+                self.task_instance.map_index,
+            )
+        return task_instance
+
+    def safe_to_cancel(self) -> bool:
+        """
+        Whether it is safe to cancel the external job which is being executed by this trigger.
+
+        This is to avoid the case that `asyncio.CancelledError` is called because the trigger itself is stopped.
+        Because in those cases, we should NOT cancel the external job.
+        """
+        # Database query is needed to get the latest state of the task instance.
+        task_instance = self.get_task_instance()  # type: ignore[call-arg]
+        return task_instance.state != TaskInstanceState.DEFERRED
+
     async def run(self) -> AsyncIterator[TriggerEvent]:
         try:
             while True:
@@ -207,7 +282,11 @@ class DataprocClusterTrigger(DataprocBaseTrigger):
                 await asyncio.sleep(self.polling_interval_seconds)
         except asyncio.CancelledError:
             try:
-                if self.delete_on_error:
+                if self.delete_on_error and self.safe_to_cancel():
+                    self.log.info(
+                        "Deleting the cluster as it is safe to delete as the airflow TaskInstance is not in "
+                        "deferred state."
+                    )
                     self.log.info("Deleting cluster %s.", self.cluster_name)
                     # The synchronous hook is utilized to delete the cluster when a task is cancelled.
                     # This is because the asynchronous hook deletion is not awaited when the trigger task
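For `DataprocClusterTrigger` the guarded action is cluster deletion rather than job cancellation: `delete_on_error` used to tear the cluster down whenever the trigger was cancelled, including on a mere triggerer restart. A sketch of the operator usage affected (names hypothetical, `cluster_config` elided):

```python
from airflow.providers.google.cloud.operators.dataproc import DataprocCreateClusterOperator

create_cluster = DataprocCreateClusterOperator(
    task_id="create_cluster",
    project_id="my-project",    # hypothetical
    region="us-central1",       # hypothetical
    cluster_name="my-cluster",  # hypothetical
    cluster_config={},          # use a real cluster config in practice
    deferrable=True,
    delete_on_error=True,  # deletion is now skipped while the task is still DEFERRED
)
```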
airflow/providers/google/cloud/triggers/kubernetes_engine.py
@@ -30,7 +30,6 @@ from airflow.providers.cncf.kubernetes.utils.pod_manager import OnFinishAction
 from airflow.providers.google.cloud.hooks.kubernetes_engine import (
     GKEAsyncHook,
     GKEKubernetesAsyncHook,
-    GKEPodAsyncHook,
 )
 from airflow.triggers.base import BaseTrigger, TriggerEvent

@@ -147,8 +146,8 @@ class GKEStartPodTrigger(KubernetesPodTrigger):
         )

     @cached_property
-    def hook(self) -> GKEPodAsyncHook:  # type: ignore[override]
-        return GKEPodAsyncHook(
+    def hook(self) -> GKEKubernetesAsyncHook:  # type: ignore[override]
+        return GKEKubernetesAsyncHook(
             cluster_url=self._cluster_url,
             ssl_ca_cert=self._ssl_ca_cert,
             gcp_conn_id=self.gcp_conn_id,
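The trigger's `hook` property now returns the consolidated `GKEKubernetesAsyncHook` in place of the removed `GKEPodAsyncHook`; operators that defer to this trigger are unaffected. A sketch of the deferrable path that goes through it (cluster details hypothetical):

```python
from airflow.providers.google.cloud.operators.kubernetes_engine import GKEStartPodOperator

run_pod = GKEStartPodOperator(
    task_id="run_pod",
    project_id="my-project",    # hypothetical
    location="us-central1-a",   # hypothetical
    cluster_name="my-cluster",  # hypothetical
    name="example-pod",
    namespace="default",
    image="busybox",
    cmds=["sh", "-c", "echo hello"],
    deferrable=True,  # defers to GKEStartPodTrigger, which builds the async hook above
)
```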
airflow/providers/google/get_provider_info.py
@@ -28,7 +28,7 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n - `Google Ads <https://ads.google.com/>`__\n - `Google Cloud (GCP) <https://cloud.google.com/>`__\n - `Google Firebase <https://firebase.google.com/>`__\n - `Google LevelDB <https://github.com/google/leveldb/>`__\n - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1715384437,
         "versions": [
             "10.18.0",
             "10.17.0",
@@ -96,7 +96,7 @@ def get_provider_info():
         "gcsfs>=2023.10.0",
         "google-ads>=23.1.0",
         "google-analytics-admin",
-        "google-api-core>=2.11.0,!=2.16.0",
+        "google-api-core>=2.11.0,!=2.16.0,!=2.18.0",
         "google-api-python-client>=1.6.0",
         "google-auth>=1.0.0",
         "google-auth-httplib2>=0.0.1",
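The tightened `google-api-core` pin now also excludes 2.18.0. A quick way to check what the new specifier admits, using the `packaging` library:

```python
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=2.11.0,!=2.16.0,!=2.18.0")

print("2.16.0" in spec)  # False, already excluded in rc1
print("2.18.0" in spec)  # False, newly excluded in rc2
print("2.18.1" in spec)  # True
```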
@@ -1147,6 +1147,7 @@ def get_provider_info():
                 "airflow.providers.google.cloud.hooks.vertex_ai.model_service",
                 "airflow.providers.google.cloud.hooks.vertex_ai.pipeline_job",
                 "airflow.providers.google.cloud.hooks.vertex_ai.generative_model",
+                "airflow.providers.google.cloud.hooks.vertex_ai.prediction_service",
             ],
         },
         {
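`get_provider_info` now registers the new `prediction_service` hook module shipped in this release (the +91-line file in the summary above). A hedged sketch of typical usage, assuming the module follows the other Vertex AI hooks and exposes a `PredictionServiceHook.predict` method (endpoint and payload hypothetical):

```python
from airflow.providers.google.cloud.hooks.vertex_ai.prediction_service import (
    PredictionServiceHook,
)

hook = PredictionServiceHook(gcp_conn_id="google_cloud_default")
response = hook.predict(
    endpoint_id="1234567890",      # hypothetical Vertex AI endpoint ID
    instances=[{"feature": 1.0}],  # hypothetical prediction payload
    project_id="my-project",
    location="us-central1",
)
print(response.predictions)
```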
{apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.18.0rc1
+Version: 10.18.0rc2
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -31,7 +31,7 @@ Requires-Dist: gcloud-aio-storage>=9.0.0
 Requires-Dist: gcsfs>=2023.10.0
 Requires-Dist: google-ads>=23.1.0
 Requires-Dist: google-analytics-admin
-Requires-Dist: google-api-core>=2.11.0,!=2.16.0
+Requires-Dist: google-api-core>=2.11.0,!=2.16.0,!=2.18.0
 Requires-Dist: google-api-python-client>=1.6.0
 Requires-Dist: google-auth-httplib2>=0.0.1
 Requires-Dist: google-auth>=1.0.0
@@ -171,7 +171,7 @@ Provides-Extra: trino

 Package ``apache-airflow-providers-google``

-Release: ``10.18.0.rc1``
+Release: ``10.18.0.rc2``


 Google services including:
@@ -205,9 +205,9 @@ The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 Requirements
 ------------

-=======================================
+======================================= ==============================
 PIP package                             Version required
-=======================================
+======================================= ==============================
 ``apache-airflow``                      ``>=2.7.0``
 ``apache-airflow-providers-common-sql`` ``>=1.7.2``
 ``asgiref``                             ``>=3.5.2``
@@ -217,7 +217,7 @@ PIP package                             Version required
 ``gcsfs``                               ``>=2023.10.0``
 ``google-ads``                          ``>=23.1.0``
 ``google-analytics-admin``
-``google-api-core``                     ``>=2.11.0,!=2.16.0``
+``google-api-core``                     ``>=2.11.0,!=2.16.0,!=2.18.0``
 ``google-api-python-client``            ``>=1.6.0``
 ``google-auth``                         ``>=1.0.0``
 ``google-auth-httplib2``                ``>=0.0.1``
@@ -269,7 +269,7 @@ PIP package                             Version required
 ``sqlalchemy-bigquery``                 ``>=1.2.1``
 ``sqlalchemy-spanner``                  ``>=1.6.2``
 ``python-slugify``                      ``>=5.0``
-=======================================
+======================================= ==============================

 Cross provider package dependencies
 -----------------------------------
{apache_airflow_providers_google-10.18.0rc1.dist-info → apache_airflow_providers_google-10.18.0rc2.dist-info}/RECORD
@@ -1,6 +1,6 @@
 airflow/providers/google/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
-airflow/providers/google/__init__.py,sha256
-airflow/providers/google/get_provider_info.py,sha256=
+airflow/providers/google/__init__.py,sha256=-STVmWYvzAtQZziT5ntqECeACWw8HYuYxEWguvYiPuI,1495
+airflow/providers/google/get_provider_info.py,sha256=5EIhNOVNxiPbvIwu-Rqa-FoQV-Gzg_R3S2ZxXtSV9pk,81204
 airflow/providers/google/go_module_utils.py,sha256=QZcIY0BAsy_GlwJpaCDcSRWHueZvkqDCj2ecICn_-gY,1770
 airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
 airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -22,8 +22,8 @@ airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py,sha256=
 airflow/providers/google/cloud/fs/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/fs/gcs.py,sha256=fJBGhHEE46_U5Rmbs1W0uenvGhECv13CtVSh3z7pM60,2457
 airflow/providers/google/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/hooks/automl.py,sha256=
-airflow/providers/google/cloud/hooks/bigquery.py,sha256=
+airflow/providers/google/cloud/hooks/automl.py,sha256=5fp8vZ96at8jH-a4yYipu4FI3J809b2E6XLCOQVhzmY,28959
+airflow/providers/google/cloud/hooks/bigquery.py,sha256=ivglJpV5jL2la6UhEXC_ABa1q-SyP2-Jbf3aSU8NimE,155228
 airflow/providers/google/cloud/hooks/bigquery_dts.py,sha256=3wLKj-6tQwWphjwKBLGg1rjoXAAknv0WLh6T3MqsNWA,15228
 airflow/providers/google/cloud/hooks/bigtable.py,sha256=wReDIbDyQGP8oZIzg0vsfgD6zrLmY-oYghBNCPVPorw,12580
 airflow/providers/google/cloud/hooks/cloud_batch.py,sha256=FjpR_Av7z8oMnB4Q7S-aPTMO8HZMxAo_1akdHpE7iA8,7809
@@ -77,6 +77,7 @@ airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py,sha256=UqxLJt
 airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py,sha256=KSS2vJ3chTP4_ghrIU6qFDGKDONnhPddYn1ButnQxsI,25051
 airflow/providers/google/cloud/hooks/vertex_ai/model_service.py,sha256=KCqCyxjsxst_2FTAwPGanL3URL3vc50kKlVwmr2maDY,18115
 airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py,sha256=b4PlBV0MqrOUIwR6heuPwiFGFnEHaaduPTV3iibbs_Y,29023
+airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py,sha256=Q-EzmMtE4hjEg_tH-vnE-PcrXTvoQY2QFh77reVH840,4263
 airflow/providers/google/cloud/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/links/automl.py,sha256=tJ0tIYFicB4urFltg4uTp_Yk7ZeaD0SA8Q3GWyeMw4Y,4781
 airflow/providers/google/cloud/links/base.py,sha256=6Y96NArcm-f4Yw1YYqaoRdXfLLWwMH01XDAzIA-a3NA,1660
@@ -111,8 +112,8 @@ airflow/providers/google/cloud/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2
 airflow/providers/google/cloud/log/gcs_task_handler.py,sha256=d2zTPtoQHwdFIXuWbHu0xniA2oor9BEBYBNgJaWeF-0,10386
 airflow/providers/google/cloud/log/stackdriver_task_handler.py,sha256=Z_aHF0_hjGwoDxk4VVtZ1sWTeqOdpSsRqg4yg6eOL80,15637
 airflow/providers/google/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/operators/automl.py,sha256=
-airflow/providers/google/cloud/operators/bigquery.py,sha256=
+airflow/providers/google/cloud/operators/automl.py,sha256=fBwkwLcKr9sAPCacjdHHQcQrh3KD65oxm1PUIQsbKuI,61727
+airflow/providers/google/cloud/operators/bigquery.py,sha256=dfTzs1ouMYEoC7SXfRIZMeHCoVut17OFl_GbNM2nO1U,129660
 airflow/providers/google/cloud/operators/bigquery_dts.py,sha256=6VJISM4HoMBQ3EQ5nz3zxFk8tfluGA1d2vcUNUlYLPc,17695
 airflow/providers/google/cloud/operators/bigtable.py,sha256=BnWHnTEscyPbsKWFaSreLr62W68fmHu5loQVZex7LPs,26921
 airflow/providers/google/cloud/operators/cloud_base.py,sha256=Xysh4znvIQIxbQqmfKoaL6O09FikndHrQuKKUnEV7KU,1483
@@ -131,13 +132,13 @@ airflow/providers/google/cloud/operators/datafusion.py,sha256=NZoR65aChdkPUG8bxE
 airflow/providers/google/cloud/operators/datapipeline.py,sha256=UFIhzulL7d9JD3PmIbpYIu1dKVVJrJthVyA-8kkJq0A,6478
 airflow/providers/google/cloud/operators/dataplex.py,sha256=3Xq7ewGIQFH-cf_vbsWkIyuDOMD5ij5EBbTeuFtAtKs,91185
 airflow/providers/google/cloud/operators/dataprep.py,sha256=jTDDgRccd2zIUqGzJebZpbNTJsFdRi5RnMtldXHqiMs,10477
-airflow/providers/google/cloud/operators/dataproc.py,sha256=
+airflow/providers/google/cloud/operators/dataproc.py,sha256=rN6L8Lybgjh9jmJ7guOSq05irzangEvYTBbvCD-2bJ0,152559
 airflow/providers/google/cloud/operators/dataproc_metastore.py,sha256=MWkThX_mzef-VTRrxZFn5WPfenZ3F2DNXWWa827nLrw,49532
 airflow/providers/google/cloud/operators/datastore.py,sha256=di00jFy3Z1v0GcmcQ0df8NJ32yxcseOqWuojC4TKdmY,24927
 airflow/providers/google/cloud/operators/dlp.py,sha256=SQCGml0RIKl0UrvXHIUiOskg5ayTj4F5_4k4rztClvM,120742
 airflow/providers/google/cloud/operators/functions.py,sha256=dL5uaYtAWujwvAID_kLsyEsQ-ThFXGrEsg5Tk277FMs,20155
 airflow/providers/google/cloud/operators/gcs.py,sha256=tUNcseFxF2AC-wNjcrngvKqLqhkYNC4gaSFHiCq7eK0,46627
-airflow/providers/google/cloud/operators/kubernetes_engine.py,sha256=
+airflow/providers/google/cloud/operators/kubernetes_engine.py,sha256=prrgsNZN0ZvtdzlnjmrPIPtvwJen7YYp1yHPU3QqVn0,69594
 airflow/providers/google/cloud/operators/life_sciences.py,sha256=cQzFWGdwh4yr44j7nfMXdGnPVRkeXwkrj_qdzlchD-w,4816
 airflow/providers/google/cloud/operators/looker.py,sha256=LCbN0vv8y0exwvfHbRXmUtNUZIOlSfljInNZK1zcfrs,4063
 airflow/providers/google/cloud/operators/mlengine.py,sha256=pPoLn7txuR5h2I9prSY26Sc7qfjWRXfAjHplGl3vNK8,62873
@@ -152,7 +153,7 @@ airflow/providers/google/cloud/operators/translate.py,sha256=yYN4IRcRHXllZsChMJd
 airflow/providers/google/cloud/operators/translate_speech.py,sha256=ALehMdOuSspEb-7h9Cr5ml8TYFsUnc8vvHEULpKZxa4,7817
 airflow/providers/google/cloud/operators/video_intelligence.py,sha256=NQvEueDegdpPBSqMkJF_qb9x3WeZT7XJL-yE8Sqlz_U,14165
 airflow/providers/google/cloud/operators/vision.py,sha256=RQrf9-dgRfgYxDceR--WMvc03d2a1zZaOQWzZxjrJYo,67630
-airflow/providers/google/cloud/operators/workflows.py,sha256
+airflow/providers/google/cloud/operators/workflows.py,sha256=fnyWLqRHz0UYu6AnQKKZIMlfSIg_v5nNbZAt6ASe4fI,28977
 airflow/providers/google/cloud/operators/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=Txeaul_QFrAKvEBGUa-AwcDEDtxhKWLNhbXSaBldTQE,30987
 airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=e0oFWmCRH3aQHNckjEf_YO5zP9LqiLVTzB1QTgv3iUo,28828
@@ -215,7 +216,7 @@ airflow/providers/google/cloud/transfers/sheets_to_gcs.py,sha256=G6PdHaYKUEhL7ER
 airflow/providers/google/cloud/transfers/sql_to_gcs.py,sha256=BNVCKLMwxFkyrLU4q7xkD0fg1wdLxQJMLxXCAc05DRM,21771
 airflow/providers/google/cloud/transfers/trino_to_gcs.py,sha256=w8a9JBsxdqiVJePDHF-hpGW5eZWIyop-lbzBYZYabzA,7143
 airflow/providers/google/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/google/cloud/triggers/bigquery.py,sha256=
+airflow/providers/google/cloud/triggers/bigquery.py,sha256=dCw9ihWSBJoFLWb6kWB-cvh0cwsO1pLVAgdTRMhcXSs,34770
 airflow/providers/google/cloud/triggers/bigquery_dts.py,sha256=uHqkZPa32sIPBPmO8pqgTsebRS_D_wYIwd6wABwyJo4,6219
 airflow/providers/google/cloud/triggers/cloud_batch.py,sha256=AZDmqsJSaJT3X1Mp8F4uuC7oAgShUoiYIIqaIApqum0,6717
 airflow/providers/google/cloud/triggers/cloud_build.py,sha256=yLmims5zQ3RyIfdY1NOA0qAhwEkTwpvTyT2M3dPb8-M,5628
@@ -226,9 +227,9 @@ airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py,sha256
 airflow/providers/google/cloud/triggers/dataflow.py,sha256=u6CzIncAU3rWsGKTyNHV4ibYSBTCqqOiFCa7dcWnqzE,28635
 airflow/providers/google/cloud/triggers/datafusion.py,sha256=blqNx4hLHRrLp-FQMCNR3yWmAZ2hCTfql2eyf5XY0P0,5985
 airflow/providers/google/cloud/triggers/dataplex.py,sha256=fEHbvNYgP-6htNkp9b7nmvea_r3Z6CIMfIh57NaWxFA,8334
-airflow/providers/google/cloud/triggers/dataproc.py,sha256=
+airflow/providers/google/cloud/triggers/dataproc.py,sha256=5eCSAkjg6FDL56P_7gvXu_fL0OWHnBrXQc5yg5agifg,24304
 airflow/providers/google/cloud/triggers/gcs.py,sha256=pMjeNOkWHkOyiAxeK-JoyDInUf2VNtefOZxp8K-aNjw,18973
-airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=
+airflow/providers/google/cloud/triggers/kubernetes_engine.py,sha256=XnCnGtHaZUEaYS1Yra0PTMovE06d2fcSUCNhIRsuCGc,12328
 airflow/providers/google/cloud/triggers/mlengine.py,sha256=qpOa9Gz8FmHDxXvPWrXO3M7snGbRTq92gy6kGafCUiY,5265
 airflow/providers/google/cloud/triggers/pubsub.py,sha256=LbKGL-g6WfeiRhmkItWwl8hRgI3Lr6B3Ib2GNChL4mg,5724
 airflow/providers/google/cloud/triggers/vertex_ai.py,sha256=99ah7Rcc_kYBihDXGcekZvkUz7ZNyXKv7j6R3vddcCE,9927
@@ -303,7 +304,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=CxtVhp3wlEOBtjR
 airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=4nwXWkTySeBXNuThPxzO7uww_hH6PthpppTeuShn27Q,4363
 airflow/providers/google/suite/transfers/local_to_drive.py,sha256=eYCJghA0Ou2vUUvN_wfBQvpt2yzR9RmmeNdPgh2Cbjo,6100
 airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
-apache_airflow_providers_google-10.18.
-apache_airflow_providers_google-10.18.
-apache_airflow_providers_google-10.18.
-apache_airflow_providers_google-10.18.
+apache_airflow_providers_google-10.18.0rc2.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+apache_airflow_providers_google-10.18.0rc2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_google-10.18.0rc2.dist-info/METADATA,sha256=PekTdpC6hyG7aBZeQIeJBjBliPVeQoWoRDNmlJbXBgw,15979
+apache_airflow_providers_google-10.18.0rc2.dist-info/RECORD,,