apache_airflow_providers_google-14.1.0-py3-none-any.whl → apache_airflow_providers_google-15.0.0rc1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/ads/hooks/ads.py +7 -33
- airflow/providers/google/ads/transfers/ads_to_gcs.py +1 -17
- airflow/providers/google/cloud/hooks/bigquery.py +6 -11
- airflow/providers/google/cloud/hooks/cloud_batch.py +1 -2
- airflow/providers/google/cloud/hooks/cloud_build.py +1 -54
- airflow/providers/google/cloud/hooks/compute.py +4 -3
- airflow/providers/google/cloud/hooks/dataflow.py +2 -139
- airflow/providers/google/cloud/hooks/dataform.py +6 -12
- airflow/providers/google/cloud/hooks/datafusion.py +1 -2
- airflow/providers/google/cloud/hooks/dataplex.py +1 -1
- airflow/providers/google/cloud/hooks/gcs.py +13 -5
- airflow/providers/google/cloud/hooks/life_sciences.py +1 -1
- airflow/providers/google/cloud/hooks/translate.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +3 -2
- airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +2 -272
- airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +2 -1
- airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +1 -1
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +2 -1
- airflow/providers/google/cloud/links/cloud_storage_transfer.py +1 -3
- airflow/providers/google/cloud/links/dataproc.py +0 -1
- airflow/providers/google/cloud/log/gcs_task_handler.py +147 -115
- airflow/providers/google/cloud/openlineage/facets.py +32 -32
- airflow/providers/google/cloud/openlineage/mixins.py +2 -2
- airflow/providers/google/cloud/operators/automl.py +1 -1
- airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +0 -3
- airflow/providers/google/cloud/operators/datafusion.py +1 -22
- airflow/providers/google/cloud/operators/dataproc.py +1 -143
- airflow/providers/google/cloud/operators/dataproc_metastore.py +0 -1
- airflow/providers/google/cloud/operators/mlengine.py +3 -1406
- airflow/providers/google/cloud/operators/spanner.py +1 -2
- airflow/providers/google/cloud/operators/translate.py +2 -2
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +0 -12
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +1 -22
- airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +4 -3
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -1
- airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +1 -2
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +23 -10
- airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +2 -2
- airflow/providers/google/common/auth_backend/google_openid.py +1 -1
- airflow/providers/google/common/hooks/base_google.py +7 -28
- airflow/providers/google/get_provider_info.py +3 -1
- airflow/providers/google/marketing_platform/sensors/display_video.py +1 -1
- airflow/providers/google/suite/hooks/drive.py +2 -2
- airflow/providers/google/cloud/utils/mlengine_operator_utils.py +0 -273
- {apache_airflow_providers_google-14.1.0.dist-info → apache_airflow_providers_google-15.0.0rc1.dist-info}/METADATA +11 -9
- {apache_airflow_providers_google-14.1.0.dist-info → apache_airflow_providers_google-15.0.0rc1.dist-info}/RECORD +49 -50
- {apache_airflow_providers_google-14.1.0.dist-info → apache_airflow_providers_google-15.0.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-14.1.0.dist-info → apache_airflow_providers_google-15.0.0rc1.dist-info}/entry_points.txt +0 -0
--- a/airflow/providers/google/cloud/operators/spanner.py
+++ b/airflow/providers/google/cloud/operators/spanner.py
@@ -403,8 +403,7 @@ class SpannerDeployDatabaseInstanceOperator(GoogleCloudBaseOperator):
             )
         else:
             self.log.info(
-                "The database '%s' in project '%s' and instance '%s'"
-                " already exists. Nothing to do. Exiting.",
+                "The database '%s' in project '%s' and instance '%s' already exists. Nothing to do. Exiting.",
                 self.database_id,
                 self.project_id,
                 self.instance_id,
--- a/airflow/providers/google/cloud/operators/translate.py
+++ b/airflow/providers/google/cloud/operators/translate.py
@@ -1145,7 +1145,7 @@ class TranslateDocumentOperator(GoogleCloudBaseOperator):
             project_id=self.project_id or hook.project_id,
             output_config=self.document_output_config,
         )
-        return cast(dict, type(doc_translation_result).to_dict(doc_translation_result))
+        return cast("dict", type(doc_translation_result).to_dict(doc_translation_result))
 
 
 class TranslateDocumentBatchOperator(GoogleCloudBaseOperator):
@@ -1310,7 +1310,7 @@ class TranslateDocumentBatchOperator(GoogleCloudBaseOperator):
         )
         result = hook.wait_for_operation_result(batch_document_translate_operation)
         self.log.info("Batch document translation job finished")
-        return cast(dict, type(result).to_dict(result))
+        return cast("dict", type(result).to_dict(result))
 
 
 class TranslateCreateGlossaryOperator(GoogleCloudBaseOperator):
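The `cast(dict, ...)` → `cast("dict", ...)` pattern above repeats throughout this release: quoting the target type turns it into a forward reference that type checkers still resolve, while `typing.cast` never inspects its first argument at runtime, so the quoted name no longer has to be importable when the line executes. A minimal sketch of the mechanism, assuming a `TYPE_CHECKING`-only import (ruff's TC006 rule is the likely motivation, though the diff itself does not say):

```python
from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Only imported for the type checker; absent at runtime.
    from requests import Session


def as_session(obj: object) -> "Session":
    # cast() performs no runtime conversion or lookup, so passing the type
    # as a string works even though Session is never imported at runtime.
    return cast("Session", obj)
```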
--- a/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
+++ b/airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
@@ -29,7 +29,6 @@ from google.cloud.aiplatform import datasets
 from google.cloud.aiplatform.models import Model
 from google.cloud.aiplatform_v1.types.training_pipeline import TrainingPipeline
 
-from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.vertex_ai.auto_ml import AutoMLHook
 from airflow.providers.google.cloud.links.vertex_ai import (
     VertexAIModelLink,
@@ -37,7 +36,6 @@ from airflow.providers.google.cloud.links.vertex_ai import (
     VertexAITrainingPipelinesLink,
 )
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
-from airflow.providers.google.common.deprecated import deprecated
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
@@ -575,16 +573,6 @@ class DeleteAutoMLTrainingJobOperator(GoogleCloudBaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
-    @property
-    @deprecated(
-        planned_removal_date="March 01, 2025",
-        use_instead="training_pipeline_id",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def training_pipeline(self):
-        """Alias for ``training_pipeline_id``, used for compatibility (deprecated)."""
-        return self.training_pipeline_id
-
     def execute(self, context: Context):
         hook = AutoMLHook(
             gcp_conn_id=self.gcp_conn_id,
--- a/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
+++ b/airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
@@ -30,7 +30,7 @@ from google.cloud.aiplatform_v1.types.dataset import Dataset
 from google.cloud.aiplatform_v1.types.training_pipeline import TrainingPipeline
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.vertex_ai.custom_job import CustomJobHook
 from airflow.providers.google.cloud.links.vertex_ai import (
     VertexAIModelLink,
@@ -43,7 +43,6 @@ from airflow.providers.google.cloud.triggers.vertex_ai import (
     CustomPythonPackageTrainingJobTrigger,
     CustomTrainingJobTrigger,
 )
-from airflow.providers.google.common.deprecated import deprecated
 
 if TYPE_CHECKING:
     from google.api_core.retry import Retry
@@ -1634,26 +1633,6 @@ class DeleteCustomTrainingJobOperator(GoogleCloudBaseOperator):
         self.gcp_conn_id = gcp_conn_id
         self.impersonation_chain = impersonation_chain
 
-    @property
-    @deprecated(
-        planned_removal_date="March 01, 2025",
-        use_instead="training_pipeline_id",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def training_pipeline(self):
-        """Alias for ``training_pipeline_id``, used for compatibility (deprecated)."""
-        return self.training_pipeline_id
-
-    @property
-    @deprecated(
-        planned_removal_date="March 01, 2025",
-        use_instead="custom_job_id",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def custom_job(self):
-        """Alias for ``custom_job_id``, used for compatibility (deprecated)."""
-        return self.custom_job_id
-
     def execute(self, context: Context):
         hook = CustomJobHook(
             gcp_conn_id=self.gcp_conn_id,
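With the deprecated alias properties removed from both Vertex AI delete operators (their March 01, 2025 removal date has passed), code that read `op.training_pipeline` or `op.custom_job` must switch to the `*_id` attributes. A migration sketch; the task id and resource identifiers are placeholders, not values from this diff:

```python
from airflow.providers.google.cloud.operators.vertex_ai.custom_job import (
    DeleteCustomTrainingJobOperator,
)

delete_training_job = DeleteCustomTrainingJobOperator(
    task_id="delete_custom_training_job",
    training_pipeline_id="1234567890",  # read this directly; .training_pipeline is gone
    custom_job_id="0987654321",         # likewise replaces .custom_job
    region="us-central1",
    project_id="my-project",
)
```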
--- a/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py
@@ -98,6 +98,7 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
         self.deferrable = deferrable
 
     def poke(self, context: Context) -> bool:
+        ti = context["ti"]
         hook = CloudDataTransferServiceHook(
             gcp_conn_id=self.gcp_cloud_conn_id,
             impersonation_chain=self.impersonation_chain,
@@ -113,13 +114,12 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
             operations=operations, expected_statuses=self.expected_statuses
         )
         if check:
-            self.xcom_push(context, key="sensed_operations", value=operations)
+            ti.xcom_push(key="sensed_operations", value=operations)
 
         project_id = self.project_id or hook.project_id
         if project_id:
             CloudStorageTransferJobLink.persist(
                 context=context,
-                task_instance=self,
                 project_id=project_id,
                 job_name=self.job_name,
             )
@@ -154,4 +154,5 @@ class CloudDataTransferServiceJobStatusSensor(BaseSensorOperator):
         if event["status"] == "error":
             raise AirflowException(event["message"])
 
-        self.xcom_push(context, key="sensed_operations", value=event["operations"])
+        ti = context["ti"]
+        ti.xcom_push(key="sensed_operations", value=event["operations"])
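The sensor now obtains the task instance from `context["ti"]` and pushes the matched operations through it, rather than calling `xcom_push` on the operator itself. Downstream tasks read the value back unchanged; a hypothetical consumer (task ids and wiring are illustrative, not from the diff):

```python
from airflow.decorators import task


@task
def report_sensed_operations(**context):
    ti = context["ti"]
    # Pull the list the sensor pushed under the "sensed_operations" key.
    operations = ti.xcom_pull(task_ids="wait_for_transfer_job", key="sensed_operations")
    for operation in operations or []:
        print(operation.get("name"))
```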
--- a/airflow/providers/google/cloud/sensors/dataproc_metastore.py
+++ b/airflow/providers/google/cloud/sensors/dataproc_metastore.py
@@ -112,7 +112,7 @@ class MetastoreHivePartitionSensor(BaseSensorOperator):
 
         # Extract actual query results
         result_base_uri = result_manifest_uri.rsplit("/", 1)[0]
-        results = (f"{result_base_uri}
+        results = (f"{result_base_uri}/{filename}" for filename in manifest.get("filenames", []))
         found_partitions = sum(
             len(
                 parse_json_from_gcs(
--- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py
@@ -486,8 +486,7 @@ class GCSToBigQueryOperator(BaseOperator):
         if self.max_id_key:
             self.log.info("Selecting the MAX value from BigQuery column %r...", self.max_id_key)
             select_command = (
-                f"SELECT MAX({self.max_id_key}) AS max_value "
-                f"FROM {self.destination_project_dataset_table}"
+                f"SELECT MAX({self.max_id_key}) AS max_value FROM {self.destination_project_dataset_table}"
             )
             self.configuration = {
                 "query": {
--- a/airflow/providers/google/cloud/transfers/sftp_to_gcs.py
+++ b/airflow/providers/google/cloud/transfers/sftp_to_gcs.py
@@ -73,6 +73,11 @@ class SFTPToGCSOperator(BaseOperator):
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
     :param sftp_prefetch: Whether to enable SFTP prefetch, the default is True.
+    :param use_stream: Determines the transfer method from SFTP to GCS.
+        When ``False`` (default), the file downloads locally
+        then uploads (may require significant disk space).
+        When ``True``, the file streams directly without using local disk.
+        Defaults to ``False``.
     """
 
     template_fields: Sequence[str] = (
@@ -95,6 +100,7 @@ class SFTPToGCSOperator(BaseOperator):
         move_object: bool = False,
         impersonation_chain: str | Sequence[str] | None = None,
         sftp_prefetch: bool = True,
+        use_stream: bool = False,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -109,6 +115,7 @@ class SFTPToGCSOperator(BaseOperator):
         self.move_object = move_object
         self.impersonation_chain = impersonation_chain
         self.sftp_prefetch = sftp_prefetch
+        self.use_stream = use_stream
 
     @cached_property
     def sftp_hook(self):
@@ -166,16 +173,22 @@ class SFTPToGCSOperator(BaseOperator):
             destination_object,
         )
 
-        with NamedTemporaryFile("w") as tmp:
-            sftp_hook.retrieve_file(source_path, tmp.name, prefetch=self.sftp_prefetch)
-
-            gcs_hook.upload(
-                bucket_name=self.destination_bucket,
-                object_name=destination_object,
-                filename=tmp.name,
-                mime_type=self.mime_type,
-                gzip=self.gzip,
-            )
+        if self.use_stream:
+            dest_bucket = gcs_hook.get_bucket(self.destination_bucket)
+            dest_blob = dest_bucket.blob(destination_object)
+            with dest_blob.open("wb") as write_stream:
+                sftp_hook.retrieve_file(source_path, write_stream, prefetch=self.sftp_prefetch)
+        else:
+            with NamedTemporaryFile("w") as tmp:
+                sftp_hook.retrieve_file(source_path, tmp.name, prefetch=self.sftp_prefetch)
+
+                gcs_hook.upload(
+                    bucket_name=self.destination_bucket,
+                    object_name=destination_object,
+                    filename=tmp.name,
+                    mime_type=self.mime_type,
+                    gzip=self.gzip,
+                )
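The new flag is opt-in, so existing DAGs keep the temporary-file path. A hypothetical usage sketch (bucket and paths are placeholders, not values from the diff):

```python
from airflow.providers.google.cloud.transfers.sftp_to_gcs import SFTPToGCSOperator

upload_large_export = SFTPToGCSOperator(
    task_id="sftp_to_gcs_streamed",
    source_path="/data/exports/large_dump.csv",
    destination_bucket="my-landing-bucket",
    destination_path="exports/large_dump.csv",
    use_stream=True,  # write straight to the GCS blob, no local staging file
)
```

Note that the streamed branch writes through `Blob.open("wb")` and never calls `gcs_hook.upload()`, so the `gzip` and `mime_type` options only take effect when `use_stream` is left at ``False``.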
--- a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
@@ -59,7 +59,7 @@ class CloudStorageTransferServiceCreateJobsTrigger(BaseTrigger):
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize StorageTransferJobsTrigger arguments and classpath."""
         return (
-            f"{self.__class__.__module__
+            f"{self.__class__.__module__}.{self.__class__.__qualname__}",
             {
                 "project_id": self.project_id,
                 "job_names": self.job_names,
@@ -176,7 +176,7 @@ class CloudStorageTransferServiceCheckJobStatusTrigger(BaseTrigger):
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize CloudStorageTransferServiceCheckJobStatusTrigger arguments and classpath."""
         return (
-            f"{self.__class__.__module__
+            f"{self.__class__.__module__}.{self.__class__.__qualname__}",
             {
                 "job_name": self.job_name,
                 "expected_statuses": self.expected_statuses,
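Both triggers now assemble the classpath from `__module__` and `__qualname__` in a single f-string; the triggerer imports this dotted path to rehydrate the trigger from its serialized kwargs. A generic sketch of the convention (a stand-in class, not the provider's actual trigger):

```python
class ExampleTrigger:
    """Minimal stand-in showing the classpath convention used by serialize()."""

    def serialize(self) -> tuple[str, dict]:
        # f"{module}.{qualname}" yields e.g. "my_pkg.triggers.ExampleTrigger",
        # which the triggerer imports to reconstruct the instance from kwargs.
        return (
            f"{self.__class__.__module__}.{self.__class__.__qualname__}",
            {},
        )
```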
--- a/airflow/providers/google/common/hooks/base_google.py
+++ b/airflow/providers/google/common/hooks/base_google.py
@@ -49,20 +49,17 @@ from googleapiclient.http import MediaIoBaseDownload, build_http, set_user_agent
 from requests import Session
 
 from airflow import version
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.providers.google.cloud.utils.credentials_provider import (
     _get_scopes,
     _get_target_principal_and_delegates,
     get_credentials_and_project_id,
 )
-from airflow.providers.google.common.consts import CLIENT_INFO
-from airflow.providers.google.common.deprecated import deprecated
 from airflow.utils.process_utils import patch_environ
 
 if TYPE_CHECKING:
     from aiohttp import ClientSession
-    from google.api_core.gapic_v1.client_info import ClientInfo
     from google.auth.credentials import Credentials
 
 log = logging.getLogger(__name__)
@@ -153,7 +150,7 @@ class retry_if_temporary_refresh_credentials(tenacity.retry_if_exception):
 # This allows the 'project_id' argument to be of type str instead of str | None,
 # making it easier to type hint the function body without dealing with the None
 # case that can never happen at runtime.
-PROVIDE_PROJECT_ID: str = cast(str, None)
+PROVIDE_PROJECT_ID: str = cast("str", None)
 
 T = TypeVar("T", bound=Callable)
 RT = TypeVar("RT")
@@ -442,24 +439,6 @@ class GoogleBaseHook(BaseHook):
                 f"Please check the connection configuration."
             )
 
-    @property
-    @deprecated(
-        planned_removal_date="March 01, 2025",
-        use_instead="airflow.providers.google.common.consts.CLIENT_INFO",
-        category=AirflowProviderDeprecationWarning,
-    )
-    def client_info(self) -> ClientInfo:
-        """
-        Return client information used to generate a user-agent for API calls.
-
-        It allows for better errors tracking.
-
-        This object is only used by the google-cloud-* libraries that are built specifically for
-        the Google Cloud. It is not supported by The Google APIs Python Client that use Discovery
-        based APIs.
-        """
-        return CLIENT_INFO
-
     @property
     def scopes(self) -> Sequence[str]:
         """
@@ -499,7 +478,7 @@ class GoogleBaseHook(BaseHook):
                 "after": tenacity.after_log(log, logging.DEBUG),
             }
             default_kwargs.update(**kwargs)
-            return cast(T, tenacity.retry(*args, **default_kwargs)(func))
+            return cast("T", tenacity.retry(*args, **default_kwargs)(func))
 
         return decorator
 
@@ -517,7 +496,7 @@ class GoogleBaseHook(BaseHook):
                 "after": tenacity.after_log(log, logging.DEBUG),
             }
             default_kwargs.update(**kwargs)
-            return cast(T, tenacity.retry(*args, **default_kwargs)(func))
+            return cast("T", tenacity.retry(*args, **default_kwargs)(func))
 
         return decorator
 
@@ -569,7 +548,7 @@ class GoogleBaseHook(BaseHook):
             with self.provide_gcp_credential_file_as_context():
                 return func(self, *args, **kwargs)
 
-        return cast(T, wrapper)
+        return cast("T", wrapper)
 
     @contextmanager
     def provide_gcp_credential_file_as_context(self) -> Generator[str | None, None, None]:
@@ -718,7 +697,7 @@ class _CredentialsToken(Token):
         scopes: Sequence[str] | None = None,
     ) -> None:
         _scopes: list[str] | None = list(scopes) if scopes else None
-        super().__init__(session=cast(Session, session), scopes=_scopes)
+        super().__init__(session=cast("Session", session), scopes=_scopes)
         self.credentials = credentials
         self.project = project
 
@@ -743,7 +722,7 @@ class _CredentialsToken(Token):
     async def refresh(self, *, timeout: int) -> TokenResponse:
         await sync_to_async(self.credentials.refresh)(google.auth.transport.requests.Request())
 
-        self.access_token = cast(str, self.credentials.token)
+        self.access_token = cast("str", self.credentials.token)
         self.access_token_duration = 3600
         self.access_token_acquired_at = self._now()
         return TokenResponse(value=self.access_token, expires_in=self.access_token_duration)
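With the deprecated `GoogleBaseHook.client_info` property deleted, the module-level constant its deprecation message pointed to is the replacement. A hedged migration sketch; `storage.Client` stands in for any google-cloud-* client that accepts `client_info`:

```python
from airflow.providers.google.common.consts import CLIENT_INFO
from google.cloud import storage

# Previously: client_info = hook.client_info (removed in 15.0.0).
client = storage.Client(client_info=CLIENT_INFO)
```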
--- a/airflow/providers/google/get_provider_info.py
+++ b/airflow/providers/google/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n  - `Google Ads <https://ads.google.com/>`__\n  - `Google Cloud (GCP) <https://cloud.google.com/>`__\n  - `Google Firebase <https://firebase.google.com/>`__\n  - `Google LevelDB <https://github.com/google/leveldb/>`__\n  - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n  - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch":
+        "source-date-epoch": 1743836162,
         "versions": [
+            "15.0.0",
             "14.1.0",
             "14.0.0",
             "12.0.0",
@@ -1652,6 +1653,7 @@ def get_provider_info():
             "sqlalchemy-spanner>=1.6.2",
             "tenacity>=8.1.0",
             "immutabledict>=4.2.0",
+            "types-protobuf!=5.29.1.20250402",
         ],
         "optional-dependencies": {
             "apache.beam": [
--- a/airflow/providers/google/marketing_platform/sensors/display_video.py
+++ b/airflow/providers/google/marketing_platform/sensors/display_video.py
@@ -83,7 +83,7 @@ class GoogleDisplayVideo360GetSDFDownloadOperationSensor(BaseSensorOperator):
         )
         operation = hook.get_sdf_download_operation(operation_name=self.operation_name)
         if "error" in operation:
-            message = f
+            message = f"The operation finished in error with {operation['error']}"
             raise AirflowException(message)
         if operation and operation.get("done"):
             return True
--- a/airflow/providers/google/suite/hooks/drive.py
+++ b/airflow/providers/google/suite/hooks/drive.py
@@ -192,9 +192,9 @@ class GoogleDriveHook(GoogleBaseHook):
         # current_file_id can be file or directory id, Google API treats them the same way.
         file_info = self._get_file_info(current_file_id)
         if current_file_id == file_id:
-            path = f
+            path = f"{file_info['name']}"
         else:
-            path = f
+            path = f"{file_info['name']}/{path}"
 
         # Google API returns parents array if there is at least one object inside
         if "parents" in file_info and len(file_info["parents"]) == 1:
--- a/apache_airflow_providers_google-14.1.0.dist-info/METADATA
+++ b/apache_airflow_providers_google-15.0.0rc1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-google
-Version: 14.1.0
+Version: 15.0.0rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,9 +20,9 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.4.0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
+Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.4.0rc0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
 Requires-Dist: asgiref>=3.5.2
 Requires-Dist: dill>=0.2.3
 Requires-Dist: gcloud-aio-auth>=5.2.0
@@ -87,6 +87,7 @@ Requires-Dist: sqlalchemy-bigquery>=1.2.1
 Requires-Dist: sqlalchemy-spanner>=1.6.2
 Requires-Dist: tenacity>=8.1.0
 Requires-Dist: immutabledict>=4.2.0
+Requires-Dist: types-protobuf!=5.29.1.20250402
 Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
 Requires-Dist: apache-beam[gcp]>=2.53.0 ; extra == "apache-beam" and ( python_version < "3.12")
 Requires-Dist: apache-beam[gcp]>=2.57.0 ; extra == "apache-beam" and ( python_version >= "3.12")
@@ -106,8 +107,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/14.1.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/14.1.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/15.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/15.0.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -155,7 +156,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``14.1.0``
+Release: ``15.0.0``
 
 
 Google services including:
@@ -175,7 +176,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/14.1.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/15.0.0/>`_.
 
 Installation
 ------------
@@ -259,6 +260,7 @@ PIP package Version required
 ``sqlalchemy-spanner``       ``>=1.6.2``
 ``tenacity``                 ``>=8.1.0``
 ``immutabledict``            ``>=4.2.0``
+``types-protobuf``           ``!=5.29.1.20250402``
 ========================================== ======================================
 
 Cross provider package dependencies
@@ -298,5 +300,5 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/14.1.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/15.0.0/changelog.html>`_.
 