apache-airflow-providers-google 10.22.0rc1__py3-none-any.whl → 10.23.0rc1__py3-none-any.whl
- airflow/providers/google/__init__.py +1 -1
- airflow/providers/google/cloud/hooks/bigquery.py +91 -54
- airflow/providers/google/cloud/hooks/cloud_build.py +3 -2
- airflow/providers/google/cloud/hooks/dataflow.py +112 -47
- airflow/providers/google/cloud/hooks/datapipeline.py +3 -3
- airflow/providers/google/cloud/hooks/kubernetes_engine.py +15 -26
- airflow/providers/google/cloud/hooks/life_sciences.py +5 -7
- airflow/providers/google/cloud/hooks/secret_manager.py +3 -3
- airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +28 -8
- airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +11 -6
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +214 -34
- airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +11 -4
- airflow/providers/google/cloud/links/automl.py +13 -22
- airflow/providers/google/cloud/log/gcs_task_handler.py +1 -2
- airflow/providers/google/cloud/operators/bigquery.py +6 -4
- airflow/providers/google/cloud/operators/dataflow.py +186 -4
- airflow/providers/google/cloud/operators/datafusion.py +3 -2
- airflow/providers/google/cloud/operators/datapipeline.py +5 -6
- airflow/providers/google/cloud/operators/dataproc.py +30 -33
- airflow/providers/google/cloud/operators/gcs.py +4 -4
- airflow/providers/google/cloud/operators/kubernetes_engine.py +16 -2
- airflow/providers/google/cloud/operators/life_sciences.py +5 -7
- airflow/providers/google/cloud/operators/mlengine.py +42 -65
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +18 -4
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +5 -5
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +280 -9
- airflow/providers/google/cloud/operators/vertex_ai/model_service.py +4 -0
- airflow/providers/google/cloud/secrets/secret_manager.py +3 -5
- airflow/providers/google/cloud/sensors/bigquery.py +8 -27
- airflow/providers/google/cloud/sensors/bigquery_dts.py +1 -4
- airflow/providers/google/cloud/sensors/cloud_composer.py +9 -14
- airflow/providers/google/cloud/sensors/dataflow.py +1 -25
- airflow/providers/google/cloud/sensors/dataform.py +1 -4
- airflow/providers/google/cloud/sensors/datafusion.py +1 -7
- airflow/providers/google/cloud/sensors/dataplex.py +1 -31
- airflow/providers/google/cloud/sensors/dataproc.py +1 -16
- airflow/providers/google/cloud/sensors/dataproc_metastore.py +1 -7
- airflow/providers/google/cloud/sensors/gcs.py +5 -27
- airflow/providers/google/cloud/sensors/looker.py +1 -13
- airflow/providers/google/cloud/sensors/pubsub.py +11 -5
- airflow/providers/google/cloud/sensors/workflows.py +1 -4
- airflow/providers/google/cloud/transfers/sftp_to_gcs.py +6 -0
- airflow/providers/google/cloud/triggers/dataflow.py +145 -1
- airflow/providers/google/cloud/triggers/kubernetes_engine.py +66 -3
- airflow/providers/google/common/deprecated.py +176 -0
- airflow/providers/google/common/hooks/base_google.py +3 -2
- airflow/providers/google/get_provider_info.py +8 -10
- airflow/providers/google/marketing_platform/hooks/analytics.py +4 -2
- airflow/providers/google/marketing_platform/hooks/search_ads.py +169 -30
- airflow/providers/google/marketing_platform/operators/analytics.py +16 -33
- airflow/providers/google/marketing_platform/operators/search_ads.py +217 -156
- airflow/providers/google/marketing_platform/sensors/display_video.py +1 -4
- {apache_airflow_providers_google-10.22.0rc1.dist-info → apache_airflow_providers_google-10.23.0rc1.dist-info}/METADATA +18 -16
- {apache_airflow_providers_google-10.22.0rc1.dist-info → apache_airflow_providers_google-10.23.0rc1.dist-info}/RECORD +56 -56
- airflow/providers/google/marketing_platform/sensors/search_ads.py +0 -92
- {apache_airflow_providers_google-10.22.0rc1.dist-info → apache_airflow_providers_google-10.23.0rc1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_google-10.22.0rc1.dist-info → apache_airflow_providers_google-10.23.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/cloud/operators/dataflow.py

@@ -27,7 +27,6 @@ from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
 from googleapiclient.errors import HttpError
 
 from airflow.configuration import conf
@@ -41,8 +40,12 @@ from airflow.providers.google.cloud.hooks.dataflow import (
 from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.cloud.links.dataflow import DataflowJobLink, DataflowPipelineLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
-from airflow.providers.google.cloud.triggers.dataflow import TemplateJobStartTrigger
+from airflow.providers.google.cloud.triggers.dataflow import (
+    DataflowStartYamlJobTrigger,
+    TemplateJobStartTrigger,
+)
 from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.version import version
 
@@ -174,7 +177,8 @@ class DataflowConfiguration:
 
 # TODO: Remove one day
 @deprecated(
-    reason="...",
+    planned_removal_date="November 01, 2024",
+    use_instead="providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator",
     category=AirflowProviderDeprecationWarning,
 )
 class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
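The `from deprecated import deprecated` imports removed throughout this release are replaced by the provider's own decorator, added in `airflow/providers/google/common/deprecated.py` (+176 lines in the file list above). A minimal sketch of how a deprecation is now declared, reusing the metadata from this hunk; the decorated class is a hypothetical stand-in:

from airflow.exceptions import AirflowProviderDeprecationWarning
from airflow.providers.google.common.deprecated import deprecated


@deprecated(
    planned_removal_date="November 01, 2024",
    use_instead="providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator",
    category=AirflowProviderDeprecationWarning,
)
class MyLegacyOperator:  # hypothetical stand-in for a deprecated operator
    """Using this class should emit a structured deprecation warning."""

Unlike the PyPI decorator's free-form `reason` string, the structured fields seen in these hunks (`planned_removal_date`, `use_instead`, `instructions`, `reason`) let the provider compose consistent warning messages.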
@@ -945,6 +949,11 @@ class DataflowStartFlexTemplateOperator(GoogleCloudBaseOperator):
         )
 
 
+@deprecated(
+    planned_removal_date="January 31, 2025",
+    use_instead="DataflowStartYamlJobOperator",
+    category=AirflowProviderDeprecationWarning,
+)
 class DataflowStartSqlJobOperator(GoogleCloudBaseOperator):
     """
     Starts Dataflow SQL query.
@@ -1050,9 +1059,182 @@ class DataflowStartSqlJobOperator(GoogleCloudBaseOperator):
         )
 
 
+class DataflowStartYamlJobOperator(GoogleCloudBaseOperator):
+    """
+    Launch a Dataflow YAML job and return the result.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:DataflowStartYamlJobOperator`
+
+    .. warning::
+        This operator requires the ``gcloud`` command (`Google Cloud SDK
+        <https://cloud.google.com/sdk/docs/install>`__) to be installed on the Airflow worker.
+
+    :param job_name: Required. The unique name to assign to the Cloud Dataflow job.
+    :param yaml_pipeline_file: Required. Path to a file defining the YAML pipeline to run.
+        Must be a local file or a URL beginning with 'gs://'.
+    :param region: Optional. Region ID of the job's regional endpoint. Defaults to 'us-central1'.
+    :param project_id: Required. The ID of the GCP project that owns the job.
+        If set to ``None`` or missing, the default project_id from the GCP connection is used.
+    :param gcp_conn_id: Optional. The connection ID used to connect to GCP.
+    :param append_job_name: Optional. Set to True if a unique suffix has to be appended to the `job_name`.
+        Defaults to True.
+    :param drain_pipeline: Optional. Set to True if you want to stop a streaming pipeline job by draining it
+        instead of canceling it when killing the task instance. Note that this does not work for batch pipeline jobs
+        or in the deferrable mode. Defaults to False.
+        For more info see: https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline
+    :param deferrable: Optional. Run operator in the deferrable mode.
+    :param expected_terminal_state: Optional. The expected terminal state of the Dataflow job at which the
+        operator task is set to succeed. Defaults to 'JOB_STATE_DONE' for batch jobs and 'JOB_STATE_RUNNING'
+        for streaming jobs.
+    :param poll_sleep: Optional. The time in seconds to sleep between polling Google Cloud Platform for the Dataflow job status.
+        Used both for the sync and deferrable mode.
+    :param cancel_timeout: Optional. How long (in seconds) the operator should wait for the pipeline to be
+        successfully canceled when the task is being killed.
+    :param jinja_variables: Optional. A dictionary of Jinja2 variables to be used in reifying the yaml pipeline file.
+    :param options: Optional. Additional gcloud or Beam job parameters.
+        It must be a dictionary with the keys matching the optional flag names in gcloud.
+        The list of supported flags can be found at: `https://cloud.google.com/sdk/gcloud/reference/dataflow/yaml/run`.
+        Note that if a flag does not require a value, then its dictionary value must be either True or None.
+        For example, the `--log-http` flag can be passed as {'log-http': True}.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    :return: Dictionary containing the job's data.
+    """
+
+    template_fields: Sequence[str] = (
+        "job_name",
+        "yaml_pipeline_file",
+        "jinja_variables",
+        "options",
+        "region",
+        "project_id",
+        "gcp_conn_id",
+    )
+    template_fields_renderers = {
+        "jinja_variables": "json",
+    }
+    operator_extra_links = (DataflowJobLink(),)
+
+    def __init__(
+        self,
+        *,
+        job_name: str,
+        yaml_pipeline_file: str,
+        region: str = DEFAULT_DATAFLOW_LOCATION,
+        project_id: str = PROVIDE_PROJECT_ID,
+        gcp_conn_id: str = "google_cloud_default",
+        append_job_name: bool = True,
+        drain_pipeline: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        poll_sleep: int = 10,
+        cancel_timeout: int | None = 5 * 60,
+        expected_terminal_state: str | None = None,
+        jinja_variables: dict[str, str] | None = None,
+        options: dict[str, Any] | None = None,
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.job_name = job_name
+        self.yaml_pipeline_file = yaml_pipeline_file
+        self.region = region
+        self.project_id = project_id
+        self.gcp_conn_id = gcp_conn_id
+        self.append_job_name = append_job_name
+        self.drain_pipeline = drain_pipeline
+        self.deferrable = deferrable
+        self.poll_sleep = poll_sleep
+        self.cancel_timeout = cancel_timeout
+        self.expected_terminal_state = expected_terminal_state
+        self.options = options
+        self.jinja_variables = jinja_variables
+        self.impersonation_chain = impersonation_chain
+        self.job_id: str | None = None
+
+    def execute(self, context: Context) -> dict[str, Any]:
+        self.job_id = self.hook.launch_beam_yaml_job(
+            job_name=self.job_name,
+            yaml_pipeline_file=self.yaml_pipeline_file,
+            append_job_name=self.append_job_name,
+            options=self.options,
+            jinja_variables=self.jinja_variables,
+            project_id=self.project_id,
+            location=self.region,
+        )
+
+        DataflowJobLink.persist(self, context, self.project_id, self.region, self.job_id)
+
+        if self.deferrable:
+            self.defer(
+                trigger=DataflowStartYamlJobTrigger(
+                    job_id=self.job_id,
+                    project_id=self.project_id,
+                    location=self.region,
+                    gcp_conn_id=self.gcp_conn_id,
+                    poll_sleep=self.poll_sleep,
+                    cancel_timeout=self.cancel_timeout,
+                    expected_terminal_state=self.expected_terminal_state,
+                    impersonation_chain=self.impersonation_chain,
+                ),
+                method_name=GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME,
+            )
+
+        self.hook.wait_for_done(
+            job_name=self.job_name, location=self.region, project_id=self.project_id, job_id=self.job_id
+        )
+        job = self.hook.get_job(job_id=self.job_id, location=self.region, project_id=self.project_id)
+        return job
+
+    def execute_complete(self, context: Context, event: dict) -> dict[str, Any]:
+        """Execute after the trigger returns an event."""
+        if event["status"] in ("error", "stopped"):
+            self.log.info("status: %s, msg: %s", event["status"], event["message"])
+            raise AirflowException(event["message"])
+        job = event["job"]
+        self.log.info("Job %s completed with response %s", job["id"], event["message"])
+        self.xcom_push(context, key="job_id", value=job["id"])
+
+        return job
+
+    def on_kill(self):
+        """
+        Cancel the dataflow job if a task instance gets killed.
+
+        This method will not be called if a task instance is killed in a deferred
+        state.
+        """
+        self.log.info("On kill called.")
+        if self.job_id:
+            self.hook.cancel_job(
+                job_id=self.job_id,
+                project_id=self.project_id,
+                location=self.region,
+            )
+
+    @cached_property
+    def hook(self) -> DataflowHook:
+        return DataflowHook(
+            gcp_conn_id=self.gcp_conn_id,
+            poll_sleep=self.poll_sleep,
+            impersonation_chain=self.impersonation_chain,
+            drain_pipeline=self.drain_pipeline,
+            cancel_timeout=self.cancel_timeout,
+            expected_terminal_state=self.expected_terminal_state,
+        )
+
+
 # TODO: Remove one day
 @deprecated(
-    reason="...",
+    planned_removal_date="November 01, 2024",
+    use_instead="providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator",
     category=AirflowProviderDeprecationWarning,
 )
 class DataflowCreatePythonJobOperator(GoogleCloudBaseOperator):
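Based on the constructor above, a minimal sketch of a DAG task that launches a YAML pipeline; the project, bucket path, and Jinja variable are hypothetical placeholders, and per the docstring warning the worker needs the Google Cloud SDK installed:

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.dataflow import DataflowStartYamlJobOperator

with DAG("dataflow_yaml_example", start_date=datetime(2024, 1, 1), schedule=None) as dag:
    start_yaml_job = DataflowStartYamlJobOperator(
        task_id="start_dataflow_yaml_job",
        job_name="example-yaml-job",
        yaml_pipeline_file="gs://my-bucket/pipelines/pipeline.yaml",
        project_id="my-project",
        region="us-central1",
        append_job_name=True,
        # Flags without a value take True or None, e.g. the --log-http flag:
        options={"log-http": True},
        jinja_variables={"input_table": "my_dataset.my_table"},
    )

The same operator is also the named replacement for the now-deprecated DataflowStartSqlJobOperator (see the hunk at line 949 above).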
airflow/providers/google/cloud/operators/datafusion.py

@@ -21,7 +21,6 @@ from __future__ import annotations
 import time
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
 from google.api_core.retry import exponential_sleep_generator
 from googleapiclient.errors import HttpError
 
@@ -37,6 +36,7 @@ from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
 from airflow.providers.google.cloud.triggers.datafusion import DataFusionStartPipelineTrigger
 from airflow.providers.google.cloud.utils.datafusion import DataFusionPipelineType
 from airflow.providers.google.cloud.utils.helpers import resource_path_to_dict
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 
 if TYPE_CHECKING:
@@ -53,7 +53,8 @@ class DataFusionPipelineLinkHelper:
 
     @staticmethod
     @deprecated(
-        reason="...",
+        planned_removal_date="March 01, 2025",
+        use_instead="airflow.providers.google.cloud.utils.helpers.resource_path_to_dict",
         category=AirflowProviderDeprecationWarning,
     )
     def get_project_id(instance):
airflow/providers/google/cloud/operators/datapipeline.py

@@ -19,20 +19,19 @@
 
 from __future__ import annotations
 
-from deprecated import deprecated
-
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.dataflow import DEFAULT_DATAFLOW_LOCATION
 from airflow.providers.google.cloud.operators.dataflow import (
     DataflowCreatePipelineOperator,
     DataflowRunPipelineOperator,
 )
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 
 
 @deprecated(
-    reason="..."
-    "...",
+    planned_removal_date="December 01, 2024",
+    use_instead="DataflowCreatePipelineOperator",
     category=AirflowProviderDeprecationWarning,
 )
 class CreateDataPipelineOperator(DataflowCreatePipelineOperator):
@@ -40,8 +39,8 @@ class CreateDataPipelineOperator(DataflowCreatePipelineOperator):
 
 
 @deprecated(
-    reason="..."
-    "...",
+    planned_removal_date="December 01, 2024",
+    use_instead="DataflowRunPipelineOperator",
     category=AirflowProviderDeprecationWarning,
 )
 class RunDataPipelineOperator(DataflowRunPipelineOperator):
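Because the deprecated classes are thin subclasses of their Dataflow counterparts (see the imports above), the migration is essentially a rename. A sketch with hypothetical project and pipeline values; the body is a minimal illustration of a Data Pipelines resource, not a complete one:

from airflow.providers.google.cloud.operators.dataflow import (
    DataflowCreatePipelineOperator,
    DataflowRunPipelineOperator,
)

# Formerly CreateDataPipelineOperator; only the class name changes.
create_pipeline = DataflowCreatePipelineOperator(
    task_id="create_pipeline",
    project_id="my-project",
    location="us-central1",
    body={
        "name": "projects/my-project/locations/us-central1/pipelines/my-pipeline",
        "type": "PIPELINE_TYPE_BATCH",
    },
)

# Formerly RunDataPipelineOperator.
run_pipeline = DataflowRunPipelineOperator(
    task_id="run_pipeline",
    pipeline_name="my-pipeline",
    project_id="my-project",
    location="us-central1",
)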
airflow/providers/google/cloud/operators/dataproc.py

@@ -33,7 +33,6 @@ from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated import deprecated
 from google.api_core.exceptions import AlreadyExists, NotFound
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.api_core.retry import Retry, exponential_sleep_generator
@@ -64,6 +63,7 @@ from airflow.providers.google.cloud.triggers.dataproc import (
     DataprocSubmitTrigger,
 )
 from airflow.providers.google.cloud.utils.dataproc import DataprocOperationType
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.utils import timezone
 
@@ -882,7 +882,8 @@ class DataprocCreateClusterOperator(GoogleCloudBaseOperator):
 
 # TODO: Remove one day
 @deprecated(
-    reason="...",
+    planned_removal_date="March 01, 2025",
+    use_instead="DataprocUpdateClusterOperator",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocScaleClusterOperator(GoogleCloudBaseOperator):
@@ -1503,13 +1504,11 @@ class DataprocJobBaseOperator(GoogleCloudBaseOperator):
         self.hook.cancel_job(project_id=self.project_id, job_id=self.dataproc_job_id, region=self.region)
 
 
-# TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
@@ -1630,13 +1629,15 @@ class DataprocSubmitPigJobOperator(DataprocJobBaseOperator):
         super().execute(context)
 
 
+# TODO: Remove one day
+
+
 # TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
@@ -1725,11 +1726,10 @@ class DataprocSubmitHiveJobOperator(DataprocJobBaseOperator):
 
 # TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
@@ -1817,11 +1817,10 @@ class DataprocSubmitSparkSqlJobOperator(DataprocJobBaseOperator):
 
 # TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
@@ -1909,11 +1908,10 @@ class DataprocSubmitSparkJobOperator(DataprocJobBaseOperator):
 
 # TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
@@ -2001,11 +1999,10 @@ class DataprocSubmitHadoopJobOperator(DataprocJobBaseOperator):
 
 # TODO: Remove one day
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="November 01, 2024",
+    use_instead="DataprocSubmitJobOperator",
+    instructions="You can use `generate_job` method to generate dictionary representing your job "
+    "and use it with the new operator.",
     category=AirflowProviderDeprecationWarning,
 )
 class DataprocSubmitPySparkJobOperator(DataprocJobBaseOperator):
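All five submit operators above point to the same replacement. A sketch of the suggested path, with hypothetical project, cluster, and query values: build the job dict (shaped like what `generate_job` returns, i.e. the Dataproc Job resource) and hand it to DataprocSubmitJobOperator:

from airflow.providers.google.cloud.operators.dataproc import DataprocSubmitJobOperator

# Job dict shaped like the Dataproc Job resource `generate_job` produces for a Pig task.
pig_job = {
    "reference": {"project_id": "my-project"},
    "placement": {"cluster_name": "my-cluster"},
    "pig_job": {"query_list": {"queries": ["DEFINE sin HiveUDF('sin');"]}},
}

submit_pig = DataprocSubmitJobOperator(
    task_id="submit_pig",
    job=pig_job,
    region="us-central1",
    project_id="my-project",
)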
airflow/providers/google/cloud/operators/gcs.py

@@ -795,11 +795,11 @@ class GCSTimeSpanFileTransformOperator(GoogleCloudBaseOperator):
             orig_end = context["data_interval_end"]
         except KeyError:
             orig_start = pendulum.instance(context["execution_date"])
-            following_execution_date = context["dag"].following_schedule(context["execution_date"])
-            if following_execution_date is None:
-                orig_end = None
+            next_dagrun = context["dag"].next_dagrun_info(last_automated_dagrun=None, restricted=False)
+            if next_dagrun and next_dagrun.data_interval and next_dagrun.data_interval.end:
+                orig_end = next_dagrun.data_interval.end
             else:
-                orig_end = pendulum.instance(following_execution_date)
+                orig_end = None
 
         timespan_start = orig_start
         if orig_end is None:  # Only possible in Airflow before 2.2.
airflow/providers/google/cloud/operators/kubernetes_engine.py

@@ -25,7 +25,6 @@ from typing import TYPE_CHECKING, Any, Sequence
 
 import requests
 import yaml
-from deprecated import deprecated
 from google.api_core.exceptions import AlreadyExists
 from google.cloud.container_v1.types import Cluster
 from kubernetes.client import V1JobList, models as k8s
@@ -57,6 +56,7 @@ from airflow.providers.google.cloud.triggers.kubernetes_engine import (
     GKEOperationTrigger,
     GKEStartPodTrigger,
 )
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 from airflow.providers_manager import ProvidersManager
 from airflow.utils.timezone import utcnow
@@ -725,7 +725,8 @@ class GKEStartPodOperator(KubernetesPodOperator):
 
     @staticmethod
     @deprecated(
-        reason="...",
+        planned_removal_date="November 01, 2024",
+        use_instead="fetch_cluster_info",
         category=AirflowProviderDeprecationWarning,
     )
     def get_gke_config_file():
@@ -943,13 +944,26 @@ class GKEStartJobOperator(KubernetesJobOperator):
                     ssl_ca_cert=self._ssl_ca_cert,
                     job_name=self.job.metadata.name,  # type: ignore[union-attr]
                     job_namespace=self.job.metadata.namespace,  # type: ignore[union-attr]
+                    pod_name=self.pod.metadata.name,  # type: ignore[union-attr]
+                    pod_namespace=self.pod.metadata.namespace,  # type: ignore[union-attr]
+                    base_container_name=self.base_container_name,
                     gcp_conn_id=self.gcp_conn_id,
                     poll_interval=self.job_poll_interval,
                     impersonation_chain=self.impersonation_chain,
+                    get_logs=self.get_logs,
+                    do_xcom_push=self.do_xcom_push,
                 ),
                 method_name="execute_complete",
+                kwargs={"cluster_url": self._cluster_url, "ssl_ca_cert": self._ssl_ca_cert},
             )
 
+    def execute_complete(self, context: Context, event: dict, **kwargs):
+        # It is required for hook to be initialized
+        self._cluster_url = kwargs["cluster_url"]
+        self._ssl_ca_cert = kwargs["ssl_ca_cert"]
+
+        return super().execute_complete(context, event)
+
 
 class GKEDescribeJobOperator(GoogleCloudBaseOperator):
     """
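The new `kwargs` argument to `self.defer` above is what lets `execute_complete` rebuild the hook: a deferred operator loses its instance state, so `_cluster_url` and `_ssl_ca_cert` must travel through the deferral. A stripped-down sketch of that round-trip, using a hypothetical operator and a stock trigger with placeholder values:

from datetime import timedelta

from airflow.models.baseoperator import BaseOperator
from airflow.triggers.temporal import TimeDeltaTrigger


class DeferRoundTripOperator(BaseOperator):  # hypothetical operator
    def execute(self, context):
        self.defer(
            trigger=TimeDeltaTrigger(timedelta(seconds=5)),
            method_name="execute_complete",
            # Everything here is re-delivered to execute_complete as **kwargs.
            kwargs={"cluster_url": "https://10.0.0.1", "ssl_ca_cert": "<PEM>"},
        )

    def execute_complete(self, context, event=None, **kwargs):
        # State restored after deferral, as GKEStartJobOperator does above.
        self.log.info("cluster_url=%s", kwargs["cluster_url"])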
airflow/providers/google/cloud/operators/life_sciences.py

@@ -21,12 +21,11 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
-from deprecated import deprecated
-
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.life_sciences import LifeSciencesHook
 from airflow.providers.google.cloud.links.life_sciences import LifeSciencesLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
 
 if TYPE_CHECKING:
@@ -34,11 +33,10 @@ if TYPE_CHECKING:
 
 
 @deprecated(
-    reason=(
-        "..."
-        "..."
-        "..."
-    ),
+    planned_removal_date="July 08, 2025",
+    use_instead="Google Cloud Batch Operators",
+    reason="The Life Sciences API (beta) will be discontinued "
+    "on July 8, 2025 in favor of Google Cloud Batch.",
     category=AirflowProviderDeprecationWarning,
 )
 class LifeSciencesRunPipelineOperator(GoogleCloudBaseOperator):
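For reference, a sketch of a move to the Cloud Batch operators named above; the operator and the `google.cloud.batch_v1` types exist in this provider and the Google client library, but the job definition here is a hypothetical minimal container task, not a translation of any particular Life Sciences pipeline:

from google.cloud import batch_v1

from airflow.providers.google.cloud.operators.cloud_batch import CloudBatchSubmitJobOperator

# A single container runnable; the image URI is a placeholder.
container_task = batch_v1.Runnable(
    container=batch_v1.Runnable.Container(image_uri="gcr.io/my-project/my-tool")
)
job = batch_v1.Job(
    task_groups=[batch_v1.TaskGroup(task_spec=batch_v1.TaskSpec(runnables=[container_task]))]
)

submit_batch_job = CloudBatchSubmitJobOperator(
    task_id="submit_batch_job",
    project_id="my-project",
    region="us-central1",
    job_name="my-batch-job",
    job=job,
)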
|