apache-airflow-providers-google 10.26.0__py3-none-any.whl → 11.0.0__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (205)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +2 -1
  3. airflow/providers/google/ads/operators/ads.py +2 -1
  4. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -1
  5. airflow/providers/google/assets/gcs.py +17 -1
  6. airflow/providers/google/cloud/hooks/automl.py +3 -6
  7. airflow/providers/google/cloud/hooks/bigquery.py +41 -1486
  8. airflow/providers/google/cloud/hooks/bigquery_dts.py +4 -11
  9. airflow/providers/google/cloud/hooks/bigtable.py +3 -6
  10. airflow/providers/google/cloud/hooks/cloud_batch.py +6 -3
  11. airflow/providers/google/cloud/hooks/cloud_build.py +3 -15
  12. airflow/providers/google/cloud/hooks/cloud_composer.py +2 -17
  13. airflow/providers/google/cloud/hooks/cloud_memorystore.py +5 -6
  14. airflow/providers/google/cloud/hooks/cloud_run.py +10 -5
  15. airflow/providers/google/cloud/hooks/cloud_sql.py +5 -7
  16. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +3 -7
  17. airflow/providers/google/cloud/hooks/compute.py +3 -6
  18. airflow/providers/google/cloud/hooks/compute_ssh.py +0 -5
  19. airflow/providers/google/cloud/hooks/datacatalog.py +3 -6
  20. airflow/providers/google/cloud/hooks/dataflow.py +3 -14
  21. airflow/providers/google/cloud/hooks/dataform.py +2 -9
  22. airflow/providers/google/cloud/hooks/datafusion.py +4 -15
  23. airflow/providers/google/cloud/hooks/dataplex.py +4 -7
  24. airflow/providers/google/cloud/hooks/dataprep.py +2 -2
  25. airflow/providers/google/cloud/hooks/dataproc.py +77 -22
  26. airflow/providers/google/cloud/hooks/dataproc_metastore.py +2 -9
  27. airflow/providers/google/cloud/hooks/datastore.py +3 -6
  28. airflow/providers/google/cloud/hooks/dlp.py +3 -6
  29. airflow/providers/google/cloud/hooks/functions.py +2 -6
  30. airflow/providers/google/cloud/hooks/gcs.py +2 -18
  31. airflow/providers/google/cloud/hooks/gdm.py +1 -17
  32. airflow/providers/google/cloud/hooks/kms.py +3 -6
  33. airflow/providers/google/cloud/hooks/kubernetes_engine.py +7 -97
  34. airflow/providers/google/cloud/hooks/life_sciences.py +2 -6
  35. airflow/providers/google/cloud/hooks/looker.py +2 -1
  36. airflow/providers/google/cloud/hooks/mlengine.py +0 -8
  37. airflow/providers/google/cloud/hooks/natural_language.py +3 -6
  38. airflow/providers/google/cloud/hooks/os_login.py +3 -6
  39. airflow/providers/google/cloud/hooks/pubsub.py +3 -6
  40. airflow/providers/google/cloud/hooks/secret_manager.py +3 -73
  41. airflow/providers/google/cloud/hooks/spanner.py +3 -6
  42. airflow/providers/google/cloud/hooks/speech_to_text.py +3 -6
  43. airflow/providers/google/cloud/hooks/stackdriver.py +3 -6
  44. airflow/providers/google/cloud/hooks/tasks.py +3 -6
  45. airflow/providers/google/cloud/hooks/text_to_speech.py +3 -6
  46. airflow/providers/google/cloud/hooks/translate.py +455 -9
  47. airflow/providers/google/cloud/hooks/vertex_ai/auto_ml.py +3 -6
  48. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +3 -6
  49. airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py +3 -6
  50. airflow/providers/google/cloud/hooks/vertex_ai/dataset.py +2 -9
  51. airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py +2 -9
  52. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +1 -14
  53. airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py +3 -6
  54. airflow/providers/google/cloud/hooks/vertex_ai/model_service.py +2 -9
  55. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +3 -1
  56. airflow/providers/google/cloud/hooks/vertex_ai/prediction_service.py +2 -1
  57. airflow/providers/google/cloud/hooks/video_intelligence.py +3 -6
  58. airflow/providers/google/cloud/hooks/vision.py +3 -6
  59. airflow/providers/google/cloud/hooks/workflows.py +2 -9
  60. airflow/providers/google/cloud/links/dataproc.py +0 -1
  61. airflow/providers/google/cloud/links/translate.py +91 -0
  62. airflow/providers/google/cloud/log/gcs_task_handler.py +2 -1
  63. airflow/providers/google/cloud/log/stackdriver_task_handler.py +11 -3
  64. airflow/providers/google/cloud/openlineage/utils.py +54 -21
  65. airflow/providers/google/cloud/operators/automl.py +5 -4
  66. airflow/providers/google/cloud/operators/bigquery.py +2 -341
  67. airflow/providers/google/cloud/operators/bigquery_dts.py +2 -1
  68. airflow/providers/google/cloud/operators/bigtable.py +2 -1
  69. airflow/providers/google/cloud/operators/cloud_batch.py +2 -1
  70. airflow/providers/google/cloud/operators/cloud_build.py +2 -1
  71. airflow/providers/google/cloud/operators/cloud_composer.py +2 -1
  72. airflow/providers/google/cloud/operators/cloud_memorystore.py +2 -1
  73. airflow/providers/google/cloud/operators/cloud_run.py +2 -1
  74. airflow/providers/google/cloud/operators/cloud_sql.py +2 -1
  75. airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py +2 -1
  76. airflow/providers/google/cloud/operators/compute.py +2 -1
  77. airflow/providers/google/cloud/operators/datacatalog.py +2 -1
  78. airflow/providers/google/cloud/operators/dataflow.py +2 -517
  79. airflow/providers/google/cloud/operators/dataform.py +2 -1
  80. airflow/providers/google/cloud/operators/datafusion.py +2 -1
  81. airflow/providers/google/cloud/operators/dataplex.py +37 -31
  82. airflow/providers/google/cloud/operators/dataprep.py +2 -1
  83. airflow/providers/google/cloud/operators/dataproc.py +3 -633
  84. airflow/providers/google/cloud/operators/dataproc_metastore.py +2 -1
  85. airflow/providers/google/cloud/operators/datastore.py +2 -1
  86. airflow/providers/google/cloud/operators/dlp.py +2 -1
  87. airflow/providers/google/cloud/operators/functions.py +2 -1
  88. airflow/providers/google/cloud/operators/gcs.py +5 -4
  89. airflow/providers/google/cloud/operators/kubernetes_engine.py +2 -11
  90. airflow/providers/google/cloud/operators/life_sciences.py +2 -1
  91. airflow/providers/google/cloud/operators/mlengine.py +2 -1
  92. airflow/providers/google/cloud/operators/natural_language.py +3 -2
  93. airflow/providers/google/cloud/operators/pubsub.py +2 -1
  94. airflow/providers/google/cloud/operators/spanner.py +2 -1
  95. airflow/providers/google/cloud/operators/speech_to_text.py +2 -1
  96. airflow/providers/google/cloud/operators/stackdriver.py +2 -1
  97. airflow/providers/google/cloud/operators/tasks.py +3 -2
  98. airflow/providers/google/cloud/operators/text_to_speech.py +2 -1
  99. airflow/providers/google/cloud/operators/translate.py +622 -32
  100. airflow/providers/google/cloud/operators/translate_speech.py +2 -1
  101. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +2 -93
  102. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +3 -13
  103. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +2 -17
  104. airflow/providers/google/cloud/operators/vertex_ai/dataset.py +2 -1
  105. airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py +2 -1
  106. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +2 -1
  107. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +3 -13
  108. airflow/providers/google/cloud/operators/vertex_ai/model_service.py +2 -1
  109. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +2 -1
  110. airflow/providers/google/cloud/operators/video_intelligence.py +2 -1
  111. airflow/providers/google/cloud/operators/vision.py +3 -2
  112. airflow/providers/google/cloud/operators/workflows.py +3 -2
  113. airflow/providers/google/cloud/secrets/secret_manager.py +2 -19
  114. airflow/providers/google/cloud/sensors/bigquery.py +2 -81
  115. airflow/providers/google/cloud/sensors/bigquery_dts.py +2 -1
  116. airflow/providers/google/cloud/sensors/bigtable.py +2 -1
  117. airflow/providers/google/cloud/sensors/cloud_composer.py +8 -94
  118. airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py +2 -1
  119. airflow/providers/google/cloud/sensors/dataflow.py +2 -1
  120. airflow/providers/google/cloud/sensors/dataform.py +2 -1
  121. airflow/providers/google/cloud/sensors/datafusion.py +2 -1
  122. airflow/providers/google/cloud/sensors/dataplex.py +2 -1
  123. airflow/providers/google/cloud/sensors/dataprep.py +2 -1
  124. airflow/providers/google/cloud/sensors/dataproc.py +2 -1
  125. airflow/providers/google/cloud/sensors/dataproc_metastore.py +2 -1
  126. airflow/providers/google/cloud/sensors/gcs.py +4 -36
  127. airflow/providers/google/cloud/sensors/pubsub.py +2 -1
  128. airflow/providers/google/cloud/sensors/tasks.py +2 -1
  129. airflow/providers/google/cloud/sensors/workflows.py +2 -1
  130. airflow/providers/google/cloud/transfers/adls_to_gcs.py +2 -1
  131. airflow/providers/google/cloud/transfers/azure_blob_to_gcs.py +2 -1
  132. airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py +2 -1
  133. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +75 -18
  134. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +9 -7
  135. airflow/providers/google/cloud/transfers/bigquery_to_mssql.py +2 -1
  136. airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +1 -1
  137. airflow/providers/google/cloud/transfers/bigquery_to_sql.py +2 -1
  138. airflow/providers/google/cloud/transfers/calendar_to_gcs.py +2 -1
  139. airflow/providers/google/cloud/transfers/cassandra_to_gcs.py +2 -1
  140. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -1
  141. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +13 -9
  142. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +2 -1
  143. airflow/providers/google/cloud/transfers/gcs_to_local.py +2 -1
  144. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +2 -1
  145. airflow/providers/google/cloud/transfers/gdrive_to_gcs.py +2 -1
  146. airflow/providers/google/cloud/transfers/gdrive_to_local.py +2 -1
  147. airflow/providers/google/cloud/transfers/local_to_gcs.py +2 -1
  148. airflow/providers/google/cloud/transfers/mssql_to_gcs.py +1 -1
  149. airflow/providers/google/cloud/transfers/s3_to_gcs.py +2 -1
  150. airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +2 -1
  151. airflow/providers/google/cloud/transfers/sftp_to_gcs.py +2 -1
  152. airflow/providers/google/cloud/transfers/sheets_to_gcs.py +2 -1
  153. airflow/providers/google/cloud/transfers/sql_to_gcs.py +2 -1
  154. airflow/providers/google/cloud/triggers/bigquery.py +2 -1
  155. airflow/providers/google/cloud/triggers/bigquery_dts.py +2 -1
  156. airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
  157. airflow/providers/google/cloud/triggers/cloud_build.py +2 -1
  158. airflow/providers/google/cloud/triggers/cloud_composer.py +3 -2
  159. airflow/providers/google/cloud/triggers/cloud_run.py +2 -1
  160. airflow/providers/google/cloud/triggers/cloud_sql.py +1 -1
  161. airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py +2 -1
  162. airflow/providers/google/cloud/triggers/dataflow.py +2 -1
  163. airflow/providers/google/cloud/triggers/datafusion.py +2 -1
  164. airflow/providers/google/cloud/triggers/dataplex.py +1 -1
  165. airflow/providers/google/cloud/triggers/dataproc.py +2 -1
  166. airflow/providers/google/cloud/triggers/gcs.py +3 -2
  167. airflow/providers/google/cloud/triggers/kubernetes_engine.py +2 -1
  168. airflow/providers/google/cloud/triggers/mlengine.py +2 -1
  169. airflow/providers/google/cloud/triggers/pubsub.py +2 -1
  170. airflow/providers/google/cloud/triggers/vertex_ai.py +2 -1
  171. airflow/providers/google/cloud/utils/credentials_provider.py +1 -1
  172. airflow/providers/google/cloud/utils/dataform.py +1 -1
  173. airflow/providers/google/cloud/utils/field_validator.py +2 -1
  174. airflow/providers/google/cloud/utils/mlengine_operator_utils.py +2 -1
  175. airflow/providers/google/common/hooks/base_google.py +4 -11
  176. airflow/providers/google/common/hooks/discovery_api.py +1 -6
  177. airflow/providers/google/firebase/hooks/firestore.py +1 -1
  178. airflow/providers/google/firebase/operators/firestore.py +2 -1
  179. airflow/providers/google/get_provider_info.py +7 -22
  180. airflow/providers/google/marketing_platform/hooks/analytics_admin.py +2 -1
  181. airflow/providers/google/marketing_platform/hooks/campaign_manager.py +2 -3
  182. airflow/providers/google/marketing_platform/hooks/display_video.py +4 -3
  183. airflow/providers/google/marketing_platform/hooks/search_ads.py +6 -6
  184. airflow/providers/google/marketing_platform/operators/analytics_admin.py +2 -1
  185. airflow/providers/google/marketing_platform/operators/campaign_manager.py +2 -42
  186. airflow/providers/google/marketing_platform/operators/display_video.py +2 -47
  187. airflow/providers/google/marketing_platform/operators/search_ads.py +2 -1
  188. airflow/providers/google/marketing_platform/sensors/campaign_manager.py +2 -7
  189. airflow/providers/google/marketing_platform/sensors/display_video.py +2 -13
  190. airflow/providers/google/suite/hooks/calendar.py +2 -8
  191. airflow/providers/google/suite/hooks/drive.py +2 -6
  192. airflow/providers/google/suite/hooks/sheets.py +2 -7
  193. airflow/providers/google/suite/operators/sheets.py +2 -7
  194. airflow/providers/google/suite/sensors/drive.py +2 -7
  195. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +2 -7
  196. airflow/providers/google/suite/transfers/gcs_to_sheets.py +2 -7
  197. airflow/providers/google/suite/transfers/local_to_drive.py +2 -7
  198. airflow/providers/google/suite/transfers/sql_to_sheets.py +2 -7
  199. {apache_airflow_providers_google-10.26.0.dist-info → apache_airflow_providers_google-11.0.0.dist-info}/METADATA +10 -10
  200. apache_airflow_providers_google-11.0.0.dist-info/RECORD +315 -0
  201. airflow/providers/google/marketing_platform/hooks/analytics.py +0 -211
  202. airflow/providers/google/marketing_platform/operators/analytics.py +0 -551
  203. apache_airflow_providers_google-10.26.0.dist-info/RECORD +0 -317
  204. {apache_airflow_providers_google-10.26.0.dist-info → apache_airflow_providers_google-11.0.0.dist-info}/WHEEL +0 -0
  205. {apache_airflow_providers_google-10.26.0.dist-info → apache_airflow_providers_google-11.0.0.dist-info}/entry_points.txt +0 -0
@@ -19,25 +19,20 @@
 
 from __future__ import annotations
 
-import copy
-import re
 import uuid
-from contextlib import ExitStack
+from collections.abc import Sequence
 from enum import Enum
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from googleapiclient.errors import HttpError
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
 from airflow.providers.google.cloud.hooks.dataflow import (
     DEFAULT_DATAFLOW_LOCATION,
     DataflowHook,
-    process_line_and_extract_dataflow_job_id_callback,
 )
-from airflow.providers.google.cloud.hooks.gcs import GCSHook
 from airflow.providers.google.cloud.links.dataflow import DataflowJobLink, DataflowPipelineLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
 from airflow.providers.google.cloud.triggers.dataflow import (
@@ -47,7 +42,6 @@ from airflow.providers.google.cloud.triggers.dataflow import (
 from airflow.providers.google.common.consts import GOOGLE_DEFAULT_DEFERRABLE_METHOD_NAME
 from airflow.providers.google.common.deprecated import deprecated
 from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID
-from airflow.version import version
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
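
Note: nearly every small "+2 -1" entry in the file list above is the same mechanical import swap shown in this hunk: Sequence is now imported from collections.abc instead of typing, because the typing alias has been deprecated since Python 3.9. A minimal sketch of the pattern (the class and attribute are illustrative, not taken from the diff):

    from collections.abc import Sequence
    from typing import TYPE_CHECKING, Any


    class ExampleOperator:
        # typing.Sequence was only an alias of collections.abc.Sequence,
        # so annotations keep the same meaning after the swap.
        template_fields: Sequence[str] = ("options", "jar", "job_name")
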
@@ -175,297 +169,6 @@ class DataflowConfiguration:
         self.service_account = service_account
 
 
-# TODO: Remove one day
-@deprecated(
-    planned_removal_date="November 01, 2024",
-    use_instead="providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator",
-    category=AirflowProviderDeprecationWarning,
-)
-class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
-    """
-    Start a Java Cloud Dataflow batch job; the parameters of the operation will be passed to the job.
-
-    This class is deprecated.
-
-    Please use :class:`providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`.
-
-    Example usage:
-
-    .. code-block:: python
-
-        default_args = {
-            "owner": "airflow",
-            "depends_on_past": False,
-            "start_date": (2016, 8, 1),
-            "email": ["alex@vanboxel.be"],
-            "email_on_failure": False,
-            "email_on_retry": False,
-            "retries": 1,
-            "retry_delay": timedelta(minutes=30),
-            "dataflow_default_options": {
-                "project": "my-gcp-project",
-                "zone": "us-central1-f",
-                "stagingLocation": "gs://bucket/tmp/dataflow/staging/",
-            },
-        }
-
-        dag = DAG("test-dag", default_args=default_args)
-
-        task = DataflowCreateJavaJobOperator(
-            gcp_conn_id="gcp_default",
-            task_id="normalize-cal",
-            jar="{{var.value.gcp_dataflow_base}}pipeline-ingress-cal-normalize-1.0.jar",
-            options={
-                "autoscalingAlgorithm": "BASIC",
-                "maxNumWorkers": "50",
-                "start": "{{ds}}",
-                "partitionType": "DAY",
-            },
-            dag=dag,
-        )
-
-
-    .. seealso::
-        For more detail on job submission have a look at the reference:
-        https://cloud.google.com/dataflow/pipelines/specifying-exec-params
-
-    .. seealso::
-        For more information on how to use this operator, take a look at the guide:
-        :ref:`howto/operator:DataflowCreateJavaJobOperator`
-
-    :param jar: The reference to a self executing Dataflow jar (templated).
-    :param job_name: The 'jobName' to use when executing the Dataflow job
-        (templated). This ends up being set in the pipeline options, so any entry
-        with key ``'jobName'`` in ``options`` will be overwritten.
-    :param dataflow_default_options: Map of default job options.
-    :param options: Map of job specific options.The key must be a dictionary.
-
-        The value can contain different types:
-
-        * If the value is None, the single option - ``--key`` (without value) will be added.
-        * If the value is False, this option will be skipped
-        * If the value is True, the single option - ``--key`` (without value) will be added.
-        * If the value is list, the many options will be added for each key.
-          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B`` options
-          will be left
-        * Other value types will be replaced with the Python textual representation.
-
-        When defining labels (``labels`` option), you can also provide a dictionary.
-
-    :param project_id: Optional, the Google Cloud project ID in which to start a job.
-        If set to None or missing, the default project_id from the Google Cloud connection is used.
-    :param location: Job location.
-    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
-    :param poll_sleep: The time in seconds to sleep between polling Google
-        Cloud Platform for the dataflow job status while the job is in the
-        JOB_STATE_RUNNING state.
-    :param job_class: The name of the dataflow job class to be executed, it
-        is often not the main class configured in the dataflow jar file.
-
-    :param multiple_jobs: If pipeline creates multiple jobs then monitor all jobs
-    :param check_if_running: before running job, validate that a previous run is not in process
-        if job is running finish with nothing, WaitForRun= wait until job finished and the run job)
-        ``jar``, ``options``, and ``job_name`` are templated so you can use variables in them.
-    :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be
-        successfully cancelled when task is being killed.
-    :param wait_until_finished: (Optional)
-        If True, wait for the end of pipeline execution before exiting.
-        If False, only submits job.
-        If None, default behavior.
-
-        The default behavior depends on the type of pipeline:
-
-        * for the streaming pipeline, wait for jobs to start,
-        * for the batch pipeline, wait for the jobs to complete.
-
-        .. warning::
-
-            You cannot call ``PipelineResult.wait_until_finish`` method in your pipeline code for the operator
-            to work properly. i. e. you must use asynchronous execution. Otherwise, your pipeline will
-            always wait until finished. For more information, look at:
-            `Asynchronous execution
-            <https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#python_10>`__
-
-        The process of starting the Dataflow job in Airflow consists of two steps:
-
-        * running a subprocess and reading the stderr/stderr log for the job id.
-        * loop waiting for the end of the job ID from the previous step.
-          This loop checks the status of the job.
-
-        Step two is started just after step one has finished, so if you have wait_until_finished in your
-        pipeline code, step two will not start until the process stops. When this process stops,
-        steps two will run, but it will only execute one iteration as the job will be in a terminal state.
-
-        If you in your pipeline do not call the wait_for_pipeline method but pass wait_until_finish=True
-        to the operator, the second loop will wait for the job's terminal state.
-
-        If you in your pipeline do not call the wait_for_pipeline method, and pass wait_until_finish=False
-        to the operator, the second loop will check once is job not in terminal state and exit the loop.
-    :param expected_terminal_state: The expected terminal state of the operator on which the corresponding
-        Airflow task succeeds. When not specified, it will be determined by the hook.
-
-    Note that both
-    ``dataflow_default_options`` and ``options`` will be merged to specify pipeline
-    execution parameter, and ``dataflow_default_options`` is expected to save
-    high-level options, for instances, project and zone information, which
-    apply to all dataflow operators in the DAG.
-
-    It's a good practice to define dataflow_* parameters in the default_args of the dag
-    like the project, zone and staging location.
-
-    .. code-block:: python
-
-        default_args = {
-            "dataflow_default_options": {
-                "zone": "europe-west1-d",
-                "stagingLocation": "gs://my-staging-bucket/staging/",
-            }
-        }
-
-    You need to pass the path to your dataflow as a file reference with the ``jar``
-    parameter, the jar needs to be a self executing jar (see documentation here:
-    https://beam.apache.org/documentation/runners/dataflow/#self-executing-jar).
-    Use ``options`` to pass on options to your job.
-
-    .. code-block:: python
-
-        t1 = DataflowCreateJavaJobOperator(
-            task_id="dataflow_example",
-            jar="{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar",
-            options={
-                "autoscalingAlgorithm": "BASIC",
-                "maxNumWorkers": "50",
-                "start": "{{ds}}",
-                "partitionType": "DAY",
-                "labels": {"foo": "bar"},
-            },
-            gcp_conn_id="airflow-conn-id",
-            dag=my_dag,
-        )
-
-    """
-
-    template_fields: Sequence[str] = ("options", "jar", "job_name")
-    ui_color = "#0273d4"
-
-    def __init__(
-        self,
-        *,
-        jar: str,
-        job_name: str = "{{task.task_id}}",
-        dataflow_default_options: dict | None = None,
-        options: dict | None = None,
-        project_id: str = PROVIDE_PROJECT_ID,
-        location: str = DEFAULT_DATAFLOW_LOCATION,
-        gcp_conn_id: str = "google_cloud_default",
-        poll_sleep: int = 10,
-        job_class: str | None = None,
-        check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun,
-        multiple_jobs: bool = False,
-        cancel_timeout: int | None = 10 * 60,
-        wait_until_finished: bool | None = None,
-        expected_terminal_state: str | None = None,
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-
-        dataflow_default_options = dataflow_default_options or {}
-        options = options or {}
-        options.setdefault("labels", {}).update(
-            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
-        )
-        self.project_id = project_id
-        self.location = location
-        self.gcp_conn_id = gcp_conn_id
-        self.jar = jar
-        self.multiple_jobs = multiple_jobs
-        self.job_name = job_name
-        self.dataflow_default_options = dataflow_default_options
-        self.options = options
-        self.poll_sleep = poll_sleep
-        self.job_class = job_class
-        self.check_if_running = check_if_running
-        self.cancel_timeout = cancel_timeout
-        self.wait_until_finished = wait_until_finished
-        self.expected_terminal_state = expected_terminal_state
-        self.job_id = None
-        self.beam_hook: BeamHook | None = None
-        self.dataflow_hook: DataflowHook | None = None
-
-    def execute(self, context: Context):
-        """Execute the Apache Beam Pipeline."""
-        self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner)
-        self.dataflow_hook = DataflowHook(
-            gcp_conn_id=self.gcp_conn_id,
-            poll_sleep=self.poll_sleep,
-            cancel_timeout=self.cancel_timeout,
-            wait_until_finished=self.wait_until_finished,
-            expected_terminal_state=self.expected_terminal_state,
-        )
-        job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name)
-        pipeline_options = copy.deepcopy(self.dataflow_default_options)
-
-        pipeline_options["jobName"] = self.job_name
-        pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id
-        pipeline_options["region"] = self.location
-        pipeline_options.update(self.options)
-        pipeline_options.setdefault("labels", {}).update(
-            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
-        )
-        pipeline_options.update(self.options)
-
-        def set_current_job_id(job_id):
-            self.job_id = job_id
-
-        process_line_callback = process_line_and_extract_dataflow_job_id_callback(
-            on_new_job_id_callback=set_current_job_id
-        )
-
-        with ExitStack() as exit_stack:
-            if self.jar.lower().startswith("gs://"):
-                gcs_hook = GCSHook(self.gcp_conn_id)
-                tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.jar))
-                self.jar = tmp_gcs_file.name
-
-            is_running = False
-            if self.check_if_running != CheckJobRunning.IgnoreJob:
-                is_running = self.dataflow_hook.is_job_dataflow_running(
-                    name=self.job_name,
-                    variables=pipeline_options,
-                    location=self.location,
-                )
-                while is_running and self.check_if_running == CheckJobRunning.WaitForRun:
-                    is_running = self.dataflow_hook.is_job_dataflow_running(
-                        name=self.job_name,
-                        variables=pipeline_options,
-                        location=self.location,
-                    )
-            if not is_running:
-                pipeline_options["jobName"] = job_name
-                with self.dataflow_hook.provide_authorized_gcloud():
-                    self.beam_hook.start_java_pipeline(
-                        variables=pipeline_options,
-                        jar=self.jar,
-                        job_class=self.job_class,
-                        process_line_callback=process_line_callback,
-                    )
-                self.dataflow_hook.wait_for_done(
-                    job_name=job_name,
-                    location=self.location,
-                    job_id=self.job_id,
-                    multiple_jobs=self.multiple_jobs,
-                )
-
-        return {"job_id": self.job_id}
-
-    def on_kill(self) -> None:
-        self.log.info("On kill.")
-        if self.job_id:
-            self.dataflow_hook.cancel_job(
-                job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id
-            )
-
-
 class DataflowTemplatedJobStartOperator(GoogleCloudBaseOperator):
     """
     Start a Dataflow job with a classic template; the parameters of the operation will be passed to the job.
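
Note: the hunk above removes DataflowCreateJavaJobOperator, which its own @deprecated decorator pointed at BeamRunJavaPipelineOperator in the apache-beam provider. A rough migration sketch, assuming apache-airflow-providers-apache-beam is installed (the jar path, project, and region are placeholders, not values from the diff):

    from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
    from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration

    # Counterpart of the removed operator's docstring example: options become
    # pipeline_options, and project/location/job naming move to dataflow_config.
    task = BeamRunJavaPipelineOperator(
        task_id="dataflow_example",
        jar="{{var.value.gcp_dataflow_base}}pipeline/build/libs/pipeline-example-1.0.jar",
        runner="DataflowRunner",
        pipeline_options={
            "autoscalingAlgorithm": "BASIC",
            "maxNumWorkers": "50",
        },
        dataflow_config=DataflowConfiguration(
            job_name="{{task.task_id}}",
            project_id="my-gcp-project",
            location="us-central1",
        ),
        gcp_conn_id="google_cloud_default",
    )
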
@@ -1233,224 +936,6 @@ class DataflowStartYamlJobOperator(GoogleCloudBaseOperator):
         )
 
 
-# TODO: Remove one day
-@deprecated(
-    planned_removal_date="November 01, 2024",
-    use_instead="providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator",
-    category=AirflowProviderDeprecationWarning,
-)
-class DataflowCreatePythonJobOperator(GoogleCloudBaseOperator):
-    """
-    Launching Cloud Dataflow jobs written in python.
-
-    Note that both dataflow_default_options and options will be merged to specify pipeline
-    execution parameter, and dataflow_default_options is expected to save high-level options,
-    for instances, project and zone information, which apply to all dataflow operators in the DAG.
-
-    This class is deprecated.
-
-    Please use :class:`providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`.
-
-    .. seealso::
-        For more detail on job submission have a look at the reference:
-        https://cloud.google.com/dataflow/pipelines/specifying-exec-params
-
-    .. seealso::
-        For more information on how to use this operator, take a look at the guide:
-        :ref:`howto/operator:DataflowCreatePythonJobOperator`
-
-    :param py_file: Reference to the python dataflow pipeline file.py, e.g.,
-        /some/local/file/path/to/your/python/pipeline/file. (templated)
-    :param job_name: The 'job_name' to use when executing the Dataflow job
-        (templated). This ends up being set in the pipeline options, so any entry
-        with key ``'jobName'`` or ``'job_name'`` in ``options`` will be overwritten.
-    :param py_options: Additional python options, e.g., ["-m", "-v"].
-    :param dataflow_default_options: Map of default job options.
-    :param options: Map of job specific options.The key must be a dictionary.
-        The value can contain different types:
-
-        * If the value is None, the single option - ``--key`` (without value) will be added.
-        * If the value is False, this option will be skipped
-        * If the value is True, the single option - ``--key`` (without value) will be added.
-        * If the value is list, the many options will be added for each key.
-          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B`` options
-          will be left
-        * Other value types will be replaced with the Python textual representation.
-
-        When defining labels (``labels`` option), you can also provide a dictionary.
-    :param py_interpreter: Python version of the beam pipeline.
-        If None, this defaults to the python3.
-        To track python versions supported by beam and related
-        issues check: https://issues.apache.org/jira/browse/BEAM-1251
-    :param py_requirements: Additional python package(s) to install.
-        If a value is passed to this parameter, a new virtual environment has been created with
-        additional packages installed.
-
-        You could also install the apache_beam package if it is not installed on your system or you want
-        to use a different version.
-    :param py_system_site_packages: Whether to include system_site_packages in your virtualenv.
-        See virtualenv documentation for more information.
-
-        This option is only relevant if the ``py_requirements`` parameter is not None.
-    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
-    :param project_id: Optional, the Google Cloud project ID in which to start a job.
-        If set to None or missing, the default project_id from the Google Cloud connection is used.
-    :param location: Job location.
-    :param poll_sleep: The time in seconds to sleep between polling Google
-        Cloud Platform for the dataflow job status while the job is in the
-        JOB_STATE_RUNNING state.
-    :param drain_pipeline: Optional, set to True if want to stop streaming job by draining it
-        instead of canceling during killing task instance. See:
-        https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline
-    :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be
-        successfully cancelled when task is being killed.
-    :param wait_until_finished: (Optional)
-        If True, wait for the end of pipeline execution before exiting.
-        If False, only submits job.
-        If None, default behavior.
-
-        The default behavior depends on the type of pipeline:
-
-        * for the streaming pipeline, wait for jobs to start,
-        * for the batch pipeline, wait for the jobs to complete.
-
-        .. warning::
-
-            You cannot call ``PipelineResult.wait_until_finish`` method in your pipeline code for the operator
-            to work properly. i. e. you must use asynchronous execution. Otherwise, your pipeline will
-            always wait until finished. For more information, look at:
-            `Asynchronous execution
-            <https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#python_10>`__
-
-        The process of starting the Dataflow job in Airflow consists of two steps:
-
-        * running a subprocess and reading the stderr/stderr log for the job id.
-        * loop waiting for the end of the job ID from the previous step.
-          This loop checks the status of the job.
-
-        Step two is started just after step one has finished, so if you have wait_until_finished in your
-        pipeline code, step two will not start until the process stops. When this process stops,
-        steps two will run, but it will only execute one iteration as the job will be in a terminal state.
-
-        If you in your pipeline do not call the wait_for_pipeline method but pass wait_until_finish=True
-        to the operator, the second loop will wait for the job's terminal state.
-
-        If you in your pipeline do not call the wait_for_pipeline method, and pass wait_until_finish=False
-        to the operator, the second loop will check once is job not in terminal state and exit the loop.
-    """
-
-    template_fields: Sequence[str] = ("options", "dataflow_default_options", "job_name", "py_file")
-
-    def __init__(
-        self,
-        *,
-        py_file: str,
-        job_name: str = "{{task.task_id}}",
-        dataflow_default_options: dict | None = None,
-        options: dict | None = None,
-        py_interpreter: str = "python3",
-        py_options: list[str] | None = None,
-        py_requirements: list[str] | None = None,
-        py_system_site_packages: bool = False,
-        project_id: str = PROVIDE_PROJECT_ID,
-        location: str = DEFAULT_DATAFLOW_LOCATION,
-        gcp_conn_id: str = "google_cloud_default",
-        poll_sleep: int = 10,
-        drain_pipeline: bool = False,
-        cancel_timeout: int | None = 10 * 60,
-        wait_until_finished: bool | None = None,
-        **kwargs,
-    ) -> None:
-        super().__init__(**kwargs)
-
-        self.py_file = py_file
-        self.job_name = job_name
-        self.py_options = py_options or []
-        self.dataflow_default_options = dataflow_default_options or {}
-        self.options = options or {}
-        self.options.setdefault("labels", {}).update(
-            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
-        )
-        self.py_interpreter = py_interpreter
-        self.py_requirements = py_requirements
-        self.py_system_site_packages = py_system_site_packages
-        self.project_id = project_id
-        self.location = location
-        self.gcp_conn_id = gcp_conn_id
-        self.poll_sleep = poll_sleep
-        self.drain_pipeline = drain_pipeline
-        self.cancel_timeout = cancel_timeout
-        self.wait_until_finished = wait_until_finished
-        self.job_id = None
-        self.beam_hook: BeamHook | None = None
-        self.dataflow_hook: DataflowHook | None = None
-
-    def execute(self, context: Context):
-        """Execute the python dataflow job."""
-        self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner)
-        self.dataflow_hook = DataflowHook(
-            gcp_conn_id=self.gcp_conn_id,
-            poll_sleep=self.poll_sleep,
-            impersonation_chain=None,
-            drain_pipeline=self.drain_pipeline,
-            cancel_timeout=self.cancel_timeout,
-            wait_until_finished=self.wait_until_finished,
-        )
-
-        job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name)
-        pipeline_options = self.dataflow_default_options.copy()
-        pipeline_options["job_name"] = job_name
-        pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id
-        pipeline_options["region"] = self.location
-        pipeline_options.update(self.options)
-
-        # Convert argument names from lowerCamelCase to snake case.
-        def camel_to_snake(name):
-            return re.sub("[A-Z]", lambda x: "_" + x.group(0).lower(), name)
-
-        formatted_pipeline_options = {camel_to_snake(key): pipeline_options[key] for key in pipeline_options}
-
-        def set_current_job_id(job_id):
-            self.job_id = job_id
-
-        process_line_callback = process_line_and_extract_dataflow_job_id_callback(
-            on_new_job_id_callback=set_current_job_id
-        )
-
-        with ExitStack() as exit_stack:
-            if self.py_file.lower().startswith("gs://"):
-                gcs_hook = GCSHook(self.gcp_conn_id)
-                tmp_gcs_file = exit_stack.enter_context(gcs_hook.provide_file(object_url=self.py_file))
-                self.py_file = tmp_gcs_file.name
-
-            with self.dataflow_hook.provide_authorized_gcloud():
-                self.beam_hook.start_python_pipeline(
-                    variables=formatted_pipeline_options,
-                    py_file=self.py_file,
-                    py_options=self.py_options,
-                    py_interpreter=self.py_interpreter,
-                    py_requirements=self.py_requirements,
-                    py_system_site_packages=self.py_system_site_packages,
-                    process_line_callback=process_line_callback,
-                )
-
-            self.dataflow_hook.wait_for_done(
-                job_name=job_name,
-                location=self.location,
-                job_id=self.job_id,
-                multiple_jobs=False,
-            )
-
-        return {"job_id": self.job_id}
-
-    def on_kill(self) -> None:
-        self.log.info("On kill.")
-        if self.job_id:
-            self.dataflow_hook.cancel_job(
-                job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id
-            )
-
-
 class DataflowStopJobOperator(GoogleCloudBaseOperator):
     """
     Stops the job with the specified name prefix or Job ID.
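
Note: DataflowCreatePythonJobOperator is removed on the same grounds as its Java counterpart, with BeamRunPythonPipelineOperator accepting the same py_* parameters. A rough migration sketch under the same assumptions (paths, project, and region are placeholders):

    from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
    from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration

    # The py_file/py_interpreter/py_requirements knobs carry over unchanged;
    # Dataflow-specific settings again move into dataflow_config.
    task = BeamRunPythonPipelineOperator(
        task_id="dataflow_python_example",
        py_file="gs://my-bucket/pipelines/wordcount.py",
        runner="DataflowRunner",
        pipeline_options={"tempLocation": "gs://my-bucket/tmp/"},
        py_interpreter="python3",
        py_requirements=["apache-beam[gcp]"],
        py_system_site_packages=False,
        dataflow_config=DataflowConfiguration(
            job_name="{{task.task_id}}",
            project_id="my-gcp-project",
            location="us-central1",
        ),
    )
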
@@ -16,7 +16,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.providers.google.cloud.links.dataform import (
     DataformRepositoryLink,
@@ -19,7 +19,8 @@
 from __future__ import annotations
 
 import time
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
 
 from google.api_core.retry import exponential_sleep_generator
 from googleapiclient.errors import HttpError
@@ -19,7 +19,8 @@
 from __future__ import annotations
 
 import time
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.triggers.dataplex import (
@@ -686,39 +687,44 @@ class DataplexCreateOrUpdateDataQualityScanOperator(GoogleCloudBaseOperator):
             impersonation_chain=self.impersonation_chain,
         )
 
-        self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
-        try:
-            operation = hook.create_data_scan(
-                project_id=self.project_id,
-                region=self.region,
-                data_scan_id=self.data_scan_id,
-                body=self.body,
-                retry=self.retry,
-                timeout=self.timeout,
-                metadata=self.metadata,
-            )
-            hook.wait_for_operation(timeout=self.timeout, operation=operation)
-            self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
-        except AlreadyExists:
-            self.log.info("Dataplex Data Quality scan already exists: %s", {self.data_scan_id})
-
-            operation = hook.update_data_scan(
-                project_id=self.project_id,
-                region=self.region,
-                data_scan_id=self.data_scan_id,
-                body=self.body,
-                update_mask=self.update_mask,
-                retry=self.retry,
-                timeout=self.timeout,
-                metadata=self.metadata,
-            )
-            hook.wait_for_operation(timeout=self.timeout, operation=operation)
-            self.log.info("Dataplex Data Quality scan %s updated successfully!", self.data_scan_id)
-        except GoogleAPICallError as e:
-            raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}", e)
+        if self.update_mask is not None:
+            self._update_data_scan(hook)
+        else:
+            self.log.info("Creating Dataplex Data Quality scan %s", self.data_scan_id)
+            try:
+                operation = hook.create_data_scan(
+                    project_id=self.project_id,
+                    region=self.region,
+                    data_scan_id=self.data_scan_id,
+                    body=self.body,
+                    retry=self.retry,
+                    timeout=self.timeout,
+                    metadata=self.metadata,
+                )
+                hook.wait_for_operation(timeout=self.timeout, operation=operation)
+                self.log.info("Dataplex Data Quality scan %s created successfully!", self.data_scan_id)
+            except AlreadyExists:
+                self._update_data_scan(hook)
+            except GoogleAPICallError as e:
+                raise AirflowException(f"Error creating Data Quality scan {self.data_scan_id}", e)
 
         return self.data_scan_id
 
+    def _update_data_scan(self, hook: DataplexHook):
+        self.log.info("Dataplex Data Quality scan already exists: %s", {self.data_scan_id})
+        operation = hook.update_data_scan(
+            project_id=self.project_id,
+            region=self.region,
+            data_scan_id=self.data_scan_id,
+            body=self.body,
+            update_mask=self.update_mask,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        self.log.info("Dataplex Data Quality scan %s updated successfully!", self.data_scan_id)
+
 
 class DataplexGetDataQualityScanOperator(GoogleCloudBaseOperator):
     """
@@ -19,7 +19,8 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING
 
 from airflow.providers.google.cloud.hooks.dataprep import GoogleDataprepHook
 from airflow.providers.google.cloud.links.dataprep import DataprepFlowLink, DataprepJobGroupLink