apache-airflow-providers-google 10.2.0rc1__py3-none-any.whl → 10.3.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +38 -39
  3. airflow/providers/google/ads/transfers/ads_to_gcs.py +4 -4
  4. airflow/providers/google/cloud/_internal_client/secret_manager_client.py +6 -9
  5. airflow/providers/google/cloud/hooks/bigquery.py +328 -318
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +66 -22
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +46 -70
  8. airflow/providers/google/cloud/hooks/dataflow.py +11 -15
  9. airflow/providers/google/cloud/hooks/dataform.py +3 -3
  10. airflow/providers/google/cloud/hooks/dataproc.py +577 -573
  11. airflow/providers/google/cloud/hooks/functions.py +60 -76
  12. airflow/providers/google/cloud/hooks/gcs.py +108 -18
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +69 -90
  14. airflow/providers/google/cloud/links/datafusion.py +4 -3
  15. airflow/providers/google/cloud/operators/bigquery.py +201 -191
  16. airflow/providers/google/cloud/operators/bigquery_dts.py +2 -1
  17. airflow/providers/google/cloud/operators/cloud_build.py +2 -1
  18. airflow/providers/google/cloud/operators/cloud_composer.py +4 -3
  19. airflow/providers/google/cloud/operators/cloud_sql.py +62 -28
  20. airflow/providers/google/cloud/operators/dataflow.py +6 -4
  21. airflow/providers/google/cloud/operators/dataform.py +3 -2
  22. airflow/providers/google/cloud/operators/dataproc.py +127 -123
  23. airflow/providers/google/cloud/operators/dataproc_metastore.py +18 -26
  24. airflow/providers/google/cloud/operators/gcs.py +35 -13
  25. airflow/providers/google/cloud/operators/kubernetes_engine.py +92 -42
  26. airflow/providers/google/cloud/operators/mlengine.py +2 -6
  27. airflow/providers/google/cloud/operators/vision.py +47 -56
  28. airflow/providers/google/cloud/sensors/bigquery.py +3 -2
  29. airflow/providers/google/cloud/sensors/gcs.py +5 -7
  30. airflow/providers/google/cloud/sensors/pubsub.py +2 -2
  31. airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py +3 -2
  32. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +2 -1
  33. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +4 -4
  34. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +6 -5
  35. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +46 -7
  36. airflow/providers/google/cloud/transfers/gcs_to_sftp.py +5 -2
  37. airflow/providers/google/cloud/triggers/cloud_sql.py +102 -0
  38. airflow/providers/google/cloud/triggers/kubernetes_engine.py +28 -6
  39. airflow/providers/google/cloud/utils/bigquery.py +17 -0
  40. airflow/providers/google/get_provider_info.py +7 -2
  41. airflow/providers/google/suite/transfers/gcs_to_gdrive.py +4 -0
  42. airflow/providers/google/suite/transfers/local_to_drive.py +28 -26
  43. apache_airflow_providers_google-10.3.0rc1.dist-info/METADATA +289 -0
  44. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/RECORD +49 -48
  45. apache_airflow_providers_google-10.2.0rc1.dist-info/METADATA +0 -1824
  46. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/LICENSE +0 -0
  47. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/NOTICE +0 -0
  48. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/WHEEL +0 -0
  49. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/entry_points.txt +0 -0
  50. {apache_airflow_providers_google-10.2.0rc1.dist-info → apache_airflow_providers_google-10.3.0rc1.dist-info}/top_level.txt +0 -0

airflow/providers/google/cloud/operators/bigquery_dts.py
@@ -32,6 +32,7 @@ from google.cloud.bigquery_datatransfer_v1 import (
 )
 
 from airflow import AirflowException
+from airflow.configuration import conf
 from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id
 from airflow.providers.google.cloud.links.bigquery_dts import BigQueryDataTransferConfigLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
@@ -279,7 +280,7 @@ class BigQueryDataTransferServiceStartTransferRunsOperator(GoogleCloudBaseOperator):
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id="google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
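
The change above recurs throughout this release: instead of a hard-coded `False`, the `deferrable` argument of many operators now defaults to the `[operators] default_deferrable` configuration entry. A minimal sketch of how that default resolves, assuming a standard Airflow installation (nothing here is specific to this provider):

```python
# Illustration only: how the new deferrable default is resolved at DAG-parse time.
from airflow.configuration import conf

# Without an [operators] default_deferrable entry in airflow.cfg, the fallback
# keeps the previous behaviour and the operator runs in blocking mode.
use_deferrable = conf.getboolean("operators", "default_deferrable", fallback=False)
print(use_deferrable)  # False unless the config (or an env var override) says otherwise

# Opting every such operator into deferrable mode is then a config change:
#
#   [operators]
#   default_deferrable = True
#
# or, equivalently, AIRFLOW__OPERATORS__DEFAULT_DEFERRABLE=True in the environment.
# Passing deferrable=... explicitly to a task still overrides the default.
```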

airflow/providers/google/cloud/operators/cloud_build.py
@@ -28,6 +28,7 @@ from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.api_core.retry import Retry
 from google.cloud.devtools.cloudbuild_v1.types import Build, BuildTrigger, RepoSource
 
+from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.hooks.cloud_build import CloudBuildHook
 from airflow.providers.google.cloud.links.cloud_build import (
@@ -176,7 +177,7 @@ class CloudBuildCreateBuildOperator(GoogleCloudBaseOperator):
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
         poll_interval: float = 4.0,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         location: str = "global",
         **kwargs,
     ) -> None:

airflow/providers/google/cloud/operators/cloud_composer.py
@@ -27,6 +27,7 @@ from google.cloud.orchestration.airflow.service_v1.types import Environment
 from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow import AirflowException
+from airflow.configuration import conf
 from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerHook
 from airflow.providers.google.cloud.links.base import BaseGoogleLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
@@ -135,7 +136,7 @@ class CloudComposerCreateEnvironmentOperator(GoogleCloudBaseOperator):
         retry: Retry | _MethodDefault = DEFAULT,
         timeout: float | None = None,
         metadata: Sequence[tuple[str, str]] = (),
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         pooling_period_seconds: int = 30,
         **kwargs,
     ) -> None:
@@ -264,7 +265,7 @@ class CloudComposerDeleteEnvironmentOperator(GoogleCloudBaseOperator):
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         pooling_period_seconds: int = 30,
         **kwargs,
     ) -> None:
@@ -509,7 +510,7 @@ class CloudComposerUpdateEnvironmentOperator(GoogleCloudBaseOperator):
         metadata: Sequence[tuple[str, str]] = (),
         gcp_conn_id: str = "google_cloud_default",
         impersonation_chain: str | Sequence[str] | None = None,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         pooling_period_seconds: int = 30,
         **kwargs,
     ) -> None:

airflow/providers/google/cloud/operators/cloud_sql.py
@@ -22,12 +22,14 @@ from typing import TYPE_CHECKING, Iterable, Mapping, Sequence
 
 from googleapiclient.errors import HttpError
 
+from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection
 from airflow.providers.google.cloud.hooks.cloud_sql import CloudSQLDatabaseHook, CloudSQLHook
 from airflow.providers.google.cloud.links.cloud_sql import CloudSQLInstanceDatabaseLink, CloudSQLInstanceLink
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
+from airflow.providers.google.cloud.triggers.cloud_sql import CloudSQLExportTrigger
 from airflow.providers.google.cloud.utils.field_validator import GcpBodyFieldValidator
 from airflow.providers.google.common.hooks.base_google import get_field
 from airflow.providers.google.common.links.storage import FileDetailsLink
@@ -215,8 +217,7 @@ CLOUD_SQL_DATABASE_PATCH_VALIDATION = [
 
 
 class CloudSQLBaseOperator(GoogleCloudBaseOperator):
-    """
-    Abstract base operator for Google Cloud SQL operators to inherit from.
+    """Abstract base operator for Google Cloud SQL operators.
 
     :param instance: Cloud SQL instance ID. This does not include the project ID.
     :param project_id: Optional, Google Cloud Project ID. If set to None or missing,
@@ -284,8 +285,8 @@ class CloudSQLBaseOperator(GoogleCloudBaseOperator):
 
 
 class CloudSQLCreateInstanceOperator(CloudSQLBaseOperator):
-    """
-    Creates a new Cloud SQL instance.
+    """Create a new Cloud SQL instance.
+
     If an instance with the same name exists, no action will be taken and
     the operator will succeed.
 
@@ -385,8 +386,7 @@ class CloudSQLCreateInstanceOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLInstancePatchOperator(CloudSQLBaseOperator):
-    """
-    Updates settings of a Cloud SQL instance.
+    """Update settings of a Cloud SQL instance.
 
     Caution: This is a partial update, so only included values for the settings will be
     updated.
@@ -478,8 +478,7 @@ class CloudSQLInstancePatchOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLDeleteInstanceOperator(CloudSQLBaseOperator):
-    """
-    Deletes a Cloud SQL instance.
+    """Delete a Cloud SQL instance.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -525,8 +524,7 @@ class CloudSQLDeleteInstanceOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLCloneInstanceOperator(CloudSQLBaseOperator):
-    """
-    Clones an instance to a target instance.
+    """Clone an instance to a target instance.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -617,8 +615,7 @@ class CloudSQLCloneInstanceOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLCreateInstanceDatabaseOperator(CloudSQLBaseOperator):
-    """
-    Creates a new database inside a Cloud SQL instance.
+    """Create a new database inside a Cloud SQL instance.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -722,9 +719,7 @@ class CloudSQLCreateInstanceDatabaseOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator):
-    """
-    Updates a resource containing information about a database inside a Cloud SQL
-    instance using patch semantics.
+    """Update resource containing information about a database using patch semantics.
 
     See: https://cloud.google.com/sql/docs/mysql/admin-api/how-tos/performance#patch
 
@@ -827,8 +822,7 @@ class CloudSQLPatchInstanceDatabaseOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLDeleteInstanceDatabaseOperator(CloudSQLBaseOperator):
-    """
-    Deletes a database from a Cloud SQL instance.
+    """Delete a database from a Cloud SQL instance.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
@@ -907,9 +901,9 @@ class CloudSQLDeleteInstanceDatabaseOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
-    """
-    Exports data from a Cloud SQL instance to a Cloud Storage bucket as a SQL dump
-    or CSV file.
+    """Export data from a Cloud SQL instance to a Cloud Storage bucket.
+
+    The exported format can be a SQL dump or CSV file.
 
     Note: This operator is idempotent. If executed multiple times with the same
     export file URI, the export file in GCS will simply be overridden.
@@ -934,6 +928,9 @@ class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
+    :param deferrable: Run operator in the deferrable mode.
+    :param poke_interval: (Deferrable mode only) Time (seconds) to wait between calls
+        to check the run status.
     """
 
     # [START gcp_sql_export_template_fields]
@@ -959,10 +956,14 @@ class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
         api_version: str = "v1beta4",
         validate_body: bool = True,
         impersonation_chain: str | Sequence[str] | None = None,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        poke_interval: int = 10,
         **kwargs,
     ) -> None:
         self.body = body
         self.validate_body = validate_body
+        self.deferrable = deferrable
+        self.poke_interval = poke_interval
         super().__init__(
             project_id=project_id,
             instance=instance,
@@ -1002,21 +1003,53 @@ class CloudSQLExportInstanceOperator(CloudSQLBaseOperator):
             uri=self.body["exportContext"]["uri"][5:],
             project_id=self.project_id or hook.project_id,
         )
-        return hook.export_instance(project_id=self.project_id, instance=self.instance, body=self.body)
+
+        operation_name = hook.export_instance(
+            project_id=self.project_id, instance=self.instance, body=self.body
+        )
+
+        if not self.deferrable:
+            return hook._wait_for_operation_to_complete(
+                project_id=self.project_id, operation_name=operation_name
+            )
+        else:
+            self.defer(
+                trigger=CloudSQLExportTrigger(
+                    operation_name=operation_name,
+                    project_id=self.project_id or hook.project_id,
+                    gcp_conn_id=self.gcp_conn_id,
+                    impersonation_chain=self.impersonation_chain,
+                    poke_interval=self.poke_interval,
+                ),
+                method_name="execute_complete",
+            )
+
+    def execute_complete(self, context, event=None) -> None:
+        """
+        Callback for when the trigger fires - returns immediately.
+        Relies on trigger to throw an exception, otherwise it assumes execution was
+        successful.
+        """
+        if event["status"] == "success":
+            self.log.info("Operation %s completed successfully", event["operation_name"])
+        else:
+            self.log.exception("Unexpected error in the operation.")
+            raise AirflowException(event["message"])
 
 
 class CloudSQLImportInstanceOperator(CloudSQLBaseOperator):
-    """
-    Imports data into a Cloud SQL instance from a SQL dump or CSV file in Cloud Storage.
+    """Import data into a Cloud SQL instance from Cloud Storage.
 
-    CSV IMPORT:
+    CSV IMPORT
+    ``````````
 
     This operator is NOT idempotent for a CSV import. If the same file is imported
     multiple times, the imported data will be duplicated in the database.
     Moreover, if there are any unique constraints the duplicate import may result in an
    error.
 
-    SQL IMPORT:
+    SQL IMPORT
+    ``````````
 
     This operator is idempotent for a SQL import if it was also exported by Cloud SQL.
     The exported SQL contains 'DROP TABLE IF EXISTS' statements for all tables
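
Putting the `CloudSQLExportInstanceOperator` changes above together, a hedged usage sketch: the `deferrable` and `poke_interval` arguments come from this diff, while the DAG id, instance name, bucket URI and export body are illustrative placeholders (the body follows the Cloud SQL Admin API `exportContext` layout, which is not part of this diff).

```python
import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.cloud_sql import CloudSQLExportInstanceOperator

# Placeholder export body: adjust fileType, uri and databases for a real instance.
EXPORT_BODY = {
    "exportContext": {
        "fileType": "sql",
        "uri": "gs://example-bucket/exports/example_db.sql",
        "databases": ["example_db"],
    }
}

with DAG(
    dag_id="example_cloudsql_deferrable_export",
    start_date=datetime.datetime(2023, 1, 1),
    schedule=None,
    catchup=False,
):
    export_db = CloudSQLExportInstanceOperator(
        task_id="export_db",
        instance="example-instance",
        body=EXPORT_BODY,
        # New in 10.3.0: the worker slot is released while the export operation runs;
        # a CloudSQLExportTrigger polls the operation roughly every poke_interval seconds.
        deferrable=True,
        poke_interval=20,
    )
```

With `deferrable` left unset, the operator falls back to the `[operators] default_deferrable` setting shown earlier and, when that is false, waits for the operation synchronously via `_wait_for_operation_to_complete`.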

@@ -1117,9 +1150,10 @@ class CloudSQLImportInstanceOperator(CloudSQLBaseOperator):
 
 
 class CloudSQLExecuteQueryOperator(GoogleCloudBaseOperator):
-    """
-    Performs DML or DDL query on an existing Cloud Sql instance. It optionally uses
-    cloud-sql-proxy to establish secure connection with the database.
+    """Perform DML or DDL query on an existing Cloud Sql instance.
+
+    It optionally uses cloud-sql-proxy to establish secure connection with the
+    database.
 
     .. seealso::
         For more information on how to use this operator, take a look at the guide:
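
The new `airflow/providers/google/cloud/triggers/cloud_sql.py` module (file 37 above, +102 lines) is not shown in these hunks, so the sketch below is not its implementation; it only illustrates, under the standard `BaseTrigger` API, the event contract that `execute_complete` above relies on: a payload with `status` set to `success` plus an `operation_name`, or a failure payload carrying a `message`.

```python
from __future__ import annotations

import asyncio
from typing import Any, AsyncIterator

from airflow.triggers.base import BaseTrigger, TriggerEvent


class ExampleOperationTrigger(BaseTrigger):
    """Hypothetical trigger emitting events in the shape execute_complete() expects."""

    def __init__(self, operation_name: str, poke_interval: float = 10):
        super().__init__()
        self.operation_name = operation_name
        self.poke_interval = poke_interval

    def serialize(self) -> tuple[str, dict[str, Any]]:
        # The triggerer process re-creates the trigger from this classpath/kwargs pair.
        return (
            "example.triggers.ExampleOperationTrigger",
            {"operation_name": self.operation_name, "poke_interval": self.poke_interval},
        )

    async def run(self) -> AsyncIterator[TriggerEvent]:
        while True:
            done, error = await self._poll_operation()  # stand-in for a Cloud SQL Admin API call
            if error:
                # Failure payload: execute_complete() raises AirflowException(event["message"]).
                yield TriggerEvent({"status": "error", "message": error})
                return
            if done:
                # Success payload: execute_complete() logs event["operation_name"].
                yield TriggerEvent({"status": "success", "operation_name": self.operation_name})
                return
            await asyncio.sleep(self.poke_interval)

    async def _poll_operation(self) -> tuple[bool, str | None]:
        # A real trigger would query the export operation status here.
        return True, None
```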

airflow/providers/google/cloud/operators/dataflow.py
@@ -28,6 +28,7 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
 from airflow import AirflowException
+from airflow.configuration import conf
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
 from airflow.providers.google.cloud.hooks.dataflow import (
@@ -172,7 +173,9 @@ class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
     This class is deprecated.
     Please use `providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`.
 
-    **Example**: ::
+    Example usage:
+
+    .. code-block:: python
 
         default_args = {
             "owner": "airflow",
@@ -417,7 +420,6 @@ class DataflowCreateJavaJobOperator(GoogleCloudBaseOperator):
                 variables=pipeline_options,
             )
             while is_running and self.check_if_running == CheckJobRunning.WaitForRun:
-
                 is_running = self.dataflow_hook.is_job_dataflow_running(
                     name=self.job_name,
                     variables=pipeline_options,
@@ -609,7 +611,7 @@ class DataflowTemplatedJobStartOperator(GoogleCloudBaseOperator):
         cancel_timeout: int | None = 10 * 60,
         wait_until_finished: bool | None = None,
         append_job_name: bool = True,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -799,7 +801,7 @@ class DataflowStartFlexTemplateOperator(GoogleCloudBaseOperator):
         cancel_timeout: int | None = 10 * 60,
         wait_until_finished: bool | None = None,
         impersonation_chain: str | Sequence[str] | None = None,
-        deferrable: bool = False,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         append_job_name: bool = True,
         *args,
         **kwargs,

airflow/providers/google/cloud/operators/dataform.py
@@ -1029,8 +1029,9 @@ class DataformRemoveDirectoryOperator(GoogleCloudBaseOperator):
 
 
 class DataformInstallNpmPackagesOperator(GoogleCloudBaseOperator):
-    """
-    Installs npm dependencies in the provided workspace. Requires "package.json" to be created in workspace.
+    """Install NPM dependencies in the provided workspace.
+
+    Requires "package.json" to be created in the workspace.
 
     :param project_id: Required. The ID of the Google Cloud project where workspace located.
     :param region: Required. The ID of the Google Cloud region where workspace located.