apache-airflow-providers-google 10.20.0rc1__py3-none-any.whl → 10.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +16 -8
  3. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -1
  4. airflow/providers/google/cloud/_internal_client/secret_manager_client.py +6 -3
  5. airflow/providers/google/cloud/hooks/bigquery.py +158 -79
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +12 -6
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +34 -17
  8. airflow/providers/google/cloud/hooks/dataflow.py +30 -26
  9. airflow/providers/google/cloud/hooks/dataform.py +2 -1
  10. airflow/providers/google/cloud/hooks/datafusion.py +4 -2
  11. airflow/providers/google/cloud/hooks/dataproc.py +102 -51
  12. airflow/providers/google/cloud/hooks/functions.py +20 -10
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -11
  14. airflow/providers/google/cloud/hooks/os_login.py +2 -1
  15. airflow/providers/google/cloud/hooks/secret_manager.py +18 -9
  16. airflow/providers/google/cloud/hooks/translate.py +2 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -1
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +141 -0
  19. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +2 -1
  20. airflow/providers/google/cloud/links/base.py +2 -1
  21. airflow/providers/google/cloud/links/datafusion.py +2 -1
  22. airflow/providers/google/cloud/log/stackdriver_task_handler.py +4 -2
  23. airflow/providers/google/cloud/openlineage/mixins.py +10 -0
  24. airflow/providers/google/cloud/openlineage/utils.py +4 -2
  25. airflow/providers/google/cloud/operators/bigquery.py +55 -21
  26. airflow/providers/google/cloud/operators/cloud_batch.py +3 -1
  27. airflow/providers/google/cloud/operators/cloud_sql.py +22 -11
  28. airflow/providers/google/cloud/operators/dataform.py +2 -1
  29. airflow/providers/google/cloud/operators/dataproc.py +75 -34
  30. airflow/providers/google/cloud/operators/dataproc_metastore.py +24 -12
  31. airflow/providers/google/cloud/operators/gcs.py +2 -1
  32. airflow/providers/google/cloud/operators/pubsub.py +10 -5
  33. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +3 -3
  34. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +12 -9
  35. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +243 -0
  36. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +2 -1
  37. airflow/providers/google/cloud/operators/vision.py +36 -18
  38. airflow/providers/google/cloud/sensors/gcs.py +11 -2
  39. airflow/providers/google/cloud/sensors/pubsub.py +2 -1
  40. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +21 -12
  41. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +1 -1
  42. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -1
  43. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +17 -5
  44. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +12 -6
  45. airflow/providers/google/cloud/transfers/local_to_gcs.py +5 -1
  46. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +2 -1
  47. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +2 -1
  48. airflow/providers/google/cloud/transfers/presto_to_gcs.py +2 -1
  49. airflow/providers/google/cloud/transfers/s3_to_gcs.py +2 -1
  50. airflow/providers/google/cloud/transfers/trino_to_gcs.py +2 -1
  51. airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
  52. airflow/providers/google/cloud/triggers/cloud_run.py +2 -1
  53. airflow/providers/google/cloud/triggers/dataflow.py +2 -1
  54. airflow/providers/google/cloud/triggers/vertex_ai.py +2 -1
  55. airflow/providers/google/cloud/utils/external_token_supplier.py +4 -2
  56. airflow/providers/google/cloud/utils/field_sanitizer.py +4 -2
  57. airflow/providers/google/cloud/utils/field_validator.py +6 -3
  58. airflow/providers/google/cloud/utils/helpers.py +2 -1
  59. airflow/providers/google/common/hooks/base_google.py +2 -1
  60. airflow/providers/google/common/utils/id_token_credentials.py +2 -1
  61. airflow/providers/google/get_provider_info.py +3 -2
  62. airflow/providers/google/go_module_utils.py +4 -2
  63. airflow/providers/google/marketing_platform/hooks/analytics_admin.py +12 -6
  64. airflow/providers/google/marketing_platform/links/analytics_admin.py +2 -1
  65. airflow/providers/google/suite/transfers/local_to_drive.py +2 -1
  66. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/METADATA +14 -14
  67. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/RECORD +69 -69
  68. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/WHEEL +0 -0
  69. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0.dist-info}/entry_points.txt +0 -0
@@ -408,9 +408,20 @@ class GCSToGCSOperator(BaseOperator):
408
408
  msg = f"{prefix} does not exist in bucket {self.source_bucket}"
409
409
  self.log.warning(msg)
410
410
  raise AirflowException(msg)
411
+ if len(objects) == 1 and objects[0][-1] != "/":
412
+ self._copy_file(hook=hook, source_object=objects[0])
411
413
  elif len(objects):
412
414
  self._copy_multiple_objects(hook=hook, source_objects=objects, prefix=prefix)
413
415
 
416
+ def _copy_file(self, hook, source_object):
417
+ destination_object = self.destination_object or source_object
418
+ if self.destination_object and self.destination_object[-1] == "/":
419
+ file_name = source_object.split("/")[-1]
420
+ destination_object += file_name
421
+ self._copy_single_object(
422
+ hook=hook, source_object=source_object, destination_object=destination_object
423
+ )
424
+
414
425
  def _copy_multiple_objects(self, hook, source_objects, prefix):
415
426
  # Check whether the prefix is a root directory for all the rest of objects.
416
427
  _pref = prefix.rstrip("/")
@@ -430,12 +441,7 @@ class GCSToGCSOperator(BaseOperator):
430
441
  destination_object = source_obj
431
442
  else:
432
443
  file_name_postfix = source_obj.replace(base_path, "", 1)
433
-
434
- destination_object = (
435
- self.destination_object.rstrip("/")[0 : self.destination_object.rfind("/")]
436
- + "/"
437
- + file_name_postfix
438
- )
444
+ destination_object = self.destination_object.rstrip("/") + "/" + file_name_postfix
439
445
 
440
446
  self._copy_single_object(
441
447
  hook=hook, source_object=source_obj, destination_object=destination_object
@@ -32,7 +32,7 @@ if TYPE_CHECKING:
32
32
 
33
33
  class LocalFilesystemToGCSOperator(BaseOperator):
34
34
  """
35
- Uploads a file or list of files to Google Cloud Storage; optionally can compress the file for upload.
35
+ Uploads a file or list of files to Google Cloud Storage; optionally can compress the file for upload; optionally can upload the data in multiple chunks.
36
36
 
37
37
  .. seealso::
38
38
  For more information on how to use this operator, take a look at the guide:
@@ -47,6 +47,7 @@ class LocalFilesystemToGCSOperator(BaseOperator):
47
47
  :param gcp_conn_id: (Optional) The connection ID used to connect to Google Cloud.
48
48
  :param mime_type: The mime-type string
49
49
  :param gzip: Allows for file to be compressed and uploaded as gzip
50
+ :param chunk_size: Blob chunk size in bytes. This must be a multiple of 262144 bytes (256 KiB)
50
51
  :param impersonation_chain: Optional service account to impersonate using short-term
51
52
  credentials, or chained list of accounts required to get the access_token
52
53
  of the last account in the list, which will be impersonated in the request.
@@ -73,6 +74,7 @@ class LocalFilesystemToGCSOperator(BaseOperator):
73
74
  gcp_conn_id="google_cloud_default",
74
75
  mime_type="application/octet-stream",
75
76
  gzip=False,
77
+ chunk_size: int | None = None,
76
78
  impersonation_chain: str | Sequence[str] | None = None,
77
79
  **kwargs,
78
80
  ):
@@ -84,6 +86,7 @@ class LocalFilesystemToGCSOperator(BaseOperator):
84
86
  self.gcp_conn_id = gcp_conn_id
85
87
  self.mime_type = mime_type
86
88
  self.gzip = gzip
89
+ self.chunk_size = chunk_size
87
90
  self.impersonation_chain = impersonation_chain
88
91
 
89
92
  def execute(self, context: Context):
@@ -114,4 +117,5 @@ class LocalFilesystemToGCSOperator(BaseOperator):
114
117
  mime_type=self.mime_type,
115
118
  filename=filepath,
116
119
  gzip=self.gzip,
120
+ chunk_size=self.chunk_size,
117
121
  )
@@ -30,7 +30,8 @@ from airflow.providers.mysql.hooks.mysql import MySqlHook
30
30
 
31
31
 
32
32
  class MySQLToGCSOperator(BaseSQLToGCSOperator):
33
- """Copy data from MySQL to Google Cloud Storage in JSON, CSV or Parquet format.
33
+ """
34
+ Copy data from MySQL to Google Cloud Storage in JSON, CSV or Parquet format.
34
35
 
35
36
  .. seealso::
36
37
  For more information on how to use this operator, take a look at the guide:
@@ -29,7 +29,8 @@ from airflow.providers.oracle.hooks.oracle import OracleHook
29
29
 
30
30
 
31
31
  class OracleToGCSOperator(BaseSQLToGCSOperator):
32
- """Copy data from Oracle to Google Cloud Storage in JSON, CSV or Parquet format.
32
+ """
33
+ Copy data from Oracle to Google Cloud Storage in JSON, CSV or Parquet format.
33
34
 
34
35
  .. seealso::
35
36
  For more information on how to use this operator, take a look at the guide:
@@ -144,7 +144,8 @@ class _PrestoToGCSPrestoCursorAdapter:
144
144
 
145
145
 
146
146
  class PrestoToGCSOperator(BaseSQLToGCSOperator):
147
- """Copy data from PrestoDB to Google Cloud Storage in JSON, CSV or Parquet format.
147
+ """
148
+ Copy data from PrestoDB to Google Cloud Storage in JSON, CSV or Parquet format.
148
149
 
149
150
  :param presto_conn_id: Reference to a specific Presto hook.
150
151
  """
@@ -329,7 +329,8 @@ class S3ToGCSOperator(S3ListOperator):
329
329
  return job_names
330
330
 
331
331
  def execute_complete(self, context: Context, event: dict[str, Any]) -> None:
332
- """Return immediately and relies on trigger to throw a success event. Callback for the trigger.
332
+ """
333
+ Return immediately and relies on trigger to throw a success event. Callback for the trigger.
333
334
 
334
335
  Relies on trigger to throw an exception, otherwise it assumes execution was
335
336
  successful.
@@ -144,7 +144,8 @@ class _TrinoToGCSTrinoCursorAdapter:
144
144
 
145
145
 
146
146
  class TrinoToGCSOperator(BaseSQLToGCSOperator):
147
- """Copy data from TrinoDB to Google Cloud Storage in JSON, CSV or Parquet format.
147
+ """
148
+ Copy data from TrinoDB to Google Cloud Storage in JSON, CSV or Parquet format.
148
149
 
149
150
  :param trino_conn_id: Reference to a specific Trino hook.
150
151
  """
@@ -28,7 +28,8 @@ DEFAULT_BATCH_LOCATION = "us-central1"
28
28
 
29
29
 
30
30
  class CloudBatchJobFinishedTrigger(BaseTrigger):
31
- """Cloud Batch trigger to check if templated job has been finished.
31
+ """
32
+ Cloud Batch trigger to check if templated job has been finished.
32
33
 
33
34
  :param job_name: Required. Name of the job.
34
35
  :param project_id: Required. the Google Cloud project ID in which the job was started.
@@ -39,7 +39,8 @@ class RunJobStatus(Enum):
39
39
 
40
40
 
41
41
  class CloudRunJobFinishedTrigger(BaseTrigger):
42
- """Cloud Run trigger to check if templated job has been finished.
42
+ """
43
+ Cloud Run trigger to check if templated job has been finished.
43
44
 
44
45
  :param operation_name: Required. Name of the operation.
45
46
  :param job_name: Required. Name of the job.
@@ -40,7 +40,8 @@ DEFAULT_DATAFLOW_LOCATION = "us-central1"
40
40
 
41
41
 
42
42
  class TemplateJobStartTrigger(BaseTrigger):
43
- """Dataflow trigger to check if templated job has been finished.
43
+ """
44
+ Dataflow trigger to check if templated job has been finished.
44
45
 
45
46
  :param project_id: Required. the Google Cloud project ID in which the job was started.
46
47
  :param job_id: Required. ID of the job.
@@ -41,7 +41,8 @@ if TYPE_CHECKING:
41
41
 
42
42
 
43
43
  class BaseVertexAIJobTrigger(BaseTrigger):
44
- """Base class for Vertex AI job triggers.
44
+ """
45
+ Base class for Vertex AI job triggers.
45
46
 
46
47
  This trigger polls the Vertex AI job and checks its status.
47
48
 
@@ -33,7 +33,8 @@ from airflow.utils.log.logging_mixin import LoggingMixin
33
33
 
34
34
 
35
35
  def cache_token_decorator(get_subject_token_method):
36
- """Cache calls to ``SubjectTokenSupplier`` instances' ``get_token_supplier`` methods.
36
+ """
37
+ Cache calls to ``SubjectTokenSupplier`` instances' ``get_token_supplier`` methods.
37
38
 
38
39
  Different instances of a same SubjectTokenSupplier class with the same attributes
39
40
  share the OIDC token cache.
@@ -48,7 +49,8 @@ def cache_token_decorator(get_subject_token_method):
48
49
 
49
50
  @wraps(get_subject_token_method)
50
51
  def wrapper(supplier_instance: CacheTokenSupplier, *args, **kwargs) -> str:
51
- """Obeys the interface set by ``SubjectTokenSupplier`` for ``get_subject_token`` methods.
52
+ """
53
+ Obeys the interface set by ``SubjectTokenSupplier`` for ``get_subject_token`` methods.
52
54
 
53
55
  :param supplier_instance: the SubjectTokenSupplier instance whose get_subject_token method is being decorated
54
56
  :return: The token string
@@ -15,7 +15,8 @@
15
15
  # KIND, either express or implied. See the License for the
16
16
  # specific language governing permissions and limitations
17
17
  # under the License.
18
- """Sanitizer for body fields sent via Google Cloud API.
18
+ """
19
+ Sanitizer for body fields sent via Google Cloud API.
19
20
 
20
21
  The sanitizer removes fields specified from the body.
21
22
 
@@ -108,7 +109,8 @@ class GcpFieldSanitizerException(AirflowException):
108
109
 
109
110
 
110
111
  class GcpBodyFieldSanitizer(LoggingMixin):
111
- """Sanitizes the body according to specification.
112
+ """
113
+ Sanitizes the body according to specification.
112
114
 
113
115
  :param sanitize_specs: array of strings that specifies which fields to remove
114
116
 
@@ -15,7 +15,8 @@
15
15
  # KIND, either express or implied. See the License for the
16
16
  # specific language governing permissions and limitations
17
17
  # under the License.
18
- """Validator for body fields sent via Google Cloud API.
18
+ """
19
+ Validator for body fields sent via Google Cloud API.
19
20
 
20
21
  The validator performs validation of the body (being dictionary of fields) that
21
22
  is sent in the API request to Google Cloud (via ``googleclient`` API usually).
@@ -146,7 +147,8 @@ class GcpFieldValidationException(AirflowException):
146
147
 
147
148
 
148
149
  class GcpValidationSpecificationException(AirflowException):
149
- """Thrown when validation specification is wrong.
150
+ """
151
+ Thrown when validation specification is wrong.
150
152
 
151
153
  This should only happen during development as ideally
152
154
  specification itself should not be invalid ;) .
@@ -177,7 +179,8 @@ EXAMPLE_VALIDATION_SPECIFICATION = [
177
179
 
178
180
 
179
181
  class GcpBodyFieldValidator(LoggingMixin):
180
- """Validates correctness of request body according to specification.
182
+ """
183
+ Validates correctness of request body according to specification.
181
184
 
182
185
  The specification can describe various type of
183
186
  fields including custom validation, and union of fields. This validator is
@@ -25,7 +25,8 @@ def normalize_directory_path(source_object: str | None) -> str | None:
25
25
 
26
26
 
27
27
  def resource_path_to_dict(resource_name: str) -> dict[str, str]:
28
- """Convert a path-like GCP resource name into a dictionary.
28
+ """
29
+ Convert a path-like GCP resource name into a dictionary.
29
30
 
30
31
  For example, the path `projects/my-project/locations/my-location/instances/my-instance` will be converted
31
32
  to a dict:
@@ -706,7 +706,8 @@ class GoogleBaseHook(BaseHook):
706
706
 
707
707
 
708
708
  class _CredentialsToken(Token):
709
- """A token implementation which makes Google credentials objects accessible to [gcloud-aio](https://talkiq.github.io/gcloud-aio/) clients.
709
+ """
710
+ A token implementation which makes Google credentials objects accessible to [gcloud-aio](https://talkiq.github.io/gcloud-aio/) clients.
710
711
 
711
712
  This class allows us to create token instances from credentials objects and thus supports a variety of use cases for Google
712
713
  credentials in Airflow (i.e. impersonation chain). By relying on a existing credentials object we leverage functionality provided by the GoogleBaseHook
@@ -192,7 +192,8 @@ def _get_gce_credentials(
192
192
  def get_default_id_token_credentials(
193
193
  target_audience: str | None, request: google.auth.transport.Request | None = None
194
194
  ) -> google_auth_credentials.Credentials:
195
- """Get the default ID Token credentials for the current environment.
195
+ """
196
+ Get the default ID Token credentials for the current environment.
196
197
 
197
198
  `Application Default Credentials`_ provides an easy way to obtain credentials to call Google APIs for
198
199
  server-to-server or local applications.
@@ -28,8 +28,9 @@ def get_provider_info():
28
28
  "name": "Google",
29
29
  "description": "Google services including:\n\n - `Google Ads <https://ads.google.com/>`__\n - `Google Cloud (GCP) <https://cloud.google.com/>`__\n - `Google Firebase <https://firebase.google.com/>`__\n - `Google LevelDB <https://github.com/google/leveldb/>`__\n - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
30
30
  "state": "ready",
31
- "source-date-epoch": 1718604460,
31
+ "source-date-epoch": 1720423376,
32
32
  "versions": [
33
+ "10.21.0",
33
34
  "10.20.0",
34
35
  "10.19.0",
35
36
  "10.18.0",
@@ -103,7 +104,7 @@ def get_provider_info():
103
104
  "google-api-python-client>=2.0.2",
104
105
  "google-auth>=2.29.0",
105
106
  "google-auth-httplib2>=0.0.1",
106
- "google-cloud-aiplatform>=1.54.0",
107
+ "google-cloud-aiplatform>=1.57.0",
107
108
  "google-cloud-automl>=2.12.0",
108
109
  "google-cloud-bigquery<3.21.0,>=3.4.0",
109
110
  "google-cloud-bigquery-datatransfer>=3.13.0",
@@ -25,7 +25,8 @@ from airflow.utils.process_utils import execute_in_subprocess
25
25
 
26
26
 
27
27
  def init_module(go_module_name: str, go_module_path: str) -> None:
28
- """Initialize a Go module.
28
+ """
29
+ Initialize a Go module.
29
30
 
30
31
  If a ``go.mod`` file already exists, this function will do nothing.
31
32
 
@@ -39,7 +40,8 @@ def init_module(go_module_name: str, go_module_path: str) -> None:
39
40
 
40
41
 
41
42
  def install_dependencies(go_module_path: str) -> None:
42
- """Install dependencies for a Go module.
43
+ """
44
+ Install dependencies for a Go module.
43
45
 
44
46
  :param go_module_path: The path to the directory containing the Go module.
45
47
  """
@@ -70,7 +70,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
70
70
  timeout: float | None = None,
71
71
  metadata: Sequence[tuple[str, str]] = (),
72
72
  ) -> ListAccountsPager:
73
- """Get list of accounts in Google Analytics.
73
+ """
74
+ Get list of accounts in Google Analytics.
74
75
 
75
76
  .. seealso::
76
77
  For more details please check the client library documentation:
@@ -97,7 +98,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
97
98
  timeout: float | None = None,
98
99
  metadata: Sequence[tuple[str, str]] = (),
99
100
  ) -> Property:
100
- """Create Google Analytics property.
101
+ """
102
+ Create Google Analytics property.
101
103
 
102
104
  .. seealso::
103
105
  For more details please check the client library documentation:
@@ -127,7 +129,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
127
129
  timeout: float | None = None,
128
130
  metadata: Sequence[tuple[str, str]] = (),
129
131
  ) -> Property:
130
- """Soft delete Google Analytics property.
132
+ """
133
+ Soft delete Google Analytics property.
131
134
 
132
135
  .. seealso::
133
136
  For more details please check the client library documentation:
@@ -153,7 +156,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
153
156
  timeout: float | None = None,
154
157
  metadata: Sequence[tuple[str, str]] = (),
155
158
  ) -> DataStream:
156
- """Create Google Analytics data stream.
159
+ """
160
+ Create Google Analytics data stream.
157
161
 
158
162
  .. seealso::
159
163
  For more details please check the client library documentation:
@@ -184,7 +188,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
184
188
  timeout: float | None = None,
185
189
  metadata: Sequence[tuple[str, str]] = (),
186
190
  ) -> None:
187
- """Delete Google Analytics data stream.
191
+ """
192
+ Delete Google Analytics data stream.
188
193
 
189
194
  .. seealso::
190
195
  For more details please check the client library documentation:
@@ -214,7 +219,8 @@ class GoogleAnalyticsAdminHook(GoogleBaseHook):
214
219
  timeout: float | None = None,
215
220
  metadata: Sequence[tuple[str, str]] = (),
216
221
  ) -> ListGoogleAdsLinksPager:
217
- """Get list of Google Ads links.
222
+ """
223
+ Get list of Google Ads links.
218
224
 
219
225
  .. seealso::
220
226
  For more details please check the client library documentation:
@@ -29,7 +29,8 @@ BASE_LINK = "https://analytics.google.com/analytics/web/"
29
29
 
30
30
 
31
31
  class GoogleAnalyticsBaseLink(BaseOperatorLink):
32
- """Base class for Google Analytics links.
32
+ """
33
+ Base class for Google Analytics links.
33
34
 
34
35
  :meta private:
35
36
  """
@@ -31,7 +31,8 @@ if TYPE_CHECKING:
31
31
 
32
32
 
33
33
  class LocalFilesystemToGoogleDriveOperator(BaseOperator):
34
- """Upload a list of files to a Google Drive folder.
34
+ """
35
+ Upload a list of files to a Google Drive folder.
35
36
 
36
37
  This operator uploads a list of local files to a Google Drive folder.
37
38
  The local files can optionally be deleted after upload.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: apache-airflow-providers-google
3
- Version: 10.20.0rc1
3
+ Version: 10.21.0
4
4
  Summary: Provider package apache-airflow-providers-google for Apache Airflow
5
5
  Keywords: airflow-provider,google,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.11
22
22
  Classifier: Programming Language :: Python :: 3.12
23
23
  Classifier: Topic :: System :: Monitoring
24
24
  Requires-Dist: PyOpenSSL>=23.0.0
25
- Requires-Dist: apache-airflow-providers-common-sql>=1.7.2rc0
26
- Requires-Dist: apache-airflow>=2.7.0rc0
25
+ Requires-Dist: apache-airflow-providers-common-sql>=1.7.2
26
+ Requires-Dist: apache-airflow>=2.7.0
27
27
  Requires-Dist: asgiref>=3.5.2
28
28
  Requires-Dist: dill>=0.2.3
29
29
  Requires-Dist: gcloud-aio-auth>=4.0.0,<5.0.0
@@ -36,7 +36,7 @@ Requires-Dist: google-api-core>=2.11.0,!=2.16.0,!=2.18.0
36
36
  Requires-Dist: google-api-python-client>=2.0.2
37
37
  Requires-Dist: google-auth-httplib2>=0.0.1
38
38
  Requires-Dist: google-auth>=2.29.0
39
- Requires-Dist: google-cloud-aiplatform>=1.54.0
39
+ Requires-Dist: google-cloud-aiplatform>=1.57.0
40
40
  Requires-Dist: google-cloud-automl>=2.12.0
41
41
  Requires-Dist: google-cloud-batch>=0.13.0
42
42
  Requires-Dist: google-cloud-bigquery-datatransfer>=3.13.0
@@ -85,19 +85,19 @@ Requires-Dist: python-slugify>=7.0.0
85
85
  Requires-Dist: sqlalchemy-bigquery>=1.2.1
86
86
  Requires-Dist: sqlalchemy-spanner>=1.6.2
87
87
  Requires-Dist: tenacity>=8.1.0
88
- Requires-Dist: apache-airflow-providers-amazon>=2.6.0rc0 ; extra == "amazon"
88
+ Requires-Dist: apache-airflow-providers-amazon>=2.6.0 ; extra == "amazon"
89
89
  Requires-Dist: apache-airflow-providers-apache-beam ; extra == "apache.beam"
90
90
  Requires-Dist: apache-beam[gcp] ; extra == "apache.beam"
91
91
  Requires-Dist: apache-airflow-providers-apache-cassandra ; extra == "apache.cassandra"
92
- Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cncf.kubernetes"
92
+ Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0 ; extra == "cncf.kubernetes"
93
93
  Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
94
- Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc0 ; extra == "facebook"
94
+ Requires-Dist: apache-airflow-providers-facebook>=2.2.0 ; extra == "facebook"
95
95
  Requires-Dist: plyvel ; extra == "leveldb"
96
96
  Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
97
97
  Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft.mssql"
98
98
  Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
99
99
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
100
- Requires-Dist: apache-airflow-providers-oracle>=3.1.0rc0 ; extra == "oracle"
100
+ Requires-Dist: apache-airflow-providers-oracle>=3.1.0 ; extra == "oracle"
101
101
  Requires-Dist: apache-airflow-providers-postgres ; extra == "postgres"
102
102
  Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
103
103
  Requires-Dist: apache-airflow-providers-salesforce ; extra == "salesforce"
@@ -105,8 +105,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
105
105
  Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
106
106
  Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
107
107
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
108
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.20.0/changelog.html
109
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.20.0
108
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/changelog.html
109
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0
110
110
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
111
111
  Project-URL: Source Code, https://github.com/apache/airflow
112
112
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -174,7 +174,7 @@ Provides-Extra: trino
174
174
 
175
175
  Package ``apache-airflow-providers-google``
176
176
 
177
- Release: ``10.20.0.rc1``
177
+ Release: ``10.21.0``
178
178
 
179
179
 
180
180
  Google services including:
@@ -194,7 +194,7 @@ This is a provider package for ``google`` provider. All classes for this provide
194
194
  are in ``airflow.providers.google`` python package.
195
195
 
196
196
  You can find package information and changelog for the provider
197
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.20.0/>`_.
197
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/>`_.
198
198
 
199
199
  Installation
200
200
  ------------
@@ -225,7 +225,7 @@ PIP package Version required
225
225
  ``google-api-python-client`` ``>=2.0.2``
226
226
  ``google-auth`` ``>=2.29.0``
227
227
  ``google-auth-httplib2`` ``>=0.0.1``
228
- ``google-cloud-aiplatform`` ``>=1.54.0``
228
+ ``google-cloud-aiplatform`` ``>=1.57.0``
229
229
  ``google-cloud-automl`` ``>=2.12.0``
230
230
  ``google-cloud-bigquery`` ``<3.21.0,>=3.4.0``
231
231
  ``google-cloud-bigquery-datatransfer`` ``>=3.13.0``
@@ -313,4 +313,4 @@ Dependent package
313
313
  ======================================================================================================================== ====================
314
314
 
315
315
  The changelog for the provider package can be found in the
316
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.20.0/changelog.html>`_.
316
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.21.0/changelog.html>`_.