apache-airflow-providers-google 10.24.0rc1__py3-none-any.whl → 10.25.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/assets/gcs.py +45 -0
  3. airflow/providers/google/cloud/hooks/dataproc.py +1 -0
  4. airflow/providers/google/cloud/hooks/gcs.py +50 -2
  5. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +79 -0
  6. airflow/providers/google/cloud/links/translate.py +2 -1
  7. airflow/providers/google/cloud/operators/automl.py +31 -18
  8. airflow/providers/google/cloud/operators/bigquery.py +1 -1
  9. airflow/providers/google/cloud/operators/cloud_run.py +4 -6
  10. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +10 -1
  11. airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py +1 -1
  12. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +19 -0
  13. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +152 -4
  14. airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py +1 -0
  15. airflow/providers/google/cloud/sensors/dataform.py +75 -0
  16. airflow/providers/google/get_provider_info.py +21 -7
  17. airflow/providers/google/marketing_platform/example_dags/example_display_video.py +2 -2
  18. {apache_airflow_providers_google-10.24.0rc1.dist-info → apache_airflow_providers_google-10.25.0rc1.dist-info}/METADATA +15 -13
  19. {apache_airflow_providers_google-10.24.0rc1.dist-info → apache_airflow_providers_google-10.25.0rc1.dist-info}/RECORD +23 -22
  20. /airflow/providers/google/{datasets → assets}/__init__.py +0 -0
  21. /airflow/providers/google/{datasets → assets}/bigquery.py +0 -0
  22. {apache_airflow_providers_google-10.24.0rc1.dist-info → apache_airflow_providers_google-10.25.0rc1.dist-info}/WHEEL +0 -0
  23. {apache_airflow_providers_google-10.24.0rc1.dist-info → apache_airflow_providers_google-10.25.0rc1.dist-info}/entry_points.txt +0 -0
airflow/providers/google/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "10.24.0"
+__version__ = "10.25.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.8.0"
airflow/providers/google/assets/gcs.py (new file)
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from airflow.providers.common.compat.assets import Asset
+from airflow.providers.google.cloud.hooks.gcs import _parse_gcs_url
+
+if TYPE_CHECKING:
+    from urllib.parse import SplitResult
+
+    from airflow.providers.common.compat.openlineage.facet import Dataset as OpenLineageDataset
+
+
+def create_asset(*, bucket: str, key: str, extra: dict | None = None) -> Asset:
+    return Asset(uri=f"gs://{bucket}/{key}", extra=extra)
+
+
+def sanitize_uri(uri: SplitResult) -> SplitResult:
+    if not uri.netloc:
+        raise ValueError("URI format gs:// must contain a bucket name")
+    return uri
+
+
+def convert_asset_to_openlineage(asset: Asset, lineage_context) -> OpenLineageDataset:
+    """Translate Asset with valid AIP-60 uri to OpenLineage with assistance from the hook."""
+    from airflow.providers.common.compat.openlineage.facet import Dataset as OpenLineageDataset
+
+    bucket, key = _parse_gcs_url(asset.uri)
+    return OpenLineageDataset(namespace=f"gs://{bucket}", name=key if key else "/")
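A minimal sketch of how these new helpers behave; the bucket and object key are hypothetical, and `None` suffices for `lineage_context` since the converter above ignores it:

from airflow.providers.google.assets.gcs import convert_asset_to_openlineage, create_asset

# Build an AIP-60 style asset for a GCS object.
asset = create_asset(bucket="example-bucket", key="data/input.csv")
assert asset.uri == "gs://example-bucket/data/input.csv"

# The OpenLineage dataset uses the bucket as namespace and the key as name
# (falling back to "/" for an empty key).
ol_dataset = convert_asset_to_openlineage(asset, lineage_context=None)
assert (ol_dataset.namespace, ol_dataset.name) == ("gs://example-bucket", "data/input.csv")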
airflow/providers/google/cloud/hooks/dataproc.py
@@ -999,6 +999,7 @@ class DataprocHook(GoogleBaseHook):
             individual attempt.
         :param metadata: Additional metadata that is provided to the method.
         """
+        self.log.debug("Creating batch: %s", batch)
         client = self.get_batch_client(region)
         parent = f"projects/{project_id}/regions/{region}"
 
airflow/providers/google/cloud/hooks/gcs.py
@@ -43,6 +43,7 @@ from google.cloud.storage.retry import DEFAULT_RETRY
 from requests import Session
 
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.lineage.hook import get_hook_lineage_collector
 from airflow.providers.google.cloud.utils.helpers import normalize_directory_path
 from airflow.providers.google.common.consts import CLIENT_INFO
 from airflow.providers.google.common.hooks.base_google import (
@@ -214,6 +215,16 @@ class GCSHook(GoogleBaseHook):
         destination_object = source_bucket.copy_blob(  # type: ignore[attr-defined]
             blob=source_object, destination_bucket=destination_bucket, new_name=destination_object
         )
+        get_hook_lineage_collector().add_input_asset(
+            context=self,
+            scheme="gs",
+            asset_kwargs={"bucket": source_bucket.name, "key": source_object.name},  # type: ignore[attr-defined]
+        )
+        get_hook_lineage_collector().add_output_asset(
+            context=self,
+            scheme="gs",
+            asset_kwargs={"bucket": destination_bucket.name, "key": destination_object.name},  # type: ignore[union-attr]
+        )
 
         self.log.info(
             "Object %s in bucket %s copied to object %s in bucket %s",
@@ -267,6 +278,16 @@ class GCSHook(GoogleBaseHook):
             ).rewrite(source=source_object, token=token)
 
             self.log.info("Total Bytes: %s | Bytes Written: %s", total_bytes, bytes_rewritten)
+        get_hook_lineage_collector().add_input_asset(
+            context=self,
+            scheme="gs",
+            asset_kwargs={"bucket": source_bucket.name, "key": source_object.name},  # type: ignore[attr-defined]
+        )
+        get_hook_lineage_collector().add_output_asset(
+            context=self,
+            scheme="gs",
+            asset_kwargs={"bucket": destination_bucket.name, "key": destination_object},  # type: ignore[attr-defined]
+        )
         self.log.info(
             "Object %s in bucket %s rewritten to object %s in bucket %s",
             source_object.name,  # type: ignore[attr-defined]
@@ -345,9 +366,18 @@ class GCSHook(GoogleBaseHook):
 
             if filename:
                 blob.download_to_filename(filename, timeout=timeout)
+                get_hook_lineage_collector().add_input_asset(
+                    context=self, scheme="gs", asset_kwargs={"bucket": bucket.name, "key": blob.name}
+                )
+                get_hook_lineage_collector().add_output_asset(
+                    context=self, scheme="file", asset_kwargs={"path": filename}
+                )
                 self.log.info("File downloaded to %s", filename)
                 return filename
             else:
+                get_hook_lineage_collector().add_input_asset(
+                    context=self, scheme="gs", asset_kwargs={"bucket": bucket.name, "key": blob.name}
+                )
                 return blob.download_as_bytes()
 
         except GoogleCloudError:
@@ -555,6 +585,9 @@ class GCSHook(GoogleBaseHook):
            _call_with_retry(
                partial(blob.upload_from_filename, filename=filename, content_type=mime_type, timeout=timeout)
            )
+           get_hook_lineage_collector().add_input_asset(
+               context=self, scheme="file", asset_kwargs={"path": filename}
+           )
 
            if gzip:
                os.remove(filename)
@@ -576,6 +609,10 @@ class GCSHook(GoogleBaseHook):
        else:
            raise ValueError("'filename' and 'data' parameter missing. One is required to upload to gcs.")
 
+       get_hook_lineage_collector().add_output_asset(
+           context=self, scheme="gs", asset_kwargs={"bucket": bucket.name, "key": blob.name}
+       )
+
    def exists(self, bucket_name: str, object_name: str, retry: Retry = DEFAULT_RETRY) -> bool:
        """
        Check for the existence of a file in Google Cloud Storage.
@@ -691,6 +728,9 @@ class GCSHook(GoogleBaseHook):
        bucket = client.bucket(bucket_name)
        blob = bucket.blob(blob_name=object_name)
        blob.delete()
+       get_hook_lineage_collector().add_input_asset(
+           context=self, scheme="gs", asset_kwargs={"bucket": bucket.name, "key": blob.name}
+       )
 
        self.log.info("Blob %s deleted.", object_name)
 
@@ -1198,9 +1238,17 @@ class GCSHook(GoogleBaseHook):
        client = self.get_conn()
        bucket = client.bucket(bucket_name)
        destination_blob = bucket.blob(destination_object)
-       destination_blob.compose(
-           sources=[bucket.blob(blob_name=source_object) for source_object in source_objects]
+       source_blobs = [bucket.blob(blob_name=source_object) for source_object in source_objects]
+       destination_blob.compose(sources=source_blobs)
+       get_hook_lineage_collector().add_output_asset(
+           context=self, scheme="gs", asset_kwargs={"bucket": bucket.name, "key": destination_blob.name}
        )
+       for single_source_blob in source_blobs:
+           get_hook_lineage_collector().add_input_asset(
+               context=self,
+               scheme="gs",
+               asset_kwargs={"bucket": bucket.name, "key": single_source_blob.name},
+           )
 
        self.log.info("Completed successfully.")
 
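Taken together, these hunks make the GCS hook self-report lineage through the common-compat collector. A rough sketch of the effect from a caller's side (the connection ID, bucket, and paths are hypothetical):

from airflow.providers.google.cloud.hooks.gcs import GCSHook

hook = GCSHook(gcp_conn_id="google_cloud_default")
# Per the download() hunk above, this call now registers
# gs://example-bucket/data/input.csv as an input asset and
# file:///tmp/input.csv as an output asset on the hook lineage collector.
hook.download(bucket_name="example-bucket", object_name="data/input.csv", filename="/tmp/input.csv")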
airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py
@@ -20,12 +20,15 @@
 from __future__ import annotations
 
 import time
+from datetime import timedelta
 from typing import TYPE_CHECKING, Sequence
 
 import vertexai
 from vertexai.generative_models import GenerativeModel, Part
 from vertexai.language_models import TextEmbeddingModel, TextGenerationModel
+from vertexai.preview.caching import CachedContent
 from vertexai.preview.evaluation import EvalResult, EvalTask
+from vertexai.preview.generative_models import GenerativeModel as preview_generative_model
 from vertexai.preview.tuning import sft
 
 from airflow.exceptions import AirflowProviderDeprecationWarning
@@ -95,6 +98,16 @@ class GenerativeModelHook(GoogleBaseHook):
         )
         return eval_task
 
+    def get_cached_context_model(
+        self,
+        cached_content_name: str,
+    ) -> preview_generative_model:
+        """Return a Generative Model with Cached Context."""
+        cached_content = CachedContent(cached_content_name=cached_content_name)
+
+        cached_context_model = preview_generative_model.from_cached_content(cached_content)
+        return cached_context_model
+
     @deprecated(
         planned_removal_date="January 01, 2025",
         use_instead="Part objects included in contents parameter of "
@@ -528,3 +541,69 @@ class GenerativeModelHook(GoogleBaseHook):
         )
 
         return eval_result
+
+    def create_cached_content(
+        self,
+        model_name: str,
+        location: str,
+        ttl_hours: float = 1,
+        system_instruction: str | None = None,
+        contents: list | None = None,
+        display_name: str | None = None,
+        project_id: str = PROVIDE_PROJECT_ID,
+    ) -> str:
+        """
+        Create CachedContent to reduce the cost of requests that contain repeat content with high input token counts.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param model_name: Required. The name of the publisher model to use for cached content.
+        :param system_instruction: Developer set system instruction.
+        :param contents: The content to cache.
+        :param ttl_hours: The TTL for this resource in hours. The expiration time is computed: now + TTL.
+            Defaults to one hour.
+        :param display_name: The user-generated meaningful display name of the cached content
+        """
+        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
+
+        response = CachedContent.create(
+            model_name=model_name,
+            system_instruction=system_instruction,
+            contents=contents,
+            ttl=timedelta(hours=ttl_hours),
+            display_name=display_name,
+        )
+
+        return response.name
+
+    def generate_from_cached_content(
+        self,
+        location: str,
+        cached_content_name: str,
+        contents: list,
+        generation_config: dict | None = None,
+        safety_settings: dict | None = None,
+        project_id: str = PROVIDE_PROJECT_ID,
+    ) -> str:
+        """
+        Generate a response from CachedContent.
+
+        :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+        :param location: Required. The ID of the Google Cloud location that the service belongs to.
+        :param cached_content_name: Required. The name of the cached content resource.
+        :param contents: Required. The multi-part content of a message that a user or a program
+            gives to the generative model, in order to elicit a specific response.
+        :param generation_config: Optional. Generation configuration settings.
+        :param safety_settings: Optional. Per request settings for blocking unsafe content.
+        """
+        vertexai.init(project=project_id, location=location, credentials=self.get_credentials())
+
+        cached_context_model = self.get_cached_context_model(cached_content_name=cached_content_name)
+
+        response = cached_context_model.generate_content(
+            contents=contents,
+            generation_config=generation_config,
+            safety_settings=safety_settings,
+        )
+
+        return response.text
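The two new hook methods pair naturally; a sketch under assumed placeholder values (the project, region, model name, and contents below are not taken from the diff):

from airflow.providers.google.cloud.hooks.vertex_ai.generative_model import GenerativeModelHook

hook = GenerativeModelHook(gcp_conn_id="google_cloud_default")

# Cache a large shared prompt prefix for two hours.
cached_content_name = hook.create_cached_content(
    project_id="example-project",
    location="us-central1",
    model_name="gemini-1.5-pro-002",
    system_instruction="You are a meticulous research assistant.",
    contents=["<several thousand tokens of shared context>"],
    ttl_hours=2,
    display_name="shared-context-cache",
)

# Later requests reuse the cache instead of resending the full context.
answer = hook.generate_from_cached_content(
    project_id="example-project",
    location="us-central1",
    cached_content_name=cached_content_name,
    contents=["Summarize the cached context in three bullets."],
)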
airflow/providers/google/cloud/links/translate.py
@@ -167,13 +167,14 @@ class TranslationLegacyModelPredictLink(BaseGoogleLink):
         task_instance,
         model_id: str,
         project_id: str,
+        dataset_id: str,
     ):
         task_instance.xcom_push(
             context,
             key=TranslationLegacyModelPredictLink.key,
             value={
                 "location": task_instance.location,
-                "dataset_id": task_instance.model.dataset_id,
+                "dataset_id": dataset_id,
                 "model_id": model_id,
                 "project_id": project_id,
             },
airflow/providers/google/cloud/operators/automl.py
@@ -22,7 +22,7 @@ from __future__ import annotations
 import ast
 import warnings
 from functools import cached_property
-from typing import TYPE_CHECKING, Sequence, Tuple
+from typing import TYPE_CHECKING, Sequence, Tuple, cast
 
 from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
 from google.cloud.automl_v1beta1 import (
@@ -280,17 +280,22 @@ class AutoMLPredictOperator(GoogleCloudBaseOperator):
             impersonation_chain=self.impersonation_chain,
         )
 
+    @cached_property
+    def model(self) -> Model | None:
+        if self.model_id:
+            hook = cast(CloudAutoMLHook, self.hook)
+            return hook.get_model(
+                model_id=self.model_id,
+                location=self.location,
+                project_id=self.project_id,
+                retry=self.retry,
+                timeout=self.timeout,
+                metadata=self.metadata,
+            )
+        return None
+
     def _check_model_type(self):
-        hook = self.hook
-        model = hook.get_model(
-            model_id=self.model_id,
-            location=self.location,
-            project_id=self.project_id,
-            retry=self.retry,
-            timeout=self.timeout,
-            metadata=self.metadata,
-        )
-        if not hasattr(model, "translation_model_metadata"):
+        if not hasattr(self.model, "translation_model_metadata"):
             raise AirflowException(
                 "AutoMLPredictOperator for text, image, and video prediction has been deprecated. "
                 "Please use endpoint_id param instead of model_id param."
@@ -329,11 +334,13 @@ class AutoMLPredictOperator(GoogleCloudBaseOperator):
         )
 
         project_id = self.project_id or hook.project_id
-        if project_id and self.model_id:
+        dataset_id: str | None = self.model.dataset_id if self.model else None
+        if project_id and self.model_id and dataset_id:
             TranslationLegacyModelPredictLink.persist(
                 context=context,
                 task_instance=self,
                 model_id=self.model_id,
+                dataset_id=dataset_id,
                 project_id=project_id,
             )
         return PredictResponse.to_dict(result)
@@ -431,12 +438,16 @@ class AutoMLBatchPredictOperator(GoogleCloudBaseOperator):
         self.input_config = input_config
         self.output_config = output_config
 
-    def execute(self, context: Context):
-        hook = CloudAutoMLHook(
+    @cached_property
+    def hook(self) -> CloudAutoMLHook:
+        return CloudAutoMLHook(
             gcp_conn_id=self.gcp_conn_id,
             impersonation_chain=self.impersonation_chain,
         )
-        self.model: Model = hook.get_model(
+
+    @cached_property
+    def model(self) -> Model:
+        return self.hook.get_model(
             model_id=self.model_id,
             location=self.location,
             project_id=self.project_id,
@@ -445,6 +456,7 @@ class AutoMLBatchPredictOperator(GoogleCloudBaseOperator):
             metadata=self.metadata,
         )
 
+    def execute(self, context: Context):
         if not hasattr(self.model, "translation_model_metadata"):
             _raise_exception_for_deprecated_operator(
                 self.__class__.__name__,
@@ -456,7 +468,7 @@ class AutoMLBatchPredictOperator(GoogleCloudBaseOperator):
             ],
         )
         self.log.info("Fetch batch prediction.")
-        operation = hook.batch_predict(
+        operation = self.hook.batch_predict(
             model_id=self.model_id,
             input_config=self.input_config,
             output_config=self.output_config,
@@ -467,16 +479,17 @@ class AutoMLBatchPredictOperator(GoogleCloudBaseOperator):
             timeout=self.timeout,
             metadata=self.metadata,
         )
-        operation_result = hook.wait_for_operation(timeout=self.timeout, operation=operation)
+        operation_result = self.hook.wait_for_operation(timeout=self.timeout, operation=operation)
         result = BatchPredictResult.to_dict(operation_result)
         self.log.info("Batch prediction is ready.")
-        project_id = self.project_id or hook.project_id
+        project_id = self.project_id or self.hook.project_id
         if project_id:
             TranslationLegacyModelPredictLink.persist(
                 context=context,
                 task_instance=self,
                 model_id=self.model_id,
                 project_id=project_id,
+                dataset_id=self.model.dataset_id,
             )
         return result
 
airflow/providers/google/cloud/operators/bigquery.py
@@ -35,7 +35,7 @@ from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException
 from airflow.models import BaseOperator, BaseOperatorLink
 from airflow.models.xcom import XCom
-from airflow.providers.common.sql.operators.sql import (
+from airflow.providers.common.sql.operators.sql import (  # type: ignore[attr-defined] # for _parse_boolean
     SQLCheckOperator,
     SQLColumnCheckOperator,
     SQLIntervalCheckOperator,
airflow/providers/google/cloud/operators/cloud_run.py
@@ -243,18 +243,16 @@ class CloudRunListJobsOperator(GoogleCloudBaseOperator):
 
 class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
     """
-    Executes a job and wait for the operation to be completed. Pushes the executed job to xcom.
+    Executes a job and waits for the operation to be completed. Pushes the executed job to xcom.
 
     :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
     :param region: Required. The ID of the Google Cloud region that the service belongs to.
     :param job_name: Required. The name of the job to update.
-    :param job: Required. The job descriptor containing the new configuration of the job to update.
-        The name field will be replaced by job_name
     :param overrides: Optional map of override values.
     :param gcp_conn_id: The connection ID used to connect to Google Cloud.
-    :param polling_period_seconds: Optional: Control the rate of the poll for the result of deferrable run.
+    :param polling_period_seconds: Optional. Control the rate of the poll for the result of deferrable run.
         By default, the trigger will poll every 10 seconds.
-    :param timeout: The timeout for this request.
+    :param timeout_seconds: Optional. The timeout for this request, in seconds.
     :param impersonation_chain: Optional service account to impersonate using short-term
         credentials, or chained list of accounts required to get the access_token
         of the last account in the list, which will be impersonated in the request.
@@ -263,7 +261,7 @@ class CloudRunExecuteJobOperator(GoogleCloudBaseOperator):
         If set as a sequence, the identities from the list must grant
         Service Account Token Creator IAM role to the directly preceding identity, with first
         account from the list granting this role to the originating account (templated).
-    :param deferrable: Run operator in the deferrable mode
+    :param deferrable: Run the operator in deferrable mode.
     """
 
     template_fields = ("project_id", "region", "gcp_conn_id", "impersonation_chain", "job_name", "overrides")
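For context, the documented parameters map onto a call like the following; the project, job name, and override payload are hypothetical, with the overrides shape assumed to follow the Cloud Run RunJobRequest overrides structure:

from airflow.providers.google.cloud.operators.cloud_run import CloudRunExecuteJobOperator

execute_job = CloudRunExecuteJobOperator(
    task_id="execute_job",
    project_id="example-project",
    region="us-central1",
    job_name="example-job",
    # Optional per-execution overrides, e.g. extra container args.
    overrides={"container_overrides": [{"args": ["--since", "{{ ds }}"]}]},
    deferrable=True,  # poll via the trigger rather than blocking a worker slot
)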
airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py
@@ -106,6 +106,8 @@ class CreateAutoMLForecastingTrainingJobOperator(AutoMLTrainingJobBaseOperator):
         "dataset_id",
         "region",
         "impersonation_chain",
+        "display_name",
+        "model_display_name",
     )
     operator_extra_links = (VertexAIModelLink(), VertexAITrainingLink())
 
@@ -121,6 +123,8 @@ class CreateAutoMLForecastingTrainingJobOperator(AutoMLTrainingJobBaseOperator):
         forecast_horizon: int,
         data_granularity_unit: str,
         data_granularity_count: int,
+        display_name: str,
+        model_display_name: str | None = None,
         optimization_objective: str | None = None,
         column_specs: dict[str, str] | None = None,
         column_transformations: list[dict[str, dict[str, str]]] | None = None,
@@ -143,7 +147,12 @@ class CreateAutoMLForecastingTrainingJobOperator(AutoMLTrainingJobBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(
-            region=region, impersonation_chain=impersonation_chain, parent_model=parent_model, **kwargs
+            display_name=display_name,
+            model_display_name=model_display_name,
+            region=region,
+            impersonation_chain=impersonation_chain,
+            parent_model=parent_model,
+            **kwargs,
         )
         self.dataset_id = dataset_id
         self.target_column = target_column
airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py
@@ -163,7 +163,7 @@ class CreateBatchPredictionJobOperator(GoogleCloudBaseOperator):
     :param poll_interval: Interval size which defines how often job status is checked in deferrable mode.
     """
 
-    template_fields = ("region", "project_id", "model_name", "impersonation_chain")
+    template_fields = ("region", "project_id", "model_name", "impersonation_chain", "job_display_name")
     operator_extra_links = (VertexAIBatchPredictionJobLink(),)
 
     def __init__(
airflow/providers/google/cloud/operators/vertex_ai/custom_job.py
@@ -496,6 +496,8 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
         "parent_model",
         "dataset_id",
         "impersonation_chain",
+        "display_name",
+        "model_display_name",
     )
     operator_extra_links = (
         VertexAIModelLink(),
@@ -507,6 +509,8 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
         *,
         command: Sequence[str] = [],
         region: str,
+        display_name: str,
+        model_display_name: str | None = None,
         parent_model: str | None = None,
         impersonation_chain: str | Sequence[str] | None = None,
         dataset_id: str | None = None,
@@ -515,6 +519,8 @@ class CreateCustomContainerTrainingJobOperator(CustomTrainingJobBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(
+            display_name=display_name,
+            model_display_name=model_display_name,
             region=region,
             parent_model=parent_model,
             impersonation_chain=impersonation_chain,
@@ -949,6 +955,8 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
         "region",
         "dataset_id",
         "impersonation_chain",
+        "display_name",
+        "model_display_name",
     )
     operator_extra_links = (VertexAIModelLink(), VertexAITrainingLink())
 
@@ -958,6 +966,8 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
         python_package_gcs_uri: str,
         python_module_name: str,
         region: str,
+        display_name: str,
+        model_display_name: str | None = None,
         parent_model: str | None = None,
         impersonation_chain: str | Sequence[str] | None = None,
         dataset_id: str | None = None,
@@ -966,6 +976,8 @@ class CreateCustomPythonPackageTrainingJobOperator(CustomTrainingJobBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(
+            display_name=display_name,
+            model_display_name=model_display_name,
             region=region,
             parent_model=parent_model,
             impersonation_chain=impersonation_chain,
@@ -1405,6 +1417,8 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
         "requirements",
         "dataset_id",
         "impersonation_chain",
+        "display_name",
+        "model_display_name",
     )
     operator_extra_links = (
         VertexAIModelLink(),
@@ -1417,6 +1431,8 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
         script_path: str,
         requirements: Sequence[str] | None = None,
         region: str,
+        display_name: str,
+        model_display_name: str | None = None,
         parent_model: str | None = None,
         impersonation_chain: str | Sequence[str] | None = None,
         dataset_id: str | None = None,
@@ -1425,6 +1441,8 @@ class CreateCustomTrainingJobOperator(CustomTrainingJobBaseOperator):
         **kwargs,
     ) -> None:
         super().__init__(
+            display_name=display_name,
+            model_display_name=model_display_name,
             region=region,
             parent_model=parent_model,
             impersonation_chain=impersonation_chain,
@@ -1732,6 +1750,7 @@ class ListCustomTrainingJobOperator(GoogleCloudBaseOperator):
         "region",
         "project_id",
         "impersonation_chain",
+        "display_name",
     ]
     operator_extra_links = [
         VertexAITrainingPipelinesLink(),
airflow/providers/google/cloud/operators/vertex_ai/generative_model.py
@@ -21,8 +21,6 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
-from google.cloud.aiplatform_v1beta1 import types as types_v1beta1
-
 from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.providers.google.cloud.hooks.vertex_ai.generative_model import GenerativeModelHook
 from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator
@@ -742,8 +740,6 @@ class CountTokensOperator(GoogleCloudBaseOperator):
         self.xcom_push(context, key="total_tokens", value=response.total_tokens)
         self.xcom_push(context, key="total_billable_characters", value=response.total_billable_characters)
 
-        return types_v1beta1.CountTokensResponse.to_dict(response)
-
 
 class RunEvaluationOperator(GoogleCloudBaseOperator):
     """
@@ -842,3 +838,155 @@ class RunEvaluationOperator(GoogleCloudBaseOperator):
         )
 
         return response.summary_metrics
+
+
+class CreateCachedContentOperator(GoogleCloudBaseOperator):
+    """
+    Create CachedContent to reduce the cost of requests that contain repeat content with high input token counts.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud location that the service belongs to.
+    :param model_name: Required. The name of the publisher model to use for cached content.
+    :param system_instruction: Developer set system instruction.
+    :param contents: The content to cache.
+    :param ttl_hours: The TTL for this resource in hours. The expiration time is computed: now + TTL.
+        Defaults to one hour.
+    :param display_name: The user-generated meaningful display name of the cached content
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields = (
+        "location",
+        "project_id",
+        "impersonation_chain",
+        "model_name",
+        "contents",
+        "system_instruction",
+    )
+
+    def __init__(
+        self,
+        *,
+        project_id: str,
+        location: str,
+        model_name: str,
+        system_instruction: str | None = None,
+        contents: list | None = None,
+        ttl_hours: float = 1,
+        display_name: str | None = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.project_id = project_id
+        self.location = location
+        self.model_name = model_name
+        self.system_instruction = system_instruction
+        self.contents = contents
+        self.ttl_hours = ttl_hours
+        self.display_name = display_name
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        self.hook = GenerativeModelHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+
+        cached_content_name = self.hook.create_cached_content(
+            project_id=self.project_id,
+            location=self.location,
+            model_name=self.model_name,
+            system_instruction=self.system_instruction,
+            contents=self.contents,
+            ttl_hours=self.ttl_hours,
+            display_name=self.display_name,
+        )
+
+        self.log.info("Cached Content Name: %s", cached_content_name)
+
+        return cached_content_name
+
+
+class GenerateFromCachedContentOperator(GoogleCloudBaseOperator):
+    """
+    Generate a response from CachedContent.
+
+    :param project_id: Required. The ID of the Google Cloud project that the service belongs to.
+    :param location: Required. The ID of the Google Cloud location that the service belongs to.
+    :param cached_content_name: Required. The name of the cached content resource.
+    :param contents: Required. The multi-part content of a message that a user or a program
+        gives to the generative model, in order to elicit a specific response.
+    :param generation_config: Optional. Generation configuration settings.
+    :param safety_settings: Optional. Per request settings for blocking unsafe content.
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields = (
+        "location",
+        "project_id",
+        "impersonation_chain",
+        "cached_content_name",
+        "contents",
+    )
+
+    def __init__(
+        self,
+        *,
+        project_id: str,
+        location: str,
+        cached_content_name: str,
+        contents: list,
+        generation_config: dict | None = None,
+        safety_settings: dict | None = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.project_id = project_id
+        self.location = location
+        self.cached_content_name = cached_content_name
+        self.contents = contents
+        self.generation_config = generation_config
+        self.safety_settings = safety_settings
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context: Context):
+        self.hook = GenerativeModelHook(
+            gcp_conn_id=self.gcp_conn_id,
+            impersonation_chain=self.impersonation_chain,
+        )
+        cached_content_text = self.hook.generate_from_cached_content(
+            project_id=self.project_id,
+            location=self.location,
+            cached_content_name=self.cached_content_name,
+            contents=self.contents,
+            generation_config=self.generation_config,
+            safety_settings=self.safety_settings,
+        )
+
+        self.log.info("Cached Content Response: %s", cached_content_text)
+
+        return cached_content_text
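Since CreateCachedContentOperator returns the cached content name, the two new operators chain naturally via XCom. A sketch DAG with placeholder project and model values (not taken from the diff):

from datetime import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.vertex_ai.generative_model import (
    CreateCachedContentOperator,
    GenerateFromCachedContentOperator,
)

with DAG("example_cached_content", start_date=datetime(2024, 1, 1), schedule=None, catchup=False):
    create_cache = CreateCachedContentOperator(
        task_id="create_cache",
        project_id="example-project",
        location="us-central1",
        model_name="gemini-1.5-pro-002",
        system_instruction="You are an expert researcher.",
        contents=["<large shared context to cache>"],
        ttl_hours=2,
        display_name="example-cache",
    )
    generate = GenerateFromCachedContentOperator(
        task_id="generate",
        project_id="example-project",
        location="us-central1",
        cached_content_name=create_cache.output,  # pulled from XCom at runtime
        contents=["Summarize the cached context."],
    )
    create_cache >> generate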
airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py
@@ -147,6 +147,7 @@ class CreateHyperparameterTuningJobOperator(GoogleCloudBaseOperator):
         "region",
         "project_id",
         "impersonation_chain",
+        "display_name",
     ]
     operator_extra_links = (VertexAITrainingLink(),)
 
airflow/providers/google/cloud/sensors/dataform.py
@@ -103,3 +103,78 @@ class DataformWorkflowInvocationStateSensor(BaseSensorOperator):
             raise AirflowException(message)
 
         return workflow_status in self.expected_statuses
+
+
+class DataformWorkflowInvocationActionStateSensor(BaseSensorOperator):
+    """
+    Checks for the status of a Workflow Invocation Action in Google Cloud Dataform.
+
+    :param project_id: Required, the Google Cloud project ID in which to start a job.
+        If set to None or missing, the default project_id from the Google Cloud connection is used.
+    :param region: Required, The location of the Dataform workflow invocation (for example europe-west1).
+    :param repository_id: Required. The ID of the Dataform repository that the task belongs to.
+    :param workflow_invocation_id: Required, ID of the workflow invocation to be checked.
+    :param target_name: Required. The name of the target to be checked in the workflow.
+    :param expected_statuses: The expected state of the action.
+        See:
+        https://cloud.google.com/python/docs/reference/dataform/latest/google.cloud.dataform_v1beta1.types.WorkflowInvocationAction.State
+    :param failure_statuses: State that will terminate the sensor with an exception
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud.
+    :param impersonation_chain: Optional service account to impersonate using short-term
+        credentials, or chained list of accounts required to get the access_token
+        of the last account in the list, which will be impersonated in the request.
+        If set as a string, the account must grant the originating account
+        the Service Account Token Creator IAM role.
+        If set as a sequence, the identities from the list must grant
+        Service Account Token Creator IAM role to the directly preceding identity, with first
+        account from the list granting this role to the originating account (templated).
+    """
+
+    template_fields: Sequence[str] = ("workflow_invocation_id",)
+
+    def __init__(
+        self,
+        *,
+        project_id: str,
+        region: str,
+        repository_id: str,
+        workflow_invocation_id: str,
+        target_name: str,
+        expected_statuses: Iterable[int],
+        failure_statuses: Iterable[int],
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: str | Sequence[str] | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.repository_id = repository_id
+        self.workflow_invocation_id = workflow_invocation_id
+        self.project_id = project_id
+        self.region = region
+        self.target_name = target_name
+        self.expected_statuses = expected_statuses
+        self.failure_statuses = failure_statuses
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+        self.hook: DataformHook | None = None
+
+    def poke(self, context: Context) -> bool:
+        self.hook = DataformHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+        workflow_invocation_actions = self.hook.query_workflow_invocation_actions(
+            project_id=self.project_id,
+            region=self.region,
+            repository_id=self.repository_id,
+            workflow_invocation_id=self.workflow_invocation_id,
+        )
+
+        for workflow_invocation_action in workflow_invocation_actions:
+            if workflow_invocation_action.target.name == self.target_name:
+                state = workflow_invocation_action.state
+                if state in self.failure_statuses:
+                    raise AirflowException(
+                        f"Workflow Invocation Action target {self.target_name} state is: {state}."
+                    )
+                return state in self.expected_statuses
+
+        raise AirflowException(f"Workflow Invocation Action target {self.target_name} not found.")
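A usage sketch for the new sensor; the IDs are placeholders, and the state values are assumed to come from the Dataform WorkflowInvocationAction.State enum referenced in the docstring:

from google.cloud.dataform_v1beta1.types import WorkflowInvocationAction

from airflow.providers.google.cloud.sensors.dataform import DataformWorkflowInvocationActionStateSensor

wait_for_target = DataformWorkflowInvocationActionStateSensor(
    task_id="wait_for_target",
    project_id="example-project",
    region="europe-west1",
    repository_id="example-repository",
    workflow_invocation_id="example-invocation-id",
    target_name="example_table",
    expected_statuses=[WorkflowInvocationAction.State.SUCCEEDED],
    failure_statuses=[WorkflowInvocationAction.State.FAILED, WorkflowInvocationAction.State.SKIPPED],
)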
airflow/providers/google/get_provider_info.py
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Google",
         "description": "Google services including:\n\n - `Google Ads <https://ads.google.com/>`__\n - `Google Cloud (GCP) <https://cloud.google.com/>`__\n - `Google Firebase <https://firebase.google.com/>`__\n - `Google LevelDB <https://github.com/google/leveldb/>`__\n - `Google Marketing Platform <https://marketingplatform.google.com/>`__\n - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)\n",
         "state": "ready",
-        "source-date-epoch": 1728485162,
+        "source-date-epoch": 1730012771,
         "versions": [
+            "10.25.0",
             "10.24.0",
             "10.23.0",
             "10.22.0",
@@ -95,7 +96,7 @@ def get_provider_info():
         ],
         "dependencies": [
             "apache-airflow>=2.8.0",
-            "apache-airflow-providers-common-compat>=1.1.0",
+            "apache-airflow-providers-common-compat>=1.2.1",
             "apache-airflow-providers-common-sql>=1.7.2",
             "asgiref>=3.5.2",
             "dill>=0.2.3",
@@ -109,7 +110,7 @@ def get_provider_info():
             "google-api-python-client>=2.0.2",
             "google-auth>=2.29.0",
             "google-auth-httplib2>=0.0.1",
-            "google-cloud-aiplatform>=1.63.0",
+            "google-cloud-aiplatform>=1.70.0",
             "google-cloud-automl>=2.12.0",
             "google-cloud-bigquery>=3.4.0,!=3.21.*,!=3.22.0,!=3.23.*",
             "google-cloud-bigquery-datatransfer>=3.13.0",
@@ -149,11 +150,12 @@ def get_provider_info():
             "grpcio-gcp>=0.2.2",
             "httpx>=0.25.0",
             "json-merge-patch>=0.2",
-            "looker-sdk>=22.4.0",
+            "looker-sdk>=22.4.0,!=24.18.0",
             "pandas-gbq>=0.7.0",
             'pandas>=2.1.2,<2.2;python_version>="3.9"',
             'pandas>=1.5.3,<2.2;python_version<"3.9"',
             "proto-plus>=1.19.6",
+            "pyarrow>=14.0.1",
             "python-slugify>=7.0.0",
             "PyOpenSSL>=23.0.0",
             "sqlalchemy-bigquery>=1.2.1",
@@ -164,7 +166,7 @@ def get_provider_info():
         "additional-extras": [
             {"name": "apache.beam", "dependencies": ["apache-beam[gcp]"]},
             {"name": "cncf.kubernetes", "dependencies": ["apache-airflow-providers-cncf-kubernetes>=7.2.0"]},
-            {"name": "leveldb", "dependencies": ["plyvel"]},
+            {"name": "leveldb", "dependencies": ["plyvel>=1.5.1"]},
             {"name": "oracle", "dependencies": ["apache-airflow-providers-oracle>=3.1.0"]},
             {"name": "facebook", "dependencies": ["apache-airflow-providers-facebook>=2.2.0"]},
             {"name": "amazon", "dependencies": ["apache-airflow-providers-amazon>=2.6.0"]},
@@ -924,11 +926,23 @@ def get_provider_info():
         "filesystems": ["airflow.providers.google.cloud.fs.gcs"],
         "asset-uris": [
             {"schemes": ["gcp"], "handler": None},
-            {"schemes": ["bigquery"], "handler": "airflow.providers.google.datasets.bigquery.sanitize_uri"},
+            {"schemes": ["bigquery"], "handler": "airflow.providers.google.assets.bigquery.sanitize_uri"},
+            {
+                "schemes": ["gs"],
+                "handler": "airflow.providers.google.assets.gcs.sanitize_uri",
+                "factory": "airflow.providers.google.assets.gcs.create_asset",
+                "to_openlineage_converter": "airflow.providers.google.assets.gcs.convert_asset_to_openlineage",
+            },
         ],
         "dataset-uris": [
             {"schemes": ["gcp"], "handler": None},
-            {"schemes": ["bigquery"], "handler": "airflow.providers.google.datasets.bigquery.sanitize_uri"},
+            {"schemes": ["bigquery"], "handler": "airflow.providers.google.assets.bigquery.sanitize_uri"},
+            {
+                "schemes": ["gs"],
+                "handler": "airflow.providers.google.assets.gcs.sanitize_uri",
+                "factory": "airflow.providers.google.assets.gcs.create_asset",
+                "to_openlineage_converter": "airflow.providers.google.assets.gcs.convert_asset_to_openlineage",
+            },
         ],
         "hooks": [
            {"integration-name": "Google Ads", "python-modules": ["airflow.providers.google.ads.hooks.ads"]},
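Because the provider now registers a handler and factory for the gs scheme, GCS URIs can be used directly as dataset/asset URIs in DAG code, e.g. (bucket and key hypothetical):

from airflow.datasets import Dataset

gcs_dataset = Dataset("gs://example-bucket/data/input.csv")
# e.g. pass [gcs_dataset] as a task's outlets, or as a DAG's schedule,
# to wire data-aware scheduling to this GCS object.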
airflow/providers/google/marketing_platform/example_dags/example_display_video.py
@@ -92,7 +92,7 @@ with DAG(
     "example_display_video_misc",
     start_date=START_DATE,
     catchup=False,
-) as dag2:
+) as dag_example_display_video_misc:
     # [START howto_google_display_video_upload_multiple_entity_read_files_to_big_query]
     upload_erf_to_bq = GCSToBigQueryOperator(
         task_id="upload_erf_to_bq",
@@ -125,7 +125,7 @@ with DAG(
     "example_display_video_sdf",
     start_date=START_DATE,
     catchup=False,
-) as dag3:
+) as dag_example_display_video_sdf:
     # [START howto_google_display_video_create_sdf_download_task_operator]
     create_sdf_download_task = GoogleDisplayVideo360CreateSDFDownloadTaskOperator(
         task_id="create_sdf_download_task", body_request=CREATE_SDF_DOWNLOAD_TASK_BODY_REQUEST
{apache_airflow_providers_google-10.24.0rc1.dist-info → apache_airflow_providers_google-10.25.0rc1.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-google
-Version: 10.24.0rc1
+Version: 10.25.0rc1
 Summary: Provider package apache-airflow-providers-google for Apache Airflow
 Keywords: airflow-provider,google,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,7 +21,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: PyOpenSSL>=23.0.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.1.0rc0
+Requires-Dist: apache-airflow-providers-common-compat>=1.2.1rc0
 Requires-Dist: apache-airflow-providers-common-sql>=1.7.2rc0
 Requires-Dist: apache-airflow>=2.8.0rc0
 Requires-Dist: asgiref>=3.5.2
@@ -36,7 +36,7 @@ Requires-Dist: google-api-core>=2.11.0,!=2.16.0,!=2.18.0
 Requires-Dist: google-api-python-client>=2.0.2
 Requires-Dist: google-auth-httplib2>=0.0.1
 Requires-Dist: google-auth>=2.29.0
-Requires-Dist: google-cloud-aiplatform>=1.63.0
+Requires-Dist: google-cloud-aiplatform>=1.70.0
 Requires-Dist: google-cloud-automl>=2.12.0
 Requires-Dist: google-cloud-batch>=0.13.0
 Requires-Dist: google-cloud-bigquery-datatransfer>=3.13.0
@@ -77,11 +77,12 @@ Requires-Dist: grpcio-gcp>=0.2.2
 Requires-Dist: httpx>=0.25.0
 Requires-Dist: immutabledict>=4.2.0
 Requires-Dist: json-merge-patch>=0.2
-Requires-Dist: looker-sdk>=22.4.0
+Requires-Dist: looker-sdk>=22.4.0,!=24.18.0
 Requires-Dist: pandas-gbq>=0.7.0
 Requires-Dist: pandas>=1.5.3,<2.2;python_version<"3.9"
 Requires-Dist: pandas>=2.1.2,<2.2;python_version>="3.9"
 Requires-Dist: proto-plus>=1.19.6
+Requires-Dist: pyarrow>=14.0.1
 Requires-Dist: python-slugify>=7.0.0
 Requires-Dist: sqlalchemy-bigquery>=1.2.1
 Requires-Dist: sqlalchemy-spanner>=1.6.2
@@ -94,7 +95,7 @@ Requires-Dist: apache-airflow-providers-cncf-kubernetes>=7.2.0rc0 ; extra == "cn
 Requires-Dist: apache-airflow-providers-common-compat ; extra == "common.compat"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
 Requires-Dist: apache-airflow-providers-facebook>=2.2.0rc0 ; extra == "facebook"
-Requires-Dist: plyvel ; extra == "leveldb"
+Requires-Dist: plyvel>=1.5.1 ; extra == "leveldb"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft.mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
@@ -107,8 +108,8 @@ Requires-Dist: apache-airflow-providers-sftp ; extra == "sftp"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Requires-Dist: apache-airflow-providers-trino ; extra == "trino"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.24.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.24.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-google/10.25.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-google/10.25.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -177,7 +178,7 @@ Provides-Extra: trino
 
 Package ``apache-airflow-providers-google``
 
-Release: ``10.24.0.rc1``
+Release: ``10.25.0.rc1``
 
 
 Google services including:
@@ -197,7 +198,7 @@ This is a provider package for ``google`` provider. All classes for this provide
 are in ``airflow.providers.google`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.24.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-google/10.25.0/>`_.
 
 Installation
 ------------
@@ -215,7 +216,7 @@ Requirements
 PIP package                                Version required
 ========================================== =========================================
 ``apache-airflow``                         ``>=2.8.0``
-``apache-airflow-providers-common-compat`` ``>=1.1.0``
+``apache-airflow-providers-common-compat`` ``>=1.2.1``
 ``apache-airflow-providers-common-sql``    ``>=1.7.2``
 ``asgiref``                                ``>=3.5.2``
 ``dill``                                   ``>=0.2.3``
@@ -229,7 +230,7 @@ PIP package                                Version required
 ``google-api-python-client``               ``>=2.0.2``
 ``google-auth``                            ``>=2.29.0``
 ``google-auth-httplib2``                   ``>=0.0.1``
-``google-cloud-aiplatform``                ``>=1.63.0``
+``google-cloud-aiplatform``                ``>=1.70.0``
 ``google-cloud-automl``                    ``>=2.12.0``
 ``google-cloud-bigquery``                  ``!=3.21.*,!=3.22.0,!=3.23.*,>=3.4.0``
 ``google-cloud-bigquery-datatransfer``     ``>=3.13.0``
@@ -269,11 +270,12 @@ PIP package                                Version required
 ``grpcio-gcp``                             ``>=0.2.2``
 ``httpx``                                  ``>=0.25.0``
 ``json-merge-patch``                       ``>=0.2``
-``looker-sdk``                             ``>=22.4.0``
+``looker-sdk``                             ``>=22.4.0,!=24.18.0``
 ``pandas-gbq``                             ``>=0.7.0``
 ``pandas``                                 ``>=2.1.2,<2.2; python_version >= "3.9"``
 ``pandas``                                 ``>=1.5.3,<2.2; python_version < "3.9"``
 ``proto-plus``                             ``>=1.19.6``
+``pyarrow``                                ``>=14.0.1``
 ``python-slugify``                         ``>=7.0.0``
 ``PyOpenSSL``                              ``>=23.0.0``
 ``sqlalchemy-bigquery``                    ``>=1.2.1``
@@ -319,4 +321,4 @@ Dependent package
 ======================================================================================================================== ====================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.24.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-google/10.25.0/changelog.html>`_.
@@ -1,6 +1,6 @@
1
1
  airflow/providers/google/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
2
- airflow/providers/google/__init__.py,sha256=HCd-PmeEVd99eSEuOle0bVEuyyRdbC-TeYHy2An3dqw,1495
3
- airflow/providers/google/get_provider_info.py,sha256=gJcV9EN6FkM8I98xA3n0KHMLwDVJxonPO04qrwBVj_E,82205
2
+ airflow/providers/google/__init__.py,sha256=1fXjRLgat4vzYE9ozVJL6esVXrJpvLQ4kiGHOxlcpbo,1495
3
+ airflow/providers/google/get_provider_info.py,sha256=HcvMEqOWqotjMwtLemXVoLAmtFiX0YnyCEL6kRa_Zes,82940
4
4
  airflow/providers/google/go_module_utils.py,sha256=XVM-IGME6CPgJA8fgDgkusFc4fz3lEghZaZ4elBkv7s,1780
5
5
  airflow/providers/google/ads/.gitignore,sha256=z_qaKzblF2LuVvP-06iDord9JBeyzIlNeJ4bx3LbtGc,167
6
6
  airflow/providers/google/ads/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -10,6 +10,9 @@ airflow/providers/google/ads/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39k
10
10
  airflow/providers/google/ads/operators/ads.py,sha256=OdUVaA0v2YGXlC3y1un5nDos0Ig1ixBYcrRMsddXJnc,4744
11
11
  airflow/providers/google/ads/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
12
12
  airflow/providers/google/ads/transfers/ads_to_gcs.py,sha256=1E8SP37NrSb98HwjhWqnxTmLmnbHgOhnJp_k5hHZJ_I,5225
13
+ airflow/providers/google/assets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
14
+ airflow/providers/google/assets/bigquery.py,sha256=MzEAmuDEA8Dv9vmxQYZBW1sdvL3HDOFIbtzBR-EMgEU,1258
15
+ airflow/providers/google/assets/gcs.py,sha256=9tMphT_48kpmrRiyvswQeQYTjgXQ2nTgqoIbOVHWDPo,1848
13
16
  airflow/providers/google/cloud/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
14
17
  airflow/providers/google/cloud/_internal_client/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
15
18
  airflow/providers/google/cloud/_internal_client/secret_manager_client.py,sha256=iDoe0W_Wf_30uSJxnXMg4-DcXZIeljf_DooyFY4vg-E,3770
@@ -42,12 +45,12 @@ airflow/providers/google/cloud/hooks/datafusion.py,sha256=un_0r0fwiLPffNG-9tWN05
42
45
  airflow/providers/google/cloud/hooks/datapipeline.py,sha256=HPNO7tkQeb_N1JQZInZeO-YWZADcNLZZijjtVAKkWcE,2637
43
46
  airflow/providers/google/cloud/hooks/dataplex.py,sha256=hOAQ5gXBE0V6fw5Y_7Q8BymD6_GmFGsc8TPvd4SwJPM,38347
44
47
  airflow/providers/google/cloud/hooks/dataprep.py,sha256=GH46CoEMnc63RoMiJ7aKvsHlGFvsBl_QcRgbmWwJ5tU,12187
45
- airflow/providers/google/cloud/hooks/dataproc.py,sha256=q1kw1AJ1AjSFb2ylljJNHVryycOQMbhB_sxeZ6Yn0PU,84496
48
+ airflow/providers/google/cloud/hooks/dataproc.py,sha256=CCTHAs4vmjrkX9WHDxEoi1vW0oBKW32KZWYiGdEJRbk,84548
46
49
  airflow/providers/google/cloud/hooks/dataproc_metastore.py,sha256=Oh6I6PbawdCb0hkfrFNU4BVbxWangCcjIJdOBAh7q2Q,32152
47
50
  airflow/providers/google/cloud/hooks/datastore.py,sha256=JuXTZqL-FAohbKBHQVYLCS3tY9pvMDjlg-dSphKiyPU,12158
48
51
  airflow/providers/google/cloud/hooks/dlp.py,sha256=U1mnUEIQBcvh0bxf9RgeKLK2gjWx09qGh-hvCDOL_8k,67586
49
52
  airflow/providers/google/cloud/hooks/functions.py,sha256=Zz1MCy_6j4oG-SXeeiVdqo4Ld68pVQg4rIuSm7RrPEo,9290
50
- airflow/providers/google/cloud/hooks/gcs.py,sha256=j7SO8bxDh_-sO0XdFvfhOA9RuAw325y-2ef3mnKXKwQ,59974
53
+ airflow/providers/google/cloud/hooks/gcs.py,sha256=QXfCJcgeO260hqNVkuZ5SzLCVb5xv9a2uNQ3_xLbHfA,62468
51
54
  airflow/providers/google/cloud/hooks/gdm.py,sha256=fCTu6SXtyQ3sn56GfIdgj6Myj_cc8UWdFVbYDMRMhZY,4123
52
55
  airflow/providers/google/cloud/hooks/kms.py,sha256=CSIoyXgfZOujEPWOk2bn2bmwcKuDWXCu8eFSvi8MV9w,6574
53
56
  airflow/providers/google/cloud/hooks/kubernetes_engine.py,sha256=VzpGcO8sf7FsAkLniatmvjyRpH_x2DV9cVYsl0ICAt4,24204
@@ -73,7 +76,7 @@ airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py,sha256=zX
73
76
  airflow/providers/google/cloud/hooks/vertex_ai/custom_job.py,sha256=S3N5Nqbv7dXbla06UhdZyde_7yQLBsRBB8_IgrPkjxk,197203
74
77
  airflow/providers/google/cloud/hooks/vertex_ai/dataset.py,sha256=hSkQL6d9j5tLCRR7LBDSAbGWWGrdwWBeIGnB21rjkCQ,18693
75
78
  airflow/providers/google/cloud/hooks/vertex_ai/endpoint_service.py,sha256=_rO_E1EL75ZclFgm1JY78CtrOkB4XcLUkmEbk5qI-sQ,16109
76
- airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py,sha256=LBdiZ7p_Yby0PKxf4FezB9xFy2eay6X7r6ezj6LSapQ,23869
79
+ airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py,sha256=2hU7pKYsTZzSTFh8ODFtIoGFxJd2ioLpbU4bCZB23as,27330
77
80
  airflow/providers/google/cloud/hooks/vertex_ai/hyperparameter_tuning_job.py,sha256=mKEotG10-FjWsR21JmrlVut4bGyWrGmQZzRIpwba4Z8,25053
78
81
  airflow/providers/google/cloud/hooks/vertex_ai/model_service.py,sha256=3XXGY6zzJPzTI5gWafwN-plXgXNsY3kyTtMa0U9LOC0,18337
79
82
  airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py,sha256=ZKmSwmxEfDZoCbtB6zYxMGsjnKFNu33r-FDe7BbvdBY,29024
@@ -106,7 +109,7 @@ airflow/providers/google/cloud/links/mlengine.py,sha256=lvhRkVVExFvwoaSskckm31BW
106
109
  airflow/providers/google/cloud/links/pubsub.py,sha256=MqCg3vHUvPVT1nkEwnjhNf5mmF5zPGtqsyxE-qkRCIw,2423
107
110
  airflow/providers/google/cloud/links/spanner.py,sha256=L2j8b6VIrLta1zTM_hsoK5sYpGb3VPf6HQ92qYhQyWk,2510
108
111
  airflow/providers/google/cloud/links/stackdriver.py,sha256=WgO-KeX9guf5l4Z3TLPUD_Pns8UZzUfbRVL4kdec8z0,2413
109
- airflow/providers/google/cloud/links/translate.py,sha256=p0tHVr_CHPJswCmNheQAnpPreM8a30AT7EjD9xW4LJk,5467
112
+ airflow/providers/google/cloud/links/translate.py,sha256=YE8St8dg4UOmWItylBU3oKTF2R0igVaCHwjTFsD-QL8,5472
110
113
  airflow/providers/google/cloud/links/vertex_ai.py,sha256=T5WGK77wKIPTiHtxKuQXvokArjVQYwZPjX5IgJX1dds,11138
111
114
  airflow/providers/google/cloud/links/workflows.py,sha256=Ux7bwq2fspvSoNT4X3xFXX2SSmp8EuwDyPQsTO-z7bI,3303
112
115
  airflow/providers/google/cloud/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -118,8 +121,8 @@ airflow/providers/google/cloud/openlineage/__init__.py,sha256=9hdXHABrVpkbpjZgUf
118
121
  airflow/providers/google/cloud/openlineage/mixins.py,sha256=5blMJ1heK9rMYBWR5p-lBj1y_EA1qJyjfb-fwdBM31k,12908
119
122
  airflow/providers/google/cloud/openlineage/utils.py,sha256=iBeHh68qMumZ1pc-PWRBgQW8sycwmdtkJV-oCvYplPA,5781
120
123
  airflow/providers/google/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
121
- airflow/providers/google/cloud/operators/automl.py,sha256=2S5gziRfU-3ptHNTHmRMXo9CfQcl8trNqPEqAVCRlZQ,63779
122
- airflow/providers/google/cloud/operators/bigquery.py,sha256=l8wLnRdCsH1IGWsxZrdb-YzHlyOssymQo19lnNgxdyA,131077
124
+ airflow/providers/google/cloud/operators/automl.py,sha256=PNgilSmO2ujUz0aNZECKgL_EAyjzfmR26OECDLn8Aic,64255
125
+ airflow/providers/google/cloud/operators/bigquery.py,sha256=sHdOHm9x-uiUN1MF9PLkbuuZLcuMVpNc1J21oKVFsCs,131128
123
126
  airflow/providers/google/cloud/operators/bigquery_dts.py,sha256=6VJISM4HoMBQ3EQ5nz3zxFk8tfluGA1d2vcUNUlYLPc,17695
124
127
  airflow/providers/google/cloud/operators/bigtable.py,sha256=BnWHnTEscyPbsKWFaSreLr62W68fmHu5loQVZex7LPs,26921
  airflow/providers/google/cloud/operators/cloud_base.py,sha256=Xysh4znvIQIxbQqmfKoaL6O09FikndHrQuKKUnEV7KU,1483
@@ -127,7 +130,7 @@ airflow/providers/google/cloud/operators/cloud_batch.py,sha256=uH4II9f-dUtC7LskX
  airflow/providers/google/cloud/operators/cloud_build.py,sha256=7L3mBxls1r628o1XyC1vMOnRThWgfRx1x1z5zWXjx70,48481
  airflow/providers/google/cloud/operators/cloud_composer.py,sha256=MAmqS0FkbKfSk3-1lUyC5F3-76H5eVUBugmMeioEFFk,32679
  airflow/providers/google/cloud/operators/cloud_memorystore.py,sha256=0I4nmUIRsA2lcJlKh2uTnty9ONH40sTxAshsrVN81dY,70989
- airflow/providers/google/cloud/operators/cloud_run.py,sha256=SXpCgomERnXdyKymRC-djbTfY_Mscv-94h12uT8RRBQ,21872
+ airflow/providers/google/cloud/operators/cloud_run.py,sha256=z5AnotHbi_Nq-BMehZkON01nfDQBE7uKOd3idbKo3yk,21752
  airflow/providers/google/cloud/operators/cloud_sql.py,sha256=VA_RRg_Zv3zo4cKmpEfhCIpMo5c6IIdDdkj7YkeiZLs,53374
  airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py,sha256=YQsVg8pDegEDvsWsZCbGrSqCln3iQyLQErZS_XZTnBo,48066
  airflow/providers/google/cloud/operators/compute.py,sha256=lFGCacevkKJvAszJhfSLAOfetlsbYrCoImTeWXS5bqw,74607
@@ -161,13 +164,13 @@ airflow/providers/google/cloud/operators/video_intelligence.py,sha256=NQvEueDegd
  airflow/providers/google/cloud/operators/vision.py,sha256=iKEEj33nVLrHUcJr2sE72NC1mAAZ7Gz-_XGdLgO4dqA,67720
  airflow/providers/google/cloud/operators/workflows.py,sha256=fnyWLqRHz0UYu6AnQKKZIMlfSIg_v5nNbZAt6ASe4fI,28977
  airflow/providers/google/cloud/operators/vertex_ai/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=oL9hug_RLoe5h85wUJ7XrBe1pZivZzUOkNn9o0O9Gqs,32272
- airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=e0oFWmCRH3aQHNckjEf_YO5zP9LqiLVTzB1QTgv3iUo,28828
- airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=2RYk2fyv12Q0GPK0X9t2MTHQtdbpwVhdAk5T-T17e8w,98748
+ airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py,sha256=W5yIG5pWgc8OgmUyDO3ldW8_pl8g74lHV7a3ZdVHF5A,32527
+ airflow/providers/google/cloud/operators/vertex_ai/batch_prediction_job.py,sha256=mQs5kdo9Bepfd7p-dqSuuhflPzCYc1HYc0O-gqN1e4Q,28848
+ airflow/providers/google/cloud/operators/vertex_ai/custom_job.py,sha256=rO7St-vGhR-c_Ii3wIRFXwovRBZVQdN8jY4MCCRzClo,99426
  airflow/providers/google/cloud/operators/vertex_ai/dataset.py,sha256=u_iEOIJEoZj10bbiymvoAT7aJP50HyZMt7_2KxUKbxM,23051
  airflow/providers/google/cloud/operators/vertex_ai/endpoint_service.py,sha256=mtbgC2NrolxIxyxOmqlnokc22NtCspa8oXwuHo140EU,26739
- airflow/providers/google/cloud/operators/vertex_ai/generative_model.py,sha256=dv4qpN-QXYuc54tt0fLHb-4PZwazllwcoiAwGpT-9Qg,37911
- airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py,sha256=8mcAAABSVhZKNv1RYxrcAitzqpYpxxt1fP4ZGTre_VE,25479
+ airflow/providers/google/cloud/operators/vertex_ai/generative_model.py,sha256=9CdLV04pjcn1CEDzWNI4eLykhi2cts0F9zJluA_Tk0g,43995
+ airflow/providers/google/cloud/operators/vertex_ai/hyperparameter_tuning_job.py,sha256=3YcxwPyexYrzgakSr5vLbu2-zUV9myzTietF8_CafeE,25503
  airflow/providers/google/cloud/operators/vertex_ai/model_service.py,sha256=bPR82AcehPllV1IYk7nM0l6IsR489gQjPZyaiyTE94s,35170
  airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py,sha256=sOXfmnlyyAOg_LkAn0v1HlaZRbFVBy9vAXjKJlxPFzg,22968
  airflow/providers/google/cloud/secrets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -179,7 +182,7 @@ airflow/providers/google/cloud/sensors/bigtable.py,sha256=iVQDpUjZKcjvkFCI98Ea28
  airflow/providers/google/cloud/sensors/cloud_composer.py,sha256=BWP2eWG1ZY8avvSoOsO2NKnvEr6ORHE6IF3ZhhGk1G4,12623
  airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py,sha256=8oviMlDS7GKNPodALwZ0JxLAB1k7IPPaSPDBnXFk4uE,4878
  airflow/providers/google/cloud/sensors/dataflow.py,sha256=Buo9fg591XwM1ZX92HNJKmsPnFSwGcYckZXyV_skuiQ,23508
- airflow/providers/google/cloud/sensors/dataform.py,sha256=1C9f62XKADLN0oDgaheg2VeS_69IROrnPz7Z35Mig60,4753
+ airflow/providers/google/cloud/sensors/dataform.py,sha256=Jg3eVQ39VUDCKGxnFDWzM4xZ_uEfAL6NYBiRP3OgKrU,8473
  airflow/providers/google/cloud/sensors/datafusion.py,sha256=flbL3RKKPdc5vjoI0zdbPdXq4CjYyhdb_UnSoAzZwAs,5532
  airflow/providers/google/cloud/sensors/dataplex.py,sha256=DrTTatKEsnuXtElehsX4_9E0OhZ5LtAw0qH9nIVhEX0,14940
  airflow/providers/google/cloud/sensors/dataprep.py,sha256=bpec49ybL8rLwddTYdc8mKja_Ol_f66j-IKJLeL53-Y,1913
@@ -264,8 +267,6 @@ airflow/providers/google/common/links/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kO
  airflow/providers/google/common/links/storage.py,sha256=24tb1xQ00gbGyOu26wryNkTYXlk_9H-ieUra9WmuPIY,2265
  airflow/providers/google/common/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/google/common/utils/id_token_credentials.py,sha256=dm4A38jK6W_9k5UEfOKk2-bwS0N3OpmfdqK7VMlUIX0,8992
- airflow/providers/google/datasets/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/google/datasets/bigquery.py,sha256=MzEAmuDEA8Dv9vmxQYZBW1sdvL3HDOFIbtzBR-EMgEU,1258
  airflow/providers/google/firebase/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/google/firebase/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/google/firebase/hooks/firestore.py,sha256=pj5mLOLdi1Kz6GExshy9T9xa1rELPWRrifhTvoS43tg,5668
@@ -278,7 +279,7 @@ airflow/providers/google/leveldb/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUf
  airflow/providers/google/leveldb/operators/leveldb.py,sha256=eeoWiqD2RLW73gzM4NlsWSU8cu2psItuzy8eA5rbW4A,3788
  airflow/providers/google/marketing_platform/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/google/marketing_platform/example_dags/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/google/marketing_platform/example_dags/example_display_video.py,sha256=8DraOJVgfjULkgMKZukw_3Er6xuXQT-1G5C-SaZ7efc,8561
+ airflow/providers/google/marketing_platform/example_dags/example_display_video.py,sha256=YyZKRsgSWDYlEPhJQeZed7mSP_Q8Xl9Ro_np37RmvxQ,8612
  airflow/providers/google/marketing_platform/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/google/marketing_platform/hooks/analytics.py,sha256=I2OawmcOpFXNEO0AeqVuWJlTc7pY3-BUGnjiM-kccus,8172
  airflow/providers/google/marketing_platform/hooks/analytics_admin.py,sha256=263rnO8n65ENzj2delmnb9_Iz8AvCFnoYIoU2ayv4eY,10024
@@ -310,7 +311,7 @@ airflow/providers/google/suite/transfers/gcs_to_gdrive.py,sha256=CxtVhp3wlEOBtjR
  airflow/providers/google/suite/transfers/gcs_to_sheets.py,sha256=4nwXWkTySeBXNuThPxzO7uww_hH6PthpppTeuShn27Q,4363
  airflow/providers/google/suite/transfers/local_to_drive.py,sha256=ZSK0b1Rd6x_xsP2DVcUzeYu3qoo9Bsp3VmnKyBsFRH8,6105
  airflow/providers/google/suite/transfers/sql_to_sheets.py,sha256=sORkYSUDArRPnvi8WCiXP7YIXtpAgpEPhf8cqgpu644,5220
- apache_airflow_providers_google-10.24.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
- apache_airflow_providers_google-10.24.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
- apache_airflow_providers_google-10.24.0rc1.dist-info/METADATA,sha256=SWU6q90s_5utuT5wOyuTPpK4QYezLyo-vmQVATREoZo,17219
- apache_airflow_providers_google-10.24.0rc1.dist-info/RECORD,,
+ apache_airflow_providers_google-10.25.0rc1.dist-info/entry_points.txt,sha256=Ay1Uo7uHxdXCxWew3CyBHumZ44Ld-iR7AcSR2fY-PLw,102
+ apache_airflow_providers_google-10.25.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+ apache_airflow_providers_google-10.25.0rc1.dist-info/METADATA,sha256=ISi-Egfyr1nu6OOfchXUSrauxNZJ8JbHTw4nKO_K6no,17334
+ apache_airflow_providers_google-10.25.0rc1.dist-info/RECORD,,