apache-airflow-providers-google 10.20.0rc1__py3-none-any.whl → 10.21.0rc1__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
Files changed (69)
  1. airflow/providers/google/__init__.py +1 -1
  2. airflow/providers/google/ads/hooks/ads.py +16 -8
  3. airflow/providers/google/ads/transfers/ads_to_gcs.py +2 -1
  4. airflow/providers/google/cloud/_internal_client/secret_manager_client.py +6 -3
  5. airflow/providers/google/cloud/hooks/bigquery.py +158 -79
  6. airflow/providers/google/cloud/hooks/cloud_sql.py +12 -6
  7. airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py +34 -17
  8. airflow/providers/google/cloud/hooks/dataflow.py +30 -26
  9. airflow/providers/google/cloud/hooks/dataform.py +2 -1
  10. airflow/providers/google/cloud/hooks/datafusion.py +4 -2
  11. airflow/providers/google/cloud/hooks/dataproc.py +102 -51
  12. airflow/providers/google/cloud/hooks/functions.py +20 -10
  13. airflow/providers/google/cloud/hooks/kubernetes_engine.py +22 -11
  14. airflow/providers/google/cloud/hooks/os_login.py +2 -1
  15. airflow/providers/google/cloud/hooks/secret_manager.py +18 -9
  16. airflow/providers/google/cloud/hooks/translate.py +2 -1
  17. airflow/providers/google/cloud/hooks/vertex_ai/batch_prediction_job.py +2 -1
  18. airflow/providers/google/cloud/hooks/vertex_ai/generative_model.py +141 -0
  19. airflow/providers/google/cloud/hooks/vertex_ai/pipeline_job.py +2 -1
  20. airflow/providers/google/cloud/links/base.py +2 -1
  21. airflow/providers/google/cloud/links/datafusion.py +2 -1
  22. airflow/providers/google/cloud/log/stackdriver_task_handler.py +4 -2
  23. airflow/providers/google/cloud/openlineage/mixins.py +10 -0
  24. airflow/providers/google/cloud/openlineage/utils.py +4 -2
  25. airflow/providers/google/cloud/operators/bigquery.py +55 -21
  26. airflow/providers/google/cloud/operators/cloud_batch.py +3 -1
  27. airflow/providers/google/cloud/operators/cloud_sql.py +22 -11
  28. airflow/providers/google/cloud/operators/dataform.py +2 -1
  29. airflow/providers/google/cloud/operators/dataproc.py +75 -34
  30. airflow/providers/google/cloud/operators/dataproc_metastore.py +24 -12
  31. airflow/providers/google/cloud/operators/gcs.py +2 -1
  32. airflow/providers/google/cloud/operators/pubsub.py +10 -5
  33. airflow/providers/google/cloud/operators/vertex_ai/auto_ml.py +3 -3
  34. airflow/providers/google/cloud/operators/vertex_ai/custom_job.py +12 -9
  35. airflow/providers/google/cloud/operators/vertex_ai/generative_model.py +243 -0
  36. airflow/providers/google/cloud/operators/vertex_ai/pipeline_job.py +2 -1
  37. airflow/providers/google/cloud/operators/vision.py +36 -18
  38. airflow/providers/google/cloud/sensors/gcs.py +11 -2
  39. airflow/providers/google/cloud/sensors/pubsub.py +2 -1
  40. airflow/providers/google/cloud/transfers/bigquery_to_gcs.py +21 -12
  41. airflow/providers/google/cloud/transfers/bigquery_to_postgres.py +1 -1
  42. airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py +2 -1
  43. airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +17 -5
  44. airflow/providers/google/cloud/transfers/gcs_to_gcs.py +12 -6
  45. airflow/providers/google/cloud/transfers/local_to_gcs.py +5 -1
  46. airflow/providers/google/cloud/transfers/mysql_to_gcs.py +2 -1
  47. airflow/providers/google/cloud/transfers/oracle_to_gcs.py +2 -1
  48. airflow/providers/google/cloud/transfers/presto_to_gcs.py +2 -1
  49. airflow/providers/google/cloud/transfers/s3_to_gcs.py +2 -1
  50. airflow/providers/google/cloud/transfers/trino_to_gcs.py +2 -1
  51. airflow/providers/google/cloud/triggers/cloud_batch.py +2 -1
  52. airflow/providers/google/cloud/triggers/cloud_run.py +2 -1
  53. airflow/providers/google/cloud/triggers/dataflow.py +2 -1
  54. airflow/providers/google/cloud/triggers/vertex_ai.py +2 -1
  55. airflow/providers/google/cloud/utils/external_token_supplier.py +4 -2
  56. airflow/providers/google/cloud/utils/field_sanitizer.py +4 -2
  57. airflow/providers/google/cloud/utils/field_validator.py +6 -3
  58. airflow/providers/google/cloud/utils/helpers.py +2 -1
  59. airflow/providers/google/common/hooks/base_google.py +2 -1
  60. airflow/providers/google/common/utils/id_token_credentials.py +2 -1
  61. airflow/providers/google/get_provider_info.py +3 -2
  62. airflow/providers/google/go_module_utils.py +4 -2
  63. airflow/providers/google/marketing_platform/hooks/analytics_admin.py +12 -6
  64. airflow/providers/google/marketing_platform/links/analytics_admin.py +2 -1
  65. airflow/providers/google/suite/transfers/local_to_drive.py +2 -1
  66. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/METADATA +8 -8
  67. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/RECORD +69 -69
  68. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/WHEEL +0 -0
  69. {apache_airflow_providers_google-10.20.0rc1.dist-info → apache_airflow_providers_google-10.21.0rc1.dist-info}/entry_points.txt +0 -0
--- a/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/airflow/providers/google/cloud/hooks/bigquery.py
@@ -87,7 +87,8 @@ BigQueryJob = Union[CopyJob, QueryJob, LoadJob, ExtractJob]


 class BigQueryHook(GoogleBaseHook, DbApiHook):
-    """Interact with BigQuery.
+    """
+    Interact with BigQuery.

     This hook uses the Google Cloud connection.

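
Every hunk in this file is the same mechanical restyle: the docstring summary sentence moves from the opening-quotes line to the line below, with no runtime change. For orientation, a minimal sketch of instantiating the hook, assuming a configured `google_cloud_default` Airflow connection:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

# Uses the provider's default connection ID; any configured
# Google Cloud connection can be passed via gcp_conn_id.
hook = BigQueryHook(gcp_conn_id="google_cloud_default", use_legacy_sql=False)
```
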
@@ -206,7 +207,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         return build("bigquery", "v2", http=http_authorized, cache_discovery=False)

     def get_client(self, project_id: str = PROVIDE_PROJECT_ID, location: str | None = None) -> Client:
-        """Get an authenticated BigQuery Client.
+        """
+        Get an authenticated BigQuery Client.

         :param project_id: Project ID for the project which the client acts on behalf of.
         :param location: Default location for jobs / datasets / tables.
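
As the docstring notes, `get_client` returns an authenticated `google.cloud.bigquery.Client`. A minimal sketch, with placeholder project and location values:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
# "my-project" and "EU" are placeholders; omit them to fall back
# to the connection's defaults.
client = hook.get_client(project_id="my-project", location="EU")
dataset = client.get_dataset("my-project.my_dataset")
```
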
@@ -223,7 +225,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         return f"bigquery://{self.project_id}"

     def get_sqlalchemy_engine(self, engine_kwargs: dict | None = None):
-        """Create an SQLAlchemy engine object.
+        """
+        Create an SQLAlchemy engine object.

         :param engine_kwargs: Kwargs used in :func:`~sqlalchemy.create_engine`.
         """
@@ -288,7 +291,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         replace: Any = False,
         **kwargs,
     ) -> None:
-        """Insert rows.
+        """
+        Insert rows.

         Insertion is currently unsupported. Theoretically, you could use
         BigQuery's streaming API to insert rows into a table, but this hasn't
@@ -303,7 +307,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         dialect: str | None = None,
         **kwargs,
     ) -> pd.DataFrame:
-        """Get a Pandas DataFrame for the BigQuery results.
+        """
+        Get a Pandas DataFrame for the BigQuery results.

         The DbApiHook method must be overridden because Pandas doesn't support
         PEP 249 connections, except for SQLite.
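
A usage sketch for `get_pandas_df`; the table name is a placeholder, and when `dialect` is omitted it falls back to the hook's `use_legacy_sql` setting:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
# dialect may be "legacy" or "standard".
df = hook.get_pandas_df(
    sql="SELECT name, value FROM `my-project.my_dataset.events`",
    dialect="standard",
)
print(df.head())
```
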
@@ -328,7 +333,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):

     @GoogleBaseHook.fallback_to_default_project_id
     def table_exists(self, dataset_id: str, table_id: str, project_id: str) -> bool:
-        """Check if a table exists in Google BigQuery.
+        """
+        Check if a table exists in Google BigQuery.

         :param project_id: The Google cloud project in which to look for the
             table. The connection supplied to the hook must provide access to
@@ -348,7 +354,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
     def table_partition_exists(
         self, dataset_id: str, table_id: str, partition_id: str, project_id: str
     ) -> bool:
-        """Check if a partition exists in Google BigQuery.
+        """
+        Check if a partition exists in Google BigQuery.

         :param project_id: The Google cloud project in which to look for the
             table. The connection supplied to the hook must provide access to
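
A sketch combining the two existence checks above; dataset, table, and partition IDs are placeholders (daily-partitioned tables use `YYYYMMDD` partition IDs):

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
if hook.table_exists(dataset_id="my_dataset", table_id="events", project_id="my-project"):
    has_partition = hook.table_partition_exists(
        dataset_id="my_dataset",
        table_id="events",
        partition_id="20240630",  # placeholder daily partition
        project_id="my-project",
    )
```
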
@@ -382,7 +389,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         location: str | None = None,
         exists_ok: bool = True,
     ) -> Table:
-        """Create a new, empty table in the dataset.
+        """
+        Create a new, empty table in the dataset.

         To create a view, which is defined by a SQL query, parse a dictionary to
         the *view* argument.
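
A sketch of the *view* usage described above, with placeholder names; the dictionary keys follow the BigQuery REST `view` resource:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
# Passing a dict to `view` creates a view instead of a plain table.
hook.create_empty_table(
    project_id="my-project",
    dataset_id="my_dataset",
    table_id="events_view",
    view={
        "query": "SELECT * FROM `my-project.my_dataset.events` WHERE value > 0",
        "useLegacySql": False,
    },
)
```
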
@@ -486,7 +494,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         dataset_reference: dict[str, Any] | None = None,
         exists_ok: bool = True,
     ) -> dict[str, Any]:
-        """Create a new empty dataset.
+        """
+        Create a new empty dataset.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert

@@ -547,7 +556,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         max_results: int | None = None,
         retry: Retry = DEFAULT_RETRY,
     ) -> list[dict[str, Any]]:
-        """Get the list of tables for a given dataset.
+        """
+        Get the list of tables for a given dataset.

         For more information, see:
         https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list
@@ -576,7 +586,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         delete_contents: bool = False,
         retry: Retry = DEFAULT_RETRY,
     ) -> None:
-        """Delete a dataset of Big query in your project.
+        """
+        Delete a dataset of Big query in your project.

         :param project_id: The name of the project where we have the dataset.
         :param dataset_id: The dataset to be delete.
@@ -623,7 +634,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         location: str | None = None,
         project_id: str = PROVIDE_PROJECT_ID,
     ) -> Table:
-        """Create an external table in the dataset with data from Google Cloud Storage.
+        """
+        Create an external table in the dataset with data from Google Cloud Storage.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource

@@ -759,7 +771,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         table_id: str | None = None,
         project_id: str = PROVIDE_PROJECT_ID,
     ) -> dict[str, Any]:
-        """Change some fields of a table.
+        """
+        Change some fields of a table.

         Use ``fields`` to specify which fields to update. At least one field
         must be provided. If a field is listed in ``fields`` and is ``None``
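
A sketch of the ``fields`` semantics described above: only the fields named in ``fields`` are updated, and a listed field that is ``None`` in the resource is cleared server-side. All names are placeholders:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
hook.update_table(
    table_resource={"description": "Raw click events"},
    fields=["description"],  # leave every other table property untouched
    project_id="my-project",
    dataset_id="my_dataset",
    table_id="events",
)
```
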
@@ -815,7 +828,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         require_partition_filter: bool | None = None,
         encryption_configuration: dict | None = None,
     ) -> None:
-        """Patch information in an existing table.
+        """
+        Patch information in an existing table.

         It only updates fields that are provided in the request object. This
         method is deprecated. Please use :func:`.update_table` instead.
@@ -910,7 +924,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         skip_invalid_rows: bool = False,
         fail_on_error: bool = False,
     ) -> None:
-        """Stream data into BigQuery one record at a time without a load job.
+        """
+        Stream data into BigQuery one record at a time without a load job.

         .. seealso::
             For more information, see:
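
A sketch of the streaming-insert payload for `insert_all`; each row is wrapped in a `json` object per the `tabledata.insertAll` API, and `insertId` enables best-effort deduplication. IDs and values are placeholders:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
hook.insert_all(
    project_id="my-project",
    dataset_id="my_dataset",
    table_id="events",
    rows=[
        {"insertId": "evt-1", "json": {"name": "click", "value": 1}},
        {"insertId": "evt-2", "json": {"name": "view", "value": 2}},
    ],
    fail_on_error=True,  # raise instead of just logging insert errors
)
```
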
@@ -963,7 +978,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         project_id: str = PROVIDE_PROJECT_ID,
         retry: Retry = DEFAULT_RETRY,
     ) -> Dataset:
-        """Change some fields of a dataset.
+        """
+        Change some fields of a dataset.

         Use ``fields`` to specify which fields to update. At least one field
         must be provided. If a field is listed in ``fields`` and is ``None`` in
@@ -1009,7 +1025,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
     def patch_dataset(
         self, dataset_id: str, dataset_resource: dict, project_id: str = PROVIDE_PROJECT_ID
     ) -> dict:
-        """Patches information in an existing dataset.
+        """
+        Patches information in an existing dataset.

         It only replaces fields that are provided in the submitted dataset resource.

@@ -1060,7 +1077,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         table_prefix: str | None = None,
         max_results: int | None = None,
     ) -> list[dict[str, Any]]:
-        """List tables of a BigQuery dataset.
+        """
+        List tables of a BigQuery dataset.

         If a table prefix is specified, only tables beginning by it are
         returned. This method is deprecated. Please use
@@ -1101,7 +1119,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         retry: Retry = DEFAULT_RETRY,
         return_iterator: bool = False,
     ) -> list[DatasetListItem] | HTTPIterator:
-        """Get all BigQuery datasets in the current project.
+        """
+        Get all BigQuery datasets in the current project.

         For more information, see:
         https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list
@@ -1144,7 +1163,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):

     @GoogleBaseHook.fallback_to_default_project_id
     def get_dataset(self, dataset_id: str, project_id: str = PROVIDE_PROJECT_ID) -> Dataset:
-        """Fetch the dataset referenced by *dataset_id*.
+        """
+        Fetch the dataset referenced by *dataset_id*.

         :param dataset_id: The BigQuery Dataset ID
         :param project_id: The Google Cloud Project ID
@@ -1169,7 +1189,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         view_project: str | None = None,
         project_id: str = PROVIDE_PROJECT_ID,
     ) -> dict[str, Any]:
-        """Grant authorized view access of a dataset to a view table.
+        """
+        Grant authorized view access of a dataset to a view table.

         If this view has already been granted access to the dataset, do nothing.
         This method is not atomic. Running it may clobber a simultaneous update.
@@ -1221,7 +1242,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
     def run_table_upsert(
         self, dataset_id: str, table_resource: dict[str, Any], project_id: str = PROVIDE_PROJECT_ID
     ) -> dict[str, Any]:
-        """Update a table if it exists, otherwise create a new one.
+        """
+        Update a table if it exists, otherwise create a new one.

         Since BigQuery does not natively allow table upserts, this is not an
         atomic operation.
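
A sketch of a table upsert; the target table is taken from the `tableReference` inside `table_resource`, and the table is updated if present, created otherwise. The resource shown (with a placeholder `expirationTime` in epoch milliseconds) is illustrative only:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
hook.run_table_upsert(
    dataset_id="my_dataset",
    table_resource={
        "tableReference": {"tableId": "events"},
        "expirationTime": 1735689600000,  # placeholder value
    },
    project_id="my-project",
)
```
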
@@ -1255,7 +1277,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         category=AirflowProviderDeprecationWarning,
     )
     def run_table_delete(self, deletion_dataset_table: str, ignore_if_missing: bool = False) -> None:
-        """Delete an existing table from the dataset.
+        """
+        Delete an existing table from the dataset.

         If the table does not exist, return an error unless *ignore_if_missing*
         is set to True.
@@ -1278,7 +1301,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         not_found_ok: bool = True,
         project_id: str = PROVIDE_PROJECT_ID,
     ) -> None:
-        """Delete an existing table from the dataset.
+        """
+        Delete an existing table from the dataset.

         If the table does not exist, return an error unless *not_found_ok* is
         set to True.
@@ -1308,7 +1332,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         page_token: str | None = None,
         start_index: int | None = None,
     ) -> list[dict]:
-        """Get data from given table.
+        """
+        Get data from given table.

         This method is deprecated. Please use :func:`.list_rows` instead.

@@ -1348,7 +1373,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         retry: Retry = DEFAULT_RETRY,
         return_iterator: bool = False,
     ) -> list[Row] | RowIterator:
-        """List rows in a table.
+        """
+        List rows in a table.

         See https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list

@@ -1397,7 +1423,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):

     @GoogleBaseHook.fallback_to_default_project_id
     def get_schema(self, dataset_id: str, table_id: str, project_id: str = PROVIDE_PROJECT_ID) -> dict:
-        """Get the schema for a given dataset and table.
+        """
+        Get the schema for a given dataset and table.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/tables#resource

@@ -1420,7 +1447,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         table_id: str,
         project_id: str = PROVIDE_PROJECT_ID,
     ) -> dict[str, Any]:
-        """Update fields within a schema for a given dataset and table.
+        """
+        Update fields within a schema for a given dataset and table.

         Note that some fields in schemas are immutable; trying to change them
         will cause an exception.
@@ -1515,7 +1543,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         location: str | None = None,
         retry: Retry = DEFAULT_RETRY,
     ) -> bool:
-        """Check if jobs have completed.
+        """
+        Check if jobs have completed.

         :param job_id: id of the job.
         :param project_id: Google Cloud Project where the job is running
@@ -1544,7 +1573,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         project_id: str = PROVIDE_PROJECT_ID,
         location: str | None = None,
     ) -> None:
-        """Cancel a job and wait for cancellation to complete.
+        """
+        Cancel a job and wait for cancellation to complete.

         :param job_id: id of the job.
         :param project_id: Google Cloud Project where the job is running
@@ -1589,7 +1619,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         project_id: str = PROVIDE_PROJECT_ID,
         location: str | None = None,
     ) -> BigQueryJob | UnknownJob:
-        """Retrieve a BigQuery job.
+        """
+        Retrieve a BigQuery job.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs

@@ -1623,7 +1654,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         retry: Retry = DEFAULT_RETRY,
         timeout: float | None = None,
     ) -> BigQueryJob:
-        """Execute a BigQuery job and wait for it to complete.
+        """
+        Execute a BigQuery job and wait for it to complete.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs

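
A sketch of `insert_job`, the central entry point the BigQuery operators build on; `configuration` follows the REST `jobs` resource, and (unless `nowait` is passed) the call waits for the job to finish. Project, location, and query are placeholders:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
job = hook.insert_job(
    configuration={
        "query": {
            "query": "SELECT COUNT(*) FROM `my-project.my_dataset.events`",
            "useLegacySql": False,
        }
    },
    project_id="my-project",
    location="EU",
)
print(job.job_id, job.state)  # completed job object is returned
```
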
@@ -1681,7 +1713,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         category=AirflowProviderDeprecationWarning,
     )
     def run_with_configuration(self, configuration: dict) -> str:
-        """Execute a BigQuery SQL query.
+        """
+        Execute a BigQuery SQL query.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs

@@ -1725,7 +1758,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         labels: dict | None = None,
         description: str | None = None,
     ) -> str:
-        """Load data from Google Cloud Storage to BigQuery.
+        """
+        Load data from Google Cloud Storage to BigQuery.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs

@@ -1949,7 +1983,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         labels: dict | None = None,
         encryption_configuration: dict | None = None,
     ) -> str:
-        """Copy data from one BigQuery table to another.
+        """
+        Copy data from one BigQuery table to another.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy

@@ -2036,7 +2071,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         labels: dict | None = None,
         return_full_job: bool = False,
     ) -> str | BigQueryJob:
-        """Copy data from BigQuery to Google Cloud Storage.
+        """
+        Copy data from BigQuery to Google Cloud Storage.

         .. seealso:: https://cloud.google.com/bigquery/docs/reference/v2/jobs

@@ -2120,7 +2156,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):
         location: str | None = None,
         encryption_configuration: dict | None = None,
     ) -> str:
-        """Execute a BigQuery SQL query.
+        """
+        Execute a BigQuery SQL query.

         Optionally persists results in a BigQuery table.

@@ -2450,7 +2487,8 @@ class BigQueryHook(GoogleBaseHook, DbApiHook):


 class BigQueryConnection:
-    """BigQuery connection.
+    """
+    BigQuery connection.

     BigQuery does not have a notion of a persistent connection. Thus, these
     objects are small stateless factories for cursors, which do all the real
@@ -2477,7 +2515,8 @@ class BigQueryConnection:


 class BigQueryBaseCursor(LoggingMixin):
-    """BigQuery cursor.
+    """
+    BigQuery cursor.

     The BigQuery base cursor contains helper methods to execute queries against
     BigQuery. The methods can be used directly by operators, in cases where a
@@ -2513,7 +2552,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def create_empty_table(self, *args, **kwargs):
-        """Create empty table. DEPRECATED.
+        """
+        Create empty table. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_table`
         instead.
@@ -2525,7 +2565,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def create_empty_dataset(self, *args, **kwargs) -> dict[str, Any]:
-        """Create empty dataset. DEPRECATED.
+        """
+        Create empty dataset. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_empty_dataset`
         instead.
@@ -2537,7 +2578,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_dataset_tables(self, *args, **kwargs) -> list[dict[str, Any]]:
-        """Get dataset tables. DEPRECATED.
+        """
+        Get dataset tables. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables`
         instead.
@@ -2549,7 +2591,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def delete_dataset(self, *args, **kwargs) -> None:
-        """Delete dataset. DEPRECATED.
+        """
+        Delete dataset. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.delete_dataset`
         instead.
@@ -2561,7 +2604,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def create_external_table(self, *args, **kwargs):
-        """Create external table. DEPRECATED.
+        """
+        Create external table. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.create_external_table`
         instead.
@@ -2573,7 +2617,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def patch_table(self, *args, **kwargs) -> None:
-        """Patch table. DEPRECATED.
+        """
+        Patch table. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_table`
         instead.
@@ -2585,7 +2630,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def insert_all(self, *args, **kwargs) -> None:
-        """Insert all. DEPRECATED.
+        """
+        Insert all. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_all`
         instead.
@@ -2597,7 +2643,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def update_dataset(self, *args, **kwargs) -> dict:
-        """Update dataset. DEPRECATED.
+        """
+        Update dataset. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.update_dataset`
         instead.
@@ -2609,7 +2656,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def patch_dataset(self, *args, **kwargs) -> dict:
-        """Patch dataset. DEPRECATED.
+        """
+        Patch dataset. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.patch_dataset`
         instead.
@@ -2621,7 +2669,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_dataset_tables_list(self, *args, **kwargs) -> list[dict[str, Any]]:
-        """Get dataset tables list. DEPRECATED.
+        """
+        Get dataset tables list. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset_tables_list`
         instead.
@@ -2633,7 +2682,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_datasets_list(self, *args, **kwargs) -> list | HTTPIterator:
-        """Get datasets list. DEPRECATED.
+        """
+        Get datasets list. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_datasets_list`
         instead.
@@ -2645,7 +2695,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_dataset(self, *args, **kwargs) -> Dataset:
-        """Get dataset. DEPRECATED.
+        """
+        Get dataset. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_dataset`
         instead.
@@ -2657,7 +2708,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_grant_dataset_view_access(self, *args, **kwargs) -> dict:
-        """Grant view access to dataset. DEPRECATED.
+        """
+        Grant view access to dataset. DEPRECATED.

         Please use
         :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_grant_dataset_view_access`
@@ -2670,7 +2722,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_table_upsert(self, *args, **kwargs) -> dict:
-        """Upsert table. DEPRECATED.
+        """
+        Upsert table. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_upsert`
         instead.
@@ -2682,7 +2735,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_table_delete(self, *args, **kwargs) -> None:
-        """Delete table. DEPRECATED.
+        """
+        Delete table. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_table_delete`
         instead.
@@ -2694,7 +2748,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_tabledata(self, *args, **kwargs) -> list[dict]:
-        """Get table data. DEPRECATED.
+        """
+        Get table data. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_tabledata`
         instead.
@@ -2706,7 +2761,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def get_schema(self, *args, **kwargs) -> dict:
-        """Get Schema. DEPRECATED.
+        """
+        Get Schema. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.get_schema`
         instead.
@@ -2718,7 +2774,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def poll_job_complete(self, *args, **kwargs) -> bool:
-        """Poll for job completion.DEPRECATED.
+        """
+        Poll for job completion.DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.poll_job_complete`
         instead.
@@ -2730,7 +2787,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def cancel_query(self, *args, **kwargs) -> None:
-        """Cancel query. DEPRECATED.
+        """
+        Cancel query. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.cancel_query`
         instead.
@@ -2742,7 +2800,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_with_configuration(self, *args, **kwargs) -> str:
-        """Run with configuration. DEPRECATED.
+        """
+        Run with configuration. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_with_configuration`
         instead.
@@ -2754,7 +2813,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_load(self, *args, **kwargs) -> str:
-        """Run load. DEPRECATED.
+        """
+        Run load. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_load`
         instead.
@@ -2766,7 +2826,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_copy(self, *args, **kwargs) -> str:
-        """Run copy. DEPRECATED.
+        """
+        Run copy. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_copy`
         instead.
@@ -2778,7 +2839,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_extract(self, *args, **kwargs) -> str | BigQueryJob:
-        """Run extraction. DEPRECATED.
+        """
+        Run extraction. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_extract`
         instead.
@@ -2790,7 +2852,8 @@ class BigQueryBaseCursor(LoggingMixin):
         category=AirflowProviderDeprecationWarning,
     )
     def run_query(self, *args, **kwargs) -> str:
-        """Run query. DEPRECATED.
+        """
+        Run query. DEPRECATED.

         Please use :func:`~airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.run_query`
         instead.
@@ -2799,7 +2862,8 @@ class BigQueryBaseCursor(LoggingMixin):


 class BigQueryCursor(BigQueryBaseCursor):
-    """A very basic BigQuery PEP 249 cursor implementation.
+    """
+    A very basic BigQuery PEP 249 cursor implementation.

     The PyHive PEP 249 implementation was used as a reference:

@@ -2849,7 +2913,8 @@ class BigQueryCursor(BigQueryBaseCursor):
         return -1

     def execute(self, operation: str, parameters: dict | None = None) -> None:
-        """Execute a BigQuery query, and update the BigQueryCursor description.
+        """
+        Execute a BigQuery query, and update the BigQueryCursor description.

         :param operation: The query to execute.
         :param parameters: Parameters to substitute into the query.
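
A sketch of the PEP 249 surface shown in these hunks; `hook.get_conn()` returns a `BigQueryConnection` whose `cursor()` yields the `BigQueryCursor`. The query and the pyformat-style parameter are placeholders:

```python
from airflow.providers.google.cloud.hooks.bigquery import BigQueryHook

hook = BigQueryHook(gcp_conn_id="google_cloud_default")
cursor = hook.get_conn().cursor()
cursor.execute(
    "SELECT name, value FROM `my-project.my_dataset.events` WHERE value > %(threshold)s",
    {"threshold": 0},
)
# fetchmany/fetchall behave per PEP 249; an empty sequence
# means no more rows are available.
for row in cursor.fetchmany(size=10):
    print(row)
```
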
@@ -2866,7 +2931,8 @@ class BigQueryCursor(BigQueryBaseCursor):
             self.description = []

     def executemany(self, operation: str, seq_of_parameters: list) -> None:
-        """Execute a BigQuery query multiple times with different parameters.
+        """
+        Execute a BigQuery query multiple times with different parameters.

         :param operation: The query to execute.
         :param seq_of_parameters: List of dictionary parameters to substitute into the
@@ -2887,7 +2953,8 @@ class BigQueryCursor(BigQueryBaseCursor):
         return self.next()

     def next(self) -> list | None:
-        """Return the next row from a buffer.
+        """
+        Return the next row from a buffer.

         Helper method for ``fetchone``.

@@ -2922,7 +2989,8 @@ class BigQueryCursor(BigQueryBaseCursor):
         return self.buffer.pop(0)

     def fetchmany(self, size: int | None = None) -> list:
-        """Fetch the next set of rows of a query result.
+        """
+        Fetch the next set of rows of a query result.

         This returns a sequence of sequences (e.g. a list of tuples). An empty
         sequence is returned when no more rows are available.
@@ -2950,7 +3018,8 @@ class BigQueryCursor(BigQueryBaseCursor):
         return result

     def fetchall(self) -> list[list]:
-        """Fetch all (remaining) rows of a query result.
+        """
+        Fetch all (remaining) rows of a query result.

         A sequence of sequences (e.g. a list of tuples) is returned.
         """
@@ -2958,14 +3027,16 @@ class BigQueryCursor(BigQueryBaseCursor):
         return result

     def get_arraysize(self) -> int:
-        """Get number of rows to fetch at a time.
+        """
+        Get number of rows to fetch at a time.

         .. seealso:: :func:`.fetchmany()`
         """
         return self.buffersize or 1

     def set_arraysize(self, arraysize: int) -> None:
-        """Set the number of rows to fetch at a time.
+        """
+        Set the number of rows to fetch at a time.

         .. seealso:: :func:`.fetchmany()`
         """
@@ -3282,7 +3353,8 @@ def _validate_src_fmt_configs(
     valid_configs: list[str],
     backward_compatibility_configs: dict | None = None,
 ) -> dict:
-    """Validate ``src_fmt_configs`` against a valid config for the source format.
+    """
+    Validate ``src_fmt_configs`` against a valid config for the source format.

     Adds the backward compatibility config to ``src_fmt_configs``.

@@ -3306,7 +3378,8 @@ def _validate_src_fmt_configs(


 def _format_schema_for_description(schema: dict) -> list:
-    """Reformat the schema to match cursor description standard.
+    """
+    Reformat the schema to match cursor description standard.

     The description should be a tuple of 7 elemenbts: name, type, display_size,
     internal_size, precision, scale, null_ok.
@@ -3473,7 +3546,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook):
         as_dict: bool = False,
         selected_fields: str | list[str] | None = None,
     ) -> list[Any]:
-        """Convert a response from BigQuery to records.
+        """
+        Convert a response from BigQuery to records.

         :param query_results: the results from a SQL query
         :param as_dict: if True returns the result as a list of dictionaries, otherwise as list of lists.
@@ -3503,7 +3577,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook):
         records: list[Any],
         tolerance: float | None = None,
     ) -> None:
-        """Match a single query resulting row and tolerance with pass_value.
+        """
+        Match a single query resulting row and tolerance with pass_value.

         :raise AirflowException: if matching fails
         """
@@ -3536,7 +3611,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook):
     def _get_numeric_matches(
         records: list[float], pass_value: Any, tolerance: float | None = None
     ) -> list[bool]:
-        """Match numeric pass_value, tolerance with records value.
+        """
+        Match numeric pass_value, tolerance with records value.

         :param records: List of value to match against
         :param pass_value: Expected value
@@ -3551,7 +3627,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook):

     @staticmethod
     def _convert_to_float_if_possible(s: Any) -> Any:
-        """Convert a string to a numeric value if appropriate.
+        """
+        Convert a string to a numeric value if appropriate.

         :param s: the string to be converted
         """
@@ -3568,7 +3645,8 @@ class BigQueryAsyncHook(GoogleBaseAsyncHook):
         ignore_zero: bool,
         ratio_formula: str,
     ) -> None:
-        """Check values of metrics (SQL expressions) are within a certain tolerance.
+        """
+        Check values of metrics (SQL expressions) are within a certain tolerance.

         :param row1: first resulting row of a query execution job for first SQL query
         :param row2: first resulting row of a query execution job for second SQL query
@@ -3667,7 +3745,8 @@ class BigQueryTableAsyncHook(GoogleBaseAsyncHook):
     async def get_table_client(
         self, dataset: str, table_id: str, project_id: str, session: ClientSession
     ) -> Table_async:
-        """Get a Google Big Query Table object.
+        """
+        Get a Google Big Query Table object.

         :param dataset: The name of the dataset in which to look for the table storage bucket.
         :param table_id: The name of the table to check the existence of.