apache-airflow-providers-common-sql 1.27.0rc1 (py3-none-any.whl) → 1.27.1 (py3-none-any.whl)

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


Changes to airflow/providers/common/sql/__init__.py:

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.27.0"
+__version__ = "1.27.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
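The guard in the context lines above (unchanged by this release) is the usual provider pattern for enforcing a minimum Airflow version: the installed version is reduced to its base release before comparison, so pre-releases such as 2.10.0rc1 still satisfy the 2.10.0 floor. A minimal sketch of the same comparison, using only the packaging library (the hard-coded version string is illustrative):

    import packaging.version

    airflow_version = "2.10.0rc1"  # illustrative; the provider reads airflow.__version__
    base = packaging.version.parse(packaging.version.parse(airflow_version).base_version)
    print(base < packaging.version.parse("2.10.0"))  # False: the rc's base version 2.10.0 meets the floor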

Changes to airflow/providers/common/sql/hooks/sql.py:

@@ -54,7 +54,7 @@ from airflow.providers.common.sql.hooks import handlers
 from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
-    from pandas import DataFrame
+    from pandas import DataFrame as PandasDataFrame
     from polars import DataFrame as PolarsDataFrame
     from sqlalchemy.engine import URL, Engine, Inspector
 
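Aliasing the pandas import to PandasDataFrame removes the ambiguity with polars.DataFrame now that both types appear in annotations, and keeping the imports under TYPE_CHECKING means neither library is required at runtime unless its optional extra is installed. A minimal sketch of the same pattern (the helper function is hypothetical, for illustration only):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:  # evaluated only by type checkers, so pandas stays an optional dependency
        from pandas import DataFrame as PandasDataFrame

    def rows_to_frame(rows: list[dict]) -> PandasDataFrame:  # hypothetical helper
        import pandas as pd  # deferred import: fails only if the "pandas" extra is missing

        return pd.DataFrame(rows)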
@@ -391,7 +391,7 @@ class DbApiHook(BaseHook):
         sql,
         parameters: list | tuple | Mapping[str, Any] | None = None,
         **kwargs,
-    ) -> DataFrame:
+    ) -> PandasDataFrame:
         """
         Execute the sql and returns a pandas dataframe.
 
@@ -399,7 +399,7 @@ class DbApiHook(BaseHook):
         :param parameters: The parameters to render the SQL query with.
         :param kwargs: (optional) passed into pandas.io.sql.read_sql method
         """
-        return self._get_pandas_df(sql, parameters, **kwargs)
+        return self.get_df(sql, parameters, df_type="pandas", **kwargs)
 
     @deprecated(
         reason="Replaced by function `get_df_by_chunks`.",
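Because get_pandas_df now simply forwards to get_df with df_type="pandas", existing callers can migrate without a behaviour change. An illustrative migration, assuming a concrete DbApiHook subclass such as PostgresHook from the postgres provider, an existing postgres_default connection, and a made-up table name:

    from airflow.providers.postgres.hooks.postgres import PostgresHook

    hook = PostgresHook(postgres_conn_id="postgres_default")

    # Deprecated spelling: still works in 1.27.x, but routes through get_df and warns.
    df_old = hook.get_pandas_df("SELECT * FROM my_table")

    # Preferred spelling going forward.
    df_new = hook.get_df("SELECT * FROM my_table", df_type="pandas")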
@@ -413,17 +413,37 @@ class DbApiHook(BaseHook):
         *,
         chunksize: int,
         **kwargs,
-    ) -> Generator[DataFrame, None, None]:
-        return self._get_pandas_df_by_chunks(sql, parameters, chunksize=chunksize, **kwargs)
+    ) -> Generator[PandasDataFrame, None, None]:
+        return self.get_df_by_chunks(sql, parameters, chunksize=chunksize, df_type="pandas", **kwargs)
 
+    @overload
     def get_df(
         self,
-        sql,
+        sql: str | list[str],
+        parameters: list | tuple | Mapping[str, Any] | None = None,
+        *,
+        df_type: Literal["pandas"] = "pandas",
+        **kwargs: Any,
+    ) -> PandasDataFrame: ...
+
+    @overload
+    def get_df(
+        self,
+        sql: str | list[str],
+        parameters: list | tuple | Mapping[str, Any] | None = None,
+        *,
+        df_type: Literal["polars"],
+        **kwargs: Any,
+    ) -> PolarsDataFrame: ...
+
+    def get_df(
+        self,
+        sql: str | list[str],
         parameters: list | tuple | Mapping[str, Any] | None = None,
         *,
         df_type: Literal["pandas", "polars"] = "pandas",
         **kwargs,
-    ) -> DataFrame | PolarsDataFrame:
+    ) -> PandasDataFrame | PolarsDataFrame:
         """
         Execute the sql and returns a dataframe.
 
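The two @overload signatures added above exist only for type checkers; the third, undecorated get_df remains the single runtime implementation. The practical effect is that the return type is narrowed from the df_type literal instead of staying the PandasDataFrame | PolarsDataFrame union. A sketch of what mypy/pyright now infer, using the same illustrative hook as before:

    from airflow.providers.postgres.hooks.postgres import PostgresHook

    hook = PostgresHook(postgres_conn_id="postgres_default")  # any DbApiHook subclass behaves the same

    pdf = hook.get_df("SELECT 1 AS x")                     # df_type defaults to "pandas" -> pandas DataFrame
    pldf = hook.get_df("SELECT 1 AS x", df_type="polars")  # "polars" literal -> polars DataFrame
    # reveal_type(pdf) / reveal_type(pldf) now show the concrete frame types rather than a union.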
@@ -442,7 +462,7 @@ class DbApiHook(BaseHook):
         sql,
         parameters: list | tuple | Mapping[str, Any] | None = None,
         **kwargs,
-    ) -> DataFrame:
+    ) -> PandasDataFrame:
         """
         Execute the sql and returns a pandas dataframe.
 
@@ -492,15 +512,37 @@ class DbApiHook(BaseHook):
 
         return pl.read_database(sql, connection=conn, execute_options=execute_options, **kwargs)
 
+    @overload
     def get_df_by_chunks(
         self,
-        sql,
+        sql: str | list[str],
+        parameters: list | tuple | Mapping[str, Any] | None = None,
+        *,
+        chunksize: int,
+        df_type: Literal["pandas"] = "pandas",
+        **kwargs,
+    ) -> Generator[PandasDataFrame, None, None]: ...
+
+    @overload
+    def get_df_by_chunks(
+        self,
+        sql: str | list[str],
+        parameters: list | tuple | Mapping[str, Any] | None = None,
+        *,
+        chunksize: int,
+        df_type: Literal["polars"],
+        **kwargs,
+    ) -> Generator[PolarsDataFrame, None, None]: ...
+
+    def get_df_by_chunks(
+        self,
+        sql: str | list[str],
         parameters: list | tuple | Mapping[str, Any] | None = None,
         *,
         chunksize: int,
         df_type: Literal["pandas", "polars"] = "pandas",
         **kwargs,
-    ) -> Generator[DataFrame | PolarsDataFrame, None, None]:
+    ) -> Generator[PandasDataFrame | PolarsDataFrame, None, None]:
         """
         Execute the sql and return a generator.
 
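get_df_by_chunks gains the same pair of overloads. chunksize is keyword-only and the method returns a generator, so large result sets can be streamed rather than materialised in one frame. An illustrative chunked read, with the same hook and table assumptions as the earlier examples:

    from airflow.providers.postgres.hooks.postgres import PostgresHook

    hook = PostgresHook(postgres_conn_id="postgres_default")

    total_rows = 0
    for chunk in hook.get_df_by_chunks("SELECT * FROM big_table", chunksize=10_000):
        # Each chunk is a pandas DataFrame by default; pass df_type="polars" for polars frames.
        total_rows += len(chunk)
    print(total_rows)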
@@ -522,7 +564,7 @@ class DbApiHook(BaseHook):
         *,
         chunksize: int,
         **kwargs,
-    ) -> Generator[DataFrame, None, None]:
+    ) -> Generator[PandasDataFrame, None, None]:
         """
         Execute the sql and return a generator.
 

Changes to airflow/providers/common/sql/hooks/sql.pyi:

@@ -34,13 +34,11 @@ isort:skip_file
 
 from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence
 from functools import cached_property as cached_property
-from typing import Any, Callable, Protocol, TypeVar, overload
+from typing import Any, Protocol, TypeVar
 
 from _typeshed import Incomplete as Incomplete
 from pandas import DataFrame as PandasDataFrame
-from polars import DataFrame as PolarsDataFrame
 from sqlalchemy.engine import URL as URL, Engine as Engine, Inspector as Inspector
-from typing_extensions import Literal
 
 from airflow.hooks.base import BaseHook as BaseHook
 from airflow.models import Connection as Connection
@@ -111,61 +109,6 @@ class DbApiHook(BaseHook):
     def get_pandas_df_by_chunks(
         self, sql, parameters: list | tuple | Mapping[str, Any] | None = None, *, chunksize: int, **kwargs
     ) -> Generator[PandasDataFrame, None, None]: ...
-    @overload
-    def get_df(
-        self,
-        sql: str | list[str],
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        df_type: Literal["pandas"] = "pandas",
-        **kwargs: Any,
-    ) -> PandasDataFrame: ...
-    @overload
-    def get_df(
-        self,
-        sql: str | list[str],
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        df_type: Literal["polars"] = "polars",
-        **kwargs: Any,
-    ) -> PolarsDataFrame: ...
-    @overload
-    def get_df(  # fallback overload
-        self,
-        sql: str | list[str],
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        df_type: Literal["pandas", "polars"] = "pandas",
-    ) -> PandasDataFrame | PolarsDataFrame: ...
-    @overload
-    def get_df_by_chunks(
-        self,
-        sql,
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        chunksize: int,
-        df_type: Literal["pandas"] = "pandas",
-        **kwargs,
-    ) -> Generator[PandasDataFrame, None, None]: ...
-    @overload
-    def get_df_by_chunks(
-        self,
-        sql,
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        chunksize: int,
-        df_type: Literal["polars"],
-        **kwargs,
-    ) -> Generator[PolarsDataFrame, None, None]: ...
-    @overload
-    def get_df_by_chunks(  # fallback overload
-        self,
-        sql,
-        parameters: list | tuple | Mapping[str, Any] | None = None,
-        *,
-        chunksize: int,
-        df_type: Literal["pandas", "polars"] = "pandas",
-    ) -> Generator[PandasDataFrame | PolarsDataFrame, None, None]: ...
     def get_records(
         self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None
     ) -> Any: ...
@@ -178,26 +121,6 @@ class DbApiHook(BaseHook):
     def split_sql_string(sql: str, strip_semicolon: bool = False) -> list[str]: ...
     @property
    def last_description(self) -> Sequence[Sequence] | None: ...
-    @overload
-    def run(
-        self,
-        sql: str | Iterable[str],
-        autocommit: bool = ...,
-        parameters: Iterable | Mapping[str, Any] | None = ...,
-        handler: None = ...,
-        split_statements: bool = ...,
-        return_last: bool = ...,
-    ) -> None: ...
-    @overload
-    def run(
-        self,
-        sql: str | Iterable[str],
-        autocommit: bool = ...,
-        parameters: Iterable | Mapping[str, Any] | None = ...,
-        handler: Callable[[Any], T] = ...,
-        split_statements: bool = ...,
-        return_last: bool = ...,
-    ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None: ...
     def set_autocommit(self, conn, autocommit) -> None: ...
     def get_autocommit(self, conn) -> bool: ...
     def get_cursor(self) -> Any: ...
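This release drops the get_df, get_df_by_chunks and run overloads from the stub (along with the now-unused Callable, overload and Literal imports), leaving single signatures there, while the runtime module gains explicit overloads for get_df and get_df_by_chunks as shown earlier. For reference, the @overload mechanism itself works roughly like this; the example is a generic toy function, not the provider's code:

    from typing import Literal, overload

    @overload
    def parse(value: str, as_int: Literal[True]) -> int: ...
    @overload
    def parse(value: str, as_int: Literal[False] = False) -> str: ...
    def parse(value: str, as_int: bool = False):
        # The decorated stubs above exist only for type checkers; this is the one real implementation.
        return int(value) if as_int else value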

Changes to the distribution METADATA:

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.27.0rc1
+Version: 1.27.1
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,7 +20,7 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow>=2.10.0
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: methodtools>=0.4.7
@@ -28,8 +28,8 @@ Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas"
 Requires-Dist: polars>=1.26.0 ; extra == "polars"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.1
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
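On the packaging side, the apache-airflow floor drops its rc marker and the version-specific project URLs move to 1.27.1; pandas and polars remain optional extras, so for example pip install "apache-airflow-providers-common-sql[pandas,polars]==1.27.1" pulls in both dataframe backends. Once installed, the same metadata can be read back with the standard library; a small sketch, assuming the 1.27.1 wheel is installed:

    from importlib.metadata import metadata, version

    print(version("apache-airflow-providers-common-sql"))   # 1.27.1
    md = metadata("apache-airflow-providers-common-sql")
    print(md.get_all("Requires-Dist"))    # includes apache-airflow>=2.10.0, sqlparse>=0.5.1, ...
    print(md.get_all("Provides-Extra"))   # includes the optional "pandas" and "polars" extras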
@@ -63,7 +63,7 @@ Provides-Extra: polars
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.27.0``
+Release: ``1.27.1``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -76,7 +76,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.1/>`_.
 
 Installation
 ------------
@@ -119,5 +119,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.1/changelog.html>`_.
 

Changes to the wheel RECORD:

@@ -1,6 +1,6 @@
 airflow/providers/common/sql/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
 airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=1fjlO-pK_nT17oF_-bhTA0xjWEMvmFYDYrXVDAfJy4M,1500
+airflow/providers/common/sql/__init__.py,sha256=CWHhzs8kjka2JJp7ySeiSHm0RmSweUpj1YWWb4UAHLc,1500
 airflow/providers/common/sql/get_provider_info.py,sha256=xCPXLKFA_1ilhGa0aB3E9ggdHtn9Do7Eb469begpZag,2767
 airflow/providers/common/sql/get_provider_info.pyi,sha256=0mydJPGQScnPpoa9-ohHVJFngFH6Lsk22KS243PE-gw,1596
 airflow/providers/common/sql/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
@@ -12,8 +12,8 @@ airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapih
 airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/hooks/handlers.py,sha256=XjvycIQsGpDrtg6RFACczybW_dER97RR6Z6B_S6jf6Y,3399
 airflow/providers/common/sql/hooks/handlers.pyi,sha256=3UDOBxvFi5dLzRlF2yCwlj8LuYgDFSKNLmCHhF_Qfik,1827
-airflow/providers/common/sql/hooks/sql.py,sha256=1zgwVrP93mtwRGa4SmgMcj1Vhq82YSGfEkYT-2zD9ZA,42358
-airflow/providers/common/sql/hooks/sql.pyi,sha256=cQWV2jQ6IEnOp3Ajbg220AbXu2gn5aPQa5F77Wilg30,8565
+airflow/providers/common/sql/hooks/sql.py,sha256=WmMut3VxA17vxtUtU_XXBq7MNa-HDLI3ln0B_5VlMjY,43565
+airflow/providers/common/sql/hooks/sql.pyi,sha256=YDNAAh2BMb34Nsx0nRRQEDZw_dcuNM2kb3gzwbxpyn0,6148
 airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/operators/generic_transfer.py,sha256=EEKKMcOOLl0vyjXRHBI6UuoFLqpHpeHEj9zwMTrNAA4,8336
 airflow/providers/common/sql/operators/generic_transfer.pyi,sha256=rhuCB7KSm_NutW8m3BNQmaoiUPDXp1fTrSeoR0Jr4dU,3330
@@ -24,7 +24,7 @@ airflow/providers/common/sql/sensors/sql.pyi,sha256=GiOk2qD0PO5HWISgTTdOJQLC9b2I
 airflow/providers/common/sql/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/triggers/sql.py,sha256=C6BEhJdypE_hrvrWU_jxJuOi5FbpQG4xJ0EYPn_fqR0,3665
 airflow/providers/common/sql/triggers/sql.pyi,sha256=7wVgfqUPJB7egsWwbZtwZV3TFm7DuKLclWetNInCM5w,1986
-apache_airflow_providers_common_sql-1.27.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.27.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_common_sql-1.27.0rc1.dist-info/METADATA,sha256=jrfkGHOgkIXYbAiIF41X4s9qmxzIg-qJzzimugZDa9s,5352
-apache_airflow_providers_common_sql-1.27.0rc1.dist-info/RECORD,,
+apache_airflow_providers_common_sql-1.27.1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.27.1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_common_sql-1.27.1.dist-info/METADATA,sha256=osasvsv-TZitBN_YktjMJfOchDlDnTVUkX1g5HFoTQ0,5346
+apache_airflow_providers_common_sql-1.27.1.dist-info/RECORD,,
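RECORD lines have the form path,sha256=<urlsafe-base64 digest with padding stripped>,<size in bytes>, so the entries above change only where files actually changed: __init__.py, hooks/sql.py, hooks/sql.pyi, METADATA, and the renamed dist-info directory. A hash in this format can be recomputed from an unpacked wheel roughly as follows (the path is illustrative; point it at any file listed in RECORD):

    import base64
    import hashlib

    path = "airflow/providers/common/sql/hooks/sql.py"  # illustrative: any file listed in RECORD
    data = open(path, "rb").read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    print(f"{path},sha256={digest},{len(data)}")  # should match the corresponding RECORD line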