apache-airflow-providers-common-sql 1.27.2rc1__py3-none-any.whl → 1.27.3rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-common-sql has been flagged as potentially problematic; consult the package's registry listing for details.

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "1.27.2"
32
+ __version__ = "1.27.3"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.10.0"
@@ -17,8 +17,8 @@
17
17
  from __future__ import annotations
18
18
 
19
19
  import re
20
- from collections.abc import Iterable, Mapping
21
- from typing import TYPE_CHECKING, Any, Callable, TypeVar
20
+ from collections.abc import Callable, Iterable, Mapping
21
+ from typing import TYPE_CHECKING, Any, TypeVar
22
22
 
23
23
  from methodtools import lru_cache
24
24
 
@@ -32,8 +32,8 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow.
32
32
  isort:skip_file
33
33
  """
34
34
 
35
- from collections.abc import Iterable, Mapping
36
- from typing import Any, Callable, TypeVar
35
+ from collections.abc import Callable, Iterable, Mapping
36
+ from typing import Any, TypeVar
37
37
 
38
38
  from _typeshed import Incomplete as Incomplete
39
39
  from sqlalchemy.engine import Inspector as Inspector
@@ -18,19 +18,11 @@ from __future__ import annotations
18
18
 
19
19
  import contextlib
20
20
  import warnings
21
- from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence
21
+ from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
22
22
  from contextlib import closing, contextmanager, suppress
23
23
  from datetime import datetime
24
24
  from functools import cached_property
25
- from typing import (
26
- TYPE_CHECKING,
27
- Any,
28
- Callable,
29
- Protocol,
30
- TypeVar,
31
- cast,
32
- overload,
33
- )
25
+ from typing import TYPE_CHECKING, Any, Literal, Protocol, TypeVar, cast, overload
34
26
  from urllib.parse import urlparse
35
27
 
36
28
  import sqlparse
@@ -40,7 +32,6 @@ from more_itertools import chunked
40
32
  from sqlalchemy import create_engine, inspect
41
33
  from sqlalchemy.engine import make_url
42
34
  from sqlalchemy.exc import ArgumentError, NoSuchModuleError
43
- from typing_extensions import Literal
44
35
 
45
36
  from airflow.configuration import conf
46
37
  from airflow.exceptions import (
@@ -48,9 +39,13 @@ from airflow.exceptions import (
48
39
  AirflowOptionalProviderFeatureException,
49
40
  AirflowProviderDeprecationWarning,
50
41
  )
51
- from airflow.hooks.base import BaseHook
52
42
  from airflow.providers.common.sql.dialects.dialect import Dialect
53
43
  from airflow.providers.common.sql.hooks import handlers
44
+
45
+ try:
46
+ from airflow.sdk import BaseHook
47
+ except ImportError:
48
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
54
49
  from airflow.utils.module_loading import import_string
55
50
 
56
51
  if TYPE_CHECKING:
@@ -58,10 +53,14 @@ if TYPE_CHECKING:
58
53
  from polars import DataFrame as PolarsDataFrame
59
54
  from sqlalchemy.engine import URL, Engine, Inspector
60
55
 
61
- from airflow.models import Connection
62
56
  from airflow.providers.openlineage.extractors import OperatorLineage
63
57
  from airflow.providers.openlineage.sqlparser import DatabaseInfo
64
58
 
59
+ try:
60
+ from airflow.sdk import Connection
61
+ except ImportError:
62
+ from airflow.models.connection import Connection # type: ignore[assignment]
63
+
65
64
 
66
65
  T = TypeVar("T")
67
66
  SQL_PLACEHOLDERS = frozenset({"%s", "?"})
@@ -278,7 +277,10 @@ class DbApiHook(BaseHook):
278
277
  db = self.connection
279
278
  if self.connector is None:
280
279
  raise RuntimeError(f"{type(self).__name__} didn't have `self.connector` set!")
281
- return self.connector.connect(host=db.host, port=db.port, username=db.login, schema=db.schema)
280
+ host = db.host or ""
281
+ login = db.login or ""
282
+ schema = db.schema or ""
283
+ return self.connector.connect(host=host, port=cast("int", db.port), username=login, schema=schema)
282
284
 
283
285
  def get_uri(self) -> str:
284
286
  """
@@ -425,7 +427,6 @@ class DbApiHook(BaseHook):
425
427
  df_type: Literal["pandas"] = "pandas",
426
428
  **kwargs: Any,
427
429
  ) -> PandasDataFrame: ...
428
-
429
430
  @overload
430
431
  def get_df(
431
432
  self,
@@ -522,7 +523,6 @@ class DbApiHook(BaseHook):
522
523
  df_type: Literal["pandas"] = "pandas",
523
524
  **kwargs,
524
525
  ) -> Generator[PandasDataFrame, None, None]: ...
525
-
526
526
  @overload
527
527
  def get_df_by_chunks(
528
528
  self,
@@ -32,12 +32,13 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow.
32
32
  isort:skip_file
33
33
  """
34
34
 
35
- from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence
35
+ from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
36
36
  from functools import cached_property as cached_property
37
- from typing import Any, Protocol, TypeVar
37
+ from typing import Any, Literal, Protocol, TypeVar, overload
38
38
 
39
39
  from _typeshed import Incomplete as Incomplete
40
40
  from pandas import DataFrame as PandasDataFrame
41
+ from polars import DataFrame as PolarsDataFrame
41
42
  from sqlalchemy.engine import URL as URL, Engine as Engine, Inspector as Inspector
42
43
 
43
44
  from airflow.hooks.base import BaseHook as BaseHook
@@ -115,6 +116,44 @@ class DbApiHook(BaseHook):
115
116
  def get_first(
116
117
  self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None
117
118
  ) -> Any: ...
119
+ @overload
120
+ def get_df(
121
+ self,
122
+ sql: str | list[str],
123
+ parameters: list | tuple | Mapping[str, Any] | None = None,
124
+ *,
125
+ df_type: Literal["pandas"] = "pandas",
126
+ **kwargs: Any,
127
+ ) -> PandasDataFrame: ...
128
+ @overload
129
+ def get_df(
130
+ self,
131
+ sql: str | list[str],
132
+ parameters: list | tuple | Mapping[str, Any] | None = None,
133
+ *,
134
+ df_type: Literal["polars"],
135
+ **kwargs: Any,
136
+ ) -> PolarsDataFrame: ...
137
+ @overload
138
+ def get_df_by_chunks(
139
+ self,
140
+ sql: str | list[str],
141
+ parameters: list | tuple | Mapping[str, Any] | None = None,
142
+ *,
143
+ chunksize: int,
144
+ df_type: Literal["pandas"] = "pandas",
145
+ **kwargs,
146
+ ) -> Generator[PandasDataFrame, None, None]: ...
147
+ @overload
148
+ def get_df_by_chunks(
149
+ self,
150
+ sql: str | list[str],
151
+ parameters: list | tuple | Mapping[str, Any] | None = None,
152
+ *,
153
+ chunksize: int,
154
+ df_type: Literal["polars"],
155
+ **kwargs,
156
+ ) -> Generator[PolarsDataFrame, None, None]: ...
118
157
  @staticmethod
119
158
  def strip_sql_string(sql: str) -> str: ...
120
159
  @staticmethod
@@ -146,3 +185,23 @@ class DbApiHook(BaseHook):
146
185
  @staticmethod
147
186
  def get_openlineage_authority_part(connection, default_port: int | None = None) -> str: ...
148
187
  def get_db_log_messages(self, conn) -> None: ...
188
+ @overload
189
+ def run(
190
+ self,
191
+ sql: str | Iterable[str],
192
+ autocommit: bool = ...,
193
+ parameters: Iterable | Mapping[str, Any] | None = ...,
194
+ handler: None = ...,
195
+ split_statements: bool = ...,
196
+ return_last: bool = ...,
197
+ ) -> None: ...
198
+ @overload
199
+ def run(
200
+ self,
201
+ sql: str | Iterable[str],
202
+ autocommit: bool = ...,
203
+ parameters: Iterable | Mapping[str, Any] | None = ...,
204
+ handler: Callable[[Any], T] = ...,
205
+ split_statements: bool = ...,
206
+ return_last: bool = ...,
207
+ ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None: ...
@@ -22,10 +22,14 @@ from functools import cached_property
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
24
  from airflow.exceptions import AirflowException
25
- from airflow.hooks.base import BaseHook
26
- from airflow.models import BaseOperator
27
25
  from airflow.providers.common.sql.hooks.sql import DbApiHook
28
26
  from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
27
+ from airflow.providers.common.sql.version_compat import BaseOperator
28
+
29
+ try:
30
+ from airflow.sdk import BaseHook
31
+ except ImportError:
32
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
29
33
 
30
34
  if TYPE_CHECKING:
31
35
  import jinja2
@@ -192,7 +196,7 @@ class GenericTransfer(BaseOperator):
192
196
  )
193
197
 
194
198
  self.log.info("Offset increased to %d", offset)
195
- self.xcom_push(context=context, key="offset", value=offset)
199
+ context["ti"].xcom_push(key="offset", value=offset)
196
200
 
197
201
  self.log.info("Inserting %d rows into %s", len(results), self.destination_conn_id)
198
202
  self.destination_hook.insert_rows(
@@ -19,17 +19,22 @@ from __future__ import annotations
19
19
 
20
20
  import ast
21
21
  import re
22
- from collections.abc import Iterable, Mapping, Sequence
22
+ from collections.abc import Callable, Iterable, Mapping, Sequence
23
23
  from functools import cached_property
24
- from typing import TYPE_CHECKING, Any, Callable, ClassVar, NoReturn, SupportsAbs
24
+ from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs
25
25
 
26
26
  from airflow.exceptions import AirflowException, AirflowFailException
27
- from airflow.hooks.base import BaseHook
28
- from airflow.models import BaseOperator, SkipMixin
27
+ from airflow.models import SkipMixin
29
28
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
30
29
  from airflow.providers.common.sql.hooks.sql import DbApiHook
30
+ from airflow.providers.common.sql.version_compat import BaseOperator
31
31
  from airflow.utils.helpers import merge_dicts
32
32
 
33
+ try:
34
+ from airflow.sdk import BaseHook
35
+ except ImportError:
36
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
37
+
33
38
  if TYPE_CHECKING:
34
39
  from airflow.providers.openlineage.extractors import OperatorLineage
35
40
  from airflow.utils.context import Context
@@ -16,14 +16,24 @@
16
16
  # under the License.
17
17
  from __future__ import annotations
18
18
 
19
- from collections.abc import Mapping, Sequence
19
+ from collections.abc import Callable, Mapping, Sequence
20
20
  from operator import itemgetter
21
- from typing import TYPE_CHECKING, Any, Callable
21
+ from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.exceptions import AirflowException
24
- from airflow.hooks.base import BaseHook
25
24
  from airflow.providers.common.sql.hooks.sql import DbApiHook
26
- from airflow.sensors.base import BaseSensorOperator
25
+
26
+ try:
27
+ from airflow.sdk import BaseHook
28
+ except ImportError:
29
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
30
+
31
+ from airflow.providers.common.sql.version_compat import AIRFLOW_V_3_0_PLUS
32
+
33
+ if AIRFLOW_V_3_0_PLUS:
34
+ from airflow.sdk import BaseSensorOperator
35
+ else:
36
+ from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
27
37
 
28
38
  if TYPE_CHECKING:
29
39
  from airflow.utils.context import Context
@@ -32,12 +32,18 @@ Definition of the public interface for airflow.providers.common.sql.src.airflow.
32
32
  isort:skip_file
33
33
  """
34
34
 
35
- from collections.abc import Mapping, Sequence
36
- from typing import Any, Callable
35
+ from collections.abc import Callable, Mapping, Sequence
36
+ from typing import Any
37
37
 
38
38
  from _typeshed import Incomplete as Incomplete
39
39
 
40
- from airflow.sensors.base import BaseSensorOperator as BaseSensorOperator
40
+ from airflow.providers.common.sql.version_compat import AIRFLOW_V_3_0_PLUS
41
+
42
+ if AIRFLOW_V_3_0_PLUS:
43
+ from airflow.sdk import BaseSensorOperator
44
+ else:
45
+ from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
46
+
41
47
  from airflow.utils.context import Context as Context
42
48
 
43
49
  class SqlSensor(BaseSensorOperator):
@@ -20,8 +20,12 @@ from __future__ import annotations
20
20
  from typing import TYPE_CHECKING
21
21
 
22
22
  from airflow.exceptions import AirflowException
23
- from airflow.hooks.base import BaseHook
24
23
  from airflow.providers.common.sql.hooks.sql import DbApiHook
24
+
25
+ try:
26
+ from airflow.sdk import BaseHook
27
+ except ImportError:
28
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
25
29
  from airflow.triggers.base import BaseTrigger, TriggerEvent
26
30
 
27
31
  if TYPE_CHECKING:
@@ -0,0 +1,47 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+ #
18
+ # NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
19
+ # DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
20
+ # ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
21
+ # THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
22
+ #
23
+ from __future__ import annotations
24
+
25
+
26
+ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
27
+ from packaging.version import Version
28
+
29
+ from airflow import __version__
30
+
31
+ airflow_version = Version(__version__)
32
+ return airflow_version.major, airflow_version.minor, airflow_version.micro
33
+
34
+
35
+ AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
36
+
37
+ if AIRFLOW_V_3_0_PLUS:
38
+ from airflow.sdk import BaseOperator, BaseSensorOperator
39
+ else:
40
+ from airflow.models import BaseOperator
41
+ from airflow.sensors.base import BaseSensorOperator # type: ignore[no-redef]
42
+
43
+ __all__ = [
44
+ "AIRFLOW_V_3_0_PLUS",
45
+ "BaseOperator",
46
+ "BaseSensorOperator",
47
+ ]
@@ -1,11 +1,11 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-common-sql
3
- Version: 1.27.2rc1
3
+ Version: 1.27.3rc1
4
4
  Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
5
5
  Keywords: airflow-provider,common.sql,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
7
7
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
8
- Requires-Python: ~=3.9
8
+ Requires-Python: ~=3.10
9
9
  Description-Content-Type: text/x-rst
10
10
  Classifier: Development Status :: 5 - Production/Stable
11
11
  Classifier: Environment :: Console
@@ -15,7 +15,6 @@ Classifier: Intended Audience :: System Administrators
15
15
  Classifier: Framework :: Apache Airflow
16
16
  Classifier: Framework :: Apache Airflow :: Provider
17
17
  Classifier: License :: OSI Approved :: Apache Software License
18
- Classifier: Programming Language :: Python :: 3.9
19
18
  Classifier: Programming Language :: Python :: 3.10
20
19
  Classifier: Programming Language :: Python :: 3.11
21
20
  Classifier: Programming Language :: Python :: 3.12
@@ -25,11 +24,12 @@ Requires-Dist: sqlparse>=0.5.1
25
24
  Requires-Dist: more-itertools>=9.0.0
26
25
  Requires-Dist: methodtools>=0.4.7
27
26
  Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
28
- Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas"
27
+ Requires-Dist: pandas>=2.1.2 ; extra == "pandas" and ( python_version <"3.13")
28
+ Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
29
29
  Requires-Dist: polars>=1.26.0 ; extra == "polars"
30
30
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
31
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.27.2/changelog.html
32
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.27.2
31
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.27.3/changelog.html
32
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.27.3
33
33
  Project-URL: Mastodon, https://fosstodon.org/@airflow
34
34
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
35
35
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -63,7 +63,7 @@ Provides-Extra: polars
63
63
 
64
64
  Package ``apache-airflow-providers-common-sql``
65
65
 
66
- Release: ``1.27.2``
66
+ Release: ``1.27.3``
67
67
 
68
68
 
69
69
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -76,7 +76,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
76
76
  are in ``airflow.providers.common.sql`` python package.
77
77
 
78
78
  You can find package information and changelog for the provider
79
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.2/>`_.
79
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.3/>`_.
80
80
 
81
81
  Installation
82
82
  ------------
@@ -85,7 +85,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
85
85
  for the minimum Airflow version supported) via
86
86
  ``pip install apache-airflow-providers-common-sql``
87
87
 
88
- The package supports the following python versions: 3.9,3.10,3.11,3.12
88
+ The package supports the following python versions: 3.10,3.11,3.12
89
89
 
90
90
  Requirements
91
91
  ------------
@@ -119,5 +119,5 @@ Dependent package
119
119
  ============================================================================================================== ===============
120
120
 
121
121
  The changelog for the provider package can be found in the
122
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.2/changelog.html>`_.
122
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.3/changelog.html>`_.
123
123
 
@@ -1,30 +1,31 @@
1
1
  airflow/providers/common/sql/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
2
2
  airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
3
- airflow/providers/common/sql/__init__.py,sha256=aME9qIOUQngZQL_-kcyOM69k00iBZsWm4sT95pTPvT8,1500
3
+ airflow/providers/common/sql/__init__.py,sha256=Iewiwy1VXOrC79hdtaKca4rPKbi696TGkf8F5WXKImo,1500
4
4
  airflow/providers/common/sql/get_provider_info.py,sha256=xCPXLKFA_1ilhGa0aB3E9ggdHtn9Do7Eb469begpZag,2767
5
5
  airflow/providers/common/sql/get_provider_info.pyi,sha256=0mydJPGQScnPpoa9-ohHVJFngFH6Lsk22KS243PE-gw,1596
6
+ airflow/providers/common/sql/version_compat.py,sha256=krcf7mICqAlaHCrhnYiWmbOvleDrBD2KBCWn-rdEbxw,1840
6
7
  airflow/providers/common/sql/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
7
- airflow/providers/common/sql/dialects/dialect.py,sha256=rV4frwvKeU25AydRFA0iJEzSDAjHBAQQl4a7OLpE2Tg,7698
8
- airflow/providers/common/sql/dialects/dialect.pyi,sha256=AWVQ0Q4Sn0aDq_MtQinFopbokQR-GIyXsg0oeK6NBxA,3513
8
+ airflow/providers/common/sql/dialects/dialect.py,sha256=lRzM0EU23tz99j5jmurYNfgcsqGFhPswpQIxMzkhkyc,7698
9
+ airflow/providers/common/sql/dialects/dialect.pyi,sha256=-4Rw4NVUrpsXvVzu745KQlLAqpbGZCWcbiSjGpRYnQc,3513
9
10
  airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
10
11
  airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
11
12
  airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md,sha256=DscUH0P3sgOpfXKPvtWpOkRXt8BI60FUxYnuwWpyLqM,2792
12
13
  airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
13
14
  airflow/providers/common/sql/hooks/handlers.py,sha256=XjvycIQsGpDrtg6RFACczybW_dER97RR6Z6B_S6jf6Y,3399
14
15
  airflow/providers/common/sql/hooks/handlers.pyi,sha256=3UDOBxvFi5dLzRlF2yCwlj8LuYgDFSKNLmCHhF_Qfik,1827
15
- airflow/providers/common/sql/hooks/sql.py,sha256=WmMut3VxA17vxtUtU_XXBq7MNa-HDLI3ln0B_5VlMjY,43565
16
- airflow/providers/common/sql/hooks/sql.pyi,sha256=YDNAAh2BMb34Nsx0nRRQEDZw_dcuNM2kb3gzwbxpyn0,6148
16
+ airflow/providers/common/sql/hooks/sql.py,sha256=ShdAeM04wZoHUoCCMLMdbU_njJ1KM9jaf9eVdLZSYzw,43824
17
+ airflow/providers/common/sql/hooks/sql.pyi,sha256=AQg0zRXrOGqlbP39yxefIPdNSpEOVJdnezVZL4miSNU,7961
17
18
  airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
18
- airflow/providers/common/sql/operators/generic_transfer.py,sha256=qwG-iKAVTPaxevfSMfe_T1_lXf2TgpDf918z0RXCF2o,8331
19
+ airflow/providers/common/sql/operators/generic_transfer.py,sha256=GrINfknN6La9p-vROdy76ywRaDAxOVyiL6f8IseEDvM,8458
19
20
  airflow/providers/common/sql/operators/generic_transfer.pyi,sha256=rhuCB7KSm_NutW8m3BNQmaoiUPDXp1fTrSeoR0Jr4dU,3330
20
- airflow/providers/common/sql/operators/sql.py,sha256=DZNljvIwDgSwUpmCWZoe1O8xulY8BURCzvP78t9q--I,50157
21
+ airflow/providers/common/sql/operators/sql.py,sha256=oI_8AJALUPwF4QKAykprGJbrugoyOZqtZH2BTuUVkPE,50318
21
22
  airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
22
- airflow/providers/common/sql/sensors/sql.py,sha256=iYcE8vxLbya8GncOydjceAhu43uoGsN7wqjeYFQ-cak,5471
23
- airflow/providers/common/sql/sensors/sql.pyi,sha256=GiOk2qD0PO5HWISgTTdOJQLC9b2ItzvQr68adXIbjGQ,2530
23
+ airflow/providers/common/sql/sensors/sql.py,sha256=Ojc-j0k0j9a8wT7rLrRBWYeUqlJkORDUwi5IgXrnDxw,5760
24
+ airflow/providers/common/sql/sensors/sql.pyi,sha256=gJ_WPbqsbFBZWKnZcRmaPfRYgpUb9_7_65PPc9VIymM,2691
24
25
  airflow/providers/common/sql/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
25
- airflow/providers/common/sql/triggers/sql.py,sha256=C6BEhJdypE_hrvrWU_jxJuOi5FbpQG4xJ0EYPn_fqR0,3665
26
+ airflow/providers/common/sql/triggers/sql.py,sha256=lrBivVd6g6AqqxotOU7sjns8tmCAH0WX2bkjKMFz8Jg,3771
26
27
  airflow/providers/common/sql/triggers/sql.pyi,sha256=7wVgfqUPJB7egsWwbZtwZV3TFm7DuKLclWetNInCM5w,1986
27
- apache_airflow_providers_common_sql-1.27.2rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
28
- apache_airflow_providers_common_sql-1.27.2rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
29
- apache_airflow_providers_common_sql-1.27.2rc1.dist-info/METADATA,sha256=yKOVHW5chsOYvbCcmlwaOy4oiOAmB050A3taCVu9-Js,5366
30
- apache_airflow_providers_common_sql-1.27.2rc1.dist-info/RECORD,,
28
+ apache_airflow_providers_common_sql-1.27.3rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
29
+ apache_airflow_providers_common_sql-1.27.3rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
30
+ apache_airflow_providers_common_sql-1.27.3rc1.dist-info/METADATA,sha256=2PEIAnnBYjVFyOHQdyNN3pQ_xkmFY-1XuEOz0dGS5A8,5418
31
+ apache_airflow_providers_common_sql-1.27.3rc1.dist-info/RECORD,,