apache-airflow-providers-common-sql 1.27.5__py3-none-any.whl → 1.28.0rc1__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their respective public registries.
@@ -30,7 +30,7 @@ The approach we take is similar to one that has been applied by Android OS team
 and it is based on storing the current version of API and flagging changes that are potentially breaking.
 This is done by comparing the previous API (store in stub files) and the upcoming API from the PR.
 The upcoming API is automatically extracted from `common.sql` Python files using `update-common-sql-api-stubs`
-pre-commit using mypy `stubgen` and stored as `.pyi` files in the `airflow.providers.common.sql` package.
+prek hook using mypy `stubgen` and stored as `.pyi` files in the `airflow.providers.common.sql` package.
 We also post-process the `.pyi` files to add some historically exposed methods that should be also
 considered as public API.
 
@@ -40,22 +40,22 @@ to review the changes and manually regenerate the stub files.
 The details of the workflow are as follows:
 
 1) The previous API is stored in the (committed to repository) stub files.
-2) Every time when common.sql Python files are modified the `update-common-sql-api-stubs` pre-commit
+2) Whenever common.sql Python files are modified the `update-common-sql-api-stubs` prek hook
 regenerates the stubs (including post-processing it) and looks for potentially breaking changes
 (removals or updates of the existing classes/methods).
-3) If the check reveals there are no changes to the API, nothing happens, pre-commit succeeds.
-4) If there are only additions, the pre-commit automatically updates the stub files,
-asks the contributor to commit resulting updates and fails the pre-commit. This is very similar to
+3) If the check reveals there are no changes to the API, nothing happens, prek hook succeeds.
+4) If there are only additions, the prek hook automatically updates the stub files,
+asks the contributor to commit resulting updates and fails the prek hook. This is very similar to
 other static checks that automatically modify/fix source code.
-5) If the pre-commit detects potentially breaking changes, the process is a bit more involved for the
-contributor. The pre-commit flags such changes to the contributor by failing the pre-commit and
+5) If the prek hook detects potentially breaking changes, the process is a bit more involved for the
+contributor. The prek hook flags such changes to the contributor by failing the prek hook and
 asks the contributor to review the change looking specifically for breaking compatibility with previous
 providers (and fix any backwards compatibility). Once this is completed, the contributor is asked to
-manually and explicitly regenerate and commit the new version of the stubs by running the pre-commit
+manually and explicitly regenerate and commit the new version of the stubs by running the prek hook
 with manually added environment variable:
 
 ```shell
-UPDATE_COMMON_SQL_API=1 pre-commit run update-common-sql-api-stubs
+UPDATE_COMMON_SQL_API=1 prek update-common-sql-api-stubs
 ```
 
 # Verifying other providers to use only public API of the `common.sql` provider
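For context, the stubs being compared are ordinary mypy `stubgen`-style `.pyi` files. A minimal sketch of the shape such a stub takes (class and method names here are illustrative, not taken from the actual package):

```python
# Illustrative .pyi stub: signatures only, bodies elided with "...",
# as emitted by mypy's stubgen and post-processed by the hook.
from collections.abc import Iterable
from typing import Any

class ExampleSQLHook:
    def get_records(self, sql: str, parameters: Iterable | None = ...) -> list[Any]: ...
    def run(self, sql: str | Iterable[str], autocommit: bool = ...) -> Any: ...
```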
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.27.5"
+__version__ = "1.28.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.dialects.dialect
-isort:skip_file
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.dialects.dialect.
 """
 
 from collections.abc import Callable, Iterable, Mapping
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.get_provider_info
-isort:skip_file
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.get_provider_info.
 """
 
 def get_provider_info() -> None: ...
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.handlers
-isort:skip_file
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.handlers.
 """
 
 from collections.abc import Iterable
@@ -332,7 +332,7 @@ class DbApiHook(BaseHook):
     def dialect_name(self) -> str:
         try:
             return make_url(self.get_uri()).get_dialect().name
-        except (ArgumentError, NoSuchModuleError):
+        except (ArgumentError, NoSuchModuleError, ValueError):
             config = self.connection_extra
             sqlalchemy_scheme = config.get("sqlalchemy_scheme")
             if sqlalchemy_scheme:
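The newly caught `ValueError` covers cases where SQLAlchemy's `make_url` fails with a plain `ValueError` rather than `ArgumentError`; depending on the SQLAlchemy version this can happen, for example, for a URI with a non-numeric port. A hedged sketch (the URI is made up):

```python
# Sketch of the failure mode the extra except clause guards against.
# Depending on the SQLAlchemy version, make_url() may raise ValueError
# (e.g. "invalid literal for int()") instead of ArgumentError here.
from sqlalchemy.engine.url import make_url
from sqlalchemy.exc import ArgumentError

try:
    name = make_url("postgresql://user:pass@host:not-a-port/db").get_dialect().name
except (ArgumentError, ValueError):
    name = "default"  # fall through to connection-extra resolution, as the hook now does
print(name)
```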
@@ -810,6 +810,9 @@ class DbApiHook(BaseHook):
             self.log.info("Running statement: %s, parameters: %s", sql_statement, parameters)
 
         if parameters:
+            # If we're using psycopg3, we might need to handle parameters differently
+            if hasattr(cur, "__module__") and "psycopg" in cur.__module__ and isinstance(parameters, list):
+                parameters = tuple(parameters)
             cur.execute(sql_statement, parameters)
         else:
             cur.execute(sql_statement)
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.sql
-isort:skip_file
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.sql.
 """
 
 from collections.abc import Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
@@ -56,6 +56,7 @@ class GenericTransfer(BaseOperator):
         executed prior to loading the data. (templated)
     :param insert_args: extra params for `insert_rows` method.
     :param page_size: number of records to be read in paginated mode (optional).
+    :param paginated_sql_statement_clause: SQL statement clause to be used for pagination (optional).
     """
 
     template_fields: Sequence[str] = (
@@ -65,6 +66,8 @@ class GenericTransfer(BaseOperator):
         "destination_table",
         "preoperator",
         "insert_args",
+        "page_size",
+        "paginated_sql_statement_clause",
     )
     template_ext: Sequence[str] = (
         ".sql",
@@ -85,6 +88,7 @@ class GenericTransfer(BaseOperator):
         preoperator: str | list[str] | None = None,
         insert_args: dict | None = None,
         page_size: int | None = None,
+        paginated_sql_statement_clause: str | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -97,9 +101,7 @@ class GenericTransfer(BaseOperator):
         self.preoperator = preoperator
         self.insert_args = insert_args or {}
         self.page_size = page_size
-        self._paginated_sql_statement_format = kwargs.get(
-            "paginated_sql_statement_format", "{} LIMIT {} OFFSET {}"
-        )
+        self.paginated_sql_statement_clause = paginated_sql_statement_clause or "{} LIMIT {} OFFSET {}"
 
     @classmethod
     def get_hook(cls, conn_id: str, hook_params: dict | None = None) -> DbApiHook:
@@ -126,7 +128,7 @@ class GenericTransfer(BaseOperator):
 
     def get_paginated_sql(self, offset: int) -> str:
         """Format the paginated SQL statement using the current format."""
-        return self._paginated_sql_statement_format.format(self.sql, self.page_size, offset)
+        return self.paginated_sql_statement_clause.format(self.sql, self.page_size, offset)
 
     def render_template_fields(
         self,
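Taken together, pagination is now configured through a public, templated constructor argument instead of the former private `paginated_sql_statement_format` kwarg. A hypothetical usage sketch (task ID, connection IDs, table names, and SQL are made up):

```python
from airflow.providers.common.sql.operators.generic_transfer import GenericTransfer

transfer = GenericTransfer(
    task_id="copy_orders",               # hypothetical task ID
    source_conn_id="source_db",          # hypothetical connections
    destination_conn_id="warehouse_db",
    sql="SELECT * FROM orders",
    destination_table="orders_copy",
    page_size=1000,
    # The clause is formatted as {sql} {page_size} {offset};
    # "{} LIMIT {} OFFSET {}" is the default when the argument is omitted.
    paginated_sql_statement_clause="{} LIMIT {} OFFSET {}",
)
```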
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -27,10 +27,7 @@
 #
 # You can read more in the README_API.md file
 #
-"""
-Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer
-isort:skip_file
-"""
+"""Definition of the public interface for airflow.providers.common.sql.operators.generic_transfer."""
 
 from collections.abc import Sequence
 from functools import cached_property as cached_property
@@ -23,7 +23,8 @@ from collections.abc import Callable, Iterable, Mapping, Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs
 
-from airflow.exceptions import AirflowException, AirflowFailException
+from airflow import XComArg
+from airflow.exceptions import AirflowException, AirflowFailException, AirflowSkipException
 from airflow.models import SkipMixin
 from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
 from airflow.providers.common.sql.hooks.sql import DbApiHook
@@ -31,6 +32,8 @@ from airflow.providers.common.sql.version_compat import BaseHook, BaseOperator
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
+    import jinja2
+
     from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.utils.context import Context
 
@@ -1252,6 +1255,135 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
         self.skip_all_except(context["ti"], follow_branch)
 
 
+class SQLInsertRowsOperator(BaseSQLOperator):
+    """
+    Insert rows (e.g. a collection of tuples) into a database table directly from an XCom or Python data structure.
+
+    :param table: the name of the table in which the rows will be inserted (templated).
+    :param conn_id: the connection ID used to connect to the database
+    :param schema: (optional) the name of schema in which the table is defined
+    :param database: name of database (e.g. schema) which overwrite the defined one in connection
+    :param columns: (optional) specify a list of columns being used for the insert when passing a list of
+        dictionaries.
+    :param ignore_columns: (optional) specify a list of columns being ignored for the insert. If no columns
+        where specified, the columns will be resolved dynamically from the metadata.
+    :param rows: the rows to insert into the table. Rows can be a list of tuples or a list of dictionaries.
+        When a list of dictionaries is provided, the column names are inferred from the dictionary keys and
+        will be matched with the column names, ignored columns will be filtered out.
+    :rows_processor: (optional) a function that will be applied to the rows before inserting them into the table.
+    :param preoperator: sql statement or list of statements to be executed prior to loading the data. (templated)
+    :param postoperator: sql statement or list of statements to be executed after loading the data. (templated)
+    :param insert_args: (optional) dictionary of additional arguments passed to the underlying hook's
+        `insert_rows` method. This allows you to configure options such as `replace`, `executemany`,
+        `fast_executemany`, and `autocommit`.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:SQLInsertRowsOperator`
+    """
+
+    template_fields: Sequence[str] = (
+        "table_name",
+        "conn_id",
+        "schema",
+        "database",
+        "_columns",
+        "ignored_columns",
+        "preoperator",
+        "postoperator",
+        "insert_args",
+    )
+    template_ext: Sequence[str] = (".sql",)
+    template_fields_renderers = {"preoperator": "sql"}
+
+    def __init__(
+        self,
+        *,
+        table_name: str,
+        conn_id: str | None = None,
+        schema: str | None = None,
+        database: str | None = None,
+        columns: Iterable[str] | None = None,
+        ignored_columns: Iterable[str] | None = None,
+        rows: list[Any] | XComArg | None = None,
+        rows_processor: Callable[[Any, Context], Any] = lambda rows, **context: rows,
+        preoperator: str | list[str] | None = None,
+        postoperator: str | list[str] | None = None,
+        hook_params: dict | None = None,
+        insert_args: dict | None = None,
+        **kwargs,
+    ):
+        super().__init__(
+            conn_id=conn_id,
+            database=database,
+            hook_params=hook_params,
+            **kwargs,
+        )
+        self.table_name = table_name
+        self.schema = schema
+        self._columns: list | None = list(columns) if columns else None
+        self.ignored_columns = set(ignored_columns or {})
+        self.rows = rows or []
+        self._rows_processor = rows_processor
+        self.preoperator = preoperator
+        self.postoperator = postoperator
+        self.insert_args = insert_args or {}
+        self.do_xcom_push = False
+
+    def render_template_fields(
+        self,
+        context: Context,
+        jinja_env: jinja2.Environment | None = None,
+    ) -> None:
+        super().render_template_fields(context=context, jinja_env=jinja_env)
+
+        if isinstance(self.rows, XComArg):
+            self.rows = self.rows.resolve(context=context)
+
+    @property
+    def table_name_with_schema(self) -> str:
+        if self.schema is not None:
+            return f"{self.schema}.{self.table_name}"
+        return self.table_name
+
+    @cached_property
+    def columns(self):
+        if self._columns is None:
+            self._columns = self.get_db_hook().dialect.get_column_names(self.table_name_with_schema)
+        return self._columns
+
+    @property
+    def column_names(self) -> list[str]:
+        if self.ignored_columns:
+            return [column for column in self.columns if column not in self.ignored_columns]
+        return self.columns
+
+    def _process_rows(self, context: Context):
+        return self._rows_processor(context, self.rows)  # type: ignore
+
+    def execute(self, context: Context) -> Any:
+        if not self.rows:
+            raise AirflowSkipException(f"Skipping task {self.task_id} because no rows.")
+
+        self.log.debug("Table: %s", self.table_name_with_schema)
+        self.log.debug("Column names: %s", self.column_names)
+        if self.preoperator:
+            self.log.debug("Running preoperator")
+            self.log.debug(self.preoperator)
+            self.get_db_hook().run(self.preoperator)
+        rows = self._process_rows(context=context)
+        self.get_db_hook().insert_rows(
+            table=self.table_name_with_schema,
+            rows=rows,
+            target_fields=self.column_names,
+            **self.insert_args,
+        )
+        if self.postoperator:
+            self.log.debug("Running postoperator")
+            self.log.debug(self.postoperator)
+            self.get_db_hook().run(self.postoperator)
+
+
 def _initialize_partition_clause(clause: str | None) -> str | None:
     """Ensure the partition_clause contains only valid patterns."""
     if clause is None:
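A hypothetical usage sketch of the new `SQLInsertRowsOperator` (connection ID, table, and rows are made up; the `insert_args` keys follow the docstring above):

```python
from airflow.providers.common.sql.operators.sql import SQLInsertRowsOperator

insert_events = SQLInsertRowsOperator(
    task_id="insert_events",            # hypothetical task ID
    conn_id="my_database",              # hypothetical connection
    table_name="events",
    schema="analytics",
    rows=[("2025-01-01", "click"), ("2025-01-02", "view")],
    ignored_columns=["id"],             # leave auto-generated columns to the database
    insert_args={"executemany": True},  # forwarded to the hook's insert_rows()
)
```

Note that if `rows` resolves to an empty collection, `execute` raises `AirflowSkipException`, so the task is skipped rather than failed.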
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -28,8 +28,8 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.sensors.sql
-isort:skip_file
+Definition of the public interface for
+airflow.providers.common.sql.src.airflow.providers.common.sql.sensors.sql.
 """
 
 from collections.abc import Callable, Mapping, Sequence
@@ -17,7 +17,7 @@
 #
 # This is automatically generated stub for the `common.sql` provider
 #
-# This file is generated automatically by the `update-common-sql-api stubs` pre-commit
+# This file is generated automatically by the `update-common-sql-api stubs` prek hook
 # and the .pyi file represents part of the "public" API that the
 # `common.sql` provider exposes to other providers.
 #
@@ -27,10 +27,7 @@
 #
 # You can read more in the README_API.md file
 #
-"""
-Definition of the public interface for airflow.providers.common.sql.triggers.sql
-isort:skip_file
-"""
+"""Definition of the public interface for airflow.providers.common.sql.triggers.sql."""
 
 from collections.abc import AsyncIterator
 from typing import Any
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.27.5
+Version: 1.28.0rc1
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,7 +20,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0
+Requires-Dist: apache-airflow>=2.10.0rc1
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: methodtools>=0.4.7
@@ -29,8 +29,8 @@ Requires-Dist: pandas[sql-other]>=2.1.2 ; extra == "pandas" and ( python_version
 Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
 Requires-Dist: polars>=1.26.0 ; extra == "polars"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.5/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.5
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.28.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -64,9 +64,8 @@ Provides-Extra: polars
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.27.5``
+Release: ``1.28.0``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
 
@@ -78,12 +77,12 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.5/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-common-sql``
 
@@ -121,5 +120,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.27.5/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.28.0/changelog.html>`_.
 
@@ -1,31 +1,31 @@
 airflow/providers/common/sql/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=GYsP_yzmgFFZm75gJtaQiXoEyNOg8QyA8BIANzQq7p0,1500
+airflow/providers/common/sql/README_API.md,sha256=Yug9-DLqoKkG-qT5XMwkyG_T-r17Iqhiipxt5tMZIUw,5906
+airflow/providers/common/sql/__init__.py,sha256=k0Ea8iIK7zvwZotjgEVDtaYm67qgMXFkqrEmJaPHozA,1500
 airflow/providers/common/sql/get_provider_info.py,sha256=xCPXLKFA_1ilhGa0aB3E9ggdHtn9Do7Eb469begpZag,2767
-airflow/providers/common/sql/get_provider_info.pyi,sha256=0mydJPGQScnPpoa9-ohHVJFngFH6Lsk22KS243PE-gw,1596
+airflow/providers/common/sql/get_provider_info.pyi,sha256=NSIGS74SESn-j0g3xd3BlctUrKlkWaXL605hCs0hjac,1580
 airflow/providers/common/sql/version_compat.py,sha256=WKfSWhm-ZTmqCuSo6UMn9GiEgzfCMGEso4BR52V4A4c,2105
 airflow/providers/common/sql/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/dialects/dialect.py,sha256=1pWw6s0NJpfdIiNUO_Tl-jCgKUefbS-mQa01yAkNLT8,7705
-airflow/providers/common/sql/dialects/dialect.pyi,sha256=-4Up-ykasYv0NRf1U7WhO1G_HYGn2gDAuq7D-HW3Ulg,3520
+airflow/providers/common/sql/dialects/dialect.pyi,sha256=ti8eTrLEj-sgtYEoIj3pEfOSl24nbQNhxBcgB8145jY,3504
 airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
 airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
 airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md,sha256=DscUH0P3sgOpfXKPvtWpOkRXt8BI60FUxYnuwWpyLqM,2792
 airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/hooks/handlers.py,sha256=XjvycIQsGpDrtg6RFACczybW_dER97RR6Z6B_S6jf6Y,3399
-airflow/providers/common/sql/hooks/handlers.pyi,sha256=3UDOBxvFi5dLzRlF2yCwlj8LuYgDFSKNLmCHhF_Qfik,1827
-airflow/providers/common/sql/hooks/sql.py,sha256=Zjg5VfogvQKrZgSEPqk6ZbRl3oFRWAVAM3HbVr0J2LI,43757
-airflow/providers/common/sql/hooks/sql.pyi,sha256=EIQZ6-MDhT8vzdpq9b-8PDn1drdr0jrkMGV7PlajGsQ,7968
+airflow/providers/common/sql/hooks/handlers.pyi,sha256=Qex63GfW0J6RQeT-prAfukvw4NE6P1IQnM1e04D2sH4,1811
+airflow/providers/common/sql/hooks/sql.py,sha256=w_I-mQ1j_hde_M5Dq7AwtwZLIv-ga5dJsAKhvfNnsaU,44010
+airflow/providers/common/sql/hooks/sql.pyi,sha256=5E3BMnzkKpDaxv47WDsTVlGtTp1_5N-4gk3janeF8nA,7952
 airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/operators/generic_transfer.py,sha256=HorM0suydgf1rHH3V53YyKbd1BiiS4hxZeksIBo0ROs,8322
-airflow/providers/common/sql/operators/generic_transfer.pyi,sha256=rhuCB7KSm_NutW8m3BNQmaoiUPDXp1fTrSeoR0Jr4dU,3330
-airflow/providers/common/sql/operators/sql.py,sha256=Di6-LJrWzaSjww0pAw-1N9VeJwazzRb-Ug5z6PJE5Lg,50274
+airflow/providers/common/sql/operators/generic_transfer.py,sha256=8rK29EisThzJlNmbmPT86fdp7jUqIl8VVFHAb2-ca1A,8510
+airflow/providers/common/sql/operators/generic_transfer.pyi,sha256=wHN-8y2rLpGBSoNMce6gQR19CnahLZzoW3qjCDmUvHE,3312
+airflow/providers/common/sql/operators/sql.py,sha256=Z9GtZ3q8Onzsa_HSjDDFULIPdr60V-BodHGrVU_ZAMk,55716
 airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/sensors/sql.py,sha256=osPBiu7atWLEe2fH9FTmwm0aXJ12JggcGuwJwjkWOko,5464
-airflow/providers/common/sql/sensors/sql.pyi,sha256=gJ_WPbqsbFBZWKnZcRmaPfRYgpUb9_7_65PPc9VIymM,2691
+airflow/providers/common/sql/sensors/sql.pyi,sha256=T25x934WUathT0bOULErqx-af8nxkBN-j9eid3cFRSI,2675
 airflow/providers/common/sql/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/triggers/sql.py,sha256=3xTxMf2oSE8X8IhP22pmlSoEYmJIIFmi8HmsNbiLek0,3690
-airflow/providers/common/sql/triggers/sql.pyi,sha256=7wVgfqUPJB7egsWwbZtwZV3TFm7DuKLclWetNInCM5w,1986
-apache_airflow_providers_common_sql-1.27.5.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.27.5.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_common_sql-1.27.5.dist-info/METADATA,sha256=UVmu-PGvgElDW4p2G32HPVl5STu16BcidQZ6PWoNcn0,5501
-apache_airflow_providers_common_sql-1.27.5.dist-info/RECORD,,
+airflow/providers/common/sql/triggers/sql.pyi,sha256=TjSM2B3qCv3oN8Y5l_czi9YfxRE2h5Hv_lvUokeiGsE,1968
+apache_airflow_providers_common_sql-1.28.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.28.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_common_sql-1.28.0rc1.dist-info/METADATA,sha256=WfXGp2lPWpwJ1Xk9PPiHlpK2xP3mGXWCNTquMEPr8W0,5483
+apache_airflow_providers_common_sql-1.28.0rc1.dist-info/RECORD,,