apache-airflow-providers-common-sql 1.21.0rc2__py3-none-any.whl → 1.23.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.

@@ -18,8 +18,8 @@ from __future__ import annotations
 
 import contextlib
 import warnings
-from collections.abc import Generator, Iterable, Mapping, Sequence
-from contextlib import closing, contextmanager
+from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence
+from contextlib import closing, contextmanager, suppress
 from datetime import datetime
 from functools import cached_property
 from typing import (
@@ -34,15 +34,21 @@ from typing import (
 from urllib.parse import urlparse
 
 import sqlparse
+from methodtools import lru_cache
 from more_itertools import chunked
 from sqlalchemy import create_engine
-from sqlalchemy.engine import Inspector
+from sqlalchemy.engine import Inspector, make_url
+from sqlalchemy.exc import ArgumentError, NoSuchModuleError
 
+from airflow.configuration import conf
 from airflow.exceptions import (
     AirflowException,
     AirflowOptionalProviderFeatureException,
 )
 from airflow.hooks.base import BaseHook
+from airflow.providers.common.sql.dialects.dialect import Dialect
+from airflow.providers.common.sql.hooks import handlers
+from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
     from pandas import DataFrame
@@ -62,27 +68,51 @@ be removed in the future. Please import it from 'airflow.providers.common.sql.ho
 def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool | None):
     warnings.warn(WARNING_MESSAGE.format("return_single_query_results"), DeprecationWarning, stacklevel=2)
 
-    from airflow.providers.common.sql.hooks import handlers
-
     return handlers.return_single_query_results(sql, return_last, split_statements)
 
 
 def fetch_all_handler(cursor) -> list[tuple] | None:
     warnings.warn(WARNING_MESSAGE.format("fetch_all_handler"), DeprecationWarning, stacklevel=2)
 
-    from airflow.providers.common.sql.hooks import handlers
-
     return handlers.fetch_all_handler(cursor)
 
 
 def fetch_one_handler(cursor) -> list[tuple] | None:
     warnings.warn(WARNING_MESSAGE.format("fetch_one_handler"), DeprecationWarning, stacklevel=2)
 
-    from airflow.providers.common.sql.hooks import handlers
-
     return handlers.fetch_one_handler(cursor)
 
 
+def resolve_dialects() -> MutableMapping[str, MutableMapping]:
+    from airflow.providers_manager import ProvidersManager
+
+    providers_manager = ProvidersManager()
+
+    # TODO: this check can be removed once common sql provider depends on Airflow 3.0 or higher,
+    # we could then also use DialectInfo and won't need to convert it to a dict.
+    if hasattr(providers_manager, "dialects"):
+        return {key: dict(value._asdict()) for key, value in providers_manager.dialects.items()}
+
+    # TODO: this can be removed once common sql provider depends on Airflow 3.0 or higher
+    return {
+        "default": dict(
+            name="default",
+            dialect_class_name="airflow.providers.common.sql.dialects.dialect.Dialect",
+            provider_name="apache-airflow-providers-common-sql",
+        ),
+        "mssql": dict(
+            name="mssql",
+            dialect_class_name="airflow.providers.microsoft.mssql.dialects.mssql.MsSqlDialect",
+            provider_name="apache-airflow-providers-microsoft-mssql",
+        ),
+        "postgresql": dict(
+            name="postgresql",
+            dialect_class_name="airflow.providers.postgres.dialects.postgres.PostgresDialect",
+            provider_name="apache-airflow-providers-postgres",
+        ),
+    }
+
+
 class ConnectorProtocol(Protocol):
     """Database connection protocol."""
 
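The new resolve_dialects() mapping is keyed by dialect name and consumed lazily by DbApiHook, which imports each dialect class from its dotted path only when needed. A minimal standalone sketch of that lookup, assuming only the mapping shape shown in the hunk (load_dialect_class is a hypothetical helper, not part of the provider):

from importlib import import_module

DIALECTS = {
    "default": {
        "name": "default",
        "dialect_class_name": "airflow.providers.common.sql.dialects.dialect.Dialect",
        "provider_name": "apache-airflow-providers-common-sql",
    },
}

def load_dialect_class(dialect_name: str):
    # Fall back to "default" for unknown dialects, then import the class
    # from its dotted path (mirroring airflow.utils.module_loading.import_string).
    info = DIALECTS.get(dialect_name, DIALECTS["default"])
    module_path, _, class_name = info["dialect_class_name"].rpartition(".")
    return getattr(import_module(module_path), class_name)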
@@ -129,6 +159,8 @@ class DbApiHook(BaseHook):
     _test_connection_sql = "select 1"
     # Default SQL placeholder
     _placeholder: str = "%s"
+    _dialects: MutableMapping[str, MutableMapping] = resolve_dialects()
+    _resolve_target_fields = conf.getboolean("core", "dbapihook_resolve_target_fields", fallback=False)
 
     def __init__(self, *args, schema: str | None = None, log_sql: bool = True, **kwargs):
         super().__init__()
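The _resolve_target_fields class attribute reads a new opt-in option from Airflow's configuration. A small sketch of how that flag resolves, assuming the option lives under [core] exactly as the conf.getboolean call above spells it:

from airflow.configuration import conf

# Equivalent airflow.cfg entry (an assumption, mirroring the lookup above):
#
#   [core]
#   dbapihook_resolve_target_fields = True
#
# Left unset, the flag defaults to False and target fields are never
# resolved from table metadata.
resolve_target_fields = conf.getboolean("core", "dbapihook_resolve_target_fields", fallback=False)
print(resolve_target_fields)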
@@ -147,12 +179,10 @@ class DbApiHook(BaseHook):
         self.__schema = schema
         self.log_sql = log_sql
         self.descriptions: list[Sequence[Sequence] | None] = []
-        self._insert_statement_format: str = kwargs.get(
-            "insert_statement_format", "INSERT INTO {} {} VALUES ({})"
-        )
-        self._replace_statement_format: str = kwargs.get(
-            "replace_statement_format", "REPLACE INTO {} {} VALUES ({})"
-        )
+        self._insert_statement_format: str | None = kwargs.get("insert_statement_format")
+        self._replace_statement_format: str | None = kwargs.get("replace_statement_format")
+        self._escape_word_format: str | None = kwargs.get("escape_word_format")
+        self._escape_column_names: bool | None = kwargs.get("escape_column_names")
         self._connection: Connection | None = kwargs.pop("connection", None)
 
     def get_conn_id(self) -> str:
@@ -174,6 +204,38 @@ class DbApiHook(BaseHook):
         )
         return self._placeholder
 
+    @property
+    def insert_statement_format(self) -> str:
+        """Return the insert statement format."""
+        if self._insert_statement_format is None:
+            self._insert_statement_format = self.connection_extra.get(
+                "insert_statement_format", "INSERT INTO {} {} VALUES ({})"
+            )
+        return self._insert_statement_format
+
+    @property
+    def replace_statement_format(self) -> str:
+        """Return the replacement statement format."""
+        if self._replace_statement_format is None:
+            self._replace_statement_format = self.connection_extra.get(
+                "replace_statement_format", "REPLACE INTO {} {} VALUES ({})"
+            )
+        return self._replace_statement_format
+
+    @property
+    def escape_word_format(self) -> str:
+        """Return the escape word format."""
+        if self._escape_word_format is None:
+            self._escape_word_format = self.connection_extra.get("escape_word_format", '"{}"')
+        return self._escape_word_format
+
+    @property
+    def escape_column_names(self) -> bool:
+        """Return the escape column names flag."""
+        if self._escape_column_names is None:
+            self._escape_column_names = self.connection_extra.get("escape_column_names", False)
+        return self._escape_column_names
+
     @property
     def connection(self) -> Connection:
         if self._connection is None:
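The four properties above fall back to the connection's extra field only when no constructor kwarg was given. A standalone sketch of that precedence (resolve_setting and the sample extra dict are illustrative, not provider API):

def resolve_setting(kwarg_value, connection_extra: dict, key: str, default):
    # Constructor kwarg wins; otherwise fall back to the connection's
    # "extra" JSON, and finally to the hard-coded default.
    if kwarg_value is not None:
        return kwarg_value
    return connection_extra.get(key, default)

extra = {"insert_statement_format": "INSERT /*+ APPEND */ INTO {} {} VALUES ({})"}
print(resolve_setting(None, extra, "insert_statement_format", "INSERT INTO {} {} VALUES ({})"))
print(resolve_setting(None, extra, "escape_word_format", '"{}"'))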
@@ -262,6 +324,57 @@ class DbApiHook(BaseHook):
     def inspector(self) -> Inspector:
         return Inspector.from_engine(self.get_sqlalchemy_engine())
 
+    @cached_property
+    def dialect_name(self) -> str:
+        try:
+            return make_url(self.get_uri()).get_dialect().name
+        except (ArgumentError, NoSuchModuleError):
+            config = self.connection_extra
+            sqlalchemy_scheme = config.get("sqlalchemy_scheme")
+            if sqlalchemy_scheme:
+                return sqlalchemy_scheme.split("+")[0] if "+" in sqlalchemy_scheme else sqlalchemy_scheme
+            return config.get("dialect", "default")
+
+    @cached_property
+    def dialect(self) -> Dialect:
+        from airflow.utils.module_loading import import_string
+
+        dialect_info = self._dialects.get(self.dialect_name)
+
+        self.log.debug("dialect_info: %s", dialect_info)
+
+        if dialect_info:
+            try:
+                return import_string(dialect_info["dialect_class_name"])(self)
+            except ImportError:
+                raise AirflowOptionalProviderFeatureException(
+                    f"{dialect_info['dialect_class_name']} not found, run: pip install "
+                    f"'{dialect_info['provider_name']}'."
+                )
+        return Dialect(self)
+
+    @property
+    def reserved_words(self) -> set[str]:
+        return self.get_reserved_words(self.dialect_name)
+
+    @lru_cache(maxsize=None)
+    def get_reserved_words(self, dialect_name: str) -> set[str]:
+        result = set()
+        with suppress(ImportError, ModuleNotFoundError, NoSuchModuleError):
+            dialect_module = import_string(f"sqlalchemy.dialects.{dialect_name}.base")
+
+            if hasattr(dialect_module, "RESERVED_WORDS"):
+                result = set(dialect_module.RESERVED_WORDS)
+            else:
+                dialect_module = import_string(f"sqlalchemy.dialects.{dialect_name}.reserved_words")
+                reserved_words_attr = f"RESERVED_WORDS_{dialect_name.upper()}"
+
+                if hasattr(dialect_module, reserved_words_attr):
+                    result = set(getattr(dialect_module, reserved_words_attr))
+
+        self.log.debug("reserved words for '%s': %s", dialect_name, result)
+        return result
+
     def get_pandas_df(
         self,
         sql,
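dialect_name above leans on SQLAlchemy's URL registry, and get_reserved_words on the RESERVED_WORDS sets shipped with each SQLAlchemy dialect. A quick check of both branches, assuming only sqlalchemy is installed:

from sqlalchemy.engine import make_url

# The driver suffix is dropped: "postgresql+psycopg2" resolves to "postgresql".
url = make_url("postgresql+psycopg2://user:pass@localhost:5432/mydb")
print(url.get_dialect().name)  # postgresql

# The reserved-words set probed by get_reserved_words() for PostgreSQL.
from sqlalchemy.dialects.postgresql.base import RESERVED_WORDS

print("select" in RESERVED_WORDS)  # True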
@@ -324,7 +437,7 @@ class DbApiHook(BaseHook):
         :param sql: the sql statement to be executed (str) or a list of sql statements to execute
         :param parameters: The parameters to render the SQL query with.
         """
-        return self.run(sql=sql, parameters=parameters, handler=fetch_all_handler)
+        return self.run(sql=sql, parameters=parameters, handler=handlers.fetch_all_handler)
 
     def get_first(self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None) -> Any:
         """
@@ -333,7 +446,7 @@ class DbApiHook(BaseHook):
         :param sql: the sql statement to be executed (str) or a list of sql statements to execute
         :param parameters: The parameters to render the SQL query with.
         """
-        return self.run(sql=sql, parameters=parameters, handler=fetch_one_handler)
+        return self.run(sql=sql, parameters=parameters, handler=handlers.fetch_one_handler)
 
     @staticmethod
     def strip_sql_string(sql: str) -> str:
@@ -468,7 +581,7 @@ class DbApiHook(BaseHook):
 
                         if handler is not None:
                             result = self._make_common_data_structure(handler(cur))
-                            if return_single_query_results(sql, return_last, split_statements):
+                            if handlers.return_single_query_results(sql, return_last, split_statements):
                                 _last_result = result
                                 _last_description = cur.description
                 else:
@@ -483,7 +596,7 @@ class DbApiHook(BaseHook):
 
         if handler is None:
             return None
-        if return_single_query_results(sql, return_last, split_statements):
+        if handlers.return_single_query_results(sql, return_last, split_statements):
             self.descriptions = [_last_description]
             return _last_result
         else:
@@ -543,7 +656,7 @@ class DbApiHook(BaseHook):
         """Return a cursor."""
         return self.get_conn().cursor()
 
-    def _generate_insert_sql(self, table, values, target_fields, replace, **kwargs) -> str:
+    def _generate_insert_sql(self, table, values, target_fields=None, replace: bool = False, **kwargs) -> str:
         """
         Generate the INSERT SQL statement.
 
@@ -551,24 +664,19 @@ class DbApiHook(BaseHook):
 
         :param table: Name of the target table
         :param values: The row to insert into the table
-        :param target_fields: The names of the columns to fill in the table
+        :param target_fields: The names of the columns to fill in the table. If no target fields are
+            specified, they will be determined dynamically from the table's metadata.
         :param replace: Whether to replace/upsert instead of insert
         :return: The generated INSERT or REPLACE/UPSERT SQL statement
         """
-        placeholders = [
-            self.placeholder,
-        ] * len(values)
-
-        if target_fields:
-            target_fields = ", ".join(target_fields)
-            target_fields = f"({target_fields})"
-        else:
-            target_fields = ""
+        if not target_fields and self._resolve_target_fields:
+            with suppress(Exception):
+                target_fields = self.dialect.get_target_fields(table)
 
-        if not replace:
-            return self._insert_statement_format.format(table, target_fields, ",".join(placeholders))
+        if replace:
+            return self.dialect.generate_replace_sql(table, values, target_fields, **kwargs)
 
-        return self._replace_statement_format.format(table, target_fields, ",".join(placeholders))
+        return self.dialect.generate_insert_sql(table, values, target_fields, **kwargs)
 
     @contextmanager
     def _create_autocommit_connection(self, autocommit: bool = False):
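With generation delegated to the dialect, the default Dialect is expected to keep producing the documented format strings. A standalone sketch of that output (generate_insert_sql here is a local stand-in, not the dialect's actual method body):

def generate_insert_sql(table, values, target_fields, placeholder="%s"):
    # Default format: INSERT INTO {table} ({columns}) VALUES ({placeholders})
    placeholders = ", ".join([placeholder] * len(values))
    columns = f"({', '.join(target_fields)})" if target_fields else ""
    return "INSERT INTO {} {} VALUES ({})".format(table, columns, placeholders)

print(generate_insert_sql("hr.employees", ("Ada", 42), ["name", "age"]))
# INSERT INTO hr.employees (name, age) VALUES (%s, %s)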
@@ -636,8 +744,8 @@ class DbApiHook(BaseHook):
                     self.log.debug("Generated sql: %s", sql)
                     cur.executemany(sql, values)
                     conn.commit()
-                    self.log.info("Loaded %s rows into %s so far", len(chunked_rows), table)
                     nb_rows += len(chunked_rows)
+                    self.log.info("Loaded %s rows into %s so far", nb_rows, table)
                 else:
                     for i, row in enumerate(rows, 1):
                         values = self._serialize_cells(row, conn)
@@ -28,25 +28,32 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.hooks.sql
+Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.hooks.sql
 isort:skip_file
 """
+
+from collections.abc import Generator, Iterable, Mapping, MutableMapping, Sequence
+from functools import cached_property as cached_property
+from typing import Any, Callable, Protocol, TypeVar, overload
+
 from _typeshed import Incomplete as Incomplete
+from pandas import DataFrame as DataFrame
+from sqlalchemy.engine import URL as URL, Inspector as Inspector
+
 from airflow.hooks.base import BaseHook as BaseHook
 from airflow.models import Connection as Connection
+from airflow.providers.common.sql.dialects.dialect import Dialect as Dialect
 from airflow.providers.openlineage.extractors import OperatorLineage as OperatorLineage
 from airflow.providers.openlineage.sqlparser import DatabaseInfo as DatabaseInfo
-from functools import cached_property as cached_property
-from pandas import DataFrame as DataFrame
-from sqlalchemy.engine import Inspector as Inspector, URL as URL
-from typing import Any, Callable, Generator, Iterable, Mapping, Protocol, Sequence, TypeVar, overload
 
 T = TypeVar("T")
 SQL_PLACEHOLDERS: Incomplete
+WARNING_MESSAGE: str
 
 def return_single_query_results(sql: str | Iterable[str], return_last: bool, split_statements: bool): ...
 def fetch_all_handler(cursor) -> list[tuple] | None: ...
 def fetch_one_handler(cursor) -> list[tuple] | None: ...
+def resolve_dialects() -> MutableMapping[str, MutableMapping]: ...
 
 class ConnectorProtocol(Protocol):
     def connect(self, host: str, port: int, username: str, schema: str) -> Any: ...
@@ -65,6 +72,14 @@ class DbApiHook(BaseHook):
     @cached_property
     def placeholder(self) -> str: ...
     @property
+    def insert_statement_format(self) -> str: ...
+    @property
+    def replace_statement_format(self) -> str: ...
+    @property
+    def escape_word_format(self) -> str: ...
+    @property
+    def escape_column_names(self) -> bool: ...
+    @property
     def connection(self) -> Connection: ...
     @connection.setter
     def connection(self, value: Any) -> None: ...
@@ -79,6 +94,13 @@ class DbApiHook(BaseHook):
     def get_sqlalchemy_engine(self, engine_kwargs: Incomplete | None = None): ...
     @property
     def inspector(self) -> Inspector: ...
+    @cached_property
+    def dialect_name(self) -> str: ...
+    @cached_property
+    def dialect(self) -> Dialect: ...
+    @property
+    def reserved_words(self) -> set[str]: ...
+    def get_reserved_words(self, dialect_name: str) -> set[str]: ...
     def get_pandas_df(
         self, sql, parameters: list | tuple | Mapping[str, Any] | None = None, **kwargs
     ) -> DataFrame: ...
@@ -1240,4 +1240,5 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
                 f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
             )
 
-        self.skip_all_except(context["ti"], follow_branch)
+        # TODO(potiuk) remove the type ignore once we solve provider <-> Task SDK relationship
+        self.skip_all_except(context["ti"], follow_branch)  # type: ignore[arg-type]
@@ -28,13 +28,17 @@
 # You can read more in the README_API.md file
 #
 """
-Definition of the public interface for airflow.providers.common.sql.sensors.sql
+Definition of the public interface for airflow.providers.common.sql.src.airflow.providers.common.sql.sensors.sql
 isort:skip_file
 """
+
+from collections.abc import Mapping, Sequence
+from typing import Any, Callable
+
 from _typeshed import Incomplete as Incomplete
+
 from airflow.sensors.base import BaseSensorOperator as BaseSensorOperator
 from airflow.utils.context import Context as Context
-from typing import Any, Callable, Mapping, Sequence
 
 class SqlSensor(BaseSensorOperator):
     template_fields: Sequence[str]
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-common-sql
-Version: 1.21.0rc2
+Version: 1.23.0
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,15 +20,14 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: more-itertools>=9.0.0
+Requires-Dist: apache-airflow>=2.9.0
 Requires-Dist: sqlparse>=0.5.1
+Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
-Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas" and (python_version>="3.9")
-Requires-Dist: pandas>=1.5.3,<2.2 ; extra == "pandas" and (python_version<"3.9")
+Requires-Dist: pandas>=2.1.2,<2.2 ; extra == "pandas"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -37,23 +36,6 @@ Provides-Extra: openlineage
 Provides-Extra: pandas
 
 
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
-
-..   http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
71
53
  specific language governing permissions and limitations
72
54
  under the License.
73
55
 
74
- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
75
- OVERWRITTEN WHEN PREPARING PACKAGES.
56
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
76
57
 
77
58
  .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
78
59
  `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -80,7 +61,7 @@ Provides-Extra: pandas
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.21.0.rc2``
+Release: ``1.23.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -93,7 +74,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/>`_.
 
 Installation
 ------------
@@ -135,4 +116,5 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.21.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.23.0/changelog.html>`_.
+
@@ -0,0 +1,25 @@
+airflow/providers/common/sql/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
+airflow/providers/common/sql/__init__.py,sha256=AYm77GCPhnDJ6AMc3fi1FzUszcGkMWApE2HhQ-W5hR4,1498
+airflow/providers/common/sql/get_provider_info.py,sha256=mObPyvz4mfh6C0xry0r7XPQ3QZt4HFf5Ud2ZTWpzneQ,3650
+airflow/providers/common/sql/get_provider_info.pyi,sha256=0mydJPGQScnPpoa9-ohHVJFngFH6Lsk22KS243PE-gw,1596
+airflow/providers/common/sql/dialects/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/dialects/dialect.py,sha256=rV4frwvKeU25AydRFA0iJEzSDAjHBAQQl4a7OLpE2Tg,7698
+airflow/providers/common/sql/dialects/dialect.pyi,sha256=AWVQ0Q4Sn0aDq_MtQinFopbokQR-GIyXsg0oeK6NBxA,3513
+airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
+airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
+airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md,sha256=DscUH0P3sgOpfXKPvtWpOkRXt8BI60FUxYnuwWpyLqM,2792
+airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/hooks/handlers.py,sha256=pzZg6gDWQa_bIpoFCs3J_o_07Eb9EpbJIiNCk2e07e4,3413
+airflow/providers/common/sql/hooks/handlers.pyi,sha256=8meEoDd_lUHgJ8PCPrmhRX00xpSKyrMRY5tbrc8upzg,1814
+airflow/providers/common/sql/hooks/sql.py,sha256=K7U6LVbglKTXBIw34MBoFWTN3o1o82x1zw3iKyiZdu8,36130
+airflow/providers/common/sql/hooks/sql.pyi,sha256=K0B4HOgI-pczVG3K8PRSlF0tJ7FcLwz-nHveSpIydno,6751
+airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/operators/sql.py,sha256=c5nERbSxUTg8438Hq7MnM4vuFOWZLJfN70o7FIDmNVQ,49245
+airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/common/sql/sensors/sql.py,sha256=vJqxvDpwmGbq_6kMMqU4YjhHhcbhDhHYjJ2ufhLDmGc,5519
+airflow/providers/common/sql/sensors/sql.pyi,sha256=GiOk2qD0PO5HWISgTTdOJQLC9b2ItzvQr68adXIbjGQ,2530
+apache_airflow_providers_common_sql-1.23.0.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.23.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_common_sql-1.23.0.dist-info/METADATA,sha256=XYL-MD-Pvo02D0YiahqtW5oBZIua_8OyoUSkS9zT15I,5223
+apache_airflow_providers_common_sql-1.23.0.dist-info/RECORD,,