apache-airflow-providers-common-sql 1.9.0rc1-py3-none-any.whl → 1.10.0-py3-none-any.whl

This diff reflects the changes between publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.

This version of apache-airflow-providers-common-sql is flagged as a potentially problematic release.

airflow/providers/common/sql/__init__.py

@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "1.9.0"
+__version__ = "1.10.0"
 
 try:
     from airflow import __version__ as airflow_version
airflow/providers/common/sql/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Common SQL",
         "description": "`Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__\n",
         "suspended": False,
-        "source-date-epoch": 1701983370,
+        "source-date-epoch": 1703288124,
         "versions": [
+            "1.10.0",
             "1.9.0",
             "1.8.1",
             "1.8.0",
airflow/providers/common/sql/hooks/sql.py

@@ -16,6 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+import contextlib
+import warnings
 from contextlib import closing
 from datetime import datetime
 from typing import (
@@ -24,6 +26,7 @@ from typing import (
     Callable,
     Generator,
     Iterable,
+    List,
     Mapping,
     Protocol,
     Sequence,
@@ -36,7 +39,7 @@ from urllib.parse import urlparse
 import sqlparse
 from sqlalchemy import create_engine
 
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 
 if TYPE_CHECKING:
@@ -122,10 +125,10 @@ class DbApiHook(BaseHook):
     """
     Abstract base class for sql hooks.
 
-    When subclassing, maintainers can override the `_make_serializable` method:
+    When subclassing, maintainers can override the `_make_common_data_structure` method:
     This method transforms the result of the handler method (typically `cursor.fetchall()`) into
-    JSON-serializable objects. Most of the time, the underlying SQL library already returns tuples from
-    its cursor, and the `_make_serializable` method can be ignored.
+    objects common across all Hooks derived from this class (tuples). Most of the time, the underlying SQL
+    library already returns tuples from its cursor, and the `_make_common_data_structure` method can be ignored.
 
     :param schema: Optional DB schema that overrides the schema specified in the connection. Make sure that
         if you change the schema parameter value in the constructor of the derived Hook, such change
@@ -143,8 +146,6 @@ class DbApiHook(BaseHook):
     connector: ConnectorProtocol | None = None
     # Override with db-specific query to check connection
     _test_connection_sql = "select 1"
-    # Override with the db-specific value used for placeholders
-    placeholder: str = "%s"
 
     def __init__(self, *args, schema: str | None = None, log_sql: bool = True, **kwargs):
         super().__init__()
@@ -163,6 +164,11 @@ class DbApiHook(BaseHook):
         self.__schema = schema
         self.log_sql = log_sql
         self.descriptions: list[Sequence[Sequence] | None] = []
+        self._placeholder: str = "%s"
+
+    @property
+    def placeholder(self) -> str:
+        return self._placeholder
 
     def get_conn(self):
         """Return a connection object."""
@@ -305,7 +311,7 @@ class DbApiHook(BaseHook):
         handler: Callable[[Any], T] = ...,
         split_statements: bool = ...,
         return_last: bool = ...,
-    ) -> T | list[T]:
+    ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None:
         ...
 
     def run(
@@ -316,7 +322,7 @@ class DbApiHook(BaseHook):
         handler: Callable[[Any], T] | None = None,
         split_statements: bool = False,
         return_last: bool = True,
-    ) -> T | list[T] | None:
+    ) -> tuple | list[tuple] | list[list[tuple] | tuple] | None:
         """Run a command or a list of commands.
 
         Pass a list of SQL statements to the sql parameter to get them to
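Both the overload and the real signature of `run` now advertise tuples (or lists of tuples) instead of the handler's generic `T`, reflecting the normalization done by `_make_common_data_structure`. A short usage sketch, assuming the sqlite provider is installed and its default `sqlite_default` connection exists:

    from airflow.providers.common.sql.hooks.sql import fetch_all_handler
    from airflow.providers.sqlite.hooks.sqlite import SqliteHook

    hook = SqliteHook()  # any DbApiHook subclass behaves the same way here
    rows = hook.run(sql="SELECT 1, 'ok'", handler=fetch_all_handler)
    # `rows` is expected to be a list of plain tuples, e.g. [(1, "ok")],
    # regardless of the row type returned by the underlying driver.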
@@ -392,7 +398,7 @@ class DbApiHook(BaseHook):
                     self._run_command(cur, sql_statement, parameters)
 
                     if handler is not None:
-                        result = self._make_serializable(handler(cur))
+                        result = self._make_common_data_structure(handler(cur))
                         if return_single_query_results(sql, return_last, split_statements):
                             _last_result = result
                             _last_description = cur.description
@@ -412,19 +418,31 @@ class DbApiHook(BaseHook):
         else:
             return results
 
-    @staticmethod
-    def _make_serializable(result: Any) -> Any:
-        """Ensure the data returned from an SQL command is JSON-serializable.
+    def _make_common_data_structure(self, result: T | Sequence[T]) -> tuple | list[tuple]:
+        """Ensure the data returned from an SQL command is a standard tuple or list[tuple].
 
         This method is intended to be overridden by subclasses of the `DbApiHook`. Its purpose is to
-        transform the result of an SQL command (typically returned by cursor methods) into a
-        JSON-serializable format.
+        transform the result of an SQL command (typically returned by cursor methods) into a common
+        data structure (a tuple or list[tuple]) across all DBApiHook derived Hooks, as defined in the
+        ADR-0002 of the sql provider.
+
+        If this method is not overridden, the result data is returned as-is. If the output of the cursor
+        is already a common data structure, this method should be ignored.
+        """
+        # Back-compatibility call for providers implementing old ´_make_serializable' method.
+        with contextlib.suppress(AttributeError):
+            result = self._make_serializable(result=result)  # type: ignore[attr-defined]
+            warnings.warn(
+                "The `_make_serializable` method is deprecated and support will be removed in a future "
+                f"version of the common.sql provider. Please update the {self.__class__.__name__}'s provider "
+                "to a version based on common.sql >= 1.9.1.",
+                AirflowProviderDeprecationWarning,
+                stacklevel=2,
+            )
 
-        If this method is not overridden, the result data is returned as-is.
-        If the output of the cursor is already JSON-serializable, this method
-        should be ignored.
-        """
-        return result
+        if isinstance(result, Sequence):
+            return cast(List[tuple], result)
+        return cast(tuple, result)
 
     def _run_command(self, cur, sql_statement, parameters):
         """Run a statement using an already open cursor."""
@@ -463,8 +481,7 @@ class DbApiHook(BaseHook):
         """Return a cursor."""
         return self.get_conn().cursor()
 
-    @classmethod
-    def _generate_insert_sql(cls, table, values, target_fields, replace, **kwargs) -> str:
+    def _generate_insert_sql(self, table, values, target_fields, replace, **kwargs) -> str:
         """
         Generate the INSERT SQL statement.
 
@@ -477,7 +494,7 @@ class DbApiHook(BaseHook):
         :return: The generated INSERT or REPLACE SQL statement
         """
         placeholders = [
-            cls.placeholder,
+            self.placeholder,
         ] * len(values)
 
         if target_fields:
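Because `_generate_insert_sql` is now an instance method, it reads the placeholder from the hook instance rather than the class. A worked illustration; the table and column names are made up, and the exact spacing of the generated statement may differ:

    # `hook` is assumed to be a concrete DbApiHook subclass instance.
    sql = hook._generate_insert_sql(
        table="users",
        values=(1, "alice"),
        target_fields=["id", "name"],
        replace=False,
    )
    # With the default "%s" placeholder this yields something like:
    #   INSERT INTO users (id, name) VALUES (%s,%s)
    # A hook that set self._placeholder = "?" would render "?" placeholders instead.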
airflow/providers/common/sql/hooks/sql.pyi

@@ -53,6 +53,7 @@ class DbApiHook(BaseForDbApiHook):
     placeholder: str
     log_sql: Incomplete
     descriptions: Incomplete
+    _placeholder: str
     def __init__(self, *args, schema: Union[str, None] = ..., log_sql: bool = ..., **kwargs) -> None: ...
     def get_conn(self): ...
     def get_uri(self) -> str: ...
airflow/providers/common/sql/operators/sql.py

@@ -729,6 +729,8 @@ class SQLCheckOperator(BaseSQLOperator):
     The ``SQLCheckOperator`` expects a sql query that will return a single row.
     Each value on that first row is evaluated using python ``bool`` casting.
     If any of the values return ``False`` the check is failed and errors out.
+    If a Python dict is returned, and any values in the Python dict are ``False``,
+    the check is failed and errors out.
 
     Note that Python bool casting evals the following as ``False``:
 
@@ -737,6 +739,7 @@ class SQLCheckOperator(BaseSQLOperator):
     * Empty string (``""``)
     * Empty list (``[]``)
     * Empty dictionary or set (``{}``)
+    * Dictionary with value = ``False`` (``{'DUPLICATE_ID_CHECK': False}``)
 
     Given a query like ``SELECT COUNT(*) FROM foo``, it will fail only if
     the count ``== 0``. You can craft much more complex query that could,
@@ -785,6 +788,8 @@ class SQLCheckOperator(BaseSQLOperator):
         self.log.info("Record: %s", records)
         if not records:
             self._raise_exception(f"The following query returned zero rows: {self.sql}")
+        elif isinstance(records, dict) and not all(records.values()):
+            self._raise_exception(f"Test failed.\nQuery:\n{self.sql}\nResults:\n{records!s}")
         elif not all(records):
            self._raise_exception(f"Test failed.\nQuery:\n{self.sql}\nResults:\n{records!s}")
 
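With dict results now handled explicitly, a check whose single row comes back as a mapping of named columns to booleans fails as soon as any value is falsy. A minimal sketch, to be placed inside a DAG definition; the connection id and table are placeholders:

    from airflow.providers.common.sql.operators.sql import SQLCheckOperator

    users_quality_check = SQLCheckOperator(
        task_id="users_quality_check",
        conn_id="my_db_conn",  # placeholder connection id
        sql="""
            SELECT
                COUNT(*) = COUNT(DISTINCT id) AS duplicate_id_check,
                COUNT(*) - COUNT(name) = 0 AS null_name_check
            FROM users
        """,
    )
    # A backend that returns the row as {"duplicate_id_check": True,
    # "null_name_check": False} now fails the task because one value is False.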
apache_airflow_providers_common_sql-1.10.0.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-common-sql
-Version: 1.9.0rc1
+Version: 1.10.0
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,13 +20,13 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.6.0.dev0
+Requires-Dist: apache-airflow>=2.6.0
 Requires-Dist: sqlparse>=0.4.2
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Requires-Dist: pandas>=0.17.1 ; extra == "pandas"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.9.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.9.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.10.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.10.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -78,7 +78,7 @@ Provides-Extra: pandas
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.9.0.rc1``
+Release: ``1.10.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -91,7 +91,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.9.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.10.0/>`_.
 
 Installation
 ------------
@@ -132,4 +132,4 @@ Dependent package
 ============================================================================================================== ===============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.9.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.10.0/changelog.html>`_.
apache_airflow_providers_common_sql-1.10.0.dist-info/RECORD

@@ -1,19 +1,19 @@
 airflow/providers/common/sql/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
 airflow/providers/common/sql/README_API.md,sha256=CxhaS8EedZ4dcbLUPC4-GLCMaY3OH96oHxXttUGU06E,5932
-airflow/providers/common/sql/__init__.py,sha256=GUWCXqAy3zJ4t-dpvCANEfZ5wv88L3v34Me1V6RqWQg,1585
-airflow/providers/common/sql/get_provider_info.py,sha256=Oq35Nk7qQdmhu3MLublr6PA5KibZ_JHSsaNIqkI9wXc,2837
+airflow/providers/common/sql/__init__.py,sha256=b5AyAg77Q-v8F3NKxkV8CqryYCu3LcRZ1vFYmO3noXU,1586
+airflow/providers/common/sql/get_provider_info.py,sha256=KAoSn7ciaJqLGDKCoYBSXnbPCOZaURURaemm6T-nR0o,2859
 airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md,sha256=TfANqrzoFto9PMOMza3MitIkXHGLx2kY_BhhF-N0_ow,1675
 airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md,sha256=ze5w9IVS-HkUwdZvPW8_JaJaVwel7-N6XdEVN4pTuCE,8457
 airflow/providers/common/sql/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/hooks/sql.py,sha256=-ZcSWjt1oBgDT3f1h81-nWE_GfRzhXQ8lxvU-zE7wwM,25069
-airflow/providers/common/sql/hooks/sql.pyi,sha256=VWDJZEOnvW4si_ablFHXBmG9K6fQJoEKL4YA6U21Sbc,4072
+airflow/providers/common/sql/hooks/sql.py,sha256=1i90yY7sIWjzhp1QEwlQzpjqNq5H8uOA2Rce4o3Kq7E,26191
+airflow/providers/common/sql/hooks/sql.pyi,sha256=E8N_C8Gk7gCmQcGtw3UtxNgMp9SqReOMvRf1mfHawYs,4094
 airflow/providers/common/sql/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/common/sql/operators/sql.py,sha256=zhqsUjbAIfa4WiQgO_Fej-vjJDuJa0mCZ1IICJfPfAc,47142
+airflow/providers/common/sql/operators/sql.py,sha256=w7OBaJgE5EhosCvIP7UKEyDOZ8Pe-j5IjwC-B5QRLZU,47505
 airflow/providers/common/sql/operators/sql.pyi,sha256=-Wa-4uMtRPvUowYgSDnfH98Joe3Uakzvof4F4G4mgMM,7769
 airflow/providers/common/sql/sensors/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/common/sql/sensors/sql.py,sha256=bqyaw3pQpvCRJ5h2R2eZiJJuGtW-6g4RcI-5WaREPRk,5669
 airflow/providers/common/sql/sensors/sql.pyi,sha256=ZwVia3SUHrW7eB98r3vHYT_jhgkSWHRZqA2srYDHVbc,2295
-apache_airflow_providers_common_sql-1.9.0rc1.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
-apache_airflow_providers_common_sql-1.9.0rc1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-apache_airflow_providers_common_sql-1.9.0rc1.dist-info/METADATA,sha256=9M-6rhIfEuQ3aCXKuCe-y6lJgnoie6cFWSmEXFRpMb4,5985
-apache_airflow_providers_common_sql-1.9.0rc1.dist-info/RECORD,,
+apache_airflow_providers_common_sql-1.10.0.dist-info/entry_points.txt,sha256=h8UXRp2crPuGmYVYRM5oe168qIh7g-4t2QQbVMizKjI,106
+apache_airflow_providers_common_sql-1.10.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_common_sql-1.10.0.dist-info/METADATA,sha256=0jWYecVJ5skyfBEyISv8Xy4AQWrT5qLLF5gTE-_aDtk,5979
+apache_airflow_providers_common_sql-1.10.0.dist-info/RECORD,,