apache-airflow-providers-common-sql 1.28.1__tar.gz → 1.30.0rc2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- apache_airflow_providers_common_sql-1.30.0rc2/NOTICE +5 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/PKG-INFO +27 -22
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/README.rst +19 -17
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/changelog.rst +62 -11
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/index.rst +20 -18
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/provider.yaml +4 -1
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/pyproject.toml +8 -5
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/__init__.py +3 -3
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/dialects/dialect.py +12 -15
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/hooks/sql.py +3 -7
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/hooks/sql.pyi +3 -3
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/operators/generic_transfer.py +13 -11
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/operators/generic_transfer.pyi +1 -1
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/operators/sql.py +8 -3
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/sensors/sql.py +2 -2
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/sensors/sql.pyi +1 -7
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/triggers/sql.py +1 -1
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/version_compat.py +0 -14
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/dialects/test_dialect.py +0 -1
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/hooks/test_dbapi.py +2 -4
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/hooks/test_sql.py +12 -6
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/hooks/test_sqlparse.py +1 -1
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/operators/test_generic_transfer.py +32 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/operators/test_sql.py +3 -3
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/operators/test_sql_execute.py +3 -3
- {apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql → apache_airflow_providers_common_sql-1.30.0rc2}/LICENSE +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/commits.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/conf.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/connections.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/dataframes.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/dialects.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/integration-logos/sql.png +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/operators.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/security.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/docs/supported-database-types.rst +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/README_API.md +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/dialects/dialect.pyi +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/get_provider_info.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/get_provider_info.pyi +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/hooks/handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/hooks/handlers.pyi +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/src/airflow/providers/common/sql/triggers/sql.pyi +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/conftest.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/example_generic_transfer.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/example_sql_column_table_check.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/example_sql_execute_query.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/example_sql_insert_rows.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/system/common/sql/example_sql_threshold_check.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/hooks/test_handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/sensors/test_sql.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.28.1 → apache_airflow_providers_common_sql-1.30.0rc2}/tests/unit/common/sql/triggers/test_sql.py +0 -0
--- apache_airflow_providers_common_sql-1.28.1/PKG-INFO
+++ apache_airflow_providers_common_sql-1.30.0rc2/PKG-INFO
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.
+Version: 1.30.0rc2
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,13 +15,15 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.0rc1
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: methodtools>=0.4.7
@@ -29,8 +32,8 @@ Requires-Dist: pandas[sql-other]>=2.1.2 ; extra == "pandas" and ( python_version
 Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
 Requires-Dist: polars>=1.26.0 ; extra == "polars"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -64,7 +67,7 @@ Provides-Extra: polars
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.
+Release: ``1.30.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -77,7 +80,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/>`_.
 
 Installation
 ------------
@@ -91,14 +94,15 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-
-PIP package
-
-``apache-airflow``
-``
-``
-``
-
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.0``
+``sqlparse`` ``>=0.5.1``
+``more-itertools`` ``>=9.0.0``
+``methodtools`` ``>=0.4.7``
+========================================== ==================
 
 Cross provider package dependencies
 -----------------------------------
@@ -110,14 +114,15 @@ You can install such cross-provider dependencies when installing from PyPI. For
 
 .. code-block:: bash
 
-    pip install apache-airflow-providers-common-sql[
+    pip install apache-airflow-providers-common-sql[common.compat]
 
 
-
-Dependent package
-
-`apache-airflow-providers-
-
+================================================================================================================== =================
+Dependent package Extra
+================================================================================================================== =================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
+================================================================================================================== =================
 
 Optional dependencies
 ----------------------
@@ -131,5 +136,5 @@ Extra Dependencies
 =============== ================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html>`_.
 
--- apache_airflow_providers_common_sql-1.28.1/README.rst
+++ apache_airflow_providers_common_sql-1.30.0rc2/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.
+Release: ``1.30.0``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -36,7 +36,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/>`_.
 
 Installation
 ------------
@@ -50,14 +50,15 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-
-PIP package
-
-``apache-airflow``
-``
-``
-``
-
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.0``
+``sqlparse`` ``>=0.5.1``
+``more-itertools`` ``>=9.0.0``
+``methodtools`` ``>=0.4.7``
+========================================== ==================
 
 Cross provider package dependencies
 -----------------------------------
@@ -69,14 +70,15 @@ You can install such cross-provider dependencies when installing from PyPI. For
 
 .. code-block:: bash
 
-    pip install apache-airflow-providers-common-sql[
+    pip install apache-airflow-providers-common-sql[common.compat]
 
 
-
-Dependent package
-
-`apache-airflow-providers-
-
+================================================================================================================== =================
+Dependent package Extra
+================================================================================================================== =================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
+================================================================================================================== =================
 
 Optional dependencies
 ----------------------
@@ -90,4 +92,4 @@ Extra Dependencies
 =============== ================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html>`_.
--- apache_airflow_providers_common_sql-1.28.1/docs/changelog.rst
+++ apache_airflow_providers_common_sql-1.30.0rc2/docs/changelog.rst
@@ -25,11 +25,67 @@
 Changelog
 ---------
 
-1.
+1.30.0
+......
+
+.. note::
+  This release of provider is only available for Airflow 2.11+ as explained in the
+  `Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>`_.
+
+Misc
+~~~~
+
+* ``Move out some exceptions to TaskSDK (#54505)``
+* ``Bump minimum Airflow version in providers to Airflow 2.11.0 (#58612)``
+* ``Fix lower bound dependency to common-compat provider (#58833)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Updates to release process of providers (#58316)``
+   * ``Prepare release for 2025-11-27 wave of providers (#58697)``
+
+1.29.0
+......
+
+Features
+~~~~~~~~
+
+* ``Allow a list of SQL statements in GenericTransfer (#56041)``
+
+Misc
+~~~~
+
+* ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
+* ``Migrate 'common/sql' provider to 'common.compat' (#57135)``
+* ``Migrate 'common.sql' provider to 'common.compat' (#57075)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Delete all unnecessary LICENSE Files (#58191)``
+   * ``Enable PT006 rule to 19 files in providers (cncf,common) (#57995)``
+   * ``Fix code formatting via ruff preview (#57641)``
+   * ``Enable ruff PLW0127 rule (#57298)``
+
+1.28.2
 ......
 
+Misc
+~~~~
+
+* ``fix mypy type errors in common/sql provider for sqlalchemy 2 upgrade (#56824)``
+
+Doc-only
+~~~~~~~~
+
+* ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Enable PT011 rule to prvoider tests (#56495)``
+
+1.28.1
+......
 
-Release Date: ``|PypiReleaseDate|``
 
 Bug Fixes
 ~~~~~~~~~
@@ -44,8 +100,6 @@ Bug Fixes
 ......
 
 
-Release Date: ``|PypiReleaseDate|``
-
 Features
 ~~~~~~~~
 
@@ -74,8 +128,6 @@ Doc-only
 * ``Replace API server's direct Connection access workaround in BaseHook (#54083)``
 * ``Switch pre-commit to prek (#54258)``
 * ``make bundle_name not nullable (#47592)``
-
-.. Review and move the new changes to one of the sections above:
 * ``Fix Airflow 2 reference in README/index of providers (#55240)``
 
 1.27.5
@@ -347,9 +399,6 @@ Misc
 .. Below changes are excluded from the changelog. Move them to
    appropriate section above if needed. Do not delete the lines(!):
    * ``Use Python 3.9 as target version for Ruff & Black rules (#44298)``
-
-
-.. Review and move the new changes to one of the sections above:
    * ``Update path of example dags in docs (#45069)``
 
 1.20.0
@@ -586,7 +635,8 @@ Misc
 * ``feat: Switch all class, functions, methods deprecations to decorators (#36876)``
 * ``Add more-itertools as dependency of common-sql (#37359)``
 
-..
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
   * ``Prepare docs 1st wave of Providers February 2024 (#37326)``
 
 1.10.1
@@ -643,7 +693,8 @@ Misc
 * ``Add '_make_serializable' method which other SQL operators can overrides when result from cursor is not JSON-serializable (#32319)``
 * ``Remove backcompat inheritance for DbApiHook (#35754)``
 
-..
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
   * ``Use reproducible builds for providers (#35693)``
   * ``Fix and reapply templates for provider documentation (#35686)``
   * ``Prepare docs 1st wave of Providers November 2023 (#35537)``
--- apache_airflow_providers_common_sql-1.28.1/docs/index.rst
+++ apache_airflow_providers_common_sql-1.30.0rc2/docs/index.rst
@@ -79,7 +79,7 @@ apache-airflow-providers-common-sql package
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
 
 
-Release: 1.
+Release: 1.30.0
 
 Provider package
 ----------------
@@ -97,16 +97,17 @@ For the minimum Airflow version supported, see ``Requirements`` below.
 Requirements
 ------------
 
-The minimum Apache Airflow version supported by this provider distribution is ``2.
+The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
 
-
-PIP package
-
-``apache-airflow``
-``
-``
-``
-
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.0``
+``sqlparse`` ``>=0.5.1``
+``more-itertools`` ``>=9.0.0``
+``methodtools`` ``>=0.4.7``
+========================================== ==================
 
 Cross provider package dependencies
 -----------------------------------
@@ -118,14 +119,15 @@ You can install such cross-provider dependencies when installing from PyPI. For
 
 .. code-block:: bash
 
-    pip install apache-airflow-providers-common-sql[
+    pip install apache-airflow-providers-common-sql[common.compat]
 
 
-
-Dependent package
-
-`apache-airflow-providers-
-
+================================================================================================================== =================
+Dependent package Extra
+================================================================================================================== =================
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
+`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
+================================================================================================================== =================
 
 Downloading official packages
 -----------------------------
@@ -133,5 +135,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-common-sql 1.
-* `The apache-airflow-providers-common-sql 1.
+* `The apache-airflow-providers-common-sql 1.30.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-common-sql 1.30.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl.sha512>`__)
--- apache_airflow_providers_common_sql-1.28.1/provider.yaml
+++ apache_airflow_providers_common_sql-1.30.0rc2/provider.yaml
@@ -22,12 +22,15 @@ description: |
   `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
 
 state: ready
-source-date-epoch:
+source-date-epoch: 1764109960
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 1.30.0
+  - 1.29.0
+  - 1.28.2
   - 1.28.1
   - 1.28.0
   - 1.27.5
--- apache_airflow_providers_common_sql-1.28.1/pyproject.toml
+++ apache_airflow_providers_common_sql-1.30.0rc2/pyproject.toml
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-common-sql"
-version = "1.
+version = "1.30.0rc2"
 description = "Provider package apache-airflow-providers-common-sql for Apache Airflow"
 readme = "README.rst"
+license = "Apache-2.0"
+license-files = ['LICENSE', 'NOTICE']
 authors = [
     {name="Apache Software Foundation", email="dev@airflow.apache.org"},
 ]
@@ -43,7 +45,6 @@ classifiers = [
     "Intended Audience :: System Administrators",
     "Framework :: Apache Airflow",
     "Framework :: Apache Airflow :: Provider",
-    "License :: OSI Approved :: Apache Software License",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -57,7 +58,8 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.
+    "apache-airflow>=2.11.0rc1",
+    "apache-airflow-providers-common-compat>=1.10.0rc1",
     "sqlparse>=0.5.1",
     "more-itertools>=9.0.0",
     # The methodtools dependency is necessary since the introduction of dialects:
@@ -86,6 +88,7 @@ dev = [
     "apache-airflow",
     "apache-airflow-task-sdk",
     "apache-airflow-devel-common",
+    "apache-airflow-providers-common-compat",
     "apache-airflow-providers-openlineage",
     # Additional devel dependencies (do not remove this line and add extra development dependencies)
     "apache-airflow-providers-common-sql[pandas]",
@@ -121,8 +124,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/__init__.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.
+__version__ = "1.30.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.
+        f"The package `apache-airflow-providers-common-sql:{__version__}` needs Apache Airflow 2.11.0+"
     )
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/dialects/dialect.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/dialects/dialect.py
@@ -26,6 +26,7 @@ from airflow.utils.log.logging_mixin import LoggingMixin
 
 if TYPE_CHECKING:
     from sqlalchemy.engine import Inspector
+    from sqlalchemy.engine.interfaces import ReflectedColumn
 
 T = TypeVar("T")
 
@@ -85,16 +86,13 @@ class Dialect(LoggingMixin):
             return self.escape_word_format.format(word)
         return word
 
-    def unescape_word(self, word: str
+    def unescape_word(self, word: str) -> str:
         """
         Remove escape characters from each part of a dotted identifier (e.g., schema.table).
 
         :param word: Escaped schema, table, or column name, potentially with multiple segments.
        :return: The word without escaped characters.
         """
-        if not word:
-            return word
-
         escape_char_start = self.escape_word_format[0]
         escape_char_end = self.escape_word_format[-1]
 
@@ -112,20 +110,19 @@ class Dialect(LoggingMixin):
 
     @lru_cache(maxsize=None)
     def get_column_names(
-        self,
+        self,
+        table: str,
+        schema: str | None = None,
+        predicate: Callable[[T | ReflectedColumn], bool] = lambda column: True,
     ) -> list[str] | None:
         if schema is None:
             table, schema = self.extract_schema_from_table(table)
-
-
-
-
-
-
-                schema=self.unescape_word(schema) if schema else None,
-            ),
-        )
-        )
+        table_name = self.unescape_word(table)
+        schema = self.unescape_word(schema) if schema else None
+        column_names = []
+        for column in self.inspector.get_columns(table_name=table_name, schema=schema):
+            if predicate(column):
+                column_names.append(column["name"])
         self.log.debug("Column names for table '%s': %s", table, column_names)
         return column_names
 
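For context on the ``get_column_names`` rewrite above: the method now filters the columns reflected by the SQLAlchemy inspector through an optional ``predicate``. A hedged usage sketch follows; the ``hook`` object and the ``nullable`` reflection key are illustrative assumptions, not values taken from this diff:

from airflow.providers.common.sql.dialects.dialect import Dialect

# `hook` is assumed to be some DbApiHook subclass with a working SQLAlchemy inspector.
dialect = Dialect(hook)

# All column names; the schema part is split off the dotted table name.
all_columns = dialect.get_column_names("HR.EMPLOYEES")

# Keep only columns whose reflected metadata matches a predicate; "nullable" is a
# standard SQLAlchemy reflection field, used here purely as an example filter.
nullable_columns = dialect.get_column_names(
    "EMPLOYEES", schema="HR", predicate=lambda column: column.get("nullable", True)
)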
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/hooks/sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/hooks/sql.py
@@ -39,9 +39,9 @@ from airflow.exceptions import (
     AirflowOptionalProviderFeatureException,
     AirflowProviderDeprecationWarning,
 )
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers.common.sql.dialects.dialect import Dialect
 from airflow.providers.common.sql.hooks import handlers
-from airflow.providers.common.sql.version_compat import BaseHook
 from airflow.utils.module_loading import import_string
 
 if TYPE_CHECKING:
@@ -49,14 +49,10 @@ if TYPE_CHECKING:
     from polars import DataFrame as PolarsDataFrame
     from sqlalchemy.engine import URL, Engine, Inspector
 
+    from airflow.providers.common.compat.sdk import Connection
     from airflow.providers.openlineage.extractors import OperatorLineage
     from airflow.providers.openlineage.sqlparser import DatabaseInfo
 
-    try:
-        from airflow.sdk import Connection
-    except ImportError:
-        from airflow.models.connection import Connection  # type: ignore[assignment]
-
 
 T = TypeVar("T")
 SQL_PLACEHOLDERS = frozenset({"%s", "?"})
@@ -316,7 +312,7 @@ class DbApiHook(BaseHook):
             engine_kwargs = {}
 
         try:
-            url = self.sqlalchemy_url
+            url: URL | str = self.sqlalchemy_url
         except NotImplementedError:
             url = self.get_uri()
 
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/hooks/sql.pyi
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/hooks/sql.pyi
@@ -109,7 +109,7 @@
     ) -> PandasDataFrame: ...
     def get_pandas_df_by_chunks(
         self, sql, parameters: list | tuple | Mapping[str, Any] | None = None, *, chunksize: int, **kwargs
-    ) -> Generator[PandasDataFrame
+    ) -> Generator[PandasDataFrame]: ...
     def get_records(
         self, sql: str | list[str], parameters: Iterable | Mapping[str, Any] | None = None
     ) -> Any: ...
@@ -143,7 +143,7 @@ class DbApiHook(BaseHook):
         chunksize: int,
         df_type: Literal["pandas"] = "pandas",
         **kwargs,
-    ) -> Generator[PandasDataFrame
+    ) -> Generator[PandasDataFrame]: ...
     @overload
     def get_df_by_chunks(
         self,
@@ -153,7 +153,7 @@ class DbApiHook(BaseHook):
         chunksize: int,
         df_type: Literal["polars"],
         **kwargs,
-    ) -> Generator[PolarsDataFrame
+    ) -> Generator[PolarsDataFrame]: ...
     @staticmethod
     def strip_sql_string(sql: str) -> str: ...
     @staticmethod
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/operators/generic_transfer.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/operators/generic_transfer.py
@@ -22,18 +22,14 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
-from airflow.providers.common.sql.version_compat import BaseHook, BaseOperator
 
 if TYPE_CHECKING:
     import jinja2
 
-
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class GenericTransfer(BaseOperator):
@@ -79,7 +75,7 @@
     def __init__(
         self,
         *,
-        sql: str,
+        sql: str | list[str],
         destination_table: str,
         source_conn_id: str,
         source_hook_params: dict | None = None,
@@ -160,13 +156,19 @@
                 method_name=self.execute_complete.__name__,
             )
         else:
+            if isinstance(self.sql, str):
+                self.sql = [self.sql]
+
             self.log.info("Extracting data from %s", self.source_conn_id)
-
+            for sql in self.sql:
+                self.log.info("Executing: \n %s", sql)
 
-
+                results = self.source_hook.get_records(sql)
 
-
-
+                self.log.info("Inserting rows into %s", self.destination_conn_id)
+                self.destination_hook.insert_rows(
+                    table=self.destination_table, rows=results, **self.insert_args
+                )
 
     def execute_complete(
         self,
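Note on the change above: ``sql`` now accepts either a single statement or a list of statements; each statement is read with ``get_records()`` and its rows are inserted into the destination table. A minimal usage sketch follows; the task id, connection ids, table name, and ``insert_args`` value are illustrative placeholders, not values taken from this package:

from airflow.providers.common.sql.operators.generic_transfer import GenericTransfer

# Hypothetical DAG task: copy the result of two SELECTs into one destination table.
transfer = GenericTransfer(
    task_id="copy_hr_tables",                       # placeholder task id
    source_conn_id="my_source_conn_id",             # placeholder source connection
    destination_conn_id="my_destination_conn_id",   # placeholder destination connection
    sql=[
        "SELECT * FROM HR.EMPLOYEES",
        "SELECT * FROM HR.PEOPLE",
    ],
    destination_table="NEW_HR.EMPLOYEES",
    insert_args={"commit_every": 1000},             # optional; forwarded to insert_rows()
)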
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/operators/sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/operators/sql.py
@@ -24,18 +24,23 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs
 
 from airflow import XComArg
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException
 from airflow.models import SkipMixin
+from airflow.providers.common.compat.sdk import (
+    AirflowFailException,
+    AirflowSkipException,
+    BaseHook,
+    BaseOperator,
+)
 from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, return_single_query_results
 from airflow.providers.common.sql.hooks.sql import DbApiHook
-from airflow.providers.common.sql.version_compat import BaseHook, BaseOperator
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
     import jinja2
 
+    from airflow.providers.common.compat.sdk import Context
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
 
 
 def _convert_to_float_if_possible(s: str) -> float | str:
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/sensors/sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/sensors/sql.py
@@ -21,11 +21,11 @@ from operator import itemgetter
 from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseHook, BaseSensorOperator
 from airflow.providers.common.sql.hooks.sql import DbApiHook
-from airflow.providers.common.sql.version_compat import BaseHook, BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.
+    from airflow.providers.common.compat.sdk import Context
 
 
 class SqlSensor(BaseSensorOperator):
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/sensors/sql.pyi
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/sensors/sql.pyi
@@ -37,13 +37,7 @@ from typing import Any
 
 from _typeshed import Incomplete as Incomplete
 
-from airflow.providers.common.
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.utils.context import Context as Context
 
 class SqlSensor(BaseSensorOperator):
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/triggers/sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/triggers/sql.py
@@ -20,8 +20,8 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers.common.sql.hooks.sql import DbApiHook
-from airflow.providers.common.sql.version_compat import BaseHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
 if TYPE_CHECKING:
--- apache_airflow_providers_common_sql-1.28.1/src/airflow/providers/common/sql/version_compat.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/src/airflow/providers/common/sql/version_compat.py
@@ -35,21 +35,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 
-if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseHook
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseSensorOperator
-else:
-    from airflow.models import BaseOperator
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
-    "BaseOperator",
-    "BaseSensorOperator",
 ]
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/dialects/test_dialect.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/dialects/test_dialect.py
@@ -57,7 +57,6 @@ class TestDialect:
     def test_unescape_word_with_different_format(self):
         self.test_db_hook.escape_word_format = "[{}]"
         dialect = Dialect(self.test_db_hook)
-        assert not dialect.unescape_word(None)
         assert dialect.unescape_word("table") == "table"
         assert dialect.unescape_word("t@ble") == "t@ble"
         assert dialect.unescape_word("table_name") == "table_name"
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/hooks/test_dbapi.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/hooks/test_dbapi.py
@@ -19,17 +19,16 @@ from __future__ import annotations
 
 import json
 import logging
-import logging.config
 from unittest import mock
 
 import pytest
 from pyodbc import Cursor
 
 from airflow.models import Connection
+from airflow.providers.common.compat.sdk import BaseHook
 from airflow.providers.common.sql.dialects.dialect import Dialect
 from airflow.providers.common.sql.hooks.handlers import fetch_all_handler, fetch_one_handler
 from airflow.providers.common.sql.hooks.sql import DbApiHook
-from airflow.providers.common.sql.version_compat import BaseHook
 
 
 class DbApiHookInProvider(DbApiHook):
@@ -577,9 +576,8 @@ class TestDbApiHook:
         assert result == [obj, obj]
 
     def test_run_no_queries(self):
-        with pytest.raises(ValueError
+        with pytest.raises(ValueError, match="List of SQL statements is empty"):
             self.db_hook.run(sql=[])
-        assert err.value.args[0] == "List of SQL statements is empty"
 
     def test_run_and_log_db_messages(self):
         statement = "SQL"
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/hooks/test_sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/hooks/test_sql.py
@@ -20,7 +20,6 @@ from __future__ import annotations
 
 import inspect
 import logging
-import logging.config
 from unittest.mock import MagicMock
 
 import pandas as pd
@@ -70,8 +69,16 @@ index = 0
 
 @pytest.mark.db_test
 @pytest.mark.parametrize(
-
-
+    (
+        "return_last",
+        "split_statements",
+        "sql",
+        "cursor_calls",
+        "cursor_descriptions",
+        "cursor_results",
+        "hook_descriptions",
+        "hook_results",
+    ),
     [
         pytest.param(
             True,
@@ -239,9 +246,8 @@ class TestDbApiHook:
     )
     def test_no_query(self, empty_statement):
         dbapi_hook = mock_db_hook(DbApiHook)
-        with pytest.raises(ValueError
+        with pytest.raises(ValueError, match="List of SQL statements is empty"):
             dbapi_hook.run(sql=empty_statement)
-        assert err.value.args[0] == "List of SQL statements is empty"
 
     @pytest.mark.db_test
     def test_placeholder_config_from_extra(self):
@@ -311,7 +317,7 @@ class TestDbApiHook:
 
     @pytest.mark.db_test
     @pytest.mark.parametrize(
-        "df_type, expected_type",
+        ("df_type", "expected_type"),
         [
             ("test_default_df_type", pd.DataFrame),
             ("pandas", pd.DataFrame),
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/hooks/test_sqlparse.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/hooks/test_sqlparse.py
@@ -22,7 +22,7 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook
 
 
 @pytest.mark.parametrize(
-    "line,parsed_statements",
+    ("line", "parsed_statements"),
     [
         ("SELECT * FROM table", ["SELECT * FROM table"]),
         ("SELECT * FROM table;", ["SELECT * FROM table;"]),
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/operators/test_generic_transfer.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/operators/test_generic_transfer.py
@@ -289,6 +289,38 @@ class TestGenericTransfer:
             **{"rows": [[1, 2], [11, 12], [3, 4], [13, 14], [3, 4], [13, 14]], "table": "NEW_HR.EMPLOYEES"},
         }
 
+    def test_non_paginated_read_for_multiple_sql_statements(self):
+        with mock.patch(f"{BASEHOOK_PATCH_PATH}.get_connection", side_effect=self.get_connection):
+            with mock.patch(f"{BASEHOOK_PATCH_PATH}.get_hook", side_effect=self.get_hook):
+                operator = GenericTransfer(
+                    task_id="transfer_table",
+                    source_conn_id="my_source_conn_id",
+                    destination_conn_id="my_destination_conn_id",
+                    sql=["SELECT * FROM HR.EMPLOYEES", "SELECT * FROM HR.PEOPLE"],
+                    destination_table="NEW_HR.EMPLOYEES",
+                    insert_args=INSERT_ARGS,
+                    execution_timeout=timedelta(hours=1),
+                )
+
+                operator.execute(context=mock_context(task=operator))
+
+                assert self.mocked_source_hook.get_records.call_count == 2
+                assert [call.args[0] for call in self.mocked_source_hook.get_records.call_args_list] == [
+                    "SELECT * FROM HR.EMPLOYEES",
+                    "SELECT * FROM HR.PEOPLE",
+                ]
+                assert self.mocked_destination_hook.insert_rows.call_count == 2
+                assert self.mocked_destination_hook.insert_rows.call_args_list[0].kwargs == {
+                    **INSERT_ARGS,
+                    "rows": [[1, 2], [11, 12], [3, 4], [13, 14], [3, 4], [13, 14]],
+                    "table": "NEW_HR.EMPLOYEES",
+                }
+                assert self.mocked_destination_hook.insert_rows.call_args_list[1].kwargs == {
+                    **INSERT_ARGS,
+                    "rows": [[1, 2], [11, 12], [3, 4], [13, 14], [3, 4], [13, 14]],
+                    "table": "NEW_HR.EMPLOYEES",
+                }
+
     def test_paginated_read(self):
         """
         This unit test is based on the example described in the medium article:
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/operators/test_sql.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/operators/test_sql.py
@@ -362,7 +362,7 @@ class TestColumnCheckOperator:
             ("X", "min", -1),
             ("X", "max", 20),
         ]
-        operator =
+        operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
         with pytest.raises(AirflowException):
             operator.execute(context=MagicMock())
 
@@ -522,7 +522,7 @@ class TestTableCheckOperator:
         return operator
 
     @pytest.mark.parametrize(
-
+        "conn_id",
         [
             pytest.param("postgres_default", marks=[pytest.mark.backend("postgres")]),
             pytest.param("mysql_default", marks=[pytest.mark.backend("mysql")]),
@@ -559,7 +559,7 @@ class TestTableCheckOperator:
         hook.run(["DROP TABLE employees"])
 
     @pytest.mark.parametrize(
-
+        "conn_id",
         [
             pytest.param("postgres_default", marks=[pytest.mark.backend("postgres")]),
             pytest.param("mysql_default", marks=[pytest.mark.backend("mysql")]),
--- apache_airflow_providers_common_sql-1.28.1/tests/unit/common/sql/operators/test_sql_execute.py
+++ apache_airflow_providers_common_sql-1.30.0rc2/tests/unit/common/sql/operators/test_sql_execute.py
@@ -51,7 +51,7 @@ class Row2(NamedTuple):
 
 
 @pytest.mark.parametrize(
-    "sql, return_last, split_statement, hook_results, hook_descriptions, expected_results",
+    ("sql", "return_last", "split_statement", "hook_results", "hook_descriptions", "expected_results"),
     [
         pytest.param(
             "select * from dummy",
@@ -166,7 +166,7 @@ def test_exec_success(sql, return_last, split_statement, hook_results, hook_desc
 
 
 @pytest.mark.parametrize(
-    "sql, return_last, split_statement, hook_results, hook_descriptions, expected_results",
+    ("sql", "return_last", "split_statement", "hook_results", "hook_descriptions", "expected_results"),
     [
         pytest.param(
             "select * from dummy",
@@ -288,7 +288,7 @@ def test_exec_success_with_process_output(
 
 
 @pytest.mark.parametrize(
-    "connection_port, default_port, expected_port",
+    ("connection_port", "default_port", "expected_port"),
     [(None, 4321, 4321), (1234, None, 1234), (1234, 4321, 1234)],
 )
 def test_execute_openlineage_events(connection_port, default_port, expected_port):

The remaining files listed above with +0 -0 have no content changes.