apache-airflow-providers-common-sql 1.30.0rc2__tar.gz → 1.30.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
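
A comparison like this can be reproduced directly from the registries' own metadata. Below is a minimal sketch using PyPI's public JSON API (assumes only that pypi.org is reachable; the field names are the standard ones returned by that API):

    # Sketch: fetch the published metadata of both releases and diff the
    # Requires-Dist entries. Illustrative only.
    import json
    from urllib.request import urlopen

    PACKAGE = "apache-airflow-providers-common-sql"


    def requires_dist(version: str) -> set[str]:
        """Return the Requires-Dist entries published for one release."""
        with urlopen(f"https://pypi.org/pypi/{PACKAGE}/{version}/json") as response:
            info = json.load(response)["info"]
        return set(info.get("requires_dist") or [])


    old, new = requires_dist("1.30.0rc2"), requires_dist("1.30.1")
    print("only in 1.30.0rc2:", sorted(old - new))
    print("only in 1.30.1:   ", sorted(new - old))
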
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/PKG-INFO +9 -9
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/README.rst +4 -4
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/changelog.rst +12 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/index.rst +4 -4
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/provider.yaml +2 -1
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/pyproject.toml +5 -5
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/__init__.py +1 -1
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/hooks/sql.py +2 -6
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/operators/generic_transfer.py +1 -2
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/operators/sql.py +120 -91
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/sensors/sql.py +1 -2
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/triggers/sql.py +1 -2
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/operators/test_sql.py +25 -8
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/sensors/test_sql.py +1 -1
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/LICENSE +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/NOTICE +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/.latest-doc-only-change.txt +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/commits.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/conf.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/connections.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/dataframes.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/dialects.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/installing-providers-from-sources.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/integration-logos/sql.png +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/operators.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/security.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/docs/supported-database-types.rst +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/README_API.md +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/dialects/dialect.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/dialects/dialect.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/get_provider_info.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/get_provider_info.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/hooks/handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/hooks/handlers.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/hooks/sql.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/operators/generic_transfer.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/sensors/sql.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/triggers/sql.pyi +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/src/airflow/providers/common/sql/version_compat.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/conftest.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/example_generic_transfer.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/example_sql_column_table_check.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/example_sql_execute_query.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/example_sql_insert_rows.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/system/common/sql/example_sql_threshold_check.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/dialects/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/dialects/test_dialect.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/hooks/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/hooks/test_dbapi.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/hooks/test_handlers.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/hooks/test_sql.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/hooks/test_sqlparse.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/operators/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/operators/test_generic_transfer.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/operators/test_sql_execute.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/sensors/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/triggers/__init__.py +0 -0
- {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.1}/tests/unit/common/sql/triggers/test_sql.py +0 -0
PKG-INFO (+9 -9)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-common-sql
-Version: 1.30.0rc2
+Version: 1.30.1
 Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
 Keywords: airflow-provider,common.sql,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
-Requires-Dist: apache-airflow>=2.11.
-Requires-Dist: apache-airflow-providers-common-compat>=1.10.
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1
 Requires-Dist: sqlparse>=0.5.1
 Requires-Dist: more-itertools>=9.0.0
 Requires-Dist: methodtools>=0.4.7
@@ -32,8 +32,8 @@ Requires-Dist: pandas[sql-other]>=2.1.2 ; extra == "pandas" and ( python_version
 Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
 Requires-Dist: polars>=1.26.0 ; extra == "polars"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -67,7 +67,7 @@ Provides-Extra: polars
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.30.0rc2``
+Release: ``1.30.1``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -80,7 +80,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/>`_.
 
 Installation
 ------------
@@ -98,7 +98,7 @@ Requirements
 PIP package                                 Version required
 ========================================== ==================
 ``apache-airflow``                          ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.10.
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
 ``sqlparse``                                ``>=0.5.1``
 ``more-itertools``                          ``>=9.0.0``
 ``methodtools``                             ``>=0.4.7``
@@ -136,5 +136,5 @@ Extra Dependencies
 =============== ================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/changelog.html>`_.
 
README.rst (+4 -4)
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-common-sql``
 
-Release: ``1.30.0rc2``
+Release: ``1.30.1``
 
 
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -36,7 +36,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
 are in ``airflow.providers.common.sql`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/>`_.
 
 Installation
 ------------
@@ -54,7 +54,7 @@ Requirements
 PIP package                                 Version required
 ========================================== ==================
 ``apache-airflow``                          ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.10.
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
 ``sqlparse``                                ``>=0.5.1``
 ``more-itertools``                          ``>=9.0.0``
 ``methodtools``                             ``>=0.4.7``
@@ -92,4 +92,4 @@ Extra Dependencies
 =============== ================================================================================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/changelog.html>`_.
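
The requirements tables above raise the floor on apache-airflow-providers-common-compat to 1.10.1. A small standalone sketch for checking a local environment against these floors; the floor values are copied from the table, and the packaging library (which ships alongside Airflow) is assumed to be importable:

    # Sketch: verify installed distributions against the minimum versions above.
    from importlib.metadata import PackageNotFoundError, version

    from packaging.version import Version

    FLOORS = {
        "apache-airflow": "2.11.0",
        "apache-airflow-providers-common-compat": "1.10.1",
        "sqlparse": "0.5.1",
        "more-itertools": "9.0.0",
        "methodtools": "0.4.7",
    }

    for dist, minimum in FLOORS.items():
        try:
            installed = version(dist)
        except PackageNotFoundError:
            print(f"{dist}: not installed (requires >={minimum})")
            continue
        status = "ok" if Version(installed) >= Version(minimum) else f"needs >={minimum}"
        print(f"{dist}: {installed} ({status})")
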
docs/changelog.rst (+12 -0)
@@ -25,6 +25,18 @@
 Changelog
 ---------
 
+1.30.1
+......
+
+Misc
+~~~~
+
+* ``Add backcompat for exceptions in providers (#58727)``
+* ``chore: Move OpenLineage methods to BaseSQLOperator (#58897)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
 1.30.0
 ......
 
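
Both 1.30.1 entries map onto the code hunks further down: exception classes are now pulled in through the common-compat shim, and the OpenLineage facet methods were hoisted from SQLExecuteQueryOperator up to BaseSQLOperator. A minimal sketch of the import pattern the exceptions backcompat enables (illustrative, not taken from the package; the helper function is hypothetical):

    # Sketch: provider-side code imports AirflowException from the compat shim,
    # mirroring the import moves in the hunks below, so the same code resolves
    # on Airflow 2.11+ and on task-SDK based Airflow 3 environments.
    from airflow.providers.common.compat.sdk import AirflowException


    def fail_if_empty(rows: list) -> None:
        """Hypothetical helper: raise the compat-imported exception when a query returns nothing."""
        if not rows:
            raise AirflowException("The query returned no rows")
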
docs/index.rst (+4 -4)
@@ -79,7 +79,7 @@ apache-airflow-providers-common-sql package
 `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
 
 
-Release: 1.30.0rc2
+Release: 1.30.1
 
 Provider package
 ----------------
@@ -103,7 +103,7 @@ The minimum Apache Airflow version supported by this provider distribution is ``
 PIP package                                 Version required
 ========================================== ==================
 ``apache-airflow``                          ``>=2.11.0``
-``apache-airflow-providers-common-compat`` ``>=1.10.
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
 ``sqlparse``                                ``>=0.5.1``
 ``more-itertools``                          ``>=9.0.0``
 ``methodtools``                             ``>=0.4.7``
@@ -135,5 +135,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-common-sql 1.30.
-* `The apache-airflow-providers-common-sql 1.30.
+* `The apache-airflow-providers-common-sql 1.30.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1.tar.gz.sha512>`__)
+* `The apache-airflow-providers-common-sql 1.30.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.1-py3-none-any.whl.sha512>`__)
provider.yaml (+2 -1)
@@ -22,12 +22,13 @@ description: |
     `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
 
 state: ready
-source-date-epoch:
+source-date-epoch: 1765298925
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 1.30.1
   - 1.30.0
   - 1.29.0
   - 1.28.2
pyproject.toml (+5 -5)
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-common-sql"
-version = "1.30.0rc2"
+version = "1.30.1"
 description = "Provider package apache-airflow-providers-common-sql for Apache Airflow"
 readme = "README.rst"
 license = "Apache-2.0"
@@ -58,8 +58,8 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.11.
-    "apache-airflow-providers-common-compat>=1.10.
+    "apache-airflow>=2.11.0",
+    "apache-airflow-providers-common-compat>=1.10.1",
     "sqlparse>=0.5.1",
     "more-itertools>=9.0.0",
    # The methodtools dependency is necessary since the introduction of dialects:
@@ -124,8 +124,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.
-"Changelog" = "https://airflow.
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.1/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
src/airflow/providers/common/sql/__init__.py (+1 -1)
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "1.30.0rc2"
+__version__ = "1.30.1"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.11.0"
src/airflow/providers/common/sql/hooks/sql.py (+2 -6)
@@ -34,12 +34,8 @@ from sqlalchemy.engine import make_url
 from sqlalchemy.exc import ArgumentError, NoSuchModuleError
 
 from airflow.configuration import conf
-from airflow.exceptions import (
-    AirflowException,
-    AirflowOptionalProviderFeatureException,
-    AirflowProviderDeprecationWarning,
-)
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.common.sql.dialects.dialect import Dialect
 from airflow.providers.common.sql.hooks import handlers
 from airflow.utils.module_loading import import_string
src/airflow/providers/common/sql/operators/generic_transfer.py (+1 -2)
@@ -21,8 +21,7 @@ from collections.abc import Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook, BaseOperator
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
 
src/airflow/providers/common/sql/operators/sql.py (+120 -91)
@@ -24,9 +24,9 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs
 
 from airflow import XComArg
-from airflow.exceptions import AirflowException
 from airflow.models import SkipMixin
 from airflow.providers.common.compat.sdk import (
+    AirflowException,
     AirflowFailException,
     AirflowSkipException,
     BaseHook,
@@ -213,6 +213,86 @@ class BaseSQLOperator(BaseOperator):
             raise AirflowException(exception_string)
         raise AirflowFailException(exception_string)
 
+    def get_openlineage_facets_on_start(self) -> OperatorLineage | None:
+        """Generate OpenLineage facets on start for SQL operators."""
+        try:
+            from airflow.providers.openlineage.extractors import OperatorLineage
+            from airflow.providers.openlineage.sqlparser import SQLParser
+        except ImportError:
+            self.log.debug("OpenLineage could not import required classes. Skipping.")
+            return None
+
+        sql = getattr(self, "sql", None)
+        if not sql:
+            self.log.debug("OpenLineage could not find 'sql' attribute on `%s`.", type(self).__name__)
+            return OperatorLineage()
+
+        hook = self.get_db_hook()
+        try:
+            from airflow.providers.openlineage.utils.utils import should_use_external_connection
+
+            use_external_connection = should_use_external_connection(hook)
+        except ImportError:
+            # OpenLineage provider release < 1.8.0 - we always use connection
+            use_external_connection = True
+
+        connection = hook.get_connection(getattr(hook, hook.conn_name_attr))
+        try:
+            database_info = hook.get_openlineage_database_info(connection)
+        except AttributeError:
+            self.log.debug("%s has no database info provided", hook)
+            database_info = None
+
+        if database_info is None:
+            self.log.debug("OpenLineage could not retrieve database information. Skipping.")
+            return OperatorLineage()
+
+        try:
+            sql_parser = SQLParser(
+                dialect=hook.get_openlineage_database_dialect(connection),
+                default_schema=hook.get_openlineage_default_schema(),
+            )
+        except AttributeError:
+            self.log.debug("%s failed to get database dialect", hook)
+            return None
+
+        operator_lineage = sql_parser.generate_openlineage_metadata_from_sql(
+            sql=sql,
+            hook=hook,
+            database_info=database_info,
+            database=self.database,
+            sqlalchemy_engine=hook.get_sqlalchemy_engine(),
+            use_connection=use_external_connection,
+        )
+
+        return operator_lineage
+
+    def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None:
+        """Generate OpenLineage facets when task completes."""
+        try:
+            from airflow.providers.openlineage.extractors import OperatorLineage
+        except ImportError:
+            self.log.debug("OpenLineage could not import required classes. Skipping.")
+            return None
+
+        operator_lineage = self.get_openlineage_facets_on_start() or OperatorLineage()
+        hook = self.get_db_hook()
+        try:
+            database_specific_lineage = hook.get_openlineage_database_specific_lineage(task_instance)
+        except AttributeError:
+            self.log.debug("%s has no database specific lineage provided", hook)
+            database_specific_lineage = None
+
+        if database_specific_lineage is None:
+            return operator_lineage
+
+        return OperatorLineage(
+            inputs=operator_lineage.inputs + database_specific_lineage.inputs,
+            outputs=operator_lineage.outputs + database_specific_lineage.outputs,
+            run_facets=merge_dicts(operator_lineage.run_facets, database_specific_lineage.run_facets),
+            job_facets=merge_dicts(operator_lineage.job_facets, database_specific_lineage.job_facets),
+        )
+
 
 class SQLExecuteQueryOperator(BaseSQLOperator):
     """
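
With the two facet methods now defined on BaseSQLOperator, any operator in this hierarchy that exposes a sql attribute and a database hook inherits OpenLineage coverage. A hypothetical sketch (operator name, table, and connection are placeholders; lineage is only emitted when the OpenLineage provider and a hook that reports database info are installed):

    # Sketch: a custom BaseSQLOperator subclass that gets lineage without any
    # lineage code of its own.
    from airflow.providers.common.sql.operators.sql import BaseSQLOperator


    class ArchiveRowsOperator(BaseSQLOperator):
        """Hypothetical operator that archives rows from a table."""

        def __init__(self, *, table: str, **kwargs):
            super().__init__(**kwargs)
            self.table = table
            # The inherited get_openlineage_facets_on_start() reads this attribute.
            self.sql = f"INSERT INTO {table}_archive SELECT * FROM {table}"

        def execute(self, context):
            self.get_db_hook().run(self.sql)
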
@@ -343,76 +423,6 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
         if isinstance(self.parameters, str):
             self.parameters = ast.literal_eval(self.parameters)
 
-    def get_openlineage_facets_on_start(self) -> OperatorLineage | None:
-        try:
-            from airflow.providers.openlineage.sqlparser import SQLParser
-        except ImportError:
-            return None
-
-        hook = self.get_db_hook()
-
-        try:
-            from airflow.providers.openlineage.utils.utils import should_use_external_connection
-
-            use_external_connection = should_use_external_connection(hook)
-        except ImportError:
-            # OpenLineage provider release < 1.8.0 - we always use connection
-            use_external_connection = True
-
-        connection = hook.get_connection(getattr(hook, hook.conn_name_attr))
-        try:
-            database_info = hook.get_openlineage_database_info(connection)
-        except AttributeError:
-            self.log.debug("%s has no database info provided", hook)
-            database_info = None
-
-        if database_info is None:
-            return None
-
-        try:
-            sql_parser = SQLParser(
-                dialect=hook.get_openlineage_database_dialect(connection),
-                default_schema=hook.get_openlineage_default_schema(),
-            )
-        except AttributeError:
-            self.log.debug("%s failed to get database dialect", hook)
-            return None
-
-        operator_lineage = sql_parser.generate_openlineage_metadata_from_sql(
-            sql=self.sql,
-            hook=hook,
-            database_info=database_info,
-            database=self.database,
-            sqlalchemy_engine=hook.get_sqlalchemy_engine(),
-            use_connection=use_external_connection,
-        )
-
-        return operator_lineage
-
-    def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None:
-        try:
-            from airflow.providers.openlineage.extractors import OperatorLineage
-        except ImportError:
-            return None
-
-        operator_lineage = self.get_openlineage_facets_on_start() or OperatorLineage()
-
-        hook = self.get_db_hook()
-        try:
-            database_specific_lineage = hook.get_openlineage_database_specific_lineage(task_instance)
-        except AttributeError:
-            database_specific_lineage = None
-
-        if database_specific_lineage is None:
-            return operator_lineage
-
-        return OperatorLineage(
-            inputs=operator_lineage.inputs + database_specific_lineage.inputs,
-            outputs=operator_lineage.outputs + database_specific_lineage.outputs,
-            run_facets=merge_dicts(operator_lineage.run_facets, database_specific_lineage.run_facets),
-            job_facets=merge_dicts(operator_lineage.job_facets, database_specific_lineage.job_facets),
-        )
-
 
 class SQLColumnCheckOperator(BaseSQLOperator):
     """
@@ -999,8 +1009,13 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
 
         self.sql1 = f"{sqlt}'{{{{ ds }}}}'"
         self.sql2 = f"{sqlt}'{{{{ macros.ds_add(ds, {self.days_back}) }}}}'"
+        # Save all queries as `sql` attr - similar to other sql operators (to be used by listeners).
+        self.sql: list[str] = [self.sql1, self.sql2]
 
     def execute(self, context: Context):
+        # Re-set with templated queries
+        self.sql = [self.sql1, self.sql2]
+
         hook = self.get_db_hook()
         self.log.info("Using ratio formula: %s", self.ratio_formula)
         self.log.info("Executing SQL check: %s", self.sql2)
@@ -1017,25 +1032,36 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
         reference = dict(zip(self.metrics_sorted, row2))
 
         ratios: dict[str, int | None] = {}
-
+        # Save all details about all tests to be used in error message if needed
+        all_tests_results: dict[str, dict[str, Any]] = {}
 
         for metric in self.metrics_sorted:
             cur = current[metric]
             ref = reference[metric]
             threshold = self.metrics_thresholds[metric]
+            single_metric_results = {
+                "metric": metric,
+                "current_metric": cur,
+                "past_metric": ref,
+                "threshold": threshold,
+                "ignore_zero": self.ignore_zero,
+            }
             if cur == 0 or ref == 0:
                 ratios[metric] = None
-
+                single_metric_results["ratio"] = None
+                single_metric_results["success"] = self.ignore_zero
             else:
                 ratio_metric = self.ratio_formulas[self.ratio_formula](current[metric], reference[metric])
                 ratios[metric] = ratio_metric
+                single_metric_results["ratio"] = ratio_metric
                 if ratio_metric is not None:
-
+                    single_metric_results["success"] = ratio_metric < threshold
                 else:
-
+                    single_metric_results["success"] = self.ignore_zero
 
+            all_tests_results[metric] = single_metric_results
             self.log.info(
-
+                "Current metric for %s: %s\nPast metric for %s: %s\nRatio for %s: %s\nThreshold: %s\n",
                 metric,
                 cur,
                 metric,
@@ -1045,21 +1071,24 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
                 threshold,
             )
 
-
-
+        failed_tests = [single for single in all_tests_results.values() if not single["success"]]
+        if failed_tests:
             self.log.warning(
                 "The following %s tests out of %s failed:",
                 len(failed_tests),
                 len(self.metrics_sorted),
             )
-            for
+            for single_filed_test in failed_tests:
                 self.log.warning(
                     "'%s' check failed. %s is above %s",
-
-
-
+                    single_filed_test["metric"],
+                    single_filed_test["ratio"],
+                    single_filed_test["threshold"],
                 )
-
+            failed_test_details = "; ".join(
+                f"{t['metric']}: {t}" for t in sorted(failed_tests, key=lambda x: x["metric"])
+            )
+            self._raise_exception(f"The following tests have failed:\n {failed_test_details}")
 
         self.log.info("All tests have passed")
 
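
The bookkeeping added above records, for every metric, the computed ratio and whether it stayed below its threshold, and those records now feed the failure message. A standalone restatement of that arithmetic for the max_over_min formula (not the operator's code, just the check made concrete):

    # Sketch: per-metric interval check, mirroring the logic recorded in
    # all_tests_results above (max_over_min formula).
    def interval_check(current: float, past: float, threshold: float, ignore_zero: bool = True) -> dict:
        result = {"current_metric": current, "past_metric": past, "threshold": threshold}
        if current == 0 or past == 0:
            result["ratio"] = None
            result["success"] = ignore_zero
        else:
            result["ratio"] = max(current, past) / min(current, past)
            result["success"] = result["ratio"] < threshold
        return result


    print(interval_check(1, 2, threshold=1.5))    # ratio 2.0, not below 1.5 -> success False
    print(interval_check(30, 33, threshold=1.5))  # ratio 1.1, below 1.5 -> success True
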
@@ -1206,6 +1235,8 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
         self.parameters = parameters
         self.follow_task_ids_if_true = follow_task_ids_if_true
         self.follow_task_ids_if_false = follow_task_ids_if_false
+        # Chosen branch, after evaluating condition, set during execution, to be used by listeners
+        self.follow_branch: list[str] | None = None
 
     def execute(self, context: Context):
         self.log.info(
@@ -1232,32 +1263,30 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
 
         self.log.info("Query returns %s, type '%s'", query_result, type(query_result))
 
-        follow_branch = None
         try:
             if isinstance(query_result, bool):
                 if query_result:
-                    follow_branch = self.follow_task_ids_if_true
+                    self.follow_branch = self.follow_task_ids_if_true
             elif isinstance(query_result, str):
                 # return result is not Boolean, try to convert from String to Boolean
                 if _parse_boolean(query_result):
-                    follow_branch = self.follow_task_ids_if_true
+                    self.follow_branch = self.follow_task_ids_if_true
             elif isinstance(query_result, int):
                 if bool(query_result):
-                    follow_branch = self.follow_task_ids_if_true
+                    self.follow_branch = self.follow_task_ids_if_true
             else:
                 raise AirflowException(
                     f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
                 )
 
-            if follow_branch is None:
-                follow_branch = self.follow_task_ids_if_false
+            if self.follow_branch is None:
+                self.follow_branch = self.follow_task_ids_if_false
         except ValueError:
             raise AirflowException(
                 f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
             )
 
-
-        self.skip_all_except(context["ti"], follow_branch)
+        self.skip_all_except(context["ti"], self.follow_branch)
 
 
 class SQLInsertRowsOperator(BaseSQLOperator):
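
For context, this is how the branching above is typically driven from a DAG file; after this change the chosen list of task ids is also retained on the operator's follow_branch attribute for listeners. The DAG id, connection id, query, and downstream task ids below are placeholders:

    # Sketch: branch on a boolean-ish SQL result.
    import pendulum

    from airflow import DAG
    from airflow.providers.common.sql.operators.sql import BranchSQLOperator

    with DAG(
        dag_id="branch_on_new_rows",
        start_date=pendulum.datetime(2025, 1, 1, tz="UTC"),
        schedule=None,
    ):
        check_new_rows = BranchSQLOperator(
            task_id="check_new_rows",
            conn_id="my_postgres",
            sql="SELECT COUNT(*) > 0 FROM staging.events WHERE loaded_at >= '{{ ds }}'",
            follow_task_ids_if_true=["load_events"],
            follow_task_ids_if_false=["skip_load"],
        )
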
src/airflow/providers/common/sql/sensors/sql.py (+1 -2)
@@ -20,8 +20,7 @@ from collections.abc import Callable, Mapping, Sequence
 from operator import itemgetter
 from typing import TYPE_CHECKING, Any
 
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseHook, BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook, BaseSensorOperator
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 
 if TYPE_CHECKING:
src/airflow/providers/common/sql/triggers/sql.py (+1 -2)
@@ -19,8 +19,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseHook
+from airflow.providers.common.compat.sdk import AirflowException, BaseHook
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.triggers.base import BaseTrigger, TriggerEvent
 
tests/unit/common/sql/operators/test_sql.py (+25 -8)
@@ -34,8 +34,9 @@ try:
 except ImportError:
     BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
 from airflow import DAG
-from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
+from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import Connection
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
 from airflow.providers.common.sql.operators.sql import (
     BaseSQLOperator,
@@ -939,7 +940,16 @@ class TestIntervalCheckOperator:
             ignore_zero=True,
         )
 
-        with pytest.raises(AirflowException):
+        expected_err_message = (
+            "The following tests have failed:\n "
+            "f0: {'metric': 'f0', 'current_metric': 1, 'past_metric': 2, 'threshold': 1.0,"
+            " 'ignore_zero': True, 'ratio': 2.0, 'success': False}; "
+            "f1: {'metric': 'f1', 'current_metric': 1, 'past_metric': 2, 'threshold': 1.5,"
+            " 'ignore_zero': True, 'ratio': 2.0, 'success': False}; "
+            "f2: {'metric': 'f2', 'current_metric': 1, 'past_metric': 2, 'threshold': 2.0,"
+            " 'ignore_zero': True, 'ratio': 2.0, 'success': False}"
+        )
+        with pytest.raises(AirflowException, match=expected_err_message):
             operator.execute(context=MagicMock())
 
     @mock.patch.object(SQLIntervalCheckOperator, "get_db_hook")
@@ -969,7 +979,14 @@ class TestIntervalCheckOperator:
             ignore_zero=True,
        )
 
-        with pytest.raises(AirflowException):
+        expected_err_message = (
+            "The following tests have failed:\n "
+            "f0: {'metric': 'f0', 'current_metric': 1, 'past_metric': 3, 'threshold': 0.5, "
+            "'ignore_zero': True, 'ratio': 0.6666666666666666, 'success': False}; "
+            "f1: {'metric': 'f1', 'current_metric': 1, 'past_metric': 3, 'threshold': 0.6, "
+            "'ignore_zero': True, 'ratio': 0.6666666666666666, 'success': False}"
+        )
+        with pytest.raises(AirflowException, match=expected_err_message):
             operator.execute(context=MagicMock())
 
 
@@ -1256,7 +1273,7 @@ class TestSqlBranch:
         mock_get_records.return_value = 1
 
         if AIRFLOW_V_3_0_1:
-            from airflow.exceptions import DownstreamTasksSkipped
+            from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
             with pytest.raises(DownstreamTasksSkipped) as exc_info:
                 branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
@@ -1305,7 +1322,7 @@ class TestSqlBranch:
         mock_get_records.return_value = true_value
 
         if AIRFLOW_V_3_0_1:
-            from airflow.exceptions import DownstreamTasksSkipped
+            from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
             with pytest.raises(DownstreamTasksSkipped) as exc_info:
                 branch_op.execute({})
@@ -1353,7 +1370,7 @@ class TestSqlBranch:
         mock_get_records.return_value = false_value
 
         if AIRFLOW_V_3_0_1:
-            from airflow.exceptions import DownstreamTasksSkipped
+            from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
             with pytest.raises(DownstreamTasksSkipped) as exc_info:
                 branch_op.execute({})
@@ -1412,7 +1429,7 @@ class TestSqlBranch:
         mock_get_records.return_value = [["1"]]
 
         if AIRFLOW_V_3_0_1:
-            from airflow.exceptions import DownstreamTasksSkipped
+            from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
             with pytest.raises(DownstreamTasksSkipped) as exc_info:
                 branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
@@ -1530,7 +1547,7 @@ class TestSqlBranch:
         mock_get_records.return_value = [false_value]
 
         if AIRFLOW_V_3_0_1:
-            from airflow.exceptions import DownstreamTasksSkipped
+            from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
 
             with pytest.raises(DownstreamTasksSkipped) as exc_info:
                 branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tests/unit/common/sql/sensors/test_sql.py (+1 -1)
@@ -21,8 +21,8 @@ from unittest import mock
 
 import pytest
 
-from airflow.exceptions import AirflowException
 from airflow.models.dag import DAG
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.providers.common.sql.sensors.sql import SqlSensor
 from airflow.utils.timezone import datetime