apache-airflow-providers-common-sql 1.30.0rc2__tar.gz → 1.30.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/PKG-INFO +9 -9
  2. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/README.rst +4 -4
  3. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/changelog.rst +30 -0
  4. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/index.rst +4 -4
  5. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/operators.rst +23 -0
  6. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/provider.yaml +3 -1
  7. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/pyproject.toml +5 -5
  8. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/__init__.py +1 -1
  9. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/hooks/sql.py +4 -8
  10. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/operators/generic_transfer.py +1 -2
  11. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/operators/generic_transfer.pyi +1 -1
  12. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/operators/sql.py +123 -91
  13. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/sensors/sql.py +1 -2
  14. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/sensors/sql.pyi +1 -1
  15. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/triggers/sql.py +1 -2
  16. apache_airflow_providers_common_sql-1.30.2/tests/system/common/sql/example_sql_value_check.py +62 -0
  17. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/operators/test_sql.py +44 -18
  18. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/sensors/test_sql.py +1 -1
  19. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/LICENSE +0 -0
  20. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/NOTICE +0 -0
  21. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/.latest-doc-only-change.txt +0 -0
  22. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/commits.rst +0 -0
  23. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/conf.py +0 -0
  24. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/connections.rst +0 -0
  25. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/dataframes.rst +0 -0
  26. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/dialects.rst +0 -0
  27. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/installing-providers-from-sources.rst +0 -0
  28. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/integration-logos/sql.png +0 -0
  29. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/security.rst +0 -0
  30. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/docs/supported-database-types.rst +0 -0
  31. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/__init__.py +0 -0
  32. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/__init__.py +0 -0
  33. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/__init__.py +0 -0
  34. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/README_API.md +0 -0
  35. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/dialects/__init__.py +0 -0
  36. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/dialects/dialect.py +0 -0
  37. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/dialects/dialect.pyi +0 -0
  38. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/doc/adr/0001-record-architecture-decisions.md +0 -0
  39. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/doc/adr/0002-return-common-data-structure-from-dbapihook-derived-hooks.md +0 -0
  40. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/doc/adr/0003-introduce-notion-of-dialects-in-dbapihook.md +0 -0
  41. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/get_provider_info.py +0 -0
  42. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/get_provider_info.pyi +0 -0
  43. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/hooks/__init__.py +0 -0
  44. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/hooks/handlers.py +0 -0
  45. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/hooks/handlers.pyi +0 -0
  46. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/hooks/sql.pyi +0 -0
  47. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/operators/__init__.py +0 -0
  48. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/sensors/__init__.py +0 -0
  49. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/triggers/__init__.py +0 -0
  50. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/triggers/sql.pyi +0 -0
  51. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/src/airflow/providers/common/sql/version_compat.py +0 -0
  52. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/conftest.py +0 -0
  53. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/__init__.py +0 -0
  54. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/__init__.py +0 -0
  55. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/__init__.py +0 -0
  56. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/example_generic_transfer.py +0 -0
  57. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/example_sql_column_table_check.py +0 -0
  58. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/example_sql_execute_query.py +0 -0
  59. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/example_sql_insert_rows.py +0 -0
  60. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/system/common/sql/example_sql_threshold_check.py +0 -0
  61. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/__init__.py +0 -0
  62. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/__init__.py +0 -0
  63. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/__init__.py +0 -0
  64. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/dialects/__init__.py +0 -0
  65. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/dialects/test_dialect.py +0 -0
  66. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/hooks/__init__.py +0 -0
  67. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/hooks/test_dbapi.py +0 -0
  68. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/hooks/test_handlers.py +0 -0
  69. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/hooks/test_sql.py +0 -0
  70. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/hooks/test_sqlparse.py +0 -0
  71. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/operators/__init__.py +0 -0
  72. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/operators/test_generic_transfer.py +0 -0
  73. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/operators/test_sql_execute.py +0 -0
  74. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/sensors/__init__.py +0 -0
  75. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/triggers/__init__.py +0 -0
  76. {apache_airflow_providers_common_sql-1.30.0rc2 → apache_airflow_providers_common_sql-1.30.2}/tests/unit/common/sql/triggers/test_sql.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: apache-airflow-providers-common-sql
3
- Version: 1.30.0rc2
3
+ Version: 1.30.2
4
4
  Summary: Provider package apache-airflow-providers-common-sql for Apache Airflow
5
5
  Keywords: airflow-provider,common.sql,airflow,integration
6
6
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,8 +22,8 @@ Classifier: Programming Language :: Python :: 3.13
22
22
  Classifier: Topic :: System :: Monitoring
23
23
  License-File: LICENSE
24
24
  License-File: NOTICE
25
- Requires-Dist: apache-airflow>=2.11.0rc1
26
- Requires-Dist: apache-airflow-providers-common-compat>=1.10.0rc1
25
+ Requires-Dist: apache-airflow>=2.11.0
26
+ Requires-Dist: apache-airflow-providers-common-compat>=1.11.0
27
27
  Requires-Dist: sqlparse>=0.5.1
28
28
  Requires-Dist: more-itertools>=9.0.0
29
29
  Requires-Dist: methodtools>=0.4.7
@@ -32,8 +32,8 @@ Requires-Dist: pandas[sql-other]>=2.1.2 ; extra == "pandas" and ( python_version
32
32
  Requires-Dist: pandas>=2.2.3 ; extra == "pandas" and ( python_version >="3.13")
33
33
  Requires-Dist: polars>=1.26.0 ; extra == "polars"
34
34
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
35
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html
36
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0
35
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/changelog.html
36
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2
37
37
  Project-URL: Mastodon, https://fosstodon.org/@airflow
38
38
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
39
39
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -67,7 +67,7 @@ Provides-Extra: polars
67
67
 
68
68
  Package ``apache-airflow-providers-common-sql``
69
69
 
70
- Release: ``1.30.0``
70
+ Release: ``1.30.2``
71
71
 
72
72
 
73
73
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -80,7 +80,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
80
80
  are in ``airflow.providers.common.sql`` python package.
81
81
 
82
82
  You can find package information and changelog for the provider
83
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/>`_.
83
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/>`_.
84
84
 
85
85
  Installation
86
86
  ------------
@@ -98,7 +98,7 @@ Requirements
98
98
  PIP package Version required
99
99
  ========================================== ==================
100
100
  ``apache-airflow`` ``>=2.11.0``
101
- ``apache-airflow-providers-common-compat`` ``>=1.10.0``
101
+ ``apache-airflow-providers-common-compat`` ``>=1.11.0``
102
102
  ``sqlparse`` ``>=0.5.1``
103
103
  ``more-itertools`` ``>=9.0.0``
104
104
  ``methodtools`` ``>=0.4.7``
@@ -136,5 +136,5 @@ Extra Dependencies
136
136
  =============== ================================================================================================
137
137
 
138
138
  The changelog for the provider package can be found in the
139
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html>`_.
139
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/changelog.html>`_.
140
140
 
@@ -23,7 +23,7 @@
23
23
 
24
24
  Package ``apache-airflow-providers-common-sql``
25
25
 
26
- Release: ``1.30.0``
26
+ Release: ``1.30.2``
27
27
 
28
28
 
29
29
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
@@ -36,7 +36,7 @@ This is a provider package for ``common.sql`` provider. All classes for this pro
36
36
  are in ``airflow.providers.common.sql`` python package.
37
37
 
38
38
  You can find package information and changelog for the provider
39
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/>`_.
39
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/>`_.
40
40
 
41
41
  Installation
42
42
  ------------
@@ -54,7 +54,7 @@ Requirements
54
54
  PIP package Version required
55
55
  ========================================== ==================
56
56
  ``apache-airflow`` ``>=2.11.0``
57
- ``apache-airflow-providers-common-compat`` ``>=1.10.0``
57
+ ``apache-airflow-providers-common-compat`` ``>=1.11.0``
58
58
  ``sqlparse`` ``>=0.5.1``
59
59
  ``more-itertools`` ``>=9.0.0``
60
60
  ``methodtools`` ``>=0.4.7``
@@ -92,4 +92,4 @@ Extra Dependencies
92
92
  =============== ================================================================================================
93
93
 
94
94
  The changelog for the provider package can be found in the
95
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html>`_.
95
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/changelog.html>`_.
@@ -25,6 +25,36 @@
25
25
  Changelog
26
26
  ---------
27
27
 
28
+ 1.30.2
29
+ ......
30
+
31
+ Misc
32
+ ~~~~
33
+
34
+ * ``Remove top-level SDK reference in Core (#59817)``
35
+ * ``Extract shared "module_loading" distribution (#59139)``
36
+
37
+ Doc-only
38
+ ~~~~~~~~
39
+
40
+ * ``Add missing fields to SQLValueCheckOperator docstring (#59409)``
41
+
42
+ .. Below changes are excluded from the changelog. Move them to
43
+ appropriate section above if needed. Do not delete the lines(!):
44
+ * ``TaskInstance unused method cleanup (#59835)``
45
+
46
+ 1.30.1
47
+ ......
48
+
49
+ Misc
50
+ ~~~~
51
+
52
+ * ``Add backcompat for exceptions in providers (#58727)``
53
+ * ``chore: Move OpenLineage methods to BaseSQLOperator (#58897)``
54
+
55
+ .. Below changes are excluded from the changelog. Move them to
56
+ appropriate section above if needed. Do not delete the lines(!):
57
+
28
58
  1.30.0
29
59
  ......
30
60
 
@@ -79,7 +79,7 @@ apache-airflow-providers-common-sql package
79
79
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
80
80
 
81
81
 
82
- Release: 1.30.0
82
+ Release: 1.30.2
83
83
 
84
84
  Provider package
85
85
  ----------------
@@ -103,7 +103,7 @@ The minimum Apache Airflow version supported by this provider distribution is ``
103
103
  PIP package Version required
104
104
  ========================================== ==================
105
105
  ``apache-airflow`` ``>=2.11.0``
106
- ``apache-airflow-providers-common-compat`` ``>=1.10.0``
106
+ ``apache-airflow-providers-common-compat`` ``>=1.11.0``
107
107
  ``sqlparse`` ``>=0.5.1``
108
108
  ``more-itertools`` ``>=9.0.0``
109
109
  ``methodtools`` ``>=0.4.7``
@@ -135,5 +135,5 @@ Downloading official packages
135
135
  You can download officially released packages and verify their checksums and signatures from the
136
136
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
137
137
 
138
- * `The apache-airflow-providers-common-sql 1.30.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0.tar.gz.sha512>`__)
139
- * `The apache-airflow-providers-common-sql 1.30.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.0-py3-none-any.whl.sha512>`__)
138
+ * `The apache-airflow-providers-common-sql 1.30.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2.tar.gz.sha512>`__)
139
+ * `The apache-airflow-providers-common-sql 1.30.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_common_sql-1.30.2-py3-none-any.whl.sha512>`__)
@@ -148,6 +148,29 @@ The below example demonstrates how to instantiate the SQLTableCheckOperator task
148
148
  :end-before: [END howto_operator_sql_table_check]
149
149
 
150
150
 
151
+ .. _howto/operator:SQLValueCheckOperator:
152
+
153
+ Check value against expected
154
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
155
+
156
+ Use the :class:`~airflow.providers.common.sql.operators.sql.SQLValueCheckOperator` to compare a SQL query result
157
+ against an expected value, with some optionally specified tolerance for numeric results.
158
+ The parameters for this operator are:
159
+
160
+ - ``sql`` - the sql query to be executed, as a templated string.
161
+ - ``pass_value`` - the expected value to compare the query result against.
162
+ - ``tolerance`` (optional) - numerical tolerance for comparisons involving numeric values.
163
+ - ``conn_id`` (optional) - the connection ID used to connect to the database.
164
+ - ``database`` (optional) - name of the database which overwrites the name defined in the connection.
165
+
166
+ The below example demonstrates how to instantiate the SQLValueCheckOperator task.
167
+
168
+ .. exampleinclude:: /../tests/system/common/sql/example_sql_value_check.py
169
+ :language: python
170
+ :dedent: 4
171
+ :start-after: [START howto_operator_sql_value_check]
172
+ :end-before: [END howto_operator_sql_value_check]
173
+
151
174
  .. _howto/operator:SQLThresholdCheckOperator:
152
175
 
153
176
  Check values against a threshold
@@ -22,12 +22,14 @@ description: |
22
22
  `Common SQL Provider <https://en.wikipedia.org/wiki/SQL>`__
23
23
 
24
24
  state: ready
25
- source-date-epoch: 1764109960
25
+ source-date-epoch: 1767124274
26
26
  # Note that those versions are maintained by release manager - do not update them manually
27
27
  # with the exception of case where other provider in sources has >= new provider version.
28
28
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
29
29
  # to be done in the same PR
30
30
  versions:
31
+ - 1.30.2
32
+ - 1.30.1
31
33
  - 1.30.0
32
34
  - 1.29.0
33
35
  - 1.28.2
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-common-sql"
28
- version = "1.30.0rc2"
28
+ version = "1.30.2"
29
29
  description = "Provider package apache-airflow-providers-common-sql for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  license = "Apache-2.0"
@@ -58,8 +58,8 @@ requires-python = ">=3.10"
58
58
  # Make sure to run ``prek update-providers-dependencies --all-files``
59
59
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
60
60
  dependencies = [
61
- "apache-airflow>=2.11.0rc1",
62
- "apache-airflow-providers-common-compat>=1.10.0rc1",
61
+ "apache-airflow>=2.11.0",
62
+ "apache-airflow-providers-common-compat>=1.11.0",
63
63
  "sqlparse>=0.5.1",
64
64
  "more-itertools>=9.0.0",
65
65
  # The methodtools dependency is necessary since the introduction of dialects:
@@ -124,8 +124,8 @@ apache-airflow-providers-common-sql = {workspace = true}
124
124
  apache-airflow-providers-standard = {workspace = true}
125
125
 
126
126
  [project.urls]
127
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0"
128
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-common-sql/1.30.0/changelog.html"
127
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2"
128
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-common-sql/1.30.2/changelog.html"
129
129
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
130
130
  "Source Code" = "https://github.com/apache/airflow"
131
131
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "1.30.0"
32
+ __version__ = "1.30.2"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.11.0"
@@ -34,15 +34,11 @@ from sqlalchemy.engine import make_url
34
34
  from sqlalchemy.exc import ArgumentError, NoSuchModuleError
35
35
 
36
36
  from airflow.configuration import conf
37
- from airflow.exceptions import (
38
- AirflowException,
39
- AirflowOptionalProviderFeatureException,
40
- AirflowProviderDeprecationWarning,
41
- )
42
- from airflow.providers.common.compat.sdk import BaseHook
37
+ from airflow.exceptions import AirflowOptionalProviderFeatureException, AirflowProviderDeprecationWarning
38
+ from airflow.providers.common.compat.module_loading import import_string
39
+ from airflow.providers.common.compat.sdk import AirflowException, BaseHook
43
40
  from airflow.providers.common.sql.dialects.dialect import Dialect
44
41
  from airflow.providers.common.sql.hooks import handlers
45
- from airflow.utils.module_loading import import_string
46
42
 
47
43
  if TYPE_CHECKING:
48
44
  from pandas import DataFrame as PandasDataFrame
@@ -337,7 +333,7 @@ class DbApiHook(BaseHook):
337
333
 
338
334
  @cached_property
339
335
  def dialect(self) -> Dialect:
340
- from airflow.utils.module_loading import import_string
336
+ from airflow.providers.common.compat.module_loading import import_string
341
337
 
342
338
  dialect_info = self._dialects.get(self.dialect_name)
343
339
 
@@ -21,8 +21,7 @@ from collections.abc import Sequence
21
21
  from functools import cached_property
22
22
  from typing import TYPE_CHECKING, Any
23
23
 
24
- from airflow.exceptions import AirflowException
25
- from airflow.providers.common.compat.sdk import BaseHook, BaseOperator
24
+ from airflow.providers.common.compat.sdk import AirflowException, BaseHook, BaseOperator
26
25
  from airflow.providers.common.sql.hooks.sql import DbApiHook
27
26
  from airflow.providers.common.sql.triggers.sql import SQLExecuteQueryTrigger
28
27
 
@@ -38,7 +38,7 @@ from _typeshed import Incomplete as Incomplete
38
38
 
39
39
  from airflow.models import BaseOperator
40
40
  from airflow.providers.common.sql.hooks.sql import DbApiHook as DbApiHook
41
- from airflow.utils.context import Context as Context
41
+ from airflow.sdk import Context
42
42
 
43
43
  class GenericTransfer(BaseOperator):
44
44
  template_fields: Sequence[str]
@@ -24,9 +24,9 @@ from functools import cached_property
24
24
  from typing import TYPE_CHECKING, Any, ClassVar, NoReturn, SupportsAbs
25
25
 
26
26
  from airflow import XComArg
27
- from airflow.exceptions import AirflowException
28
27
  from airflow.models import SkipMixin
29
28
  from airflow.providers.common.compat.sdk import (
29
+ AirflowException,
30
30
  AirflowFailException,
31
31
  AirflowSkipException,
32
32
  BaseHook,
@@ -213,6 +213,86 @@ class BaseSQLOperator(BaseOperator):
213
213
  raise AirflowException(exception_string)
214
214
  raise AirflowFailException(exception_string)
215
215
 
216
+ def get_openlineage_facets_on_start(self) -> OperatorLineage | None:
217
+ """Generate OpenLineage facets on start for SQL operators."""
218
+ try:
219
+ from airflow.providers.openlineage.extractors import OperatorLineage
220
+ from airflow.providers.openlineage.sqlparser import SQLParser
221
+ except ImportError:
222
+ self.log.debug("OpenLineage could not import required classes. Skipping.")
223
+ return None
224
+
225
+ sql = getattr(self, "sql", None)
226
+ if not sql:
227
+ self.log.debug("OpenLineage could not find 'sql' attribute on `%s`.", type(self).__name__)
228
+ return OperatorLineage()
229
+
230
+ hook = self.get_db_hook()
231
+ try:
232
+ from airflow.providers.openlineage.utils.utils import should_use_external_connection
233
+
234
+ use_external_connection = should_use_external_connection(hook)
235
+ except ImportError:
236
+ # OpenLineage provider release < 1.8.0 - we always use connection
237
+ use_external_connection = True
238
+
239
+ connection = hook.get_connection(getattr(hook, hook.conn_name_attr))
240
+ try:
241
+ database_info = hook.get_openlineage_database_info(connection)
242
+ except AttributeError:
243
+ self.log.debug("%s has no database info provided", hook)
244
+ database_info = None
245
+
246
+ if database_info is None:
247
+ self.log.debug("OpenLineage could not retrieve database information. Skipping.")
248
+ return OperatorLineage()
249
+
250
+ try:
251
+ sql_parser = SQLParser(
252
+ dialect=hook.get_openlineage_database_dialect(connection),
253
+ default_schema=hook.get_openlineage_default_schema(),
254
+ )
255
+ except AttributeError:
256
+ self.log.debug("%s failed to get database dialect", hook)
257
+ return None
258
+
259
+ operator_lineage = sql_parser.generate_openlineage_metadata_from_sql(
260
+ sql=sql,
261
+ hook=hook,
262
+ database_info=database_info,
263
+ database=self.database,
264
+ sqlalchemy_engine=hook.get_sqlalchemy_engine(),
265
+ use_connection=use_external_connection,
266
+ )
267
+
268
+ return operator_lineage
269
+
270
+ def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None:
271
+ """Generate OpenLineage facets when task completes."""
272
+ try:
273
+ from airflow.providers.openlineage.extractors import OperatorLineage
274
+ except ImportError:
275
+ self.log.debug("OpenLineage could not import required classes. Skipping.")
276
+ return None
277
+
278
+ operator_lineage = self.get_openlineage_facets_on_start() or OperatorLineage()
279
+ hook = self.get_db_hook()
280
+ try:
281
+ database_specific_lineage = hook.get_openlineage_database_specific_lineage(task_instance)
282
+ except AttributeError:
283
+ self.log.debug("%s has no database specific lineage provided", hook)
284
+ database_specific_lineage = None
285
+
286
+ if database_specific_lineage is None:
287
+ return operator_lineage
288
+
289
+ return OperatorLineage(
290
+ inputs=operator_lineage.inputs + database_specific_lineage.inputs,
291
+ outputs=operator_lineage.outputs + database_specific_lineage.outputs,
292
+ run_facets=merge_dicts(operator_lineage.run_facets, database_specific_lineage.run_facets),
293
+ job_facets=merge_dicts(operator_lineage.job_facets, database_specific_lineage.job_facets),
294
+ )
295
+
216
296
 
217
297
  class SQLExecuteQueryOperator(BaseSQLOperator):
218
298
  """
@@ -343,76 +423,6 @@ class SQLExecuteQueryOperator(BaseSQLOperator):
343
423
  if isinstance(self.parameters, str):
344
424
  self.parameters = ast.literal_eval(self.parameters)
345
425
 
346
- def get_openlineage_facets_on_start(self) -> OperatorLineage | None:
347
- try:
348
- from airflow.providers.openlineage.sqlparser import SQLParser
349
- except ImportError:
350
- return None
351
-
352
- hook = self.get_db_hook()
353
-
354
- try:
355
- from airflow.providers.openlineage.utils.utils import should_use_external_connection
356
-
357
- use_external_connection = should_use_external_connection(hook)
358
- except ImportError:
359
- # OpenLineage provider release < 1.8.0 - we always use connection
360
- use_external_connection = True
361
-
362
- connection = hook.get_connection(getattr(hook, hook.conn_name_attr))
363
- try:
364
- database_info = hook.get_openlineage_database_info(connection)
365
- except AttributeError:
366
- self.log.debug("%s has no database info provided", hook)
367
- database_info = None
368
-
369
- if database_info is None:
370
- return None
371
-
372
- try:
373
- sql_parser = SQLParser(
374
- dialect=hook.get_openlineage_database_dialect(connection),
375
- default_schema=hook.get_openlineage_default_schema(),
376
- )
377
- except AttributeError:
378
- self.log.debug("%s failed to get database dialect", hook)
379
- return None
380
-
381
- operator_lineage = sql_parser.generate_openlineage_metadata_from_sql(
382
- sql=self.sql,
383
- hook=hook,
384
- database_info=database_info,
385
- database=self.database,
386
- sqlalchemy_engine=hook.get_sqlalchemy_engine(),
387
- use_connection=use_external_connection,
388
- )
389
-
390
- return operator_lineage
391
-
392
- def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage | None:
393
- try:
394
- from airflow.providers.openlineage.extractors import OperatorLineage
395
- except ImportError:
396
- return None
397
-
398
- operator_lineage = self.get_openlineage_facets_on_start() or OperatorLineage()
399
-
400
- hook = self.get_db_hook()
401
- try:
402
- database_specific_lineage = hook.get_openlineage_database_specific_lineage(task_instance)
403
- except AttributeError:
404
- database_specific_lineage = None
405
-
406
- if database_specific_lineage is None:
407
- return operator_lineage
408
-
409
- return OperatorLineage(
410
- inputs=operator_lineage.inputs + database_specific_lineage.inputs,
411
- outputs=operator_lineage.outputs + database_specific_lineage.outputs,
412
- run_facets=merge_dicts(operator_lineage.run_facets, database_specific_lineage.run_facets),
413
- job_facets=merge_dicts(operator_lineage.job_facets, database_specific_lineage.job_facets),
414
- )
415
-
416
426
 
417
427
  class SQLColumnCheckOperator(BaseSQLOperator):
418
428
  """
@@ -852,6 +862,9 @@ class SQLValueCheckOperator(BaseSQLOperator):
852
862
  :param sql: the sql to be executed. (templated)
853
863
  :param conn_id: the connection ID used to connect to the database.
854
864
  :param database: name of database which overwrite the defined one in connection
865
+ :param pass_value: the value to check against
866
+ :param tolerance: (optional) the tolerance allowed to pass records within for
867
+ numeric queries
855
868
  """
856
869
 
857
870
  __mapper_args__ = {"polymorphic_identity": "SQLValueCheckOperator"}
@@ -999,8 +1012,13 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
999
1012
 
1000
1013
  self.sql1 = f"{sqlt}'{{{{ ds }}}}'"
1001
1014
  self.sql2 = f"{sqlt}'{{{{ macros.ds_add(ds, {self.days_back}) }}}}'"
1015
+ # Save all queries as `sql` attr - similar to other sql operators (to be used by listeners).
1016
+ self.sql: list[str] = [self.sql1, self.sql2]
1002
1017
 
1003
1018
  def execute(self, context: Context):
1019
+ # Re-set with templated queries
1020
+ self.sql = [self.sql1, self.sql2]
1021
+
1004
1022
  hook = self.get_db_hook()
1005
1023
  self.log.info("Using ratio formula: %s", self.ratio_formula)
1006
1024
  self.log.info("Executing SQL check: %s", self.sql2)
@@ -1017,25 +1035,36 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
1017
1035
  reference = dict(zip(self.metrics_sorted, row2))
1018
1036
 
1019
1037
  ratios: dict[str, int | None] = {}
1020
- test_results = {}
1038
+ # Save all details about all tests to be used in error message if needed
1039
+ all_tests_results: dict[str, dict[str, Any]] = {}
1021
1040
 
1022
1041
  for metric in self.metrics_sorted:
1023
1042
  cur = current[metric]
1024
1043
  ref = reference[metric]
1025
1044
  threshold = self.metrics_thresholds[metric]
1045
+ single_metric_results = {
1046
+ "metric": metric,
1047
+ "current_metric": cur,
1048
+ "past_metric": ref,
1049
+ "threshold": threshold,
1050
+ "ignore_zero": self.ignore_zero,
1051
+ }
1026
1052
  if cur == 0 or ref == 0:
1027
1053
  ratios[metric] = None
1028
- test_results[metric] = self.ignore_zero
1054
+ single_metric_results["ratio"] = None
1055
+ single_metric_results["success"] = self.ignore_zero
1029
1056
  else:
1030
1057
  ratio_metric = self.ratio_formulas[self.ratio_formula](current[metric], reference[metric])
1031
1058
  ratios[metric] = ratio_metric
1059
+ single_metric_results["ratio"] = ratio_metric
1032
1060
  if ratio_metric is not None:
1033
- test_results[metric] = ratio_metric < threshold
1061
+ single_metric_results["success"] = ratio_metric < threshold
1034
1062
  else:
1035
- test_results[metric] = self.ignore_zero
1063
+ single_metric_results["success"] = self.ignore_zero
1036
1064
 
1065
+ all_tests_results[metric] = single_metric_results
1037
1066
  self.log.info(
1038
- ("Current metric for %s: %s\nPast metric for %s: %s\nRatio for %s: %s\nThreshold: %s\n"),
1067
+ "Current metric for %s: %s\nPast metric for %s: %s\nRatio for %s: %s\nThreshold: %s\n",
1039
1068
  metric,
1040
1069
  cur,
1041
1070
  metric,
@@ -1045,21 +1074,24 @@ class SQLIntervalCheckOperator(BaseSQLOperator):
1045
1074
  threshold,
1046
1075
  )
1047
1076
 
1048
- if not all(test_results.values()):
1049
- failed_tests = [it[0] for it in test_results.items() if not it[1]]
1077
+ failed_tests = [single for single in all_tests_results.values() if not single["success"]]
1078
+ if failed_tests:
1050
1079
  self.log.warning(
1051
1080
  "The following %s tests out of %s failed:",
1052
1081
  len(failed_tests),
1053
1082
  len(self.metrics_sorted),
1054
1083
  )
1055
- for k in failed_tests:
1084
+ for single_filed_test in failed_tests:
1056
1085
  self.log.warning(
1057
1086
  "'%s' check failed. %s is above %s",
1058
- k,
1059
- ratios[k],
1060
- self.metrics_thresholds[k],
1087
+ single_filed_test["metric"],
1088
+ single_filed_test["ratio"],
1089
+ single_filed_test["threshold"],
1061
1090
  )
1062
- self._raise_exception(f"The following tests have failed:\n {', '.join(sorted(failed_tests))}")
1091
+ failed_test_details = "; ".join(
1092
+ f"{t['metric']}: {t}" for t in sorted(failed_tests, key=lambda x: x["metric"])
1093
+ )
1094
+ self._raise_exception(f"The following tests have failed:\n {failed_test_details}")
1063
1095
 
1064
1096
  self.log.info("All tests have passed")
1065
1097
 
@@ -1206,6 +1238,8 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
1206
1238
  self.parameters = parameters
1207
1239
  self.follow_task_ids_if_true = follow_task_ids_if_true
1208
1240
  self.follow_task_ids_if_false = follow_task_ids_if_false
1241
+ # Chosen branch, after evaluating condition, set during execution, to be used by listeners
1242
+ self.follow_branch: list[str] | None = None
1209
1243
 
1210
1244
  def execute(self, context: Context):
1211
1245
  self.log.info(
@@ -1232,32 +1266,30 @@ class BranchSQLOperator(BaseSQLOperator, SkipMixin):
1232
1266
 
1233
1267
  self.log.info("Query returns %s, type '%s'", query_result, type(query_result))
1234
1268
 
1235
- follow_branch = None
1236
1269
  try:
1237
1270
  if isinstance(query_result, bool):
1238
1271
  if query_result:
1239
- follow_branch = self.follow_task_ids_if_true
1272
+ self.follow_branch = self.follow_task_ids_if_true
1240
1273
  elif isinstance(query_result, str):
1241
1274
  # return result is not Boolean, try to convert from String to Boolean
1242
1275
  if _parse_boolean(query_result):
1243
- follow_branch = self.follow_task_ids_if_true
1276
+ self.follow_branch = self.follow_task_ids_if_true
1244
1277
  elif isinstance(query_result, int):
1245
1278
  if bool(query_result):
1246
- follow_branch = self.follow_task_ids_if_true
1279
+ self.follow_branch = self.follow_task_ids_if_true
1247
1280
  else:
1248
1281
  raise AirflowException(
1249
1282
  f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
1250
1283
  )
1251
1284
 
1252
- if follow_branch is None:
1253
- follow_branch = self.follow_task_ids_if_false
1285
+ if self.follow_branch is None:
1286
+ self.follow_branch = self.follow_task_ids_if_false
1254
1287
  except ValueError:
1255
1288
  raise AirflowException(
1256
1289
  f"Unexpected query return result '{query_result}' type '{type(query_result)}'"
1257
1290
  )
1258
1291
 
1259
- # TODO(potiuk) remove the type ignore once we solve provider <-> Task SDK relationship
1260
- self.skip_all_except(context["ti"], follow_branch)
1292
+ self.skip_all_except(context["ti"], self.follow_branch)
1261
1293
 
1262
1294
 
1263
1295
  class SQLInsertRowsOperator(BaseSQLOperator):
@@ -20,8 +20,7 @@ from collections.abc import Callable, Mapping, Sequence
20
20
  from operator import itemgetter
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
- from airflow.exceptions import AirflowException
24
- from airflow.providers.common.compat.sdk import BaseHook, BaseSensorOperator
23
+ from airflow.providers.common.compat.sdk import AirflowException, BaseHook, BaseSensorOperator
25
24
  from airflow.providers.common.sql.hooks.sql import DbApiHook
26
25
 
27
26
  if TYPE_CHECKING:
@@ -38,7 +38,7 @@ from typing import Any
38
38
  from _typeshed import Incomplete as Incomplete
39
39
 
40
40
  from airflow.providers.common.compat.sdk import BaseSensorOperator
41
- from airflow.utils.context import Context as Context
41
+ from airflow.sdk import Context
42
42
 
43
43
  class SqlSensor(BaseSensorOperator):
44
44
  template_fields: Sequence[str]
@@ -19,8 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  from typing import TYPE_CHECKING
21
21
 
22
- from airflow.exceptions import AirflowException
23
- from airflow.providers.common.compat.sdk import BaseHook
22
+ from airflow.providers.common.compat.sdk import AirflowException, BaseHook
24
23
  from airflow.providers.common.sql.hooks.sql import DbApiHook
25
24
  from airflow.triggers.base import BaseTrigger, TriggerEvent
26
25
 
@@ -0,0 +1,62 @@
1
+ #
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing,
13
+ # software distributed under the License is distributed on an
14
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ # KIND, either express or implied. See the License for the
16
+ # specific language governing permissions and limitations
17
+ # under the License.
18
+ from __future__ import annotations
19
+
20
+ from airflow import DAG
21
+ from airflow.providers.common.sql.operators.sql import SQLValueCheckOperator
22
+ from airflow.sdk.timezone import datetime
23
+
24
+ connection_args = {
25
+ "conn_id": "sales_db",
26
+ "conn_type": "Postgres",
27
+ "host": "postgres",
28
+ "schema": "postgres",
29
+ "login": "postgres",
30
+ "password": "postgres",
31
+ "port": 5432,
32
+ }
33
+
34
+ with DAG(
35
+ "example_sql_value_check_query",
36
+ description="Example DAG for SQLValueCheckOperator.",
37
+ default_args=connection_args,
38
+ start_date=datetime(2025, 12, 15),
39
+ schedule=None,
40
+ catchup=False,
41
+ ) as dag:
42
+ """
43
+ ### Example SQL value check DAG
44
+
45
+ Runs the SQLValueCheckOperator against the Airflow metadata DB.
46
+ """
47
+
48
+ # [START howto_operator_sql_value_check]
49
+ value_check = SQLValueCheckOperator(
50
+ task_id="threshhold_check",
51
+ conn_id="sales_db",
52
+ sql="SELECT count(distinct(customer_id)) FROM sales LIMIT 50;",
53
+ pass_value=40,
54
+ tolerance=5,
55
+ )
56
+ # [END howto_operator_sql_value_check]
57
+
58
+
59
+ from tests_common.test_utils.system_tests import get_test_run # noqa: E402
60
+
61
+ # Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
62
+ test_run = get_test_run(dag)
@@ -18,24 +18,17 @@
18
18
  from __future__ import annotations
19
19
 
20
20
  import datetime
21
+ import importlib.util
21
22
  import inspect
22
23
  from unittest import mock
23
24
  from unittest.mock import MagicMock
24
25
 
25
26
  import pytest
26
27
 
27
- try:
28
- import importlib.util
29
-
30
- if not importlib.util.find_spec("airflow.sdk.bases.hook"):
31
- raise ImportError
32
-
33
- BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
34
- except ImportError:
35
- BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
36
28
  from airflow import DAG
37
- from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
29
+ from airflow.exceptions import AirflowProviderDeprecationWarning
38
30
  from airflow.models import Connection
31
+ from airflow.providers.common.compat.sdk import AirflowException
39
32
  from airflow.providers.common.sql.hooks.handlers import fetch_all_handler
40
33
  from airflow.providers.common.sql.operators.sql import (
41
34
  BaseSQLOperator,
@@ -59,8 +52,19 @@ from tests_common.test_utils.dag import sync_dag_to_db
59
52
  from tests_common.test_utils.db import clear_db_dag_bundles, clear_db_dags, clear_db_runs, clear_db_xcom
60
53
  from tests_common.test_utils.markers import skip_if_force_lowest_dependencies_marker
61
54
  from tests_common.test_utils.providers import get_provider_min_airflow_version
55
+ from tests_common.test_utils.taskinstance import TaskInstanceWrapper
62
56
  from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_1, AIRFLOW_V_3_0_PLUS
63
57
 
58
+ try:
59
+ import importlib.util
60
+
61
+ if not importlib.util.find_spec("airflow.sdk.bases.hook"):
62
+ raise ImportError
63
+
64
+ BASEHOOK_PATCH_PATH = "airflow.sdk.bases.hook.BaseHook"
65
+ except ImportError:
66
+ BASEHOOK_PATCH_PATH = "airflow.hooks.base.BaseHook"
67
+
64
68
  if AIRFLOW_V_3_0_PLUS:
65
69
  from airflow.utils.types import DagRunTriggeredByType
66
70
 
@@ -939,7 +943,16 @@ class TestIntervalCheckOperator:
939
943
  ignore_zero=True,
940
944
  )
941
945
 
942
- with pytest.raises(AirflowException, match="f0, f1, f2"):
946
+ expected_err_message = (
947
+ "The following tests have failed:\n "
948
+ "f0: {'metric': 'f0', 'current_metric': 1, 'past_metric': 2, 'threshold': 1.0,"
949
+ " 'ignore_zero': True, 'ratio': 2.0, 'success': False}; "
950
+ "f1: {'metric': 'f1', 'current_metric': 1, 'past_metric': 2, 'threshold': 1.5,"
951
+ " 'ignore_zero': True, 'ratio': 2.0, 'success': False}; "
952
+ "f2: {'metric': 'f2', 'current_metric': 1, 'past_metric': 2, 'threshold': 2.0,"
953
+ " 'ignore_zero': True, 'ratio': 2.0, 'success': False}"
954
+ )
955
+ with pytest.raises(AirflowException, match=expected_err_message):
943
956
  operator.execute(context=MagicMock())
944
957
 
945
958
  @mock.patch.object(SQLIntervalCheckOperator, "get_db_hook")
@@ -969,7 +982,14 @@ class TestIntervalCheckOperator:
969
982
  ignore_zero=True,
970
983
  )
971
984
 
972
- with pytest.raises(AirflowException, match="f0, f1"):
985
+ expected_err_message = (
986
+ "The following tests have failed:\n "
987
+ "f0: {'metric': 'f0', 'current_metric': 1, 'past_metric': 3, 'threshold': 0.5, "
988
+ "'ignore_zero': True, 'ratio': 0.6666666666666666, 'success': False}; "
989
+ "f1: {'metric': 'f1', 'current_metric': 1, 'past_metric': 3, 'threshold': 0.6, "
990
+ "'ignore_zero': True, 'ratio': 0.6666666666666666, 'success': False}"
991
+ )
992
+ with pytest.raises(AirflowException, match=expected_err_message):
973
993
  operator.execute(context=MagicMock())
974
994
 
975
995
 
@@ -1134,7 +1154,13 @@ class TestSqlBranch:
1134
1154
  )
1135
1155
 
1136
1156
  ti = dr.get_task_instance(task_id)
1137
- ti.task = self.dag.get_task(ti.task_id)
1157
+ task = self.dag.get_task(ti.task_id)
1158
+
1159
+ if AIRFLOW_V_3_0_PLUS:
1160
+ ti.task = self.scheduler_dag.get_task(ti.task_id)
1161
+ ti = TaskInstanceWrapper(ti, task)
1162
+ else:
1163
+ ti.task = task
1138
1164
 
1139
1165
  return ti
1140
1166
 
@@ -1256,7 +1282,7 @@ class TestSqlBranch:
1256
1282
  mock_get_records.return_value = 1
1257
1283
 
1258
1284
  if AIRFLOW_V_3_0_1:
1259
- from airflow.exceptions import DownstreamTasksSkipped
1285
+ from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
1260
1286
 
1261
1287
  with pytest.raises(DownstreamTasksSkipped) as exc_info:
1262
1288
  branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
@@ -1305,7 +1331,7 @@ class TestSqlBranch:
1305
1331
  mock_get_records.return_value = true_value
1306
1332
 
1307
1333
  if AIRFLOW_V_3_0_1:
1308
- from airflow.exceptions import DownstreamTasksSkipped
1334
+ from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
1309
1335
 
1310
1336
  with pytest.raises(DownstreamTasksSkipped) as exc_info:
1311
1337
  branch_op.execute({})
@@ -1353,7 +1379,7 @@ class TestSqlBranch:
1353
1379
  mock_get_records.return_value = false_value
1354
1380
 
1355
1381
  if AIRFLOW_V_3_0_1:
1356
- from airflow.exceptions import DownstreamTasksSkipped
1382
+ from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
1357
1383
 
1358
1384
  with pytest.raises(DownstreamTasksSkipped) as exc_info:
1359
1385
  branch_op.execute({})
@@ -1412,7 +1438,7 @@ class TestSqlBranch:
1412
1438
  mock_get_records.return_value = [["1"]]
1413
1439
 
1414
1440
  if AIRFLOW_V_3_0_1:
1415
- from airflow.exceptions import DownstreamTasksSkipped
1441
+ from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
1416
1442
 
1417
1443
  with pytest.raises(DownstreamTasksSkipped) as exc_info:
1418
1444
  branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
@@ -1530,7 +1556,7 @@ class TestSqlBranch:
1530
1556
  mock_get_records.return_value = [false_value]
1531
1557
 
1532
1558
  if AIRFLOW_V_3_0_1:
1533
- from airflow.exceptions import DownstreamTasksSkipped
1559
+ from airflow.providers.common.compat.sdk import DownstreamTasksSkipped
1534
1560
 
1535
1561
  with pytest.raises(DownstreamTasksSkipped) as exc_info:
1536
1562
  branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
@@ -21,8 +21,8 @@ from unittest import mock
21
21
 
22
22
  import pytest
23
23
 
24
- from airflow.exceptions import AirflowException
25
24
  from airflow.models.dag import DAG
25
+ from airflow.providers.common.compat.sdk import AirflowException
26
26
  from airflow.providers.common.sql.hooks.sql import DbApiHook
27
27
  from airflow.providers.common.sql.sensors.sql import SqlSensor
28
28
  from airflow.utils.timezone import datetime