apache-airflow-providers-apache-hive 9.1.2rc1.tar.gz → 9.1.3rc1.tar.gz

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of apache-airflow-providers-apache-hive might be problematic.

Files changed (81)
  1. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/PKG-INFO +36 -20
  2. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/README.rst +32 -17
  3. apache_airflow_providers_apache_hive-9.1.3rc1/docs/.latest-doc-only-change.txt +1 -0
  4. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/changelog.rst +24 -0
  5. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/index.rst +18 -19
  6. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/provider.yaml +2 -1
  7. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/pyproject.toml +6 -4
  8. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/__init__.py +1 -1
  9. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/hooks/hive.py +17 -20
  10. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/operators/hive.py +10 -4
  11. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/operators/hive_stats.py +2 -2
  12. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/sensors/hive_partition.py +2 -2
  13. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +1 -1
  14. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +2 -2
  15. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +2 -2
  16. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +2 -2
  17. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +2 -2
  18. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +2 -2
  19. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +2 -2
  20. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +2 -2
  21. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/version_compat.py +8 -18
  22. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/apache/hive/example_twitter_dag.py +1 -8
  23. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/hooks/test_hive.py +1 -7
  24. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/sensors/test_named_hive_partition.py +1 -1
  25. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_hive_to_mysql.py +1 -6
  26. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_hive_to_samba.py +1 -6
  27. apache_airflow_providers_apache_hive-9.1.2rc1/docs/.latest-doc-only-change.txt +0 -1
  28. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/commits.rst +0 -0
  29. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/conf.py +0 -0
  30. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/configurations-ref.rst +0 -0
  31. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/connections/hive_cli.rst +0 -0
  32. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/connections/hive_metastore.rst +0 -0
  33. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/connections/hiveserver2.rst +0 -0
  34. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/connections/index.rst +0 -0
  35. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/installing-providers-from-sources.rst +0 -0
  36. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/integration-logos/hive.png +0 -0
  37. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/macros.rst +0 -0
  38. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/operators.rst +0 -0
  39. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/docs/security.rst +0 -0
  40. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/__init__.py +0 -0
  41. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/__init__.py +0 -0
  42. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/__init__.py +0 -0
  43. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/LICENSE +0 -0
  44. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/get_provider_info.py +0 -0
  45. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  46. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  47. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  48. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  49. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  50. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  51. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  52. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  53. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/conftest.py +0 -0
  54. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/integration/__init__.py +0 -0
  55. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/integration/apache/__init__.py +0 -0
  56. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/integration/apache/hive/__init__.py +0 -0
  57. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/integration/apache/hive/transfers/__init__.py +0 -0
  58. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  59. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/__init__.py +0 -0
  60. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/apache/__init__.py +0 -0
  61. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/apache/hive/__init__.py +0 -0
  62. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/apache/hive/example_hive.py +0 -0
  63. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/system/apache/hive/example_twitter_README.md +0 -0
  64. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/__init__.py +0 -0
  65. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/__init__.py +0 -0
  66. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/__init__.py +0 -0
  67. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/hooks/__init__.py +0 -0
  68. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/hooks/query_results.csv +0 -0
  69. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/macros/__init__.py +0 -0
  70. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/macros/test_hive.py +0 -0
  71. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/operators/__init__.py +0 -0
  72. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/operators/test_hive.py +0 -0
  73. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/operators/test_hive_stats.py +0 -0
  74. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/sensors/__init__.py +0 -0
  75. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/sensors/test_hive_partition.py +0 -0
  76. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/sensors/test_metastore_partition.py +0 -0
  77. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/__init__.py +0 -0
  78. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  79. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_mysql_to_hive.py +0 -0
  80. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_s3_to_hive.py +0 -0
  81. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.3rc1}/tests/unit/apache/hive/transfers/test_vertica_to_hive.py +0 -0
--- apache_airflow_providers_apache_hive-9.1.2rc1/PKG-INFO
+++ apache_airflow_providers_apache_hive-9.1.3rc1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-apache-hive
-Version: 9.1.2rc1
+Version: 9.1.3rc1
 Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
 Keywords: airflow-provider,apache.hive,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,6 +21,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
 Requires-Dist: apache-airflow-providers-common-sql>=1.26.0rc1
 Requires-Dist: hmsclient>=0.1.0
 Requires-Dist: pandas>=2.1.2; python_version <"3.13"
@@ -36,8 +37,8 @@ Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
 Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -75,9 +76,8 @@ Provides-Extra: vertica
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``9.1.2``
+Release: ``9.1.3``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Apache Hive <https://hive.apache.org/>`__
 
@@ -89,12 +89,12 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-apache-hive``
 
@@ -103,18 +103,19 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-======================================= =====================================
-PIP package                             Version required
-======================================= =====================================
-``apache-airflow``                      ``>=2.10.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2; python_version < "3.13"``
-``pandas``                              ``>=2.2.3; python_version >= "3.13"``
-``pyhive[hive_pure_sasl]``              ``>=0.7.0``
-``thrift``                              ``>=0.11.0``
-``jmespath``                            ``>=0.7.0``
-======================================= =====================================
+========================================== =====================================
+PIP package                                Version required
+========================================== =====================================
+``apache-airflow``                         ``>=2.10.0``
+``apache-airflow-providers-common-compat`` ``>=1.7.4``
+``apache-airflow-providers-common-sql``    ``>=1.26.0``
+``hmsclient``                              ``>=0.1.0``
+``pandas``                                 ``>=2.1.2; python_version < "3.13"``
+``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
+``pyhive[hive_pure_sasl]``                 ``>=0.7.0``
+``thrift``                                 ``>=0.11.0``
+``jmespath``                               ``>=0.7.0``
+========================================== =====================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -141,6 +142,21 @@ Dependent package
 `apache-airflow-providers-vertica <https://airflow.apache.org/docs/apache-airflow-providers-vertica>`_                 ``vertica``
 ====================================================================================================================== ===================
 
+Optional dependencies
+----------------------
+
+=================== ============================================
+Extra               Dependencies
+=================== ============================================
+``amazon``          ``apache-airflow-providers-amazon``
+``microsoft.mssql`` ``apache-airflow-providers-microsoft-mssql``
+``mysql``           ``apache-airflow-providers-mysql``
+``presto``          ``apache-airflow-providers-presto``
+``samba``           ``apache-airflow-providers-samba``
+``vertica``         ``apache-airflow-providers-vertica``
+``common.compat``   ``apache-airflow-providers-common-compat``
+=================== ============================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/changelog.html>`_.
 
--- apache_airflow_providers_apache_hive-9.1.2rc1/README.rst
+++ apache_airflow_providers_apache_hive-9.1.3rc1/README.rst
@@ -23,9 +23,8 @@
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``9.1.2``
+Release: ``9.1.3``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Apache Hive <https://hive.apache.org/>`__
 
@@ -37,12 +36,12 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-apache-hive``
 
@@ -51,18 +50,19 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-======================================= =====================================
-PIP package                             Version required
-======================================= =====================================
-``apache-airflow``                      ``>=2.10.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2; python_version < "3.13"``
-``pandas``                              ``>=2.2.3; python_version >= "3.13"``
-``pyhive[hive_pure_sasl]``              ``>=0.7.0``
-``thrift``                              ``>=0.11.0``
-``jmespath``                            ``>=0.7.0``
-======================================= =====================================
+========================================== =====================================
+PIP package                                Version required
+========================================== =====================================
+``apache-airflow``                         ``>=2.10.0``
+``apache-airflow-providers-common-compat`` ``>=1.7.4``
+``apache-airflow-providers-common-sql``    ``>=1.26.0``
+``hmsclient``                              ``>=0.1.0``
+``pandas``                                 ``>=2.1.2; python_version < "3.13"``
+``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
+``pyhive[hive_pure_sasl]``                 ``>=0.7.0``
+``thrift``                                 ``>=0.11.0``
+``jmespath``                               ``>=0.7.0``
+========================================== =====================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -89,5 +89,20 @@ Dependent package
 `apache-airflow-providers-vertica <https://airflow.apache.org/docs/apache-airflow-providers-vertica>`_ ``vertica``
 ====================================================================================================================== ===================
 
+Optional dependencies
+----------------------
+
+=================== ============================================
+Extra               Dependencies
+=================== ============================================
+``amazon``          ``apache-airflow-providers-amazon``
+``microsoft.mssql`` ``apache-airflow-providers-microsoft-mssql``
+``mysql``           ``apache-airflow-providers-mysql``
+``presto``          ``apache-airflow-providers-presto``
+``samba``           ``apache-airflow-providers-samba``
+``vertica``         ``apache-airflow-providers-vertica``
+``common.compat``   ``apache-airflow-providers-common-compat``
+=================== ============================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/changelog.html>`_.

--- /dev/null
+++ apache_airflow_providers_apache_hive-9.1.3rc1/docs/.latest-doc-only-change.txt
@@ -0,0 +1 @@
+05960ac2ebb1fd9a74f3135e5e8fe5e28160d4b2

--- apache_airflow_providers_apache_hive-9.1.2rc1/docs/changelog.rst
+++ apache_airflow_providers_apache_hive-9.1.3rc1/docs/changelog.rst
@@ -27,6 +27,30 @@
 Changelog
 ---------
 
+9.1.3
+.....
+
+Bug Fixes
+~~~~~~~~~
+
+* ``FIX: incorrect access of logical_date in google bigquery operator and google workflow operator (#55110)``
+* ``Replace sasl with pyhive.get_installed_sasl for pure-sasl compatibility (#55772)``
+
+Misc
+~~~~
+
+* ``Migrate Apache providers & Elasticsearch to ''common.compat'' (#57016)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Enable PT011 rule to prvoider tests (#56608)``
+   * ``Prepare release for Sep 2025 2nd wave of providers (#55688)``
+   * ``Prepare release for Sep 2025 1st wave of providers (#55203)``
+   * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+   * ``Make term Dag consistent in providers docs (#55101)``
+   * ``Switch pre-commit to prek (#54258)``
+   * ``Remove placeholder Release Date in changelog and index files (#56056)``
+
 9.1.2
 .....
 
--- apache_airflow_providers_apache_hive-9.1.2rc1/docs/index.rst
+++ apache_airflow_providers_apache_hive-9.1.3rc1/docs/index.rst
@@ -57,7 +57,7 @@
     :maxdepth: 1
     :caption: Resources
 
-    Example DAGs <https://github.com/apache/airflow/tree/providers-apache-hive/|version|/providers/apache/hive/tests/system/apache/hive>
+    Example Dags <https://github.com/apache/airflow/tree/providers-apache-hive/|version|/providers/apache/hive/tests/system/apache/hive>
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-apache-hive/>
     Installing from sources <installing-providers-from-sources>
     Macros <macros>
@@ -79,9 +79,7 @@ apache-airflow-providers-apache-hive package
 `Apache Hive <https://hive.apache.org/>`__
 
 
-Release: 9.1.2
-
-Release Date: ``|PypiReleaseDate|``
+Release: 9.1.3
 
 Provider package
 ----------------
@@ -92,7 +90,7 @@ All classes for this package are included in the ``airflow.providers.apache.hive
 Installation
 ------------
 
-You can install this package on top of an existing Airflow 2 installation via
+You can install this package on top of an existing Airflow installation via
 ``pip install apache-airflow-providers-apache-hive``.
 For the minimum Airflow version supported, see ``Requirements`` below.
 
@@ -101,18 +99,19 @@ Requirements
 
 The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
 
-======================================= =====================================
-PIP package                             Version required
-======================================= =====================================
-``apache-airflow``                      ``>=2.10.0``
-``apache-airflow-providers-common-sql`` ``>=1.26.0``
-``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2; python_version < "3.13"``
-``pandas``                              ``>=2.2.3; python_version >= "3.13"``
-``pyhive[hive_pure_sasl]``              ``>=0.7.0``
-``thrift``                              ``>=0.11.0``
-``jmespath``                            ``>=0.7.0``
-======================================= =====================================
+========================================== =====================================
+PIP package                                Version required
+========================================== =====================================
+``apache-airflow``                         ``>=2.10.0``
+``apache-airflow-providers-common-compat`` ``>=1.7.4``
+``apache-airflow-providers-common-sql``    ``>=1.26.0``
+``hmsclient``                              ``>=0.1.0``
+``pandas``                                 ``>=2.1.2; python_version < "3.13"``
+``pandas``                                 ``>=2.2.3; python_version >= "3.13"``
+``pyhive[hive_pure_sasl]``                 ``>=0.7.0``
+``thrift``                                 ``>=0.11.0``
+``jmespath``                               ``>=0.7.0``
+========================================== =====================================
 
 Cross provider package dependencies
 -----------------------------------
@@ -145,5 +144,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-apache-hive 9.1.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.sha512>`__)
-* `The apache-airflow-providers-apache-hive 9.1.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-apache-hive 9.1.3 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3.tar.gz.sha512>`__)
+* `The apache-airflow-providers-apache-hive 9.1.3 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.3-py3-none-any.whl.sha512>`__)

--- apache_airflow_providers_apache_hive-9.1.2rc1/provider.yaml
+++ apache_airflow_providers_apache_hive-9.1.3rc1/provider.yaml
@@ -22,12 +22,13 @@ description: |
   `Apache Hive <https://hive.apache.org/>`__
 
 state: ready
-source-date-epoch: 1753688805
+source-date-epoch: 1761114399
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 9.1.3
   - 9.1.2
   - 9.1.1
   - 9.1.0

--- apache_airflow_providers_apache_hive-9.1.2rc1/pyproject.toml
+++ apache_airflow_providers_apache_hive-9.1.3rc1/pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-apache-hive"
-version = "9.1.2rc1"
+version = "9.1.3rc1"
 description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -54,10 +54,11 @@ requires-python = ">=3.10"
 
 # The dependencies should be modified in place in the generated file.
 # Any change in the dependencies is preserved when the file is regenerated
-# Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+# Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.10.0rc1",
+    "apache-airflow-providers-common-compat>=1.8.0rc1",
     "apache-airflow-providers-common-sql>=1.26.0rc1",
     "hmsclient>=0.1.0",
     'pandas>=2.1.2; python_version <"3.13"',
@@ -98,6 +99,7 @@ dev = [
     "apache-airflow-task-sdk",
     "apache-airflow-devel-common",
     "apache-airflow-providers-amazon",
+    "apache-airflow-providers-common-compat",
     "apache-airflow-providers-common-sql",
     "apache-airflow-providers-microsoft-mssql",
     "apache-airflow-providers-mysql",
@@ -134,8 +136,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2"
-"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html"
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.3/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/__init__.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.1.2"
+__version__ = "9.1.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/hooks/hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/hooks/hive.py
@@ -18,6 +18,7 @@
 from __future__ import annotations
 
 import contextlib
+import csv
 import os
 import re
 import socket
@@ -30,19 +31,21 @@ from typing import TYPE_CHECKING, Any, Literal
 from deprecated import deprecated
 from typing_extensions import overload
 
-if TYPE_CHECKING:
-    import pandas as pd
-    import polars as pl
-
-    import csv
-
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
-from airflow.providers.apache.hive.version_compat import AIRFLOW_VAR_NAME_FORMAT_MAPPING, BaseHook
+from airflow.providers.common.compat.sdk import (
+    AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+    BaseHook,
+)
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.security import utils
 from airflow.utils.helpers import as_flattened_list
 
+if TYPE_CHECKING:
+    import pandas as pd
+    import polars as pl
+
+
 HIVE_QUEUE_PRIORITIES = ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]
 
 
@@ -573,21 +576,15 @@ class HiveMetastoreHook(BaseHook):
         conn_socket = TSocket.TSocket(host, conn.port)
 
         if conf.get("core", "security") == "kerberos" and auth_mechanism == "GSSAPI":
-            try:
-                import saslwrapper as sasl
-            except ImportError:
-                import sasl
-
-            def sasl_factory() -> sasl.Client:
-                sasl_client = sasl.Client()
-                sasl_client.setAttr("host", host)
-                sasl_client.setAttr("service", kerberos_service_name)
-                sasl_client.init()
-                return sasl_client
-
+            from pyhive.hive import get_installed_sasl
             from thrift_sasl import TSaslClientTransport
 
-            transport = TSaslClientTransport(sasl_factory, "GSSAPI", conn_socket)
+            sasl_auth = "GSSAPI"
+            transport = TSaslClientTransport(
+                lambda: get_installed_sasl(host=host, sasl_auth=sasl_auth, service=kerberos_service_name),
+                sasl_auth,
+                conn_socket,
+            )
         else:
             transport = TTransport.TBufferedTransport(conn_socket)

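The GSSAPI branch above no longer imports a SASL library directly: it delegates backend selection to pyhive, which returns whichever SASL client is installed (pure-sasl or the C ``sasl`` bindings). A minimal standalone sketch of the new transport construction, assuming a Kerberos-secured metastore; the helper name and default service value here are hypothetical, while ``get_installed_sasl`` and ``TSaslClientTransport`` are the calls used in the diff:

    from pyhive.hive import get_installed_sasl
    from thrift.transport import TSocket
    from thrift_sasl import TSaslClientTransport

    def build_gssapi_transport(host: str, port: int, service: str = "hive") -> TSaslClientTransport:
        # get_installed_sasl picks the available SASL backend, which is the
        # point of the 9.1.3 change: no direct "import sasl" anymore.
        conn_socket = TSocket.TSocket(host, port)
        sasl_auth = "GSSAPI"
        return TSaslClientTransport(
            lambda: get_installed_sasl(host=host, sasl_auth=sasl_auth, service=service),
            sasl_auth,
            conn_socket,
        )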
--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/operators/hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/operators/hive.py
@@ -25,14 +25,14 @@ from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.providers.apache.hive.version_compat import (
+from airflow.providers.common.compat.sdk import (
     AIRFLOW_VAR_NAME_FORMAT_MAPPING,
     BaseOperator,
     context_to_airflow_vars,
 )
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class HiveOperator(BaseOperator):
@@ -143,9 +143,15 @@ class HiveOperator(BaseOperator):
         # set the mapred_job_name if it's not set with dag, task, execution time info
         if not self.mapred_job_name:
             ti = context["ti"]
-            logical_date = context["logical_date"]
+            logical_date = context.get("logical_date", None)
             if logical_date is None:
-                raise RuntimeError("logical_date is None")
+                raise RuntimeError(
+                    "logical_date is not available. Please make sure the task is not used in an asset-triggered Dag. "
+                    "HiveOperator was designed to work with timetable scheduled Dags, "
+                    "and an asset-triggered Dag run does not have a logical_date. "
+                    "If you need to use HiveOperator in an asset-triggered Dag,"
+                    "please open an issue on the Airflow project."
+                )
             hostname = ti.hostname or ""
             self.hook.mapred_job_name = self.mapred_job_name_template.format(
                 dag_id=ti.dag_id,

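The fix above swaps ``context["logical_date"]`` for ``context.get(...)``: on an asset-triggered Dag run the key is absent, so subscripting failed outright instead of reaching the explanatory error. A minimal sketch of the defensive pattern; the helper name and error text here are illustrative, not part of the package:

    from collections.abc import Mapping
    from typing import Any

    def resolve_logical_date(context: Mapping[str, Any]) -> Any:
        # .get() tolerates a missing key, so runs without a logical_date fall
        # through to a descriptive error rather than a bare lookup failure.
        logical_date = context.get("logical_date", None)
        if logical_date is None:
            raise RuntimeError("logical_date is not available for this Dag run")
        return logical_date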
--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/operators/hive_stats.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/operators/hive_stats.py
@@ -23,12 +23,12 @@ from typing import TYPE_CHECKING, Any
 
 from airflow.exceptions import AirflowException
 from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
-from airflow.providers.apache.hive.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 from airflow.providers.presto.hooks.presto import PrestoHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class HiveStatsCollectionOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/sensors/hive_partition.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/sensors/hive_partition.py
@@ -21,10 +21,10 @@ from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
 from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
-from airflow.providers.apache.hive.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class HivePartitionSensor(BaseSensorOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/sensors/metastore_partition.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/sensors/metastore_partition.py
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any
 from airflow.providers.common.sql.sensors.sql import SqlSensor
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class MetastorePartitionSensor(SqlSensor):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/sensors/named_hive_partition.py
@@ -20,10 +20,10 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any
 
-from airflow.providers.apache.hive.version_compat import BaseSensorOperator
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class NamedHivePartitionSensor(BaseSensorOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -24,11 +24,11 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
-from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
+from airflow.providers.common.compat.sdk import BaseOperator, context_to_airflow_vars
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class HiveToMySqlOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
@@ -24,11 +24,11 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING
 
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
-from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
+from airflow.providers.common.compat.sdk import BaseOperator, context_to_airflow_vars
 from airflow.providers.samba.hooks.samba import SambaHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class HiveToSambaOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -27,11 +27,11 @@ from typing import TYPE_CHECKING
 import pymssql
 
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.providers.apache.hive.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class MsSqlToHiveOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -37,11 +37,11 @@ except ImportError:
 
 
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.providers.apache.hive.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.mysql.hooks.mysql import MySqlHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class MySqlToHiveOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -31,10 +31,10 @@ from typing import TYPE_CHECKING, Any
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.providers.apache.hive.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class S3ToHiveOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -25,11 +25,11 @@ from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Any
 
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.providers.apache.hive.version_compat import BaseOperator
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.vertica.hooks.vertica import VerticaHook
 
 if TYPE_CHECKING:
-    from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class VerticaToHiveOperator(BaseOperator):

--- apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive/version_compat.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/src/airflow/providers/apache/hive/version_compat.py
@@ -22,6 +22,14 @@
 #
 from __future__ import annotations
 
+# Re-export from common.compat for backward compatibility
+from airflow.providers.common.compat.sdk import (
+    AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+    BaseOperator,
+    BaseSensorOperator,
+    context_to_airflow_vars,
+)
+
 
 def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     from packaging.version import Version
@@ -35,27 +43,9 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 
-if AIRFLOW_V_3_1_PLUS:
-    from airflow.sdk import BaseHook
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseSensorOperator
-    from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING, context_to_airflow_vars
-else:
-    from airflow.models import BaseOperator
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-    from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-        AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-        context_to_airflow_vars,
-    )
-
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
     "AIRFLOW_V_3_1_PLUS",
-    "BaseHook",
     "BaseOperator",
     "BaseSensorOperator",
     "AIRFLOW_VAR_NAME_FORMAT_MAPPING",

--- apache_airflow_providers_apache_hive-9.1.2rc1/tests/system/apache/hive/example_twitter_dag.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/tests/system/apache/hive/example_twitter_dag.py
@@ -26,16 +26,9 @@ from datetime import date, datetime, timedelta
 
 from airflow import DAG
 from airflow.providers.apache.hive.operators.hive import HiveOperator
+from airflow.providers.common.compat.sdk import task
 from airflow.providers.standard.operators.bash import BashOperator
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import task
-else:
-    # Airflow 2 path
-    from airflow.decorators import task  # type: ignore[attr-defined,no-redef]
-
 # --------------------------------------------------------------------------------
 # Caveat: This Dag will not run because of missing scripts.
 # The purpose of this is to give you a sample of a real world example DAG!

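The example Dag now takes the TaskFlow decorator from the compat shim instead of branching on the Airflow major version. A minimal sketch of the new import style; the task name and body here are illustrative only:

    from airflow.providers.common.compat.sdk import task

    @task
    def fetch_tweets() -> list[str]:
        # Placeholder body; the real example Dag shells out to scripts.
        return []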
--- apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/apache/hive/hooks/test_hive.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/tests/unit/apache/hive/hooks/test_hive.py
@@ -31,6 +31,7 @@ from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection
 from airflow.models.dag import DAG
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
+from airflow.providers.common.compat.sdk import AIRFLOW_VAR_NAME_FORMAT_MAPPING
 from airflow.secrets.environment_variables import CONN_ENV_PREFIX
 from airflow.utils import timezone
 
@@ -44,13 +45,6 @@ from unit.apache.hive import (
     MockSubProcess,
 )
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING
-else:
-    from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-        AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-    )
-
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
 DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]

--- apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/apache/hive/sensors/test_named_hive_partition.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/tests/unit/apache/hive/sensors/test_named_hive_partition.py
@@ -72,7 +72,7 @@ class TestNamedHivePartitionSensor:
 
     def test_parse_partition_name_incorrect(self):
         name = "incorrect.name"
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Could not parse incorrect.nameinto table, partition"):
            NamedHivePartitionSensor.parse_partition_name(name)
 
     def test_parse_partition_name_default(self):

--- apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/apache/hive/transfers/test_hive_to_mysql.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/tests/unit/apache/hive/transfers/test_hive_to_mysql.py
@@ -24,16 +24,11 @@ from unittest.mock import MagicMock, patch
 import pytest
 
 from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
+from airflow.providers.common.compat.sdk import context_to_airflow_vars
 from airflow.utils import timezone
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.execution_time.context import context_to_airflow_vars
-else:
-    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-
 DEFAULT_DATE = timezone.datetime(2015, 1, 1)
 
 
--- apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/apache/hive/transfers/test_hive_to_samba.py
+++ apache_airflow_providers_apache_hive-9.1.3rc1/tests/unit/apache/hive/transfers/test_hive_to_samba.py
@@ -23,9 +23,9 @@ from unittest.mock import MagicMock, Mock, PropertyMock, patch
 import pytest
 
 from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
+from airflow.providers.common.compat.sdk import context_to_airflow_vars
 from airflow.providers.samba.hooks.samba import SambaHook
 
-from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 from unit.apache.hive import (
     DEFAULT_DATE,
     MockConnectionCursor,
@@ -33,11 +33,6 @@ from unit.apache.hive import (
     TestHiveEnvironment,
 )
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk.execution_time.context import context_to_airflow_vars
-else:
-    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-
 
 class MockSambaHook(SambaHook):
     def __init__(self, *args, **kwargs):

--- apache_airflow_providers_apache_hive-9.1.2rc1/docs/.latest-doc-only-change.txt
+++ /dev/null
@@ -1 +0,0 @@
-7b2ec33c7ad4998d9c9735b79593fcdcd3b9dd1f