apache-airflow-providers-apache-hive 9.1.2rc1__tar.gz → 9.1.4rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. apache_airflow_providers_apache_hive-9.1.4rc1/NOTICE +5 -0
  2. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/PKG-INFO +43 -22
  3. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/README.rst +33 -17
  4. apache_airflow_providers_apache_hive-9.1.4rc1/docs/.latest-doc-only-change.txt +1 -0
  5. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/changelog.rst +41 -0
  6. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/index.rst +18 -19
  7. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/provider.yaml +3 -1
  8. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/pyproject.toml +13 -6
  9. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/__init__.py +1 -1
  10. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/hooks/hive.py +31 -35
  11. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/operators/hive.py +10 -4
  12. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/operators/hive_stats.py +2 -2
  13. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/sensors/hive_partition.py +2 -2
  14. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +1 -1
  15. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +2 -2
  16. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +2 -2
  17. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +2 -2
  18. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +2 -2
  19. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +2 -2
  20. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +2 -2
  21. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +2 -2
  22. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/version_compat.py +8 -18
  23. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/apache/hive/example_twitter_dag.py +1 -8
  24. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/hooks/test_hive.py +4 -10
  25. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/sensors/test_named_hive_partition.py +1 -1
  26. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_hive_to_mysql.py +1 -6
  27. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_hive_to_samba.py +1 -6
  28. apache_airflow_providers_apache_hive-9.1.2rc1/docs/.latest-doc-only-change.txt +0 -1
  29. {apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache/hive → apache_airflow_providers_apache_hive-9.1.4rc1}/LICENSE +0 -0
  30. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/commits.rst +0 -0
  31. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/conf.py +0 -0
  32. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/configurations-ref.rst +0 -0
  33. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/connections/hive_cli.rst +0 -0
  34. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/connections/hive_metastore.rst +0 -0
  35. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/connections/hiveserver2.rst +0 -0
  36. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/connections/index.rst +0 -0
  37. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/installing-providers-from-sources.rst +0 -0
  38. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/integration-logos/hive.png +0 -0
  39. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/macros.rst +0 -0
  40. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/operators.rst +0 -0
  41. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/docs/security.rst +0 -0
  42. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/__init__.py +0 -0
  43. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/__init__.py +0 -0
  44. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/__init__.py +0 -0
  45. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/get_provider_info.py +0 -0
  46. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  47. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  48. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  49. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  50. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  51. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  52. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  53. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  54. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/conftest.py +0 -0
  55. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/integration/__init__.py +0 -0
  56. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/integration/apache/__init__.py +0 -0
  57. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/integration/apache/hive/__init__.py +0 -0
  58. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/integration/apache/hive/transfers/__init__.py +0 -0
  59. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  60. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/__init__.py +0 -0
  61. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/apache/__init__.py +0 -0
  62. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/apache/hive/__init__.py +0 -0
  63. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/apache/hive/example_hive.py +0 -0
  64. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/system/apache/hive/example_twitter_README.md +0 -0
  65. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/__init__.py +0 -0
  66. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/__init__.py +0 -0
  67. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/__init__.py +0 -0
  68. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/hooks/__init__.py +0 -0
  69. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/hooks/query_results.csv +0 -0
  70. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/macros/__init__.py +0 -0
  71. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/macros/test_hive.py +0 -0
  72. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/operators/__init__.py +0 -0
  73. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/operators/test_hive.py +0 -0
  74. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/operators/test_hive_stats.py +0 -0
  75. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/sensors/__init__.py +0 -0
  76. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/sensors/test_hive_partition.py +0 -0
  77. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/sensors/test_metastore_partition.py +0 -0
  78. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/__init__.py +0 -0
  79. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  80. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_mysql_to_hive.py +0 -0
  81. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_s3_to_hive.py +0 -0
  82. {apache_airflow_providers_apache_hive-9.1.2rc1 → apache_airflow_providers_apache_hive-9.1.4rc1}/tests/unit/apache/hive/transfers/test_vertica_to_hive.py +0 -0
NOTICE (new file)
@@ -0,0 +1,5 @@
+ Apache Airflow
+ Copyright 2016-2025 The Apache Software Foundation
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).
PKG-INFO
@@ -1,12 +1,13 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-apache-hive
- Version: 9.1.2rc1
+ Version: 9.1.4rc1
  Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
  Keywords: airflow-provider,apache.hive,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
  Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
+ License-Expression: Apache-2.0
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -14,36 +15,40 @@ Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
- Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
+ License-File: LICENSE
+ License-File: NOTICE
  Requires-Dist: apache-airflow>=2.10.0rc1
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
  Requires-Dist: apache-airflow-providers-common-sql>=1.26.0rc1
  Requires-Dist: hmsclient>=0.1.0
  Requires-Dist: pandas>=2.1.2; python_version <"3.13"
  Requires-Dist: pandas>=2.2.3; python_version >="3.13"
  Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
- Requires-Dist: thrift>=0.11.0
  Requires-Dist: jmespath>=0.7.0
  Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
  Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
+ Requires-Dist: winkerberos>=0.7.0 ; extra == "gssapi" and ( sys_platform == "win32")
+ Requires-Dist: kerberos>=1.3.0 ; extra == "gssapi" and ( sys_platform != "win32")
  Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
  Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
  Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
  Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
  Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
  Provides-Extra: amazon
  Provides-Extra: common-compat
+ Provides-Extra: gssapi
  Provides-Extra: microsoft-mssql
  Provides-Extra: mysql
  Provides-Extra: presto
@@ -75,9 +80,8 @@ Provides-Extra: vertica

  Package ``apache-airflow-providers-apache-hive``

- Release: ``9.1.2``
+ Release: ``9.1.4``

- Release Date: ``|PypiReleaseDate|``

  `Apache Hive <https://hive.apache.org/>`__

@@ -89,12 +93,12 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``

@@ -103,18 +107,18 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
  Requirements
  ------------

- ======================================== =====================================
- PIP package                              Version required
- ======================================== =====================================
- ``apache-airflow``                       ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.26.0``
- ``hmsclient``                            ``>=0.1.0``
- ``pandas``                               ``>=2.1.2; python_version < "3.13"``
- ``pandas``                               ``>=2.2.3; python_version >= "3.13"``
- ``pyhive[hive_pure_sasl]``               ``>=0.7.0``
- ``thrift``                               ``>=0.11.0``
- ``jmespath``                             ``>=0.7.0``
- ======================================== =====================================
+ =========================================== =====================================
+ PIP package                                 Version required
+ =========================================== =====================================
+ ``apache-airflow``                          ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``     ``>=1.26.0``
+ ``hmsclient``                               ``>=0.1.0``
+ ``pandas``                                  ``>=2.1.2; python_version < "3.13"``
+ ``pandas``                                  ``>=2.2.3; python_version >= "3.13"``
+ ``pyhive[hive_pure_sasl]``                  ``>=0.7.0``
+ ``jmespath``                                ``>=0.7.0``
+ =========================================== =====================================

  Cross provider package dependencies
  -----------------------------------
@@ -133,6 +137,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -141,6 +146,22 @@ Dependent package
  `apache-airflow-providers-vertica <https://airflow.apache.org/docs/apache-airflow-providers-vertica>`_ ``vertica``
  ====================================================================================================================== ===================

+ Optional dependencies
+ ----------------------
+
+ =================== =============================================================================================
+ Extra               Dependencies
+ =================== =============================================================================================
+ ``amazon``          ``apache-airflow-providers-amazon``
+ ``microsoft.mssql`` ``apache-airflow-providers-microsoft-mssql``
+ ``mysql``           ``apache-airflow-providers-mysql``
+ ``presto``          ``apache-airflow-providers-presto``
+ ``samba``           ``apache-airflow-providers-samba``
+ ``vertica``         ``apache-airflow-providers-vertica``
+ ``GSSAPI``          ``winkerberos>=0.7.0; sys_platform == "win32"``, ``kerberos>=1.3.0; sys_platform != "win32"``
+ ``common.compat``   ``apache-airflow-providers-common-compat``
+ =================== =============================================================================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/changelog.html>`_.

README.rst
@@ -23,9 +23,8 @@

  Package ``apache-airflow-providers-apache-hive``

- Release: ``9.1.2``
+ Release: ``9.1.4``

- Release Date: ``|PypiReleaseDate|``

  `Apache Hive <https://hive.apache.org/>`__

@@ -37,12 +36,12 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``

@@ -51,18 +50,18 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
  Requirements
  ------------

- ======================================== =====================================
- PIP package                              Version required
- ======================================== =====================================
- ``apache-airflow``                       ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.26.0``
- ``hmsclient``                            ``>=0.1.0``
- ``pandas``                               ``>=2.1.2; python_version < "3.13"``
- ``pandas``                               ``>=2.2.3; python_version >= "3.13"``
- ``pyhive[hive_pure_sasl]``               ``>=0.7.0``
- ``thrift``                               ``>=0.11.0``
- ``jmespath``                             ``>=0.7.0``
- ======================================== =====================================
+ =========================================== =====================================
+ PIP package                                 Version required
+ =========================================== =====================================
+ ``apache-airflow``                          ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``     ``>=1.26.0``
+ ``hmsclient``                               ``>=0.1.0``
+ ``pandas``                                  ``>=2.1.2; python_version < "3.13"``
+ ``pandas``                                  ``>=2.2.3; python_version >= "3.13"``
+ ``pyhive[hive_pure_sasl]``                  ``>=0.7.0``
+ ``jmespath``                                ``>=0.7.0``
+ =========================================== =====================================

  Cross provider package dependencies
  -----------------------------------
@@ -81,6 +80,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -89,5 +89,21 @@ Dependent package
  `apache-airflow-providers-vertica <https://airflow.apache.org/docs/apache-airflow-providers-vertica>`_ ``vertica``
  ====================================================================================================================== ===================

+ Optional dependencies
+ ----------------------
+
+ =================== =============================================================================================
+ Extra               Dependencies
+ =================== =============================================================================================
+ ``amazon``          ``apache-airflow-providers-amazon``
+ ``microsoft.mssql`` ``apache-airflow-providers-microsoft-mssql``
+ ``mysql``           ``apache-airflow-providers-mysql``
+ ``presto``          ``apache-airflow-providers-presto``
+ ``samba``           ``apache-airflow-providers-samba``
+ ``vertica``         ``apache-airflow-providers-vertica``
+ ``GSSAPI``          ``winkerberos>=0.7.0; sys_platform == "win32"``, ``kerberos>=1.3.0; sys_platform != "win32"``
+ ``common.compat``   ``apache-airflow-providers-common-compat``
+ =================== =============================================================================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/changelog.html>`_.
docs/.latest-doc-only-change.txt (new file)
@@ -0,0 +1 @@
+ 05960ac2ebb1fd9a74f3135e5e8fe5e28160d4b2
docs/changelog.rst
@@ -27,6 +27,47 @@
  Changelog
  ---------

+ 9.1.4
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
+ * ``Remove direct dependency on thrift (#57423)``
+ * ``Add Kerberos dependency to hive provider (#55773)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Delete all unnecessary LICENSE Files (#58191)``
+    * ``Enable ruff PLW2101,PLW2901,PLW3301 rule (#57700)``
+    * ``Enable PT006 rule to 13 files in providers (apache) (#57998)``
+
+
+ 9.1.3
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``FIX: incorrect access of logical_date in google bigquery operator and google workflow operator (#55110)``
+ * ``Replace sasl with pyhive.get_installed_sasl for pure-sasl compatibility (#55772)``
+
+ Misc
+ ~~~~
+
+ * ``Migrate Apache providers & Elasticsearch to ''common.compat'' (#57016)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Enable PT011 rule to prvoider tests (#56608)``
+    * ``Prepare release for Sep 2025 2nd wave of providers (#55688)``
+    * ``Prepare release for Sep 2025 1st wave of providers (#55203)``
+    * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+    * ``Make term Dag consistent in providers docs (#55101)``
+    * ``Switch pre-commit to prek (#54258)``
+    * ``Remove placeholder Release Date in changelog and index files (#56056)``
+
  9.1.2
  .....

docs/index.rst
@@ -57,7 +57,7 @@
    :maxdepth: 1
    :caption: Resources

-   Example DAGs <https://github.com/apache/airflow/tree/providers-apache-hive/|version|/providers/apache/hive/tests/system/apache/hive>
+   Example Dags <https://github.com/apache/airflow/tree/providers-apache-hive/|version|/providers/apache/hive/tests/system/apache/hive>
    PyPI Repository <https://pypi.org/project/apache-airflow-providers-apache-hive/>
    Installing from sources <installing-providers-from-sources>
    Macros <macros>
@@ -79,9 +79,7 @@ apache-airflow-providers-apache-hive package
  `Apache Hive <https://hive.apache.org/>`__


- Release: 9.1.2
-
- Release Date: ``|PypiReleaseDate|``
+ Release: 9.1.4

  Provider package
  ----------------
@@ -92,7 +90,7 @@ All classes for this package are included in the ``airflow.providers.apache.hive
  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation via
+ You can install this package on top of an existing Airflow installation via
  ``pip install apache-airflow-providers-apache-hive``.
  For the minimum Airflow version supported, see ``Requirements`` below.

@@ -101,18 +99,18 @@ Requirements

  The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.

- ======================================== =====================================
- PIP package                              Version required
- ======================================== =====================================
- ``apache-airflow``                       ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.26.0``
- ``hmsclient``                            ``>=0.1.0``
- ``pandas``                               ``>=2.1.2; python_version < "3.13"``
- ``pandas``                               ``>=2.2.3; python_version >= "3.13"``
- ``pyhive[hive_pure_sasl]``               ``>=0.7.0``
- ``thrift``                               ``>=0.11.0``
- ``jmespath``                             ``>=0.7.0``
- ======================================== =====================================
+ =========================================== =====================================
+ PIP package                                 Version required
+ =========================================== =====================================
+ ``apache-airflow``                          ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``     ``>=1.26.0``
+ ``hmsclient``                               ``>=0.1.0``
+ ``pandas``                                  ``>=2.1.2; python_version < "3.13"``
+ ``pandas``                                  ``>=2.2.3; python_version >= "3.13"``
+ ``pyhive[hive_pure_sasl]``                  ``>=0.7.0``
+ ``jmespath``                                ``>=0.7.0``
+ =========================================== =====================================

  Cross provider package dependencies
  -----------------------------------
@@ -131,6 +129,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -145,5 +144,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-apache-hive 9.1.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.sha512>`__)
- * `The apache-airflow-providers-apache-hive 9.1.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.4 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.4 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.4-py3-none-any.whl.sha512>`__)
provider.yaml
@@ -22,12 +22,14 @@ description: |
    `Apache Hive <https://hive.apache.org/>`__

  state: ready
- source-date-epoch: 1753688805
+ source-date-epoch: 1763068409
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 9.1.4
+   - 9.1.3
    - 9.1.2
    - 9.1.1
    - 9.1.0
pyproject.toml
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-apache-hive"
- version = "9.1.2rc1"
+ version = "9.1.4rc1"
  description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
  readme = "README.rst"
+ license = "Apache-2.0"
+ license-files = ['LICENSE', 'NOTICE']
  authors = [
      {name="Apache Software Foundation", email="dev@airflow.apache.org"},
  ]
@@ -43,7 +45,6 @@ classifiers = [
      "Intended Audience :: System Administrators",
      "Framework :: Apache Airflow",
      "Framework :: Apache Airflow :: Provider",
-     "License :: OSI Approved :: Apache Software License",
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Programming Language :: Python :: 3.12",
@@ -54,16 +55,16 @@ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
- # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+ # Make sure to run ``prek update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
      "apache-airflow>=2.10.0rc1",
+     "apache-airflow-providers-common-compat>=1.8.0rc1",
      "apache-airflow-providers-common-sql>=1.26.0rc1",
      "hmsclient>=0.1.0",
      'pandas>=2.1.2; python_version <"3.13"',
      'pandas>=2.2.3; python_version >="3.13"',
      "pyhive[hive_pure_sasl]>=0.7.0",
-     "thrift>=0.11.0",
      "jmespath>=0.7.0",
  ]

@@ -88,6 +89,11 @@ dependencies = [
  "vertica" = [
      "apache-airflow-providers-vertica"
  ]
+ "GSSAPI" = [
+     # Windows: use winkerberos, others: use kerberos
+     'winkerberos>=0.7.0; sys_platform == "win32"',
+     'kerberos>=1.3.0; sys_platform != "win32"'
+ ]
  "common.compat" = [
      "apache-airflow-providers-common-compat"
  ]
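
The new ``GSSAPI`` extra above picks a Kerberos backend per platform via PEP 508 environment markers. A hedged sketch of what those markers amount to at import time; the install command and the ``as kerberos`` aliasing are illustrative, not taken from the provider:

    import sys

    # pip install "apache-airflow-providers-apache-hive[gssapi]" resolves
    # winkerberos on Windows and kerberos everywhere else, mirroring the
    # sys_platform markers in the extra above.
    if sys.platform == "win32":
        import winkerberos as kerberos  # assumption: winkerberos mirrors the kerberos API
    else:
        import kerberos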
@@ -98,6 +104,7 @@ dev = [
      "apache-airflow-task-sdk",
      "apache-airflow-devel-common",
      "apache-airflow-providers-amazon",
+     "apache-airflow-providers-common-compat",
      "apache-airflow-providers-common-sql",
      "apache-airflow-providers-microsoft-mssql",
      "apache-airflow-providers-mysql",
@@ -134,8 +141,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html"
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4"
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.4/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
src/airflow/providers/apache/hive/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "9.1.2"
+ __version__ = "9.1.4"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"
src/airflow/providers/apache/hive/hooks/hive.py
@@ -18,6 +18,7 @@
  from __future__ import annotations

  import contextlib
+ import csv
  import os
  import re
  import socket
@@ -30,19 +31,21 @@ from typing import TYPE_CHECKING, Any, Literal
  from deprecated import deprecated
  from typing_extensions import overload

- if TYPE_CHECKING:
-     import pandas as pd
-     import polars as pl
-
- import csv
-
  from airflow.configuration import conf
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
- from airflow.providers.apache.hive.version_compat import AIRFLOW_VAR_NAME_FORMAT_MAPPING, BaseHook
+ from airflow.providers.common.compat.sdk import (
+     AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+     BaseHook,
+ )
  from airflow.providers.common.sql.hooks.sql import DbApiHook
  from airflow.security import utils
  from airflow.utils.helpers import as_flattened_list

+ if TYPE_CHECKING:
+     import pandas as pd
+     import polars as pl
+
+
  HIVE_QUEUE_PRIORITIES = ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]


@@ -317,8 +320,8 @@ class HiveCliHook(BaseHook):
      )
      self.sub_process = sub_process
      stdout = ""
-     for line in iter(sub_process.stdout.readline, b""):
-         line = line.decode()
+     for line_raw in iter(sub_process.stdout.readline, b""):
+         line = line_raw.decode()
          stdout += line
          if verbose:
              self.log.info(line.strip())
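
The ``run_cli`` hunk above is part of the ruff PLW2901 cleanup listed in the 9.1.4 changelog: the raw bytes from the subprocess get their own name instead of rebinding the loop variable. A minimal standalone sketch of the same pattern, using a throwaway ``echo`` subprocess rather than the hive CLI command the hook actually builds:

    import subprocess

    # Illustrative command; HiveCliHook spawns the hive/beeline CLI instead.
    proc = subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE)
    stdout = ""
    # PLW2901-friendly: bytes stay bound to line_raw, the decoded text to line.
    for line_raw in iter(proc.stdout.readline, b""):
        line = line_raw.decode()
        stdout += line
        print(line.strip())
    proc.wait()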
@@ -333,24 +336,23 @@ class HiveCliHook(BaseHook):
      """Test an hql statement using the hive cli and EXPLAIN."""
      create, insert, other = [], [], []
      for query in hql.split(";"):  # naive
-         query_original = query
-         query = query.lower().strip()
-
-         if query.startswith("create table"):
-             create.append(query_original)
-         elif query.startswith(("set ", "add jar ", "create temporary function")):
-             other.append(query_original)
-         elif query.startswith("insert"):
-             insert.append(query_original)
+         query_lower = query.lower().strip()
+
+         if query_lower.startswith("create table"):
+             create.append(query)
+         elif query_lower.startswith(("set ", "add jar ", "create temporary function")):
+             other.append(query)
+         elif query_lower.startswith("insert"):
+             insert.append(query)
      other_ = ";".join(other)
      for query_set in [create, insert]:
-         for query in query_set:
-             query_preview = " ".join(query.split())[:50]
+         for query_item in query_set:
+             query_preview = " ".join(query_item.split())[:50]
              self.log.info("Testing HQL [%s (...)]", query_preview)
              if query_set == insert:
-                 query = other_ + "; explain " + query
+                 query = other_ + "; explain " + query_item
              else:
-                 query = "explain " + query
+                 query = "explain " + query_item
              try:
                  self.run_cli(query, verbose=False)
              except AirflowException as e:
@@ -573,21 +575,15 @@ class HiveMetastoreHook(BaseHook):
      conn_socket = TSocket.TSocket(host, conn.port)

      if conf.get("core", "security") == "kerberos" and auth_mechanism == "GSSAPI":
-         try:
-             import saslwrapper as sasl
-         except ImportError:
-             import sasl
-
-         def sasl_factory() -> sasl.Client:
-             sasl_client = sasl.Client()
-             sasl_client.setAttr("host", host)
-             sasl_client.setAttr("service", kerberos_service_name)
-             sasl_client.init()
-             return sasl_client
-
+         from pyhive.hive import get_installed_sasl
          from thrift_sasl import TSaslClientTransport

-         transport = TSaslClientTransport(sasl_factory, "GSSAPI", conn_socket)
+         sasl_auth = "GSSAPI"
+         transport = TSaslClientTransport(
+             lambda: get_installed_sasl(host=host, sasl_auth=sasl_auth, service=kerberos_service_name),
+             sasl_auth,
+             conn_socket,
+         )
      else:
          transport = TTransport.TBufferedTransport(conn_socket)

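The second hunk is the 9.1.3 ``Replace sasl with pyhive.get_installed_sasl`` fix: instead of hand-building a ``sasl.Client`` factory, the metastore hook lets pyhive construct a SASL client from whichever backend (sasl or pure-sasl) is installed. A minimal sketch of the resulting transport wiring with placeholder connection values; it assumes pyhive>=0.7.0, thrift and thrift_sasl are installed, and nothing actually connects until the transport is opened:

    from pyhive.hive import get_installed_sasl
    from thrift.transport import TSocket
    from thrift_sasl import TSaslClientTransport

    host = "metastore.example.com"   # placeholder; the hook reads this from the connection
    kerberos_service_name = "hive"   # placeholder service principal name
    sasl_auth = "GSSAPI"

    conn_socket = TSocket.TSocket(host, 9083)  # 9083 is the conventional metastore port
    # TSaslClientTransport calls the factory to obtain a fresh SASL client;
    # get_installed_sasl returns one from whichever sasl backend is available.
    transport = TSaslClientTransport(
        lambda: get_installed_sasl(host=host, sasl_auth=sasl_auth, service=kerberos_service_name),
        sasl_auth,
        conn_socket,
    )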
src/airflow/providers/apache/hive/operators/hive.py
@@ -25,14 +25,14 @@ from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.apache.hive.version_compat import (
+ from airflow.providers.common.compat.sdk import (
      AIRFLOW_VAR_NAME_FORMAT_MAPPING,
      BaseOperator,
      context_to_airflow_vars,
  )

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class HiveOperator(BaseOperator):
@@ -143,9 +143,15 @@
      # set the mapred_job_name if it's not set with dag, task, execution time info
      if not self.mapred_job_name:
          ti = context["ti"]
-         logical_date = context["logical_date"]
+         logical_date = context.get("logical_date", None)
          if logical_date is None:
-             raise RuntimeError("logical_date is None")
+             raise RuntimeError(
+                 "logical_date is not available. Please make sure the task is not used in an asset-triggered Dag. "
+                 "HiveOperator was designed to work with timetable scheduled Dags, "
+                 "and an asset-triggered Dag run does not have a logical_date. "
+                 "If you need to use HiveOperator in an asset-triggered Dag,"
+                 "please open an issue on the Airflow project."
+             )
          hostname = ti.hostname or ""
          self.hook.mapred_job_name = self.mapred_job_name_template.format(
              dag_id=ti.dag_id,
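
The second hunk is the 9.1.3 ``logical_date`` fix: ``context.get(...)`` tolerates asset-triggered Dag runs, where the key can be missing entirely, and the operator now fails with an actionable message instead of a bare lookup error. The guard reduces to this sketch, where ``context`` is a plain dict standing in for the Airflow task context:

    def resolve_logical_date(context: dict):
        # .get() instead of context["logical_date"]: asset-triggered runs lack the key.
        logical_date = context.get("logical_date")
        if logical_date is None:
            raise RuntimeError("logical_date is not available for asset-triggered Dag runs.")
        return logical_date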
src/airflow/providers/apache/hive/operators/hive_stats.py
@@ -23,12 +23,12 @@ from typing import TYPE_CHECKING, Any

  from airflow.exceptions import AirflowException
  from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
- from airflow.providers.apache.hive.version_compat import BaseOperator
+ from airflow.providers.common.compat.sdk import BaseOperator
  from airflow.providers.mysql.hooks.mysql import MySqlHook
  from airflow.providers.presto.hooks.presto import PrestoHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class HiveStatsCollectionOperator(BaseOperator):
src/airflow/providers/apache/hive/sensors/hive_partition.py
@@ -21,10 +21,10 @@ from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any

  from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
- from airflow.providers.apache.hive.version_compat import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import BaseSensorOperator

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class HivePartitionSensor(BaseSensorOperator):
src/airflow/providers/apache/hive/sensors/metastore_partition.py
@@ -23,7 +23,7 @@ from typing import TYPE_CHECKING, Any
  from airflow.providers.common.sql.sensors.sql import SqlSensor

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class MetastorePartitionSensor(SqlSensor):
src/airflow/providers/apache/hive/sensors/named_hive_partition.py
@@ -20,10 +20,10 @@ from __future__ import annotations
  from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any

- from airflow.providers.apache.hive.version_compat import BaseSensorOperator
+ from airflow.providers.common.compat.sdk import BaseSensorOperator

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class NamedHivePartitionSensor(BaseSensorOperator):
src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -24,11 +24,11 @@ from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING

  from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
- from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
+ from airflow.providers.common.compat.sdk import BaseOperator, context_to_airflow_vars
  from airflow.providers.mysql.hooks.mysql import MySqlHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class HiveToMySqlOperator(BaseOperator):
src/airflow/providers/apache/hive/transfers/hive_to_samba.py
@@ -24,11 +24,11 @@ from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING

  from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
- from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
+ from airflow.providers.common.compat.sdk import BaseOperator, context_to_airflow_vars
  from airflow.providers.samba.hooks.samba import SambaHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class HiveToSambaOperator(BaseOperator):
src/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -27,11 +27,11 @@ from typing import TYPE_CHECKING
  import pymssql

  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.apache.hive.version_compat import BaseOperator
+ from airflow.providers.common.compat.sdk import BaseOperator
  from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class MsSqlToHiveOperator(BaseOperator):
src/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -37,11 +37,11 @@ except ImportError:


  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.apache.hive.version_compat import BaseOperator
+ from airflow.providers.common.compat.sdk import BaseOperator
  from airflow.providers.mysql.hooks.mysql import MySqlHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class MySqlToHiveOperator(BaseOperator):
src/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -31,10 +31,10 @@ from typing import TYPE_CHECKING, Any
  from airflow.exceptions import AirflowException
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.apache.hive.version_compat import BaseOperator
+ from airflow.providers.common.compat.sdk import BaseOperator

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class S3ToHiveOperator(BaseOperator):
src/airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -25,11 +25,11 @@ from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING, Any

  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.apache.hive.version_compat import BaseOperator
+ from airflow.providers.common.compat.sdk import BaseOperator
  from airflow.providers.vertica.hooks.vertica import VerticaHook

  if TYPE_CHECKING:
-     from airflow.utils.context import Context
+     from airflow.providers.common.compat.sdk import Context


  class VerticaToHiveOperator(BaseOperator):
src/airflow/providers/apache/hive/version_compat.py
@@ -22,6 +22,14 @@
  #
  from __future__ import annotations

+ # Re-export from common.compat for backward compatibility
+ from airflow.providers.common.compat.sdk import (
+     AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+     BaseOperator,
+     BaseSensorOperator,
+     context_to_airflow_vars,
+ )
+

  def get_base_airflow_version_tuple() -> tuple[int, int, int]:
      from packaging.version import Version
@@ -35,27 +43,9 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
  AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)

- if AIRFLOW_V_3_1_PLUS:
-     from airflow.sdk import BaseHook
- else:
-     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-
- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk import BaseOperator, BaseSensorOperator
-     from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING, context_to_airflow_vars
- else:
-     from airflow.models import BaseOperator
-     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-     from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-         AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-         context_to_airflow_vars,
-     )
-
-
  __all__ = [
      "AIRFLOW_V_3_0_PLUS",
      "AIRFLOW_V_3_1_PLUS",
-     "BaseHook",
      "BaseOperator",
      "BaseSensorOperator",
      "AIRFLOW_VAR_NAME_FORMAT_MAPPING",
tests/system/apache/hive/example_twitter_dag.py
@@ -26,16 +26,9 @@ from datetime import date, datetime, timedelta

  from airflow import DAG
  from airflow.providers.apache.hive.operators.hive import HiveOperator
+ from airflow.providers.common.compat.sdk import task
  from airflow.providers.standard.operators.bash import BashOperator

- from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
-
- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk import task
- else:
-     # Airflow 2 path
-     from airflow.decorators import task  # type: ignore[attr-defined,no-redef]
-
  # --------------------------------------------------------------------------------
  # Caveat: This Dag will not run because of missing scripts.
  # The purpose of this is to give you a sample of a real world example DAG!
tests/unit/apache/hive/hooks/test_hive.py
@@ -31,6 +31,7 @@ from airflow.exceptions import AirflowException
  from airflow.models.connection import Connection
  from airflow.models.dag import DAG
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveMetastoreHook, HiveServer2Hook
+ from airflow.providers.common.compat.sdk import AIRFLOW_VAR_NAME_FORMAT_MAPPING
  from airflow.secrets.environment_variables import CONN_ENV_PREFIX
  from airflow.utils import timezone

@@ -44,13 +45,6 @@ from unit.apache.hive import (
      MockSubProcess,
  )

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING
- else:
-     from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-         AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-     )
-
  DEFAULT_DATE = timezone.datetime(2015, 1, 1)
  DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
  DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
@@ -658,7 +652,7 @@ class TestHiveServer2Hook:
      )

      @pytest.mark.parametrize(
-         "host, port, schema, message",
+         ("host", "port", "schema", "message"),
          [
              ("localhost", "10000", "default", None),
              ("localhost:", "10000", "default", "The host used in beeline command"),
@@ -912,7 +906,7 @@ class TestHiveCli:
      assert not hook.high_availability

      @pytest.mark.parametrize(
-         "extra_dejson, correct_proxy_user, proxy_user",
+         ("extra_dejson", "correct_proxy_user", "proxy_user"),
          [
              ({"proxy_user": "a_user_proxy"}, "hive.server2.proxy.user=a_user_proxy", None),
          ],
@@ -944,7 +938,7 @@ class TestHiveCli:
      hook._prepare_cli_cmd()

      @pytest.mark.parametrize(
-         "extra_dejson, expected_keys",
+         ("extra_dejson", "expected_keys"),
          [
              (
                  {"high_availability": "true"},
tests/unit/apache/hive/sensors/test_named_hive_partition.py
@@ -72,7 +72,7 @@ class TestNamedHivePartitionSensor:

      def test_parse_partition_name_incorrect(self):
          name = "incorrect.name"
-         with pytest.raises(ValueError):
+         with pytest.raises(ValueError, match="Could not parse incorrect.nameinto table, partition"):
              NamedHivePartitionSensor.parse_partition_name(name)

      def test_parse_partition_name_default(self):
tests/unit/apache/hive/transfers/test_hive_to_mysql.py
@@ -24,16 +24,11 @@ from unittest.mock import MagicMock, patch
  import pytest

  from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
+ from airflow.providers.common.compat.sdk import context_to_airflow_vars
  from airflow.utils import timezone

- from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
  from unit.apache.hive import MockHiveServer2Hook, MockMySqlHook, TestHiveEnvironment

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import context_to_airflow_vars
- else:
-     from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-
  DEFAULT_DATE = timezone.datetime(2015, 1, 1)

tests/unit/apache/hive/transfers/test_hive_to_samba.py
@@ -23,9 +23,9 @@ from unittest.mock import MagicMock, Mock, PropertyMock, patch
  import pytest

  from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator
+ from airflow.providers.common.compat.sdk import context_to_airflow_vars
  from airflow.providers.samba.hooks.samba import SambaHook

- from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
  from unit.apache.hive import (
      DEFAULT_DATE,
      MockConnectionCursor,
@@ -33,11 +33,6 @@ from unit.apache.hive import (
      TestHiveEnvironment,
  )

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import context_to_airflow_vars
- else:
-     from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-

  class MockSambaHook(SambaHook):
      def __init__(self, *args, **kwargs):
apache_airflow_providers_apache_hive-9.1.2rc1/docs/.latest-doc-only-change.txt (removed)
@@ -1 +0,0 @@
- 7b2ec33c7ad4998d9c9735b79593fcdcd3b9dd1f