apache-airflow-providers-apache-hive 9.1.0rc1.tar.gz → 9.1.1.tar.gz

This diff shows the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


Files changed (80)
  1. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/PKG-INFO +10 -11
  2. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/README.rst +4 -4
  3. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/changelog.rst +15 -0
  4. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/index.rst +3 -3
  5. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/provider.yaml +2 -1
  6. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/pyproject.toml +6 -8
  7. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/__init__.py +1 -1
  8. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/hooks/hive.py +12 -13
  9. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/operators/hive.py +5 -10
  10. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/operators/hive_stats.py +3 -3
  11. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/sensors/hive_partition.py +1 -1
  12. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +1 -1
  13. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +1 -8
  14. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +1 -7
  15. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +1 -1
  16. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +1 -1
  17. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +1 -1
  18. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +1 -1
  19. apache_airflow_providers_apache_hive-9.1.1/src/airflow/providers/apache/hive/version_compat.py +55 -0
  20. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/sensors/test_named_hive_partition.py +1 -2
  21. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_s3_to_hive.py +2 -1
  22. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_vertica_to_hive.py +1 -2
  23. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/.latest-doc-only-change.txt +0 -0
  24. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/commits.rst +0 -0
  25. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/conf.py +0 -0
  26. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/configurations-ref.rst +0 -0
  27. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/connections/hive_cli.rst +0 -0
  28. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/connections/hive_metastore.rst +0 -0
  29. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/connections/hiveserver2.rst +0 -0
  30. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/connections/index.rst +0 -0
  31. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/installing-providers-from-sources.rst +0 -0
  32. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/integration-logos/hive.png +0 -0
  33. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/macros.rst +0 -0
  34. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/operators.rst +0 -0
  35. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/docs/security.rst +0 -0
  36. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/__init__.py +0 -0
  37. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/__init__.py +0 -0
  38. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/__init__.py +0 -0
  39. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/LICENSE +0 -0
  40. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/get_provider_info.py +0 -0
  41. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  42. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  43. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  44. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  45. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  46. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  47. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  48. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +0 -0
  49. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  50. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/conftest.py +0 -0
  51. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/integration/__init__.py +0 -0
  52. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/integration/apache/__init__.py +0 -0
  53. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/integration/apache/hive/__init__.py +0 -0
  54. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/integration/apache/hive/transfers/__init__.py +0 -0
  55. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  56. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/__init__.py +0 -0
  57. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/apache/__init__.py +0 -0
  58. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/apache/hive/__init__.py +0 -0
  59. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/apache/hive/example_hive.py +0 -0
  60. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/apache/hive/example_twitter_README.md +0 -0
  61. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/system/apache/hive/example_twitter_dag.py +0 -0
  62. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/__init__.py +0 -0
  63. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/__init__.py +0 -0
  64. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/__init__.py +0 -0
  65. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/hooks/__init__.py +0 -0
  66. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/hooks/query_results.csv +0 -0
  67. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/hooks/test_hive.py +0 -0
  68. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/macros/__init__.py +0 -0
  69. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/macros/test_hive.py +0 -0
  70. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/operators/__init__.py +0 -0
  71. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/operators/test_hive.py +0 -0
  72. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/operators/test_hive_stats.py +0 -0
  73. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/sensors/__init__.py +0 -0
  74. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/sensors/test_hive_partition.py +0 -0
  75. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/sensors/test_metastore_partition.py +0 -0
  76. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/__init__.py +0 -0
  77. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_hive_to_mysql.py +0 -0
  78. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_hive_to_samba.py +0 -0
  79. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  80. {apache_airflow_providers_apache_hive-9.1.0rc1 → apache_airflow_providers_apache_hive-9.1.1}/tests/unit/apache/hive/transfers/test_mysql_to_hive.py +0 -0

PKG-INFO

@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-apache-hive
- Version: 9.1.0rc1
+ Version: 9.1.1
  Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
  Keywords: airflow-provider,apache.hive,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: ~=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,13 +15,12 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0rc1
- Requires-Dist: apache-airflow-providers-common-sql>=1.26.0rc1
+ Requires-Dist: apache-airflow>=2.10.0
+ Requires-Dist: apache-airflow-providers-common-sql>=1.26.0
  Requires-Dist: hmsclient>=0.1.0
  Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
  Requires-Dist: thrift>=0.11.0
@@ -34,8 +33,8 @@ Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
  Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
  Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -73,7 +72,7 @@ Provides-Extra: vertica

  Package ``apache-airflow-providers-apache-hive``

- Release: ``9.1.0``
+ Release: ``9.1.1``


  `Apache Hive <https://hive.apache.org/>`__
@@ -86,7 +85,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/>`_.

  Installation
  ------------
@@ -95,7 +94,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------
@@ -138,5 +137,5 @@ Dependent package
  ====================================================================================================================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html>`_.


README.rst

@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-apache-hive``

- Release: ``9.1.0``
+ Release: ``9.1.1``


  `Apache Hive <https://hive.apache.org/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/>`_.

  Installation
  ------------
@@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------
@@ -88,4 +88,4 @@ Dependent package
  ====================================================================================================================== ===================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html>`_.
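
To put the installation snippet above in context, here is a minimal, illustrative sketch of how the provider is typically used from a DAG after ``pip install apache-airflow-providers-apache-hive``. It is not part of the diff; the DAG id, the HQL statement, and the ``hive_cli_default`` connection are placeholders to adapt to your environment.

# Minimal usage sketch (assumes Airflow >= 2.10 and a configured Hive CLI connection).
from __future__ import annotations

import datetime

from airflow import DAG
from airflow.providers.apache.hive.operators.hive import HiveOperator

with DAG(
    dag_id="example_hive_provider_usage",
    start_date=datetime.datetime(2025, 1, 1),
    schedule=None,
    catchup=False,
):
    create_table = HiveOperator(
        task_id="create_table",
        hql="CREATE TABLE IF NOT EXISTS demo (id INT, name STRING);",
        hive_cli_conn_id="hive_cli_default",  # default connection id used by HiveCliHook
    )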

docs/changelog.rst

@@ -26,6 +26,21 @@

  Changelog

+ 9.1.1
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Move 'BaseHook' implementation to task SDK (#51873)``
+ * ``Provider Migration: Replace 'models.BaseOperator' to Task SDK for apache/hive (#52453)``
+ * ``Drop support for Python 3.9 (#52072)``
+ * ``Use BaseSensorOperator from task sdk in providers (#52296)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+ * ``Remove pytest db markers from apache hive provider (#52097)``
+
  9.1.0
  .....


docs/index.rst

@@ -79,7 +79,7 @@ apache-airflow-providers-apache-hive package
  `Apache Hive <https://hive.apache.org/>`__


- Release: 9.1.0
+ Release: 9.1.1

  Provider package
  ----------------
@@ -142,5 +142,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-apache-hive 9.1.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz.sha512>`__)
- * `The apache-airflow-providers-apache-hive 9.1.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl.sha512>`__)

provider.yaml

@@ -22,12 +22,13 @@ description: |
  `Apache Hive <https://hive.apache.org/>`__

  state: ready
- source-date-epoch: 1747131490
+ source-date-epoch: 1751472547
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 9.1.1
  - 9.1.0
  - 9.0.6
  - 9.0.5

pyproject.toml

@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-apache-hive"
- version = "9.1.0rc1"
+ version = "9.1.1"
  description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -44,21 +44,20 @@ classifiers = [
  "Framework :: Apache Airflow",
  "Framework :: Apache Airflow :: Provider",
  "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.9"
+ requires-python = "~=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
- "apache-airflow>=2.10.0rc1",
- "apache-airflow-providers-common-sql>=1.26.0rc1",
+ "apache-airflow>=2.10.0",
+ "apache-airflow-providers-common-sql>=1.26.0",
  "hmsclient>=0.1.0",
  "pyhive[hive_pure_sasl]>=0.7.0",
  "thrift>=0.11.0",
@@ -96,7 +95,6 @@ dev = [
  "apache-airflow-task-sdk",
  "apache-airflow-devel-common",
  "apache-airflow-providers-amazon",
- "apache-airflow-providers-common-compat",
  "apache-airflow-providers-common-sql",
  "apache-airflow-providers-microsoft-mssql",
  "apache-airflow-providers-mysql",
@@ -133,8 +131,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"

src/airflow/providers/apache/hive/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "9.1.0"
+ __version__ = "9.1.1"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"

src/airflow/providers/apache/hive/hooks/hive.py

@@ -25,10 +25,10 @@ import subprocess
  import time
  from collections.abc import Iterable, Mapping
  from tempfile import NamedTemporaryFile, TemporaryDirectory
- from typing import TYPE_CHECKING, Any
+ from typing import TYPE_CHECKING, Any, Literal

  from deprecated import deprecated
- from typing_extensions import Literal, overload
+ from typing_extensions import overload

  if TYPE_CHECKING:
      import pandas as pd
@@ -38,19 +38,16 @@ import csv

  from airflow.configuration import conf
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
- from airflow.hooks.base import BaseHook
- from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+ from airflow.providers.apache.hive.version_compat import AIRFLOW_VAR_NAME_FORMAT_MAPPING
  from airflow.providers.common.sql.hooks.sql import DbApiHook
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
  from airflow.security import utils
  from airflow.utils.helpers import as_flattened_list

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING
- else:
-     from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-         AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-     )
-
  HIVE_QUEUE_PRIORITIES = ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]


@@ -277,7 +274,7 @@ class HiveCliHook(BaseHook):
          True
          """
          conn = self.conn
-         schema = schema or conn.schema
+         schema = schema or conn.schema or ""

          invalid_chars_list = re.findall(r"[^a-z0-9_]", schema)
          if invalid_chars_list:
@@ -605,7 +602,9 @@

      def _find_valid_host(self) -> Any:
          conn = self.conn
-         hosts = conn.host.split(",")
+         hosts = []
+         if conn.host:
+             hosts = conn.host.split(",")
          for host in hosts:
              host_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
              self.log.info("Trying to connect to %s:%s", host, conn.port)
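
For context on the ``schema = schema or conn.schema or ""`` change above: the extra fallback keeps the character validation from receiving ``None`` when neither an explicit schema nor a connection schema is set. A small standalone sketch of the same pattern (illustrative only; ``validate_schema`` is a made-up helper, not the provider's API):

import re


def validate_schema(schema: str | None, conn_schema: str | None) -> str:
    # Fall back to the connection schema, then to an empty string, so the regex
    # below never sees None (re.findall raises TypeError on a None argument).
    schema = schema or conn_schema or ""
    invalid_chars_list = re.findall(r"[^a-z0-9_]", schema)
    if invalid_chars_list:
        invalid_chars = "".join(invalid_chars_list)
        raise ValueError(f"The schema `{schema}` contains invalid characters: {invalid_chars}")
    return schema


print(validate_schema("default", None))  # -> "default"
print(validate_schema(None, None))       # -> "" (no crash)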

src/airflow/providers/apache/hive/operators/hive.py

@@ -24,17 +24,12 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
- from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
-
- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING, context_to_airflow_vars
- else:
-     from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
-         AIRFLOW_VAR_NAME_FORMAT_MAPPING,
-         context_to_airflow_vars,
-     )
+ from airflow.providers.apache.hive.version_compat import (
+     AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+     BaseOperator,
+     context_to_airflow_vars,
+ )

  if TYPE_CHECKING:
      from airflow.utils.context import Context

src/airflow/providers/apache/hive/operators/hive_stats.py

@@ -18,12 +18,12 @@
  from __future__ import annotations

  import json
- from collections.abc import Sequence
- from typing import TYPE_CHECKING, Any, Callable
+ from collections.abc import Callable, Sequence
+ from typing import TYPE_CHECKING, Any

  from airflow.exceptions import AirflowException
- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
+ from airflow.providers.apache.hive.version_compat import BaseOperator
  from airflow.providers.mysql.hooks.mysql import MySqlHook
  from airflow.providers.presto.hooks.presto import PrestoHook


src/airflow/providers/apache/hive/sensors/hive_partition.py

@@ -21,7 +21,7 @@ from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any

  from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
- from airflow.sensors.base import BaseSensorOperator
+ from airflow.providers.apache.hive.version_compat import BaseSensorOperator

  if TYPE_CHECKING:
      from airflow.utils.context import Context

src/airflow/providers/apache/hive/sensors/named_hive_partition.py

@@ -20,7 +20,7 @@ from __future__ import annotations
  from collections.abc import Sequence
  from typing import TYPE_CHECKING, Any

- from airflow.sensors.base import BaseSensorOperator
+ from airflow.providers.apache.hive.version_compat import BaseSensorOperator


  if TYPE_CHECKING:

src/airflow/providers/apache/hive/transfers/hive_to_mysql.py

@@ -23,17 +23,10 @@ from collections.abc import Sequence
  from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING

- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
- from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+ from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
  from airflow.providers.mysql.hooks.mysql import MySqlHook

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import context_to_airflow_vars
- else:
-     from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-
-
  if TYPE_CHECKING:
      from airflow.utils.context import Context


src/airflow/providers/apache/hive/transfers/hive_to_samba.py

@@ -23,16 +23,10 @@ from collections.abc import Sequence
  from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING

- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
- from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+ from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars
  from airflow.providers.samba.hooks.samba import SambaHook

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk.execution_time.context import context_to_airflow_vars
- else:
-     from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
-
  if TYPE_CHECKING:
      from airflow.utils.context import Context


src/airflow/providers/apache/hive/transfers/mssql_to_hive.py

@@ -26,8 +26,8 @@ from typing import TYPE_CHECKING

  import pymssql

- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+ from airflow.providers.apache.hive.version_compat import BaseOperator
  from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook

  if TYPE_CHECKING:

src/airflow/providers/apache/hive/transfers/mysql_to_hive.py

@@ -36,8 +36,8 @@ except ImportError:
      )


- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+ from airflow.providers.apache.hive.version_compat import BaseOperator
  from airflow.providers.mysql.hooks.mysql import MySqlHook

  if TYPE_CHECKING:

src/airflow/providers/apache/hive/transfers/s3_to_hive.py

@@ -29,9 +29,9 @@ from tempfile import NamedTemporaryFile, TemporaryDirectory
  from typing import TYPE_CHECKING, Any

  from airflow.exceptions import AirflowException
- from airflow.models import BaseOperator
  from airflow.providers.amazon.aws.hooks.s3 import S3Hook
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+ from airflow.providers.apache.hive.version_compat import BaseOperator

  if TYPE_CHECKING:
      from airflow.utils.context import Context

src/airflow/providers/apache/hive/transfers/vertica_to_hive.py

@@ -24,8 +24,8 @@ from collections.abc import Sequence
  from tempfile import NamedTemporaryFile
  from typing import TYPE_CHECKING, Any

- from airflow.models import BaseOperator
  from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+ from airflow.providers.apache.hive.version_compat import BaseOperator
  from airflow.providers.vertica.hooks.vertica import VerticaHook

  if TYPE_CHECKING:

src/airflow/providers/apache/hive/version_compat.py

@@ -0,0 +1,55 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ #
+ # NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
+ # DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
+ # ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
+ # THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
+ #
+ from __future__ import annotations
+
+
+ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
+     from packaging.version import Version
+
+     from airflow import __version__
+
+     airflow_version = Version(__version__)
+     return airflow_version.major, airflow_version.minor, airflow_version.micro
+
+
+ AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.sdk import BaseOperator, BaseSensorOperator
+     from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING, context_to_airflow_vars
+ else:
+     from airflow.models import BaseOperator
+     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
+     from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
+         AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+         context_to_airflow_vars,
+     )
+
+
+ __all__ = [
+     "AIRFLOW_V_3_0_PLUS",
+     "BaseOperator",
+     "BaseSensorOperator",
+     "AIRFLOW_VAR_NAME_FORMAT_MAPPING",
+     "context_to_airflow_vars",
+ ]
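
The new ``version_compat`` module above centralizes the Airflow 2.x/3.x import differences that the 9.1.0 sources handled inline in each file (compare the hooks and operators hunks earlier in this diff). As a hedged sketch, custom code that extends this provider can import through the same shim; the operator below is a made-up example, not part of the package:

from __future__ import annotations

# Resolve BaseOperator and the context helpers through the provider's
# compatibility shim instead of hard-coding the Airflow 2 (airflow.models)
# or Airflow 3 (airflow.sdk) import locations.
from airflow.providers.apache.hive.version_compat import BaseOperator, context_to_airflow_vars


class MyHiveAuditOperator(BaseOperator):  # hypothetical operator for illustration
    def execute(self, context):
        # context_to_airflow_vars maps the task context to AIRFLOW_CTX_* style
        # variables using whichever implementation the running Airflow provides.
        env = context_to_airflow_vars(context, in_env_var_format=True)
        self.log.info("Context variables: %s", env)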

tests/unit/apache/hive/sensors/test_named_hive_partition.py

@@ -34,9 +34,8 @@ DEFAULT_DATE = datetime(2015, 1, 1)
  DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
  DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]

- pytestmark = pytest.mark.db_test
-

+ @pytest.mark.db_test
  class TestNamedHivePartitionSensor:
      def setup_method(self):
          args = {"owner": "airflow", "start_date": DEFAULT_DATE}

tests/unit/apache/hive/transfers/test_s3_to_hive.py

@@ -39,7 +39,6 @@ moto = pytest.importorskip("moto")
  logger = logging.getLogger(__name__)


- @pytest.mark.db_test
  class TestS3ToHiveTransfer:
      @pytest.fixture(autouse=True)
      def setup_attrs(self):
@@ -196,6 +195,7 @@ class TestS3ToHiveTransfer:
          fn_bz2 = self._get_fn(".bz2", False)
          assert self._check_file_equality(bz2_txt_nh, fn_bz2, ".bz2"), "bz2 Compressed file not as expected"

+     @pytest.mark.db_test
      @mock.patch("airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook")
      @moto.mock_aws
      def test_execute(self, mock_hiveclihook):
@@ -229,6 +229,7 @@ class TestS3ToHiveTransfer:
          s32hive = S3ToHiveOperator(**self.kwargs)
          s32hive.execute(None)

+     @pytest.mark.db_test
      @mock.patch("airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook")
      @moto.mock_aws
      def test_execute_with_select_expression(self, mock_hiveclihook):

tests/unit/apache/hive/transfers/test_vertica_to_hive.py

@@ -25,8 +25,6 @@ import pytest
  from airflow.models.dag import DAG
  from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator

- pytestmark = pytest.mark.db_test
-

  def mock_get_conn():
      commit_mock = mock.MagicMock()
@@ -48,6 +46,7 @@ class TestVerticaToHiveTransfer:
          args = {"owner": "airflow", "start_date": datetime.datetime(2017, 1, 1)}
          self.dag = DAG("test_dag_id", schedule=None, default_args=args)

+     @pytest.mark.db_test
      @mock.patch(
          "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaHook.get_conn",
          side_effect=mock_get_conn,
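
The test changes above narrow the ``db_test`` marker from a module-wide ``pytestmark`` to only the tests that actually need the metadata database. A generic pytest sketch of the two scopes (illustrative only, not Airflow's test harness):

import pytest

# Module-level marking applies to every test in the file:
#     pytestmark = pytest.mark.db_test
# Per-test marking keeps the rest of the file runnable without a database,
# for example under `pytest -m "not db_test"`:


@pytest.mark.db_test
def test_needs_database():
    assert True  # placeholder for a test that talks to the Airflow metadata DB


def test_pure_logic():
    assert True  # placeholder for a DB-free unit test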