apache-airflow-providers-apache-hive 9.0.6rc1__tar.gz → 9.1.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of apache-airflow-providers-apache-hive might be problematic.

Files changed (80)
  1. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/PKG-INFO +10 -12
  2. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/README.rst +5 -6
  3. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/changelog.rst +19 -0
  4. apache_airflow_providers_apache_hive-9.1.0rc1/docs/commits.rst +35 -0
  5. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/index.rst +6 -7
  6. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/provider.yaml +6 -2
  7. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/pyproject.toml +6 -10
  8. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/__init__.py +3 -3
  9. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/hooks/hive.py +88 -15
  10. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/hooks/test_hive.py +11 -4
  11. apache_airflow_providers_apache_hive-9.0.6rc1/docs/commits.rst +0 -912
  12. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/.latest-doc-only-change.txt +0 -0
  13. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/conf.py +0 -0
  14. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/configurations-ref.rst +0 -0
  15. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/connections/hive_cli.rst +0 -0
  16. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/connections/hive_metastore.rst +0 -0
  17. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/connections/hiveserver2.rst +0 -0
  18. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/connections/index.rst +0 -0
  19. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/installing-providers-from-sources.rst +0 -0
  20. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/integration-logos/hive.png +0 -0
  21. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/macros.rst +0 -0
  22. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/operators.rst +0 -0
  23. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/docs/security.rst +0 -0
  24. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/__init__.py +0 -0
  25. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/__init__.py +0 -0
  26. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/__init__.py +0 -0
  27. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/LICENSE +0 -0
  28. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/get_provider_info.py +0 -0
  29. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  30. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  31. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  32. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  33. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/operators/hive.py +0 -0
  34. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/operators/hive_stats.py +0 -0
  35. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  36. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  37. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  38. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/sensors/hive_partition.py +0 -0
  39. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +0 -0
  40. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +0 -0
  41. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  42. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +0 -0
  43. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +0 -0
  44. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +0 -0
  45. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +0 -0
  46. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +0 -0
  47. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +0 -0
  48. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/conftest.py +0 -0
  49. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/integration/__init__.py +0 -0
  50. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/integration/apache/__init__.py +0 -0
  51. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/integration/apache/hive/__init__.py +0 -0
  52. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/integration/apache/hive/transfers/__init__.py +0 -0
  53. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  54. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/__init__.py +0 -0
  55. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/apache/__init__.py +0 -0
  56. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/apache/hive/__init__.py +0 -0
  57. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/apache/hive/example_hive.py +0 -0
  58. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/apache/hive/example_twitter_README.md +0 -0
  59. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/system/apache/hive/example_twitter_dag.py +0 -0
  60. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/__init__.py +0 -0
  61. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/__init__.py +0 -0
  62. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/__init__.py +0 -0
  63. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/hooks/__init__.py +0 -0
  64. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/hooks/query_results.csv +0 -0
  65. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/macros/__init__.py +0 -0
  66. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/macros/test_hive.py +0 -0
  67. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/operators/__init__.py +0 -0
  68. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/operators/test_hive.py +0 -0
  69. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/operators/test_hive_stats.py +0 -0
  70. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/sensors/__init__.py +0 -0
  71. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/sensors/test_hive_partition.py +0 -0
  72. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/sensors/test_metastore_partition.py +0 -0
  73. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/sensors/test_named_hive_partition.py +0 -0
  74. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/__init__.py +0 -0
  75. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_hive_to_mysql.py +0 -0
  76. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_hive_to_samba.py +0 -0
  77. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  78. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_mysql_to_hive.py +0 -0
  79. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_s3_to_hive.py +0 -0
  80. {apache_airflow_providers_apache_hive-9.0.6rc1 → apache_airflow_providers_apache_hive-9.1.0rc1}/tests/unit/apache/hive/transfers/test_vertica_to_hive.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-apache-hive
-Version: 9.0.6rc1
+Version: 9.1.0rc1
 Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
 Keywords: airflow-provider,apache.hive,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,10 +20,9 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow-providers-common-sql>=1.26.0rc1
 Requires-Dist: hmsclient>=0.1.0
-Requires-Dist: pandas>=2.1.2,<2.2
 Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
 Requires-Dist: thrift>=0.11.0
 Requires-Dist: jmespath>=0.7.0
@@ -35,8 +34,8 @@ Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
 Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -74,7 +73,7 @@ Provides-Extra: vertica

 Package ``apache-airflow-providers-apache-hive``

-Release: ``9.0.6``
+Release: ``9.1.0``


 `Apache Hive <https://hive.apache.org/>`__
@@ -87,7 +86,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/>`_.

 Installation
 ------------
@@ -104,10 +103,9 @@ Requirements
 ======================================= ==================
 PIP package                             Version required
 ======================================= ==================
-``apache-airflow``                      ``>=2.9.0``
-``apache-airflow-providers-common-sql`` ``>=1.20.0``
+``apache-airflow``                      ``>=2.10.0``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
 ``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2,<2.2``
 ``pyhive[hive_pure_sasl]``              ``>=0.7.0``
 ``thrift``                              ``>=0.11.0``
 ``jmespath``                            ``>=0.7.0``
@@ -140,5 +138,5 @@ Dependent package
 ====================================================================================================================== ===================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html>`_.

README.rst

@@ -23,7 +23,7 @@

 Package ``apache-airflow-providers-apache-hive``

-Release: ``9.0.6``
+Release: ``9.1.0``


 `Apache Hive <https://hive.apache.org/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/>`_.

 Installation
 ------------
@@ -53,10 +53,9 @@ Requirements
 ======================================= ==================
 PIP package                             Version required
 ======================================= ==================
-``apache-airflow``                      ``>=2.9.0``
-``apache-airflow-providers-common-sql`` ``>=1.20.0``
+``apache-airflow``                      ``>=2.10.0``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
 ``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2,<2.2``
 ``pyhive[hive_pure_sasl]``              ``>=0.7.0``
 ``thrift``                              ``>=0.11.0``
 ``jmespath``                            ``>=0.7.0``
@@ -89,4 +88,4 @@ Dependent package
 ====================================================================================================================== ===================

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html>`_.
docs/changelog.rst

@@ -26,6 +26,25 @@

 Changelog

+9.1.0
+.....
+
+.. note::
+  This release of provider is only available for Airflow 2.10+ as explained in the
+  `Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>`_.
+
+Misc
+~~~~
+
+* ``Refine type overload for Hive (#50211)``
+* ``Migrate HiveServer2Hook to use get_df (#50070)``
+* ``Bump min Airflow version in providers to 2.10 (#49843)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Update description of provider.yaml dependencies (#50231)``
+   * ``Avoid committing history for providers (#49907)``
+
 9.0.6
 .....

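The ``Migrate HiveServer2Hook to use get_df (#50070)`` entry above replaces direct ``get_pandas_df`` calls with the generic ``get_df`` API added in this release. A minimal before/after sketch; the connection and query are illustrative placeholders taken from the hook's docstring, not part of this diff:

    from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

    hook = HiveServer2Hook()  # assumes a configured HiveServer2 connection
    sql = "SELECT * FROM airflow.static_babynames LIMIT 100"  # illustrative query

    # Before: returns a pandas DataFrame; marked deprecated in 9.1.0
    df = hook.get_pandas_df(sql, schema="default")

    # After: get_df defaults to pandas, so this is a drop-in replacement
    df = hook.get_df(sql, schema="default")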
docs/commits.rst (new file)

@@ -0,0 +1,35 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+ .. IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    `PROVIDER_COMMITS_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+ .. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN!
+
+Package apache-airflow-providers-apache-hive
+------------------------------------------------------
+
+`Apache Hive <https://hive.apache.org/>`__
+
+
+This is detailed commit list of changes for versions provider package: ``apache.hive``.
+For high-level changelog, see :doc:`package information including changelog <index>`.
+
+.. airflow-providers-commits::
docs/index.rst

@@ -79,7 +79,7 @@ apache-airflow-providers-apache-hive package
 `Apache Hive <https://hive.apache.org/>`__


-Release: 9.0.6
+Release: 9.1.0

 Provider package
 ----------------
@@ -97,15 +97,14 @@ For the minimum Airflow version supported, see ``Requirements`` below.
 Requirements
 ------------

-The minimum Apache Airflow version supported by this provider distribution is ``2.9.0``.
+The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.

 ======================================= ==================
 PIP package                             Version required
 ======================================= ==================
-``apache-airflow``                      ``>=2.9.0``
-``apache-airflow-providers-common-sql`` ``>=1.20.0``
+``apache-airflow``                      ``>=2.10.0``
+``apache-airflow-providers-common-sql`` ``>=1.26.0``
 ``hmsclient``                           ``>=0.1.0``
-``pandas``                              ``>=2.1.2,<2.2``
 ``pyhive[hive_pure_sasl]``              ``>=0.7.0``
 ``thrift``                              ``>=0.11.0``
 ``jmespath``                            ``>=0.7.0``
@@ -143,5 +142,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

-* `The apache-airflow-providers-apache-hive 9.0.6 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6.tar.gz.sha512>`__)
-* `The apache-airflow-providers-apache-hive 9.0.6 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.0.6-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-apache-hive 9.1.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0.tar.gz.sha512>`__)
+* `The apache-airflow-providers-apache-hive 9.1.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.0-py3-none-any.whl.sha512>`__)
provider.yaml

@@ -22,9 +22,13 @@ description: |
   `Apache Hive <https://hive.apache.org/>`__

 state: ready
-source-date-epoch: 1744788817
-# note that those versions are maintained by release manager - do not update them manually
+source-date-epoch: 1747131490
+# Note that those versions are maintained by release manager - do not update them manually
+# with the exception of case where other provider in sources has >= new provider version.
+# In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
+# to be done in the same PR
 versions:
+  - 9.1.0
   - 9.0.6
   - 9.0.5
   - 9.0.4
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
25
25
 
26
26
  [project]
27
27
  name = "apache-airflow-providers-apache-hive"
28
- version = "9.0.6.rc1"
28
+ version = "9.1.0rc1"
29
29
  description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
30
30
  readme = "README.rst"
31
31
  authors = [
@@ -57,14 +57,9 @@ requires-python = "~=3.9"
57
57
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
58
58
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
59
59
  dependencies = [
60
- "apache-airflow>=2.9.0rc0",
61
- "apache-airflow-providers-common-sql>=1.20.0rc0",
60
+ "apache-airflow>=2.10.0rc1",
61
+ "apache-airflow-providers-common-sql>=1.26.0rc1",
62
62
  "hmsclient>=0.1.0",
63
- # In pandas 2.2 minimal version of the sqlalchemy is 2.0
64
- # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies
65
- # However Airflow not fully supports it yet: https://github.com/apache/airflow/issues/28723
66
- # In addition FAB also limit sqlalchemy to < 2.0
67
- "pandas>=2.1.2,<2.2",
68
63
  "pyhive[hive_pure_sasl]>=0.7.0",
69
64
  "thrift>=0.11.0",
70
65
  "jmespath>=0.7.0",
@@ -109,6 +104,7 @@ dev = [
109
104
  "apache-airflow-providers-samba",
110
105
  "apache-airflow-providers-vertica",
111
106
  # Additional devel dependencies (do not remove this line and add extra development dependencies)
107
+ "apache-airflow-providers-common-sql[pandas,polars]",
112
108
  ]
113
109
 
114
110
  # To build docs:
@@ -137,8 +133,8 @@ apache-airflow-providers-common-sql = {workspace = true}
137
133
  apache-airflow-providers-standard = {workspace = true}
138
134
 
139
135
  [project.urls]
140
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6"
141
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.6/changelog.html"
136
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0"
137
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.0/changelog.html"
142
138
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
143
139
  "Source Code" = "https://github.com/apache/airflow"
144
140
  "Slack Chat" = "https://s.apache.org/airflow-slack"
src/airflow/providers/apache/hive/__init__.py

@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version

 __all__ = ["__version__"]

-__version__ = "9.0.6"
+__version__ = "9.1.0"

 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.9.0"
+    "2.10.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.9.0+"
+        f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.10.0+"
     )
src/airflow/providers/apache/hive/hooks/hive.py

@@ -27,13 +27,17 @@ from collections.abc import Iterable, Mapping
 from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import TYPE_CHECKING, Any

+from deprecated import deprecated
+from typing_extensions import Literal, overload
+
 if TYPE_CHECKING:
     import pandas as pd
+    import polars as pl

 import csv

 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
+from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
 from airflow.hooks.base import BaseHook
 from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.common.sql.hooks.sql import DbApiHook
@@ -1031,37 +1035,106 @@ class HiveServer2Hook(DbApiHook):
         schema = kwargs["schema"] if "schema" in kwargs else "default"
         return self.get_results(sql, schema=schema, hive_conf=parameters)["data"]

-    def get_pandas_df(  # type: ignore
+    def _get_pandas_df(  # type: ignore
         self,
         sql: str,
         schema: str = "default",
         hive_conf: dict[Any, Any] | None = None,
         **kwargs,
     ) -> pd.DataFrame:
+        try:
+            import pandas as pd
+        except ImportError as e:
+            from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+            raise AirflowOptionalProviderFeatureException(e)
+
+        res = self.get_results(sql, schema=schema, hive_conf=hive_conf)
+        df = pd.DataFrame(res["data"], columns=[c[0] for c in res["header"]], **kwargs)
+        return df
+
+    def _get_polars_df(  # type: ignore
+        self,
+        sql: str,
+        schema: str = "default",
+        hive_conf: dict[Any, Any] | None = None,
+        **kwargs,
+    ) -> pl.DataFrame:
+        try:
+            import polars as pl
+        except ImportError as e:
+            from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+            raise AirflowOptionalProviderFeatureException(e)
+
+        res = self.get_results(sql, schema=schema, hive_conf=hive_conf)
+        df = pl.DataFrame(res["data"], schema=[c[0] for c in res["header"]], orient="row", **kwargs)
+        return df
+
+    @overload  # type: ignore[override]
+    def get_df(
+        self,
+        sql: str,
+        schema: str = "default",
+        hive_conf: dict[Any, Any] | None = None,
+        *,
+        df_type: Literal["pandas"] = "pandas",
+        **kwargs: Any,
+    ) -> pd.DataFrame: ...
+
+    @overload  # type: ignore[override]
+    def get_df(
+        self,
+        sql: str,
+        schema: str = "default",
+        hive_conf: dict[Any, Any] | None = None,
+        *,
+        df_type: Literal["polars"],
+        **kwargs: Any,
+    ) -> pl.DataFrame: ...
+
+    def get_df(  # type: ignore
+        self,
+        sql: str,
+        schema: str = "default",
+        hive_conf: dict[Any, Any] | None = None,
+        *,
+        df_type: Literal["pandas", "polars"] = "pandas",
+        **kwargs,
+    ) -> pd.DataFrame | pl.DataFrame:
         """
-        Get a pandas dataframe from a Hive query.
+        Get a pandas / polars dataframe from a Hive query.

         :param sql: hql to be executed.
         :param schema: target schema, default to 'default'.
         :param hive_conf: hive_conf to execute alone with the hql.
+        :param df_type: type of dataframe to return, either 'pandas' or 'polars'
         :param kwargs: (optional) passed into pandas.DataFrame constructor
         :return: result of hive execution

         >>> hh = HiveServer2Hook()
         >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"
-        >>> df = hh.get_pandas_df(sql)
+        >>> df = hh.get_df(sql, df_type="pandas")
         >>> len(df.index)
         100

-        :return: pandas.DateFrame
+        :return: pandas.DateFrame | polars.DataFrame
         """
-        try:
-            import pandas as pd
-        except ImportError as e:
-            from airflow.exceptions import AirflowOptionalProviderFeatureException
-
-            raise AirflowOptionalProviderFeatureException(e)
-
-        res = self.get_results(sql, schema=schema, hive_conf=hive_conf)
-        df = pd.DataFrame(res["data"], columns=[c[0] for c in res["header"]], **kwargs)
-        return df
+        if df_type == "pandas":
+            return self._get_pandas_df(sql, schema=schema, hive_conf=hive_conf, **kwargs)
+        if df_type == "polars":
+            return self._get_polars_df(sql, schema=schema, hive_conf=hive_conf, **kwargs)
+
+    @deprecated(
+        reason="Replaced by function `get_df`.",
+        category=AirflowProviderDeprecationWarning,
+        action="ignore",
+    )
+    def get_pandas_df(  # type: ignore
+        self,
+        sql: str,
+        schema: str = "default",
+        hive_conf: dict[Any, Any] | None = None,
+        **kwargs,
+    ) -> pd.DataFrame:
+        return self._get_pandas_df(sql, schema=schema, hive_conf=hive_conf, **kwargs)
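To illustrate the new surface, here is a hedged usage sketch of ``get_df`` as added above; the connection, schema and query are placeholders, and the polars path needs the optional ``polars`` package (pulled into this provider's dev environment via ``apache-airflow-providers-common-sql[pandas,polars]`` in ``pyproject.toml``):

    from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

    hook = HiveServer2Hook()  # placeholder: default HiveServer2 connection
    sql = "SELECT * FROM airflow.static_babynames LIMIT 100"  # illustrative query

    # Default path: behaves like the old get_pandas_df and is typed as pd.DataFrame
    pandas_df = hook.get_df(sql, schema="default")
    print(len(pandas_df.index))

    # Polars path: the Literal["polars"] overload types the result as pl.DataFrame
    polars_df = hook.get_df(sql, schema="default", df_type="polars")
    print(polars_df.height)

    # Extra kwargs are forwarded to the DataFrame constructor of the chosen backend,
    # e.g. forcing object dtype in pandas (illustrative)
    object_df = hook.get_df(sql, dtype="object")

Both branches call ``get_results`` and build the frame client-side from ``res["data"]``, so memory use scales with the size of the result set regardless of the backend chosen.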
tests/unit/apache/hive/hooks/test_hive.py

@@ -23,6 +23,7 @@ from collections import namedtuple
 from unittest import mock

 import pandas as pd
+import polars as pl
 import pytest
 from hmsclient import HMSClient

@@ -715,7 +716,8 @@ class TestHiveServer2Hook:
         hook.mock_cursor.execute.assert_any_call("set airflow.ctx.dag_owner=airflow")
         hook.mock_cursor.execute.assert_any_call("set airflow.ctx.dag_email=test@airflow.com")

-    def test_get_pandas_df(self):
+    @pytest.mark.parametrize("df_type", ["pandas", "polars"])
+    def test_get_df(self, df_type):
         hook = MockHiveServer2Hook()
         query = f"SELECT * FROM {self.table}"

@@ -731,10 +733,15 @@
                 "AIRFLOW_CTX_DAG_EMAIL": "test@airflow.com",
             },
         ):
-            df = hook.get_pandas_df(query, schema=self.database)
+            df = hook.get_df(query, schema=self.database, df_type=df_type)

         assert len(df) == 2
-        assert df["hive_server_hook.a"].values.tolist() == [1, 2]
+        if df_type == "pandas":
+            assert df["hive_server_hook.a"].values.tolist() == [1, 2]
+            assert isinstance(df, pd.DataFrame)
+        elif df_type == "polars":
+            assert df["hive_server_hook.a"].to_list() == [1, 2]
+            assert isinstance(df, pl.DataFrame)
         date_key = "logical_date" if AIRFLOW_V_3_0_PLUS else "execution_date"
         hook.get_conn.assert_called_with(self.database)
         hook.mock_cursor.execute.assert_any_call("set airflow.ctx.dag_id=test_dag_id")
@@ -747,7 +754,7 @@
         hook = MockHiveServer2Hook(connection_cursor=EmptyMockConnectionCursor())
         query = f"SELECT * FROM {self.table}"

-        df = hook.get_pandas_df(query, schema=self.database)
+        df = hook.get_df(query, schema=self.database, df_type=df_type)

         assert len(df) == 0