apache-airflow-providers-apache-hive 9.0.3rc1__tar.gz → 9.0.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-apache-hive might be problematic.

Files changed (26)
  1. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/PKG-INFO +13 -10
  2. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/README.rst +5 -4
  3. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/pyproject.toml +11 -7
  4. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/__init__.py +1 -1
  5. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/get_provider_info.py +3 -1
  6. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/hooks/hive.py +8 -1
  7. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/operators/hive.py +10 -5
  8. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +7 -1
  9. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +6 -1
  10. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/LICENSE +0 -0
  11. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  12. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  13. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  14. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  15. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/operators/hive_stats.py +0 -0
  16. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  17. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  18. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  19. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/sensors/hive_partition.py +0 -0
  20. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +0 -0
  21. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +0 -0
  22. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  23. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +0 -0
  24. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +0 -0
  25. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +0 -0
  26. {apache_airflow_providers_apache_hive-9.0.3rc1 → apache_airflow_providers_apache_hive-9.0.4}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +0 -0
--- apache_airflow_providers_apache_hive-9.0.3rc1/PKG-INFO
+++ apache_airflow_providers_apache_hive-9.0.4/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-apache-hive
-Version: 9.0.3rc1
+Version: 9.0.4
 Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
 Keywords: airflow-provider,apache.hive,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,27 +20,29 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: hmsclient>=0.1.0
 Requires-Dist: pandas>=2.1.2,<2.2
 Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
 Requires-Dist: thrift>=0.11.0
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
+Requires-Dist: apache-airflow-providers-common-compat ; extra == "common-compat"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
 Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
 Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4
+Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: amazon
+Provides-Extra: common-compat
 Provides-Extra: microsoft-mssql
 Provides-Extra: mysql
 Provides-Extra: presto
@@ -72,7 +74,7 @@ Provides-Extra: vertica
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``9.0.3``
+Release: ``9.0.4``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -85,7 +87,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/>`_.
 
 Installation
 ------------
@@ -115,7 +117,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -128,6 +130,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package Extra
 ====================================================================================================================== ===================
 `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
 `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
 `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -137,5 +140,5 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/changelog.html>`_.
 
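In short, the metadata drops the rc pins on ``apache-airflow`` and ``common-sql``, repoints the docs and changelog URLs at 9.0.4, replaces the Twitter project URL with Mastodon, and declares a new ``common-compat`` extra. A quick way to confirm the installed distribution carries the new extra (a sketch; it assumes provider 9.0.4 is installed in the current environment):

    # Read the installed distribution's metadata via the standard library.
    from importlib.metadata import metadata

    meta = metadata("apache-airflow-providers-apache-hive")
    print(meta["Version"])                 # expected: 9.0.4
    print(meta.get_all("Provides-Extra"))  # should now include "common-compat"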
--- apache_airflow_providers_apache_hive-9.0.3rc1/README.rst
+++ apache_airflow_providers_apache_hive-9.0.4/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``9.0.3``
+Release: ``9.0.4``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/>`_.
 
 Installation
 ------------
@@ -66,7 +66,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -79,6 +79,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package Extra
 ====================================================================================================================== ===================
 `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
 `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
 `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -88,4 +89,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/changelog.html>`_.
--- apache_airflow_providers_apache_hive-9.0.3rc1/pyproject.toml
+++ apache_airflow_providers_apache_hive-9.0.4/pyproject.toml
@@ -20,12 +20,12 @@
 # IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
 # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
 [build-system]
-requires = ["flit_core==3.11.0"]
+requires = ["flit_core==3.12.0"]
 build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-apache-hive"
-version = "9.0.3.rc1"
+version = "9.0.4"
 description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,8 +57,8 @@ requires-python = "~=3.9"
 # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.9.0rc0",
-    "apache-airflow-providers-common-sql>=1.20.0rc0",
+    "apache-airflow>=2.9.0",
+    "apache-airflow-providers-common-sql>=1.20.0",
     "hmsclient>=0.1.0",
     # In pandas 2.2 minimal version of the sqlalchemy is 2.0
     # https://pandas.pydata.org/docs/whatsnew/v2.2.0.html#increased-minimum-versions-for-dependencies
@@ -91,6 +91,9 @@ dependencies = [
 "vertica" = [
     "apache-airflow-providers-vertica"
 ]
+"common.compat" = [
+    "apache-airflow-providers-common-compat"
+]
 
 [dependency-groups]
 dev = [
@@ -98,6 +101,7 @@ dev = [
     "apache-airflow-task-sdk",
     "apache-airflow-devel-common",
     "apache-airflow-providers-amazon",
+    "apache-airflow-providers-common-compat",
     "apache-airflow-providers-common-sql",
     "apache-airflow-providers-microsoft-mssql",
     "apache-airflow-providers-mysql",
@@ -118,12 +122,12 @@ apache-airflow-providers-fab = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.3/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.4/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
-"Twitter" = "https://x.com/ApacheAirflow"
+"Mastodon" = "https://fosstodon.org/@airflow"
 "YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
 
 [project.entry-points."apache_airflow_provider"]
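Beyond the version and URL bumps, the build backend pin moves from flit_core 3.11.0 to 3.12.0 and a ``common.compat`` optional-dependency group is added, mirroring the new extra in the metadata. A small verification sketch against a checked-out sdist, assuming the quoted groups sit under ``[project.optional-dependencies]`` (the usual layout for these generated provider files); the path is the sdist directory named in the file list above:

    # tomllib is in the standard library from Python 3.11; use the tomli
    # backport on older interpreters.
    import tomllib

    with open("apache_airflow_providers_apache_hive-9.0.4/pyproject.toml", "rb") as f:
        project = tomllib.load(f)["project"]

    print(project["version"])                                 # "9.0.4"
    print(project["optional-dependencies"]["common.compat"])  # ["apache-airflow-providers-common-compat"]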
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/__init__.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "9.0.3"
+__version__ = "9.0.4"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/get_provider_info.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/get_provider_info.py
@@ -27,8 +27,9 @@ def get_provider_info():
         "name": "Apache Hive",
         "description": "`Apache Hive <https://hive.apache.org/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1741507888,
+        "source-date-epoch": 1742979243,
         "versions": [
+            "9.0.4",
             "9.0.3",
             "9.0.2",
             "9.0.0",
@@ -199,6 +200,7 @@ def get_provider_info():
             "presto": ["apache-airflow-providers-presto"],
             "samba": ["apache-airflow-providers-samba"],
             "vertica": ["apache-airflow-providers-vertica"],
+            "common.compat": ["apache-airflow-providers-common-compat"],
         },
         "devel-dependencies": [],
     }
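``get_provider_info()`` simply returns this metadata dictionary, so the new release entry is easy to check at runtime; a minimal sketch, assuming provider 9.0.4 is installed:

    from airflow.providers.apache.hive.get_provider_info import get_provider_info

    info = get_provider_info()
    print(info["name"])         # "Apache Hive"
    print(info["versions"][0])  # "9.0.4" - the newest version is listed first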
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/hooks/hive.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/hooks/hive.py
@@ -35,10 +35,17 @@ import csv
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 from airflow.security import utils
 from airflow.utils.helpers import as_flattened_list
-from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING
+else:
+    from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
+        AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+    )
 
 HIVE_QUEUE_PRIORITIES = ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]
 
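This is the first of four modules that gain the same compatibility gate: on Airflow 3 the task-execution helpers come from the Task SDK, on Airflow 2.x from their old ``airflow.utils.operator_helpers`` location. A generic, standalone sketch of the pattern (the concrete symbols vary per module, as the remaining hunks show):

    from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS

    if AIRFLOW_V_3_0_PLUS:
        # Airflow 3: the helper moved into the Task SDK execution-time context module.
        from airflow.sdk.execution_time.context import context_to_airflow_vars
    else:
        # Airflow 2.x: legacy location; only one branch ever runs, so the ignore
        # comment just quiets static analysis about the apparent redefinition.
        from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]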
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/operators/hive.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/operators/hive.py
@@ -26,8 +26,15 @@ from typing import TYPE_CHECKING, Any
 from airflow.configuration import conf
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
-from airflow.utils import operator_helpers
-from airflow.utils.operator_helpers import context_to_airflow_vars
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import AIRFLOW_VAR_NAME_FORMAT_MAPPING, context_to_airflow_vars
+else:
+    from airflow.utils.operator_helpers import (  # type: ignore[no-redef, attr-defined]
+        AIRFLOW_VAR_NAME_FORMAT_MAPPING,
+        context_to_airflow_vars,
+    )
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -173,7 +180,5 @@ class HiveOperator(BaseOperator):
 
     def clear_airflow_vars(self) -> None:
         """Reset airflow environment variables to prevent existing ones from impacting behavior."""
-        blank_env_vars = {
-            value["env_var_format"]: "" for value in operator_helpers.AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()
-        }
+        blank_env_vars = {value["env_var_format"]: "" for value in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()}
         os.environ.update(blank_env_vars)
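The second hunk also collapses ``clear_airflow_vars`` into a one-liner now that the mapping is imported directly rather than through ``operator_helpers``. A toy, self-contained version of the same comprehension (the mapping entry is illustrative and only mirrors the ``env_var_format`` key the code actually reads):

    import os

    # Hypothetical single-entry stand-in for AIRFLOW_VAR_NAME_FORMAT_MAPPING.
    name_format_mapping = {
        "AIRFLOW_CONTEXT_DAG_ID": {"env_var_format": "AIRFLOW_CTX_DAG_ID"},
    }

    blank_env_vars = {value["env_var_format"]: "" for value in name_format_mapping.values()}
    os.environ.update(blank_env_vars)  # AIRFLOW_CTX_DAG_ID is now set, but empty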
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -25,8 +25,14 @@ from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.mysql.hooks.mysql import MySqlHook
-from airflow.utils.operator_helpers import context_to_airflow_vars
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import context_to_airflow_vars
+else:
+    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
+
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
--- apache_airflow_providers_apache_hive-9.0.3rc1/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
+++ apache_airflow_providers_apache_hive-9.0.4/src/airflow/providers/apache/hive/transfers/hive_to_samba.py
@@ -25,8 +25,13 @@ from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
 from airflow.providers.samba.hooks.samba import SambaHook
-from airflow.utils.operator_helpers import context_to_airflow_vars
+
+if AIRFLOW_V_3_0_PLUS:
+    from airflow.sdk.execution_time.context import context_to_airflow_vars
+else:
+    from airflow.utils.operator_helpers import context_to_airflow_vars  # type: ignore[no-redef, attr-defined]
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context