apache-airflow-providers-apache-hive 8.2.1__py3-none-any.whl → 9.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/apache/hive/__init__.py +3 -3
- airflow/providers/apache/hive/get_provider_info.py +3 -2
- airflow/providers/apache/hive/hooks/hive.py +0 -8
- airflow/providers/apache/hive/operators/hive.py +6 -5
- airflow/providers/apache/hive/operators/hive_stats.py +2 -1
- airflow/providers/apache/hive/sensors/hive_partition.py +2 -1
- airflow/providers/apache/hive/sensors/metastore_partition.py +2 -1
- airflow/providers/apache/hive/sensors/named_hive_partition.py +2 -1
- airflow/providers/apache/hive/transfers/hive_to_mysql.py +2 -1
- airflow/providers/apache/hive/transfers/hive_to_samba.py +2 -1
- airflow/providers/apache/hive/transfers/mssql_to_hive.py +2 -1
- airflow/providers/apache/hive/transfers/mysql_to_hive.py +2 -1
- airflow/providers/apache/hive/transfers/s3_to_hive.py +4 -3
- airflow/providers/apache/hive/transfers/vertica_to_hive.py +2 -1
- {apache_airflow_providers_apache_hive-8.2.1.dist-info → apache_airflow_providers_apache_hive-9.0.0.dist-info}/METADATA +9 -11
- {apache_airflow_providers_apache_hive-8.2.1.dist-info → apache_airflow_providers_apache_hive-9.0.0.dist-info}/RECORD +18 -18
- {apache_airflow_providers_apache_hive-8.2.1.dist-info → apache_airflow_providers_apache_hive-9.0.0.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_apache_hive-8.2.1.dist-info → apache_airflow_providers_apache_hive-9.0.0.dist-info}/entry_points.txt +0 -0
|
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
|
|
|
29
29
|
|
|
30
30
|
__all__ = ["__version__"]
|
|
31
31
|
|
|
32
|
-
__version__ = "8.2.1"
|
|
32
|
+
__version__ = "9.0.0"
|
|
33
33
|
|
|
34
34
|
if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
|
|
35
|
-
"2.8.0"
|
|
35
|
+
"2.9.0"
|
|
36
36
|
):
|
|
37
37
|
raise RuntimeError(
|
|
38
|
-
f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.8.0+"
|
|
38
|
+
f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.9.0+"
|
|
39
39
|
)
|
|
@@ -28,8 +28,9 @@ def get_provider_info():
|
|
|
28
28
|
"name": "Apache Hive",
|
|
29
29
|
"description": "`Apache Hive <https://hive.apache.org/>`__\n",
|
|
30
30
|
"state": "ready",
|
|
31
|
-
"source-date-epoch":
|
|
31
|
+
"source-date-epoch": 1734528011,
|
|
32
32
|
"versions": [
|
|
33
|
+
"9.0.0",
|
|
33
34
|
"8.2.1",
|
|
34
35
|
"8.2.0",
|
|
35
36
|
"8.1.2",
|
|
@@ -78,7 +79,7 @@ def get_provider_info():
|
|
|
78
79
|
"1.0.0",
|
|
79
80
|
],
|
|
80
81
|
"dependencies": [
|
|
81
|
-
"apache-airflow>=2.8.0",
|
|
82
|
+
"apache-airflow>=2.9.0",
|
|
82
83
|
"apache-airflow-providers-common-sql>=1.20.0",
|
|
83
84
|
"hmsclient>=0.1.0",
|
|
84
85
|
'pandas>=2.1.2,<2.2;python_version>="3.9"',
|
|
@@ -873,14 +873,6 @@ class HiveServer2Hook(DbApiHook):
|
|
|
873
873
|
auth_mechanism = db.extra_dejson.get("auth_mechanism", "KERBEROS")
|
|
874
874
|
kerberos_service_name = db.extra_dejson.get("kerberos_service_name", "hive")
|
|
875
875
|
|
|
876
|
-
# pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier
|
|
877
|
-
if auth_mechanism == "GSSAPI":
|
|
878
|
-
self.log.warning(
|
|
879
|
-
"Detected deprecated 'GSSAPI' for auth_mechanism for %s. Please use 'KERBEROS' instead",
|
|
880
|
-
self.hiveserver2_conn_id, # type: ignore
|
|
881
|
-
)
|
|
882
|
-
auth_mechanism = "KERBEROS"
|
|
883
|
-
|
|
884
876
|
# Password should be set if and only if in LDAP or CUSTOM mode
|
|
885
877
|
if auth_mechanism in ("LDAP", "CUSTOM"):
|
|
886
878
|
password = db.password
|
|
@@ -19,8 +19,9 @@ from __future__ import annotations
|
|
|
19
19
|
|
|
20
20
|
import os
|
|
21
21
|
import re
|
|
22
|
+
from collections.abc import Sequence
|
|
22
23
|
from functools import cached_property
|
|
23
|
-
from typing import TYPE_CHECKING, Any, Sequence
|
|
24
|
+
from typing import TYPE_CHECKING, Any
|
|
24
25
|
|
|
25
26
|
from airflow.configuration import conf
|
|
26
27
|
from airflow.models import BaseOperator
|
|
@@ -111,7 +112,7 @@ class HiveOperator(BaseOperator):
|
|
|
111
112
|
job_name_template = conf.get_mandatory_value(
|
|
112
113
|
"hive",
|
|
113
114
|
"mapred_job_name_template",
|
|
114
|
-
fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}",
|
|
115
|
+
fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{logical_date}",
|
|
115
116
|
)
|
|
116
117
|
self.mapred_job_name_template: str = job_name_template
|
|
117
118
|
|
|
@@ -140,12 +141,12 @@ class HiveOperator(BaseOperator):
|
|
|
140
141
|
# set the mapred_job_name if it's not set with dag, task, execution time info
|
|
141
142
|
if not self.mapred_job_name:
|
|
142
143
|
ti = context["ti"]
|
|
143
|
-
if ti.execution_date is None:
|
|
144
|
-
raise RuntimeError("execution_date is None")
|
|
144
|
+
if ti.logical_date is None:
|
|
145
|
+
raise RuntimeError("logical_date is None")
|
|
145
146
|
self.hook.mapred_job_name = self.mapred_job_name_template.format(
|
|
146
147
|
dag_id=ti.dag_id,
|
|
147
148
|
task_id=ti.task_id,
|
|
148
|
-
|
|
149
|
+
logical_date=ti.logical_date.isoformat(),
|
|
149
150
|
hostname=ti.hostname.split(".")[0],
|
|
150
151
|
)
|
|
151
152
|
|
|
@@ -18,7 +18,8 @@
|
|
|
18
18
|
from __future__ import annotations
|
|
19
19
|
|
|
20
20
|
import json
|
|
21
|
-
from typing import TYPE_CHECKING, Any, Callable, Sequence
|
|
21
|
+
from collections.abc import Sequence
|
|
22
|
+
from typing import TYPE_CHECKING, Any, Callable
|
|
22
23
|
|
|
23
24
|
from airflow.exceptions import AirflowException
|
|
24
25
|
from airflow.models import BaseOperator
|
|
@@ -17,7 +17,8 @@
|
|
|
17
17
|
# under the License.
|
|
18
18
|
from __future__ import annotations
|
|
19
19
|
|
|
20
|
-
from typing import TYPE_CHECKING, Any, Sequence
|
|
20
|
+
from collections.abc import Sequence
|
|
21
|
+
from typing import TYPE_CHECKING, Any
|
|
21
22
|
|
|
22
23
|
from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
|
|
23
24
|
from airflow.sensors.base import BaseSensorOperator
|
|
@@ -17,7 +17,8 @@
|
|
|
17
17
|
# under the License.
|
|
18
18
|
from __future__ import annotations
|
|
19
19
|
|
|
20
|
-
from typing import TYPE_CHECKING, Any, Sequence
|
|
20
|
+
from collections.abc import Sequence
|
|
21
|
+
from typing import TYPE_CHECKING, Any
|
|
21
22
|
|
|
22
23
|
from airflow.providers.common.sql.sensors.sql import SqlSensor
|
|
23
24
|
|
|
@@ -19,8 +19,9 @@
|
|
|
19
19
|
|
|
20
20
|
from __future__ import annotations
|
|
21
21
|
|
|
22
|
+
from collections.abc import Sequence
|
|
22
23
|
from tempfile import NamedTemporaryFile
|
|
23
|
-
from typing import TYPE_CHECKING, Sequence
|
|
24
|
+
from typing import TYPE_CHECKING
|
|
24
25
|
|
|
25
26
|
from airflow.models import BaseOperator
|
|
26
27
|
from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
|
|
@@ -19,8 +19,9 @@
|
|
|
19
19
|
|
|
20
20
|
from __future__ import annotations
|
|
21
21
|
|
|
22
|
+
from collections.abc import Sequence
|
|
22
23
|
from tempfile import NamedTemporaryFile
|
|
23
|
-
from typing import TYPE_CHECKING, Sequence
|
|
24
|
+
from typing import TYPE_CHECKING
|
|
24
25
|
|
|
25
26
|
from airflow.models import BaseOperator
|
|
26
27
|
from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
|
|
@@ -20,9 +20,10 @@
|
|
|
20
20
|
from __future__ import annotations
|
|
21
21
|
|
|
22
22
|
import csv
|
|
23
|
+
from collections.abc import Sequence
|
|
23
24
|
from contextlib import closing
|
|
24
25
|
from tempfile import NamedTemporaryFile
|
|
25
|
-
from typing import TYPE_CHECKING, Sequence
|
|
26
|
+
from typing import TYPE_CHECKING
|
|
26
27
|
|
|
27
28
|
try:
|
|
28
29
|
import MySQLdb
|
|
@@ -289,8 +289,9 @@ def uncompress_file(input_file_name, file_extension, dest_dir):
|
|
|
289
289
|
fmodule = gzip.GzipFile
|
|
290
290
|
elif file_extension.lower() == ".bz2":
|
|
291
291
|
fmodule = bz2.BZ2File
|
|
292
|
-
with
|
|
293
|
-
|
|
294
|
-
|
|
292
|
+
with (
|
|
293
|
+
fmodule(input_file_name, mode="rb") as f_compressed,
|
|
294
|
+
NamedTemporaryFile(dir=dest_dir, mode="wb", delete=False) as f_uncompressed,
|
|
295
|
+
):
|
|
295
296
|
shutil.copyfileobj(f_compressed, f_uncompressed)
|
|
296
297
|
return f_uncompressed.name
|
|
@@ -20,8 +20,9 @@
|
|
|
20
20
|
from __future__ import annotations
|
|
21
21
|
|
|
22
22
|
import csv
|
|
23
|
+
from collections.abc import Sequence
|
|
23
24
|
from tempfile import NamedTemporaryFile
|
|
24
|
-
from typing import TYPE_CHECKING, Any, Sequence
|
|
25
|
+
from typing import TYPE_CHECKING, Any
|
|
25
26
|
|
|
26
27
|
from airflow.models import BaseOperator
|
|
27
28
|
from airflow.providers.apache.hive.hooks.hive import HiveCliHook
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.3
|
|
2
2
|
Name: apache-airflow-providers-apache-hive
|
|
3
|
-
Version: 8.2.1
|
|
3
|
+
Version: 9.0.0
|
|
4
4
|
Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
|
|
5
5
|
Keywords: airflow-provider,apache.hive,airflow,integration
|
|
6
6
|
Author-email: Apache Software Foundation <dev@airflow.apache.org>
|
|
@@ -21,7 +21,7 @@ Classifier: Programming Language :: Python :: 3.11
|
|
|
21
21
|
Classifier: Programming Language :: Python :: 3.12
|
|
22
22
|
Classifier: Topic :: System :: Monitoring
|
|
23
23
|
Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
|
|
24
|
-
Requires-Dist: apache-airflow>=2.8.0
|
|
24
|
+
Requires-Dist: apache-airflow>=2.9.0
|
|
25
25
|
Requires-Dist: hmsclient>=0.1.0
|
|
26
26
|
Requires-Dist: jmespath>=0.7.0
|
|
27
27
|
Requires-Dist: pandas>=1.5.3,<2.2;python_version<"3.9"
|
|
@@ -29,21 +29,19 @@ Requires-Dist: pandas>=2.1.2,<2.2;python_version>="3.9"
|
|
|
29
29
|
Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
|
|
30
30
|
Requires-Dist: thrift>=0.11.0
|
|
31
31
|
Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
|
|
32
|
-
Requires-Dist: apache-airflow-providers-common-sql ; extra == "common-sql"
|
|
33
32
|
Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
|
|
34
33
|
Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
|
|
35
34
|
Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
|
|
36
35
|
Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
|
|
37
36
|
Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
|
|
38
37
|
Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
|
|
39
|
-
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/changelog.html
|
|
40
|
-
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1
|
|
38
|
+
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/changelog.html
|
|
39
|
+
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0
|
|
41
40
|
Project-URL: Slack Chat, https://s.apache.org/airflow-slack
|
|
42
41
|
Project-URL: Source Code, https://github.com/apache/airflow
|
|
43
|
-
Project-URL: Twitter, https://twitter.com/ApacheAirflow
|
|
42
|
+
Project-URL: Twitter, https://x.com/ApacheAirflow
|
|
44
43
|
Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
|
|
45
44
|
Provides-Extra: amazon
|
|
46
|
-
Provides-Extra: common-sql
|
|
47
45
|
Provides-Extra: microsoft-mssql
|
|
48
46
|
Provides-Extra: mysql
|
|
49
47
|
Provides-Extra: presto
|
|
@@ -94,7 +92,7 @@ Provides-Extra: vertica
|
|
|
94
92
|
|
|
95
93
|
Package ``apache-airflow-providers-apache-hive``
|
|
96
94
|
|
|
97
|
-
Release: ``8.2.1``
|
|
95
|
+
Release: ``9.0.0``
|
|
98
96
|
|
|
99
97
|
|
|
100
98
|
`Apache Hive <https://hive.apache.org/>`__
|
|
@@ -107,7 +105,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
|
|
|
107
105
|
are in ``airflow.providers.apache.hive`` python package.
|
|
108
106
|
|
|
109
107
|
You can find package information and changelog for the provider
|
|
110
|
-
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/>`_.
|
|
108
|
+
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/>`_.
|
|
111
109
|
|
|
112
110
|
Installation
|
|
113
111
|
------------
|
|
@@ -124,7 +122,7 @@ Requirements
|
|
|
124
122
|
======================================= =========================================
|
|
125
123
|
PIP package Version required
|
|
126
124
|
======================================= =========================================
|
|
127
|
-
``apache-airflow``                      ``>=2.8.0``
|
|
125
|
+
``apache-airflow`` ``>=2.9.0``
|
|
128
126
|
``apache-airflow-providers-common-sql`` ``>=1.20.0``
|
|
129
127
|
``hmsclient`` ``>=0.1.0``
|
|
130
128
|
``pandas`` ``>=2.1.2,<2.2; python_version >= "3.9"``
|
|
@@ -160,4 +158,4 @@ Dependent package
|
|
|
160
158
|
====================================================================================================================== ===================
|
|
161
159
|
|
|
162
160
|
The changelog for the provider package can be found in the
|
|
163
|
-
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/changelog.html>`_.
|
|
161
|
+
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/changelog.html>`_.
|
|
@@ -1,27 +1,27 @@
|
|
|
1
1
|
airflow/providers/apache/hive/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
|
|
2
|
-
airflow/providers/apache/hive/__init__.py,sha256=
|
|
3
|
-
airflow/providers/apache/hive/get_provider_info.py,sha256=
|
|
2
|
+
airflow/providers/apache/hive/__init__.py,sha256=5b3RR33He6WC-TAwDICi8baXvCHO2OZubRrKpOUaPEg,1498
|
|
3
|
+
airflow/providers/apache/hive/get_provider_info.py,sha256=tjfDpgG2vaWmY5DGiQCGNLsNSD1s9kFgU3K01-JVeA4,7066
|
|
4
4
|
airflow/providers/apache/hive/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
|
|
5
|
-
airflow/providers/apache/hive/hooks/hive.py,sha256=
|
|
5
|
+
airflow/providers/apache/hive/hooks/hive.py,sha256=0BOk6tdrtq1vFXRMsqhLhXHGAmj85aOPnmS2movXcYQ,42573
|
|
6
6
|
airflow/providers/apache/hive/macros/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
|
|
7
7
|
airflow/providers/apache/hive/macros/hive.py,sha256=p3LUIb74pbthh8lcUXV6f3m-cZrcRXjV8aGiazdpnzo,4592
|
|
8
8
|
airflow/providers/apache/hive/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
|
|
9
|
-
airflow/providers/apache/hive/operators/hive.py,sha256=
|
|
10
|
-
airflow/providers/apache/hive/operators/hive_stats.py,sha256=
|
|
9
|
+
airflow/providers/apache/hive/operators/hive.py,sha256=d_73cC-ODb7q5_37R7MNUNT-xnz-xxqKmqEWR3S1wEc,7029
|
|
10
|
+
airflow/providers/apache/hive/operators/hive_stats.py,sha256=8fNlR9o8J9DKMDBpB849apqA2iGbMovlHOWute4TFCY,7074
|
|
11
11
|
airflow/providers/apache/hive/plugins/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
|
|
12
12
|
airflow/providers/apache/hive/plugins/hive.py,sha256=rrGccro6DEdnHSmII3goBYD9te-XdhPaPMjWpF-36HU,1146
|
|
13
13
|
airflow/providers/apache/hive/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
|
|
14
|
-
airflow/providers/apache/hive/sensors/hive_partition.py,sha256=
|
|
15
|
-
airflow/providers/apache/hive/sensors/metastore_partition.py,sha256=
|
|
16
|
-
airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=
|
|
14
|
+
airflow/providers/apache/hive/sensors/hive_partition.py,sha256=UIafLmwTUXdNfWAhLOOSZQc6v6So7lz_0bYA2Wyunw4,3066
|
|
15
|
+
airflow/providers/apache/hive/sensors/metastore_partition.py,sha256=H-luvQ96TRp5Oax0LIoag4rlnKiDlkJCsYNh80-IzXU,3365
|
|
16
|
+
airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=5R3NJ_Vi9nqROD9VPs-YXnziYhWd-X5ukgelLwfngy4,4185
|
|
17
17
|
airflow/providers/apache/hive/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
|
|
18
|
-
airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=
|
|
19
|
-
airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=
|
|
20
|
-
airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=
|
|
21
|
-
airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=
|
|
22
|
-
airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=
|
|
23
|
-
airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=
|
|
24
|
-
apache_airflow_providers_apache_hive-
|
|
25
|
-
apache_airflow_providers_apache_hive-
|
|
26
|
-
apache_airflow_providers_apache_hive-
|
|
27
|
-
apache_airflow_providers_apache_hive-
|
|
18
|
+
airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=VcMbWhtGR8cHvOr3IinMGbPisdtYC__Ga5kwyDfVQFM,5308
|
|
19
|
+
airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=Ou70VwArs59GRjJWxg2Hp-F-8BgI5dF7Oq2jep73oWM,3010
|
|
20
|
+
airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=SQGcJTAKPoRipmWLdudN_wATBNqd_2eMRgokcjkPmjQ,5644
|
|
21
|
+
airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=6BkAoYrn9wDfSC0BKHp86EgoUVtqwHGz9nZsielCydQ,7022
|
|
22
|
+
airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=K5Zke6CF9Rgw7YDkOtWIYXC3_2TuXLTuwYxu8uVrGP0,12455
|
|
23
|
+
airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=IBiMGF7322rOrwDbWMVB89oKO8hFhhBmc6rQwPkFA34,5555
|
|
24
|
+
apache_airflow_providers_apache_hive-9.0.0.dist-info/entry_points.txt,sha256=Hzixt33mYYldwmwswarArUB7ZU0xbmUtd3tFViZ414s,185
|
|
25
|
+
apache_airflow_providers_apache_hive-9.0.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
|
26
|
+
apache_airflow_providers_apache_hive-9.0.0.dist-info/METADATA,sha256=M7OBb0siJUEIDwJCjivCrPHVlceWRxQYtj65ZCoq01k,8041
|
|
27
|
+
apache_airflow_providers_apache_hive-9.0.0.dist-info/RECORD,,
|
|
File without changes
|