apache-airflow-providers-apache-hive 8.2.1__py3-none-any.whl → 9.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "8.2.1"
+__version__ = "9.0.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.8.0"
+    "2.9.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.8.0+"
+        f"The package `apache-airflow-providers-apache-hive:{__version__}` needs Apache Airflow 2.9.0+"
     )
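Worth noting in this hunk: the nested ``packaging.version.parse(...).base_version`` call strips pre-release and local segments before the comparison, so a development build of Airflow still clears the minimum-version gate. A minimal standalone sketch of that behaviour (the installed version string is invented):

    from packaging.version import parse

    airflow_version = "2.9.0.dev0"  # hypothetical installed build

    # The raw version sorts *before* 2.9.0 (dev releases are pre-releases),
    # but base_version drops the ".dev0" suffix, so the gated check passes.
    assert parse(airflow_version) < parse("2.9.0")
    assert parse(parse(airflow_version).base_version) >= parse("2.9.0")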
@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "Apache Hive",
         "description": "`Apache Hive <https://hive.apache.org/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1731569312,
+        "source-date-epoch": 1734528011,
         "versions": [
+            "9.0.0",
             "8.2.1",
             "8.2.0",
             "8.1.2",
@@ -78,7 +79,7 @@ def get_provider_info():
             "1.0.0",
         ],
         "dependencies": [
-            "apache-airflow>=2.8.0",
+            "apache-airflow>=2.9.0",
             "apache-airflow-providers-common-sql>=1.20.0",
             "hmsclient>=0.1.0",
             'pandas>=2.1.2,<2.2;python_version>="3.9"',
@@ -873,14 +873,6 @@ class HiveServer2Hook(DbApiHook):
         auth_mechanism = db.extra_dejson.get("auth_mechanism", "KERBEROS")
         kerberos_service_name = db.extra_dejson.get("kerberos_service_name", "hive")
 
-        # pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier
-        if auth_mechanism == "GSSAPI":
-            self.log.warning(
-                "Detected deprecated 'GSSAPI' for auth_mechanism for %s. Please use 'KERBEROS' instead",
-                self.hiveserver2_conn_id,  # type: ignore
-            )
-            auth_mechanism = "KERBEROS"
-
         # Password should be set if and only if in LDAP or CUSTOM mode
         if auth_mechanism in ("LDAP", "CUSTOM"):
             password = db.password
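This removal is the breaking change behind the major version bump: ``HiveServer2Hook`` no longer rewrites the deprecated ``auth_mechanism`` value ``GSSAPI`` to ``KERBEROS``, it passes the connection extras through unchanged. Connections still carrying the old value need a one-time update, as the removed warning itself advised. A sketch of the migration, assuming the extras are patched via the standard ``airflow.models.Connection`` API (the conn_id and extras are illustrative):

    import json

    from airflow.models import Connection

    # Hypothetical connection whose extras predate 9.0.0.
    conn = Connection(
        conn_id="hiveserver2_default",
        conn_type="hiveserver2",
        extra=json.dumps({"auth_mechanism": "GSSAPI"}),
    )

    # 9.0.0 no longer maps GSSAPI -> KERBEROS for you, so store the
    # value the hook actually understands before upgrading.
    extras = conn.extra_dejson
    if extras.get("auth_mechanism") == "GSSAPI":
        extras["auth_mechanism"] = "KERBEROS"
        conn.extra = json.dumps(extras)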
@@ -19,8 +19,9 @@ from __future__ import annotations
 
 import os
 import re
+from collections.abc import Sequence
 from functools import cached_property
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.models import BaseOperator
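The import churn in this and the following hunks is one mechanical change repeated per module: ``typing.Sequence`` has been a deprecated alias of ``collections.abc.Sequence`` since PEP 585, and these modules already use ``from __future__ import annotations``, so swapping the import is purely a matter of pointing type checkers at the surviving name. A minimal sketch (the helper function is illustrative, not from the provider):

    from __future__ import annotations

    from collections.abc import Sequence  # replaces: from typing import Sequence


    def render_hql(statements: Sequence[str]) -> str:
        # Illustrative: annotate a batch of HQL statements with the ABC.
        return ";\n".join(statements)


    print(render_hql(["SHOW TABLES", "SHOW DATABASES"]))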
@@ -111,7 +112,7 @@ class HiveOperator(BaseOperator):
         job_name_template = conf.get_mandatory_value(
             "hive",
             "mapred_job_name_template",
-            fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}",
+            fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{logical_date}",
         )
         self.mapred_job_name_template: str = job_name_template
 
@@ -140,12 +141,12 @@ class HiveOperator(BaseOperator):
         # set the mapred_job_name if it's not set with dag, task, execution time info
         if not self.mapred_job_name:
             ti = context["ti"]
-            if ti.execution_date is None:
-                raise RuntimeError("execution_date is None")
+            if ti.logical_date is None:
+                raise RuntimeError("logical_date is None")
             self.hook.mapred_job_name = self.mapred_job_name_template.format(
                 dag_id=ti.dag_id,
                 task_id=ti.task_id,
-                execution_date=ti.execution_date.isoformat(),
+                logical_date=ti.logical_date.isoformat(),
                 hostname=ti.hostname.split(".")[0],
             )
 
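Both ``HiveOperator`` hunks are the same rename: ``execution_date`` gives way to Airflow's ``logical_date`` terminology, including the placeholder name in the template. A customised ``[hive] mapred_job_name_template`` in ``airflow.cfg`` must rename its ``{execution_date}`` placeholder too, otherwise ``str.format`` raises ``KeyError`` at execute time. A sketch of how the new default renders (all values invented):

    from datetime import datetime, timezone

    template = "Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{logical_date}"

    print(
        template.format(
            hostname="worker-1",  # ti.hostname.split(".")[0]
            dag_id="hive_example",
            task_id="run_hql",
            logical_date=datetime(2024, 12, 18, tzinfo=timezone.utc).isoformat(),
        )
    )
    # Airflow HiveOperator task for worker-1.hive_example.run_hql.2024-12-18T00:00:00+00:00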
@@ -18,7 +18,8 @@
 from __future__ import annotations
 
 import json
-from typing import TYPE_CHECKING, Any, Callable, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable
 
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
 from airflow.sensors.base import BaseSensorOperator
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.providers.common.sql.sensors.sql import SqlSensor
 
@@ -17,7 +17,8 @@
 # under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.sensors.base import BaseSensorOperator
 
@@ -19,8 +19,9 @@
 
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
@@ -19,8 +19,9 @@
 
 from __future__ import annotations
 
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
@@ -20,8 +20,9 @@
 from __future__ import annotations
 
 import csv
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 import pymssql
 
@@ -20,9 +20,10 @@
 from __future__ import annotations
 
 import csv
+from collections.abc import Sequence
 from contextlib import closing
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Sequence
+from typing import TYPE_CHECKING
 
 try:
     import MySQLdb
@@ -289,8 +289,9 @@ def uncompress_file(input_file_name, file_extension, dest_dir):
         fmodule = gzip.GzipFile
     elif file_extension.lower() == ".bz2":
         fmodule = bz2.BZ2File
-    with fmodule(input_file_name, mode="rb") as f_compressed, NamedTemporaryFile(
-        dir=dest_dir, mode="wb", delete=False
-    ) as f_uncompressed:
+    with (
+        fmodule(input_file_name, mode="rb") as f_compressed,
+        NamedTemporaryFile(dir=dest_dir, mode="wb", delete=False) as f_uncompressed,
+    ):
         shutil.copyfileobj(f_compressed, f_uncompressed)
     return f_uncompressed.name
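No behaviour change in ``uncompress_file``: the two context managers are now grouped with the parenthesized ``with`` syntax (formalized in Python 3.10, accepted by CPython's PEG parser since 3.9) instead of breaking the line mid-call inside ``NamedTemporaryFile(...)``. A self-contained sketch of the same pattern, with invented data:

    import gzip
    import os
    import shutil
    import tempfile

    # Build a small gzip file so the example runs on its own.
    src = os.path.join(tempfile.gettempdir(), "example.gz")
    with gzip.open(src, "wb") as f:
        f.write(b"id\tname\n1\thive\n")

    # Same two resources as uncompress_file, grouped by the new syntax.
    with (
        gzip.GzipFile(src, mode="rb") as f_compressed,
        tempfile.NamedTemporaryFile(dir=tempfile.gettempdir(), mode="wb", delete=False) as f_uncompressed,
    ):
        shutil.copyfileobj(f_compressed, f_uncompressed)

    print(f_uncompressed.name)  # path of the uncompressed copy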
@@ -20,8 +20,9 @@
 from __future__ import annotations
 
 import csv
+from collections.abc import Sequence
 from tempfile import NamedTemporaryFile
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-apache-hive
-Version: 8.2.1
+Version: 9.0.0
 Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
 Keywords: airflow-provider,apache.hive,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,7 +21,7 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
 Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
-Requires-Dist: apache-airflow>=2.8.0
+Requires-Dist: apache-airflow>=2.9.0
 Requires-Dist: hmsclient>=0.1.0
 Requires-Dist: jmespath>=0.7.0
 Requires-Dist: pandas>=1.5.3,<2.2;python_version<"3.9"
@@ -29,21 +29,19 @@ Requires-Dist: pandas>=2.1.2,<2.2;python_version>="3.9"
 Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
 Requires-Dist: thrift>=0.11.0
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
-Requires-Dist: apache-airflow-providers-common-sql ; extra == "common-sql"
 Requires-Dist: apache-airflow-providers-microsoft-mssql ; extra == "microsoft-mssql"
 Requires-Dist: apache-airflow-providers-mysql ; extra == "mysql"
 Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
 Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://twitter.com/ApacheAirflow
+Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: amazon
-Provides-Extra: common-sql
 Provides-Extra: microsoft-mssql
 Provides-Extra: mysql
 Provides-Extra: presto
@@ -94,7 +92,7 @@ Provides-Extra: vertica
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``8.2.1``
+Release: ``9.0.0``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -107,7 +105,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/>`_.
 
 Installation
 ------------
@@ -124,7 +122,7 @@ Requirements
 ======================================= =========================================
 PIP package                             Version required
 ======================================= =========================================
-``apache-airflow``                      ``>=2.8.0``
+``apache-airflow``                      ``>=2.9.0``
 ``apache-airflow-providers-common-sql`` ``>=1.20.0``
 ``hmsclient``                           ``>=0.1.0``
 ``pandas``                              ``>=2.1.2,<2.2; python_version >= "3.9"``
@@ -160,4 +158,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.2.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.0.0/changelog.html>`_.
@@ -1,27 +1,27 @@
 airflow/providers/apache/hive/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/apache/hive/__init__.py,sha256=UerqdCosYbuL0lcXyGbd1hj7Ks-pPpEkqWEjLThqpRA,1498
-airflow/providers/apache/hive/get_provider_info.py,sha256=e_4MFlI3n3fCuAmyNngbPw7HfiVknvC46mJVN12JSdA,7045
+airflow/providers/apache/hive/__init__.py,sha256=5b3RR33He6WC-TAwDICi8baXvCHO2OZubRrKpOUaPEg,1498
+airflow/providers/apache/hive/get_provider_info.py,sha256=tjfDpgG2vaWmY5DGiQCGNLsNSD1s9kFgU3K01-JVeA4,7066
 airflow/providers/apache/hive/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/apache/hive/hooks/hive.py,sha256=szwElZ1kDJG1WIz35XM-byKesXVJmEVzAU6Y6ukGd1g,42940
+airflow/providers/apache/hive/hooks/hive.py,sha256=0BOk6tdrtq1vFXRMsqhLhXHGAmj85aOPnmS2movXcYQ,42573
 airflow/providers/apache/hive/macros/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/apache/hive/macros/hive.py,sha256=p3LUIb74pbthh8lcUXV6f3m-cZrcRXjV8aGiazdpnzo,4592
 airflow/providers/apache/hive/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/apache/hive/operators/hive.py,sha256=rDVz_d6w5KK3LsubSkgGBmd1kozNVveQYmWDcYN1Rcw,7012
-airflow/providers/apache/hive/operators/hive_stats.py,sha256=zIdAlR3KLPvliqiZCLycRoO4v9hPkA58wmSa6d1W2lU,7047
+airflow/providers/apache/hive/operators/hive.py,sha256=d_73cC-ODb7q5_37R7MNUNT-xnz-xxqKmqEWR3S1wEc,7029
+airflow/providers/apache/hive/operators/hive_stats.py,sha256=8fNlR9o8J9DKMDBpB849apqA2iGbMovlHOWute4TFCY,7074
 airflow/providers/apache/hive/plugins/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/apache/hive/plugins/hive.py,sha256=rrGccro6DEdnHSmII3goBYD9te-XdhPaPMjWpF-36HU,1146
 airflow/providers/apache/hive/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/apache/hive/sensors/hive_partition.py,sha256=t6YlsDhfU-b9GTsJWWA1s30LWilUWfWcYZLn057wGX0,3039
-airflow/providers/apache/hive/sensors/metastore_partition.py,sha256=UA6TtxdLLXpgptssCELXS9tsOaaXbwqmKajEwo7kxG0,3338
-airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=vpC70TX4PxZmatBDF73rWbBevrsCr6Rl6PgXs8KHclA,4158
+airflow/providers/apache/hive/sensors/hive_partition.py,sha256=UIafLmwTUXdNfWAhLOOSZQc6v6So7lz_0bYA2Wyunw4,3066
+airflow/providers/apache/hive/sensors/metastore_partition.py,sha256=H-luvQ96TRp5Oax0LIoag4rlnKiDlkJCsYNh80-IzXU,3365
+airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=5R3NJ_Vi9nqROD9VPs-YXnziYhWd-X5ukgelLwfngy4,4185
 airflow/providers/apache/hive/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=wYULZxrT3K4_poTXt_5bXziQ-0WbB1dZ7otsmpm3S4M,5281
-airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=cIACOY-NgYl-ktD1nb8eoCDZEbSX5A33KUk2B_Mnv6s,2983
-airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=bnhHsen0qqgenQD2QGl5yEEQaGFf7Do9m84wMsVVH0s,5617
-airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=expB5ZCiHDDITX5UIE_NDJLS47k7hvtq6x7MhXDtnsA,6995
-airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=5mQ2hKbxJNy7w5SNqogtboGYoC6TXbDg6N39tgCc6XY,12444
-airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=cszge1MSZmaUhHuAkmtTDdflzmShEb5cJNA2t3Wbba8,5528
-apache_airflow_providers_apache_hive-8.2.1.dist-info/entry_points.txt,sha256=Hzixt33mYYldwmwswarArUB7ZU0xbmUtd3tFViZ414s,185
-apache_airflow_providers_apache_hive-8.2.1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-apache_airflow_providers_apache_hive-8.2.1.dist-info/METADATA,sha256=_GVS5QYBf3ml6AvJBlrRm_3F7OMqKpxx75Nceg3c7yw,8149
-apache_airflow_providers_apache_hive-8.2.1.dist-info/RECORD,,
+airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=VcMbWhtGR8cHvOr3IinMGbPisdtYC__Ga5kwyDfVQFM,5308
+airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=Ou70VwArs59GRjJWxg2Hp-F-8BgI5dF7Oq2jep73oWM,3010
+airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=SQGcJTAKPoRipmWLdudN_wATBNqd_2eMRgokcjkPmjQ,5644
+airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=6BkAoYrn9wDfSC0BKHp86EgoUVtqwHGz9nZsielCydQ,7022
+airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=K5Zke6CF9Rgw7YDkOtWIYXC3_2TuXLTuwYxu8uVrGP0,12455
+airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=IBiMGF7322rOrwDbWMVB89oKO8hFhhBmc6rQwPkFA34,5555
+apache_airflow_providers_apache_hive-9.0.0.dist-info/entry_points.txt,sha256=Hzixt33mYYldwmwswarArUB7ZU0xbmUtd3tFViZ414s,185
+apache_airflow_providers_apache_hive-9.0.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_apache_hive-9.0.0.dist-info/METADATA,sha256=M7OBb0siJUEIDwJCjivCrPHVlceWRxQYtj65ZCoq01k,8041
+apache_airflow_providers_apache_hive-9.0.0.dist-info/RECORD,,