apache-airflow-providers-apache-hive: 5.1.3 → 6.0.0 (py3-none-any.whl)

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

@@ -28,6 +28,7 @@ def get_provider_info():
  "name": "Apache Hive",
  "description": "`Apache Hive <https://hive.apache.org/>`__\n",
  "versions": [
+ "6.0.0",
  "5.1.3",
  "5.1.2",
  "5.1.1",
@@ -99,12 +99,13 @@ class HiveCliHook(BaseHook):
  mapred_queue_priority: str | None = None,
  mapred_job_name: str | None = None,
  hive_cli_params: str = "",
+ auth: str | None = None,
  ) -> None:
  super().__init__()
  conn = self.get_connection(hive_cli_conn_id)
  self.hive_cli_params: str = hive_cli_params
  self.use_beeline: bool = conn.extra_dejson.get("use_beeline", False)
- self.auth = conn.extra_dejson.get("auth", "noSasl")
+ self.auth = auth
  self.conn = conn
  self.run_as = run_as
  self.sub_process: Any = None
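
With this change the hook no longer reads ``auth`` from the connection's extra field (where it previously defaulted to ``noSasl``); callers pass it directly. A minimal sketch of the new call site, assuming a ``hive_cli_default`` connection exists:

    from airflow.providers.apache.hive.hooks.hive import HiveCliHook

    # 6.0.0: auth is a constructor argument; the "auth" key in the
    # connection's extra JSON (old default "noSasl") is no longer consulted.
    hook = HiveCliHook(
        hive_cli_conn_id="hive_cli_default",  # assumed connection id
        auth="noSasl",  # formerly supplied via the connection's extra field
    )
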
@@ -56,6 +56,8 @@ class HiveOperator(BaseOperator):
  Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW
  :param mapred_job_name: This name will appear in the jobtracker.
  This can make monitoring easier.
+ :param hive_cli_params: parameters passed to the hive CLI
+ :param auth: optional authentication option passed for the Hive connection
  """

  template_fields: Sequence[str] = (
@@ -88,6 +90,7 @@ class HiveOperator(BaseOperator):
  mapred_queue_priority: str | None = None,
  mapred_job_name: str | None = None,
  hive_cli_params: str = "",
+ auth: str | None = None,
  **kwargs: Any,
  ) -> None:
  super().__init__(**kwargs)
@@ -104,6 +107,7 @@ class HiveOperator(BaseOperator):
  self.mapred_queue_priority = mapred_queue_priority
  self.mapred_job_name = mapred_job_name
  self.hive_cli_params = hive_cli_params
+ self.auth = auth

  job_name_template = conf.get_mandatory_value(
  "hive",
@@ -127,6 +131,7 @@ class HiveOperator(BaseOperator):
  mapred_queue_priority=self.mapred_queue_priority,
  mapred_job_name=self.mapred_job_name,
  hive_cli_params=self.hive_cli_params,
+ auth=self.auth,
  )

  def prepare_template(self) -> None:
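
Together with the two hunks above, this threads ``auth`` from the operator's signature through to the ``HiveCliHook`` it builds at execution time. A usage sketch; the task id and HQL are illustrative:

    from airflow.providers.apache.hive.operators.hive import HiveOperator

    # The operator stores auth and forwards it to HiveCliHook when the task runs.
    run_hql = HiveOperator(
        task_id="run_hql",              # illustrative task id
        hql="SELECT COUNT(*) FROM t;",  # illustrative query
        hive_cli_conn_id="hive_cli_default",
        auth="noSasl",                  # previously read from connection extras
    )
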
@@ -60,6 +60,7 @@ class MsSqlToHiveOperator(BaseOperator):
  :param mssql_conn_id: source Microsoft SQL Server connection
  :param hive_cli_conn_id: Reference to the
  :ref:`Hive CLI connection id <howto/connection:hive_cli>`.
+ :param hive_auth: optional authentication option passed for the Hive connection
  :param tblproperties: TBLPROPERTIES of the hive table being created
  """

@@ -79,6 +80,7 @@ class MsSqlToHiveOperator(BaseOperator):
  delimiter: str = chr(1),
  mssql_conn_id: str = "mssql_default",
  hive_cli_conn_id: str = "hive_cli_default",
+ hive_auth: str | None = None,
  tblproperties: dict | None = None,
  **kwargs,
  ) -> None:
@@ -93,6 +95,7 @@ class MsSqlToHiveOperator(BaseOperator):
  self.hive_cli_conn_id = hive_cli_conn_id
  self.partition = partition or {}
  self.tblproperties = tblproperties
+ self.hive_auth = hive_auth

  @classmethod
  def type_map(cls, mssql_type: int) -> str:
@@ -119,7 +122,7 @@ class MsSqlToHiveOperator(BaseOperator):
  csv_writer.writerows(cursor)
  tmp_file.flush()

- hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
+ hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id, auth=self.hive_auth)
  self.log.info("Loading file into Hive")
  hive.load_file(
  tmp_file.name,
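
The transfer operators expose the same option under the name ``hive_auth`` and hand it to ``HiveCliHook(auth=...)`` inside ``execute``; ``MySqlToHiveOperator``, ``S3ToHiveOperator``, and ``VerticaToHiveOperator`` in the hunks below follow the identical pattern. A sketch, with the source query and target table invented for illustration:

    from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator

    # hive_auth is forwarded to HiveCliHook(auth=...) when the transfer runs.
    transfer = MsSqlToHiveOperator(
        task_id="mssql_to_hive",         # illustrative
        sql="SELECT * FROM dbo.orders",  # illustrative source query
        hive_table="staging.orders",     # illustrative target table
        hive_cli_conn_id="hive_cli_default",
        hive_auth="noSasl",              # previously read from connection extras
    )
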
@@ -65,6 +65,7 @@ class MySqlToHiveOperator(BaseOperator):
  :param mysql_conn_id: source mysql connection
  :param hive_cli_conn_id: Reference to the
  :ref:`Hive CLI connection id <howto/connection:hive_cli>`.
+ :param hive_auth: optional authentication option passed for the Hive connection
  :param tblproperties: TBLPROPERTIES of the hive table being created
  """

@@ -87,6 +88,7 @@ class MySqlToHiveOperator(BaseOperator):
  escapechar: str | None = None,
  mysql_conn_id: str = "mysql_default",
  hive_cli_conn_id: str = "hive_cli_default",
+ hive_auth: str | None = None,
  tblproperties: dict | None = None,
  **kwargs,
  ) -> None:
@@ -104,6 +106,7 @@ class MySqlToHiveOperator(BaseOperator):
  self.hive_cli_conn_id = hive_cli_conn_id
  self.partition = partition or {}
  self.tblproperties = tblproperties
+ self.hive_auth = hive_auth

  @classmethod
  def type_map(cls, mysql_type: int) -> str:
@@ -126,7 +129,7 @@ class MySqlToHiveOperator(BaseOperator):
  return type_map.get(mysql_type, "STRING")

  def execute(self, context: Context):
- hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
+ hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id, auth=self.hive_auth)
  mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)

  self.log.info("Dumping MySQL query results to local file")
@@ -109,6 +109,7 @@ class S3ToHiveOperator(BaseOperator):
  input_compressed: bool = False,
  tblproperties: dict | None = None,
  select_expression: str | None = None,
+ hive_auth: str | None = None,
  **kwargs,
  ) -> None:
  super().__init__(**kwargs)
@@ -128,6 +129,7 @@ class S3ToHiveOperator(BaseOperator):
  self.input_compressed = input_compressed
  self.tblproperties = tblproperties
  self.select_expression = select_expression
+ self.hive_auth = hive_auth

  if self.check_headers and not (self.field_dict is not None and self.headers):
  raise AirflowException("To check_headers provide field_dict and headers")
@@ -135,7 +137,7 @@ class S3ToHiveOperator(BaseOperator):
  def execute(self, context: Context):
  # Downloading file from S3
  s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
- hive_hook = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
+ hive_hook = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id, auth=self.hive_auth)
  self.log.info("Downloading S3 file")

  if self.wildcard_match:
@@ -58,6 +58,7 @@ class VerticaToHiveOperator(BaseOperator):
  :param vertica_conn_id: source Vertica connection
  :param hive_cli_conn_id: Reference to the
  :ref:`Hive CLI connection id <howto/connection:hive_cli>`.
+ :param hive_auth: optional authentication option passed for the Hive connection
  """

  template_fields: Sequence[str] = ("sql", "partition", "hive_table")
@@ -76,6 +77,7 @@ class VerticaToHiveOperator(BaseOperator):
  delimiter: str = chr(1),
  vertica_conn_id: str = "vertica_default",
  hive_cli_conn_id: str = "hive_cli_default",
+ hive_auth: str | None = None,
  **kwargs: Any,
  ) -> None:
  super().__init__(**kwargs)
@@ -88,6 +90,7 @@ class VerticaToHiveOperator(BaseOperator):
  self.vertica_conn_id = vertica_conn_id
  self.hive_cli_conn_id = hive_cli_conn_id
  self.partition = partition or {}
+ self.hive_auth = hive_auth

  @classmethod
  def type_map(cls, vertica_type):
@@ -107,7 +110,7 @@ class VerticaToHiveOperator(BaseOperator):
  return type_map.get(vertica_type, "STRING")

  def execute(self, context: Context):
- hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
+ hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id, auth=self.hive_auth)
  vertica = VerticaHook(vertica_conn_id=self.vertica_conn_id)

  self.log.info("Dumping Vertica query results to local file")
@@ -1,19 +1,18 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-apache-hive
- Version: 5.1.3
+ Version: 6.0.0
  Summary: Provider for Apache Airflow. Implements apache-airflow-providers-apache-hive package
  Home-page: https://airflow.apache.org/
+ Download-URL: https://archive.apache.org/dist/airflow/providers
  Author: Apache Software Foundation
  Author-email: dev@airflow.apache.org
  License: Apache License 2.0
- Download-URL: https://archive.apache.org/dist/airflow/providers
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/5.1.3/
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.0.0/
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Twitter, https://twitter.com/ApacheAirflow
  Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
- Platform: UNKNOWN
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -74,7 +73,7 @@ Requires-Dist: apache-airflow-providers-vertica ; extra == 'vertica'

  Package ``apache-airflow-providers-apache-hive``

- Release: ``5.1.3``
+ Release: ``6.0.0``


  `Apache Hive <https://hive.apache.org/>`__
@@ -87,7 +86,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/5.1.3/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.0.0/>`_.


  Installation
@@ -165,6 +164,18 @@ Dependent package
  Changelog
  ---------

+ 6.0.0
+ .....
+
+ Breaking changes
+ ~~~~~~~~~~~~~~~~
+
+ The auth option is moved from the extra field to the auth parameter in the Hook. If you have extra
+ parameters defined in your connections as auth, you should move them to the DAG where your HiveOperator
+ or other Hive related operators are used.
+
+ * ``Move auth parameter from extra to Hook parameter (#30212)``
+
  5.1.3
  .....

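A before/after sketch of the migration the breaking-change note above describes; the connection id and values are assumed:

    # Before (5.x): auth lived in the connection's Extra JSON and was read implicitly.
    #   Connection "hive_cli_default", Extra: {"use_beeline": true, "auth": "noSasl"}
    # After (6.0.0): pass it explicitly where the operator (or hook) is created.
    from airflow.providers.apache.hive.operators.hive import HiveOperator

    migrated = HiveOperator(
        task_id="run_hql",  # illustrative
        hql="SELECT 1",     # illustrative
        hive_cli_conn_id="hive_cli_default",
        auth="noSasl",      # moved out of the connection's extra field
    )
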
@@ -526,5 +537,3 @@ Bug fixes
  .....

  Initial version of the provider.
-
-
@@ -1,11 +1,11 @@
  airflow/providers/apache/hive/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/apache/hive/get_provider_info.py,sha256=4_ksyy8AF2aLiH7dieGCooxBDtmVEneuqA292hgxNLY,5531
+ airflow/providers/apache/hive/get_provider_info.py,sha256=XrmY2HsrC39eFk8YWuXu1JcW128eoChC0L-lc-rBs2s,5552
  airflow/providers/apache/hive/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/apache/hive/hooks/hive.py,sha256=owghE8JSvpPBpPwmToQSjn4zbRv-i-X0GPCVtGMg00k,42221
+ airflow/providers/apache/hive/hooks/hive.py,sha256=uslLMrDJgfy0JDus8HgCSEOWDzdF8zHOvz_TXc4FFxU,42219
  airflow/providers/apache/hive/macros/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/apache/hive/macros/hive.py,sha256=Rq53o6tGww-KLPPZNuD64KkfE798GJQZ1HS6boVUh1A,4581
  airflow/providers/apache/hive/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/apache/hive/operators/hive.py,sha256=SDwN9auN48OFdrzyx3La8F9Qgl_dbRQLmXGWI-U-x6Y,7057
+ airflow/providers/apache/hive/operators/hive.py,sha256=J0m_hub4aWm0wZXzpDB5B1iG7Jn9RRDPdnB1LBDZ2Mg,7280
  airflow/providers/apache/hive/operators/hive_stats.py,sha256=4Tn2o4D0V55SHq55mssqTz2wXXeHtB-cki58YC_VPLs,7454
  airflow/providers/apache/hive/plugins/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/apache/hive/plugins/hive.py,sha256=rrGccro6DEdnHSmII3goBYD9te-XdhPaPMjWpF-36HU,1146
@@ -16,14 +16,14 @@ airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=_8WSbtvnQq0
  airflow/providers/apache/hive/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=EKc6B9DAgOXGkHWDMjucUYg6JnUiirvOyN4vMfaRHEc,5279
  airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=X9g2XXGLcNCIxlh0czeOolkkASHh1QDsNLrBKI0UGgw,2973
- airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=i39r-Q3_7YEbKylXDQnt7nM6WIModkImbunWwvJCHK8,5493
- airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=tDy2sMwSmZpWR373FjhfbJMWg5cC37LZZCWnbGRYnUA,6409
- airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=JUtX0pTMDJxn-TP2YJ5jbYPYDpSCY1myiXkscaIpMgQ,11587
- airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=NNWVX7pDyxgD4VedOi_5l5zoFSkknYl8RBoCumrkzXo,5370
- apache_airflow_providers_apache_hive-5.1.3.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- apache_airflow_providers_apache_hive-5.1.3.dist-info/METADATA,sha256=6kRlfAjyjyXx9jDrC0fJgn33fWnpfbvvhTt-isvU-90,18239
- apache_airflow_providers_apache_hive-5.1.3.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
- apache_airflow_providers_apache_hive-5.1.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- apache_airflow_providers_apache_hive-5.1.3.dist-info/entry_points.txt,sha256=EVfz8UeP_u2F9jOizWGvEJ8ZGn-5Qofdz-0wQvxCQjI,189
- apache_airflow_providers_apache_hive-5.1.3.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
- apache_airflow_providers_apache_hive-5.1.3.dist-info/RECORD,,
+ airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=Ft9mQOU3DMp_t7PXvhvtK83pYUYMlvjsbx6MumJM2OU,5671
+ airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=h687LZYrjIhoGEDQrox1m8_ep8fptjL79XFWv672Z_8,6587
+ airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=HNPqnG8xwfLtVKpKLx6l9Yhw_SWSa6EUsJOtvm-kvg4,11681
+ airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=fwIcSKpc75DsMvNQEggyZKP2LB0O8GCneMsAa6_S_Wc,5548
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/METADATA,sha256=-MRQXluZM6nizYvOu8moX6tsBLN4YeowSbxLSELsEwI,18578
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/entry_points.txt,sha256=Dhu3u9ocLHndlPrXKiW9MhRcGApVU4qlCHRuhqvLDcw,188
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
+ apache_airflow_providers_apache_hive-6.0.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.38.4)
+ Generator: bdist_wheel (0.40.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -3,4 +3,3 @@ hive = airflow.providers.apache.hive.plugins.hive:HivePlugin

  [apache_airflow_provider]
  provider_info = airflow.providers.apache.hive.get_provider_info:get_provider_info
-