apache-airflow-providers-apache-hive 6.1.0rc2__py3-none-any.whl → 6.1.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their public registries.
@@ -28,7 +28,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "6.1.0"
+__version__ = "6.1.1"
 
 try:
     from airflow import __version__ as airflow_version
@@ -29,6 +29,7 @@ def get_provider_info():
         "description": "`Apache Hive <https://hive.apache.org/>`__\n",
         "suspended": False,
         "versions": [
+            "6.1.1",
             "6.1.0",
             "6.0.0",
             "5.1.3",
@@ -66,6 +67,7 @@ def get_provider_info():
             'sasl>=0.3.1; python_version>="3.9"',
             "thrift>=0.9.2",
         ],
+        "excluded-python-versions": ["3.11"],
         "integrations": [
             {
                 "integration-name": "Apache Hive",
@@ -37,7 +37,7 @@ except ImportError as e:
 
     raise AirflowOptionalProviderFeatureException(e)
 
-import unicodecsv as csv
+import csv
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
@@ -137,7 +137,7 @@ class HiveCliHook(BaseHook):
         return proxy_user_value  # The default proxy user (undefined)
 
     def _prepare_cli_cmd(self) -> list[Any]:
-        """This function creates the command list from available information"""
+        """This function creates the command list from available information."""
         conn = self.conn
         hive_bin = "hive"
         cmd_extra = []
@@ -150,9 +150,9 @@ class HiveCliHook(BaseHook):
                 template = conn.extra_dejson.get("principal", "hive/_HOST@EXAMPLE.COM")
                 if "_HOST" in template:
                     template = utils.replace_hostname_pattern(utils.get_components(template))
-
                 proxy_user = self._get_proxy_user()
-
+                if ";" in template:
+                    raise RuntimeError("The principal should not contain the ';' character")
                 jdbc_url += f";principal={template};{proxy_user}"
             elif self.auth:
                 jdbc_url += ";auth=" + self.auth
@@ -296,7 +296,7 @@ class HiveCliHook(BaseHook):
         return stdout
 
     def test_hql(self, hql: str) -> None:
-        """Test an hql statement using the hive cli and EXPLAIN"""
+        """Test an hql statement using the hive cli and EXPLAIN."""
         create, insert, other = [], [], []
         for query in hql.split(";"):  # naive
             query_original = query
@@ -415,7 +415,7 @@ class HiveCliHook(BaseHook):
         tblproperties: dict[str, Any] | None = None,
     ) -> None:
         """
-        Loads a local file into Hive
+        Loads a local file into Hive.
 
         Note that the table generated in Hive uses ``STORED AS textfile``
         which isn't the most efficient serialization format. If a
@@ -475,7 +475,7 @@ class HiveCliHook(BaseHook):
         self.run_cli(hql)
 
     def kill(self) -> None:
-        """Kill Hive cli command"""
+        """Kill Hive cli command."""
         if hasattr(self, "sub_process"):
             if self.sub_process.poll() is None:
                 print("Killing the Hive job")
@@ -486,7 +486,7 @@ class HiveCliHook(BaseHook):
 
 class HiveMetastoreHook(BaseHook):
     """
-    Wrapper to interact with the Hive Metastore
+    Wrapper to interact with the Hive Metastore.
 
     :param metastore_conn_id: reference to the
         :ref: `metastore thrift service connection id <howto/connection:hive_metastore>`.
@@ -587,7 +587,7 @@ class HiveMetastoreHook(BaseHook):
 
     def check_for_partition(self, schema: str, table: str, partition: str) -> bool:
         """
-        Checks whether a partition exists
+        Checks whether a partition exists.
 
         :param schema: Name of hive schema (database) @table belongs to
         :param table: Name of hive table @partition belongs to
@@ -608,7 +608,7 @@ class HiveMetastoreHook(BaseHook):
 
     def check_for_named_partition(self, schema: str, table: str, partition_name: str) -> Any:
         """
-        Checks whether a partition with a given name exists
+        Checks whether a partition with a given name exists.
 
         :param schema: Name of hive schema (database) @table belongs to
         :param table: Name of hive table @partition belongs to
@@ -625,7 +625,7 @@ class HiveMetastoreHook(BaseHook):
             return client.check_for_named_partition(schema, table, partition_name)
 
     def get_table(self, table_name: str, db: str = "default") -> Any:
-        """Get a metastore table object
+        """Get a metastore table object.
 
         >>> hh = HiveMetastoreHook()
         >>> t = hh.get_table(db='airflow', table_name='static_babynames')
@@ -640,13 +640,13 @@ class HiveMetastoreHook(BaseHook):
             return client.get_table(dbname=db, tbl_name=table_name)
 
     def get_tables(self, db: str, pattern: str = "*") -> Any:
-        """Get a metastore table object"""
+        """Get a metastore table object."""
         with self.metastore as client:
             tables = client.get_tables(db_name=db, pattern=pattern)
             return client.get_table_objects_by_name(db, tables)
 
     def get_databases(self, pattern: str = "*") -> Any:
-        """Get a metastore table object"""
+        """Get a metastore table object."""
         with self.metastore as client:
             return client.get_databases(pattern)
 
@@ -774,7 +774,7 @@ class HiveMetastoreHook(BaseHook):
 
     def table_exists(self, table_name: str, db: str = "default") -> bool:
         """
-        Check if table exists
+        Check if table exists.
 
         >>> hh = HiveMetastoreHook()
         >>> hh.table_exists(db='airflow', table_name='static_babynames')
@@ -790,7 +790,7 @@ class HiveMetastoreHook(BaseHook):
 
     def drop_partitions(self, table_name, part_vals, delete_data=False, db="default"):
         """
-        Drop partitions from the given table matching the part_vals input
+        Drop partitions from the given table matching the part_vals input.
 
         :param table_name: table name.
         :param part_vals: list of partition specs.
@@ -816,7 +816,7 @@ class HiveMetastoreHook(BaseHook):
 
 class HiveServer2Hook(DbApiHook):
     """
-    Wrapper around the pyhive library
+    Wrapper around the pyhive library.
 
     Notes:
     * the default auth_mechanism is PLAIN, to override it you
@@ -989,8 +989,8 @@ class HiveServer2Hook(DbApiHook):
             message = None
 
             i = 0
-            with open(csv_filepath, "wb") as file:
-                writer = csv.writer(file, delimiter=delimiter, lineterminator=lineterminator, encoding="utf-8")
+            with open(csv_filepath, "w", encoding="utf-8") as file:
+                writer = csv.writer(file, delimiter=delimiter, lineterminator=lineterminator)
                 try:
                     if output_header:
                         self.log.debug("Cursor description is %s", header)
@@ -1037,7 +1037,7 @@ class HiveServer2Hook(DbApiHook):
         **kwargs,
     ) -> pandas.DataFrame:
         """
-        Get a pandas dataframe from a Hive query
+        Get a pandas dataframe from a Hive query.
 
         :param sql: hql to be executed.
         :param schema: target schema, default to 'default'.
@@ -123,7 +123,7 @@ class HiveOperator(BaseOperator):
         self.hook: HiveCliHook | None = None
 
     def get_hook(self) -> HiveCliHook:
-        """Get Hive cli hook"""
+        """Get Hive cli hook."""
         return HiveCliHook(
             hive_cli_conn_id=self.hive_cli_conn_id,
             run_as=self.run_as,
@@ -33,10 +33,13 @@ if TYPE_CHECKING:
 
 
 class HiveStatsCollectionOperator(BaseOperator):
-    """
-    Gathers partition statistics using a dynamically generated Presto
-    query, inserts the stats into a MySql table with this format. Stats
-    overwrite themselves if you rerun the same date/partition. ::
+    """Gather partition statistics and insert them into MySQL.
+
+    Statistics are gathered with a dynamically generated Presto query and
+    inserted with this format. Stats overwrite themselves if you rerun the
+    same date/partition.
+
+    .. code-block:: sql
 
         CREATE TABLE hive_stats (
             ds VARCHAR(16),
@@ -98,7 +101,7 @@ class HiveStatsCollectionOperator(BaseOperator):
         self.dttm = "{{ execution_date.isoformat() }}"
 
     def get_default_exprs(self, col: str, col_type: str) -> dict[Any, Any]:
-        """Get default expressions"""
+        """Get default expressions."""
         if col in self.excluded_columns:
             return {}
         exp = {(col, "non_null"): f"COUNT({col})"}
@@ -18,12 +18,12 @@
 """This module contains an operator to move data from MSSQL to Hive."""
 from __future__ import annotations
 
+import csv
 from collections import OrderedDict
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Sequence
 
 import pymssql
-import unicodecsv as csv
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
@@ -113,8 +113,8 @@ class MsSqlToHiveOperator(BaseOperator):
         with mssql.get_conn() as conn:
             with conn.cursor() as cursor:
                 cursor.execute(self.sql)
-                with NamedTemporaryFile("w") as tmp_file:
-                    csv_writer = csv.writer(tmp_file, delimiter=self.delimiter, encoding="utf-8")
+                with NamedTemporaryFile(mode="w", encoding="utf-8") as tmp_file:
+                    csv_writer = csv.writer(tmp_file, delimiter=self.delimiter)
                     field_dict = OrderedDict()
                     for col_count, field in enumerate(cursor.description, start=1):
                         col_position = f"Column{col_count}"
@@ -18,12 +18,13 @@
 """This module contains an operator to move data from MySQL to Hive."""
 from __future__ import annotations
 
+import csv
 from collections import OrderedDict
+from contextlib import closing
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Sequence
 
 import MySQLdb
-import unicodecsv as csv
 
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
@@ -83,7 +84,7 @@ class MySqlToHiveOperator(BaseOperator):
         recreate: bool = False,
         partition: dict | None = None,
         delimiter: str = chr(1),
-        quoting: str | None = None,
+        quoting: int | None = None,
         quotechar: str = '"',
         escapechar: str | None = None,
         mysql_conn_id: str = "mysql_default",
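Note on the annotation change above: the csv module's quoting options are integer constants, so typing the parameter as ``int | None`` matches what callers actually pass. A quick illustration (output values depend on the interpreter, so they are printed rather than asserted):

    import csv

    # The quoting options are plain ints, hence the int | None annotation.
    for name in ("QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE"):
        print(name, getattr(csv, name), isinstance(getattr(csv, name), int))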
@@ -131,28 +132,24 @@ class MySqlToHiveOperator(BaseOperator):
     def execute(self, context: Context):
         hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id, auth=self.hive_auth)
         mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id)
-
         self.log.info("Dumping MySQL query results to local file")
-        conn = mysql.get_conn()
-        cursor = conn.cursor()
-        cursor.execute(self.sql)
-        with NamedTemporaryFile("wb") as f:
-            csv_writer = csv.writer(
-                f,
-                delimiter=self.delimiter,
-                quoting=self.quoting,
-                quotechar=self.quotechar,
-                escapechar=self.escapechar,
-                encoding="utf-8",
-            )
-            field_dict = OrderedDict()
-            if cursor.description is not None:
-                for field in cursor.description:
-                    field_dict[field[0]] = self.type_map(field[1])
-            csv_writer.writerows(cursor)
+        with NamedTemporaryFile(mode="w", encoding="utf-8") as f:
+            with closing(mysql.get_conn()) as conn:
+                with closing(conn.cursor()) as cursor:
+                    cursor.execute(self.sql)
+                    csv_writer = csv.writer(
+                        f,
+                        delimiter=self.delimiter,
+                        quoting=self.quoting,
+                        quotechar=self.quotechar if self.quoting != csv.QUOTE_NONE else None,
+                        escapechar=self.escapechar,
+                    )
+                    field_dict = OrderedDict()
+                    if cursor.description is not None:
+                        for field in cursor.description:
+                            field_dict[field[0]] = self.type_map(field[1])
+                    csv_writer.writerows(cursor)
             f.flush()
-            cursor.close()
-            conn.close()  # type: ignore[misc]
             self.log.info("Loading file into Hive")
             hive.load_file(
                 f.name,
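Note on the rewrite above: wrapping the DB-API connection and cursor in contextlib.closing() releases them even if the dump fails, replacing the manual cursor.close()/conn.close() calls. A self-contained sketch of the same pattern, using sqlite3 as an illustrative stand-in for MySQLdb (table and query are not from the provider):

    import sqlite3
    from contextlib import closing

    # closing() turns any object with a .close() method into a context manager.
    with closing(sqlite3.connect(":memory:")) as conn:
        with closing(conn.cursor()) as cursor:
            cursor.execute("CREATE TABLE t (x INTEGER)")
            cursor.execute("SELECT x FROM t")
            rows = cursor.fetchall()
    # Both cursor and connection are closed here, even if an exception was raised.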
@@ -18,12 +18,11 @@
 """This module contains an operator to move data from Vertica to Hive."""
 from __future__ import annotations
 
+import csv
 from collections import OrderedDict
 from tempfile import NamedTemporaryFile
 from typing import TYPE_CHECKING, Any, Sequence
 
-import unicodecsv as csv
-
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
 from airflow.providers.vertica.hooks.vertica import VerticaHook
@@ -94,9 +93,11 @@ class VerticaToHiveOperator(BaseOperator):
 
     @classmethod
     def type_map(cls, vertica_type):
-        """
-        Vertica-python datatype.py does not provide the full type mapping access.
-        Manual hack. Reference:
+        """Manually hack Vertica-Python type mapping.
+
+        The stock datatype.py does not provide the full type mapping access.
+
+        Reference:
         https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py
         """
         type_map = {
@@ -117,8 +118,8 @@ class VerticaToHiveOperator(BaseOperator):
         conn = vertica.get_conn()
         cursor = conn.cursor()
         cursor.execute(self.sql)
-        with NamedTemporaryFile("w") as f:
-            csv_writer = csv.writer(f, delimiter=self.delimiter, encoding="utf-8")
+        with NamedTemporaryFile(mode="w", encoding="utf-8") as f:
+            csv_writer = csv.writer(f, delimiter=self.delimiter)
             field_dict = OrderedDict()
             for col_count, field in enumerate(cursor.description, start=1):
                 col_position = f"Column{col_count}"
@@ -1,13 +1,13 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-apache-hive
-Version: 6.1.0rc2
+Version: 6.1.1rc1
 Summary: Provider for Apache Airflow. Implements apache-airflow-providers-apache-hive package
 Home-page: https://airflow.apache.org/
 Download-URL: https://archive.apache.org/dist/airflow/providers
 Author: Apache Software Foundation
 Author-email: dev@airflow.apache.org
 License: Apache License 2.0
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.1.0/
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.1.1/
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
@@ -21,12 +21,11 @@ Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Topic :: System :: Monitoring
-Requires-Python: ~=3.7
+Requires-Python: ~=3.8
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 License-File: NOTICE
@@ -73,7 +72,7 @@ Requires-Dist: apache-airflow-providers-vertica ; extra == 'vertica'
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``6.1.0rc2``
+Release: ``6.1.1rc1``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -86,7 +85,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.1.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/6.1.1/>`_.
 
 
 Installation
@@ -96,7 +95,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-apache-hive``
 
-The package supports the following python versions: 3.7,3.8,3.9,3.10
+The package supports the following python versions: 3.8,3.9,3.10
 
 Requirements
 ------------
@@ -164,6 +163,32 @@ Dependent package
 Changelog
 ---------
 
+
+6.1.1
+.....
+
+.. note::
+  This release dropped support for Python 3.7
+
+Bug Fixes
+~~~~~~~~~
+
+* ``Sanitize beeline principal parameter (#31983)``
+
+Misc
+~~~~
+
+* ``Replace unicodecsv with standard csv library (#31693)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
+* ``Apache provider docstring improvements (#31730)``
+* ``Improve docstrings in providers (#31681)``
+* ``Add D400 pydocstyle check - Apache providers only (#31424)``
+* ``Add Python 3.11 support (#27264)``
+* ``Add note about dropping Python 3.7 for providers (#32015)``
+
 6.1.0
 .....
 
@@ -1,12 +1,12 @@
-airflow/providers/apache/hive/__init__.py,sha256=sgDGnm2V4Y0uBUU9vTVvHcSEt_gxyf0Lo5SF6oSlAsk,1536
-airflow/providers/apache/hive/get_provider_info.py,sha256=KbaI3F9kOYWQy_aYFPzU-auWdBw5S9u3Dhf6wLbOm7w,5601
+airflow/providers/apache/hive/__init__.py,sha256=LCOVQWUVRwQIHVLzVTbEihDfCbdpwG-YbwnuTEsu0ug,1536
+airflow/providers/apache/hive/get_provider_info.py,sha256=6T5R312-j-tcAx8JJHvGF5hEaesB1qutshcJ3f0QMeI,5668
 airflow/providers/apache/hive/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/apache/hive/hooks/hive.py,sha256=yOfoMs1BbvreHA9zSki2MjSVBYW_-rzeUXy3BFyaHZM,42315
+airflow/providers/apache/hive/hooks/hive.py,sha256=HAcK5Yjq7fwhJutEmuWyg08QYC1c_w8NwtqAuXnyj-Q,42441
 airflow/providers/apache/hive/macros/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/apache/hive/macros/hive.py,sha256=Rq53o6tGww-KLPPZNuD64KkfE798GJQZ1HS6boVUh1A,4581
 airflow/providers/apache/hive/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/apache/hive/operators/hive.py,sha256=gU6iMSV-pm2249-DNK599xVAkV8eMqdp3MpB01fLrW0,7383
-airflow/providers/apache/hive/operators/hive_stats.py,sha256=4Tn2o4D0V55SHq55mssqTz2wXXeHtB-cki58YC_VPLs,7454
+airflow/providers/apache/hive/operators/hive.py,sha256=jwG7l73d8KcQhaDnoaXh9ACK2hmG7J896w2RqgRrl68,7384
+airflow/providers/apache/hive/operators/hive_stats.py,sha256=dy8w_uZDDsANk0gslTUAPDbE1cpbIcsjS0q6y5r06ZU,7502
 airflow/providers/apache/hive/plugins/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/apache/hive/plugins/hive.py,sha256=rrGccro6DEdnHSmII3goBYD9te-XdhPaPMjWpF-36HU,1146
 airflow/providers/apache/hive/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
@@ -16,14 +16,14 @@ airflow/providers/apache/hive/sensors/named_hive_partition.py,sha256=_8WSbtvnQq0
 airflow/providers/apache/hive/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/apache/hive/transfers/hive_to_mysql.py,sha256=EKc6B9DAgOXGkHWDMjucUYg6JnUiirvOyN4vMfaRHEc,5279
 airflow/providers/apache/hive/transfers/hive_to_samba.py,sha256=X9g2XXGLcNCIxlh0czeOolkkASHh1QDsNLrBKI0UGgw,2973
-airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=Ft9mQOU3DMp_t7PXvhvtK83pYUYMlvjsbx6MumJM2OU,5671
-airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=h687LZYrjIhoGEDQrox1m8_ep8fptjL79XFWv672Z_8,6587
+airflow/providers/apache/hive/transfers/mssql_to_hive.py,sha256=zGp5KqaPPmo9tS1FQw4xcU-6LWcKukmM6_85zlybfVU,5662
+airflow/providers/apache/hive/transfers/mysql_to_hive.py,sha256=L2ccK82t_VPp5nzaU-IuAO6Jb99iXkpgpAM3B3NI_wo,6713
 airflow/providers/apache/hive/transfers/s3_to_hive.py,sha256=QOh_HFXDqRAGM2ymJQfG_3L07pblr69-xi67pOguB3k,11742
-airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=fwIcSKpc75DsMvNQEggyZKP2LB0O8GCneMsAa6_S_Wc,5548
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/METADATA,sha256=jmn4UUDXoaJXA0MYCPqsKKZ4H6mzfuk65anA_Zsc9YQ,19756
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/entry_points.txt,sha256=Dhu3u9ocLHndlPrXKiW9MhRcGApVU4qlCHRuhqvLDcw,188
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
-apache_airflow_providers_apache_hive-6.1.0rc2.dist-info/RECORD,,
+airflow/providers/apache/hive/transfers/vertica_to_hive.py,sha256=9HKX2dLLSFyo8MLOgIPZcXf31B9ij87DSIrf9zgWqv4,5564
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/METADATA,sha256=en490_SJMkwtnVY66rrsQVQbUAMm8UoHHxGe-gLuoAc,20336
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/entry_points.txt,sha256=Dhu3u9ocLHndlPrXKiW9MhRcGApVU4qlCHRuhqvLDcw,188
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
+apache_airflow_providers_apache_hive-6.1.1rc1.dist-info/RECORD,,