apache-airflow-providers-apache-hive 9.1.1__tar.gz → 9.1.2rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/PKG-INFO +19 -14
  2. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/README.rst +10 -8
  3. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/changelog.rst +20 -0
  4. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/index.rst +10 -7
  5. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/pyproject.toml +9 -6
  7. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/__init__.py +1 -1
  8. {apache_airflow_providers_apache_hive-9.1.1/tests/integration → apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers}/__init__.py +1 -1
  9. {apache_airflow_providers_apache_hive-9.1.1/src/airflow/providers → apache_airflow_providers_apache_hive-9.1.2rc1/src/airflow/providers/apache}/__init__.py +1 -1
  10. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/__init__.py +1 -1
  11. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/hooks/hive.py +11 -14
  12. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/mysql_to_hive.py +3 -3
  13. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/version_compat.py +8 -0
  14. {apache_airflow_providers_apache_hive-9.1.1/src/airflow/providers/apache → apache_airflow_providers_apache_hive-9.1.2rc1/tests/integration}/__init__.py +1 -1
  15. apache_airflow_providers_apache_hive-9.1.2rc1/tests/integration/apache/__init__.py +17 -0
  16. apache_airflow_providers_apache_hive-9.1.2rc1/tests/system/__init__.py +17 -0
  17. apache_airflow_providers_apache_hive-9.1.2rc1/tests/system/apache/__init__.py +17 -0
  18. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/system/apache/hive/example_twitter_dag.py +8 -2
  19. apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/__init__.py +17 -0
  20. apache_airflow_providers_apache_hive-9.1.2rc1/tests/unit/apache/__init__.py +17 -0
  21. apache_airflow_providers_apache_hive-9.1.1/tests/integration/apache/__init__.py +0 -17
  22. apache_airflow_providers_apache_hive-9.1.1/tests/system/__init__.py +0 -17
  23. apache_airflow_providers_apache_hive-9.1.1/tests/system/apache/__init__.py +0 -17
  24. apache_airflow_providers_apache_hive-9.1.1/tests/unit/__init__.py +0 -17
  25. apache_airflow_providers_apache_hive-9.1.1/tests/unit/apache/__init__.py +0 -17
  26. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/.latest-doc-only-change.txt +0 -0
  27. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/commits.rst +0 -0
  28. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/conf.py +0 -0
  29. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/configurations-ref.rst +0 -0
  30. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/connections/hive_cli.rst +0 -0
  31. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/connections/hive_metastore.rst +0 -0
  32. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/connections/hiveserver2.rst +0 -0
  33. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/connections/index.rst +0 -0
  34. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/installing-providers-from-sources.rst +0 -0
  35. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/integration-logos/hive.png +0 -0
  36. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/macros.rst +0 -0
  37. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/operators.rst +0 -0
  38. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/docs/security.rst +0 -0
  39. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/LICENSE +0 -0
  40. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/get_provider_info.py +0 -0
  41. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  42. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/macros/__init__.py +0 -0
  43. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/macros/hive.py +0 -0
  44. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/operators/__init__.py +0 -0
  45. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/operators/hive.py +0 -0
  46. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/operators/hive_stats.py +0 -0
  47. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  48. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/plugins/hive.py +0 -0
  49. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  50. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/sensors/hive_partition.py +0 -0
  51. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/sensors/metastore_partition.py +0 -0
  52. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/sensors/named_hive_partition.py +0 -0
  53. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/__init__.py +0 -0
  54. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/hive_to_mysql.py +0 -0
  55. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/hive_to_samba.py +0 -0
  56. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/mssql_to_hive.py +0 -0
  57. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/s3_to_hive.py +0 -0
  58. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/src/airflow/providers/apache/hive/transfers/vertica_to_hive.py +0 -0
  59. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/conftest.py +0 -0
  60. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/integration/apache/hive/__init__.py +0 -0
  61. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/integration/apache/hive/transfers/__init__.py +0 -0
  62. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/integration/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  63. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/system/apache/hive/__init__.py +0 -0
  64. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/system/apache/hive/example_hive.py +0 -0
  65. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/system/apache/hive/example_twitter_README.md +0 -0
  66. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/__init__.py +0 -0
  67. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/hooks/__init__.py +0 -0
  68. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/hooks/query_results.csv +0 -0
  69. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/hooks/test_hive.py +0 -0
  70. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/macros/__init__.py +0 -0
  71. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/macros/test_hive.py +0 -0
  72. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/operators/__init__.py +0 -0
  73. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/operators/test_hive.py +0 -0
  74. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/operators/test_hive_stats.py +0 -0
  75. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/sensors/__init__.py +0 -0
  76. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/sensors/test_hive_partition.py +0 -0
  77. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/sensors/test_metastore_partition.py +0 -0
  78. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/sensors/test_named_hive_partition.py +0 -0
  79. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/__init__.py +0 -0
  80. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_hive_to_mysql.py +0 -0
  81. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_hive_to_samba.py +0 -0
  82. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_mssql_to_hive.py +0 -0
  83. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_mysql_to_hive.py +0 -0
  84. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_s3_to_hive.py +0 -0
  85. {apache_airflow_providers_apache_hive-9.1.1 → apache_airflow_providers_apache_hive-9.1.2rc1}/tests/unit/apache/hive/transfers/test_vertica_to_hive.py +0 -0
@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-apache-hive
- Version: 9.1.1
+ Version: 9.1.2rc1
  Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
  Keywords: airflow-provider,apache.hive,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.10
+ Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -18,10 +18,13 @@ Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0
- Requires-Dist: apache-airflow-providers-common-sql>=1.26.0
+ Requires-Dist: apache-airflow>=2.10.0rc1
+ Requires-Dist: apache-airflow-providers-common-sql>=1.26.0rc1
  Requires-Dist: hmsclient>=0.1.0
+ Requires-Dist: pandas>=2.1.2; python_version <"3.13"
+ Requires-Dist: pandas>=2.2.3; python_version >="3.13"
  Requires-Dist: pyhive[hive-pure-sasl]>=0.7.0
  Requires-Dist: thrift>=0.11.0
  Requires-Dist: jmespath>=0.7.0
@@ -33,8 +36,8 @@ Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
  Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
  Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -72,8 +75,9 @@ Provides-Extra: vertica
 
  Package ``apache-airflow-providers-apache-hive``
 
- Release: ``9.1.1``
+ Release: ``9.1.2``
 
+ Release Date: ``|PypiReleaseDate|``
 
  `Apache Hive <https://hive.apache.org/>`__
 
@@ -85,7 +89,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.
 
  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
 
  Installation
  ------------
@@ -94,21 +98,23 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``
 
- The package supports the following python versions: 3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
 
  Requirements
  ------------
 
- ======================================= ==================
+ ======================================= =====================================
  PIP package Version required
- ======================================= ==================
+ ======================================= =====================================
  ``apache-airflow`` ``>=2.10.0``
  ``apache-airflow-providers-common-sql`` ``>=1.26.0``
  ``hmsclient`` ``>=0.1.0``
+ ``pandas`` ``>=2.1.2; python_version < "3.13"``
+ ``pandas`` ``>=2.2.3; python_version >= "3.13"``
  ``pyhive[hive_pure_sasl]`` ``>=0.7.0``
  ``thrift`` ``>=0.11.0``
  ``jmespath`` ``>=0.7.0``
- ======================================= ==================
+ ======================================= =====================================
 
  Cross provider package dependencies
  -----------------------------------
@@ -127,7 +133,6 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
- `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -137,5 +142,5 @@ Dependent package
  ====================================================================================================================== ===================
 
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
 
@@ -23,8 +23,9 @@
 
  Package ``apache-airflow-providers-apache-hive``
 
- Release: ``9.1.1``
+ Release: ``9.1.2``
 
+ Release Date: ``|PypiReleaseDate|``
 
  `Apache Hive <https://hive.apache.org/>`__
 
@@ -36,7 +37,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
  are in ``airflow.providers.apache.hive`` python package.
 
  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/>`_.
 
  Installation
  ------------
@@ -45,21 +46,23 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-apache-hive``
 
- The package supports the following python versions: 3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13
 
  Requirements
  ------------
 
- ======================================= ==================
+ ======================================= =====================================
  PIP package Version required
- ======================================= ==================
+ ======================================= =====================================
  ``apache-airflow`` ``>=2.10.0``
  ``apache-airflow-providers-common-sql`` ``>=1.26.0``
  ``hmsclient`` ``>=0.1.0``
+ ``pandas`` ``>=2.1.2; python_version < "3.13"``
+ ``pandas`` ``>=2.2.3; python_version >= "3.13"``
  ``pyhive[hive_pure_sasl]`` ``>=0.7.0``
  ``thrift`` ``>=0.11.0``
  ``jmespath`` ``>=0.7.0``
- ======================================= ==================
+ ======================================= =====================================
 
  Cross provider package dependencies
  -----------------------------------
@@ -78,7 +81,6 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
- `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -88,4 +90,4 @@ Dependent package
  ====================================================================================================================== ===================
 
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html>`_.
@@ -25,6 +25,26 @@
 
 
  Changelog
+ ---------
+
+ 9.1.2
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Fix hive changelog (#53665)``
+ * ``Deprecate decorators from Core (#53629)``
+ * ``Bump mypy to 1.17.0 (#53523)``
+ * ``Add Python 3.13 support for Airflow. (#46891)``
+ * ``Cleanup type ignores in apache/hive provider (#53302)``
+ * ``Remove type ignore across codebase after mypy upgrade (#53243)``
+ * ``Remove upper-binding for "python-requires" (#52980)``
+ * ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
+ * ``Replace 'BaseHook' to Task SDK for 'apache/hive' (#52685)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
 
  9.1.1
  .....
@@ -79,7 +79,9 @@ apache-airflow-providers-apache-hive package
  `Apache Hive <https://hive.apache.org/>`__
 
 
- Release: 9.1.1
+ Release: 9.1.2
+
+ Release Date: ``|PypiReleaseDate|``
 
  Provider package
  ----------------
@@ -99,16 +101,18 @@ Requirements
 
  The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
 
- ======================================= ==================
+ ======================================= =====================================
  PIP package Version required
- ======================================= ==================
+ ======================================= =====================================
  ``apache-airflow`` ``>=2.10.0``
  ``apache-airflow-providers-common-sql`` ``>=1.26.0``
  ``hmsclient`` ``>=0.1.0``
+ ``pandas`` ``>=2.1.2; python_version < "3.13"``
+ ``pandas`` ``>=2.2.3; python_version >= "3.13"``
  ``pyhive[hive_pure_sasl]`` ``>=0.7.0``
  ``thrift`` ``>=0.11.0``
  ``jmespath`` ``>=0.7.0``
- ======================================= ==================
+ ======================================= =====================================
 
  Cross provider package dependencies
  -----------------------------------
@@ -127,7 +131,6 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
- `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_ ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
  `apache-airflow-providers-microsoft-mssql <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-mssql>`_ ``microsoft.mssql``
  `apache-airflow-providers-mysql <https://airflow.apache.org/docs/apache-airflow-providers-mysql>`_ ``mysql``
@@ -142,5 +145,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
- * `The apache-airflow-providers-apache-hive 9.1.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1.tar.gz.sha512>`__)
- * `The apache-airflow-providers-apache-hive 9.1.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.1-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-apache-hive 9.1.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-9.1.2-py3-none-any.whl.sha512>`__)
@@ -22,12 +22,13 @@ description: |
  `Apache Hive <https://hive.apache.org/>`__
 
  state: ready
- source-date-epoch: 1751472547
+ source-date-epoch: 1753688805
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 9.1.2
  - 9.1.1
  - 9.1.0
  - 9.0.6
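
A side note on the provider.yaml change: source-date-epoch is a plain Unix timestamp (the field follows the SOURCE_DATE_EPOCH convention used for reproducible builds). A minimal Python sketch to decode the new value into a UTC date:

    from datetime import datetime, timezone

    # Decode the bumped source-date-epoch from provider.yaml into a UTC timestamp.
    print(datetime.fromtimestamp(1753688805, tz=timezone.utc).isoformat())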
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
  [project]
  name = "apache-airflow-providers-apache-hive"
- version = "9.1.1"
+ version = "9.1.2rc1"
  description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -47,18 +47,21 @@ classifiers = [
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
  "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.10"
+ requires-python = ">=3.10"
 
  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
  # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
- "apache-airflow>=2.10.0",
- "apache-airflow-providers-common-sql>=1.26.0",
+ "apache-airflow>=2.10.0rc1",
+ "apache-airflow-providers-common-sql>=1.26.0rc1",
  "hmsclient>=0.1.0",
+ 'pandas>=2.1.2; python_version <"3.13"',
+ 'pandas>=2.2.3; python_version >="3.13"',
  "pyhive[hive_pure_sasl]>=0.7.0",
  "thrift>=0.11.0",
  "jmespath>=0.7.0",
@@ -131,8 +134,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}
 
  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/9.1.1/changelog.html"
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2"
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-apache-hive/9.1.2/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
  __all__ = ["__version__"]
 
- __version__ = "9.1.1"
+ __version__ = "9.1.2"
 
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.10.0"
@@ -38,13 +38,8 @@ import csv
 
  from airflow.configuration import conf
  from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning
- from airflow.providers.apache.hive.version_compat import AIRFLOW_VAR_NAME_FORMAT_MAPPING
+ from airflow.providers.apache.hive.version_compat import AIRFLOW_VAR_NAME_FORMAT_MAPPING, BaseHook
  from airflow.providers.common.sql.hooks.sql import DbApiHook
-
- try:
- from airflow.sdk import BaseHook
- except ImportError:
- from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
  from airflow.security import utils
  from airflow.utils.helpers import as_flattened_list
 
@@ -870,7 +865,7 @@ class HiveServer2Hook(DbApiHook):
  username: str | None = None
  password: str | None = None
 
- db = self.get_connection(self.hiveserver2_conn_id) # type: ignore
+ db = self.get_connection(self.get_conn_id())
 
  auth_mechanism = db.extra_dejson.get("auth_mechanism", "NONE")
  if auth_mechanism == "NONE" and db.login is None:
@@ -912,7 +907,7 @@ class HiveServer2Hook(DbApiHook):
  with contextlib.closing(self.get_conn(schema)) as conn, contextlib.closing(conn.cursor()) as cur:
  cur.arraysize = fetch_size or 1000
 
- db = self.get_connection(self.hiveserver2_conn_id) # type: ignore
+ db = self.get_connection(self.get_conn_id())
  # Not all query services (e.g. impala) support the set command
  if db.extra_dejson.get("run_set_variable_statements", True):
  env_context = get_context_from_env_var()
@@ -1034,9 +1029,10 @@ class HiveServer2Hook(DbApiHook):
  schema = kwargs["schema"] if "schema" in kwargs else "default"
  return self.get_results(sql, schema=schema, hive_conf=parameters)["data"]
 
- def _get_pandas_df( # type: ignore
+ def _get_pandas_df(
  self,
- sql: str,
+ sql,
+ parameters: list[Any] | tuple[Any, ...] | Mapping[str, Any] | None = None,
  schema: str = "default",
  hive_conf: dict[Any, Any] | None = None,
  **kwargs,
@@ -1052,9 +1048,10 @@ class HiveServer2Hook(DbApiHook):
  df = pd.DataFrame(res["data"], columns=[c[0] for c in res["header"]], **kwargs)
  return df
 
- def _get_polars_df( # type: ignore
+ def _get_polars_df(
  self,
- sql: str,
+ sql,
+ parameters: list[Any] | tuple[Any, ...] | Mapping[str, Any] | None = None,
  schema: str = "default",
  hive_conf: dict[Any, Any] | None = None,
  **kwargs,
@@ -1081,7 +1078,7 @@ class HiveServer2Hook(DbApiHook):
  **kwargs: Any,
  ) -> pd.DataFrame: ...
 
- @overload # type: ignore[override]
+ @overload
  def get_df(
  self,
  sql: str,
@@ -1092,7 +1089,7 @@ class HiveServer2Hook(DbApiHook):
  **kwargs: Any,
  ) -> pl.DataFrame: ...
 
- def get_df( # type: ignore
+ def get_df(
  self,
  sql: str,
  schema: str = "default",
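
With these hook changes, HiveServer2Hook resolves its Airflow connection through get_conn_id() (inherited from the common-sql DbApiHook) instead of reading self.hiveserver2_conn_id directly, which also lets the old type-ignore comments go away. A hedged usage sketch, assuming a reachable HiveServer2 instance and the provider's default connection id:

    from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

    # "hiveserver2_default" is the provider's default connection id; the hook
    # now looks it up via self.get_conn_id() rather than the raw attribute.
    hook = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default")
    rows = hook.get_records("SELECT 1")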
@@ -23,7 +23,7 @@ import csv
  from collections.abc import Sequence
  from contextlib import closing
  from tempfile import NamedTemporaryFile
- from typing import TYPE_CHECKING
+ from typing import TYPE_CHECKING, Literal
 
  try:
  import MySQLdb
@@ -97,7 +97,7 @@ class MySqlToHiveOperator(BaseOperator):
  recreate: bool = False,
  partition: dict | None = None,
  delimiter: str = chr(1),
- quoting: int | None = None,
+ quoting: Literal[0, 1, 2, 3] = csv.QUOTE_MINIMAL,
  quotechar: str = '"',
  escapechar: str | None = None,
  mysql_conn_id: str = "mysql_default",
@@ -113,7 +113,7 @@ class MySqlToHiveOperator(BaseOperator):
  self.create = create
  self.recreate = recreate
  self.delimiter = str(delimiter)
- self.quoting = quoting or csv.QUOTE_MINIMAL
+ self.quoting = quoting
  self.quotechar = quotechar
  self.escapechar = escapechar
  self.mysql_conn_id = mysql_conn_id
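
The new quoting annotation narrows the parameter to the csv module's quoting constants and moves the default into the signature, replacing the old "quoting or csv.QUOTE_MINIMAL" fallback. The four literal values correspond exactly to those constants, as this quick check shows:

    import csv

    # The csv quoting constants are the small ints covered by Literal[0, 1, 2, 3].
    print(csv.QUOTE_MINIMAL, csv.QUOTE_ALL, csv.QUOTE_NONNUMERIC, csv.QUOTE_NONE)  # 0 1 2 3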
@@ -33,6 +33,12 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 
  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+ AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
+
+ if AIRFLOW_V_3_1_PLUS:
+ from airflow.sdk import BaseHook
+ else:
+ from airflow.hooks.base import BaseHook # type: ignore[attr-defined,no-redef]
 
  if AIRFLOW_V_3_0_PLUS:
  from airflow.sdk import BaseOperator, BaseSensorOperator
@@ -48,6 +54,8 @@ else:
 
  __all__ = [
  "AIRFLOW_V_3_0_PLUS",
+ "AIRFLOW_V_3_1_PLUS",
+ "BaseHook",
  "BaseOperator",
  "BaseSensorOperator",
  "AIRFLOW_VAR_NAME_FORMAT_MAPPING",
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -25,10 +25,17 @@ import os
  from datetime import date, datetime, timedelta
 
  from airflow import DAG
- from airflow.decorators import task
  from airflow.providers.apache.hive.operators.hive import HiveOperator
  from airflow.providers.standard.operators.bash import BashOperator
 
+ from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
+
+ if AIRFLOW_V_3_0_PLUS:
+ from airflow.sdk import task
+ else:
+ # Airflow 2 path
+ from airflow.decorators import task # type: ignore[attr-defined,no-redef]
+
  # --------------------------------------------------------------------------------
  # Caveat: This Dag will not run because of missing scripts.
  # The purpose of this is to give you a sample of a real world example DAG!
@@ -38,7 +45,6 @@ from airflow.providers.standard.operators.bash import BashOperator
  # Load The Dependencies
  # --------------------------------------------------------------------------------
 
-
  ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
  DAG_ID = "example_twitter_dag"
 
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -1,17 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
@@ -1,17 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
@@ -1,17 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
@@ -1,17 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
@@ -1,17 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore