PyStellarDB 0.13.4__tar.gz → 1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {pystellardb-0.13.4/PyStellarDB.egg-info → pystellardb-1.0}/PKG-INFO +69 -21
  2. pystellardb-0.13.4/README.rst → pystellardb-1.0/PyStellarDB.egg-info/PKG-INFO +85 -5
  3. {pystellardb-0.13.4 → pystellardb-1.0}/PyStellarDB.egg-info/SOURCES.txt +5 -1
  4. pystellardb-1.0/PyStellarDB.egg-info/requires.txt +10 -0
  5. pystellardb-0.13.4/PKG-INFO → pystellardb-1.0/README.rst +52 -37
  6. pystellardb-1.0/pystellardb/__init__.py +17 -0
  7. {pystellardb-0.13.4 → pystellardb-1.0}/pystellardb/_version.py +3 -3
  8. {pystellardb-0.13.4 → pystellardb-1.0}/pystellardb/graph_types.py +2 -2
  9. pystellardb-1.0/pystellardb/sasl_compat.py +49 -0
  10. {pystellardb-0.13.4 → pystellardb-1.0}/pystellardb/stellar_hive.py +14 -17
  11. pystellardb-1.0/pystellardb/v2/__init__.py +8 -0
  12. pystellardb-1.0/pystellardb/v2/connection.py +276 -0
  13. pystellardb-1.0/pystellardb/v2/database.py +110 -0
  14. {pystellardb-0.13.4 → pystellardb-1.0}/setup.py +6 -13
  15. pystellardb-0.13.4/PyStellarDB.egg-info/requires.txt +0 -22
  16. pystellardb-0.13.4/pystellardb/__init__.py +0 -4
  17. {pystellardb-0.13.4 → pystellardb-1.0}/LICENSE +0 -0
  18. {pystellardb-0.13.4 → pystellardb-1.0}/MANIFEST.in +0 -0
  19. {pystellardb-0.13.4 → pystellardb-1.0}/PyStellarDB.egg-info/dependency_links.txt +0 -0
  20. {pystellardb-0.13.4 → pystellardb-1.0}/PyStellarDB.egg-info/top_level.txt +0 -0
  21. {pystellardb-0.13.4 → pystellardb-1.0}/pystellardb/stellar_rdd.py +0 -0
  22. {pystellardb-0.13.4 → pystellardb-1.0}/setup.cfg +0 -0
  23. {pystellardb-0.13.4 → pystellardb-1.0}/versioneer.py +0 -0
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: PyStellarDB
- Version: 0.13.4
+ Version: 1.0
  Summary: Python interface to StellarDB
  Home-page: https://github.com/WarpCloud/PyStellarDB
  Author: Zhiping Wang
@@ -10,25 +10,26 @@ Classifier: Intended Audience :: Developers
  Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Operating System :: OS Independent
  Classifier: Topic :: Database :: Front-Ends
- Requires-Python: >=2.7
+ Requires-Python: >=3.6
  License-File: LICENSE
  Requires-Dist: future
  Requires-Dist: python-dateutil
- Requires-Dist: pyhive
- Requires-Dist: sasl
- Requires-Dist: thrift
- Requires-Dist: thrift-sasl>=0.3.0
- Provides-Extra: presto
- Requires-Dist: requests>=1.0.0; extra == "presto"
- Provides-Extra: hive
- Requires-Dist: sasl>=0.2.1; extra == "hive"
- Requires-Dist: thrift>=0.10.0; extra == "hive"
- Provides-Extra: sqlalchemy
- Requires-Dist: sqlalchemy>=1.3.0; extra == "sqlalchemy"
- Provides-Extra: kerberos
- Requires-Dist: requests_kerberos>=0.12.0; extra == "kerberos"
+ Requires-Dist: pyhive[hive_pure_sasl]
+ Requires-Dist: thrift>=0.10.0
  Provides-Extra: pyspark
  Requires-Dist: pyspark>=2.4.0; extra == "pyspark"
+ Provides-Extra: kerberos
+ Requires-Dist: kerberos>=1.3.0; extra == "kerberos"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

  PyStellarDB
  ===========
@@ -60,6 +61,7 @@ PLAIN Mode (No security is configured)
  ---------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, graph_name='pokemon')
@@ -70,11 +72,19 @@ PLAIN Mode (No security is configured)

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  LDAP Mode
  ---------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP', graph_name='pokemon')
@@ -85,6 +95,14 @@ LDAP Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())
+

  Kerberos Mode
  -------------
@@ -96,6 +114,12 @@ Kerberos Mode
  # In Linux: kinit -kt FILE_PATH_OF_KEYTABL PRINCIPAL_NAME
  # In Mac: kinit -t FILE_PATH_OF_KEYTABL -f PRINCIPAL_NAME

+ # Run with Kerberos path environment variables
+ # ENV KRB5_CONFIG=/etc/krb5.conf
+ # ENV KRB5_CLIENT_KTNAME=/etc/krb5.keytab
+ # ENV KRB5_KTNAME=/etc/krb5.keytab
+
+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS', graph_name='pokemon')
@@ -106,6 +130,13 @@ Kerberos Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  Execute Hive Query
  ------------------
@@ -123,6 +154,7 @@ Execute Graph Query and change to a PySpark RDD object
  ------------------------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pyspark import SparkContext
  from pystellardb import stellar_hive

@@ -140,6 +172,22 @@ Execute Graph Query and change to a PySpark RDD object

  rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)

+ """version >= 1.0 """
+ from pyspark import SparkContext
+ from pystellardb import Connection, Graph
+
+ sc = SparkContext("local", "Demo App")
+
+
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ rdd = query_result.toRDD(sc)
+
+ def f(x): print(x)
+
+ rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)
+
  # Every line of this query is in format of Tuple(VertexObject, EdgeObject, VertexObject)
  # Vertex and Edge object has a function of toJSON() which can print the object in JSON format

@@ -171,24 +219,24 @@ Dependencies
  Required:
  ------------

- - Python 2.7+ / Python 3
+ - Python 3.6+

- System SASL
- ------------
+ System SASL(Depricated since 1.0):
+ ----------------------------------

  Ubuntu:

  .. code-block:: bash

  apt-get install libsasl2-dev libsasl2-2 libsasl2-modules-gssapi-mit
- apt-get install python-dev gcc #Update python and gcc if needed
+ apt-get install python3-dev gcc #Update python and gcc if needed

  RHEL/CentOS:

  .. code-block:: bash

  yum install cyrus-sasl-md5 cyrus-sasl-plain cyrus-sasl-gssapi cyrus-sasl-devel
- yum install gcc-c++ python-devel.x86_64 #Update python and gcc if needed
+ yum install gcc-c++ python3-devel.x86_64 #Update python and gcc if needed

  # if pip3 install fails with a message like 'Can't connect to HTTPS URL because the SSL module is not available'
  # you may need to update ssl & reinstall python
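Note that the new "version >= 1.0" snippets above import Connection and Graph yet still construct stellar_hive.StellarConnection. Based on the pystellardb/v2 module introduced later in this diff, the intended end-to-end flow presumably looks like the following sketch (server address and graph name are assumptions, not part of the release):

    from pystellardb import Connection, Graph

    conn = Connection(host="localhost", port=10000)  # PLAIN mode; auth defaults to 'NONE'
    graph = Graph('pokemon', conn)
    # CypherResult inherits pyhive's cursor iteration protocol
    for row in graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1'):
        print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())
    conn.close()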
@@ -1,3 +1,36 @@
+ Metadata-Version: 2.2
+ Name: PyStellarDB
+ Version: 1.0
+ Summary: Python interface to StellarDB
+ Home-page: https://github.com/WarpCloud/PyStellarDB
+ Author: Zhiping Wang
+ Author-email: zhiping.wang@transwarp.io
+ License: Apache License, Version 2.0
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Classifier: Topic :: Database :: Front-Ends
+ Requires-Python: >=3.6
+ License-File: LICENSE
+ Requires-Dist: future
+ Requires-Dist: python-dateutil
+ Requires-Dist: pyhive[hive_pure_sasl]
+ Requires-Dist: thrift>=0.10.0
+ Provides-Extra: pyspark
+ Requires-Dist: pyspark>=2.4.0; extra == "pyspark"
+ Provides-Extra: kerberos
+ Requires-Dist: kerberos>=1.3.0; extra == "kerberos"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
  PyStellarDB
  ===========

@@ -28,6 +61,7 @@ PLAIN Mode (No security is configured)
  ---------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, graph_name='pokemon')
@@ -38,11 +72,19 @@ PLAIN Mode (No security is configured)

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  LDAP Mode
  ---------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP', graph_name='pokemon')
@@ -53,6 +95,14 @@ LDAP Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())
+

  Kerberos Mode
  -------------
@@ -64,6 +114,12 @@ Kerberos Mode
  # In Linux: kinit -kt FILE_PATH_OF_KEYTABL PRINCIPAL_NAME
  # In Mac: kinit -t FILE_PATH_OF_KEYTABL -f PRINCIPAL_NAME

+ # Run with Kerberos path environment variables
+ # ENV KRB5_CONFIG=/etc/krb5.conf
+ # ENV KRB5_CLIENT_KTNAME=/etc/krb5.keytab
+ # ENV KRB5_KTNAME=/etc/krb5.keytab
+
+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS', graph_name='pokemon')
@@ -74,6 +130,13 @@ Kerberos Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  Execute Hive Query
  ------------------
@@ -91,6 +154,7 @@ Execute Graph Query and change to a PySpark RDD object
  ------------------------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pyspark import SparkContext
  from pystellardb import stellar_hive

@@ -108,6 +172,22 @@ Execute Graph Query and change to a PySpark RDD object

  rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)

+ """version >= 1.0 """
+ from pyspark import SparkContext
+ from pystellardb import Connection, Graph
+
+ sc = SparkContext("local", "Demo App")
+
+
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ rdd = query_result.toRDD(sc)
+
+ def f(x): print(x)
+
+ rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)
+
  # Every line of this query is in format of Tuple(VertexObject, EdgeObject, VertexObject)
  # Vertex and Edge object has a function of toJSON() which can print the object in JSON format

@@ -139,24 +219,24 @@ Dependencies
  Required:
  ------------

- - Python 2.7+ / Python 3
+ - Python 3.6+

- System SASL
- ------------
+ System SASL(Depricated since 1.0):
+ ----------------------------------

  Ubuntu:

  .. code-block:: bash

  apt-get install libsasl2-dev libsasl2-2 libsasl2-modules-gssapi-mit
- apt-get install python-dev gcc #Update python and gcc if needed
+ apt-get install python3-dev gcc #Update python and gcc if needed

  RHEL/CentOS:

  .. code-block:: bash

  yum install cyrus-sasl-md5 cyrus-sasl-plain cyrus-sasl-gssapi cyrus-sasl-devel
- yum install gcc-c++ python-devel.x86_64 #Update python and gcc if needed
+ yum install gcc-c++ python3-devel.x86_64 #Update python and gcc if needed

  # if pip3 install fails with a message like 'Can't connect to HTTPS URL because the SSL module is not available'
  # you may need to update ssl & reinstall python
@@ -12,5 +12,9 @@ PyStellarDB.egg-info/top_level.txt
  pystellardb/__init__.py
  pystellardb/_version.py
  pystellardb/graph_types.py
+ pystellardb/sasl_compat.py
  pystellardb/stellar_hive.py
- pystellardb/stellar_rdd.py
+ pystellardb/stellar_rdd.py
+ pystellardb/v2/__init__.py
+ pystellardb/v2/connection.py
+ pystellardb/v2/database.py
@@ -0,0 +1,10 @@
+ future
+ python-dateutil
+ pyhive[hive_pure_sasl]
+ thrift>=0.10.0
+
+ [kerberos]
+ kerberos>=1.3.0
+
+ [pyspark]
+ pyspark>=2.4.0
@@ -1,35 +1,3 @@
- Metadata-Version: 2.1
- Name: PyStellarDB
- Version: 0.13.4
- Summary: Python interface to StellarDB
- Home-page: https://github.com/WarpCloud/PyStellarDB
- Author: Zhiping Wang
- Author-email: zhiping.wang@transwarp.io
- License: Apache License, Version 2.0
- Classifier: Intended Audience :: Developers
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Operating System :: OS Independent
- Classifier: Topic :: Database :: Front-Ends
- Requires-Python: >=2.7
- License-File: LICENSE
- Requires-Dist: future
- Requires-Dist: python-dateutil
- Requires-Dist: pyhive
- Requires-Dist: sasl
- Requires-Dist: thrift
- Requires-Dist: thrift-sasl>=0.3.0
- Provides-Extra: presto
- Requires-Dist: requests>=1.0.0; extra == "presto"
- Provides-Extra: hive
- Requires-Dist: sasl>=0.2.1; extra == "hive"
- Requires-Dist: thrift>=0.10.0; extra == "hive"
- Provides-Extra: sqlalchemy
- Requires-Dist: sqlalchemy>=1.3.0; extra == "sqlalchemy"
- Provides-Extra: kerberos
- Requires-Dist: requests_kerberos>=0.12.0; extra == "kerberos"
- Provides-Extra: pyspark
- Requires-Dist: pyspark>=2.4.0; extra == "pyspark"
-
  PyStellarDB
  ===========

@@ -60,6 +28,7 @@ PLAIN Mode (No security is configured)
  ---------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, graph_name='pokemon')
@@ -70,11 +39,19 @@ PLAIN Mode (No security is configured)

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  LDAP Mode
  ---------
  .. code-block:: python

+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP', graph_name='pokemon')
@@ -85,6 +62,14 @@ LDAP Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, username='hive', password='123456', auth='LDAP')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())
+

  Kerberos Mode
  -------------
@@ -96,6 +81,12 @@ Kerberos Mode
  # In Linux: kinit -kt FILE_PATH_OF_KEYTABL PRINCIPAL_NAME
  # In Mac: kinit -t FILE_PATH_OF_KEYTABL -f PRINCIPAL_NAME

+ # Run with Kerberos path environment variables
+ # ENV KRB5_CONFIG=/etc/krb5.conf
+ # ENV KRB5_CLIENT_KTNAME=/etc/krb5.keytab
+ # ENV KRB5_KTNAME=/etc/krb5.keytab
+
+ """version < 1.0 """
  from pystellardb import stellar_hive

  conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS', graph_name='pokemon')
@@ -106,6 +97,13 @@ Kerberos Mode

  print cur.fetchall()

+ """version >= 1.0 """
+ from pystellardb import Connection, Graph
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000, kerberos_service_name='hive', auth='KERBEROS')
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ for row in query_result:
+     print(row[0].toJSON(), row[1].toJSON(), row[2].toJSON())

  Execute Hive Query
  ------------------
@@ -123,6 +121,7 @@ Execute Graph Query and change to a PySpark RDD object
  ------------------------------------------------------
  .. code-block:: python

+ """version < 1.0 """
  from pyspark import SparkContext
  from pystellardb import stellar_hive

@@ -140,6 +139,22 @@ Execute Graph Query and change to a PySpark RDD object

  rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)

+ """version >= 1.0 """
+ from pyspark import SparkContext
+ from pystellardb import Connection, Graph
+
+ sc = SparkContext("local", "Demo App")
+
+
+ conn = stellar_hive.StellarConnection(host="localhost", port=10000)
+ graph = Graph('pokemon', conn)
+ query_result = graph.execute('match p = (a)-[f]->(b) return a,f,b limit 1')
+ rdd = query_result.toRDD(sc)
+
+ def f(x): print(x)
+
+ rdd.map(lambda x: (x[0].toJSON(), x[1].toJSON(), x[2].toJSON())).foreach(f)
+
  # Every line of this query is in format of Tuple(VertexObject, EdgeObject, VertexObject)
  # Vertex and Edge object has a function of toJSON() which can print the object in JSON format

@@ -171,24 +186,24 @@ Dependencies
  Required:
  ------------

- - Python 2.7+ / Python 3
+ - Python 3.6+

- System SASL
- ------------
+ System SASL(Depricated since 1.0):
+ ----------------------------------

  Ubuntu:

  .. code-block:: bash

  apt-get install libsasl2-dev libsasl2-2 libsasl2-modules-gssapi-mit
- apt-get install python-dev gcc #Update python and gcc if needed
+ apt-get install python3-dev gcc #Update python and gcc if needed

  RHEL/CentOS:

  .. code-block:: bash

  yum install cyrus-sasl-md5 cyrus-sasl-plain cyrus-sasl-gssapi cyrus-sasl-devel
- yum install gcc-c++ python-devel.x86_64 #Update python and gcc if needed
+ yum install gcc-c++ python3-devel.x86_64 #Update python and gcc if needed

  # if pip3 install fails with a message like 'Can't connect to HTTPS URL because the SSL module is not available'
  # you may need to update ssl & reinstall python
@@ -0,0 +1,17 @@
+
+ from ._version import get_versions
+ from .v2 import Connection, Graph, CypherResult
+
+
+ __version__ = get_versions()['version']
+ del get_versions
+
+ __all__ = [
+     '__version__',
+     'stellar_hive',
+     'stellar_rdd',
+     'graph_types',
+     'Connection',
+     'Graph',
+     'CypherResult',
+ ]
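With these exports in place, the v2 classes and the package version are reachable from the package root; a quick sanity check (a sketch, names as declared in __all__ above):

    import pystellardb

    print(pystellardb.__version__)  # "1.0" for this release
    from pystellardb import Connection, Graph, CypherResult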
@@ -8,11 +8,11 @@ import json

  version_json = '''
  {
-  "date": "2024-10-30T09:30:52+0800",
+  "date": "2025-05-06T17:47:56+0800",
   "dirty": false,
   "error": null,
-  "full-revisionid": "88d8d41626e7131ffbf0e675bce34330a12fd953",
-  "version": "0.13.4"
+  "full-revisionid": "76d9b0b752e5e453e3046d5864750c4e3ad85b36",
+  "version": "1.0"
  }
  ''' # END VERSION_JSON

@@ -241,7 +241,7 @@ class Edge(GraphElement):
          if 'startKey' not in m:
              raise ValueError("Could not find start node entity key in JSON")

-         if schema.getVersion() == 18:
+         if schema.getVersion() >= 18:
              startUid = prop_dict['__srcuid']
              startLabelIdx = Vertex.parseLabelIdxFromRKV18(m['startKey'])
          else:
@@ -262,7 +262,7 @@ class Edge(GraphElement):
          if 'endKey' not in m:
              raise ValueError("Could not find end node entity key in JSON")

-         if schema.getVersion() == 18:
+         if schema.getVersion() >= 18:
              endUid = prop_dict['__dstuid']
              endLabelIdx = Vertex.parseLabelIdxFromRKV18(m['endKey'])
          else:
@@ -0,0 +1,49 @@
+ from __future__ import absolute_import
+
+ from puresasl.client import SASLClient, SASLError
+ from contextlib import contextmanager
+
+ @contextmanager
+ def error_catcher(self, Exc = Exception):
+     try:
+         self.error = None
+         yield
+     except Exc as e:
+         self.error = str(e)
+
+
+ class PureSASLClient(SASLClient):
+     def __init__(self, *args, **kwargs):
+         self.error = None
+         super(PureSASLClient, self).__init__(*args, **kwargs)
+
+     def start(self, mechanism):
+         with error_catcher(self, SASLError):
+             if isinstance(mechanism, list):
+                 self.choose_mechanism(mechanism)
+             else:
+                 self.choose_mechanism([mechanism])
+             return True, self.mechanism, self.process()
+         # else
+         return False, mechanism, None
+
+     def encode(self, incoming):
+         with error_catcher(self):
+             return True, self.unwrap(incoming)
+         # else
+         return False, None
+
+     def decode(self, outgoing):
+         with error_catcher(self):
+             return True, self.wrap(outgoing)
+         # else
+         return False, None
+
+     def step(self, challenge=None):
+         with error_catcher(self):
+             return True, self.process(challenge)
+         # else
+         return False, None
+
+     def getError(self):
+         return self.error
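stellar_hive.py (next file) hands this wrapper to thrift_sasl through a factory function; a minimal standalone sketch of the same pattern, assuming a SASL-enabled HiveServer2-compatible endpoint on localhost:10000:

    import thrift.transport.TSocket
    import thrift_sasl

    from pystellardb.sasl_compat import PureSASLClient

    socket = thrift.transport.TSocket.TSocket('localhost', 10000)
    sasl_factory = lambda: PureSASLClient('localhost', mechanism="PLAIN",
                                          username='hive', password='x')
    transport = thrift_sasl.TSaslClientTransport(sasl_factory, "PLAIN", socket)
    transport.open()  # SASL handshake driven by PureSASLClient.start()/step()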
@@ -9,8 +9,9 @@ from __future__ import absolute_import
  from __future__ import unicode_literals

  from pyhive import hive
- from pystellardb import graph_types
- from pystellardb import stellar_rdd
+ from . import graph_types
+ from . import stellar_rdd
+ from .sasl_compat import PureSASLClient

  from TCLIService import TCLIService
  from TCLIService import ttypes
@@ -102,12 +103,10 @@ class StellarConnection(object):
              self._transport = thrift.transport.TTransport.TBufferedTransport(
                  socket)
          elif auth in ('LDAP', 'KERBEROS', 'NONE', 'CUSTOM'):
-             # Defer import so package dependency is optional
-             import sasl
              import thrift_sasl

              if auth == 'KERBEROS':
-                 # KERBEROS mode in hive.server2.authentication is GSSAPI in sasl library
+                 # KERBEROS mode in hive.server2.authentication is GSSAPI in SASL
                  sasl_auth = 'GSSAPI'
              else:
                  sasl_auth = 'PLAIN'
@@ -116,20 +115,15 @@ class StellarConnection(object):
                      password = 'x'

              def sasl_factory():
-                 sasl_client = sasl.Client()
-                 sasl_client.setAttr('host', host)
                  if sasl_auth == 'GSSAPI':
-                     sasl_client.setAttr('service', kerberos_service_name)
+                     sasl_client = PureSASLClient(host, mechanism="GSSAPI", service=kerberos_service_name)
                  elif sasl_auth == 'PLAIN':
-                     sasl_client.setAttr('username', username)
-                     sasl_client.setAttr('password', password)
+                     sasl_client = PureSASLClient(host, mechanism="PLAIN", username=username, password=password)
                  else:
-                     raise AssertionError
-                 sasl_client.init()
+                     raise AssertionError("Unsupported SASL mechanism")
                  return sasl_client

-             self._transport = thrift_sasl.TSaslClientTransport(
-                 sasl_factory, sasl_auth, socket)
+             self._transport = thrift_sasl.TSaslClientTransport(sasl_factory, sasl_auth, socket)
          else:
              # All HS2 config options:
              # https://cwiki.apache.org/confluence/display/Hive/Setting+Up+HiveServer2#SettingUpHiveServer2-Configuration
@@ -174,9 +168,12 @@ class StellarConnection(object):
                  schemaInJson)

              # get schema from data
-             cursor.execute('manipulate graph {} get_schema_from_data'.format(graph_name))
-             self._graph_schema_from_data = cursor.fetchone()[0]
-             self._graph_schema_from_data = json.loads(self._graph_schema_from_data)
+             try:
+                 cursor.execute('manipulate graph {} get_schema_from_data'.format(graph_name))
+                 self._graph_schema_from_data = cursor.fetchone()[0]
+                 self._graph_schema_from_data = json.loads(self._graph_schema_from_data)
+             except:
+                 pass
          else:
              assert response.serverProtocolVersion == protocol_version, \
                  "Unable to handle protocol version {}".format(response.serverProtocolVersion)
@@ -0,0 +1,8 @@
+ from .connection import Connection, CypherResult
+ from .database import Graph
+
+ __all__ = [
+     "Graph",
+     "Connection",
+     "CypherResult",
+ ]
@@ -0,0 +1,276 @@
+ from __future__ import absolute_import
+ from __future__ import unicode_literals
+
+ from types import TracebackType
+ from typing import Any, Union
+ from pyhive import hive
+ from pyhive.exc import *
+ from TCLIService import TCLIService
+ from TCLIService import ttypes
+
+ import thrift.protocol.TBinaryProtocol
+ import thrift.transport.TSocket
+ import thrift.transport.TTransport
+ import getpass
+ import logging
+ import json
+ import sys
+
+ from pystellardb.stellar_rdd import transformToRDD
+ from pystellardb.graph_types import Vertex, Edge, Path, GraphSchema
+ from pystellardb.sasl_compat import PureSASLClient
+
+ __logger = logging.getLogger(__name__)
+
+ class CypherResult(hive.Cursor):
+     """
+     QueryResult class for handling query results.
+     """
+
+     def __init__(self, connection: Any, graph_schema: GraphSchema, arraysize: int = 1000):
+         super(CypherResult, self).__init__(connection, arraysize)
+         self._graph_schema = graph_schema
+         self._column_comments = []
+
+     @property
+     def description(self):
+         """This read-only attribute is a sequence of 7-item sequences.
+
+         Each of these sequences contains information describing one result column:
+
+         - name
+         - type_code
+         - display_size (None in current implementation)
+         - internal_size (None in current implementation)
+         - precision (None in current implementation)
+         - scale (None in current implementation)
+         - null_ok (always True in current implementation)
+
+         This attribute will be ``None`` for operations that do not return rows or if the cursor has
+         not had an operation invoked via the :py:meth:`execute` method yet.
+
+         The ``type_code`` can be interpreted by comparing it to the Type Objects specified in the
+         section below.
+         """
+         if self._operationHandle is None or not self._operationHandle.hasResultSet:
+             return None
+         if self._description is None:
+             req = ttypes.TGetResultSetMetadataReq(self._operationHandle)
+             response = self._connection.client.GetResultSetMetadata(req)
+             hive._check_status(response)
+             columns = response.schema.columns
+             self._description = []
+             # If it's a cypher query, column comment is not null
+             self._column_comments = [
+                 col.comment for col in response.schema.columns
+                 if col.comment is not None
+             ]
+
+             for col in columns:
+                 primary_type_entry = col.typeDesc.types[0]
+                 if primary_type_entry.primitiveEntry is None:
+                     # All fancy stuff maps to string
+                     type_code = ttypes.TTypeId._VALUES_TO_NAMES[
+                         ttypes.TTypeId.STRING_TYPE]
+                 else:
+                     type_id = primary_type_entry.primitiveEntry.type
+                     type_code = ttypes.TTypeId._VALUES_TO_NAMES[type_id]
+                 self._description.append(
+                     (col.columnName.decode('utf-8')
+                      if sys.version_info[0] == 2 else col.columnName,
+                      type_code.decode('utf-8') if sys.version_info[0] == 2 else
+                      type_code, None, None, None, None, True))
+         return self._description
+
+     def fetchone(self):
+         row = super(CypherResult, self).fetchone()
+
+         if row is None:
+             return None
+
+         parsed_row = []
+         for i in range(0, len(self._column_comments)):
+             parsed_row.append(
+                 self._convertData(self._column_comments[i], row[i]))
+
+         return tuple(parsed_row)
+
+     def _convertData(self, type, data):
+         """Convert Crux type to Readable type"""
+         if type == 'boolean':
+             return bool(data)
+         elif type == 'int':
+             return int(data)
+         elif type == 'long':
+             return int(data)
+         elif type == 'float' or type == 'double':
+             return float(data)
+         elif type == 'CruxType:Node' or type == 'GraphNode':
+             return Vertex.parseVertexFromJson(data)
+         elif type == 'CruxType:Relation' or type == 'GraphRelation':
+             return Edge.parseEdgeFromJson(self._graph_schema, data)
+         elif type == 'CruxType:Path':
+             return Path.parsePathFromJson(self._graph_schema, data)
+         elif type.startswith('CruxType:List'):
+             return self._parseList(type, data)
+         elif type.startswith('CruxType:Map'):
+             return self._parseMap(type, data)
+         else:
+             return data
+
+     def _parseList(self, type, data):
+         """Parse 'CruxType:List' type"""
+         parsed_data = json.loads(data)
+         newType = type[len('CruxType:List') + 1:type.find('>')]
+
+         return [self._convertData(newType, json.dumps(entry)) for entry in parsed_data]
+
+     def _parseMap(self, type, data):
+         """Parse 'CruxType:Map' type"""
+         parsed_data = json.loads(data)
+         newTypes = type[len('CruxType:Map') + 1:-2].split(',')
+
+         result = {}
+
+         for entry in parsed_data.keys():
+             key = self._convertData(newTypes[0], entry)
+             result[key] = self._convertData(newTypes[1], parsed_data[entry])
+
+         return result
+
+     def toRDD(self, sc, parallelism=1):
+         """
+         Transform to RDD
+         param sc: SparkContext
+         param parallelism: RDD parallelism
+         """
+         return transformToRDD(self, sc, parallelism)
+
+ class Connection(object):
+     """
+     Connection class for connecting to the Stellar database.
+     """
+
+     def __init__(
+         self,
+         host: str,
+         port: int,
+         auth: Union[str, None] = None,
+         username: Union[str, None] = None,
+         password: Union[str, None] = None,
+         kerberos_service_name: Union[str, None] = None,):
+         """Connect to HiveServer2
+
+         :param host: What host HiveServer2 runs on
+         :param port: What port HiveServer2 runs on. Defaults to 10000.
+         :param auth: The value of hive.server2.authentication used by HiveServer2. Defaults to ``NONE``.
+         :param username: Use with auth='LDAP' only
+         :param password: Use with auth='LDAP' only
+         :param kerberos_service_name: Use with auth='KERBEROS' only
+
+         The way to support LDAP and GSSAPI is originated from cloudera/Impyla:
+         https://github.com/cloudera/impyla/blob/255b07ed973d47a3395214ed92d35ec0615ebf62
+         /impala/_thrift_api.py#L152-L160
+         """
+         username = username or getpass.getuser()
+
+         if (password is not None) != (auth in ('LDAP', 'CUSTOM')):
+             raise ValueError(
+                 "Password should be set if and only if in LDAP or CUSTOM mode; "
+                 "Remove password or use one of those modes")
+         if (kerberos_service_name is not None) != (auth == 'KERBEROS'):
+             raise ValueError(
+                 "kerberos_service_name should be set if and only if in KERBEROS mode"
+             )
+         if port is None:
+             port = 10000
+         if auth is None:
+             auth = 'NONE'
+         socket = thrift.transport.TSocket.TSocket(host, port)
+         if auth == 'NOSASL':
+             # NOSASL corresponds to hive.server2.authentication=NOSASL in hive-site.xml
+             self._transport = thrift.transport.TTransport.TBufferedTransport(socket)
+         elif auth in ('LDAP', 'KERBEROS', 'NONE', 'CUSTOM'):
+             import thrift_sasl
+
+             if auth == 'KERBEROS':
+                 # KERBEROS mode in hive.server2.authentication is GSSAPI in SASL
+                 sasl_auth = 'GSSAPI'
+             else:
+                 sasl_auth = 'PLAIN'
+                 if password is None:
+                     # Password doesn't matter in NONE mode, just needs to be nonempty.
+                     password = 'x'
+
+             def sasl_factory():
+                 if sasl_auth == 'GSSAPI':
+                     sasl_client = PureSASLClient(host, mechanism="GSSAPI", service=kerberos_service_name)
+                 elif sasl_auth == 'PLAIN':
+                     sasl_client = PureSASLClient(host, mechanism="PLAIN", username=username, password=password)
+                 else:
+                     raise AssertionError("Unsupported SASL mechanism")
+                 return sasl_client
+
+             self._transport = thrift_sasl.TSaslClientTransport(sasl_factory, sasl_auth, socket)
+         else:
+             # All HS2 config options:
+             # https://cwiki.apache.org/confluence/display/Hive/Setting+Up+HiveServer2#SettingUpHiveServer2-Configuration
+             # PAM currently left to end user via thrift_transport option.
+             raise NotImplementedError(
+                 "Only NONE, NOSASL, LDAP, KERBEROS, CUSTOM "
+                 "authentication are supported, got {}".format(auth))
+         protocol = thrift.protocol.TBinaryProtocol.TBinaryProtocol(self._transport)
+         self._client = TCLIService.Client(protocol)
+         # oldest version that still contains features we care about
+         # "V6 uses binary type for binary payload (was string) and uses columnar result set"
+         protocol_version = ttypes.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6
+         try:
+             self._transport.open()
+             open_session_req = ttypes.TOpenSessionReq(
+                 client_protocol=protocol_version,
+                 configuration={},
+                 username=username,
+             )
+             response = self._client.OpenSession(open_session_req)
+             hive._check_status(response)
+             assert response.sessionHandle is not None, "Expected a session from OpenSession"
+             self._sessionHandle = response.sessionHandle
+             assert response.serverProtocolVersion == protocol_version, \
+                 "Unable to handle protocol version {}".format(response.serverProtocolVersion)
+
+         except:
+             self._transport.close()
+             raise
+
+     def __enter__(self):
+         """Transport should already be opened by __init__"""
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         """Call close"""
+         self.close()
+
+     @property
+     def client(self):
+         return self._client
+
+     @property
+     def sessionHandle(self):
+         return self._sessionHandle
+
+     def close(self):
+         req = ttypes.TCloseSessionReq(sessionHandle=self._sessionHandle)
+         response = self._client.CloseSession(req)
+         self._transport.close()
+         hive._check_status(response)
+
+     def execute(self, operation: str, graph_schema: Union[GraphSchema, None] = None) -> CypherResult:
+         """
+         Execute a query on the database.
+
+         :param operation: The query to execute
+         """
+         cursor = CypherResult(self, graph_schema)
+         cursor.execute(operation)
+         return cursor
+
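Connection doubles as a context manager (the session is closed by __exit__); a usage sketch against an assumed endpoint on localhost:10000:

    from pystellardb.v2 import Connection

    with Connection(host="localhost", port=10000) as conn:  # auth defaults to 'NONE'
        result = conn.execute('config query.lang cypher')   # returns a CypherResult
        result.close()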
@@ -0,0 +1,110 @@
+ """
+ Define database class for operations on a single graph.
+ """
+
+ from __future__ import absolute_import
+ from types import TracebackType
+ import logging
+ import json
+ from typing import Any, Union
+
+ from ..graph_types import GraphSchema
+ from .connection import Connection, CypherResult
+
+ _logger = logging.getLogger(__name__)
+
+ class Graph(object):
+     """
+     Database class for operations on a single graph.
+     """
+
+     def __init__(self, graph_name: str, connection: Connection):
+         self.graph_name = graph_name
+         self.connection = connection
+         self.graph_schema: GraphSchema = None
+         self._init_connection()
+
+     def __enter__(self):
+         """
+         Enter the database context.
+         """
+         return self
+
+     def __exit__(self, exc_type, exc_value, traceback):
+         """
+         Exit the database context.
+         """
+         if self.connection:
+             self.connection.close()
+             self.connection = None
+
+     def _init_connection(self):
+         """
+         Initialize the connection to the database.
+         """
+
+         self.connection.execute('config query.lang cypher')
+
+         """Try to bind cursor with graph name."""
+         try:
+             self.connection.execute(f'use graph {self.graph_name}')
+             self._get_graph_schema()
+         except Exception as e:
+             _logger.debug(f"graph {self.graph_name} not found")
+
+     def _get_graph_schema(self):
+         """
+         Get the graph schema.
+         """
+         query_result = self.connection.execute('DESCRIBE GRAPH {} RAW'.format(self.graph_name))
+         schemaInJson = query_result.fetchone()[0]
+         query_result.close()
+         self.graph_schema = GraphSchema.parseSchemaFromJson(schemaInJson)
+
+         # get schema from data
+         try:
+             query_result = self.connection.execute('manipulate graph {} get_schema_from_data'.format(self.graph_name))
+             self.graph_labels = query_result.fetchone()[0]
+             query_result.close()
+             self.graph_labels = json.loads(self.graph_labels)
+         except:
+             pass
+
+     def execute(self, query: str, *args, **kwargs) -> CypherResult:
+         """
+         Execute a query on the database.
+         """
+         query_result = self.connection.execute(query, self.graph_schema)
+
+         if (query.strip().lower().startswith('create graph')):
+             self._get_graph_schema()
+
+         return query_result
+
+     def create(self, cypher: str):
+         """
+         Create a graph.
+         """
+         create_result = self.connection.execute(cypher)
+         create_result.close()
+         self._get_graph_schema()
+
+     def drop(self):
+         """
+         Drop the graph.
+         """
+         drop_result = self.connection.execute('DROP GRAPH {}'.format(self.graph_name))
+         drop_result.close()
+         self.graph_schema = None
+
+     def get_graph_schema(self) -> Union[GraphSchema, None]:
+         """
+         Get the graph schema.
+         """
+         return self.graph_schema
+
+     def get_labels(self) -> Any:
+         """
+         Get the labels of the graph.
+         """
+         return self.graph_labels
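Because Graph.__exit__ closes the underlying connection, the two classes compose as below (a sketch assuming a 'pokemon' graph exists on the assumed localhost:10000 endpoint):

    from pystellardb.v2 import Connection, Graph

    conn = Connection(host="localhost", port=10000)
    with Graph('pokemon', conn) as g:
        print(g.get_graph_schema())  # None if the graph was not found
        rows = g.execute('match (a) return a limit 10').fetchall()
    # the connection is closed when the with block exits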
@@ -1,5 +1,6 @@
  #!/usr/bin/env python

+ import platform
  from setuptools import setup, find_packages
  from setuptools.command.test import test as TestCommand
  import versioneer
@@ -31,7 +32,7 @@ setup(
      author="Zhiping Wang",
      author_email="zhiping.wang@transwarp.io",
      license="Apache License, Version 2.0",
-     python_requires='>=2.7',
+     python_requires='>=3.6',
      packages=find_packages(),
      classifiers=[
          "Intended Audience :: Developers",
@@ -42,17 +43,12 @@ setup(
      install_requires=[
          'future',
          'python-dateutil',
-         'pyhive',
-         'sasl',
-         'thrift',
-         'thrift-sasl>=0.3.0'
+         'pyhive[hive_pure_sasl]',
+         'thrift>=0.10.0',
      ],
      extras_require={
-         'presto': ['requests>=1.0.0'],
-         'hive': ['sasl>=0.2.1', 'thrift>=0.10.0'],
-         'sqlalchemy': ['sqlalchemy>=1.3.0'],
-         'kerberos': ['requests_kerberos>=0.12.0'],
-         'pyspark': ['pyspark>=2.4.0']
+         'pyspark': ['pyspark>=2.4.0'],
+         'kerberos': ['kerberos>=1.3.0'],
      },
      tests_require=[
          'mock>=1.0.0',
@@ -60,9 +56,6 @@ setup(
          'pytest-cov',
          'requests>=1.0.0',
          'requests_kerberos>=0.12.0',
-         'sasl>=0.2.1',
-         'sqlalchemy>=1.3.0',
-         'thrift>=0.10.0',
      ],
      package_data={
          '': ['*.rst'],
@@ -1,22 +0,0 @@
1
- future
2
- python-dateutil
3
- pyhive
4
- sasl
5
- thrift
6
- thrift-sasl>=0.3.0
7
-
8
- [hive]
9
- sasl>=0.2.1
10
- thrift>=0.10.0
11
-
12
- [kerberos]
13
- requests_kerberos>=0.12.0
14
-
15
- [presto]
16
- requests>=1.0.0
17
-
18
- [pyspark]
19
- pyspark>=2.4.0
20
-
21
- [sqlalchemy]
22
- sqlalchemy>=1.3.0
@@ -1,4 +0,0 @@
1
-
2
- from ._version import get_versions
3
- __version__ = get_versions()['version']
4
- del get_versions