scylla-cqlsh 6.0.29 (cp310-cp310-win_amd64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- copyutil.cp310-win_amd64.pyd +0 -0
- cqlsh/__init__.py +1 -0
- cqlsh/__main__.py +11 -0
- cqlsh/cqlsh.py +2736 -0
- cqlshlib/__init__.py +90 -0
- cqlshlib/_version.py +34 -0
- cqlshlib/authproviderhandling.py +176 -0
- cqlshlib/copyutil.py +2762 -0
- cqlshlib/cql3handling.py +1670 -0
- cqlshlib/cqlhandling.py +333 -0
- cqlshlib/cqlshhandling.py +314 -0
- cqlshlib/displaying.py +128 -0
- cqlshlib/formatting.py +601 -0
- cqlshlib/helptopics.py +190 -0
- cqlshlib/pylexotron.py +562 -0
- cqlshlib/saferscanner.py +91 -0
- cqlshlib/sslhandling.py +109 -0
- cqlshlib/tracing.py +90 -0
- cqlshlib/util.py +183 -0
- cqlshlib/wcwidth.py +379 -0
- scylla_cqlsh-6.0.29.dist-info/METADATA +108 -0
- scylla_cqlsh-6.0.29.dist-info/RECORD +26 -0
- scylla_cqlsh-6.0.29.dist-info/WHEEL +5 -0
- scylla_cqlsh-6.0.29.dist-info/entry_points.txt +2 -0
- scylla_cqlsh-6.0.29.dist-info/licenses/LICENSE.txt +204 -0
- scylla_cqlsh-6.0.29.dist-info/top_level.txt +3 -0
cqlshlib/sslhandling.py
ADDED
@@ -0,0 +1,109 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import ssl

import configparser


def ssl_settings(host, config_file, env=os.environ):
    """
    Function which generates SSL setting for cassandra.Cluster

    Params:
    * host .........: hostname of Cassandra node.
    * env ..........: environment variables. SSL factory will use, if passed,
                      SSL_CERTFILE and SSL_VALIDATE variables.
    * config_file ..: path to cqlsh config file (usually ~/.cqlshrc).
                      SSL factory will use, if set, certfile and validate
                      options in [ssl] section, as well as host to certfile
                      mapping in [certfiles] section.

    [certfiles] section is optional, 'validate' setting in [ssl] section is
    optional too. If validation is enabled then SSL certfile must be provided
    either in the config file or as an environment variable.
    Environment variables override any options set in cqlsh config file.
    """
    configs = configparser.ConfigParser()
    configs.read(config_file)

    def get_option(section, option):
        try:
            return configs.get(section, option)
        except configparser.Error:
            return None

    def get_best_tls_protocol(ssl_ver_str):
        if ssl_ver_str:
            print("Warning: Explicit SSL and TLS versions in the cqlshrc file or in SSL_VERSION environment property are ignored as the protocol is auto-negotiated.\n")
        return ssl.PROTOCOL_TLS_CLIENT

    ssl_validate = env.get('SSL_VALIDATE')
    if ssl_validate is None:
        ssl_validate = get_option('ssl', 'validate')
    ssl_validate = ssl_validate is None or ssl_validate.lower() != 'false'

    ssl_check_hostname = env.get('SSL_CHECK_HOSTNAME')
    if ssl_check_hostname is None:
        ssl_check_hostname = get_option('ssl', 'check_hostname')
    ssl_check_hostname = ssl_check_hostname is not None and ssl_check_hostname.lower() != 'false'

    if ssl_check_hostname and not ssl_validate:
        sys.exit("SSL certificate hostname checking "
                 "(`check_hostname` in the [ssl] section) must be turned off "
                 "if certificate `validate` is turned off.")

    ssl_version_str = env.get('SSL_VERSION')
    if ssl_version_str is None:
        ssl_version_str = get_option('ssl', 'version')

    ssl_version = get_best_tls_protocol(ssl_version_str)

    ssl_certfile = env.get('SSL_CERTFILE')
    if ssl_certfile is None:
        ssl_certfile = get_option('certfiles', host)
    if ssl_certfile is None:
        ssl_certfile = get_option('ssl', 'certfile')
    if ssl_validate and ssl_certfile is None:
        sys.exit("Validation is enabled; SSL transport factory requires a valid certfile "
                 "to be specified. Please provide path to the certfile in [ssl] section "
                 "as 'certfile' option in %s (or use [certfiles] section) or set SSL_CERTFILE "
                 "environment variable." % (config_file,))
    if ssl_certfile is not None:
        ssl_certfile = os.path.expanduser(ssl_certfile)

    userkey = get_option('ssl', 'userkey')
    if userkey:
        userkey = os.path.expanduser(userkey)
    usercert = get_option('ssl', 'usercert')
    if usercert:
        usercert = os.path.expanduser(usercert)

    ssl_context = ssl.SSLContext(ssl_version)
    ssl_context.check_hostname = ssl_check_hostname
    if usercert and userkey:
        ssl_context.load_cert_chain(certfile=usercert,
                                    keyfile=userkey)
    if (usercert and not userkey) or (userkey and not usercert):
        print("Warning: userkey and usercert from [ssl] section, should be both configured, otherwise won't be used")

    ssl_context.verify_mode = ssl.CERT_REQUIRED if ssl_validate else ssl.CERT_NONE
    if ssl_certfile:
        ssl_context.load_verify_locations(cafile=ssl_certfile)

    return ssl_context
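For orientation, a minimal usage sketch (not part of this wheel) of how the ssl.SSLContext returned by ssl_settings() might be handed to the driver. It assumes the cassandra-driver / scylla-driver Cluster API and a ~/.cqlshrc containing an [ssl] section; host, port, and paths are placeholders.

# Hypothetical sketch: feed the SSLContext built by ssl_settings() into the driver.
import os

from cassandra.cluster import Cluster          # assumes cassandra-driver / scylla-driver is installed
from cqlshlib.sslhandling import ssl_settings

host = "127.0.0.1"
config_file = os.path.expanduser("~/.cqlshrc")  # [ssl] / [certfiles] sections are read from here

# Environment variables (SSL_CERTFILE, SSL_VALIDATE, ...) override the config file.
ssl_context = ssl_settings(host, config_file)

cluster = Cluster([host], ssl_context=ssl_context)
session = cluster.connect()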
cqlshlib/tracing.py
ADDED
@@ -0,0 +1,90 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
import time

from cassandra.query import QueryTrace, TraceUnavailable
from cqlshlib.displaying import MAGENTA


def print_trace_session(shell, session, session_id, partial_session=False):
    """
    Lookup a trace by session and trace session ID, then print it.
    """
    trace = QueryTrace(session_id, session)
    try:
        wait_for_complete = not partial_session
        trace.populate(wait_for_complete=wait_for_complete)
    except TraceUnavailable:
        shell.printerr("Session %s wasn't found." % session_id)
    else:
        print_trace(shell, trace)


def print_trace(shell, trace):
    """
    Print an already populated cassandra.query.QueryTrace instance.
    """
    rows = make_trace_rows(trace)
    if not rows:
        shell.printerr("No rows for session %s found." % (trace.trace_id,))
        return
    names = ['activity', 'timestamp', 'source', 'source_elapsed', 'client']

    formatted_names = list(map(shell.myformat_colname, names))
    formatted_values = [list(map(shell.myformat_value, row)) for row in rows]

    shell.writeresult('')
    shell.writeresult('Tracing session: ', color=MAGENTA, newline=False)
    shell.writeresult(trace.trace_id)
    shell.writeresult('')
    shell.print_formatted_result(formatted_names, formatted_values, with_header=True, tty=shell.tty)
    shell.writeresult('')


def make_trace_rows(trace):
    if not trace.events:
        return []

    rows = [[trace.request_type, str(datetime_from_utc_to_local(trace.started_at)), trace.coordinator, 0, trace.client]]

    # append main rows (from events table).
    for event in trace.events:
        rows.append(["%s [%s]" % (event.description, event.thread_name),
                     str(datetime_from_utc_to_local(event.datetime)),
                     event.source,
                     total_micro_seconds(event.source_elapsed),
                     trace.client])
    # append footer row (from sessions table).
    if trace.duration:
        finished_at = (datetime_from_utc_to_local(trace.started_at) + trace.duration)
        rows.append(['Request complete', str(finished_at), trace.coordinator, total_micro_seconds(trace.duration), trace.client])

    return rows


def total_micro_seconds(td):
    """
    Convert a timedelta into total microseconds
    """
    return int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6)) if td else "--"


def datetime_from_utc_to_local(utc_datetime):
    now_timestamp = time.time()
    offset = datetime.fromtimestamp(now_timestamp) - datetime.utcfromtimestamp(now_timestamp)
    return utc_datetime + offset
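The print_trace* functions above require a live cqlsh shell object and a populated cassandra.query.QueryTrace, but the two standalone helpers can be exercised directly; a short illustrative sketch (not shipped in the package):

from datetime import datetime, timedelta

from cqlshlib.tracing import datetime_from_utc_to_local, total_micro_seconds

# A 1.5 s trace duration becomes 1,500,000 microseconds; a missing duration renders as "--".
print(total_micro_seconds(timedelta(seconds=1, microseconds=500000)))  # 1500000
print(total_micro_seconds(None))                                       # --

# Trace timestamps arrive in UTC; shift one into the local timezone for display.
print(datetime_from_utc_to_local(datetime(2024, 1, 1, 12, 0)))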
cqlshlib/util.py
ADDED
@@ -0,0 +1,183 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import cProfile
import codecs
import pstats
import os
import errno
import stat

from datetime import timedelta, tzinfo
from io import StringIO

try:
    from line_profiler import LineProfiler
    HAS_LINE_PROFILER = True
except ImportError:
    HAS_LINE_PROFILER = False

ZERO = timedelta(0)


class UTC(tzinfo):
    """UTC"""

    def utcoffset(self, dt):
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return ZERO


def split_list(items, pred):
    """
    Split up a list (or other iterable) on the elements which satisfy the
    given predicate 'pred'. Elements for which 'pred' returns true start a new
    sublist for subsequent elements, which will accumulate in the new sublist
    until the next satisfying element.

    >>> split_list([0, 1, 2, 5, 99, 8], lambda n: (n % 2) == 0)
    [[0], [1, 2], [5, 99, 8], []]
    """

    thisresult = []
    results = [thisresult]
    for i in items:
        thisresult.append(i)
        if pred(i):
            thisresult = []
            results.append(thisresult)
    return results


def find_common_prefix(strs):
    """
    Given a list (iterable) of strings, return the longest common prefix.

    >>> find_common_prefix(['abracadabra', 'abracadero', 'abranch'])
    'abra'
    >>> find_common_prefix(['abracadabra', 'abracadero', 'mt. fuji'])
    ''
    """

    common = []
    for cgroup in zip(*strs):
        if all(x == cgroup[0] for x in cgroup[1:]):
            common.append(cgroup[0])
        else:
            break
    return ''.join(common)


def list_bifilter(pred, iterable):
    """
    Filter an iterable into two output lists: the first containing all
    elements of the iterable for which 'pred' returns true, and the second
    containing all others. Order of the elements is otherwise retained.

    >>> list_bifilter(lambda x: isinstance(x, int), (4, 'bingo', 1.2, 6, True))
    ([4, 6], ['bingo', 1.2, True])
    """

    yes_s = []
    no_s = []
    for i in iterable:
        (yes_s if pred(i) else no_s).append(i)
    return yes_s, no_s


def identity(x):
    return x


def trim_if_present(s, prefix):
    if s.startswith(prefix):
        return s[len(prefix):]
    return s


def is_file_secure(filename):
    try:
        st = os.stat(filename)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        # the file doesn't exist, the security of it is irrelevant
        return True
    uid = os.getuid()

    # Skip enforcing the file owner and UID matching for the root user (uid == 0).
    # This is to allow "sudo cqlsh" to work with user owned credentials file.
    return (uid == 0 or st.st_uid == uid) and stat.S_IMODE(st.st_mode) & (stat.S_IRGRP | stat.S_IROTH) == 0


def get_file_encoding_bomsize(filename):
    """
    Checks the beginning of a file for a Unicode BOM. Based on this check,
    the encoding that should be used to open the file and the number of
    bytes that should be skipped (to skip the BOM) are returned.
    """
    bom_encodings = ((codecs.BOM_UTF8, 'utf-8-sig'),
                     (codecs.BOM_UTF16_LE, 'utf-16le'),
                     (codecs.BOM_UTF16_BE, 'utf-16be'),
                     (codecs.BOM_UTF32_LE, 'utf-32be'),
                     (codecs.BOM_UTF32_BE, 'utf-32be'))

    firstbytes = open(filename, 'rb').read(4)
    for bom, encoding in bom_encodings:
        if firstbytes.startswith(bom):
            file_encoding, size = encoding, len(bom)
            break
    else:
        file_encoding, size = "utf-8", 0

    return file_encoding, size


def profile_on(fcn_names=None):
    if fcn_names and HAS_LINE_PROFILER:
        pr = LineProfiler()
        for fcn_name in fcn_names:
            pr.add_function(fcn_name)
        pr.enable()
        return pr

    pr = cProfile.Profile()
    pr.enable()
    return pr


def profile_off(pr, file_name):
    pr.disable()
    s = StringIO()

    if HAS_LINE_PROFILER and isinstance(pr, LineProfiler):
        pr.print_stats(s)
    else:
        ps = pstats.Stats(pr, stream=s).sort_stats('cumulative')
        ps.print_stats()

    ret = s.getvalue()
    if file_name:
        with open(file_name, 'w') as f:
            print("Writing to %s\n" % (f.name, ))
            f.write(ret)
    return ret
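A brief usage sketch (illustrative only, not shipped in the wheel) of the pure list/string helpers defined above; the inputs are placeholders and the expected results follow the docstrings.

from cqlshlib.util import find_common_prefix, list_bifilter, split_list, trim_if_present

print(split_list([0, 1, 2, 5, 99, 8], lambda n: n % 2 == 0))
# [[0], [1, 2], [5, 99, 8], []]

print(find_common_prefix(['abracadabra', 'abracadero', 'abranch']))
# abra

print(list_bifilter(lambda name: name.endswith('.py'), ['cqlsh.py', 'LICENSE.txt', 'util.py']))
# (['cqlsh.py', 'util.py'], ['LICENSE.txt'])

print(trim_if_present('cqlsh> SELECT 1;', 'cqlsh> '))
# SELECT 1;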