clickhouse-driver 0.2.5__cp311-cp311-musllinux_1_1_aarch64.whl → 0.2.9__cp311-cp311-musllinux_1_1_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. clickhouse_driver/__init__.py +1 -1
  2. clickhouse_driver/block.py +3 -2
  3. clickhouse_driver/bufferedreader.cpython-311-aarch64-linux-musl.so +0 -0
  4. clickhouse_driver/bufferedwriter.cpython-311-aarch64-linux-musl.so +0 -0
  5. clickhouse_driver/client.py +119 -99
  6. clickhouse_driver/clientinfo.py +2 -2
  7. clickhouse_driver/columns/arraycolumn.py +15 -6
  8. clickhouse_driver/columns/base.py +71 -7
  9. clickhouse_driver/columns/datecolumn.py +52 -13
  10. clickhouse_driver/columns/datetimecolumn.py +3 -2
  11. clickhouse_driver/columns/enumcolumn.py +27 -17
  12. clickhouse_driver/columns/jsoncolumn.py +37 -0
  13. clickhouse_driver/columns/largeint.cpython-311-aarch64-linux-musl.so +0 -0
  14. clickhouse_driver/columns/lowcardinalitycolumn.py +23 -4
  15. clickhouse_driver/columns/mapcolumn.py +9 -2
  16. clickhouse_driver/columns/nestedcolumn.py +2 -13
  17. clickhouse_driver/columns/numpy/datetimecolumn.py +21 -18
  18. clickhouse_driver/columns/numpy/lowcardinalitycolumn.py +2 -2
  19. clickhouse_driver/columns/service.py +12 -2
  20. clickhouse_driver/columns/tuplecolumn.py +31 -5
  21. clickhouse_driver/columns/util.py +2 -1
  22. clickhouse_driver/columns/uuidcolumn.py +1 -1
  23. clickhouse_driver/connection.py +117 -19
  24. clickhouse_driver/defines.py +12 -1
  25. clickhouse_driver/log.py +7 -3
  26. clickhouse_driver/numpy/helpers.py +5 -2
  27. clickhouse_driver/progress.py +15 -3
  28. clickhouse_driver/protocol.py +19 -3
  29. clickhouse_driver/settings/writer.py +7 -2
  30. clickhouse_driver/streams/native.py +24 -6
  31. clickhouse_driver/util/compat.py +12 -0
  32. clickhouse_driver/util/escape.py +36 -8
  33. clickhouse_driver/util/helpers.py +114 -0
  34. clickhouse_driver/varint.cpython-311-aarch64-linux-musl.so +0 -0
  35. {clickhouse_driver-0.2.5.dist-info → clickhouse_driver-0.2.9.dist-info}/METADATA +8 -8
  36. {clickhouse_driver-0.2.5.dist-info → clickhouse_driver-0.2.9.dist-info}/RECORD +71 -70
  37. {clickhouse_driver-0.2.5.dist-info → clickhouse_driver-0.2.9.dist-info}/WHEEL +1 -1
  38. {clickhouse_driver-0.2.5.dist-info → clickhouse_driver-0.2.9.dist-info}/LICENSE +0 -0
  39. {clickhouse_driver-0.2.5.dist-info → clickhouse_driver-0.2.9.dist-info}/top_level.txt +0 -0
clickhouse_driver/connection.py CHANGED
@@ -3,6 +3,7 @@ import socket
 import ssl
 from collections import deque
 from contextlib import contextmanager
+from sys import platform
 from time import time
 from urllib.parse import urlparse
 
@@ -19,11 +20,12 @@ from .log import log_block
 from .progress import Progress
 from .protocol import Compression, ClientPacketTypes, ServerPacketTypes
 from .queryprocessingstage import QueryProcessingStage
-from .reader import read_binary_str
+from .reader import read_binary_str, read_binary_uint64
 from .readhelpers import read_exception
-from .settings.writer import write_settings
+from .settings.writer import write_settings, SettingsFlags
 from .streams.native import BlockInputStream, BlockOutputStream
 from .util.compat import threading
+from .util.escape import escape_params
 from .varint import write_varint, read_varint
 from .writer import write_binary_str
 
@@ -44,17 +46,22 @@ class Packet(object):
 
 class ServerInfo(object):
     def __init__(self, name, version_major, version_minor, version_patch,
-                 revision, timezone, display_name):
+                 revision, timezone, display_name, used_revision):
         self.name = name
         self.version_major = version_major
        self.version_minor = version_minor
        self.version_patch = version_patch
        self.revision = revision
        self.timezone = timezone
+        self.session_timezone = None
        self.display_name = display_name
+        self.used_revision = used_revision
 
        super(ServerInfo, self).__init__()
 
+    def get_timezone(self):
+        return self.session_timezone or self.timezone
+
    def version_tuple(self):
        return self.version_major, self.version_minor, self.version_patch
 
@@ -66,6 +73,7 @@ class ServerInfo(object):
             ('name', self.name),
             ('version', version),
             ('revision', self.revision),
+            ('used revision', self.used_revision),
             ('timezone', self.timezone),
             ('display_name', self.display_name)
         ]
@@ -124,6 +132,15 @@ class Connection(object):
                                   ignored, ``True`` means that the query will
                                   fail with UNKNOWN_SETTING error.
                                   Defaults to ``False``.
+    :param tcp_keepalive: enables `TCP keepalive <https://tldp.org/HOWTO/
+                          TCP-Keepalive-HOWTO/overview.html>`_ on established
+                          connection. If is set to ``True``` system keepalive
+                          settings are used. You can also specify custom
+                          keepalive setting with tuple:
+                          ``(idle_time_sec, interval_sec, probes)``.
+                          Defaults to ``False``.
+    :param client_revision: can be used for client version downgrading.
+                            Defaults to ``None``.
     """
 
     def __init__(
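Both new options are ordinary connection keyword arguments, so they can be passed straight to ``Client``, which forwards connection-level keyword arguments to ``Connection``. A minimal usage sketch with a hypothetical host and credentials:

    from clickhouse_driver import Client

    client = Client(
        'ch.example.com',            # hypothetical host
        user='default',
        password='secret',
        # (idle_time_sec, interval_sec, probes); True uses system defaults.
        tcp_keepalive=(60, 10, 3),
        # Optionally pin an older protocol revision.
        client_revision=54459,
    )
    client.execute('SELECT 1')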
@@ -143,6 +160,8 @@ class Connection(object):
             server_hostname=None,
             alt_hosts=None,
             settings_is_important=False,
+            tcp_keepalive=False,
+            client_revision=None
     ):
         if secure:
             default_port = defines.DEFAULT_SECURE_PORT
@@ -164,6 +183,10 @@ class Connection(object):
         self.send_receive_timeout = send_receive_timeout
         self.sync_request_timeout = sync_request_timeout
         self.settings_is_important = settings_is_important
+        self.tcp_keepalive = tcp_keepalive
+        self.client_revision = min(
+            client_revision or defines.CLIENT_REVISION, defines.CLIENT_REVISION
+        )
 
         self.secure_socket = secure
         self.verify_cert = verify
@@ -282,19 +305,19 @@ class Connection(object):
     def _create_ssl_context(self, ssl_options):
         purpose = ssl.Purpose.SERVER_AUTH
 
-        version = ssl_options.get('ssl_version', ssl.PROTOCOL_TLS)
+        version = ssl_options.get('ssl_version', ssl.PROTOCOL_TLS_CLIENT)
         context = ssl.SSLContext(version)
+        context.check_hostname = self.verify_cert
 
         if 'ca_certs' in ssl_options:
             context.load_verify_locations(ssl_options['ca_certs'])
         elif ssl_options.get('cert_reqs') != ssl.CERT_NONE:
-            context.load_default_certs(purpose
-            )
+            context.load_default_certs(purpose)
         if 'ciphers' in ssl_options:
             context.set_ciphers(ssl_options['ciphers'])
 
         if 'cert_reqs' in ssl_options:
-            context.options = ssl_options['cert_reqs']
+            context.verify_mode = ssl_options['cert_reqs']
 
         if 'certfile' in ssl_options:
             keyfile = ssl_options.get('keyfile')
@@ -310,6 +333,8 @@ class Connection(object):
 
         # performance tweak
         self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+        if self.tcp_keepalive:
+            self._set_keepalive()
 
         self.fin = BufferedSocketReader(self.socket, defines.BUFFER_SIZE)
         self.fout = BufferedSocketWriter(self.socket, defines.BUFFER_SIZE)
@@ -317,10 +342,42 @@ class Connection(object):
         self.send_hello()
         self.receive_hello()
 
+        revision = self.server_info.used_revision
+        if revision >= defines.DBMS_MIN_PROTOCOL_VERSION_WITH_ADDENDUM:
+            self.send_addendum()
+
         self.block_in = self.get_block_in_stream()
         self.block_in_raw = BlockInputStream(self.fin, self.context)
         self.block_out = self.get_block_out_stream()
 
+    def _set_keepalive(self):
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+
+        if not isinstance(self.tcp_keepalive, tuple):
+            return
+
+        idle_time_sec, interval_sec, probes = self.tcp_keepalive
+
+        if platform == 'linux' or platform == 'win32':
+            # This should also work for Windows
+            # starting with Windows 10, version 1709.
+            self.socket.setsockopt(
+                socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle_time_sec
+            )
+            self.socket.setsockopt(
+                socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval_sec
+            )
+            self.socket.setsockopt(
+                socket.IPPROTO_TCP, socket.TCP_KEEPCNT, probes
+            )
+
+        elif platform == 'darwin':
+            TCP_KEEPALIVE = 0x10
+            # Only interval is available in mac os.
+            self.socket.setsockopt(
+                socket.IPPROTO_TCP, TCP_KEEPALIVE, interval_sec
+            )
+
     def _format_connection_error(self, e, host, port):
         err = (e.strerror + ' ') if e.strerror else ''
         return err + '({}:{})'.format(host, port)
@@ -410,7 +467,7 @@ class Connection(object):
         write_varint(defines.CLIENT_VERSION_MINOR, self.fout)
         # NOTE For backward compatibility of the protocol,
         # client cannot send its version_patch.
-        write_varint(defines.CLIENT_REVISION, self.fout)
+        write_varint(self.client_revision, self.fout)
         write_binary_str(self.database, self.fout)
         write_binary_str(self.user, self.fout)
         write_binary_str(self.password, self.fout)
@@ -426,25 +483,38 @@ class Connection(object):
         server_version_minor = read_varint(self.fin)
         server_revision = read_varint(self.fin)
 
+        used_revision = min(self.client_revision, server_revision)
+
         server_timezone = None
-        if server_revision >= \
+        if used_revision >= \
                 defines.DBMS_MIN_REVISION_WITH_SERVER_TIMEZONE:
             server_timezone = read_binary_str(self.fin)
 
         server_display_name = ''
-        if server_revision >= \
+        if used_revision >= \
                 defines.DBMS_MIN_REVISION_WITH_SERVER_DISPLAY_NAME:
             server_display_name = read_binary_str(self.fin)
 
         server_version_patch = server_revision
-        if server_revision >= \
+        if used_revision >= \
                 defines.DBMS_MIN_REVISION_WITH_VERSION_PATCH:
             server_version_patch = read_varint(self.fin)
 
+        if used_revision >= defines. \
+                DBMS_MIN_PROTOCOL_VERSION_WITH_PASSWORD_COMPLEXITY_RULES:
+            rules_size = read_varint(self.fin)
+            for _i in range(rules_size):
+                read_binary_str(self.fin)  # original_pattern
+                read_binary_str(self.fin)  # exception_message
+
+        if used_revision >= defines. \
+                DBMS_MIN_REVISION_WITH_INTERSERVER_SECRET_V2:
+            read_binary_uint64(self.fin)  # read_nonce
+
         self.server_info = ServerInfo(
             server_name, server_version_major, server_version_minor,
             server_version_patch, server_revision,
-            server_timezone, server_display_name
+            server_timezone, server_display_name, used_revision
         )
         self.context.server_info = self.server_info
 
@@ -463,6 +533,14 @@ class Connection(object):
             self.disconnect()
             raise errors.UnexpectedPacketFromServerError(message)
 
+    def send_addendum(self):
+        revision = self.server_info.used_revision
+
+        if revision >= defines.DBMS_MIN_PROTOCOL_VERSION_WITH_QUOTA_KEY:
+            write_binary_str(
+                self.context.client_settings['quota_key'], self.fout
+            )
+
     def ping(self):
         timeout = self.sync_request_timeout
 
@@ -538,6 +616,12 @@ class Connection(object):
         elif packet_type == ServerPacketTypes.PROFILE_EVENTS:
             packet.block = self.receive_data(may_be_compressed=False)
 
+        elif packet_type == ServerPacketTypes.TIMEZONE_UPDATE:
+            timezone = read_binary_str(self.fin)
+            if timezone:
+                logger.info('Server timezone changed to %s', timezone)
+                self.server_info.session_timezone = timezone
+
         else:
             message = 'Unknown packet {} from server {}'.format(
                 packet_type, self.get_description()
@@ -567,7 +651,7 @@ class Connection(object):
         return BlockOutputStream(self.fout, self.context)
 
     def receive_data(self, may_be_compressed=True, may_be_use_numpy=False):
-        revision = self.server_info.revision
+        revision = self.server_info.used_revision
 
         if revision >= defines.DBMS_MIN_REVISION_WITH_TEMPORARY_TABLES:
             read_binary_str(self.fin)
@@ -581,7 +665,7 @@ class Connection(object):
 
     def receive_progress(self):
         progress = Progress()
-        progress.read(self.server_info.revision, self.fin)
+        progress.read(self.server_info, self.fin)
         return progress
 
     def receive_profile_info(self):
@@ -597,14 +681,14 @@ class Connection(object):
         start = time()
         write_varint(ClientPacketTypes.DATA, self.fout)
 
-        revision = self.server_info.revision
+        revision = self.server_info.used_revision
         if revision >= defines.DBMS_MIN_REVISION_WITH_TEMPORARY_TABLES:
             write_binary_str(table_name, self.fout)
 
         self.block_out.write(block)
         logger.debug('Block "%s" send time: %f', table_name, time() - start)
 
-    def send_query(self, query, query_id=None):
+    def send_query(self, query, query_id=None, params=None):
         if not self.connected:
             self.connect()
 
@@ -612,9 +696,10 @@ class Connection(object):
 
         write_binary_str(query_id or '', self.fout)
 
-        revision = self.server_info.revision
+        revision = self.server_info.used_revision
         if revision >= defines.DBMS_MIN_REVISION_WITH_CLIENT_INFO:
-            client_info = ClientInfo(self.client_name, self.context)
+            client_info = ClientInfo(self.client_name, self.context,
+                                     client_revision=self.client_revision)
             client_info.query_kind = ClientInfo.QueryKind.INITIAL_QUERY
 
             client_info.write(revision, self.fout)
@@ -623,8 +708,11 @@ class Connection(object):
             revision >= defines
            .DBMS_MIN_REVISION_WITH_SETTINGS_SERIALIZED_AS_STRINGS
         )
+        settings_flags = 0
+        if self.settings_is_important:
+            settings_flags |= SettingsFlags.IMPORTANT
         write_settings(self.context.settings, self.fout, settings_as_strings,
-                       self.settings_is_important)
+                       settings_flags)
 
         if revision >= defines.DBMS_MIN_REVISION_WITH_INTERSERVER_SECRET:
             write_binary_str('', self.fout)
@@ -634,6 +722,16 @@ class Connection(object):
 
         write_binary_str(query, self.fout)
 
+        if revision >= defines.DBMS_MIN_PROTOCOL_VERSION_WITH_PARAMETERS:
+            if self.context.client_settings['server_side_params']:
+                # Always settings_as_strings = True
+                escaped = escape_params(
+                    params or {}, self.context, for_server=True
+                )
+            else:
+                escaped = {}
+            write_settings(escaped, self.fout, True, SettingsFlags.CUSTOM)
+
         logger.debug('Query: %s', query)
 
         self.fout.flush()
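When the ``server_side_params`` client setting is enabled, parameters are no longer substituted into the SQL text on the client; they are double-escaped and written as CUSTOM settings after the query, and the server resolves ``{name:Type}`` placeholders itself. A sketch, assuming a hypothetical host and that ``Client`` exposes ``server_side_params`` as a client setting:

    from clickhouse_driver import Client

    # server_side_params switches execute() from client-side substitution
    # to the protocol-level parameters written above.
    client = Client('ch.example.com', server_side_params=True)

    rv = client.execute(
        'SELECT {a:String} AS a, {b:UInt8} AS b',
        params={'a': "it's", 'b': 42},
    )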
clickhouse_driver/defines.py CHANGED
@@ -25,6 +25,17 @@ DBMS_MIN_PROTOCOL_VERSION_WITH_DISTRIBUTED_DEPTH = 54448
 DBMS_MIN_PROTOCOL_VERSION_WITH_INITIAL_QUERY_START_TIME = 54449
 DBMS_MIN_PROTOCOL_VERSION_WITH_INCREMENTAL_PROFILE_EVENTS = 54451
 DBMS_MIN_REVISION_WITH_PARALLEL_REPLICAS = 54453
+DBMS_MIN_REVISION_WITH_CUSTOM_SERIALIZATION = 54454
+DBMS_MIN_PROTOCOL_VERSION_WITH_PROFILE_EVENTS_IN_INSERT = 54456
+DBMS_MIN_PROTOCOL_VERSION_WITH_ADDENDUM = 54458
+DBMS_MIN_PROTOCOL_VERSION_WITH_QUOTA_KEY = 54458
+DBMS_MIN_PROTOCOL_VERSION_WITH_PARAMETERS = 54459
+DBMS_MIN_PROTOCOL_VERSION_WITH_SERVER_QUERY_TIME_IN_PROGRESS = 54460
+DBMS_MIN_PROTOCOL_VERSION_WITH_PASSWORD_COMPLEXITY_RULES = 54461
+DBMS_MIN_REVISION_WITH_INTERSERVER_SECRET_V2 = 54462
+DBMS_MIN_PROTOCOL_VERSION_WITH_TOTAL_BYTES_IN_PROGRESS = 54463
+DBMS_MIN_PROTOCOL_VERSION_WITH_TIMEZONE_UPDATES = 54464
+DBMS_MIN_REVISION_WITH_SYSTEM_KEYWORDS_TABLE = 54468
 
 # Timeouts
 DBMS_DEFAULT_CONNECT_TIMEOUT_SEC = 10
@@ -40,7 +51,7 @@ CLIENT_NAME = 'python-driver'
 CLIENT_VERSION_MAJOR = 20
 CLIENT_VERSION_MINOR = 10
 CLIENT_VERSION_PATCH = 2
-CLIENT_REVISION = 54453
+CLIENT_REVISION = DBMS_MIN_REVISION_WITH_SYSTEM_KEYWORDS_TABLE
 
 BUFFER_SIZE = 1048576
 
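During the hello exchange both sides now agree on ``used_revision = min(client_revision, server_revision)``, and every revision-gated feature is checked against that negotiated value. A small illustration of how pinning ``client_revision`` caps the feature set, using the constants just added:

    from clickhouse_driver import defines

    # Pretend the server is newer than the pinned client revision.
    client_revision = defines.DBMS_MIN_PROTOCOL_VERSION_WITH_PARAMETERS  # 54459
    server_revision = defines.CLIENT_REVISION                            # 54468

    used_revision = min(client_revision, server_revision)
    assert used_revision == 54459

    # Features gated on higher revisions stay off for this connection.
    assert used_revision < defines.DBMS_MIN_PROTOCOL_VERSION_WITH_TIMEZONE_UPDATES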
clickhouse_driver/log.py CHANGED
@@ -2,7 +2,8 @@ import logging
 
 logger = logging.getLogger(__name__)
 
-
+# Keep in sync with ClickHouse priorities
+# https://github.com/ClickHouse/ClickHouse/blob/master/src/Interpreters/InternalTextLogsQueue.cpp
 log_priorities = (
     'Unknown',
     'Fatal',
@@ -12,9 +13,12 @@ log_priorities = (
     'Notice',
     'Information',
     'Debug',
-    'Trace'
+    'Trace',
+    'Test',
 )
 
+num_priorities = len(log_priorities)
+
 
 def log_block(block):
     if block is None:
@@ -25,7 +29,7 @@ def log_block(block):
     for row in block.get_rows():
         row = dict(zip(column_names, row))
 
-        if 1 <= row['priority'] <= 8:
+        if 1 <= row['priority'] <= num_priorities:
             priority = log_priorities[row['priority']]
         else:
             priority = row[0]
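These priorities only matter when the server streams its own log records to the client (the Log packets rendered by ``log_block``). A sketch of turning that on, with a hypothetical host; the records are re-emitted through the ``clickhouse_driver.log`` logger:

    import logging

    from clickhouse_driver import Client

    logging.basicConfig(level=logging.INFO)

    # Ask the server to send its own log records along with query results.
    client = Client('ch.example.com', settings={'send_logs_level': 'trace'})
    client.execute('SELECT 1')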
clickhouse_driver/numpy/helpers.py CHANGED
@@ -1,13 +1,16 @@
 import numpy as np
 import pandas as pd
+from pandas.core.arrays import ExtensionArray
 
 
 def column_chunks(columns, n):
     for column in columns:
-        if not isinstance(column, (np.ndarray, pd.DatetimeIndex)):
+        if not isinstance(
+            column, (np.ndarray, pd.DatetimeIndex, ExtensionArray)
+        ):
             raise TypeError(
                 'Unsupported column type: {}. '
-                'ndarray/DatetimeIndex is expected.'
+                'ndarray/DatetimeIndex/ExtensionArray is expected.'
                 .format(type(column))
             )
 
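Accepting ``ExtensionArray`` means DataFrame columns backed by pandas extension dtypes (for example the nullable ``Int64`` dtype) can now be chunked for insertion. A sketch, assuming a hypothetical host and table:

    import pandas as pd
    from clickhouse_driver import Client

    client = Client('ch.example.com', settings={'use_numpy': True})
    client.execute(
        'CREATE TABLE IF NOT EXISTS t (x Nullable(Int64)) ENGINE = Memory'
    )

    # 'Int64' is a pandas extension dtype; its column is an ExtensionArray.
    df = pd.DataFrame({'x': pd.array([1, None, 3], dtype='Int64')})
    client.insert_dataframe('INSERT INTO t VALUES', df)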
clickhouse_driver/progress.py CHANGED
@@ -6,27 +6,39 @@ class Progress(object):
     def __init__(self):
         self.rows = 0
         self.bytes = 0
-        self.total_rows = 0
+        self.total_rows = 0  # total_rows_to_read
+        self.total_bytes = 0  # total_bytes_to_read
         self.written_rows = 0
         self.written_bytes = 0
+        self.elapsed_ns = 0
 
         super(Progress, self).__init__()
 
-    def read(self, server_revision, fin):
+    def read(self, server_info, fin):
         self.rows = read_varint(fin)
         self.bytes = read_varint(fin)
 
-        revision = server_revision
+        revision = server_info.used_revision
         if revision >= defines.DBMS_MIN_REVISION_WITH_TOTAL_ROWS_IN_PROGRESS:
             self.total_rows = read_varint(fin)
 
+        if revision >= defines. \
+                DBMS_MIN_PROTOCOL_VERSION_WITH_TOTAL_BYTES_IN_PROGRESS:
+            self.total_bytes = read_varint(fin)
+
         if revision >= defines.DBMS_MIN_REVISION_WITH_CLIENT_WRITE_INFO:
             self.written_rows = read_varint(fin)
             self.written_bytes = read_varint(fin)
 
+        if revision >= defines. \
+                DBMS_MIN_PROTOCOL_VERSION_WITH_SERVER_QUERY_TIME_IN_PROGRESS:
+            self.elapsed_ns = read_varint(fin)
+
     def increment(self, another_progress):
         self.rows += another_progress.rows
         self.bytes += another_progress.bytes
         self.total_rows += another_progress.total_rows
+        self.total_bytes += another_progress.total_bytes
         self.written_rows += another_progress.written_rows
         self.written_bytes += another_progress.written_bytes
+        self.elapsed_ns += another_progress.elapsed_ns
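The extra fields surface on the driver's query statistics, e.g. via ``client.last_query.progress`` after a call. A sketch with a hypothetical host; ``total_bytes`` and ``elapsed_ns`` stay ``0`` against servers older than the revisions above:

    from clickhouse_driver import Client

    client = Client('ch.example.com')
    client.execute('SELECT number FROM system.numbers LIMIT 1000')

    progress = client.last_query.progress
    print(progress.rows, progress.bytes)              # processed so far
    print(progress.total_rows, progress.total_bytes)  # totals to read, if reported
    print(progress.elapsed_ns)                        # server-side elapsed time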
clickhouse_driver/protocol.py CHANGED
@@ -29,7 +29,10 @@ class ClientPacketTypes(object):
 
     @classmethod
     def to_str(cls, packet):
-        return 'Unknown packet' if packet > 5 else cls._types_str[packet]
+        try:
+            return cls._types_str[packet]
+        except IndexError:
+            return 'Unknown packet'
 
 
 class ServerPacketTypes(object):
@@ -81,15 +84,28 @@ class ServerPacketTypes(object):
     # Packet with profile events from server.
     PROFILE_EVENTS = 14
 
+    MERGE_TREE_ALL_RANGES_ANNOUNCEMENT = 15
+
+    # Request from a MergeTree replica to a coordinator
+    MERGE_TREE_READ_TASK_REQUEST = 16
+
+    # Receive server's (session-wide) default timezone
+    TIMEZONE_UPDATE = 17
+
     _types_str = [
         'Hello', 'Data', 'Exception', 'Progress', 'Pong', 'EndOfStream',
         'ProfileInfo', 'Totals', 'Extremes', 'TablesStatusResponse', 'Log',
-        'TableColumns', 'PartUUIDs', 'ReadTaskRequest', 'ProfileEvents'
+        'TableColumns', 'PartUUIDs', 'ReadTaskRequest', 'ProfileEvents',
+        'MergeTreeAllRangesAnnouncement', 'MergeTreeReadTaskRequest',
+        'TimezoneUpdate'
     ]
 
     @classmethod
     def to_str(cls, packet):
-        return 'Unknown packet' if packet > 14 else cls._types_str[packet]
+        try:
+            return cls._types_str[packet]
+        except IndexError:
+            return 'Unknown packet'
 
     @classmethod
     def strings_in_message(cls, packet):
clickhouse_driver/settings/writer.py CHANGED
@@ -7,13 +7,18 @@ from .available import settings as available_settings
 logger = logging.getLogger(__name__)
 
 
-def write_settings(settings, buf, settings_as_strings, is_important=False):
+class SettingsFlags:
+    IMPORTANT = 0x1
+    CUSTOM = 0x2
+
+
+def write_settings(settings, buf, settings_as_strings, flags):
     for setting, value in (settings or {}).items():
         # If the server support settings as string we do not need to know
         # anything about them, so we can write any setting.
         if settings_as_strings:
             write_binary_str(setting, buf)
-            write_binary_uint8(int(is_important), buf)
+            write_binary_uint8(flags, buf)
             write_binary_str(str(value), buf)
 
         else:
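The single ``is_important`` boolean becomes a small bit field on the wire, so the same byte can mark an entry as important and/or as a custom element (the latter is how query parameters are written). A minimal illustration of how a caller composes it:

    from clickhouse_driver.settings.writer import SettingsFlags

    flags = 0
    flags |= SettingsFlags.IMPORTANT   # settings_is_important=True
    assert flags == 0x1
    assert (SettingsFlags.IMPORTANT | SettingsFlags.CUSTOM) == 0x3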
clickhouse_driver/streams/native.py CHANGED
@@ -1,10 +1,14 @@
+import logging
+
 from ..block import ColumnOrientedBlock, BlockInfo
 from ..columns.service import read_column, write_column
-from ..reader import read_binary_str
+from ..reader import read_binary_str, read_binary_uint8
 from ..varint import write_varint, read_varint
-from ..writer import write_binary_str
+from ..writer import write_binary_str, write_binary_uint8
 from .. import defines
 
+logger = logging.getLogger(__name__)
+
 
 class BlockOutputStream(object):
     def __init__(self, fout, context):
@@ -14,7 +18,7 @@ class BlockOutputStream(object):
         super(BlockOutputStream, self).__init__()
 
     def write(self, block):
-        revision = self.context.server_info.revision
+        revision = self.context.server_info.used_revision
         if revision >= defines.DBMS_MIN_REVISION_WITH_BLOCK_INFO:
             block.info.write(self.fout)
 
@@ -35,6 +39,12 @@ class BlockOutputStream(object):
                 except IndexError:
                     raise ValueError('Different rows length')
 
+                if revision >= \
+                        defines.DBMS_MIN_REVISION_WITH_CUSTOM_SERIALIZATION:
+                    # We write always sparse data without custom serialization.
+                    write_binary_uint8(0, self.fout)
+
+                logger.debug('Writing column %s', col_name)
                 write_column(self.context, col_name, col_type, items,
                              self.fout, types_check=block.types_check)
 
@@ -54,7 +64,7 @@ class BlockInputStream(object):
     def read(self, use_numpy=None):
         info = BlockInfo()
 
-        revision = self.context.server_info.revision
+        revision = self.context.server_info.used_revision
         if revision >= defines.DBMS_MIN_REVISION_WITH_BLOCK_INFO:
             info.read(self.fin)
 
@@ -70,9 +80,17 @@ class BlockInputStream(object):
                 names.append(column_name)
                 types.append(column_type)
 
+            has_custom_serialization = False
+            if revision >= defines.DBMS_MIN_REVISION_WITH_CUSTOM_SERIALIZATION:
+                has_custom_serialization = bool(read_binary_uint8(self.fin))
+
             if n_rows:
-                column = read_column(self.context, column_type, n_rows,
-                                     self.fin, use_numpy=use_numpy)
+                logger.debug('Reading column %s', column_name)
+                column = read_column(
+                    self.context, column_type, n_rows,
+                    self.fin, use_numpy=use_numpy,
+                    has_custom_serialization=has_custom_serialization
+                )
                 data.append(column)
 
         if self.context.client_settings['use_numpy']:
clickhouse_driver/util/compat.py CHANGED
@@ -5,6 +5,18 @@ try:
 except ImportError:
     import dummy_threading as threading  # noqa: F401
 
+import json  # noqa: F401
+try:
+    import orjson as json  # noqa: F811
+except ImportError:
+    pass
+
+try:
+    import ujson as json  # noqa: F811,F401
+except ImportError:
+    pass
+
+
 try:
     # since tzlocal 4.0+
     # this will avoid warning for get_localzone().key
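The compat module now picks the fastest available JSON implementation at import time: stdlib ``json``, overridden by ``orjson`` and then ``ujson`` when installed. This is presumably what the new ``jsoncolumn.py`` builds on. A quick check of which one was selected:

    # Optional accelerators; the driver silently falls back to stdlib json.
    #   pip install ujson    # or: pip install orjson
    from clickhouse_driver.util import compat

    print(compat.json.__name__)  # 'ujson', 'orjson' or 'json'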
clickhouse_driver/util/escape.py CHANGED
@@ -1,5 +1,6 @@
-from datetime import date, datetime
+from datetime import date, datetime, time
 from enum import Enum
+from functools import wraps
 from uuid import UUID
 
 from pytz import timezone
@@ -20,7 +21,7 @@ escape_chars_map = {
 
 
 def escape_datetime(item, context):
-    server_tz = timezone(context.server_info.timezone)
+    server_tz = timezone(context.server_info.get_timezone())
 
     if item.tzinfo is not None:
         item = item.astimezone(server_tz)
@@ -28,7 +29,24 @@ def escape_datetime(item, context):
     return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S')
 
 
-def escape_param(item, context):
+def maybe_enquote_for_server(f):
+    @wraps(f)
+    def wrapper(*args, **kwargs):
+        rv = f(*args, **kwargs)
+
+        if kwargs.get('for_server'):
+            is_str = isinstance(rv, str)
+
+            if not is_str or (is_str and not rv.startswith("'")):
+                rv = "'%s'" % rv
+
+        return rv
+
+    return wrapper
+
+
+@maybe_enquote_for_server
+def escape_param(item, context, for_server=False):
     if item is None:
         return 'NULL'
 
@@ -38,17 +56,27 @@ def escape_param(item, context):
     elif isinstance(item, date):
         return "'%s'" % item.strftime('%Y-%m-%d')
 
+    elif isinstance(item, time):
+        return "'%s'" % item.strftime('%H:%M:%S')
+
     elif isinstance(item, str):
+        # We need double escaping for server-side parameters.
+        if for_server:
+            item = ''.join(escape_chars_map.get(c, c) for c in item)
         return "'%s'" % ''.join(escape_chars_map.get(c, c) for c in item)
 
     elif isinstance(item, list):
-        return "[%s]" % ', '.join(str(escape_param(x, context)) for x in item)
+        return "[%s]" % ', '.join(
+            str(escape_param(x, context, for_server=for_server)) for x in item
+        )
 
     elif isinstance(item, tuple):
-        return "(%s)" % ', '.join(str(escape_param(x, context)) for x in item)
+        return "(%s)" % ', '.join(
+            str(escape_param(x, context, for_server=for_server)) for x in item
+        )
 
     elif isinstance(item, Enum):
-        return escape_param(item.value, context)
+        return escape_param(item.value, context, for_server=for_server)
 
     elif isinstance(item, UUID):
         return "'%s'" % str(item)
@@ -57,10 +85,10 @@ def escape_param(item, context):
         return item
 
 
-def escape_params(params, context):
+def escape_params(params, context, for_server=False):
     escaped = {}
 
     for key, value in params.items():
-        escaped[key] = escape_param(value, context)
+        escaped[key] = escape_param(value, context, for_server=for_server)
 
     return escaped
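With ``for_server=True`` string values are escaped twice (the server unescapes them once more when it substitutes the parameter) and non-string results are always wrapped in quotes by the new decorator. A quick sketch against the internal helper (not public API); ``context`` is only consulted for datetimes, so ``None`` is fine here:

    from clickhouse_driver.util.escape import escape_param

    escape_param("it's", None)                   # client-side: 'it\'s'
    escape_param("it's", None, for_server=True)  # double-escaped: 'it\\\'s'
    escape_param(5, None, for_server=True)       # non-strings get enquoted: '5'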