clickhouse_driver-0.2.10-cp311-cp311-musllinux_1_2_s390x.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. clickhouse_driver/__init__.py +9 -0
  2. clickhouse_driver/block.py +227 -0
  3. clickhouse_driver/blockstreamprofileinfo.py +22 -0
  4. clickhouse_driver/bufferedreader.cpython-311-s390x-linux-musl.so +0 -0
  5. clickhouse_driver/bufferedwriter.cpython-311-s390x-linux-musl.so +0 -0
  6. clickhouse_driver/client.py +812 -0
  7. clickhouse_driver/clientinfo.py +119 -0
  8. clickhouse_driver/columns/__init__.py +0 -0
  9. clickhouse_driver/columns/arraycolumn.py +161 -0
  10. clickhouse_driver/columns/base.py +221 -0
  11. clickhouse_driver/columns/boolcolumn.py +7 -0
  12. clickhouse_driver/columns/datecolumn.py +108 -0
  13. clickhouse_driver/columns/datetimecolumn.py +203 -0
  14. clickhouse_driver/columns/decimalcolumn.py +116 -0
  15. clickhouse_driver/columns/enumcolumn.py +129 -0
  16. clickhouse_driver/columns/exceptions.py +12 -0
  17. clickhouse_driver/columns/floatcolumn.py +34 -0
  18. clickhouse_driver/columns/intcolumn.py +157 -0
  19. clickhouse_driver/columns/intervalcolumn.py +33 -0
  20. clickhouse_driver/columns/ipcolumn.py +118 -0
  21. clickhouse_driver/columns/jsoncolumn.py +37 -0
  22. clickhouse_driver/columns/largeint.cpython-311-s390x-linux-musl.so +0 -0
  23. clickhouse_driver/columns/lowcardinalitycolumn.py +142 -0
  24. clickhouse_driver/columns/mapcolumn.py +73 -0
  25. clickhouse_driver/columns/nestedcolumn.py +10 -0
  26. clickhouse_driver/columns/nothingcolumn.py +13 -0
  27. clickhouse_driver/columns/nullablecolumn.py +7 -0
  28. clickhouse_driver/columns/nullcolumn.py +15 -0
  29. clickhouse_driver/columns/numpy/__init__.py +0 -0
  30. clickhouse_driver/columns/numpy/base.py +47 -0
  31. clickhouse_driver/columns/numpy/boolcolumn.py +8 -0
  32. clickhouse_driver/columns/numpy/datecolumn.py +19 -0
  33. clickhouse_driver/columns/numpy/datetimecolumn.py +146 -0
  34. clickhouse_driver/columns/numpy/floatcolumn.py +24 -0
  35. clickhouse_driver/columns/numpy/intcolumn.py +43 -0
  36. clickhouse_driver/columns/numpy/lowcardinalitycolumn.py +96 -0
  37. clickhouse_driver/columns/numpy/service.py +58 -0
  38. clickhouse_driver/columns/numpy/stringcolumn.py +78 -0
  39. clickhouse_driver/columns/numpy/tuplecolumn.py +37 -0
  40. clickhouse_driver/columns/service.py +185 -0
  41. clickhouse_driver/columns/simpleaggregatefunctioncolumn.py +7 -0
  42. clickhouse_driver/columns/stringcolumn.py +73 -0
  43. clickhouse_driver/columns/tuplecolumn.py +63 -0
  44. clickhouse_driver/columns/util.py +61 -0
  45. clickhouse_driver/columns/uuidcolumn.py +64 -0
  46. clickhouse_driver/compression/__init__.py +32 -0
  47. clickhouse_driver/compression/base.py +87 -0
  48. clickhouse_driver/compression/lz4.py +21 -0
  49. clickhouse_driver/compression/lz4hc.py +9 -0
  50. clickhouse_driver/compression/zstd.py +20 -0
  51. clickhouse_driver/connection.py +825 -0
  52. clickhouse_driver/context.py +36 -0
  53. clickhouse_driver/dbapi/__init__.py +62 -0
  54. clickhouse_driver/dbapi/connection.py +99 -0
  55. clickhouse_driver/dbapi/cursor.py +370 -0
  56. clickhouse_driver/dbapi/errors.py +40 -0
  57. clickhouse_driver/dbapi/extras.py +73 -0
  58. clickhouse_driver/defines.py +58 -0
  59. clickhouse_driver/errors.py +453 -0
  60. clickhouse_driver/log.py +48 -0
  61. clickhouse_driver/numpy/__init__.py +0 -0
  62. clickhouse_driver/numpy/block.py +8 -0
  63. clickhouse_driver/numpy/helpers.py +28 -0
  64. clickhouse_driver/numpy/result.py +123 -0
  65. clickhouse_driver/opentelemetry.py +43 -0
  66. clickhouse_driver/progress.py +44 -0
  67. clickhouse_driver/protocol.py +130 -0
  68. clickhouse_driver/queryprocessingstage.py +8 -0
  69. clickhouse_driver/reader.py +69 -0
  70. clickhouse_driver/readhelpers.py +26 -0
  71. clickhouse_driver/result.py +144 -0
  72. clickhouse_driver/settings/__init__.py +0 -0
  73. clickhouse_driver/settings/available.py +405 -0
  74. clickhouse_driver/settings/types.py +50 -0
  75. clickhouse_driver/settings/writer.py +34 -0
  76. clickhouse_driver/streams/__init__.py +0 -0
  77. clickhouse_driver/streams/compressed.py +88 -0
  78. clickhouse_driver/streams/native.py +108 -0
  79. clickhouse_driver/util/__init__.py +0 -0
  80. clickhouse_driver/util/compat.py +39 -0
  81. clickhouse_driver/util/escape.py +94 -0
  82. clickhouse_driver/util/helpers.py +173 -0
  83. clickhouse_driver/varint.cpython-311-s390x-linux-musl.so +0 -0
  84. clickhouse_driver/writer.py +67 -0
  85. clickhouse_driver-0.2.10.dist-info/METADATA +215 -0
  86. clickhouse_driver-0.2.10.dist-info/RECORD +89 -0
  87. clickhouse_driver-0.2.10.dist-info/WHEEL +5 -0
  88. clickhouse_driver-0.2.10.dist-info/licenses/LICENSE +21 -0
  89. clickhouse_driver-0.2.10.dist-info/top_level.txt +1 -0
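Before the per-file diffs, a minimal usage sketch of the packaged driver (assuming a ClickHouse server reachable on localhost:9000, the native-protocol default):

    from clickhouse_driver import Client

    client = Client('localhost')

    # execute() returns a list of row tuples by default.
    rows = client.execute('SELECT version()')
    print(rows)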
clickhouse_driver/numpy/result.py
@@ -0,0 +1,123 @@
+ from itertools import chain
+
+ import numpy as np
+ import pandas as pd
+ from pandas.api.types import union_categoricals
+
+ from ..progress import Progress
+ from ..result import QueryResult
+
+
+ class NumpyQueryResult(QueryResult):
+     """
+     Stores query result from multiple blocks as numpy arrays.
+     """
+
+     def store(self, packet):
+         block = getattr(packet, 'block', None)
+         if block is None:
+             return
+
+         # Header block contains no rows. Pick columns from it.
+         if block.num_rows:
+             if self.columnar:
+                 self.data.append(block.get_columns())
+             else:
+                 self.data.extend(block.get_rows())
+
+         elif not self.columns_with_types:
+             self.columns_with_types = block.columns_with_types
+
+     def get_result(self):
+         """
+         :return: stored query result.
+         """
+
+         for packet in self.packet_generator:
+             self.store(packet)
+
+         if self.columnar:
+             data = []
+             # Transpose to a list of columns, each column is list of chunks
+             for column_chunks in zip(*self.data):
+                 # Concatenate chunks for each column
+                 if isinstance(column_chunks[0], np.ndarray):
+                     column = np.concatenate(column_chunks)
+                 elif isinstance(column_chunks[0], pd.Categorical):
+                     column = union_categoricals(column_chunks)
+                 else:
+                     column = tuple(chain.from_iterable(column_chunks))
+                 data.append(column)
+         else:
+             data = self.data
+
+         if self.with_column_types:
+             return data, self.columns_with_types
+         else:
+             return data
+
+
+ class NumpyProgressQueryResult(NumpyQueryResult):
+     """
+     Stores query result and progress information from multiple blocks.
+     Provides iteration over query progress.
+     """
+
+     def __init__(self, *args, **kwargs):
+         self.progress_totals = Progress()
+
+         super(NumpyProgressQueryResult, self).__init__(*args, **kwargs)
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         while True:
+             packet = next(self.packet_generator)
+             progress_packet = getattr(packet, 'progress', None)
+             if progress_packet:
+                 self.progress_totals.increment(progress_packet)
+                 return (
+                     self.progress_totals.rows, self.progress_totals.total_rows
+                 )
+             else:
+                 self.store(packet)
+
+     def get_result(self):
+         # Read all progress packets.
+         for _ in self:
+             pass
+
+         return super(NumpyProgressQueryResult, self).get_result()
+
+
+ class NumpyIterQueryResult(object):
+     """
+     Provides iteration over returned data by chunks (streaming by chunks).
+     """
+
+     def __init__(
+             self, packet_generator,
+             with_column_types=False):
+         self.packet_generator = packet_generator
+         self.with_column_types = with_column_types
+
+         self.first_block = True
+         super(NumpyIterQueryResult, self).__init__()
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         packet = next(self.packet_generator)
+         block = getattr(packet, 'block', None)
+         if block is None:
+             return []
+
+         if self.first_block and self.with_column_types:
+             self.first_block = False
+             rv = [block.columns_with_types]
+             rv.extend(block.get_rows())
+             return rv
+         else:
+             return block.get_rows()
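A standalone sketch of the per-column chunk merging NumpyQueryResult.get_result performs in columnar mode: numpy arrays are concatenated, pandas categoricals are unioned, anything else is chained into a tuple. The chunk values here are invented for illustration.

    import numpy as np
    import pandas as pd
    from pandas.api.types import union_categoricals

    # Two result blocks, already transposed into per-column chunks.
    num_chunks = (np.array([1, 2]), np.array([3, 4]))
    cat_chunks = (pd.Categorical(['a', 'b']), pd.Categorical(['b', 'c']))

    print(np.concatenate(num_chunks))      # [1 2 3 4]
    print(union_categoricals(cat_chunks))  # ['a', 'b', 'b', 'c'], categories ['a', 'b', 'c']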
clickhouse_driver/opentelemetry.py
@@ -0,0 +1,43 @@
+
+ class OpenTelemetryTraceContext(object):
+     traceparent_tpl = 'xx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx-xx'
+     translation = str.maketrans('1234567890abcdef', 'xxxxxxxxxxxxxxxx')
+
+     def __init__(self, traceparent, tracestate):
+         # xx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx-xx
+         # ^  ^                                ^                ^
+         # version  trace_id                   span_id          flags
+
+         self.trace_id = None  # UUID
+         self.span_id = None  # UInt64
+         self.tracestate = tracestate  # String
+         self.trace_flags = None  # UInt8
+
+         if traceparent is not None:
+             self.parse_traceparent(traceparent)
+
+         super(OpenTelemetryTraceContext, self).__init__()
+
+     def parse_traceparent(self, traceparent):
+         traceparent = traceparent.lower()
+
+         if len(traceparent) != len(self.traceparent_tpl):
+             raise ValueError('unexpected length {}, expected {}'.format(
+                 len(traceparent), len(self.traceparent_tpl)
+             ))
+
+         if traceparent.translate(self.translation) != self.traceparent_tpl:
+             raise ValueError(
+                 'Malformed traceparent header: {}'.format(traceparent)
+             )
+
+         parts = traceparent.split('-')
+         version = int(parts[0], 16)
+         if version != 0:
+             raise ValueError(
+                 'unexpected version {}, expected 00'.format(parts[0])
+             )
+
+         self.trace_id = (int(parts[1][16:], 16) << 64) + int(parts[1][:16], 16)
+         self.span_id = int(parts[2], 16)
+         self.trace_flags = int(parts[3], 16)
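A quick sketch exercising the parser with the traceparent example from the W3C Trace Context spec; the tracestate value is likewise illustrative.

    from clickhouse_driver.opentelemetry import OpenTelemetryTraceContext

    ctx = OpenTelemetryTraceContext(
        '00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01',
        'congo=t61rcWkgMzE',
    )
    print(hex(ctx.trace_id))  # 128-bit id, with the two 64-bit halves swapped
    print(hex(ctx.span_id))   # 0xf067aa0ba902b7
    print(ctx.trace_flags)    # 1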
clickhouse_driver/progress.py
@@ -0,0 +1,44 @@
+ from . import defines
+ from .varint import read_varint
+
+
+ class Progress(object):
+     def __init__(self):
+         self.rows = 0
+         self.bytes = 0
+         self.total_rows = 0  # total_rows_to_read
+         self.total_bytes = 0  # total_bytes_to_read
+         self.written_rows = 0
+         self.written_bytes = 0
+         self.elapsed_ns = 0
+
+         super(Progress, self).__init__()
+
+     def read(self, server_info, fin):
+         self.rows = read_varint(fin)
+         self.bytes = read_varint(fin)
+
+         revision = server_info.used_revision
+         if revision >= defines.DBMS_MIN_REVISION_WITH_TOTAL_ROWS_IN_PROGRESS:
+             self.total_rows = read_varint(fin)
+
+         if revision >= defines. \
+                 DBMS_MIN_PROTOCOL_VERSION_WITH_TOTAL_BYTES_IN_PROGRESS:
+             self.total_bytes = read_varint(fin)
+
+         if revision >= defines.DBMS_MIN_REVISION_WITH_CLIENT_WRITE_INFO:
+             self.written_rows = read_varint(fin)
+             self.written_bytes = read_varint(fin)
+
+         if revision >= defines. \
+                 DBMS_MIN_PROTOCOL_VERSION_WITH_SERVER_QUERY_TIME_IN_PROGRESS:
+             self.elapsed_ns = read_varint(fin)
+
+     def increment(self, another_progress):
+         self.rows += another_progress.rows
+         self.bytes += another_progress.bytes
+         self.total_rows += another_progress.total_rows
+         self.total_bytes += another_progress.total_bytes
+         self.written_rows += another_progress.written_rows
+         self.written_bytes += another_progress.written_bytes
+         self.elapsed_ns += another_progress.elapsed_ns
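A small sketch of the accumulation increment() provides; the rows/bytes values are made up.

    from clickhouse_driver.progress import Progress

    totals = Progress()
    for rows, nbytes in ((1000, 65536), (500, 32768)):
        p = Progress()
        p.rows, p.bytes = rows, nbytes
        totals.increment(p)

    print(totals.rows, totals.bytes)  # 1500 98304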
clickhouse_driver/protocol.py
@@ -0,0 +1,130 @@
+
+ class ClientPacketTypes(object):
+     """
+     Packet types that client transmits
+     """
+     # Name, version, revision, default DB
+     HELLO = 0
+
+     # Query id, query settings, stage up to which the query must be executed,
+     # whether the compression must be used, query text
+     # (without data for INSERTs).
+     QUERY = 1
+
+     # A block of data (compressed or not).
+     DATA = 2
+
+     # Cancel the query execution.
+     CANCEL = 3
+
+     # Check that connection to the server is alive.
+     PING = 4
+
+     # Check status of tables on the server.
+     TABLES_STATUS_REQUEST = 5
+
+     _types_str = [
+         'Hello', 'Query', 'Data', 'Cancel', 'Ping', 'TablesStatusRequest'
+     ]
+
+     @classmethod
+     def to_str(cls, packet):
+         try:
+             return cls._types_str[packet]
+         except IndexError:
+             return 'Unknown packet'
+
+
+ class ServerPacketTypes(object):
+     """
+     Packet types that server transmits.
+     """
+     # Name, version, revision.
+     HELLO = 0
+
+     # A block of data (compressed or not).
+     DATA = 1
+
+     # The exception during query execution.
+     EXCEPTION = 2
+
+     # Query execution progress: rows read, bytes read.
+     PROGRESS = 3
+
+     # Ping response
+     PONG = 4
+
+     # All packets were transmitted
+     END_OF_STREAM = 5
+
+     # Packet with profiling info.
+     PROFILE_INFO = 6
+
+     # A block with totals (compressed or not).
+     TOTALS = 7
+
+     # A block with minimums and maximums (compressed or not).
+     EXTREMES = 8
+
+     # A response to TablesStatus request.
+     TABLES_STATUS_RESPONSE = 9
+
+     # System logs of the query execution
+     LOG = 10
+
+     # Columns' description for default values calculation
+     TABLE_COLUMNS = 11
+
+     # List of unique parts ids.
+     PART_UUIDS = 12
+
+     # String (UUID) describes a request for which next task is needed
+     READ_TASK_REQUEST = 13
+
+     # Packet with profile events from server.
+     PROFILE_EVENTS = 14
+
+     MERGE_TREE_ALL_RANGES_ANNOUNCEMENT = 15
+
+     # Request from a MergeTree replica to a coordinator
+     MERGE_TREE_READ_TASK_REQUEST = 16
+
+     # Receive server's (session-wide) default timezone
+     TIMEZONE_UPDATE = 17
+
+     _types_str = [
+         'Hello', 'Data', 'Exception', 'Progress', 'Pong', 'EndOfStream',
+         'ProfileInfo', 'Totals', 'Extremes', 'TablesStatusResponse', 'Log',
+         'TableColumns', 'PartUUIDs', 'ReadTaskRequest', 'ProfileEvents',
+         'MergeTreeAllRangesAnnouncement', 'MergeTreeReadTaskRequest',
+         'TimezoneUpdate'
+     ]
+
+     @classmethod
+     def to_str(cls, packet):
+         try:
+             return cls._types_str[packet]
+         except IndexError:
+             return 'Unknown packet'
+
+     @classmethod
+     def strings_in_message(cls, packet):
+         if packet == cls.TABLE_COLUMNS:
+             return 2
+         return 0
+
+
+ class Compression(object):
+     DISABLED = 0
+     ENABLED = 1
+
+
+ class CompressionMethod(object):
+     LZ4 = 1
+     LZ4HC = 2
+     ZSTD = 3
+
+
+ class CompressionMethodByte(object):
+     LZ4 = 0x82
+     ZSTD = 0x90
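The packet-type tables are plain integer enums with a reverse string mapping, e.g.:

    from clickhouse_driver.protocol import ClientPacketTypes, ServerPacketTypes

    print(ClientPacketTypes.to_str(ClientPacketTypes.QUERY))      # Query
    print(ServerPacketTypes.to_str(ServerPacketTypes.PROGRESS))   # Progress
    print(ServerPacketTypes.to_str(99))                           # Unknown packet
    print(ServerPacketTypes.strings_in_message(
        ServerPacketTypes.TABLE_COLUMNS))                         # 2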
clickhouse_driver/queryprocessingstage.py
@@ -0,0 +1,8 @@
+
+ class QueryProcessingStage(object):
+     """
+     Determines up to which stage the SELECT query should be executed.
+     """
+     FETCH_COLUMNS = 0
+     WITH_MERGEABLE_STATE = 1
+     COMPLETE = 2
clickhouse_driver/reader.py
@@ -0,0 +1,69 @@
+ from struct import Struct
+
+ from .varint import read_varint
+
+
+ def read_binary_str(buf):
+     length = read_varint(buf)
+     return read_binary_str_fixed_len(buf, length)
+
+
+ def read_binary_bytes(buf):
+     length = read_varint(buf)
+     return read_binary_bytes_fixed_len(buf, length)
+
+
+ def read_binary_str_fixed_len(buf, length):
+     return read_binary_bytes_fixed_len(buf, length).decode('utf-8')
+
+
+ def read_binary_bytes_fixed_len(buf, length):
+     return buf.read(length)
+
+
+ def read_binary_int(buf, fmt):
+     """
+     Reads int from buffer with provided format.
+     """
+     # Little endian.
+     s = Struct('<' + fmt)
+     return s.unpack(buf.read(s.size))[0]
+
+
+ def read_binary_int8(buf):
+     return read_binary_int(buf, 'b')
+
+
+ def read_binary_int16(buf):
+     return read_binary_int(buf, 'h')
+
+
+ def read_binary_int32(buf):
+     return read_binary_int(buf, 'i')
+
+
+ def read_binary_int64(buf):
+     return read_binary_int(buf, 'q')
+
+
+ def read_binary_uint8(buf):
+     return read_binary_int(buf, 'B')
+
+
+ def read_binary_uint16(buf):
+     return read_binary_int(buf, 'H')
+
+
+ def read_binary_uint32(buf):
+     return read_binary_int(buf, 'I')
+
+
+ def read_binary_uint64(buf):
+     return read_binary_int(buf, 'Q')
+
+
+ def read_binary_uint128(buf):
+     hi = read_binary_int(buf, 'Q')
+     lo = read_binary_int(buf, 'Q')
+
+     return (hi << 64) + lo
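The fixed-width readers are thin struct wrappers over any file-like object, so they can be exercised with an in-memory buffer; the byte values here are illustrative.

    from io import BytesIO
    from clickhouse_driver.reader import read_binary_uint32, read_binary_int64

    buf = BytesIO(b'\x2a\x00\x00\x00'                      # uint32 42, little endian
                  + b'\xff\xff\xff\xff\xff\xff\xff\xff')   # int64 -1

    print(read_binary_uint32(buf))  # 42
    print(read_binary_int64(buf))   # -1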
clickhouse_driver/readhelpers.py
@@ -0,0 +1,26 @@
+ from .errors import ServerException
+ from .reader import read_binary_str, read_binary_uint8, read_binary_int32
+
+
+ def read_exception(buf, additional_message=None):
+     code = read_binary_int32(buf)
+     name = read_binary_str(buf)
+     message = read_binary_str(buf)
+     stack_trace = read_binary_str(buf)
+     has_nested = bool(read_binary_uint8(buf))
+
+     new_message = ''
+
+     if additional_message:
+         new_message += additional_message + '. '
+
+     if name != 'DB::Exception':
+         new_message += name + ". "
+
+     new_message += message + ". Stack trace:\n\n" + stack_trace
+
+     nested = None
+     if has_nested:
+         nested = read_exception(buf)
+
+     return ServerException(new_message, code, nested=nested)
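A standalone sketch of the wire layout read_exception consumes: int32 code, three varint-length-prefixed strings (name, message, stack trace), then a uint8 nested flag. Decoding is mirrored locally rather than imported, so the sketch does not depend on the compiled varint module; the code value 60 and the messages are invented.

    import struct
    from io import BytesIO

    def read_varint(buf):
        # LEB128, as in clickhouse_driver.varint.
        shift = result = 0
        while True:
            b = buf.read(1)[0]
            result |= (b & 0x7f) << shift
            if not b & 0x80:
                return result
            shift += 7

    def read_str(buf):
        return buf.read(read_varint(buf)).decode('utf-8')

    buf = BytesIO(
        struct.pack('<i', 60)          # code
        + b'\x0dDB::Exception'         # name (varint length 13)
        + b'\x0eTable is gone.'        # message (length 14)
        + b'\x00'                      # empty stack trace
        + struct.pack('<B', 0)         # has_nested = 0
    )

    print(struct.unpack('<i', buf.read(4))[0], read_str(buf), read_str(buf))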
clickhouse_driver/result.py
@@ -0,0 +1,144 @@
+ from .blockstreamprofileinfo import BlockStreamProfileInfo
+ from .progress import Progress
+
+
+ class QueryResult(object):
+     """
+     Stores query result from multiple blocks.
+     """
+
+     def __init__(
+             self, packet_generator,
+             with_column_types=False, columnar=False):
+         self.packet_generator = packet_generator
+         self.with_column_types = with_column_types
+
+         self.data = []
+         self.columns_with_types = []
+         self.columnar = columnar
+
+         super(QueryResult, self).__init__()
+
+     def store(self, packet):
+         block = getattr(packet, 'block', None)
+         if block is None:
+             return
+
+         # Header block contains no rows. Pick columns from it.
+         if block.num_rows:
+             if self.columnar:
+                 columns = block.get_columns()
+                 if self.data:
+                     # Extend corresponding column.
+                     for i, column in enumerate(columns):
+                         self.data[i].extend(column)
+                 else:
+                     # Cast tuples to lists for further extending.
+                     # Concatenating tuples produces a new tuple. It's slow.
+                     self.data = [list(c) for c in columns]
+             else:
+                 self.data.extend(block.get_rows())
+
+         elif not self.columns_with_types:
+             self.columns_with_types = block.columns_with_types
+
+     def get_result(self):
+         """
+         :return: stored query result.
+         """
+
+         for packet in self.packet_generator:
+             self.store(packet)
+
+         data = self.data
+         if self.columnar:
+             data = [tuple(c) for c in self.data]
+
+         if self.with_column_types:
+             return data, self.columns_with_types
+         else:
+             return data
+
+
+ class ProgressQueryResult(QueryResult):
+     """
+     Stores query result and progress information from multiple blocks.
+     Provides iteration over query progress.
+     """
+
+     def __init__(self, *args, **kwargs):
+         self.progress_totals = Progress()
+         super(ProgressQueryResult, self).__init__(*args, **kwargs)
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         while True:
+             packet = next(self.packet_generator)
+             progress_packet = getattr(packet, 'progress', None)
+             if progress_packet:
+                 self.progress_totals.increment(progress_packet)
+                 return (
+                     self.progress_totals.rows, self.progress_totals.total_rows
+                 )
+             else:
+                 self.store(packet)
+
+     def get_result(self):
+         # Read all progress packets.
+         for _ in self:
+             pass
+
+         return super(ProgressQueryResult, self).get_result()
+
+
+ class IterQueryResult(object):
+     """
+     Provides iteration over returned data by chunks (streaming by chunks).
+     """
+
+     def __init__(
+             self, packet_generator,
+             with_column_types=False):
+         self.packet_generator = packet_generator
+         self.with_column_types = with_column_types
+
+         self.first_block = True
+         super(IterQueryResult, self).__init__()
+
+     def __iter__(self):
+         return self
+
+     def __next__(self):
+         packet = next(self.packet_generator)
+         block = getattr(packet, 'block', None)
+         if block is None:
+             return []
+
+         if self.first_block and self.with_column_types:
+             self.first_block = False
+             rv = [block.columns_with_types]
+             rv.extend(block.get_rows())
+             return rv
+         else:
+             return block.get_rows()
+
+
+ class QueryInfo(object):
+     def __init__(self):
+         self.profile_info = BlockStreamProfileInfo()
+         self.progress = Progress()
+         self.elapsed = 0
+
+     def store_profile(self, profile_info):
+         self.profile_info = profile_info
+
+     def store_progress(self, progress):
+         if self.progress:
+             self.progress.increment(progress)
+         else:
+             self.progress = progress
+
+     def store_elapsed(self, elapsed):
+         self.elapsed = elapsed