clickhouse-driver 0.2.1-cp39-cp39-win_amd64.whl → 0.2.10-cp39-cp39-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. clickhouse_driver/__init__.py +9 -9
  2. clickhouse_driver/block.py +227 -195
  3. clickhouse_driver/blockstreamprofileinfo.py +22 -22
  4. clickhouse_driver/bufferedreader.cp39-win_amd64.pyd +0 -0
  5. clickhouse_driver/bufferedwriter.cp39-win_amd64.pyd +0 -0
  6. clickhouse_driver/client.py +812 -666
  7. clickhouse_driver/clientinfo.py +119 -80
  8. clickhouse_driver/columns/arraycolumn.py +161 -150
  9. clickhouse_driver/columns/base.py +221 -147
  10. clickhouse_driver/columns/boolcolumn.py +7 -0
  11. clickhouse_driver/columns/datecolumn.py +108 -49
  12. clickhouse_driver/columns/datetimecolumn.py +203 -207
  13. clickhouse_driver/columns/decimalcolumn.py +116 -118
  14. clickhouse_driver/columns/enumcolumn.py +129 -119
  15. clickhouse_driver/columns/exceptions.py +12 -12
  16. clickhouse_driver/columns/floatcolumn.py +34 -34
  17. clickhouse_driver/columns/intcolumn.py +157 -157
  18. clickhouse_driver/columns/intervalcolumn.py +33 -33
  19. clickhouse_driver/columns/ipcolumn.py +118 -118
  20. clickhouse_driver/columns/jsoncolumn.py +37 -0
  21. clickhouse_driver/columns/largeint.cp39-win_amd64.pyd +0 -0
  22. clickhouse_driver/columns/lowcardinalitycolumn.py +142 -123
  23. clickhouse_driver/columns/mapcolumn.py +73 -58
  24. clickhouse_driver/columns/nestedcolumn.py +10 -0
  25. clickhouse_driver/columns/nothingcolumn.py +13 -13
  26. clickhouse_driver/columns/nullablecolumn.py +7 -7
  27. clickhouse_driver/columns/nullcolumn.py +15 -15
  28. clickhouse_driver/columns/numpy/base.py +47 -14
  29. clickhouse_driver/columns/numpy/boolcolumn.py +8 -0
  30. clickhouse_driver/columns/numpy/datecolumn.py +19 -12
  31. clickhouse_driver/columns/numpy/datetimecolumn.py +146 -145
  32. clickhouse_driver/columns/numpy/floatcolumn.py +24 -13
  33. clickhouse_driver/columns/numpy/intcolumn.py +43 -43
  34. clickhouse_driver/columns/numpy/lowcardinalitycolumn.py +96 -83
  35. clickhouse_driver/columns/numpy/service.py +58 -80
  36. clickhouse_driver/columns/numpy/stringcolumn.py +78 -76
  37. clickhouse_driver/columns/numpy/tuplecolumn.py +37 -0
  38. clickhouse_driver/columns/service.py +185 -131
  39. clickhouse_driver/columns/simpleaggregatefunctioncolumn.py +7 -7
  40. clickhouse_driver/columns/stringcolumn.py +73 -73
  41. clickhouse_driver/columns/tuplecolumn.py +63 -65
  42. clickhouse_driver/columns/util.py +61 -0
  43. clickhouse_driver/columns/uuidcolumn.py +64 -64
  44. clickhouse_driver/compression/__init__.py +32 -28
  45. clickhouse_driver/compression/base.py +87 -52
  46. clickhouse_driver/compression/lz4.py +21 -55
  47. clickhouse_driver/compression/lz4hc.py +9 -9
  48. clickhouse_driver/compression/zstd.py +20 -51
  49. clickhouse_driver/connection.py +825 -632
  50. clickhouse_driver/context.py +36 -36
  51. clickhouse_driver/dbapi/__init__.py +62 -62
  52. clickhouse_driver/dbapi/connection.py +99 -96
  53. clickhouse_driver/dbapi/cursor.py +370 -368
  54. clickhouse_driver/dbapi/errors.py +40 -40
  55. clickhouse_driver/dbapi/extras.py +73 -0
  56. clickhouse_driver/defines.py +58 -42
  57. clickhouse_driver/errors.py +453 -446
  58. clickhouse_driver/log.py +48 -44
  59. clickhouse_driver/numpy/block.py +8 -8
  60. clickhouse_driver/numpy/helpers.py +28 -25
  61. clickhouse_driver/numpy/result.py +123 -123
  62. clickhouse_driver/opentelemetry.py +43 -0
  63. clickhouse_driver/progress.py +44 -32
  64. clickhouse_driver/protocol.py +130 -105
  65. clickhouse_driver/queryprocessingstage.py +8 -8
  66. clickhouse_driver/reader.py +69 -69
  67. clickhouse_driver/readhelpers.py +26 -26
  68. clickhouse_driver/result.py +144 -144
  69. clickhouse_driver/settings/available.py +405 -405
  70. clickhouse_driver/settings/types.py +50 -50
  71. clickhouse_driver/settings/writer.py +34 -29
  72. clickhouse_driver/streams/compressed.py +88 -88
  73. clickhouse_driver/streams/native.py +108 -90
  74. clickhouse_driver/util/compat.py +39 -0
  75. clickhouse_driver/util/escape.py +94 -55
  76. clickhouse_driver/util/helpers.py +173 -57
  77. clickhouse_driver/varint.cp39-win_amd64.pyd +0 -0
  78. clickhouse_driver/writer.py +67 -67
  79. clickhouse_driver-0.2.10.dist-info/METADATA +215 -0
  80. clickhouse_driver-0.2.10.dist-info/RECORD +89 -0
  81. {clickhouse_driver-0.2.1.dist-info → clickhouse_driver-0.2.10.dist-info}/WHEEL +1 -1
  82. {clickhouse_driver-0.2.1.dist-info → clickhouse_driver-0.2.10.dist-info/licenses}/LICENSE +21 -21
  83. clickhouse_driver-0.2.1.dist-info/METADATA +0 -24
  84. clickhouse_driver-0.2.1.dist-info/RECORD +0 -80
  85. {clickhouse_driver-0.2.1.dist-info → clickhouse_driver-0.2.10.dist-info}/top_level.txt +0 -0
clickhouse_driver/columns/datetimecolumn.py
@@ -1,207 +1,203 @@
- from datetime import datetime
-
- from pytz import timezone as get_timezone, utc
- from tzlocal import get_localzone
-
- from .base import FormatColumn
-
- EPOCH = datetime(1970, 1, 1, tzinfo=utc)
-
-
- class DateTimeColumn(FormatColumn):
-     ch_type = 'DateTime'
-     py_types = (datetime, int)
-     format = 'I'
-
-     def __init__(self, timezone=None, offset_naive=True, **kwargs):
-         self.timezone = timezone
-         self.offset_naive = offset_naive
-         super(DateTimeColumn, self).__init__(**kwargs)
-
-     def after_read_items(self, items, nulls_map=None):
-         tz = self.timezone
-         fromts = datetime.fromtimestamp
-
-         # A bit ugly copy-paste. But it helps save time on items
-         # processing by avoiding lambda calls or if in loop.
-         if self.offset_naive:
-             if tz:
-                 if nulls_map is None:
-                     return tuple(
-                         fromts(item, tz).replace(tzinfo=None)
-                         for item in items
-                     )
-                 else:
-                     return tuple(
-                         (None if is_null else
-                          fromts(items[i], tz).replace(tzinfo=None))
-                         for i, is_null in enumerate(nulls_map)
-                     )
-             else:
-                 if nulls_map is None:
-                     return tuple(fromts(item) for item in items)
-                 else:
-                     return tuple(
-                         (None if is_null else fromts(items[i]))
-                         for i, is_null in enumerate(nulls_map)
-                     )
-
-         else:
-             if nulls_map is None:
-                 return tuple(fromts(item, tz) for item in items)
-             else:
-                 return tuple(
-                     (None if is_null else fromts(items[i], tz))
-                     for i, is_null in enumerate(nulls_map)
-                 )
-
-     def before_write_items(self, items, nulls_map=None):
-         timezone = self.timezone
-         null_value = self.null_value
-         to_timestamp = datetime.timestamp
-
-         for i, item in enumerate(items):
-             if nulls_map and nulls_map[i]:
-                 items[i] = null_value
-                 continue
-
-             if isinstance(item, int):
-                 # support supplying raw integers to avoid
-                 # costly timezone conversions when using datetime
-                 continue
-
-             if timezone:
-                 # Set server's timezone for offset-naive datetime.
-                 if item.tzinfo is None:
-                     item = timezone.localize(item)
-
-                 item = item.astimezone(utc)
-
-             else:
-                 # If datetime is offset-aware use it's timezone.
-                 if item.tzinfo is not None:
-                     item = item.astimezone(utc)
-
-             items[i] = int(to_timestamp(item))
-
-
- class DateTime64Column(DateTimeColumn):
-     ch_type = 'DateTime64'
-     format = 'Q'
-
-     max_scale = 6
-
-     def __init__(self, scale=0, **kwargs):
-         self.scale = scale
-         super(DateTime64Column, self).__init__(**kwargs)
-
-     def after_read_items(self, items, nulls_map=None):
-         scale = float(10 ** self.scale)
-
-         tz = self.timezone
-         fromts = datetime.fromtimestamp
-
-         # A bit ugly copy-paste. But it helps save time on items
-         # processing by avoiding lambda calls or if in loop.
-         if self.offset_naive:
-             if tz:
-                 if nulls_map is None:
-                     return tuple(
-                         fromts(item / scale, tz).replace(tzinfo=None)
-                         for item in items
-                     )
-                 else:
-                     return tuple(
-                         (None if is_null else
-                          fromts(items[i] / scale, tz).replace(tzinfo=None))
-                         for i, is_null in enumerate(nulls_map)
-                     )
-             else:
-                 if nulls_map is None:
-                     return tuple(fromts(item / scale) for item in items)
-                 else:
-                     return tuple(
-                         (None if is_null else fromts(items[i] / scale))
-                         for i, is_null in enumerate(nulls_map)
-                     )
-
-         else:
-             if nulls_map is None:
-                 return tuple(fromts(item / scale, tz) for item in items)
-             else:
-                 return tuple(
-                     (None if is_null else fromts(items[i] / scale, tz))
-                     for i, is_null in enumerate(nulls_map)
-                 )
-
-     def before_write_items(self, items, nulls_map=None):
-         scale = 10 ** self.scale
-         frac_scale = 10 ** (self.max_scale - self.scale)
-
-         timezone = self.timezone
-         null_value = self.null_value
-         to_timestamp = datetime.timestamp
-
-         for i, item in enumerate(items):
-             if nulls_map and nulls_map[i]:
-                 items[i] = null_value
-                 continue
-
-             if isinstance(item, int):
-                 # support supplying raw integers to avoid
-                 # costly timezone conversions when using datetime
-                 continue
-
-             if timezone:
-                 # Set server's timezone for offset-naive datetime.
-                 if item.tzinfo is None:
-                     item = timezone.localize(item)
-
-                 item = item.astimezone(utc)
-
-             else:
-                 # If datetime is offset-aware use it's timezone.
-                 if item.tzinfo is not None:
-                     item = item.astimezone(utc)
-
-             items[i] = (
-                 int(to_timestamp(item)) * scale +
-                 int(item.microsecond / frac_scale)
-             )
-
-
- def create_datetime_column(spec, column_options):
-     if spec.startswith('DateTime64'):
-         cls = DateTime64Column
-         spec = spec[11:-1]
-         params = spec.split(',', 1)
-         column_options['scale'] = int(params[0])
-         if len(params) > 1:
-             spec = params[1].strip() + ')'
-     else:
-         cls = DateTimeColumn
-         spec = spec[9:]
-
-     context = column_options['context']
-
-     tz_name = timezone = None
-     offset_naive = True
-
-     # Use column's timezone if it's specified.
-     if spec and spec[-1] == ')':
-         tz_name = spec[1:-2]
-         offset_naive = False
-     else:
-         if not context.settings.get('use_client_time_zone', False):
-             try:
-                 local_timezone = get_localzone().zone
-             except Exception:
-                 local_timezone = None
-
-             if local_timezone != context.server_info.timezone:
-                 tz_name = context.server_info.timezone
-
-     if tz_name:
-         timezone = get_timezone(tz_name)
-
-     return cls(timezone=timezone, offset_naive=offset_naive, **column_options)
+ from datetime import datetime
+
+ from pytz import timezone as get_timezone, utc
+ from ..util.compat import get_localzone_name_compat
+ from .base import FormatColumn
+
+ EPOCH = datetime(1970, 1, 1, tzinfo=utc)
+
+
+ class DateTimeColumn(FormatColumn):
+     ch_type = 'DateTime'
+     py_types = (datetime, int)
+     format = 'I'
+
+     def __init__(self, timezone=None, offset_naive=True, **kwargs):
+         self.timezone = timezone
+         self.offset_naive = offset_naive
+         super(DateTimeColumn, self).__init__(**kwargs)
+
+     def after_read_items(self, items, nulls_map=None):
+         tz = self.timezone
+         fromts = datetime.fromtimestamp
+
+         # A bit ugly copy-paste. But it helps save time on items
+         # processing by avoiding lambda calls or if in loop.
+         if self.offset_naive:
+             if tz:
+                 if nulls_map is None:
+                     return tuple(
+                         fromts(item, tz).replace(tzinfo=None)
+                         for item in items
+                     )
+                 else:
+                     return tuple(
+                         (None if is_null else
+                          fromts(items[i], tz).replace(tzinfo=None))
+                         for i, is_null in enumerate(nulls_map)
+                     )
+             else:
+                 if nulls_map is None:
+                     return tuple(fromts(item) for item in items)
+                 else:
+                     return tuple(
+                         (None if is_null else fromts(items[i]))
+                         for i, is_null in enumerate(nulls_map)
+                     )
+
+         else:
+             if nulls_map is None:
+                 return tuple(fromts(item, tz) for item in items)
+             else:
+                 return tuple(
+                     (None if is_null else fromts(items[i], tz))
+                     for i, is_null in enumerate(nulls_map)
+                 )
+
+     def before_write_items(self, items, nulls_map=None):
+         timezone = self.timezone
+         null_value = self.null_value
+         to_timestamp = datetime.timestamp
+
+         for i, item in enumerate(items):
+             if nulls_map and nulls_map[i]:
+                 items[i] = null_value
+                 continue
+
+             if isinstance(item, int):
+                 # support supplying raw integers to avoid
+                 # costly timezone conversions when using datetime
+                 continue
+
+             if timezone:
+                 # Set server's timezone for offset-naive datetime.
+                 if item.tzinfo is None:
+                     item = timezone.localize(item)
+
+                 item = item.astimezone(utc)
+
+             else:
+                 # If datetime is offset-aware use it's timezone.
+                 if item.tzinfo is not None:
+                     item = item.astimezone(utc)
+
+             items[i] = int(to_timestamp(item))
+
+
+ class DateTime64Column(DateTimeColumn):
+     ch_type = 'DateTime64'
+     format = 'q'
+
+     max_scale = 6
+
+     def __init__(self, scale=0, **kwargs):
+         self.scale = scale
+         super(DateTime64Column, self).__init__(**kwargs)
+
+     def after_read_items(self, items, nulls_map=None):
+         scale = float(10 ** self.scale)
+
+         tz = self.timezone
+         fromts = datetime.fromtimestamp
+
+         # A bit ugly copy-paste. But it helps save time on items
+         # processing by avoiding lambda calls or if in loop.
+         if self.offset_naive:
+             if tz:
+                 if nulls_map is None:
+                     return tuple(
+                         fromts(item / scale, tz).replace(tzinfo=None)
+                         for item in items
+                     )
+                 else:
+                     return tuple(
+                         (None if is_null else
+                          fromts(items[i] / scale, tz).replace(tzinfo=None))
+                         for i, is_null in enumerate(nulls_map)
+                     )
+             else:
+                 if nulls_map is None:
+                     return tuple(fromts(item / scale) for item in items)
+                 else:
+                     return tuple(
+                         (None if is_null else fromts(items[i] / scale))
+                         for i, is_null in enumerate(nulls_map)
+                     )
+
+         else:
+             if nulls_map is None:
+                 return tuple(fromts(item / scale, tz) for item in items)
+             else:
+                 return tuple(
+                     (None if is_null else fromts(items[i] / scale, tz))
+                     for i, is_null in enumerate(nulls_map)
+                 )
+
+     def before_write_items(self, items, nulls_map=None):
+         scale = 10 ** self.scale
+         frac_scale = 10 ** (self.max_scale - self.scale)
+
+         timezone = self.timezone
+         null_value = self.null_value
+         to_timestamp = datetime.timestamp
+
+         for i, item in enumerate(items):
+             if nulls_map and nulls_map[i]:
+                 items[i] = null_value
+                 continue
+
+             if isinstance(item, int):
+                 # support supplying raw integers to avoid
+                 # costly timezone conversions when using datetime
+                 continue
+
+             if timezone:
+                 # Set server's timezone for offset-naive datetime.
+                 if item.tzinfo is None:
+                     item = timezone.localize(item)
+
+                 item = item.astimezone(utc)
+
+             else:
+                 # If datetime is offset-aware use it's timezone.
+                 if item.tzinfo is not None:
+                     item = item.astimezone(utc)
+
+             items[i] = (
+                 int(to_timestamp(item)) * scale +
+                 int(item.microsecond / frac_scale)
+             )
+
+
+ def create_datetime_column(spec, column_options):
+     if spec.startswith('DateTime64'):
+         cls = DateTime64Column
+         spec = spec[11:-1]
+         params = spec.split(',', 1)
+         column_options['scale'] = int(params[0])
+         if len(params) > 1:
+             spec = params[1].strip() + ')'
+     else:
+         cls = DateTimeColumn
+         spec = spec[9:]
+
+     context = column_options['context']
+
+     tz_name = timezone = None
+     offset_naive = True
+
+     # Use column's timezone if it's specified.
+     if spec and spec[-1] == ')':
+         tz_name = spec[1:-2]
+         offset_naive = False
+     else:
+         if not context.settings.get('use_client_time_zone', False):
+             local_timezone = get_localzone_name_compat()
+             remote_timezone = context.server_info.get_timezone()
+             if local_timezone != remote_timezone:
+                 tz_name = remote_timezone
+
+     if tz_name:
+         timezone = get_timezone(tz_name)
+
+     return cls(timezone=timezone, offset_naive=offset_naive, **column_options)
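
One notable change in the hunk above is DateTime64Column switching its struct format from 'Q' (unsigned 64-bit) to 'q' (signed 64-bit), which is what lets negative, pre-epoch timestamps be encoded at all. The following is an illustrative standalone sketch using only the struct module, not driver internals:

# Sketch only (assumed example, not package code): why the 'Q' -> 'q'
# format change matters for DateTime64 values before 1970.
import struct
from datetime import datetime, timezone

# A DateTime64(3)-style value: milliseconds since the Unix epoch, negative
# because the datetime is before 1970.
ts = int(datetime(1965, 6, 1, tzinfo=timezone.utc).timestamp() * 10 ** 3)

packed = struct.pack('<q', ts)      # signed 64-bit: packs the negative value
try:
    struct.pack('<Q', ts)           # unsigned 64-bit: rejects negative values
except struct.error as exc:
    print('pre-epoch value does not fit an unsigned field:', exc)

The other visible change, replacing tzlocal's get_localzone().zone with the package's own get_localzone_name_compat() helper and comparing it to context.server_info.get_timezone(), keeps the local/server timezone comparison but routes it through a compatibility shim defined in the new clickhouse_driver/util/compat.py listed above.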
clickhouse_driver/columns/decimalcolumn.py
@@ -1,118 +1,116 @@
- from decimal import Decimal, localcontext
-
- from .base import FormatColumn
- from .exceptions import ColumnTypeMismatchException
- from .intcolumn import Int128Column, Int256Column
-
-
- class DecimalColumn(FormatColumn):
-     py_types = (Decimal, float, int)
-     max_precision = None
-     int_size = None
-
-     def __init__(self, precision, scale, types_check=False, **kwargs):
-         self.precision = precision
-         self.scale = scale
-         super(DecimalColumn, self).__init__(**kwargs)
-
-         if types_check:
-             max_signed_int = (1 << (8 * self.int_size - 1)) - 1
-
-             def check_item(value):
-                 if value < -max_signed_int or value > max_signed_int:
-                     raise ColumnTypeMismatchException(value)
-
-             self.check_item = check_item
-
-     def after_read_items(self, items, nulls_map=None):
-         if self.scale >= 1:
-             scale = 10 ** self.scale
-
-             if nulls_map is None:
-                 return tuple(Decimal(item) / scale for item in items)
-             else:
-                 return tuple(
-                     (None if is_null else Decimal(items[i]) / scale)
-                     for i, is_null in enumerate(nulls_map)
-                 )
-         else:
-             if nulls_map is None:
-                 return tuple(Decimal(item) for item in items)
-             else:
-                 return tuple(
-                     (None if is_null else Decimal(items[i]))
-                     for i, is_null in enumerate(nulls_map)
-                 )
-
-     def before_write_items(self, items, nulls_map=None):
-         null_value = self.null_value
-
-         if self.scale >= 1:
-             scale = 10 ** self.scale
-
-             for i, item in enumerate(items):
-                 if nulls_map and nulls_map[i]:
-                     items[i] = null_value
-                 else:
-                     items[i] = int(Decimal(str(item)) * scale)
-
-         else:
-             for i, item in enumerate(items):
-                 if nulls_map and nulls_map[i]:
-                     items[i] = null_value
-                 else:
-                     items[i] = int(Decimal(str(item)))
-
-     # Override default precision to the maximum supported by underlying type.
-     def _write_data(self, items, buf):
-         with localcontext() as ctx:
-             ctx.prec = self.max_precision
-             super(DecimalColumn, self)._write_data(items, buf)
-
-     def _read_data(self, n_items, buf, nulls_map=None):
-         with localcontext() as ctx:
-             ctx.prec = self.max_precision
-             return super(DecimalColumn, self)._read_data(
-                 n_items, buf, nulls_map=nulls_map
-             )
-
-
- class Decimal32Column(DecimalColumn):
-     format = 'i'
-     max_precision = 9
-     int_size = 4
-
-
- class Decimal64Column(DecimalColumn):
-     format = 'q'
-     max_precision = 18
-     int_size = 8
-
-
- class Decimal128Column(DecimalColumn, Int128Column):
-     max_precision = 38
-
-
- class Decimal256Column(DecimalColumn, Int256Column):
-     max_precision = 76
-
-
- def create_decimal_column(spec, column_options):
-     precision, scale = spec[8:-1].split(',')
-     precision, scale = int(precision), int(scale)
-
-     # Maximum precisions for underlying types are:
-     # Int32 10**9
-     # Int64 10**18
-     # Int128 10**38
-     # Int256 10**76
-     if precision <= 9:
-         cls = Decimal32Column
-     elif precision <= 18:
-         cls = Decimal64Column
-     elif precision <= 38:
-         cls = Decimal128Column
-     else:
-         cls = Decimal256Column
-
-     return cls(precision, scale, **column_options)
+ from decimal import Decimal, localcontext
+
+ from .base import FormatColumn
+ from .exceptions import ColumnTypeMismatchException
+ from .intcolumn import Int128Column, Int256Column
+
+
+ class DecimalColumn(FormatColumn):
+     py_types = (Decimal, float, int)
+     max_precision = None
+
+     def __init__(self, precision, scale, types_check=False, **kwargs):
+         self.precision = precision
+         self.scale = scale
+         super(DecimalColumn, self).__init__(**kwargs)
+
+         if types_check:
+             def check_item(value):
+                 parts = str(value).split('.')
+                 int_part = parts[0]
+
+                 if len(int_part) > precision:
+                     raise ColumnTypeMismatchException(value)
+
+             self.check_item = check_item
+
+     def after_read_items(self, items, nulls_map=None):
+         if self.scale >= 1:
+             scale = 10 ** self.scale
+
+             if nulls_map is None:
+                 return tuple(Decimal(item) / scale for item in items)
+             else:
+                 return tuple(
+                     (None if is_null else Decimal(items[i]) / scale)
+                     for i, is_null in enumerate(nulls_map)
+                 )
+         else:
+             if nulls_map is None:
+                 return tuple(Decimal(item) for item in items)
+             else:
+                 return tuple(
+                     (None if is_null else Decimal(items[i]))
+                     for i, is_null in enumerate(nulls_map)
+                 )
+
+     def before_write_items(self, items, nulls_map=None):
+         null_value = self.null_value
+
+         if self.scale >= 1:
+             scale = 10 ** self.scale
+
+             for i, item in enumerate(items):
+                 if nulls_map and nulls_map[i]:
+                     items[i] = null_value
+                 else:
+                     items[i] = int(Decimal(str(item)) * scale)
+
+         else:
+             for i, item in enumerate(items):
+                 if nulls_map and nulls_map[i]:
+                     items[i] = null_value
+                 else:
+                     items[i] = int(Decimal(str(item)))
+
+     # Override default precision to the maximum supported by underlying type.
+     def _write_data(self, items, buf):
+         with localcontext() as ctx:
+             ctx.prec = self.max_precision
+             super(DecimalColumn, self)._write_data(items, buf)
+
+     def _read_data(self, n_items, buf, nulls_map=None):
+         with localcontext() as ctx:
+             ctx.prec = self.max_precision
+             return super(DecimalColumn, self)._read_data(
+                 n_items, buf, nulls_map=nulls_map
+             )
+
+
+ class Decimal32Column(DecimalColumn):
+     format = 'i'
+     max_precision = 9
+
+
+ class Decimal64Column(DecimalColumn):
+     format = 'q'
+     max_precision = 18
+
+
+ class Decimal128Column(DecimalColumn, Int128Column):
+     max_precision = 38
+
+
+ class Decimal256Column(DecimalColumn, Int256Column):
+     max_precision = 76
+
+
+ def create_decimal_column(spec, column_options):
+     precision, scale = spec[8:-1].split(',')
+     precision, scale = int(precision), int(scale)
+
+     # Maximum precisions for underlying types are:
+     # Int32 10**9
+     # Int64 10**18
+     # Int128 10**38
+     # Int256 10**76
+     if precision <= 9:
+         cls = Decimal32Column
+     elif precision <= 18:
+         cls = Decimal64Column
+     elif precision <= 38:
+         cls = Decimal128Column
+     else:
+         cls = Decimal256Column
+
+     return cls(precision, scale, **column_options)
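
The types_check path in DecimalColumn also changes in the hunk above: 0.2.1 range-checked the value against the bounds of the underlying IntN type (hence the dropped int_size attribute), while 0.2.10 compares the length of the value's integer part against the declared precision. A small standalone sketch of the new check follows; the function name and ValueError are illustrative stand-ins, only the splitting logic mirrors the diff:

# Sketch of the 0.2.10-style check shown above (assumed example, not
# package code); the driver raises ColumnTypeMismatchException instead.
def check_decimal_item(value, precision):
    int_part = str(value).split('.')[0]
    if len(int_part) > precision:
        raise ValueError('%r does not fit a Decimal with precision %d'
                         % (value, precision))

check_decimal_item('12345.67', precision=9)        # ok: integer part has 5 digits
# check_decimal_item('12345678901', precision=9)   # would raise: 11 digits > 9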