sqlalchemy-cratedb 0.36.0__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlalchemy_cratedb/__init__.py +59 -0
- sqlalchemy_cratedb/compat/__init__.py +0 -0
- sqlalchemy_cratedb/compat/api13.py +156 -0
- sqlalchemy_cratedb/compat/core10.py +264 -0
- sqlalchemy_cratedb/compat/core14.py +359 -0
- sqlalchemy_cratedb/compat/core20.py +447 -0
- sqlalchemy_cratedb/compiler.py +319 -0
- sqlalchemy_cratedb/dialect.py +372 -0
- sqlalchemy_cratedb/predicate.py +99 -0
- sqlalchemy_cratedb/sa_version.py +28 -0
- sqlalchemy_cratedb/support.py +62 -0
- sqlalchemy_cratedb/type/__init__.py +3 -0
- sqlalchemy_cratedb/type/array.py +144 -0
- sqlalchemy_cratedb/type/geo.py +48 -0
- sqlalchemy_cratedb/type/object.py +92 -0
- sqlalchemy_cratedb-0.36.0.dist-info/LICENSE +178 -0
- sqlalchemy_cratedb-0.36.0.dist-info/METADATA +137 -0
- sqlalchemy_cratedb-0.36.0.dist-info/NOTICE +24 -0
- sqlalchemy_cratedb-0.36.0.dist-info/RECORD +22 -0
- sqlalchemy_cratedb-0.36.0.dist-info/WHEEL +6 -0
- sqlalchemy_cratedb-0.36.0.dist-info/entry_points.txt +2 -0
- sqlalchemy_cratedb-0.36.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,319 @@
|
|
1
|
+
# -*- coding: utf-8; -*-
|
2
|
+
#
|
3
|
+
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
|
4
|
+
# license agreements. See the NOTICE file distributed with this work for
|
5
|
+
# additional information regarding copyright ownership. Crate licenses
|
6
|
+
# this file to you under the Apache License, Version 2.0 (the "License");
|
7
|
+
# you may not use this file except in compliance with the License. You may
|
8
|
+
# obtain a copy of the License at
|
9
|
+
#
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11
|
+
#
|
12
|
+
# Unless required by applicable law or agreed to in writing, software
|
13
|
+
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
14
|
+
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
15
|
+
# License for the specific language governing permissions and limitations
|
16
|
+
# under the License.
|
17
|
+
#
|
18
|
+
# However, if you have executed another commercial license agreement
|
19
|
+
# with Crate these terms will supersede the license and you may use the
|
20
|
+
# software solely pursuant to the terms of the relevant commercial agreement.
|
21
|
+
|
22
|
+
import string
|
23
|
+
import warnings
|
24
|
+
from collections import defaultdict
|
25
|
+
|
26
|
+
import sqlalchemy as sa
|
27
|
+
from sqlalchemy.dialects.postgresql.base import PGCompiler
|
28
|
+
from sqlalchemy.sql import compiler
|
29
|
+
from sqlalchemy.types import String
|
30
|
+
from .type.geo import Geopoint, Geoshape
|
31
|
+
from .type.object import MutableDict, ObjectTypeImpl
|
32
|
+
from .sa_version import SA_VERSION, SA_1_4
|
33
|
+
|
34
|
+
|
35
|
+
def rewrite_update(clauseelement, multiparams, params):
    """Rewrite UPDATE parameters so CrateDB can do partial OBJECT updates.

    SQLAlchemy, by default, only supports updates of complex types in the
    form of::

        "col = ?", ({"x": 1, "y": 2})

    but CrateDB supports subscript assignments::

        "col['x'] = ?, col['y'] = ?", (1, 2)

    by using the `ObjectType` (`MutableDict`) type. The update statement is
    only rewritten if an item of the `MutableDict` was changed or deleted.
    """
    param_sets = multiparams[0]
    if not param_sets:
        # Nothing to rewrite; hand the statement back untouched.
        return clauseelement, multiparams, params

    rewritten = []
    for param_set in param_sets:
        converted = {}
        for column, value in param_set.items():
            # Only dirty MutableDict values are expanded into subscript
            # assignments; everything else passes through unchanged.
            is_dirty_object = (
                isinstance(value, MutableDict)
                and (any(value._changed_keys) or any(value._deleted_keys))
            )
            if not is_dirty_object:
                converted[column] = value
                continue

            for inner_key, inner_value in value.items():
                if inner_key in value._changed_keys:
                    converted["{0}['{1}']".format(column, inner_key)] = inner_value
            # Deleted keys are nulled out, since CrateDB has no DELETE-key op.
            for inner_key in value._deleted_keys:
                converted["{0}['{1}']".format(column, inner_key)] = None
        rewritten.append(converted)

    clause = clauseelement.values(rewritten[0])
    # Marker consumed by the Crate statement compiler.
    clause._crate_specific = True
    return clause, (rewritten, ), params
|
74
|
+
|
75
|
+
|
76
|
+
@sa.event.listens_for(sa.engine.Engine, "before_execute", retval=True)
def crate_before_execute(conn, clauseelement, multiparams, params, *args, **kwargs):
    """Engine-level hook that rewrites UPDATE statements for CrateDB.

    Delegates to `rewrite_update` so that dirty `MutableDict` values are
    expanded into CrateDB subscript assignments. `retval=True` means the
    (possibly rewritten) statement and parameters returned here replace
    the originals.
    """
    # Only act on statements destined for the Crate dialect; other
    # dialects pass through unchanged.
    is_crate = type(conn.dialect).__name__ == 'CrateDialect'
    if is_crate and isinstance(clauseelement, sa.sql.expression.Update):
        if SA_VERSION >= SA_1_4:
            # SQLAlchemy >= 1.4 delivers a single `params` dict; wrap it
            # into the pre-1.4 `multiparams` shape that `rewrite_update`
            # expects.
            if params is None:
                multiparams = ([],)
            else:
                multiparams = ([params],)
            params = {}

        clauseelement, multiparams, params = rewrite_update(clauseelement, multiparams, params)

        if SA_VERSION >= SA_1_4:
            # Convert back: unwrap the rewritten parameter set into the
            # 1.4-style `params` dict and clear `multiparams`.
            if multiparams[0]:
                params = multiparams[0][0]
            else:
                params = multiparams[0]
            multiparams = []

    return clauseelement, multiparams, params
|
97
|
+
|
98
|
+
|
99
|
+
class CrateDDLCompiler(compiler.DDLCompiler):
    """DDL compiler emitting CrateDB-flavored CREATE TABLE statements.

    Handles CrateDB-specific column options (`crate_index`,
    `crate_columnstore`) and table options (`crate_partitioned_by`,
    `crate_clustered_by`, `crate_number_of_shards`, plus generic
    `WITH (...)` settings), and drops constraint types CrateDB does
    not support.
    """

    # Table-option templates keyed by the upper-cased option name
    # (dialect prefix already stripped).
    __special_opts_tmpl = {
        'PARTITIONED_BY': ' PARTITIONED BY ({0})'
    }
    __clustered_opts_tmpl = {
        'NUMBER_OF_SHARDS': ' INTO {0} SHARDS',
        'CLUSTERED_BY': ' BY ({0})',
    }
    # Combined via string.Formatter; missing pieces default to '' thanks
    # to the defaultdict used in post_create_table.
    __clustered_opt_tmpl = ' CLUSTERED{CLUSTERED_BY}{NUMBER_OF_SHARDS}'

    def get_column_specification(self, column, **kwargs):
        """Render one column clause: name, type, DEFAULT/GENERATED,
        NOT NULL, and CrateDB storage options."""
        colspec = self.preparer.format_column(column) + " " + \
            self.dialect.type_compiler.process(column.type)

        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default

        if column.computed is not None:
            colspec += " " + self.process(column.computed)

        if column.nullable is False:
            colspec += " NOT NULL"
        elif column.nullable and column.primary_key:
            # CrateDB primary keys are implicitly NOT NULL; an explicitly
            # nullable PK column is a contradiction, so fail compilation.
            raise sa.exc.CompileError(
                "Primary key columns cannot be nullable"
            )

        # `crate_index=False` disables indexing for the column.
        if column.dialect_options['crate'].get('index') is False:
            if isinstance(column.type, (Geopoint, Geoshape, ObjectTypeImpl)):
                raise sa.exc.CompileError(
                    "Disabling indexing is not supported for column "
                    "types OBJECT, GEO_POINT, and GEO_SHAPE"
                )

            colspec += " INDEX OFF"

        # `crate_columnstore=False` disables the column store; CrateDB
        # only allows this for text columns.
        if column.dialect_options['crate'].get('columnstore') is False:
            if not isinstance(column.type, (String, )):
                raise sa.exc.CompileError(
                    "Controlling the columnstore is only allowed for STRING columns"
                )

            colspec += " STORAGE WITH (columnstore = false)"

        return colspec

    def visit_computed_column(self, generated):
        """Render a computed column; CrateDB only supports stored
        (persisted) generated columns."""
        if generated.persisted is False:
            raise sa.exc.CompileError(
                "Virtual computed columns are not supported, set "
                "'persisted' to None or True"
            )

        return "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
            generated.sqltext, include_table=False, literal_binds=True
        )

    def post_create_table(self, table):
        """Append CrateDB table options after the closing paren of
        CREATE TABLE: PARTITIONED BY, CLUSTERED ..., and WITH (...)."""
        special_options = ''
        # defaultdict(str) so absent CLUSTERED pieces format as ''.
        clustered_options = defaultdict(str)
        table_opts = []

        # Collect kwargs prefixed with the dialect name ('crate_*'),
        # strip the prefix, and upper-case the option name.
        opts = dict(
            (k[len(self.dialect.name) + 1:].upper(), v)
            for k, v, in table.kwargs.items()
            if k.startswith('%s_' % self.dialect.name)
        )
        for k, v in opts.items():
            if k in self.__special_opts_tmpl:
                special_options += self.__special_opts_tmpl[k].format(v)
            elif k in self.__clustered_opts_tmpl:
                clustered_options[k] = self.__clustered_opts_tmpl[k].format(v)
            else:
                # Anything unrecognized becomes a generic WITH setting.
                table_opts.append('{0} = {1}'.format(k, v))
        if clustered_options:
            special_options += string.Formatter().vformat(
                self.__clustered_opt_tmpl, (), clustered_options)
        if table_opts:
            # Sorted for deterministic DDL output.
            return special_options + ' WITH ({0})'.format(
                ', '.join(sorted(table_opts)))
        return special_options

    def visit_foreign_key_constraint(self, constraint, **kw):
        """
        CrateDB does not support foreign key constraints.
        """
        # Returning None drops the constraint from the emitted DDL.
        warnings.warn("CrateDB does not support foreign key constraints, "
                      "they will be omitted when generating DDL statements.")
        return None

    def visit_unique_constraint(self, constraint, **kw):
        """
        CrateDB does not support unique key constraints.
        """
        # Returning None drops the constraint from the emitted DDL.
        warnings.warn("CrateDB does not support unique constraints, "
                      "they will be omitted when generating DDL statements.")
        return None
|
198
|
+
|
199
|
+
|
200
|
+
class CrateTypeCompiler(compiler.GenericTypeCompiler):
    """Map SQLAlchemy types to CrateDB type names in DDL output."""

    def visit_string(self, type_, **kw):
        return 'STRING'

    def visit_unicode(self, type_, **kw):
        return 'STRING'

    def visit_TEXT(self, type_, **kw):
        return 'STRING'

    def visit_DECIMAL(self, type_, **kw):
        # CrateDB has no arbitrary-precision DECIMAL column type here;
        # fall back to DOUBLE (precision/scale are dropped).
        return 'DOUBLE'

    def visit_BIGINT(self, type_, **kw):
        return 'LONG'

    def visit_NUMERIC(self, type_, **kw):
        # NUMERIC is mapped to LONG (integer), mirroring the reverse
        # mapping in TYPES_MAP ("long"/"bigint" -> NUMERIC).
        return 'LONG'

    def visit_INTEGER(self, type_, **kw):
        return 'INT'

    def visit_SMALLINT(self, type_, **kw):
        return 'SHORT'

    def visit_datetime(self, type_, **kw):
        # CrateDB stores both datetimes and dates as TIMESTAMP.
        return 'TIMESTAMP'

    def visit_date(self, type_, **kw):
        return 'TIMESTAMP'

    def visit_ARRAY(self, type_, **kw):
        # CrateDB arrays are single-dimensional only.
        if type_.dimensions is not None and type_.dimensions > 1:
            raise NotImplementedError(
                "CrateDB doesn't support multidimensional arrays")
        return 'ARRAY({0})'.format(self.process(type_.item_type))

    def visit_OBJECT(self, type_, **kw):
        return "OBJECT"
|
240
|
+
|
241
|
+
|
242
|
+
class CrateCompiler(compiler.SQLCompiler):
    """SQL statement compiler with CrateDB-specific rendering.

    Adds subscript access for OBJECT columns, ANY comparisons,
    version-gated native ILIKE, PostgreSQL-style LIMIT/OFFSET, and
    drops the unsupported FOR UPDATE clause.
    """

    def visit_getitem_binary(self, binary, operator, **kw):
        """Render OBJECT subscript access as `col['key']`."""
        # NOTE(review): the key is interpolated verbatim, without quoting
        # or escaping — presumably keys never contain a single quote;
        # verify against callers.
        return "{0}['{1}']".format(
            self.process(binary.left, **kw),
            binary.right.value
        )

    def visit_json_getitem_op_binary(
            self, binary, operator, _cast_applied=False, **kw
    ):
        """Render JSON item access the same way as OBJECT subscripts."""
        return "{0}['{1}']".format(
            self.process(binary.left, **kw),
            binary.right.value
        )

    def visit_any(self, element, **kw):
        """Render `left <op> ANY (right)` comparisons."""
        return "%s%sANY (%s)" % (
            self.process(element.left, **kw),
            compiler.OPERATORS[element.operator],
            self.process(element.right, **kw)
        )

    def visit_ilike_case_insensitive_operand(self, element, **kw):
        """
        Use native `ILIKE` operator, like PostgreSQL's `PGCompiler`.
        """
        # With native ILIKE there is no need to wrap the operand in
        # lower(); dispatch the bare element instead.
        if self.dialect.has_ilike_operator():
            return element.element._compiler_dispatch(self, **kw)
        else:
            return super().visit_ilike_case_insensitive_operand(element, **kw)

    def visit_ilike_op_binary(self, binary, operator, **kw):
        """
        Use native `ILIKE` operator, like PostgreSQL's `PGCompiler`.

        Do not implement the `ESCAPE` functionality, because it is not
        supported by CrateDB.
        """
        if binary.modifiers.get("escape", None) is not None:
            raise NotImplementedError("Unsupported feature: ESCAPE is not supported")
        if self.dialect.has_ilike_operator():
            return "%s ILIKE %s" % (
                self.process(binary.left, **kw),
                self.process(binary.right, **kw),
            )
        else:
            # Older CrateDB: fall back to the lower()-based emulation.
            return super().visit_ilike_op_binary(binary, operator, **kw)

    def visit_not_ilike_op_binary(self, binary, operator, **kw):
        """
        Use native `ILIKE` operator, like PostgreSQL's `PGCompiler`.

        Do not implement the `ESCAPE` functionality, because it is not
        supported by CrateDB.
        """
        if binary.modifiers.get("escape", None) is not None:
            raise NotImplementedError("Unsupported feature: ESCAPE is not supported")
        if self.dialect.has_ilike_operator():
            return "%s NOT ILIKE %s" % (
                self.process(binary.left, **kw),
                self.process(binary.right, **kw),
            )
        else:
            return super().visit_not_ilike_op_binary(binary, operator, **kw)

    def limit_clause(self, select, **kw):
        """
        Generate OFFSET / LIMIT clause, PostgreSQL-compatible.
        """
        return PGCompiler.limit_clause(self, select, **kw)

    def for_update_clause(self, select, **kw):
        # CrateDB does not support the `SELECT ... FOR UPDATE` clause.
        # See https://github.com/crate/crate-python/issues/577.
        #
        # Fix: the warning previously said "INSERT ... FOR UPDATE", but
        # FOR UPDATE is a clause of SELECT statements (this hook is only
        # invoked while compiling a SELECT).
        warnings.warn("CrateDB does not support the 'SELECT ... FOR UPDATE' clause, "
                      "it will be omitted when generating SQL statements.")
        return ''
|
@@ -0,0 +1,372 @@
|
|
1
|
+
# -*- coding: utf-8; -*-
|
2
|
+
#
|
3
|
+
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
|
4
|
+
# license agreements. See the NOTICE file distributed with this work for
|
5
|
+
# additional information regarding copyright ownership. Crate licenses
|
6
|
+
# this file to you under the Apache License, Version 2.0 (the "License");
|
7
|
+
# you may not use this file except in compliance with the License. You may
|
8
|
+
# obtain a copy of the License at
|
9
|
+
#
|
10
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
11
|
+
#
|
12
|
+
# Unless required by applicable law or agreed to in writing, software
|
13
|
+
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
14
|
+
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
15
|
+
# License for the specific language governing permissions and limitations
|
16
|
+
# under the License.
|
17
|
+
#
|
18
|
+
# However, if you have executed another commercial license agreement
|
19
|
+
# with Crate these terms will supersede the license and you may use the
|
20
|
+
# software solely pursuant to the terms of the relevant commercial agreement.
|
21
|
+
|
22
|
+
import logging
|
23
|
+
from datetime import datetime, date
|
24
|
+
|
25
|
+
from sqlalchemy import types as sqltypes
|
26
|
+
from sqlalchemy.engine import default, reflection
|
27
|
+
from sqlalchemy.sql import functions
|
28
|
+
from sqlalchemy.util import asbool, to_list
|
29
|
+
|
30
|
+
from .compiler import (
|
31
|
+
CrateTypeCompiler,
|
32
|
+
CrateDDLCompiler
|
33
|
+
)
|
34
|
+
from crate.client.exceptions import TimezoneUnawareException
|
35
|
+
from .sa_version import SA_VERSION, SA_1_4, SA_2_0
|
36
|
+
from .type import ObjectArray, ObjectType
|
37
|
+
|
38
|
+
# Reflection map: CrateDB `information_schema` data_type names to
# SQLAlchemy types. Note the deliberate widenings: long/bigint -> NUMERIC
# and double -> DECIMAL.
TYPES_MAP = {
    "boolean": sqltypes.Boolean,
    "short": sqltypes.SmallInteger,
    "smallint": sqltypes.SmallInteger,
    "timestamp": sqltypes.TIMESTAMP,
    "timestamp with time zone": sqltypes.TIMESTAMP,
    "object": ObjectType,
    "integer": sqltypes.Integer,
    "long": sqltypes.NUMERIC,
    "bigint": sqltypes.NUMERIC,
    "double": sqltypes.DECIMAL,
    "double precision": sqltypes.DECIMAL,
    "object_array": ObjectArray,
    "float": sqltypes.Float,
    "real": sqltypes.Float,
    "string": sqltypes.String,
    "text": sqltypes.String
}
try:
    # SQLAlchemy >= 1.1
    # Best-effort: array reflection entries are only registered when the
    # generic ARRAY type is importable; on older SQLAlchemy the mappings
    # are simply absent.
    from sqlalchemy.types import ARRAY
    TYPES_MAP["integer_array"] = ARRAY(sqltypes.Integer)
    TYPES_MAP["boolean_array"] = ARRAY(sqltypes.Boolean)
    TYPES_MAP["short_array"] = ARRAY(sqltypes.SmallInteger)
    TYPES_MAP["smallint_array"] = ARRAY(sqltypes.SmallInteger)
    TYPES_MAP["timestamp_array"] = ARRAY(sqltypes.TIMESTAMP)
    TYPES_MAP["timestamp with time zone_array"] = ARRAY(sqltypes.TIMESTAMP)
    TYPES_MAP["long_array"] = ARRAY(sqltypes.NUMERIC)
    TYPES_MAP["bigint_array"] = ARRAY(sqltypes.NUMERIC)
    TYPES_MAP["double_array"] = ARRAY(sqltypes.DECIMAL)
    TYPES_MAP["double precision_array"] = ARRAY(sqltypes.DECIMAL)
    TYPES_MAP["float_array"] = ARRAY(sqltypes.Float)
    TYPES_MAP["real_array"] = ARRAY(sqltypes.Float)
    TYPES_MAP["string_array"] = ARRAY(sqltypes.String)
    TYPES_MAP["text_array"] = ARRAY(sqltypes.String)
except Exception:
    pass


# Module-level logger, used by the Date/DateTime result processors below.
log = logging.getLogger(__name__)
|
78
|
+
|
79
|
+
|
80
|
+
class Date(sqltypes.Date):
    """Date type that converts to/from CrateDB TIMESTAMP values.

    Outbound values are serialized as ISO date strings; inbound values
    are expected to be epoch milliseconds, with string fallbacks.
    """

    def bind_processor(self, dialect):
        # Serialize a `date` to 'YYYY-MM-DD' for the wire; None passes
        # through (the inner function implicitly returns None).
        def process(value):
            if value is not None:
                assert isinstance(value, date)
                return value.strftime('%Y-%m-%d')
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            # NOTE(review): falsy values (including a legitimate epoch
            # timestamp of 0) are returned as None here.
            if not value:
                return
            try:
                # Usual case: epoch milliseconds from CrateDB.
                return datetime.utcfromtimestamp(value / 1e3).date()
            except TypeError:
                pass

            # Crate doesn't really have datetime or date types but a
            # timestamp type. The "date" mapping (conversion to long)
            # is only applied if the schema definition for the column exists
            # and if the sql insert statement was used.
            # In case of dynamic mapping or using the rest indexing endpoint
            # the date will be returned in the format it was inserted.
            log.warning(
                "Received timestamp isn't a long value."
                "Trying to parse as date string and then as datetime string")
            try:
                return datetime.strptime(value, '%Y-%m-%d').date()
            except ValueError:
                # Last resort: full ISO datetime string; raises ValueError
                # if this fails too.
                return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ').date()
        return process
|
111
|
+
|
112
|
+
|
113
|
+
class DateTime(sqltypes.DateTime):
    """DateTime type that converts to/from CrateDB TIMESTAMP values.

    Only timezone-naive datetimes are accepted outbound; inbound values
    are expected to be epoch milliseconds, with string fallbacks.
    """

    TZ_ERROR_MSG = "Timezone aware datetime objects are not supported"

    def bind_processor(self, dialect):
        def process(value):
            if value is not None:
                assert isinstance(value, datetime)
                # Reject aware datetimes explicitly instead of silently
                # dropping the offset.
                if value.tzinfo is not None:
                    raise TimezoneUnawareException(DateTime.TZ_ERROR_MSG)
                return value.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
            return value
        return process

    def result_processor(self, dialect, coltype):
        def process(value):
            # NOTE(review): falsy values (including epoch 0) come back
            # as None here.
            if not value:
                return
            try:
                # Usual case: epoch milliseconds from CrateDB.
                return datetime.utcfromtimestamp(value / 1e3)
            except TypeError:
                pass

            # Crate doesn't really have datetime or date types but a
            # timestamp type. The "date" mapping (conversion to long)
            # is only applied if the schema definition for the column exists
            # and if the sql insert statement was used.
            # In case of dynamic mapping or using the rest indexing endpoint
            # the date will be returned in the format it was inserted.
            log.warning(
                "Received timestamp isn't a long value."
                "Trying to parse as datetime string and then as date string")
            try:
                return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ')
            except ValueError:
                # Fallback: bare date string; raises ValueError if this
                # fails too.
                return datetime.strptime(value, '%Y-%m-%d')
        return process
|
150
|
+
|
151
|
+
|
152
|
+
# Override SQLAlchemy's generic date/time types with the Crate-aware
# implementations above (applied via CrateDialect.colspecs).
colspecs = {
    sqltypes.DateTime: DateTime,
    sqltypes.Date: Date
}


# Select the statement compiler matching the installed SQLAlchemy
# generation; each compat module subclasses CrateCompiler with the
# version-appropriate parameter handling.
if SA_VERSION >= SA_2_0:
    from .compat.core20 import CrateCompilerSA20
    statement_compiler = CrateCompilerSA20
elif SA_VERSION >= SA_1_4:
    from .compat.core14 import CrateCompilerSA14
    statement_compiler = CrateCompilerSA14
else:
    from .compat.core10 import CrateCompilerSA10
    statement_compiler = CrateCompilerSA10
|
167
|
+
|
168
|
+
|
169
|
+
class CrateDialect(default.DefaultDialect):
    """SQLAlchemy dialect for CrateDB, backed by the `crate-python` DBAPI.

    Implements connection setup, schema/table/column reflection against
    CrateDB's `information_schema`, and feature flags matching CrateDB's
    capabilities (no transactions, no foreign keys, no indexes API).
    """

    name = 'crate'
    driver = 'crate-python'
    default_paramstyle = 'qmark'
    statement_compiler = statement_compiler
    ddl_compiler = CrateDDLCompiler
    type_compiler = CrateTypeCompiler
    use_insertmanyvalues = True
    use_insertmanyvalues_wo_returning = True
    supports_multivalues_insert = True
    supports_native_boolean = True
    supports_statement_cache = True
    colspecs = colspecs
    implicit_returning = True
    insert_returning = True
    update_returning = True

    def __init__(self, **kwargs):
        default.DefaultDialect.__init__(self, **kwargs)

        # CrateDB does not need `OBJECT` types to be serialized as JSON.
        # Corresponding data is forwarded 1:1, and will get marshalled
        # by the low-level driver.
        self._json_deserializer = lambda x: x
        self._json_serializer = lambda x: x

        # Currently, our SQL parser doesn't support unquoted column names that
        # start with _. Adding it here causes sqlalchemy to quote such columns.
        self.identifier_preparer.illegal_initial_characters.add('_')

    def initialize(self, connection):
        """Probe server version and default schema on first connect."""
        # get lowest server version
        self.server_version_info = \
            self._get_server_version_info(connection)
        # get default schema name
        self.default_schema_name = \
            self._get_default_schema_name(connection)

    def do_rollback(self, connection):
        # if any exception is raised by the dbapi, sqlalchemy by default
        # attempts to do a rollback crate doesn't support rollbacks.
        # implementing this as noop seems to cause sqlalchemy to propagate the
        # original exception to the user
        pass

    def connect(self, host=None, port=None, *args, **kwargs):
        """Open a DBAPI connection; port defaults to 4200, and a
        'servers' kwarg (string or list) overrides host/port."""
        server = None
        if host:
            server = '{0}:{1}'.format(host, port or '4200')
        if 'servers' in kwargs:
            server = kwargs.pop('servers')
        servers = to_list(server)
        if servers:
            # `ssl=True` prefixes every server with https://.
            use_ssl = asbool(kwargs.pop("ssl", False))
            if use_ssl:
                servers = ["https://" + server for server in servers]
            return self.dbapi.connect(servers=servers, **kwargs)
        return self.dbapi.connect(**kwargs)

    def _get_default_schema_name(self, connection):
        # CrateDB's default schema is always 'doc'.
        return 'doc'

    def _get_server_version_info(self, connection):
        # Lowest version across the cluster, as a (major, minor, patch)
        # tuple, so feature gates hold on mixed-version clusters.
        return tuple(connection.connection.lowest_server_version.version)

    @classmethod
    def import_dbapi(cls):
        # SQLAlchemy 2.x entry point for loading the DBAPI module.
        from crate import client
        return client

    @classmethod
    def dbapi(cls):
        # Legacy (SQLAlchemy < 2) entry point; delegates to import_dbapi.
        return cls.import_dbapi()

    def has_schema(self, connection, schema, **kw):
        return schema in self.get_schema_names(connection, **kw)

    def has_table(self, connection, table_name, schema=None, **kw):
        return table_name in self.get_table_names(connection, schema=schema, **kw)

    @reflection.cache
    def get_schema_names(self, connection, **kw):
        """Return all schema names, sorted ascending."""
        cursor = connection.exec_driver_sql(
            "select schema_name "
            "from information_schema.schemata "
            "order by schema_name asc"
        )
        return [row[0] for row in cursor.fetchall()]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        """Return BASE TABLE names in `schema` (default schema if None)."""
        cursor = connection.exec_driver_sql(
            "SELECT table_name FROM information_schema.tables "
            "WHERE {0} = ? "
            "AND table_type = 'BASE TABLE' "
            "ORDER BY table_name ASC, {0} ASC".format(self.schema_column),
            (schema or self.default_schema_name, )
        )
        return [row[0] for row in cursor.fetchall()]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        """Return view names in `schema` (default schema if None)."""
        cursor = connection.exec_driver_sql(
            "SELECT table_name FROM information_schema.views "
            "ORDER BY table_name ASC, {0} ASC".format(self.schema_column),
            (schema or self.default_schema_name, )
        )
        return [row[0] for row in cursor.fetchall()]

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        """Reflect top-level columns of a table; subscripted OBJECT
        sub-columns (e.g. col['x']) are filtered out via regex."""
        query = "SELECT column_name, data_type " \
                "FROM information_schema.columns " \
                "WHERE table_name = ? AND {0} = ? " \
                "AND column_name !~ ?" \
            .format(self.schema_column)
        cursor = connection.exec_driver_sql(
            query,
            (table_name,
             schema or self.default_schema_name,
             r"(.*)\[\'(.*)\'\]")  # regex to filter subscript
        )
        return [self._create_column_info(row) for row in cursor.fetchall()]

    @reflection.cache
    def get_pk_constraint(self, engine, table_name, schema=None, **kw):
        """Reflect primary-key columns, with queries matching the
        server's information_schema layout per version."""
        if self.server_version_info >= (3, 0, 0):
            # >= 3.0: key_column_usage has table_schema.
            query = """SELECT column_name
                    FROM information_schema.key_column_usage
                    WHERE table_name = ? AND table_schema = ?"""

            def result_fun(result):
                rows = result.fetchall()
                return set(map(lambda el: el[0], rows))

        elif self.server_version_info >= (2, 3, 0):
            # 2.3 - 2.x: schema lives in table_catalog instead.
            query = """SELECT column_name
                    FROM information_schema.key_column_usage
                    WHERE table_name = ? AND table_catalog = ?"""

            def result_fun(result):
                rows = result.fetchall()
                return set(map(lambda el: el[0], rows))

        else:
            # < 2.3: only table_constraints exists; a single row holds
            # the list of PK columns in constraint_name.
            query = """SELECT constraint_name
                   FROM information_schema.table_constraints
                   WHERE table_name = ? AND {schema_col} = ?
                   AND constraint_type='PRIMARY_KEY'
                   """.format(schema_col=self.schema_column)

            def result_fun(result):
                rows = result.fetchone()
                return set(rows[0] if rows else [])

        pk_result = engine.exec_driver_sql(
            query,
            (table_name, schema or self.default_schema_name)
        )
        pks = result_fun(pk_result)
        return {'constrained_columns': pks,
                'name': 'PRIMARY KEY'}

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None,
                         postgresql_ignore_search_path=False, **kw):
        # Crate doesn't support Foreign Keys, so this stays empty
        return []

    @reflection.cache
    def get_indexes(self, connection, table_name, schema, **kw):
        # CrateDB has no reflectable secondary indexes.
        return []

    @property
    def schema_column(self):
        # Name of the schema column in information_schema tables.
        return "table_schema"

    def _create_column_info(self, row):
        """Convert a (column_name, data_type) row to SQLAlchemy's
        reflection dict format."""
        return {
            'name': row[0],
            'type': self._resolve_type(row[1]),
            # In Crate every column is nullable except PK
            # Primary Key Constraints are not nullable anyway, no matter what
            # we return here, so it's fine to return always `True`
            'nullable': True
        }

    def _resolve_type(self, type_):
        # Unknown server types fall back to UserDefinedType.
        return TYPES_MAP.get(type_, sqltypes.UserDefinedType)

    def has_ilike_operator(self):
        """
        Only CrateDB 4.1.0 and higher implements the `ILIKE` operator.
        """
        server_version_info = self.server_version_info
        return server_version_info is not None and server_version_info >= (4, 1, 0)
|
365
|
+
|
366
|
+
|
367
|
+
class DateTrunc(functions.GenericFunction):
    """Register CrateDB's `date_trunc` as a generic SQL function
    returning TIMESTAMP, usable via `sa.func.date_trunc(...)`."""
    name = "date_trunc"
    type = sqltypes.TIMESTAMP


# Entry point expected by SQLAlchemy's dialect registry.
dialect = CrateDialect
|