sqlalchemy-cratedb 0.38.0.dev0__py3-none-any.whl → 0.39.0__py3-none-any.whl

This diff shows the content changes between two publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.
@@ -24,17 +24,18 @@ import warnings
  from collections import defaultdict

  import sqlalchemy as sa
- from sqlalchemy.dialects.postgresql.base import PGCompiler
  from sqlalchemy.dialects.postgresql.base import RESERVED_WORDS as POSTGRESQL_RESERVED_WORDS
+ from sqlalchemy.dialects.postgresql.base import PGCompiler
  from sqlalchemy.sql import compiler
  from sqlalchemy.types import String
+
+ from .sa_version import SA_1_4, SA_VERSION
  from .type.geo import Geopoint, Geoshape
  from .type.object import MutableDict, ObjectTypeImpl
- from .sa_version import SA_VERSION, SA_1_4


  def rewrite_update(clauseelement, multiparams, params):
-     """ change the params to enable partial updates
+     """change the params to enable partial updates

      sqlalchemy by default only supports updates of complex types in the form of
@@ -55,9 +56,8 @@ def rewrite_update(clauseelement, multiparams, params):
      for _params in _multiparams:
          newparams = {}
          for key, val in _params.items():
-             if (
-                 not isinstance(val, MutableDict) or
-                 (not any(val._changed_keys) and not any(val._deleted_keys))
+             if not isinstance(val, MutableDict) or (
+                 not any(val._changed_keys) and not any(val._deleted_keys)
              ):
                  newparams[key] = val
                  continue
@@ -68,7 +68,7 @@ def rewrite_update(clauseelement, multiparams, params):
              for subkey in val._deleted_keys:
                  newparams["{0}['{1}']".format(key, subkey)] = None
          newmultiparams.append(newparams)
-     _multiparams = (newmultiparams, )
+     _multiparams = (newmultiparams,)
      clause = clauseelement.values(newmultiparams[0])
      clause._crate_specific = True
      return clause, _multiparams, params
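
The hunks above rewrite the UPDATE parameters so that changed keys of an OBJECT column (tracked by MutableDict) are sent as flat `col['key']` parameters, which is how CrateDB expresses partial updates; deleted keys are set to NULL. A simplified, self-contained sketch of that flattening, for illustration only (the helper name is hypothetical, not part of the package):

    # Changed sub-keys become "column['key']" parameters, deleted sub-keys become None,
    # mirroring the format string used in the rewrite above.
    def flatten_object_params(column, changed, deleted):
        params = {"{0}['{1}']".format(column, key): value for key, value in changed.items()}
        params.update({"{0}['{1}']".format(column, key): None for key in deleted})
        return params

    # -> {"data['name']": 'foo', "data['obsolete']": None}
    print(flatten_object_params("data", {"name": "foo"}, {"obsolete"}))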
@@ -76,7 +76,7 @@ def rewrite_update(clauseelement, multiparams, params):

  @sa.event.listens_for(sa.engine.Engine, "before_execute", retval=True)
  def crate_before_execute(conn, clauseelement, multiparams, params, *args, **kwargs):
-     is_crate = type(conn.dialect).__name__ == 'CrateDialect'
+     is_crate = type(conn.dialect).__name__ == "CrateDialect"
      if is_crate and isinstance(clauseelement, sa.sql.expression.Update):
          if SA_VERSION >= SA_1_4:
              if params is None:
@@ -98,19 +98,19 @@ def crate_before_execute(conn, clauseelement, multiparams, params, *args, **kwar


  class CrateDDLCompiler(compiler.DDLCompiler):
-
-     __special_opts_tmpl = {
-         'PARTITIONED_BY': ' PARTITIONED BY ({0})'
-     }
+     __special_opts_tmpl = {"partitioned_by": " PARTITIONED BY ({0})"}
      __clustered_opts_tmpl = {
-         'NUMBER_OF_SHARDS': ' INTO {0} SHARDS',
-         'CLUSTERED_BY': ' BY ({0})',
+         "number_of_shards": " INTO {0} SHARDS",
+         "clustered_by": " BY ({0})",
      }
-     __clustered_opt_tmpl = ' CLUSTERED{CLUSTERED_BY}{NUMBER_OF_SHARDS}'
+     __clustered_opt_tmpl = " CLUSTERED{clustered_by}{number_of_shards}"

      def get_column_specification(self, column, **kwargs):
-         colspec = self.preparer.format_column(column) + " " + \
-             self.dialect.type_compiler.process(column.type)
+         colspec = (
+             self.preparer.format_column(column)
+             + " "
+             + self.dialect.type_compiler.process(column.type)
+         )

          default = self.get_column_default_string(column)
          if default is not None:
@@ -122,11 +122,9 @@ class CrateDDLCompiler(compiler.DDLCompiler):
          if column.nullable is False:
              colspec += " NOT NULL"
          elif column.nullable and column.primary_key:
-             raise sa.exc.CompileError(
-                 "Primary key columns cannot be nullable"
-             )
+             raise sa.exc.CompileError("Primary key columns cannot be nullable")

-         if column.dialect_options['crate'].get('index') is False:
+         if column.dialect_options["crate"].get("index") is False:
              if isinstance(column.type, (Geopoint, Geoshape, ObjectTypeImpl)):
                  raise sa.exc.CompileError(
                      "Disabling indexing is not supported for column "
@@ -135,8 +133,8 @@ class CrateDDLCompiler(compiler.DDLCompiler):

              colspec += " INDEX OFF"

-         if column.dialect_options['crate'].get('columnstore') is False:
-             if not isinstance(column.type, (String, )):
+         if column.dialect_options["crate"].get("columnstore") is False:
+             if not isinstance(column.type, (String,)):
                  raise sa.exc.CompileError(
                      "Controlling the columnstore is only allowed for STRING columns"
                  )
@@ -148,8 +146,7 @@ class CrateDDLCompiler(compiler.DDLCompiler):
      def visit_computed_column(self, generated):
          if generated.persisted is False:
              raise sa.exc.CompileError(
-                 "Virtual computed columns are not supported, set "
-                 "'persisted' to None or True"
+                 "Virtual computed columns are not supported, set " "'persisted' to None or True"
              )

          return "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
@@ -157,14 +154,14 @@ class CrateDDLCompiler(compiler.DDLCompiler):
          )

      def post_create_table(self, table):
-         special_options = ''
+         special_options = ""
          clustered_options = defaultdict(str)
          table_opts = []

          opts = dict(
-             (k[len(self.dialect.name) + 1:].upper(), v)
-             for k, v, in table.kwargs.items()
-             if k.startswith('%s_' % self.dialect.name)
+             (k[len(self.dialect.name) + 1 :], v)
+             for k, v in table.kwargs.items()
+             if k.startswith("%s_" % self.dialect.name)
          )
          for k, v in opts.items():
              if k in self.__special_opts_tmpl:
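
Note that `post_create_table` no longer upper-cases the option keys stripped from `crate_*` table keyword arguments, so the lookups now happen against the lowercase template keys (`partitioned_by`, `clustered_by`, `number_of_shards`). A sketch of how such options surface in the generated DDL, assuming sqlalchemy-cratedb is installed so the `crate://` URL resolves to this dialect (no server connection is needed to compile DDL):

    import sqlalchemy as sa
    from sqlalchemy.schema import CreateTable

    engine = sa.create_engine("crate://")
    metadata = sa.MetaData()
    log = sa.Table(
        "log",
        metadata,
        sa.Column("ts", sa.DateTime, primary_key=True),
        crate_clustered_by="ts",
        crate_number_of_shards=4,
    )
    # Expected to render roughly: CREATE TABLE log (...) CLUSTERED BY (ts) INTO 4 SHARDS
    print(CreateTable(log).compile(engine))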
@@ -172,69 +169,73 @@ class CrateDDLCompiler(compiler.DDLCompiler):
              elif k in self.__clustered_opts_tmpl:
                  clustered_options[k] = self.__clustered_opts_tmpl[k].format(v)
              else:
-                 table_opts.append('{0} = {1}'.format(k, v))
+                 table_opts.append("{0} = {1}".format(k, v))
          if clustered_options:
              special_options += string.Formatter().vformat(
-                 self.__clustered_opt_tmpl, (), clustered_options)
+                 self.__clustered_opt_tmpl, (), clustered_options
+             )
          if table_opts:
-             return special_options + ' WITH ({0})'.format(
-                 ', '.join(sorted(table_opts)))
+             return special_options + " WITH ({0})".format(", ".join(sorted(table_opts)))
          return special_options

      def visit_foreign_key_constraint(self, constraint, **kw):
          """
          CrateDB does not support foreign key constraints.
          """
-         warnings.warn("CrateDB does not support foreign key constraints, "
-                       "they will be omitted when generating DDL statements.")
-         return None
+         warnings.warn(
+             "CrateDB does not support foreign key constraints, "
+             "they will be omitted when generating DDL statements.",
+             stacklevel=2,
+         )
+         return

      def visit_unique_constraint(self, constraint, **kw):
          """
          CrateDB does not support unique key constraints.
          """
-         warnings.warn("CrateDB does not support unique constraints, "
-                       "they will be omitted when generating DDL statements.")
-         return None
+         warnings.warn(
+             "CrateDB does not support unique constraints, "
+             "they will be omitted when generating DDL statements.",
+             stacklevel=2,
+         )
+         return


  class CrateTypeCompiler(compiler.GenericTypeCompiler):
-
      def visit_string(self, type_, **kw):
-         return 'STRING'
+         return "STRING"

      def visit_unicode(self, type_, **kw):
-         return 'STRING'
+         return "STRING"

      def visit_TEXT(self, type_, **kw):
-         return 'STRING'
+         return "STRING"

      def visit_DECIMAL(self, type_, **kw):
-         return 'DOUBLE'
+         return "DOUBLE"

      def visit_BIGINT(self, type_, **kw):
-         return 'LONG'
+         return "LONG"

      def visit_NUMERIC(self, type_, **kw):
-         return 'LONG'
+         return "LONG"

      def visit_INTEGER(self, type_, **kw):
-         return 'INT'
+         return "INT"

      def visit_SMALLINT(self, type_, **kw):
-         return 'SHORT'
+         return "SHORT"

      def visit_datetime(self, type_, **kw):
-         return 'TIMESTAMP'
+         return self.visit_TIMESTAMP(type_, **kw)

      def visit_date(self, type_, **kw):
-         return 'TIMESTAMP'
+         return "TIMESTAMP"

      def visit_ARRAY(self, type_, **kw):
          if type_.dimensions is not None and type_.dimensions > 1:
-             raise NotImplementedError(
-                 "CrateDB doesn't support multidimensional arrays")
-         return 'ARRAY({0})'.format(self.process(type_.item_type))
+             raise NotImplementedError("CrateDB doesn't support multidimensional arrays")
+         return "ARRAY({0})".format(self.process(type_.item_type))

      def visit_OBJECT(self, type_, **kw):
          return "OBJECT"
@@ -245,28 +246,27 @@ class CrateTypeCompiler(compiler.GenericTypeCompiler):
              raise ValueError("FloatVector must be initialized with dimension size")
          return f"FLOAT_VECTOR({dimensions})"

+     def visit_TIMESTAMP(self, type_, **kw):
+         """
+         Support for `TIMESTAMP WITH|WITHOUT TIME ZONE`.
+
+         From `sqlalchemy.dialects.postgresql.base.PGTypeCompiler`.
+         """
+         return "TIMESTAMP %s" % ((type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE",)

- class CrateCompiler(compiler.SQLCompiler):

+ class CrateCompiler(compiler.SQLCompiler):
      def visit_getitem_binary(self, binary, operator, **kw):
-         return "{0}['{1}']".format(
-             self.process(binary.left, **kw),
-             binary.right.value
-         )
+         return "{0}['{1}']".format(self.process(binary.left, **kw), binary.right.value)

-     def visit_json_getitem_op_binary(
-         self, binary, operator, _cast_applied=False, **kw
-     ):
-         return "{0}['{1}']".format(
-             self.process(binary.left, **kw),
-             binary.right.value
-         )
+     def visit_json_getitem_op_binary(self, binary, operator, _cast_applied=False, **kw):
+         return "{0}['{1}']".format(self.process(binary.left, **kw), binary.right.value)

      def visit_any(self, element, **kw):
          return "%s%sANY (%s)" % (
              self.process(element.left, **kw),
              compiler.OPERATORS[element.operator],
-             self.process(element.right, **kw)
+             self.process(element.right, **kw),
          )

      def visit_ilike_case_insensitive_operand(self, element, **kw):
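
The new `visit_TIMESTAMP` makes the type compiler honor the `timezone` flag, and `visit_datetime` now delegates to it instead of returning a bare `TIMESTAMP`. A quick way to inspect the rendered types without a server, assuming the package is installed:

    import sqlalchemy as sa

    dialect = sa.create_engine("crate://").dialect
    print(sa.TIMESTAMP(timezone=True).compile(dialect=dialect))  # TIMESTAMP WITH TIME ZONE
    print(sa.DateTime().compile(dialect=dialect))                # TIMESTAMP WITHOUT TIME ZONE
    print(sa.DateTime(timezone=True).compile(dialect=dialect))   # TIMESTAMP WITH TIME ZONE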
@@ -321,29 +321,32 @@ class CrateCompiler(compiler.SQLCompiler):
      def for_update_clause(self, select, **kw):
          # CrateDB does not support the `INSERT ... FOR UPDATE` clause.
          # See https://github.com/crate/crate-python/issues/577.
-         warnings.warn("CrateDB does not support the 'INSERT ... FOR UPDATE' clause, "
-                       "it will be omitted when generating SQL statements.")
-         return ''
+         warnings.warn(
+             "CrateDB does not support the 'INSERT ... FOR UPDATE' clause, "
+             "it will be omitted when generating SQL statements.",
+             stacklevel=2,
+         )
+         return ""


- CRATEDB_RESERVED_WORDS = \
-     "add, alter, between, by, called, costs, delete, deny, directory, drop, escape, exists, " \
-     "extract, first, function, if, index, input, insert, last, match, nulls, object, " \
-     "persistent, recursive, reset, returns, revoke, set, stratify, transient, try_cast, " \
+ CRATEDB_RESERVED_WORDS = (
+     "add, alter, between, by, called, costs, delete, deny, directory, drop, escape, exists, "
+     "extract, first, function, if, index, input, insert, last, match, nulls, object, "
+     "persistent, recursive, reset, returns, revoke, set, stratify, transient, try_cast, "
      "unbounded, update".split(", ")
+ )


  class CrateIdentifierPreparer(sa.sql.compiler.IdentifierPreparer):
      """
      Define CrateDB's reserved words to be quoted properly.
      """
+
      reserved_words = set(list(POSTGRESQL_RESERVED_WORDS) + CRATEDB_RESERVED_WORDS)

      def _unquote_identifier(self, value):
          if value[0] == self.initial_quote:
-             value = value[1:-1].replace(
-                 self.escape_to_quote, self.escape_quote
-             )
+             value = value[1:-1].replace(self.escape_to_quote, self.escape_quote)
          return value

      def format_type(self, type_, use_schema=True):
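
With the reserved-word list above merged into `CrateIdentifierPreparer.reserved_words`, CrateDB keywords are quoted in rendered statements in addition to the PostgreSQL ones. A small check, assuming the package is installed:

    import sqlalchemy as sa

    preparer = sa.create_engine("crate://").dialect.identifier_preparer
    print(preparer.quote("object"))  # "object"  (CrateDB reserved word, quoted)
    print(preparer.quote("sensor"))  # sensor    (not reserved, left as-is)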
@@ -353,10 +356,6 @@ class CrateIdentifierPreparer(sa.sql.compiler.IdentifierPreparer):
          name = self.quote(type_.name)
          effective_schema = self.schema_for_object(type_)

-         if (
-             not self.omit_schema
-             and use_schema
-             and effective_schema is not None
-         ):
+         if not self.omit_schema and use_schema and effective_schema is not None:
              name = self.quote_schema(effective_schema) + "." + name
          return name
@@ -20,7 +20,7 @@
  # software solely pursuant to the terms of the relevant commercial agreement.

  import logging
- from datetime import datetime, date
+ from datetime import date, datetime

  from sqlalchemy import types as sqltypes
  from sqlalchemy.engine import default, reflection
@@ -28,20 +28,19 @@ from sqlalchemy.sql import functions
  from sqlalchemy.util import asbool, to_list

  from .compiler import (
-     CrateTypeCompiler,
      CrateDDLCompiler,
      CrateIdentifierPreparer,
+     CrateTypeCompiler,
  )
- from crate.client.exceptions import TimezoneUnawareException
- from .sa_version import SA_VERSION, SA_1_4, SA_2_0
+ from .sa_version import SA_1_4, SA_2_0, SA_VERSION
  from .type import FloatVector, ObjectArray, ObjectType

  TYPES_MAP = {
      "boolean": sqltypes.Boolean,
      "short": sqltypes.SmallInteger,
      "smallint": sqltypes.SmallInteger,
-     "timestamp": sqltypes.TIMESTAMP,
-     "timestamp with time zone": sqltypes.TIMESTAMP,
+     "timestamp": sqltypes.TIMESTAMP(timezone=False),
+     "timestamp with time zone": sqltypes.TIMESTAMP(timezone=True),
      "object": ObjectType,
      "integer": sqltypes.Integer,
      "long": sqltypes.NUMERIC,
@@ -55,15 +54,18 @@ TYPES_MAP = {
      "text": sqltypes.String,
      "float_vector": FloatVector,
  }
+
+ # Needed for SQLAlchemy >= 1.1.
+ # TODO: Dissolve.
  try:
-     # SQLAlchemy >= 1.1
      from sqlalchemy.types import ARRAY
+
      TYPES_MAP["integer_array"] = ARRAY(sqltypes.Integer)
      TYPES_MAP["boolean_array"] = ARRAY(sqltypes.Boolean)
      TYPES_MAP["short_array"] = ARRAY(sqltypes.SmallInteger)
      TYPES_MAP["smallint_array"] = ARRAY(sqltypes.SmallInteger)
-     TYPES_MAP["timestamp_array"] = ARRAY(sqltypes.TIMESTAMP)
-     TYPES_MAP["timestamp with time zone_array"] = ARRAY(sqltypes.TIMESTAMP)
+     TYPES_MAP["timestamp_array"] = ARRAY(sqltypes.TIMESTAMP(timezone=False))
+     TYPES_MAP["timestamp with time zone_array"] = ARRAY(sqltypes.TIMESTAMP(timezone=True))
      TYPES_MAP["long_array"] = ARRAY(sqltypes.NUMERIC)
      TYPES_MAP["bigint_array"] = ARRAY(sqltypes.NUMERIC)
      TYPES_MAP["double_array"] = ARRAY(sqltypes.DECIMAL)
@@ -72,7 +74,7 @@ try:
      TYPES_MAP["real_array"] = ARRAY(sqltypes.Float)
      TYPES_MAP["string_array"] = ARRAY(sqltypes.String)
      TYPES_MAP["text_array"] = ARRAY(sqltypes.String)
- except Exception:
+ except Exception:  # noqa: S110
      pass


@@ -83,14 +85,16 @@ class Date(sqltypes.Date):
      def bind_processor(self, dialect):
          def process(value):
              if value is not None:
-                 assert isinstance(value, date)
-                 return value.strftime('%Y-%m-%d')
+                 assert isinstance(value, date)  # noqa: S101
+                 return value.strftime("%Y-%m-%d")
+             return None
+
          return process

      def result_processor(self, dialect, coltype):
          def process(value):
              if not value:
-                 return
+                 return None
              try:
                  return datetime.utcfromtimestamp(value / 1e3).date()
              except TypeError:
@@ -104,32 +108,29 @@ class Date(sqltypes.Date):
              # the date will be returned in the format it was inserted.
              log.warning(
                  "Received timestamp isn't a long value."
-                 "Trying to parse as date string and then as datetime string")
+                 "Trying to parse as date string and then as datetime string"
+             )
              try:
-                 return datetime.strptime(value, '%Y-%m-%d').date()
+                 return datetime.strptime(value, "%Y-%m-%d").date()
              except ValueError:
-                 return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ').date()
+                 return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ").date()
+
          return process


  class DateTime(sqltypes.DateTime):
-
-     TZ_ERROR_MSG = "Timezone aware datetime objects are not supported"
-
      def bind_processor(self, dialect):
          def process(value):
-             if value is not None:
-                 assert isinstance(value, datetime)
-                 if value.tzinfo is not None:
-                     raise TimezoneUnawareException(DateTime.TZ_ERROR_MSG)
-                 return value.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
+             if isinstance(value, (datetime, date)):
+                 return value.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
              return value
+
          return process

      def result_processor(self, dialect, coltype):
          def process(value):
              if not value:
-                 return
+                 return None
              try:
                  return datetime.utcfromtimestamp(value / 1e3)
              except TypeError:
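
The bind processor above no longer rejects timezone-aware datetimes with `TimezoneUnawareException`; both naive and aware values are serialized with an explicit `%z` offset (empty for naive values). A small check without a database, assuming the package is installed:

    from datetime import datetime, timezone

    import sqlalchemy as sa

    dialect = sa.create_engine("crate://").dialect
    process = sa.DateTime().dialect_impl(dialect).bind_processor(dialect)
    print(process(datetime(2024, 1, 1, 12, 30, tzinfo=timezone.utc)))  # 2024-01-01T12:30:00.000000+0000
    print(process(datetime(2024, 1, 1, 12, 30)))                       # 2024-01-01T12:30:00.000000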
@@ -143,35 +144,41 @@ class DateTime(sqltypes.DateTime):
              # the date will be returned in the format it was inserted.
              log.warning(
                  "Received timestamp isn't a long value."
-                 "Trying to parse as datetime string and then as date string")
+                 "Trying to parse as datetime string and then as date string"
+             )
              try:
-                 return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ')
+                 return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%fZ")
              except ValueError:
-                 return datetime.strptime(value, '%Y-%m-%d')
+                 return datetime.strptime(value, "%Y-%m-%d")
+
          return process


  colspecs = {
+     sqltypes.Date: Date,
      sqltypes.DateTime: DateTime,
-     sqltypes.Date: Date
+     sqltypes.TIMESTAMP: DateTime,
  }


  if SA_VERSION >= SA_2_0:
      from .compat.core20 import CrateCompilerSA20
+
      statement_compiler = CrateCompilerSA20
  elif SA_VERSION >= SA_1_4:
      from .compat.core14 import CrateCompilerSA14
+
      statement_compiler = CrateCompilerSA14
  else:
      from .compat.core10 import CrateCompilerSA10
+
      statement_compiler = CrateCompilerSA10


  class CrateDialect(default.DefaultDialect):
-     name = 'crate'
-     driver = 'crate-python'
-     default_paramstyle = 'qmark'
+     name = "crate"
+     driver = "crate-python"
+     default_paramstyle = "qmark"
      statement_compiler = statement_compiler
      ddl_compiler = CrateDDLCompiler
      type_compiler = CrateTypeCompiler
@@ -197,15 +204,13 @@ class CrateDialect(default.DefaultDialect):

          # Currently, our SQL parser doesn't support unquoted column names that
          # start with _. Adding it here causes sqlalchemy to quote such columns.
-         self.identifier_preparer.illegal_initial_characters.add('_')
+         self.identifier_preparer.illegal_initial_characters.add("_")

      def initialize(self, connection):
          # get lowest server version
-         self.server_version_info = \
-             self._get_server_version_info(connection)
+         self.server_version_info = self._get_server_version_info(connection)
          # get default schema name
-         self.default_schema_name = \
-             self._get_default_schema_name(connection)
+         self.default_schema_name = self._get_default_schema_name(connection)

      def do_rollback(self, connection):
          # if any exception is raised by the dbapi, sqlalchemy by default
@@ -217,9 +222,9 @@ class CrateDialect(default.DefaultDialect):
      def connect(self, host=None, port=None, *args, **kwargs):
          server = None
          if host:
-             server = '{0}:{1}'.format(host, port or '4200')
-         if 'servers' in kwargs:
-             server = kwargs.pop('servers')
+             server = "{0}:{1}".format(host, port or "4200")
+         if "servers" in kwargs:
+             server = kwargs.pop("servers")
          servers = to_list(server)
          if servers:
              use_ssl = asbool(kwargs.pop("ssl", False))
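
The `connect()` handling above builds the HTTP server address from the URL host and port, defaulting to port 4200, and also accepts an explicit `servers` argument. Hypothetical connection examples (host names are placeholders; `create_engine` itself does not open a connection), assuming the package is installed:

    import sqlalchemy as sa

    # Host taken from the URL, port defaults to 4200.
    engine = sa.create_engine("crate://cratedb.example.org")

    # Several nodes passed explicitly; the `servers` entry is popped by connect() above.
    engine = sa.create_engine(
        "crate://",
        connect_args={"servers": ["node-1.example.org:4200", "node-2.example.org:4200"]},
    )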
@@ -229,7 +234,7 @@ class CrateDialect(default.DefaultDialect):
          return self.dbapi.connect(**kwargs)

      def _get_default_schema_name(self, connection):
-         return 'doc'
+         return "doc"

      def _get_effective_schema_name(self, connection):
          schema_name_raw = connection.engine.url.query.get("schema")
@@ -246,6 +251,7 @@ class CrateDialect(default.DefaultDialect):
      @classmethod
      def import_dbapi(cls):
          from crate import client
+
          return client

      @classmethod
@@ -261,9 +267,7 @@ class CrateDialect(default.DefaultDialect):
      @reflection.cache
      def get_schema_names(self, connection, **kw):
          cursor = connection.exec_driver_sql(
-             "select schema_name "
-             "from information_schema.schemata "
-             "order by schema_name asc"
+             "select schema_name " "from information_schema.schemata " "order by schema_name asc"
          )
          return [row[0] for row in cursor.fetchall()]

@@ -276,7 +280,7 @@ class CrateDialect(default.DefaultDialect):
              "WHERE {0} = ? "
              "AND table_type = 'BASE TABLE' "
              "ORDER BY table_name ASC, {0} ASC".format(self.schema_column),
-             (schema or self.default_schema_name, )
+             (schema or self.default_schema_name,),
          )
          return [row[0] for row in cursor.fetchall()]

@@ -285,22 +289,25 @@ class CrateDialect(default.DefaultDialect):
          cursor = connection.exec_driver_sql(
              "SELECT table_name FROM information_schema.views "
              "ORDER BY table_name ASC, {0} ASC".format(self.schema_column),
-             (schema or self.default_schema_name, )
+             (schema or self.default_schema_name,),
          )
          return [row[0] for row in cursor.fetchall()]

      @reflection.cache
      def get_columns(self, connection, table_name, schema=None, **kw):
-         query = "SELECT column_name, data_type " \
-                 "FROM information_schema.columns " \
-                 "WHERE table_name = ? AND {0} = ? " \
-                 "AND column_name !~ ?" \
-             .format(self.schema_column)
+         query = (
+             "SELECT column_name, data_type "
+             "FROM information_schema.columns "
+             "WHERE table_name = ? AND {0} = ? "
+             "AND column_name !~ ?".format(self.schema_column)
+         )
          cursor = connection.exec_driver_sql(
              query,
-             (table_name,
-              schema or self.default_schema_name,
-              r"(.*)\[\'(.*)\'\]")  # regex to filter subscript
+             (
+                 table_name,
+                 schema or self.default_schema_name,
+                 r"(.*)\[\'(.*)\'\]",
+             ),  # regex to filter subscript
          )
          return [self._create_column_info(row) for row in cursor.fetchall()]

@@ -335,17 +342,14 @@ class CrateDialect(default.DefaultDialect):
              rows = result.fetchone()
              return set(rows[0] if rows else [])

-         pk_result = engine.exec_driver_sql(
-             query,
-             (table_name, schema or self.default_schema_name)
-         )
+         pk_result = engine.exec_driver_sql(query, (table_name, schema or self.default_schema_name))
          pks = result_fun(pk_result)
-         return {'constrained_columns': list(sorted(pks)),
-                 'name': 'PRIMARY KEY'}
+         return {"constrained_columns": sorted(pks), "name": "PRIMARY KEY"}

      @reflection.cache
-     def get_foreign_keys(self, connection, table_name, schema=None,
-                          postgresql_ignore_search_path=False, **kw):
+     def get_foreign_keys(
+         self, connection, table_name, schema=None, postgresql_ignore_search_path=False, **kw
+     ):
          # Crate doesn't support Foreign Keys, so this stays empty
          return []

@@ -359,12 +363,12 @@ class CrateDialect(default.DefaultDialect):

      def _create_column_info(self, row):
          return {
-             'name': row[0],
-             'type': self._resolve_type(row[1]),
+             "name": row[0],
+             "type": self._resolve_type(row[1]),
              # In Crate every column is nullable except PK
              # Primary Key Constraints are not nullable anyway, no matter what
              # we return here, so it's fine to return always `True`
-             'nullable': True
+             "nullable": True,
          }

      def _resolve_type(self, type_):