databricks-sqlalchemy 0.0.1b1__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +274 -0
- databricks/sqlalchemy/__init__.py +4 -2
- databricks/sqlalchemy/_ddl.py +100 -0
- databricks/sqlalchemy/_parse.py +385 -0
- databricks/sqlalchemy/_types.py +323 -0
- databricks/sqlalchemy/base.py +436 -0
- databricks/sqlalchemy/dependency_test/test_dependency.py +22 -0
- databricks/sqlalchemy/py.typed +0 -0
- databricks/sqlalchemy/pytest.ini +4 -0
- databricks/sqlalchemy/requirements.py +249 -0
- databricks/sqlalchemy/setup.cfg +4 -0
- databricks/sqlalchemy/test/_extra.py +70 -0
- databricks/sqlalchemy/test/_future.py +331 -0
- databricks/sqlalchemy/test/_regression.py +311 -0
- databricks/sqlalchemy/test/_unsupported.py +450 -0
- databricks/sqlalchemy/test/conftest.py +13 -0
- databricks/sqlalchemy/test/overrides/_componentreflectiontest.py +189 -0
- databricks/sqlalchemy/test/overrides/_ctetest.py +33 -0
- databricks/sqlalchemy/test/test_suite.py +13 -0
- databricks/sqlalchemy/test_local/__init__.py +5 -0
- databricks/sqlalchemy/test_local/conftest.py +44 -0
- databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx +0 -0
- databricks/sqlalchemy/test_local/e2e/test_basic.py +543 -0
- databricks/sqlalchemy/test_local/test_ddl.py +96 -0
- databricks/sqlalchemy/test_local/test_parsing.py +160 -0
- databricks/sqlalchemy/test_local/test_types.py +161 -0
- databricks_sqlalchemy-1.0.0.dist-info/LICENSE +201 -0
- databricks_sqlalchemy-1.0.0.dist-info/METADATA +225 -0
- databricks_sqlalchemy-1.0.0.dist-info/RECORD +31 -0
- {databricks_sqlalchemy-0.0.1b1.dist-info → databricks_sqlalchemy-1.0.0.dist-info}/WHEEL +1 -1
- databricks_sqlalchemy-1.0.0.dist-info/entry_points.txt +3 -0
- databricks/__init__.py +0 -7
- databricks_sqlalchemy-0.0.1b1.dist-info/METADATA +0 -19
- databricks_sqlalchemy-0.0.1b1.dist-info/RECORD +0 -5
@@ -0,0 +1,450 @@
|
|
1
|
+
# type: ignore
|
2
|
+
|
3
|
+
from enum import Enum
|
4
|
+
|
5
|
+
import pytest
|
6
|
+
from databricks.sqlalchemy.test._regression import (
|
7
|
+
ComponentReflectionTest,
|
8
|
+
ComponentReflectionTestExtra,
|
9
|
+
CTETest,
|
10
|
+
FetchLimitOffsetTest,
|
11
|
+
FutureTableDDLTest,
|
12
|
+
HasTableTest,
|
13
|
+
InsertBehaviorTest,
|
14
|
+
NumericTest,
|
15
|
+
TableDDLTest,
|
16
|
+
UuidTest,
|
17
|
+
)
|
18
|
+
|
19
|
+
# These are test suites that are fully skipped with a SkipReason
|
20
|
+
from sqlalchemy.testing.suite import (
|
21
|
+
AutocommitIsolationTest,
|
22
|
+
DateTimeTZTest,
|
23
|
+
ExceptionTest,
|
24
|
+
HasIndexTest,
|
25
|
+
HasSequenceTest,
|
26
|
+
HasSequenceTestEmpty,
|
27
|
+
IsolationLevelTest,
|
28
|
+
LastrowidTest,
|
29
|
+
LongNameBlowoutTest,
|
30
|
+
PercentSchemaNamesTest,
|
31
|
+
ReturningTest,
|
32
|
+
SequenceCompilerTest,
|
33
|
+
SequenceTest,
|
34
|
+
ServerSideCursorsTest,
|
35
|
+
UnicodeSchemaTest,
|
36
|
+
)
|
37
|
+
|
38
|
+
|
39
|
+
class SkipReason(Enum):
    """Canonical catalog of the Databricks capabilities that cause SQLAlchemy
    suite tests to be skipped.

    Each member's value is the human-readable phrase interpolated into the
    skip message by :func:`render_skip_reason`.
    """

    AUTO_INC = "implicit AUTO_INCREMENT"
    CTE_FEAT = "required CTE features"
    CURSORS = "server-side cursors"
    DECIMAL_FEAT = "required decimal features"
    ENFORCE_KEYS = "enforcing primary or foreign key restraints"
    FETCH = "fetch clauses"
    IDENTIFIER_LENGTH = "identifiers > 255 characters"
    IMPL_FLOAT_PREC = "required implicit float precision"
    IMPLICIT_ORDER = "deterministic return order if ORDER BY is not present"
    INDEXES = "SQL INDEXes"
    RETURNING = "INSERT ... RETURNING syntax"
    SEQUENCES = "SQL SEQUENCES"
    STRING_FEAT = "required STRING type features"
    SYMBOL_CHARSET = "symbols expected by test"
    TEMP_TBL = "temporary tables"
    TIMEZONE_OPT = "timezone-optional TIMESTAMP fields"
    TRANSACTIONS = "transactions"
    UNIQUE = "UNIQUE constraints"
|
58
|
+
|
59
|
+
|
60
|
+
def render_skip_reason(rsn: SkipReason, setup_error=False, extra=False) -> str:
    """Build the standard skip-message string for an unsupported feature.

    ``setup_error`` inserts a ``[BADSETUP]`` tag (the test fails during setup,
    not assertion); ``extra`` appends a pointer to the longer explanation kept
    in _unsupported.py.
    """
    pieces = ["[UNSUPPORTED]"]
    if setup_error:
        pieces.append("[BADSETUP]")
    pieces.append(f"[{rsn.name}]: Databricks does not support {rsn.value}.")
    if extra:
        pieces.append(" More detail in _unsupported.py")
    return "".join(pieces)
|
64
|
+
|
65
|
+
|
66
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.ENFORCE_KEYS))
class ExceptionTest(ExceptionTest):
    """Skipped wholesale: per Databricks documentation, primary and foreign key
    constraints are informational only and never enforced, so the integrity
    errors this suite expects are never raised.

    https://docs.databricks.com/api/workspace/tableconstraints
    """
|
76
|
+
|
77
|
+
|
78
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.IDENTIFIER_LENGTH))
class LongNameBlowoutTest(LongNameBlowoutTest):
    """Skipped wholesale: every test here asserts on a generated name that is
    longer than 255 characters, beyond the Databricks identifier limit.
    """
|
84
|
+
|
85
|
+
|
86
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
class HasSequenceTest(HasSequenceTest):
    """Skipped wholesale: Databricks has no SEQUENCE object."""
|
90
|
+
|
91
|
+
|
92
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
class HasSequenceTestEmpty(HasSequenceTestEmpty):
    """Skipped wholesale: Databricks has no SEQUENCE object."""
|
96
|
+
|
97
|
+
|
98
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
class HasIndexTest(HasIndexTest):
    """Skipped wholesale: Databricks does not support SQL indexes."""
|
102
|
+
|
103
|
+
|
104
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET))
class UnicodeSchemaTest(UnicodeSchemaTest):
    """Skipped wholesale: the identifier symbols this suite uses are not
    accepted by Databricks.
    """
|
108
|
+
|
109
|
+
|
110
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.CURSORS))
class ServerSideCursorsTest(ServerSideCursorsTest):
    """Skipped wholesale: server-side cursors are not available."""
|
114
|
+
|
115
|
+
|
116
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SYMBOL_CHARSET))
class PercentSchemaNamesTest(PercentSchemaNamesTest):
    """Skipped wholesale: percent signs in schema names are not accepted."""
|
120
|
+
|
121
|
+
|
122
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS))
class IsolationLevelTest(IsolationLevelTest):
    """Skipped wholesale: transactions (and isolation levels) are unsupported."""
|
126
|
+
|
127
|
+
|
128
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.TRANSACTIONS))
class AutocommitIsolationTest(AutocommitIsolationTest):
    """Skipped wholesale: transactions (and isolation levels) are unsupported."""
|
132
|
+
|
133
|
+
|
134
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING))
class ReturningTest(ReturningTest):
    """Skipped wholesale: INSERT ... RETURNING is not supported."""
|
138
|
+
|
139
|
+
|
140
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
class SequenceTest(SequenceTest):
    """Skipped wholesale: Databricks has no SEQUENCE object."""
|
144
|
+
|
145
|
+
|
146
|
+
@pytest.mark.reviewed
@pytest.mark.skip(reason=render_skip_reason(SkipReason.SEQUENCES))
class SequenceCompilerTest(SequenceCompilerTest):
    """Skipped wholesale: Databricks has no SEQUENCE object."""
|
150
|
+
|
151
|
+
|
152
|
+
class FetchLimitOffsetTest(FetchLimitOffsetTest):
    """LIMIT/OFFSET tests. The FETCH {FIRST|NEXT} clause family is not
    supported by Databricks, so every test that exercises it is skipped.
    """

    @pytest.mark.flaky
    @pytest.mark.skip(reason=render_skip_reason(SkipReason.IMPLICIT_ORDER, extra=True))
    def test_limit_render_multiple_times(self):
        """Passing criteria depend on insertion order being preserved when no
        ORDER BY is given: the row inserted with id=1 must come back first.
        Databricks occasionally INSERTs in a different order, so the test is
        flaky even though the feature under test (rendering multiple LIMIT
        clauses) works.

        Unclear if this is a bug in Databricks, Delta, or a race in the test.
        """

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_bound_fetch_offset(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_no_order(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_nobinds(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_simple_fetch(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_simple_fetch_offset(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_simple_fetch_percent(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_simple_fetch_percent_ties(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_simple_fetch_ties(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_expr_fetch_offset(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_percent(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_percent_ties(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_ties(self):
        """Skipped: FETCH clause."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.FETCH))
    def test_fetch_offset_ties_exact_number(self):
        """Skipped: FETCH clause."""
|
216
|
+
|
217
|
+
|
218
|
+
class UuidTest(UuidTest):
    """UUID round-trip tests; only the RETURNING-based variant is skipped."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING))
    def test_uuid_returning(self):
        """Skipped: INSERT ... RETURNING is unsupported."""
|
222
|
+
|
223
|
+
|
224
|
+
class FutureTableDDLTest(FutureTableDDLTest):
    """Index DDL tests are skipped: Databricks does not support SQL indexes."""

    # Fix: pass `reason=` as a keyword for consistency with TableDDLTest and
    # the rest of this module (pytest.mark.skip's first positional argument is
    # `reason`, so behavior is unchanged).
    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_create_index_if_not_exists(self):
        """We could use requirements.index_reflection and requirements.index_ddl_if_exists
        here to disable this but prefer a more meaningful skip message
        """
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_drop_index_if_exists(self):
        """We could use requirements.index_reflection and requirements.index_ddl_if_exists
        here to disable this but prefer a more meaningful skip message
        """
        pass
|
238
|
+
|
239
|
+
|
240
|
+
class TableDDLTest(TableDDLTest):
    """Index DDL tests are skipped: Databricks does not support SQL indexes."""

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_create_index_if_not_exists(self, connection):
        """A requirements-based disable (index_reflection / index_ddl_if_exists)
        would also work, but an explicit skip gives a more meaningful message.
        """

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_drop_index_if_exists(self, connection):
        """A requirements-based disable (index_reflection / index_ddl_if_exists)
        would also work, but an explicit skip gives a more meaningful message.
        """
|
254
|
+
|
255
|
+
|
256
|
+
class ComponentReflectionTest(ComponentReflectionTest):
    """This test requires two schemas be present in the target Databricks workspace:
    - The schema set in --dburi
    - A second schema named "test_schema"

    Note that test_get_multi_foreign_keys is flaky because DBR does not guarantee
    the order of data returned in DESCRIBE TABLE EXTENDED.
    """

    # Fix: `reason=` is now passed by keyword on every skip for consistency with
    # the rest of this module (behavior unchanged — `reason` is the first
    # positional parameter of pytest.mark.skip).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.UNIQUE))
    def test_get_multi_unique_constraints(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL, True, True))
    def test_get_temp_view_names(self):
        """While Databricks supports temporary views, this test creates a temp view aimed at a temp table.
        Databricks doesn't support temp tables. So the test can never pass.
        """
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_get_temp_table_columns(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_get_temp_table_indexes(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_get_temp_table_names(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_get_temp_table_unique_constraints(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_reflect_table_temp_table(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_get_indexes(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_multi_indexes(self):
        pass

    # NOTE(review): this name lacks the `test_` prefix, so pytest never collects
    # it and it does not override any parent test — it looks like it was meant to
    # be `test_get_noncol_index`. Verify against the parent suite before renaming.
    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def get_noncol_index(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.UNIQUE))
    def test_get_unique_constraints(self):
        pass
|
310
|
+
|
311
|
+
|
312
|
+
class NumericTest(NumericTest):
    """Numeric round-trip tests; features Databricks lacks are skipped."""

    # Fix: `reason=` passed by keyword throughout, for consistency with the
    # rest of this module (behavior unchanged).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.DECIMAL_FEAT))
    def test_enotation_decimal(self):
        """This test automatically runs if requirements.precision_numerics_enotation_large is open()"""
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.DECIMAL_FEAT))
    def test_enotation_decimal_large(self):
        """This test automatically runs if requirements.precision_numerics_enotation_large is open()"""
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True))
    def test_float_coerce_round_trip(self):
        """
        This automatically runs if requirements.literal_float_coercion is open()

        Without additional work, Databricks returns 15.75629997253418 when you SELECT 15.7563.
        This is a potential area where we could override the Float literal processor to add a CAST.
        Will leave to a PM to decide if we should do so.
        """
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.IMPL_FLOAT_PREC, extra=True))
    def test_float_custom_scale(self):
        """This test automatically runs if requirements.precision_generic_float_type is open()"""
        pass
|
338
|
+
|
339
|
+
|
340
|
+
class HasTableTest(HasTableTest):
    """Databricks does not support temporary tables."""

    # Fix: `reason=` passed by keyword, for consistency with the rest of this
    # module (behavior unchanged).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL))
    def test_has_table_temp_table(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.TEMP_TBL, True, True))
    def test_has_table_temp_view(self):
        """Databricks supports temporary views but this test depends on requirements.has_temp_table, which we
        explicitly close so that we can run other tests in this group. See the comment under has_temp_table in
        requirements.py for details.

        From what I can see, there is no way to run this test since it will fail during setup if we mark has_temp_table
        open(). It _might_ be possible to hijack this behaviour by implementing temp_table_keyword_args in our own
        provision.py. Doing so would mean creating a real table during this class setup instead of a temp table. Then
        we could just skip the temp table tests but run the temp view tests. But this test fixture doesn't cleanup its
        temp tables and has no hook to do so.

        It would be ideal for SQLAlchemy to define a separate requirements.has_temp_views.
        """
        pass
|
362
|
+
|
363
|
+
|
364
|
+
class ComponentReflectionTestExtra(ComponentReflectionTestExtra):
    """Extra reflection tests; index and string-length features are skipped."""

    # Fix: `reason=` passed by keyword throughout, for consistency with the
    # rest of this module (behavior unchanged).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_reflect_covering_index(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.INDEXES))
    def test_reflect_expression_based_indexes(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.STRING_FEAT, extra=True))
    def test_varchar_reflection(self):
        """Databricks doesn't enforce string length limitations like STRING(255)."""
        pass
|
377
|
+
|
378
|
+
|
379
|
+
class InsertBehaviorTest(InsertBehaviorTest):
    """Insert-behavior tests; implicit autoincrement and RETURNING are skipped."""

    # Fix: `reason=` passed by keyword throughout, for consistency with the
    # rest of this module (behavior unchanged).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.AUTO_INC, True, True))
    def test_autoclose_on_insert(self):
        """The setup for this test creates a column with implicit autoincrement enabled.
        This dialect does not implement implicit autoincrement - users must declare Identity() explicitly.
        """
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.AUTO_INC, True, True))
    def test_insert_from_select_autoinc(self):
        """Implicit autoincrement is not implemented in this dialect."""
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.AUTO_INC, True, True))
    def test_insert_from_select_autoinc_no_rows(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.RETURNING))
    def test_autoclose_on_insert_implicit_returning(self):
        pass
|
399
|
+
|
400
|
+
|
401
|
+
@pytest.mark.reviewed
# Fix: `reason=` passed by keyword, for consistency with the rest of this
# module (behavior unchanged).
@pytest.mark.skip(reason=render_skip_reason(SkipReason.AUTO_INC, extra=True))
class LastrowidTest(LastrowidTest):
    """SQLAlchemy docs describe that a column without an explicit Identity() may implicitly create one if autoincrement=True.
    That is what this method tests. Databricks supports auto-incrementing IDENTITY columns but they must be explicitly
    declared. This limitation is present in our dialect as well. Which means that SQLAlchemy's autoincrement setting of a column
    is ignored. We emit a logging.WARN message if you try it.

    In the future we could handle this autoincrement by implicitly calling the visit_identity_column() method of our DDLCompiler
    when autoincrement=True. There is an example of this in the Microsoft SQL Server dialect: MSSDDLCompiler.get_column_specification

    For now, if you need to create a SQLAlchemy column with an auto-incrementing identity, you must set this explicitly in your column
    definition by passing an Identity() to the column constructor.
    """

    pass
|
417
|
+
|
418
|
+
|
419
|
+
class CTETest(CTETest):
    """During the teardown for this test block, it tries to drop a constraint that it never named which raises
    a compilation error. This could point to poor constraint reflection but our other constraint reflection
    tests pass. Requires investigation.
    """

    # Fix: `reason=` passed by keyword throughout, for consistency with the
    # rest of this module (behavior unchanged).

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.CTE_FEAT, extra=True))
    def test_select_recursive_round_trip(self):
        pass

    @pytest.mark.skip(reason=render_skip_reason(SkipReason.CTE_FEAT, extra=True))
    def test_delete_scalar_subq_round_trip(self):
        """Error received is [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY.MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY]

        This suggests a limitation of the platform. But a workaround may be possible if customers require it.
        """
        pass
|
436
|
+
|
437
|
+
|
438
|
+
@pytest.mark.reviewed
# Fix: `reason=` passed by keyword, for consistency with the rest of this
# module (behavior unchanged).
@pytest.mark.skip(reason=render_skip_reason(SkipReason.TIMEZONE_OPT, True))
class DateTimeTZTest(DateTimeTZTest):
    """Test whether the sqlalchemy.DateTime() type can _optionally_ include timezone info.
    This dialect maps DateTime() → TIMESTAMP, which _always_ includes tzinfo.

    Users can use databricks.sqlalchemy.TIMESTAMP_NTZ for a tzinfo-less timestamp. The SQLA docs
    acknowledge this is expected for some dialects.

    https://docs.sqlalchemy.org/en/20/core/type_basics.html#sqlalchemy.types.DateTime
    """

    pass
|
@@ -0,0 +1,13 @@
|
|
1
|
+
from sqlalchemy.dialects import registry
|
2
|
+
import pytest
|
3
|
+
|
4
|
+
# sqlalchemy's dialect-testing machinery wants both a bare dialect name and a
# "<dialect>.<driver>" entry. The latter exists because a dialect may ship
# multiple drivers with driver-specific URLs, which doesn't make much sense
# for a single-driver dialect like this one — but the plugin still looks it up.
for _entrypoint in ("databricks", "databricks.databricks"):
    registry.register(_entrypoint, "databricks.sqlalchemy", "DatabricksDialect")

pytest.register_assert_rewrite("sqlalchemy.testing.assertions")
|
12
|
+
|
13
|
+
from sqlalchemy.testing.plugin.pytestplugin import *
|
@@ -0,0 +1,189 @@
|
|
1
|
+
"""The default test setup uses self-referential foreign keys and indexes for a test table.
|
2
|
+
We override to remove these assumptions.
|
3
|
+
|
4
|
+
Note that test_multi_foreign_keys currently does not pass for all combinations due to
|
5
|
+
an ordering issue. The dialect returns the expected information. But this test makes assertions
|
6
|
+
on the order of the returned results. We can't guarantee that order at the moment.
|
7
|
+
|
8
|
+
The test fixture actually tries to sort the outputs, but this sort isn't working. Will need
|
9
|
+
to follow-up on this later.
|
10
|
+
"""
|
11
|
+
import sqlalchemy as sa
|
12
|
+
from sqlalchemy.testing import config
|
13
|
+
from sqlalchemy.testing.schema import Column
|
14
|
+
from sqlalchemy.testing.schema import Table
|
15
|
+
from sqlalchemy import ForeignKey
|
16
|
+
from sqlalchemy import testing
|
17
|
+
|
18
|
+
from sqlalchemy.testing.suite.test_reflection import ComponentReflectionTest
|
19
|
+
|
20
|
+
|
21
|
+
class ComponentReflectionTest(ComponentReflectionTest):  # type: ignore
    """Override of the stock reflection fixtures: the defaults assume
    self-referential foreign keys and indexes, which we relax here.
    """

    @classmethod
    def define_reflected_tables(cls, metadata, schema):
        """Create the fixture tables reflected by this suite.

        Fix: the original body called bare ``Index(...)``, but ``Index`` is
        never imported in this module — a latent NameError whenever
        ``requirements.index_reflection`` is enabled. Qualified as ``sa.Index``
        (the module already imports ``sqlalchemy as sa``).
        """
        if schema:
            schema_prefix = schema + "."
        else:
            schema_prefix = ""

        if testing.requires.self_referential_foreign_keys.enabled:
            parent_id_args = (
                ForeignKey(
                    "%susers.user_id" % schema_prefix, name="user_id_fk", use_alter=True
                ),
            )
        else:
            parent_id_args = ()
        users = Table(
            "users",
            metadata,
            Column("user_id", sa.INT, primary_key=True),
            Column("test1", sa.CHAR(5), nullable=False),
            Column("test2", sa.Float(), nullable=False),
            Column("parent_user_id", sa.Integer, *parent_id_args),
            sa.CheckConstraint(
                "test2 > 0",
                name="zz_test2_gt_zero",
                comment="users check constraint",
            ),
            sa.CheckConstraint("test2 <= 1000"),
            schema=schema,
            test_needs_fk=True,
        )

        Table(
            "dingalings",
            metadata,
            Column("dingaling_id", sa.Integer, primary_key=True),
            Column(
                "address_id",
                sa.Integer,
                ForeignKey(
                    "%semail_addresses.address_id" % schema_prefix,
                    name="zz_email_add_id_fg",
                    comment="di fk comment",
                ),
            ),
            Column(
                "id_user",
                sa.Integer,
                ForeignKey("%susers.user_id" % schema_prefix),
            ),
            Column("data", sa.String(30), unique=True),
            sa.CheckConstraint(
                "address_id > 0 AND address_id < 1000",
                name="address_id_gt_zero",
            ),
            sa.UniqueConstraint(
                "address_id",
                "dingaling_id",
                name="zz_dingalings_multiple",
                comment="di unique comment",
            ),
            schema=schema,
            test_needs_fk=True,
        )
        Table(
            "email_addresses",
            metadata,
            Column("address_id", sa.Integer),
            Column("remote_user_id", sa.Integer, ForeignKey(users.c.user_id)),
            Column("email_address", sa.String(20)),
            sa.PrimaryKeyConstraint(
                "address_id", name="email_ad_pk", comment="ea pk comment"
            ),
            schema=schema,
            test_needs_fk=True,
        )
        Table(
            "comment_test",
            metadata,
            Column("id", sa.Integer, primary_key=True, comment="id comment"),
            Column("data", sa.String(20), comment="data % comment"),
            Column(
                "d2",
                sa.String(20),
                comment=r"""Comment types type speedily ' " \ '' Fun!""",
            ),
            Column("d3", sa.String(42), comment="Comment\nwith\rescapes"),
            schema=schema,
            comment=r"""the test % ' " \ table comment""",
        )
        Table(
            "no_constraints",
            metadata,
            Column("data", sa.String(20)),
            schema=schema,
            comment="no\nconstraints\rhas\fescaped\vcomment",
        )

        if testing.requires.cross_schema_fk_reflection.enabled:
            if schema is None:
                Table(
                    "local_table",
                    metadata,
                    Column("id", sa.Integer, primary_key=True),
                    Column("data", sa.String(20)),
                    Column(
                        "remote_id",
                        ForeignKey("%s.remote_table_2.id" % testing.config.test_schema),
                    ),
                    test_needs_fk=True,
                    schema=config.db.dialect.default_schema_name,
                )
            else:
                Table(
                    "remote_table",
                    metadata,
                    Column("id", sa.Integer, primary_key=True),
                    Column(
                        "local_id",
                        ForeignKey(
                            "%s.local_table.id" % config.db.dialect.default_schema_name
                        ),
                    ),
                    Column("data", sa.String(20)),
                    schema=schema,
                    test_needs_fk=True,
                )
                Table(
                    "remote_table_2",
                    metadata,
                    Column("id", sa.Integer, primary_key=True),
                    Column("data", sa.String(20)),
                    schema=schema,
                    test_needs_fk=True,
                )

        if testing.requires.index_reflection.enabled:
            sa.Index("users_t_idx", users.c.test1, users.c.test2, unique=True)
            sa.Index("users_all_idx", users.c.user_id, users.c.test2, users.c.test1)

            if not schema:
                # test_needs_fk is at the moment to force MySQL InnoDB
                noncol_idx_test_nopk = Table(
                    "noncol_idx_test_nopk",
                    metadata,
                    Column("q", sa.String(5)),
                    test_needs_fk=True,
                )

                noncol_idx_test_pk = Table(
                    "noncol_idx_test_pk",
                    metadata,
                    Column("id", sa.Integer, primary_key=True),
                    Column("q", sa.String(5)),
                    test_needs_fk=True,
                )

                if (
                    testing.requires.indexes_with_ascdesc.enabled
                    and testing.requires.reflect_indexes_with_ascdesc.enabled
                ):
                    sa.Index("noncol_idx_nopk", noncol_idx_test_nopk.c.q.desc())
                    sa.Index("noncol_idx_pk", noncol_idx_test_pk.c.q.desc())

        if testing.requires.view_column_reflection.enabled:
            cls.define_views(metadata, schema)
        if not schema and testing.requires.temp_table_reflection.enabled:
            cls.define_temp_tables(metadata)
|
@@ -0,0 +1,33 @@
|
|
1
|
+
"""The default test setup uses a self-referential foreign key. With our dialect this requires
|
2
|
+
`use_alter=True` and the fk constraint to be named. So we override this to make the test pass.
|
3
|
+
"""
|
4
|
+
|
5
|
+
from sqlalchemy.testing.suite import CTETest
|
6
|
+
|
7
|
+
from sqlalchemy.testing.schema import Column
|
8
|
+
from sqlalchemy.testing.schema import Table
|
9
|
+
from sqlalchemy import ForeignKey
|
10
|
+
from sqlalchemy import Integer
|
11
|
+
from sqlalchemy import String
|
12
|
+
|
13
|
+
|
14
|
+
class CTETest(CTETest):  # type: ignore
    """Rebuild the stock CTE fixture tables: with this dialect the
    self-referential foreign key must be named and created via ALTER
    (``use_alter=True``) for the test setup to succeed.
    """

    @classmethod
    def define_tables(cls, metadata):
        # Named, deferred FK so the self-reference is emitted as ALTER TABLE.
        self_referential_fk = ForeignKey(
            "some_table.id", name="fk_test", use_alter=True
        )

        Table(
            "some_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
            Column("parent_id", self_referential_fk),
        )

        Table(
            "some_other_table",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("data", String(50)),
            Column("parent_id", Integer),
        )
|
@@ -0,0 +1,13 @@
|
|
1
|
+
"""
|
2
|
+
The order of these imports is important. Test cases are imported first from SQLAlchemy,
|
3
|
+
then are overridden by our local skip markers in _regression, _unsupported, and _future.
|
4
|
+
"""
|
5
|
+
|
6
|
+
|
7
|
+
# type: ignore
|
8
|
+
# fmt: off
|
9
|
+
from sqlalchemy.testing.suite import *
|
10
|
+
from databricks.sqlalchemy.test._regression import *
|
11
|
+
from databricks.sqlalchemy.test._unsupported import *
|
12
|
+
from databricks.sqlalchemy.test._future import *
|
13
|
+
from databricks.sqlalchemy.test._extra import TinyIntegerTest, DateTimeTZTestCustom
|