databricks-sqlalchemy 2.0.2.tar.gz → 2.0.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/CHANGELOG.md +2 -2
  2. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/PKG-INFO +3 -2
  3. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/pyproject.toml +7 -3
  4. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/py.typed → databricks_sqlalchemy-2.0.4/src/databricks/__init__.py +0 -0
  5. databricks_sqlalchemy-2.0.4/src/databricks/sqlalchemy/py.typed +0 -0
  6. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/dependency_test/test_dependency.py +0 -22
  7. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_extra.py +0 -70
  8. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_future.py +0 -331
  9. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_regression.py +0 -311
  10. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_unsupported.py +0 -450
  11. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/conftest.py +0 -13
  12. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/overrides/_componentreflectiontest.py +0 -189
  13. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/overrides/_ctetest.py +0 -33
  14. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/test_suite.py +0 -13
  15. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/__init__.py +0 -5
  16. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/conftest.py +0 -44
  17. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/e2e/MOCK_DATA.xlsx +0 -0
  18. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/e2e/test_basic.py +0 -543
  19. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/test_ddl.py +0 -96
  20. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/test_parsing.py +0 -160
  21. databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test_local/test_types.py +0 -161
  22. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/LICENSE +0 -0
  23. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/README.md +0 -0
  24. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/__init__.py +0 -0
  25. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/_ddl.py +0 -0
  26. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/_parse.py +0 -0
  27. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/_types.py +0 -0
  28. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/base.py +0 -0
  29. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/pytest.ini +0 -0
  30. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/requirements.py +0 -0
  31. {databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/src/databricks/sqlalchemy/setup.cfg +0 -0
{databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/CHANGELOG.md
@@ -1,6 +1,6 @@
  # Release History

- # 2.0.2
+ # 2.0.4 (2025-01-27)

  - All the SQLAlchemy features from `databricks-sql-connector>=4.0.0` have been moved to this `databricks-sqlalchemy` library
- - Support for SQLAlchemy v2 dialect is provided
+ - Support for SQLAlchemy v2 dialect is provided
{databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: databricks-sqlalchemy
- Version: 2.0.2
+ Version: 2.0.4
  Summary: Databricks SQLAlchemy plugin for Python
  License: Apache-2.0
  Author: Databricks
@@ -13,7 +13,8 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: databricks_sql_connector (==4.0.0.b4)
+ Requires-Dist: databricks_sql_connector (>=4.0.0)
+ Requires-Dist: pyarrow (>=14.0.1,<17)
  Requires-Dist: sqlalchemy (>=2.0.21)
  Project-URL: Bug Tracker, https://github.com/databricks/databricks-sqlalchemy/issues
  Project-URL: Homepage, https://github.com/databricks/databricks-sqlalchemy
{databricks_sqlalchemy-2.0.2 → databricks_sqlalchemy-2.0.4}/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "databricks-sqlalchemy"
- version = "2.0.2"
+ version = "2.0.4"
  description = "Databricks SQLAlchemy plugin for Python"
  authors = ["Databricks <databricks-sql-connector-maintainers@databricks.com>"]
  license = "Apache-2.0"
@@ -10,7 +10,8 @@ include = ["CHANGELOG.md"]

  [tool.poetry.dependencies]
  python = "^3.8.0"
- databricks_sql_connector = { version = "==4.0.0.b4"}
+ databricks_sql_connector = { version = ">=4.0.0"}
+ pyarrow = { version = ">=14.0.1,<17"}
  sqlalchemy = { version = ">=2.0.21" }

  [tool.poetry.dev-dependencies]
@@ -39,5 +40,8 @@ markers = {"reviewed" = "Test case has been reviewed by Databricks"}
  minversion = "6.0"
  log_cli = "false"
  log_cli_level = "INFO"
- testpaths = ["tests", "src/databricks/sqlalchemy/test_local"]
+ testpaths = ["tests", "tests/test_local"]
  env_files = ["test.env"]
+
+ [tool.mypy]
+ ignore_missing_imports = true
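The dependency changes are the substance of this release: the exact pin on the beta connector (`==4.0.0.b4`) is relaxed to `>=4.0.0`, and `pyarrow` becomes an explicit, bounded dependency rather than one inherited from the connector. A minimal sketch (not part of the package) of checking an existing environment against the new constraints; it assumes the third-party `packaging` library is installed:

```python
# Hypothetical helper: check installed versions against the 2.0.4 constraints.
# Constraint strings are copied from the pyproject.toml diff above.
from importlib.metadata import version

from packaging.specifiers import SpecifierSet

CONSTRAINTS = {
    "databricks-sql-connector": SpecifierSet(">=4.0.0"),
    "pyarrow": SpecifierSet(">=14.0.1,<17"),
    "sqlalchemy": SpecifierSet(">=2.0.21"),
}

for dist, spec in CONSTRAINTS.items():
    installed = version(dist)  # raises PackageNotFoundError if not installed
    status = "ok" if installed in spec else f"violates {spec}"
    print(f"{dist} {installed}: {status}")
```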
databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/dependency_test/test_dependency.py (deleted)
@@ -1,22 +0,0 @@
- import pytest
-
- class DatabricksImportError(Exception):
-     pass
-
- class TestLibraryDependencySuite:
-
-     @pytest.mark.skipif(pytest.importorskip("databricks_sql_connector"), reason="databricks_sql_connector is present")
-     def test_sql_core(self):
-         with pytest.raises(DatabricksImportError, match="databricks_sql_connector module is not available"):
-             try:
-                 import databricks
-             except ImportError:
-                 raise DatabricksImportError("databricks_sql_connector_core module is not available")
-
-     @pytest.mark.skipif(pytest.importorskip("sqlalchemy"), reason="SQLAlchemy is present")
-     def test_sqlalchemy(self):
-         with pytest.raises(DatabricksImportError, match="sqlalchemy module is not available"):
-             try:
-                 import sqlalchemy
-             except ImportError:
-                 raise DatabricksImportError("sqlalchemy module is not available")
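Incidentally, the removed test's skip logic was inverted: `pytest.importorskip` skips (or returns the imported module) at evaluation time, so it never works as a `skipif` condition in the way the `reason` strings suggest. For reference, a minimal sketch (not from the package; test name invented) of the conventional skip-when-present pattern using `importlib.util.find_spec`:

```python
# Sketch of a skip-when-present guard for a dependency-absence test.
import importlib.util

import pytest


@pytest.mark.skipif(
    importlib.util.find_spec("sqlalchemy") is not None,
    reason="sqlalchemy is present",
)
def test_sqlalchemy_is_absent():
    # Runs only in environments where sqlalchemy is not installed.
    with pytest.raises(ImportError):
        import sqlalchemy  # noqa: F401
```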
databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_extra.py (deleted)
@@ -1,70 +0,0 @@
- """Additional tests authored by Databricks that use SQLAlchemy's test fixtures
- """
-
- import datetime
-
- from sqlalchemy.testing.suite.test_types import (
-     _LiteralRoundTripFixture,
-     fixtures,
-     testing,
-     eq_,
-     select,
-     Table,
-     Column,
-     config,
-     _DateFixture,
-     literal,
- )
- from databricks.sqlalchemy import TINYINT, TIMESTAMP
-
-
- class TinyIntegerTest(_LiteralRoundTripFixture, fixtures.TestBase):
-     __backend__ = True
-
-     def test_literal(self, literal_round_trip):
-         literal_round_trip(TINYINT, [5], [5])
-
-     @testing.fixture
-     def integer_round_trip(self, metadata, connection):
-         def run(datatype, data):
-             int_table = Table(
-                 "tiny_integer_table",
-                 metadata,
-                 Column(
-                     "id",
-                     TINYINT,
-                     primary_key=True,
-                     test_needs_autoincrement=False,
-                 ),
-                 Column("tiny_integer_data", datatype),
-             )
-
-             metadata.create_all(config.db)
-
-             connection.execute(int_table.insert(), {"id": 1, "integer_data": data})
-
-             row = connection.execute(select(int_table.c.integer_data)).first()
-
-             eq_(row, (data,))
-
-             assert isinstance(row[0], int)
-
-         return run
-
-
- class DateTimeTZTestCustom(_DateFixture, fixtures.TablesTest):
-     """This test confirms that when a user uses the TIMESTAMP
-     type to store a datetime object, it retains its timezone
-     """
-
-     __backend__ = True
-     datatype = TIMESTAMP
-     data = datetime.datetime(2012, 10, 15, 12, 57, 18, tzinfo=datetime.timezone.utc)
-
-     @testing.requires.datetime_implicit_bound
-     def test_select_direct(self, connection):
-
-         # We need to pass the TIMESTAMP type to the literal function
-         # so that the value is processed correctly.
-         result = connection.scalar(select(literal(self.data, TIMESTAMP)))
-         eq_(result, self.data)
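The removed `_extra.py` was the dialect-specific coverage for `TINYINT` and for `TIMESTAMP`'s timezone retention. A standalone sketch of exercising those same types outside the SQLAlchemy test harness; the connection URL placeholders and table name are invented, and it assumes a reachable SQL warehouse:

```python
# Sketch only: fill in the <...> placeholders with real workspace values.
import datetime

from sqlalchemy import Column, MetaData, Table, create_engine, select

from databricks.sqlalchemy import TINYINT, TIMESTAMP

engine = create_engine(
    "databricks://token:<access-token>@<server-hostname>?"
    "http_path=<http-path>&catalog=<catalog>&schema=<schema>"
)
metadata = MetaData()

events = Table(
    "events",
    metadata,
    Column("id", TINYINT, primary_key=True),
    # Per DateTimeTZTestCustom above, TIMESTAMP round-trips tz-aware datetimes.
    Column("occurred_at", TIMESTAMP),
)

metadata.create_all(engine)
with engine.begin() as conn:
    conn.execute(
        events.insert(),
        {
            "id": 1,
            "occurred_at": datetime.datetime(
                2012, 10, 15, 12, 57, 18, tzinfo=datetime.timezone.utc
            ),
        },
    )
    print(conn.execute(select(events.c.occurred_at)).scalar_one())
```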
databricks_sqlalchemy-2.0.2/src/databricks/sqlalchemy/test/_future.py (deleted)
@@ -1,331 +0,0 @@
- # type: ignore
-
- from enum import Enum
-
- import pytest
- from databricks.sqlalchemy.test._regression import (
-     ExpandingBoundInTest,
-     IdentityAutoincrementTest,
-     LikeFunctionsTest,
-     NormalizedNameTest,
- )
- from databricks.sqlalchemy.test._unsupported import (
-     ComponentReflectionTest,
-     ComponentReflectionTestExtra,
-     CTETest,
-     InsertBehaviorTest,
- )
- from sqlalchemy.testing.suite import (
-     ArrayTest,
-     BinaryTest,
-     BizarroCharacterFKResolutionTest,
-     CollateTest,
-     ComputedColumnTest,
-     ComputedReflectionTest,
-     DifficultParametersTest,
-     FutureWeCanSetDefaultSchemaWEventsTest,
-     IdentityColumnTest,
-     IdentityReflectionTest,
-     JSONLegacyStringCastIndexTest,
-     JSONTest,
-     NativeUUIDTest,
-     QuotedNameArgumentTest,
-     RowCountTest,
-     SimpleUpdateDeleteTest,
-     WeCanSetDefaultSchemaWEventsTest,
- )
-
-
- class FutureFeature(Enum):
-     ARRAY = "ARRAY column type handling"
-     BINARY = "BINARY column type handling"
-     CHECK = "CHECK constraint handling"
-     COLLATE = "COLLATE DDL generation"
-     CTE_FEAT = "required CTE features"
-     EMPTY_INSERT = "empty INSERT support"
-     FK_OPTS = "foreign key option checking"
-     GENERATED_COLUMNS = "Delta computed / generated columns support"
-     IDENTITY = "identity reflection"
-     JSON = "JSON column type handling"
-     MULTI_PK = "get_multi_pk_constraint method"
-     PROVISION = "event-driven engine configuration"
-     REGEXP = "_visit_regexp"
-     SANE_ROWCOUNT = "sane_rowcount support"
-     TBL_OPTS = "get_table_options method"
-     TEST_DESIGN = "required test-fixture overrides"
-     TUPLE_LITERAL = "tuple-like IN markers completely"
-     UUID = "native Uuid() type"
-     VIEW_DEF = "get_view_definition method"
-
-
- def render_future_feature(rsn: FutureFeature, extra=False) -> str:
-     postfix = " More detail in _future.py" if extra else ""
-     return f"[FUTURE][{rsn.name}]: This dialect doesn't implement {rsn.value}.{postfix}"
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.BINARY))
- class BinaryTest(BinaryTest):
-     """Databricks doesn't support binding of BINARY type values. When DBR supports this, we can implement
-     in this dialect.
-     """
-
-     pass
-
-
- class ExpandingBoundInTest(ExpandingBoundInTest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-     def test_empty_heterogeneous_tuples_bindparam(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-     def test_empty_heterogeneous_tuples_direct(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-     def test_empty_homogeneous_tuples_bindparam(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.TUPLE_LITERAL))
-     def test_empty_homogeneous_tuples_direct(self):
-         pass
-
-
- class NormalizedNameTest(NormalizedNameTest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
-     def test_get_table_names(self):
-         """I'm not clear how this test can ever pass given that it's assertion looks like this:
-
-         ```python
-         eq_(tablenames[0].upper(), tablenames[0].lower())
-         eq_(tablenames[1].upper(), tablenames[1].lower())
-         ```
-
-         It's forcibly calling .upper() and .lower() on the same string and expecting them to be equal.
-         """
-         pass
-
-
- class CTETest(CTETest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.CTE_FEAT, True))
-     def test_delete_from_round_trip(self):
-         """Databricks dialect doesn't implement multiple-table criteria within DELETE"""
-         pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
- class IdentityColumnTest(IdentityColumnTest):
-     """Identity works. Test needs rewrite for Databricks. See comments in test_suite.py
-
-     The setup for these tests tries to create a table with a DELTA IDENTITY column but has two problems:
-     1. It uses an Integer() type for the column. Whereas DELTA IDENTITY columns must be BIGINT.
-     2. It tries to set the start == 42, which Databricks doesn't support
-
-     I can get the tests to _run_ by patching the table fixture to use BigInteger(). But it asserts that the
-     identity of two rows are 42 and 43, which is not possible since they will be rows 1 and 2 instead.
-
-     I'm satisified through manual testing that our implementation of visit_identity_column works but a better test is needed.
-     """
-
-     pass
-
-
- class IdentityAutoincrementTest(IdentityAutoincrementTest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
-     def test_autoincrement_with_identity(self):
-         """This test has the same issue as IdentityColumnTest.test_select_all in that it creates a table with identity
-         using an Integer() rather than a BigInteger(). If I override this behaviour to use a BigInteger() instead, the
-         test passes.
-         """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN))
- class BizarroCharacterFKResolutionTest(BizarroCharacterFKResolutionTest):
-     """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures,
-     we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of
-     these tests is not an acceptance criteria for our dialect.
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN))
- class DifficultParametersTest(DifficultParametersTest):
-     """Some of the combinations in this test pass. Others fail. Given the esoteric nature of these failures,
-     we have opted to defer implementing fixes to a later time, guided by customer feedback. Passage of
-     these tests is not an acceptance criteria for our dialect.
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.IDENTITY, True))
- class IdentityReflectionTest(IdentityReflectionTest):
-     """It's not clear _how_ to implement this for SQLAlchemy. Columns created with GENERATED ALWAYS AS IDENTITY
-     are not specially demarked in the output of TGetColumnsResponse or DESCRIBE TABLE EXTENDED.
-
-     We could theoretically parse this from the contents of `SHOW CREATE TABLE` but that feels like a hack.
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.JSON))
- class JSONTest(JSONTest):
-     """Databricks supports JSON path expressions in queries it's just not implemented in this dialect."""
-
-     pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.JSON))
- class JSONLegacyStringCastIndexTest(JSONLegacyStringCastIndexTest):
-     """Same comment applies as JSONTest"""
-
-     pass
-
-
- class LikeFunctionsTest(LikeFunctionsTest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP))
-     def test_not_regexp_match(self):
-         """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically."""
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.REGEXP))
-     def test_regexp_match(self):
-         """The defaul dialect doesn't implement _visit_regexp methods so we don't get them automatically."""
-         pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.COLLATE))
- class CollateTest(CollateTest):
-     """This is supported in Databricks. Not implemented here."""
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.UUID, True))
- class NativeUUIDTest(NativeUUIDTest):
-     """Type implementation will be straightforward. Since Databricks doesn't have a native UUID type we can use
-     a STRING field, create a custom TypeDecorator for sqlalchemy.types.Uuid and add it to the dialect's colspecs.
-
-     Then mark requirements.uuid_data_type as open() so this test can run.
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT))
- class RowCountTest(RowCountTest):
-     pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.SANE_ROWCOUNT))
- class SimpleUpdateDeleteTest(SimpleUpdateDeleteTest):
-     pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True))
- class WeCanSetDefaultSchemaWEventsTest(WeCanSetDefaultSchemaWEventsTest):
-     """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema
-     or, in this case, changing the default schema for the connection after it's been built. This would override
-     the schema defined in the sqlalchemy connection string. This support is possible but is not implemented
-     in the dialect. Deferred for now.
-     """
-
-     pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.PROVISION, True))
- class FutureWeCanSetDefaultSchemaWEventsTest(FutureWeCanSetDefaultSchemaWEventsTest):
-     """provision.py allows us to define event listeners that emit DDL for things like setting up a test schema
-     or, in this case, changing the default schema for the connection after it's been built. This would override
-     the schema defined in the sqlalchemy connection string. This support is possible but is not implemented
-     in the dialect. Deferred for now.
-     """
-
-     pass
-
-
- class ComponentReflectionTest(ComponentReflectionTest):
-     @pytest.mark.skip(reason=render_future_feature(FutureFeature.TBL_OPTS, True))
-     def test_multi_get_table_options_tables(self):
-         """It's not clear what the expected ouput from this method would even _be_. Requires research."""
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF))
-     def test_get_view_definition(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.VIEW_DEF))
-     def test_get_view_definition_does_not_exist(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.MULTI_PK))
-     def test_get_multi_pk_constraint(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.CHECK))
-     def test_get_multi_check_constraints(self):
-         pass
-
-
- class ComponentReflectionTestExtra(ComponentReflectionTestExtra):
-     @pytest.mark.skip(render_future_feature(FutureFeature.CHECK))
-     def test_get_check_constraints(self):
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.FK_OPTS))
-     def test_get_foreign_key_options(self):
-         """It's not clear from the test code what the expected output is here. Further research required."""
-         pass
-
-
- class InsertBehaviorTest(InsertBehaviorTest):
-     @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True))
-     def test_empty_insert(self):
-         """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need
-         to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation
-         the request fails with a syntax error
-         """
-         pass
-
-     @pytest.mark.skip(render_future_feature(FutureFeature.EMPTY_INSERT, True))
-     def test_empty_insert_multiple(self):
-         """Empty inserts are possible using DEFAULT VALUES on Databricks. To implement it, we need
-         to hook into the SQLCompiler to render a no-op column list. With SQLAlchemy's default implementation
-         the request fails with a syntax error
-         """
-         pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.ARRAY))
- class ArrayTest(ArrayTest):
-     """While Databricks supports ARRAY types, DBR cannot handle bound parameters of this type.
-     This makes them unusable to SQLAlchemy without some workaround. Potentially we could inline
-     the values of these parameters (which risks sql injection).
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(render_future_feature(FutureFeature.TEST_DESIGN, True))
- class QuotedNameArgumentTest(QuotedNameArgumentTest):
-     """These tests are challenging. The whole test setup depends on a table with a name like `quote ' one`
-     which will never work on Databricks because table names can't contains spaces. But QuotedNamedArgumentTest
-     also checks the behaviour of DDL identifier preparation process. We need to override some of IdentifierPreparer
-     methods because these are the ultimate control for whether or not CHECK and UNIQUE constraints are emitted.
-     """
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS))
- class ComputedColumnTest(ComputedColumnTest):
-     pass
-
-
- @pytest.mark.reviewed
- @pytest.mark.skip(reason=render_future_feature(FutureFeature.GENERATED_COLUMNS))
- class ComputedReflectionTest(ComputedReflectionTest):
-     pass
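Every skip marker in the removed `_future.py` routes through `render_future_feature`, which keeps the skip reasons greppable by feature tag. Evaluating it directly, the outputs follow mechanically from the enum and f-string above:

```python
print(render_future_feature(FutureFeature.JSON))
# [FUTURE][JSON]: This dialect doesn't implement JSON column type handling.

print(render_future_feature(FutureFeature.UUID, extra=True))
# [FUTURE][UUID]: This dialect doesn't implement native Uuid() type. More detail in _future.py
```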