iceaxe 0.7.1__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of iceaxe might be problematic. Click here for more details.
- iceaxe/__init__.py +20 -0
- iceaxe/__tests__/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/test_bulk_insert.py +45 -0
- iceaxe/__tests__/benchmarks/test_select.py +114 -0
- iceaxe/__tests__/conf_models.py +133 -0
- iceaxe/__tests__/conftest.py +204 -0
- iceaxe/__tests__/docker_helpers.py +208 -0
- iceaxe/__tests__/helpers.py +268 -0
- iceaxe/__tests__/migrations/__init__.py +0 -0
- iceaxe/__tests__/migrations/conftest.py +36 -0
- iceaxe/__tests__/migrations/test_action_sorter.py +237 -0
- iceaxe/__tests__/migrations/test_generator.py +140 -0
- iceaxe/__tests__/migrations/test_generics.py +91 -0
- iceaxe/__tests__/mountaineer/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/test_core.py +76 -0
- iceaxe/__tests__/schemas/__init__.py +0 -0
- iceaxe/__tests__/schemas/test_actions.py +1264 -0
- iceaxe/__tests__/schemas/test_cli.py +25 -0
- iceaxe/__tests__/schemas/test_db_memory_serializer.py +1525 -0
- iceaxe/__tests__/schemas/test_db_serializer.py +398 -0
- iceaxe/__tests__/schemas/test_db_stubs.py +190 -0
- iceaxe/__tests__/test_alias.py +83 -0
- iceaxe/__tests__/test_base.py +52 -0
- iceaxe/__tests__/test_comparison.py +383 -0
- iceaxe/__tests__/test_field.py +11 -0
- iceaxe/__tests__/test_helpers.py +9 -0
- iceaxe/__tests__/test_modifications.py +151 -0
- iceaxe/__tests__/test_queries.py +605 -0
- iceaxe/__tests__/test_queries_str.py +173 -0
- iceaxe/__tests__/test_session.py +1511 -0
- iceaxe/__tests__/test_text_search.py +287 -0
- iceaxe/alias_values.py +67 -0
- iceaxe/base.py +350 -0
- iceaxe/comparison.py +560 -0
- iceaxe/field.py +250 -0
- iceaxe/functions.py +906 -0
- iceaxe/generics.py +140 -0
- iceaxe/io.py +107 -0
- iceaxe/logging.py +91 -0
- iceaxe/migrations/__init__.py +5 -0
- iceaxe/migrations/action_sorter.py +98 -0
- iceaxe/migrations/cli.py +228 -0
- iceaxe/migrations/client_io.py +62 -0
- iceaxe/migrations/generator.py +404 -0
- iceaxe/migrations/migration.py +86 -0
- iceaxe/migrations/migrator.py +101 -0
- iceaxe/modifications.py +176 -0
- iceaxe/mountaineer/__init__.py +10 -0
- iceaxe/mountaineer/cli.py +74 -0
- iceaxe/mountaineer/config.py +46 -0
- iceaxe/mountaineer/dependencies/__init__.py +6 -0
- iceaxe/mountaineer/dependencies/core.py +67 -0
- iceaxe/postgres.py +133 -0
- iceaxe/py.typed +0 -0
- iceaxe/queries.py +1455 -0
- iceaxe/queries_str.py +294 -0
- iceaxe/schemas/__init__.py +0 -0
- iceaxe/schemas/actions.py +864 -0
- iceaxe/schemas/cli.py +30 -0
- iceaxe/schemas/db_memory_serializer.py +705 -0
- iceaxe/schemas/db_serializer.py +346 -0
- iceaxe/schemas/db_stubs.py +525 -0
- iceaxe/session.py +860 -0
- iceaxe/session_optimized.c +12035 -0
- iceaxe/session_optimized.cpython-313-darwin.so +0 -0
- iceaxe/session_optimized.pyx +212 -0
- iceaxe/sql_types.py +148 -0
- iceaxe/typing.py +73 -0
- iceaxe-0.7.1.dist-info/METADATA +261 -0
- iceaxe-0.7.1.dist-info/RECORD +75 -0
- iceaxe-0.7.1.dist-info/WHEEL +6 -0
- iceaxe-0.7.1.dist-info/licenses/LICENSE +21 -0
- iceaxe-0.7.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,398 @@
|
|
|
1
|
+
from enum import Enum, IntEnum, StrEnum
|
|
2
|
+
|
|
3
|
+
import pytest
|
|
4
|
+
|
|
5
|
+
from iceaxe.schemas.actions import (
|
|
6
|
+
ColumnType,
|
|
7
|
+
ConstraintType,
|
|
8
|
+
ForeignKeyConstraint,
|
|
9
|
+
)
|
|
10
|
+
from iceaxe.schemas.db_serializer import DatabaseSerializer
|
|
11
|
+
from iceaxe.schemas.db_stubs import (
|
|
12
|
+
DBColumn,
|
|
13
|
+
DBColumnPointer,
|
|
14
|
+
DBConstraint,
|
|
15
|
+
DBObject,
|
|
16
|
+
DBObjectPointer,
|
|
17
|
+
DBTable,
|
|
18
|
+
DBType,
|
|
19
|
+
DBTypePointer,
|
|
20
|
+
)
|
|
21
|
+
from iceaxe.session import DBConnection
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def compare_db_objects(
    calculated: list[tuple[DBObject, list[DBObject | DBObjectPointer]]],
    expected: list[tuple[DBObject, list[DBObject | DBObjectPointer]]],
):
    """
    Assert that two lists of (DBObject, dependencies) pairs contain the same
    entries, ignoring ordering.

    The order doesn't actually matter for downstream uses, but we can't do a
    simple equality check with a set because the dependencies list is
    un-hashable — so we canonicalize both sides by sorting on the primary
    object's string representation and compare the sorted lists.
    """

    def by_representation(pair: tuple[DBObject, list[DBObject | DBObjectPointer]]):
        # Sort key: the stable textual identity of the primary DBObject.
        return pair[0].representation()

    normalized_calculated = sorted(calculated, key=by_representation)
    normalized_expected = sorted(expected, key=by_representation)
    assert normalized_calculated == normalized_expected
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class ValueEnumStandard(Enum):
    # Plain Enum with a single member; exercises serialization of a standard
    # (non-str, non-int) enum into a PostgreSQL enum type.
    A = "A"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class ValueEnumStr(StrEnum):
    # StrEnum variant of the single-member test enum; members compare equal
    # to their string values.
    A = "A"
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class ValueEnumInt(IntEnum):
    # IntEnum variant of the single-member test enum; members compare equal
    # to their integer values.
    A = 1
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "sql_text, expected_db_objects",
    [
        # Enum
        (
            """
            CREATE TYPE valueenumstandard AS ENUM ('A');
            CREATE TABLE exampledbmodel (
                id SERIAL PRIMARY KEY,
                standard_enum valueenumstandard NOT NULL
            );
            """,
            [
                (
                    DBType(
                        name="valueenumstandard",
                        values=frozenset({"A"}),
                        reference_columns=frozenset(
                            {("exampledbmodel", "standard_enum")}
                        ),
                    ),
                    [
                        DBTable(table_name="exampledbmodel"),
                    ],
                ),
                (
                    DBColumn(
                        table_name="exampledbmodel",
                        column_name="standard_enum",
                        column_type=DBTypePointer(
                            name="valueenumstandard",
                        ),
                        column_is_list=False,
                        nullable=False,
                    ),
                    [
                        DBType(
                            name="valueenumstandard",
                            values=frozenset({"A"}),
                            reference_columns=frozenset(
                                {("exampledbmodel", "standard_enum")}
                            ),
                        ),
                        DBTable(table_name="exampledbmodel"),
                    ],
                ),
            ],
        ),
        # Nullable type
        (
            """
            CREATE TABLE exampledbmodel (
                id SERIAL PRIMARY KEY,
                was_nullable VARCHAR
            );
            """,
            [
                (
                    DBColumn(
                        table_name="exampledbmodel",
                        column_name="was_nullable",
                        column_type=ColumnType.VARCHAR,
                        column_is_list=False,
                        nullable=True,
                    ),
                    [
                        DBTable(table_name="exampledbmodel"),
                    ],
                ),
            ],
        ),
        # List types
        (
            """
            CREATE TABLE exampledbmodel (
                id SERIAL PRIMARY KEY,
                array_list VARCHAR[] NOT NULL
            );
            """,
            [
                (
                    DBColumn(
                        table_name="exampledbmodel",
                        column_name="array_list",
                        column_type=ColumnType.VARCHAR,
                        column_is_list=True,
                        nullable=False,
                    ),
                    [
                        DBTable(table_name="exampledbmodel"),
                    ],
                )
            ],
        ),
        # Test PostgreSQL's storage format for timestamp without timezone
        (
            """
            CREATE TABLE exampledbmodel (
                id SERIAL PRIMARY KEY,
                created_at TIMESTAMP NOT NULL
            );
            """,
            [
                (
                    DBColumn(
                        table_name="exampledbmodel",
                        column_name="created_at",
                        column_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
                        column_is_list=False,
                        nullable=False,
                    ),
                    [
                        DBTable(table_name="exampledbmodel"),
                    ],
                )
            ],
        ),
        # Test PostgreSQL's storage format for timestamp with timezone
        (
            """
            CREATE TABLE exampledbmodel (
                id SERIAL PRIMARY KEY,
                created_at TIMESTAMPTZ NOT NULL
            );
            """,
            [
                (
                    DBColumn(
                        table_name="exampledbmodel",
                        column_name="created_at",
                        column_type=ColumnType.TIMESTAMP_WITH_TIME_ZONE,
                        column_is_list=False,
                        nullable=False,
                    ),
                    [
                        DBTable(table_name="exampledbmodel"),
                    ],
                )
            ],
        ),
    ],
)
async def test_simple_db_serializer(
    sql_text: str,
    expected_db_objects: list[tuple[DBObject, list[DBObject | DBObjectPointer]]],
    db_connection: DBConnection,
    clear_all_database_objects,
):
    """
    Round-trip check: execute raw DDL against a live database, then verify
    that DatabaseSerializer.get_objects yields exactly the expected DBObject
    graph (base table/pk objects plus the per-case extras).

    :param sql_text: DDL executed against the test database.
    :param expected_db_objects: case-specific objects expected beyond the
        shared base table / id column / primary-key constraint.
    :param db_connection: live connection fixture (from conftest).
    :param clear_all_database_objects: fixture that resets DB state between runs.
    """
    # Create this new database
    await db_connection.conn.execute(sql_text)

    db_serializer = DatabaseSerializer()
    db_objects = []
    async for values in db_serializer.get_objects(db_connection):
        db_objects.append(values)

    # Table and primary key are created for each model
    base_db_objects: list[tuple[DBObject, list[DBObject | DBObjectPointer]]] = [
        (
            DBTable(table_name="exampledbmodel"),
            [],
        ),
        (
            DBColumn(
                table_name="exampledbmodel",
                column_name="id",
                column_type=ColumnType.INTEGER,
                column_is_list=False,
                nullable=False,
            ),
            [
                DBTable(table_name="exampledbmodel"),
            ],
        ),
        (
            DBConstraint(
                table_name="exampledbmodel",
                constraint_name="exampledbmodel_pkey",
                columns=frozenset({"id"}),
                constraint_type=ConstraintType.PRIMARY_KEY,
                foreign_key_constraint=None,
            ),
            [
                DBColumnPointer(table_name="exampledbmodel", column_name="id"),
                DBTable(table_name="exampledbmodel"),
            ],
        ),
    ]

    compare_db_objects(db_objects, base_db_objects + expected_db_objects)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
@pytest.mark.asyncio
async def test_db_serializer_foreign_key(
    db_connection: DBConnection,
    clear_all_database_objects,
):
    """
    Verify that a REFERENCES clause is serialized into a DBConstraint of type
    FOREIGN_KEY (with its ForeignKeyConstraint payload) alongside the regular
    table/column/primary-key objects for both tables.
    """
    await db_connection.conn.execute(
        """
        CREATE TABLE foreignmodel (
            id SERIAL PRIMARY KEY
        );
        CREATE TABLE exampledbmodel (
            id SERIAL PRIMARY KEY,
            foreign_key_id INTEGER REFERENCES foreignmodel(id) NOT NULL
        );
        """
    )

    db_serializer = DatabaseSerializer()
    db_objects = []
    async for values in db_serializer.get_objects(db_connection):
        db_objects.append(values)

    expected_db_objects: list[tuple[DBObject, list[DBObject | DBObjectPointer]]] = [
        # Basic ExampleDBModel table
        (
            DBTable(table_name="exampledbmodel"),
            [],
        ),
        (
            DBColumn(
                table_name="exampledbmodel",
                column_name="foreign_key_id",
                column_type=ColumnType.INTEGER,
                column_is_list=False,
                nullable=False,
            ),
            [
                DBTable(table_name="exampledbmodel"),
            ],
        ),
        (
            DBColumn(
                table_name="exampledbmodel",
                column_name="id",
                column_type=ColumnType.INTEGER,
                column_is_list=False,
                nullable=False,
            ),
            [
                DBTable(table_name="exampledbmodel"),
            ],
        ),
        # ForeignModel table
        (
            DBTable(table_name="foreignmodel"),
            [],
        ),
        (
            DBConstraint(
                table_name="foreignmodel",
                constraint_name="foreignmodel_pkey",
                columns=frozenset({"id"}),
                constraint_type=ConstraintType.PRIMARY_KEY,
                foreign_key_constraint=None,
            ),
            [
                DBColumnPointer(table_name="foreignmodel", column_name="id"),
                DBTable(table_name="foreignmodel"),
            ],
        ),
        (
            DBColumn(
                table_name="foreignmodel",
                column_name="id",
                column_type=ColumnType.INTEGER,
                column_is_list=False,
                nullable=False,
            ),
            [
                DBTable(table_name="foreignmodel"),
            ],
        ),
        # Foreign key constraint to link ExampleDBModel to ForeignModel
        (
            DBConstraint(
                table_name="exampledbmodel",
                constraint_name="exampledbmodel_foreign_key_id_fkey",
                columns=frozenset({"foreign_key_id"}),
                constraint_type=ConstraintType.FOREIGN_KEY,
                foreign_key_constraint=ForeignKeyConstraint(
                    target_table="foreignmodel", target_columns=frozenset({"id"})
                ),
            ),
            [
                DBColumnPointer(
                    table_name="exampledbmodel", column_name="foreign_key_id"
                ),
                DBTable(table_name="exampledbmodel"),
            ],
        ),
        (
            DBConstraint(
                table_name="exampledbmodel",
                constraint_name="exampledbmodel_pkey",
                columns=frozenset({"id"}),
                constraint_type=ConstraintType.PRIMARY_KEY,
                foreign_key_constraint=None,
            ),
            [
                DBColumnPointer(table_name="exampledbmodel", column_name="id"),
                DBTable(table_name="exampledbmodel"),
            ],
        ),
    ]

    compare_db_objects(db_objects, expected_db_objects)
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
@pytest.mark.asyncio
async def test_db_serializer_foreign_key_actions(
    db_connection: DBConnection,
    clear_all_database_objects,
):
    """
    Test that foreign key ON UPDATE/ON DELETE actions are correctly deserialized from the database.
    """
    await db_connection.conn.execute(
        """
        CREATE TABLE foreignmodel (
            id SERIAL PRIMARY KEY
        );
        CREATE TABLE exampledbmodel (
            id SERIAL PRIMARY KEY,
            foreign_key_id INTEGER REFERENCES foreignmodel(id) ON DELETE CASCADE ON UPDATE CASCADE NOT NULL
        );
        """
    )

    db_serializer = DatabaseSerializer()
    db_objects = []
    async for values in db_serializer.get_objects(db_connection):
        db_objects.append(values)

    # Find the foreign key constraint
    # NOTE: next() without a default — raises StopIteration if the serializer
    # yields no FOREIGN_KEY constraint, which doubles as the failure signal here.
    fk_constraint = next(
        obj
        for obj, _ in db_objects
        if isinstance(obj, DBConstraint)
        and obj.constraint_type == ConstraintType.FOREIGN_KEY
    )
    assert fk_constraint.foreign_key_constraint is not None
    assert fk_constraint.foreign_key_constraint.target_table == "foreignmodel"
    assert fk_constraint.foreign_key_constraint.target_columns == frozenset({"id"})
    assert fk_constraint.foreign_key_constraint.on_delete == "CASCADE"
    assert fk_constraint.foreign_key_constraint.on_update == "CASCADE"
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
|
|
3
|
+
from iceaxe.schemas.db_stubs import ConstraintPointerInfo, DBObjectPointer, DBType
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class MockDBObjectPointer(DBObjectPointer):
    """Mock implementation of DBObjectPointer for testing parser methods."""

    # The raw representation string the pointer should report; tests inject
    # well-formed and malformed values here to exercise the parsing helpers.
    representation_str: str

    def representation(self) -> str:
        """Return the injected representation string verbatim."""
        return self.representation_str
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@pytest.mark.parametrize(
    "representation_str,expected_result",
    [
        # Valid constraint pointer formats
        (
            "users.['id'].PRIMARY KEY",
            ConstraintPointerInfo("users", ["id"], "PRIMARY KEY"),
        ),
        (
            "orders.['user_id', 'product_id'].UNIQUE",
            ConstraintPointerInfo("orders", ["user_id", "product_id"], "UNIQUE"),
        ),
        (
            "products.['name'].INDEX",
            ConstraintPointerInfo("products", ["name"], "INDEX"),
        ),
        (
            "table_name.['col1', 'col2', 'col3'].FOREIGN KEY",
            ConstraintPointerInfo(
                "table_name", ["col1", "col2", "col3"], "FOREIGN KEY"
            ),
        ),
        # Single quotes
        ("users.['email'].UNIQUE", ConstraintPointerInfo("users", ["email"], "UNIQUE")),
        # Double quotes
        ('users.["email"].UNIQUE', ConstraintPointerInfo("users", ["email"], "UNIQUE")),
        # Mixed quotes
        (
            "users.[\"col1\", 'col2'].UNIQUE",
            ConstraintPointerInfo("users", ["col1", "col2"], "UNIQUE"),
        ),
        # Extra whitespace
        (
            "users.[ 'col1' , 'col2' ].UNIQUE",
            ConstraintPointerInfo("users", ["col1", "col2"], "UNIQUE"),
        ),
        # Empty column list
        ("users.[].CHECK", ConstraintPointerInfo("users", [], "CHECK")),
        # Schema-qualified table names (dots in table names are valid when representing schema.table)
        (
            "public.users.['column'].PRIMARY KEY",
            ConstraintPointerInfo("public.users", ["column"], "PRIMARY KEY"),
        ),
        # Complex constraint types
        (
            "users.['id'].PRIMARY KEY AUTOINCREMENT",
            ConstraintPointerInfo("users", ["id"], "PRIMARY KEY AUTOINCREMENT"),
        ),
        # Table names with underscores and numbers (valid PostgreSQL identifiers)
        (
            "user_table_2.['id'].PRIMARY KEY",
            ConstraintPointerInfo("user_table_2", ["id"], "PRIMARY KEY"),
        ),
        # Column names with underscores and numbers
        (
            "users.['user_id_2', 'created_at'].UNIQUE",
            ConstraintPointerInfo("users", ["user_id_2", "created_at"], "UNIQUE"),
        ),
        # Invalid formats that should return None
        ("users.column.UNIQUE", None),  # Missing brackets
        ("users.['column']", None),  # Missing constraint type
        ("['column'].UNIQUE", None),  # Missing table name
        ("users", None),  # Just table name
        ("", None),  # Empty string
        ("users.column", None),  # Simple table.column format
        ("invalid_format", None),  # Random string
        # Malformed bracket syntax
        ("users.[column].UNIQUE", None),  # Missing quotes in brackets
        ("users.['column.UNIQUE", None),  # Unclosed bracket
        ("users.column'].UNIQUE", None),  # Missing opening bracket
    ],
)
def test_parse_constraint_pointer(
    representation_str: str, expected_result: ConstraintPointerInfo | None
):
    """Test parsing of constraint pointer representations."""
    pointer = MockDBObjectPointer(representation_str=representation_str)
    result = pointer.parse_constraint_pointer()

    # A None expectation means the representation is malformed and the parser
    # is expected to reject it rather than raise.
    if expected_result is None:
        assert result is None
    else:
        assert result is not None
        assert result.table_name == expected_result.table_name
        assert result.column_names == expected_result.column_names
        assert result.constraint_type == expected_result.constraint_type
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@pytest.mark.parametrize(
    "representation_str,expected_table_name",
    [
        # Constraint pointer formats
        ("users.['id'].PRIMARY KEY", "users"),
        ("orders.['user_id', 'product_id'].UNIQUE", "orders"),
        ("public.users.['column'].INDEX", "public.users"),
        # Simple table.column formats
        ("users.email", "users"),
        ("products.name", "products"),
        ("public.users.column", "public.users"),  # Schema.table.column format
        # Edge cases
        ("table_only", "table_only"),
        ("", None),  # Empty string should return None
        ("users.['id'].PRIMARY KEY", "users"),  # Constraint format takes precedence
        # Complex table names with underscores and numbers
        ("user_table_123.column", "user_table_123"),
        ("schema_1.table_2.column", "schema_1.table_2"),
        # Multiple dots in representation (should extract the table part correctly)
        ("very.long.schema.table.['col'].UNIQUE", "very.long.schema.table"),
    ],
)
def test_get_table_name(representation_str: str, expected_table_name: str | None):
    """Test extraction of table names from pointer representations."""
    pointer = MockDBObjectPointer(representation_str=representation_str)
    result = pointer.get_table_name()
    assert result == expected_table_name
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
@pytest.mark.parametrize(
    "representation_str,expected_column_names",
    [
        # Constraint pointer formats
        ("users.['id'].PRIMARY KEY", ["id"]),
        ("orders.['user_id', 'product_id'].UNIQUE", ["user_id", "product_id"]),
        ("products.['name', 'category', 'price'].INDEX", ["name", "category", "price"]),
        ("users.[].CHECK", []),  # Empty column list
        # Simple table.column formats
        ("users.email", ["email"]),
        ("products.name", ["name"]),
        ("public.users.column", ["column"]),  # Schema.table.column format
        # Edge cases
        ("table_only", []),  # No columns
        ("", []),  # Empty string
        # Whitespace handling
        ("users.[ 'col1' , 'col2' ].UNIQUE", ["col1", "col2"]),
        # Quote handling
        ("users.[\"col1\", 'col2'].UNIQUE", ["col1", "col2"]),
        # Column names with underscores and numbers
        (
            "users.['user_id_2', 'created_at_timestamp'].UNIQUE",
            ["user_id_2", "created_at_timestamp"],
        ),
        # Complex schema.table.column cases
        ("schema.table.column_name", ["column_name"]),
        ("very.long.schema.table.column", ["column"]),
    ],
)
def test_get_column_names(representation_str: str, expected_column_names: list[str]):
    """Test extraction of column names from pointer representations."""
    pointer = MockDBObjectPointer(representation_str=representation_str)
    result = pointer.get_column_names()
    assert result == expected_column_names
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def test_merge_type_columns():
    """
    Allow separately yielded type definitions to collect their reference columns. If an
    enum is referenced in one place, this should build up to the full definition.
    """
    # Two DBType instances sharing the same name/values but referenced from
    # different columns — merge() should union the reference sets.
    type_a = DBType(
        name="type_a",
        values=frozenset({"A"}),
        reference_columns=frozenset({("table_a", "column_a")}),
    )
    type_b = DBType(
        name="type_a",
        values=frozenset({"A"}),
        reference_columns=frozenset({("table_b", "column_b")}),
    )

    merged = type_a.merge(type_b)
    assert merged.name == "type_a"
    assert merged.values == frozenset({"A"})
    assert merged.reference_columns == frozenset(
        {("table_a", "column_a"), ("table_b", "column_b")}
    )
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
|
|
3
|
+
from iceaxe import alias, func, select, sql
|
|
4
|
+
from iceaxe.__tests__.conf_models import UserDemo
|
|
5
|
+
from iceaxe.queries import QueryBuilder
|
|
6
|
+
from iceaxe.session import (
|
|
7
|
+
DBConnection,
|
|
8
|
+
)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@pytest.mark.asyncio
async def test_alias_with_function(db_connection: DBConnection):
    """Test using alias with a function value."""
    demo = UserDemo(id=1, name="Test Title", email="john@example.com")
    await db_connection.insert([demo])

    # Test using string length function with alias
    results = await db_connection.exec(
        select((UserDemo, alias("name_length", func.length(UserDemo.name))))
    )

    assert len(results) == 1
    assert results[0][0].id == 1
    assert isinstance(results[0][1], int)  # length result
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@pytest.mark.asyncio
async def test_alias_with_raw_sql(db_connection: DBConnection):
    """
    Test that we can use a text query alongside an alias to map raw SQL results
    to typed values.
    """
    user = UserDemo(name="John Doe", email="john@example.com")
    await db_connection.insert([user])

    # Create a query that uses text() alongside an alias
    query = (
        QueryBuilder()
        .select((UserDemo, alias("rollup_value", int)))
        .text(
            f"""
            SELECT {sql.select(UserDemo)}, COUNT(*) AS rollup_value
            FROM userdemo
            GROUP BY id
            """
        )
    )
    result = await db_connection.exec(query)
    assert len(result) == 1
    assert isinstance(result[0], tuple)
    assert isinstance(result[0][0], UserDemo)
    assert result[0][0].name == "John Doe"
    assert result[0][0].email == "john@example.com"
    assert result[0][1] == 1  # The count should be 1
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@pytest.mark.asyncio
async def test_multiple_aliases(db_connection: DBConnection):
    """Test using multiple aliases in a single query."""
    demo1 = UserDemo(id=1, name="First Item", email="john@example.com")
    demo2 = UserDemo(id=2, name="Second Item", email="jane@example.com")
    await db_connection.insert([demo1, demo2])

    # Test multiple aliases with different SQL functions
    results = await db_connection.exec(
        select(
            (
                UserDemo,
                alias("upper_name", func.upper(UserDemo.name)),
                alias("item_count", func.count(UserDemo.id)),
            )
        )
        .group_by(UserDemo.id, UserDemo.name)
        .order_by(UserDemo.id)
    )

    assert len(results) == 2
    assert results[0][0].id == 1
    assert results[0][1] == "FIRST ITEM"  # uppercase name
    assert results[0][2] == 1  # count result
    assert results[1][0].id == 2
    assert results[1][1] == "SECOND ITEM"  # uppercase name
    assert results[1][2] == 1  # count result
|