iceaxe 0.7.1__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of iceaxe might be problematic. Click here for more details.
- iceaxe/__init__.py +20 -0
- iceaxe/__tests__/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/test_bulk_insert.py +45 -0
- iceaxe/__tests__/benchmarks/test_select.py +114 -0
- iceaxe/__tests__/conf_models.py +133 -0
- iceaxe/__tests__/conftest.py +204 -0
- iceaxe/__tests__/docker_helpers.py +208 -0
- iceaxe/__tests__/helpers.py +268 -0
- iceaxe/__tests__/migrations/__init__.py +0 -0
- iceaxe/__tests__/migrations/conftest.py +36 -0
- iceaxe/__tests__/migrations/test_action_sorter.py +237 -0
- iceaxe/__tests__/migrations/test_generator.py +140 -0
- iceaxe/__tests__/migrations/test_generics.py +91 -0
- iceaxe/__tests__/mountaineer/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/test_core.py +76 -0
- iceaxe/__tests__/schemas/__init__.py +0 -0
- iceaxe/__tests__/schemas/test_actions.py +1264 -0
- iceaxe/__tests__/schemas/test_cli.py +25 -0
- iceaxe/__tests__/schemas/test_db_memory_serializer.py +1525 -0
- iceaxe/__tests__/schemas/test_db_serializer.py +398 -0
- iceaxe/__tests__/schemas/test_db_stubs.py +190 -0
- iceaxe/__tests__/test_alias.py +83 -0
- iceaxe/__tests__/test_base.py +52 -0
- iceaxe/__tests__/test_comparison.py +383 -0
- iceaxe/__tests__/test_field.py +11 -0
- iceaxe/__tests__/test_helpers.py +9 -0
- iceaxe/__tests__/test_modifications.py +151 -0
- iceaxe/__tests__/test_queries.py +605 -0
- iceaxe/__tests__/test_queries_str.py +173 -0
- iceaxe/__tests__/test_session.py +1511 -0
- iceaxe/__tests__/test_text_search.py +287 -0
- iceaxe/alias_values.py +67 -0
- iceaxe/base.py +350 -0
- iceaxe/comparison.py +560 -0
- iceaxe/field.py +250 -0
- iceaxe/functions.py +906 -0
- iceaxe/generics.py +140 -0
- iceaxe/io.py +107 -0
- iceaxe/logging.py +91 -0
- iceaxe/migrations/__init__.py +5 -0
- iceaxe/migrations/action_sorter.py +98 -0
- iceaxe/migrations/cli.py +228 -0
- iceaxe/migrations/client_io.py +62 -0
- iceaxe/migrations/generator.py +404 -0
- iceaxe/migrations/migration.py +86 -0
- iceaxe/migrations/migrator.py +101 -0
- iceaxe/modifications.py +176 -0
- iceaxe/mountaineer/__init__.py +10 -0
- iceaxe/mountaineer/cli.py +74 -0
- iceaxe/mountaineer/config.py +46 -0
- iceaxe/mountaineer/dependencies/__init__.py +6 -0
- iceaxe/mountaineer/dependencies/core.py +67 -0
- iceaxe/postgres.py +133 -0
- iceaxe/py.typed +0 -0
- iceaxe/queries.py +1455 -0
- iceaxe/queries_str.py +294 -0
- iceaxe/schemas/__init__.py +0 -0
- iceaxe/schemas/actions.py +864 -0
- iceaxe/schemas/cli.py +30 -0
- iceaxe/schemas/db_memory_serializer.py +705 -0
- iceaxe/schemas/db_serializer.py +346 -0
- iceaxe/schemas/db_stubs.py +525 -0
- iceaxe/session.py +860 -0
- iceaxe/session_optimized.c +12035 -0
- iceaxe/session_optimized.cpython-313-darwin.so +0 -0
- iceaxe/session_optimized.pyx +212 -0
- iceaxe/sql_types.py +148 -0
- iceaxe/typing.py +73 -0
- iceaxe-0.7.1.dist-info/METADATA +261 -0
- iceaxe-0.7.1.dist-info/RECORD +75 -0
- iceaxe-0.7.1.dist-info/WHEEL +6 -0
- iceaxe-0.7.1.dist-info/licenses/LICENSE +21 -0
- iceaxe-0.7.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1264 @@
|
|
|
1
|
+
from unittest.mock import AsyncMock
|
|
2
|
+
|
|
3
|
+
import asyncpg
|
|
4
|
+
import pytest
|
|
5
|
+
|
|
6
|
+
from iceaxe.schemas.actions import (
|
|
7
|
+
CheckConstraint,
|
|
8
|
+
ColumnType,
|
|
9
|
+
ConstraintType,
|
|
10
|
+
DatabaseActions,
|
|
11
|
+
DryRunAction,
|
|
12
|
+
ForeignKeyConstraint,
|
|
13
|
+
assert_is_safe_sql_identifier,
|
|
14
|
+
format_sql_values,
|
|
15
|
+
)
|
|
16
|
+
from iceaxe.session import DBConnection
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@pytest.fixture
def db_backed_actions(
    db_connection: DBConnection,
    clear_all_database_objects,
):
    """
    Provide a non-dry-run DatabaseActions wired to a live database connection.

    The `clear_all_database_objects` fixture wipes the schema before and after
    each test, so tests start from a database with no pre-existing objects.
    """
    return DatabaseActions(dry_run=False, db_connection=db_connection)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def example_action_fn(arg_1: str):
    """No-op callable used as a target for signature-recording tests."""
    pass
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@pytest.mark.asyncio
async def test_record_signature_dry_run():
    """A dry-run recorder queues a DryRunAction instead of emitting SQL."""
    actions = DatabaseActions(dry_run=True)

    await actions._record_signature(example_action_fn, {"arg_1": "test"}, "SQL")

    expected_actions = [DryRunAction(fn=example_action_fn, kwargs={"arg_1": "test"})]
    assert actions.dry_run_actions == expected_actions
    # No SQL should be captured when running in dry-run mode.
    assert actions.prod_sqls == []
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@pytest.mark.asyncio
async def test_record_signature_prod():
    """A non-dry-run recorder captures the SQL and queues no dry-run action."""
    actions = DatabaseActions(dry_run=False, db_connection=AsyncMock())

    await actions._record_signature(example_action_fn, {"arg_1": "test"}, "SQL")

    # In prod mode the SQL statement is accumulated for execution, and the
    # dry-run queue stays empty.
    assert actions.dry_run_actions == []
    assert actions.prod_sqls == ["SQL"]
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@pytest.mark.asyncio
async def test_record_signature_incorrect_kwarg():
    """Kwargs that don't match the target function's signature are rejected."""
    actions = DatabaseActions(dry_run=False, db_connection=AsyncMock())

    # Passing a kwarg the function does not declare should raise
    with pytest.raises(ValueError):
        await actions._record_signature(
            example_action_fn, {"arg_1": "test", "arg_2": "test"}, "SQL"
        )

    # Omitting a required kwarg should also raise
    with pytest.raises(ValueError):
        await actions._record_signature(example_action_fn, {}, "SQL")
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@pytest.mark.parametrize(
    "identifier, expected_is_valid",
    [
        # Valid identifiers
        ("validTableName", True),
        ("_valid_table_name", True),
        ("Table123", True),
        ("_", True),
        ("t", True),
        # Invalid identifiers
        ("123table", False),
        ("table-name", False),
        ("table name", False),
        ("table$name", False),
        ("table!name", False),
        ("table@name", False),
        ("table#name", False),
        ("", False),
        (" ", False),
        (" table", False),
        ("table ", False),
        ("table\n", False),
        # SQL injection attempts
        ("table; DROP TABLE users;", False),
        ("table; SELECT * FROM users", False),
        ("1;1", False),
        (";", False),
        ("--comment", False),
        ("' OR '1'='1", False),
        ('" OR "1"="1', False),
        ("table`", False),
        ("[table]", False),
        ("{table}", False),
        ("<script>", False),
        ('"; DROP TABLE users; --', False),
        ("'; DROP TABLE users; --", False),
    ],
)
def test_is_safe_sql_identifier(identifier: str, expected_is_valid: bool):
    """Unsafe identifiers raise ValueError; safe ones pass silently."""
    if not expected_is_valid:
        with pytest.raises(ValueError):
            assert_is_safe_sql_identifier(identifier)
    else:
        # Should complete without raising for valid identifiers
        assert_is_safe_sql_identifier(identifier)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@pytest.mark.parametrize(
    "values, expected",
    [
        # Simple strings without special characters
        (["single"], "'single'"),
        ([], ""),
        (["apple", "banana"], "'apple', 'banana'"),
        # Strings with single quotes that need escaping
        (["O'Neill", "d'Artagnan"], "'O''Neill', 'd''Artagnan'"),
        # Mixed strings, no special characters and with special characters
        (["hello", "it's a test"], "'hello', 'it''s a test'"),
        # Strings that contain SQL-like syntax
        (
            ["SELECT * FROM users;", "DROP TABLE students;"],
            "'SELECT * FROM users;', 'DROP TABLE students;'",
        ),
        # Empty strings and spaces
        (["", " ", " "], "'', ' ', ' '"),
    ],
)
def test_format_sql_values(values, expected):
    """Values are single-quoted, quote-escaped, and comma-joined."""
    formatted = format_sql_values(values)
    assert formatted == expected
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
@pytest.mark.asyncio
async def test_add_table(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """add_table creates a table that can subsequently be queried."""
    await db_backed_actions.add_table("test_table")

    # Selecting from the new table should succeed, proving it exists
    assert await db_connection.conn.execute("SELECT * FROM test_table")
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
@pytest.mark.asyncio
async def test_add_table_reserved_keyword(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Confirm that table migrations will wrap the table name in double quotes
    to avoid conflicts with reserved keywords.
    """
    await db_backed_actions.add_table("user")

    # Verify against the *quoted* identifier. The previous unquoted
    # `SELECT * FROM user` passes even when no table exists, because `user`
    # is a reserved word in Postgres that resolves to the current role —
    # so it could not prove the table was actually created.
    assert await db_connection.conn.execute('SELECT * FROM "user"')
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
@pytest.mark.asyncio
async def test_drop_table(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """drop_table removes an existing table from the database."""
    # Create the table that will be dropped
    await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")

    await db_backed_actions.drop_table("test_table")

    # Any query against the dropped table must now fail
    with pytest.raises(asyncpg.exceptions.UndefinedTableError):
        await db_connection.conn.execute("SELECT * FROM test_table")
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
@pytest.mark.asyncio
async def test_add_column(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """add_column creates both scalar and list columns on an existing table."""
    # Start from a table with only a primary key
    await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")

    # Add a scalar VARCHAR column
    await db_backed_actions.add_column(
        "test_table",
        "test_column",
        explicit_data_type=ColumnType.VARCHAR,
    )

    # Add a VARCHAR[] column
    await db_backed_actions.add_column(
        "test_table",
        "test_column_list",
        explicit_data_type=ColumnType.VARCHAR,
        explicit_data_is_list=True,
    )

    # Both columns should now accept writes
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column, test_column_list) VALUES ($1, $2)",
        "test_value",
        ["value_1", "value_2"],
    )

    # And the inserted row should round-trip intact
    rows = await db_connection.conn.fetch("SELECT * FROM test_table")
    first_row = rows[0]
    assert first_row
    assert first_row["test_column"] == "test_value"
    assert first_row["test_column_list"] == ["value_1", "value_2"]
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
@pytest.mark.asyncio
@pytest.mark.parametrize("enum_value", list(ColumnType))
async def test_add_column_any_type(
    enum_value: ColumnType,
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Simple test that all our known type enum values are formatted properly
    to be inserted into the database, since we don't otherwise validate insertion
    values here.
    """
    # Start from a table with only a primary key
    await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")

    await db_backed_actions.add_column(
        "test_table",
        "test_column",
        explicit_data_type=enum_value,
    )

    # Query the postgres index to see if the column was created
    rows = await db_connection.conn.fetch(
        "SELECT data_type FROM information_schema.columns WHERE table_name = 'test_table' AND column_name = 'test_column'"
    )
    row = rows[0]

    # Some values are shortcuts for other values when inserted without
    # additional parameters. Postgres may report the canonical name instead
    # of the alias we inserted, so allow the known equivalent as well.
    # Mapping: inserted type -> allowed alternative value in the database.
    known_equivalents = {
        ColumnType.DECIMAL: ColumnType.NUMERIC,
        ColumnType.SERIAL: ColumnType.INTEGER,
        ColumnType.BIGSERIAL: ColumnType.BIGINT,
        ColumnType.CHAR: "character",
        ColumnType.TIME_WITHOUT_TIME_ZONE: "time without time zone",
        ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE: "timestamp without time zone",
    }

    allowed_values = {enum_value.value}
    # Dict lookup replaces the original linear scan over pairs
    alternative = known_equivalents.get(enum_value)
    if alternative is not None:
        allowed_values.add(alternative)

    assert row
    assert row["data_type"] in allowed_values
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
@pytest.mark.asyncio
async def test_drop_column(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """drop_column removes a column so further references to it fail."""
    # Create a table that has the column to be dropped
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    await db_backed_actions.drop_column("test_table", "test_column")

    # Referencing the dropped column must now raise
    with pytest.raises(asyncpg.exceptions.UndefinedColumnError):
        await db_connection.conn.execute("SELECT test_column FROM test_table")
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@pytest.mark.asyncio
async def test_rename_column(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """rename_column makes the new name queryable and removes the old one."""
    # Create a table holding the column to rename
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    await db_backed_actions.rename_column("test_table", "test_column", "new_column")

    # The new name should resolve
    assert await db_connection.conn.execute("SELECT new_column FROM test_table")

    # The old name should no longer exist
    with pytest.raises(asyncpg.exceptions.UndefinedColumnError):
        await db_connection.conn.execute("SELECT test_column FROM test_table")
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
@pytest.mark.asyncio
async def test_modify_column_type(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """A compatible type change (VARCHAR -> TEXT) works without casting."""
    # Build the table with the original column type
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    # VARCHAR -> TEXT is an implicit-cast-compatible change, so no explicit
    # USING clause is required
    await db_backed_actions.modify_column_type(
        "test_table", "test_column", ColumnType.TEXT
    )

    # The column should accept text values after the change
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column) VALUES ($1)",
        "test_string_value",
    )

    # And the value should read back unchanged
    rows = await db_connection.conn.fetch("SELECT * FROM test_table")
    first_row = rows[0]
    assert first_row
    assert first_row["test_column"] == "test_string_value"
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "from_type,to_type,test_value,expected_value,requires_autocast",
    [
        # String conversions
        (ColumnType.VARCHAR, ColumnType.TEXT, "test", "test", False),
        (ColumnType.TEXT, ColumnType.VARCHAR, "test", "test", False),
        # Numeric conversions - these require autocast
        (ColumnType.VARCHAR, ColumnType.INTEGER, "123", 123, True),
        (ColumnType.TEXT, ColumnType.INTEGER, "456", 456, True),
        (ColumnType.INTEGER, ColumnType.BIGINT, 123, 123, False),
        (ColumnType.INTEGER, ColumnType.SMALLINT, 50, 50, False),
        (ColumnType.SMALLINT, ColumnType.INTEGER, 50, 50, False),
        (ColumnType.INTEGER, ColumnType.REAL, 123, 123.0, False),
        (ColumnType.REAL, ColumnType.DOUBLE_PRECISION, 123.5, 123.5, False),
        # Boolean conversions - require autocast
        (ColumnType.VARCHAR, ColumnType.BOOLEAN, "true", True, True),
        (ColumnType.TEXT, ColumnType.BOOLEAN, "false", False, True),
        (ColumnType.INTEGER, ColumnType.BOOLEAN, 1, True, True),
        # Timestamp conversions - require autocast for string sources
        (ColumnType.VARCHAR, ColumnType.DATE, "2023-01-01", "2023-01-01", True),
        (
            ColumnType.TEXT,
            ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
            "2023-01-01 12:00:00",
            "2023-01-01 12:00:00",
            True,
        ),
        # JSON conversions - require autocast, return as strings
        (
            ColumnType.TEXT,
            ColumnType.JSON,
            '{"key": "value"}',
            '{"key": "value"}',
            True,
        ),
        (
            ColumnType.VARCHAR,
            ColumnType.JSONB,
            '{"key": "value"}',
            '{"key": "value"}',
            True,
        ),
        (
            ColumnType.JSON,
            ColumnType.JSONB,
            '{"key": "value"}',
            '{"key": "value"}',
            False,
        ),
    ],
)
async def test_modify_column_type_with_autocast(
    from_type: ColumnType,
    to_type: ColumnType,
    test_value,
    expected_value,
    requires_autocast: bool,
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test column type modifications with autocast for various type conversions.
    """
    table_name = "test_table_autocast"
    column_name = "test_column"

    # CHAR is the one source type that needs an explicit length specifier
    type_spec = f"{from_type.value}(10)" if from_type == ColumnType.CHAR else from_type.value

    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} {type_spec})"
    )

    # Seed the table with the value to be converted
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        test_value,
    )

    # Perform the type change, opting into autocast where needed
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        explicit_data_type=to_type,
        autocast=requires_autocast,
    )

    # Confirm the stored value survived the conversion
    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    first_row = rows[0]
    assert first_row

    converted = first_row[column_name]
    # Dates/timestamps come back as Python objects; normalize to string when
    # the expectation is expressed as a string
    if isinstance(expected_value, str) and to_type in [
        ColumnType.DATE,
        ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
    ]:
        converted = str(converted)

    assert converted == expected_value
|
|
452
|
+
|
|
453
|
+
|
|
454
|
+
@pytest.mark.asyncio
async def test_modify_column_type_autocast_without_data(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test that autocast works even when there's no data in the column.
    """
    table_name = "test_table_empty"
    column_name = "test_column"

    # Empty VARCHAR column to start from
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
    )

    # Autocast to INTEGER — nothing to convert, but the USING clause must
    # still be accepted
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        explicit_data_type=ColumnType.INTEGER,
        autocast=True,
    )

    # The column should now accept integer values
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        42,
    )

    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    first_row = rows[0]
    assert first_row
    assert first_row[column_name] == 42
|
|
488
|
+
|
|
489
|
+
|
|
490
|
+
@pytest.mark.asyncio
async def test_modify_column_type_incompatible_without_autocast_fails(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test that incompatible type changes fail without autocast.
    """
    table_name = "test_table_fail"
    column_name = "test_column"

    # Create table with VARCHAR containing non-numeric data
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
    )

    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        "not_a_number",
    )

    # Without autocast there is no USING clause, so Postgres rejects the
    # ALTER with 42804 (datatype_mismatch: "column cannot be cast
    # automatically to type integer"). Assert the specific error rather
    # than a bare Exception so unrelated failures don't pass the test.
    with pytest.raises(asyncpg.exceptions.DatatypeMismatchError):
        await db_backed_actions.modify_column_type(
            table_name,
            column_name,
            explicit_data_type=ColumnType.INTEGER,
            autocast=False,
        )
|
|
519
|
+
|
|
520
|
+
|
|
521
|
+
@pytest.mark.asyncio
async def test_modify_column_type_custom_type_with_autocast(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test autocast with custom types (enums).
    """
    table_name = "test_table_custom"
    column_name = "test_column"
    enum_name = "test_enum"

    # Define the enum target type first
    await db_connection.conn.execute(f"CREATE TYPE {enum_name} AS ENUM ('A', 'B', 'C')")

    # Source column is plain VARCHAR
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
    )

    # Seed with a value that is a valid enum member
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        "A",
    )

    # Cast the column over to the enum type
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        custom_data_type=enum_name,
        autocast=True,
    )

    # Stored value should be preserved through the cast
    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    first_row = rows[0]
    assert first_row
    assert first_row[column_name] == "A"

    # And the column's underlying type should now be the enum
    type_info = await db_connection.conn.fetch(
        """
        SELECT data_type, udt_name
        FROM information_schema.columns
        WHERE table_name = $1 AND column_name = $2
        """,
        table_name,
        column_name,
    )
    assert type_info[0]["udt_name"] == enum_name
|
|
572
|
+
|
|
573
|
+
|
|
574
|
+
@pytest.mark.asyncio
async def test_add_constraint_foreign_key(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """A FK constraint allows linked inserts and rejects dangling ones."""
    # Two tables: the referenced (external) table and the referencing one
    await db_connection.conn.execute(
        "CREATE TABLE external_table (id SERIAL PRIMARY KEY, external_column VARCHAR)"
    )
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column_id INTEGER)"
    )

    # Seed one row in the referenced table
    await db_connection.conn.execute(
        "INSERT INTO external_table (id, external_column) VALUES ($1, $2)",
        1,
        "test_value",
    )

    # Install the foreign key constraint
    await db_backed_actions.add_constraint(
        "test_table",
        ["test_column_id"],
        ConstraintType.FOREIGN_KEY,
        "test_foreign_key_constraint",
        constraint_args=ForeignKeyConstraint(
            target_table="external_table",
            target_columns=frozenset({"id"}),
        ),
    )

    insert_sql = "INSERT INTO test_table (test_column_id) VALUES ($1)"

    # Referencing the existing external row is allowed
    await db_connection.conn.execute(insert_sql, 1)

    # Referencing a nonexistent external row must violate the constraint
    with pytest.raises(asyncpg.exceptions.ForeignKeyViolationError):
        await db_connection.conn.execute(insert_sql, 2)
|
|
619
|
+
|
|
620
|
+
|
|
621
|
+
@pytest.mark.asyncio
async def test_add_constraint_unique(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """A UNIQUE constraint blocks inserting the same value twice."""
    # Table whose column will be constrained
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    # Install the unique constraint
    await db_backed_actions.add_constraint(
        "test_table",
        ["test_column"],
        ConstraintType.UNIQUE,
        "test_unique_constraint",
    )

    insert_sql = "INSERT INTO test_table (test_column) VALUES ($1)"

    # First insert is fine
    await db_connection.conn.execute(insert_sql, "test_value")

    # A duplicate must now be rejected
    with pytest.raises(asyncpg.exceptions.UniqueViolationError):
        await db_connection.conn.execute(insert_sql, "test_value")
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
@pytest.mark.asyncio
async def test_add_constraint_primary_key(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """Promoting a column to PRIMARY KEY enforces uniqueness on it."""
    # Start from a freshly-created, column-less table
    await db_connection.conn.execute("CREATE TABLE test_table ()")

    # Give it a column to promote
    await db_backed_actions.add_column("test_table", "test_column", ColumnType.INTEGER)

    # Promote the column to a primary key
    await db_backed_actions.add_constraint(
        "test_table",
        ["test_column"],
        ConstraintType.PRIMARY_KEY,
        "test_primary_key_constraint",
    )

    insert_sql = "INSERT INTO test_table (test_column) VALUES ($1)"

    # Inserting a first key value works
    await db_connection.conn.execute(insert_sql, 1)

    # Re-using the key value must violate uniqueness
    with pytest.raises(asyncpg.exceptions.UniqueViolationError):
        await db_connection.conn.execute(insert_sql, 1)
|
|
685
|
+
|
|
686
|
+
|
|
687
|
+
@pytest.mark.asyncio
async def test_add_constraint_check(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """A CHECK constraint accepts passing rows and rejects failing ones."""
    # Table with an integer price column to constrain
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, price INTEGER)"
    )

    # Require the price to be strictly positive
    await db_backed_actions.add_constraint(
        "test_table",
        [],
        ConstraintType.CHECK,
        "test_check_constraint",
        constraint_args=CheckConstraint(check_condition="price > 0"),
    )

    insert_sql = "INSERT INTO test_table (price) VALUES ($1)"

    # Positive values pass the check
    await db_connection.conn.execute(insert_sql, 1)

    # Negative values are rejected
    with pytest.raises(asyncpg.exceptions.CheckViolationError):
        await db_connection.conn.execute(insert_sql, -1)
|
|
718
|
+
|
|
719
|
+
|
|
720
|
+
@pytest.mark.asyncio
async def test_drop_constraint(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """Dropping a UNIQUE constraint re-allows duplicate values."""
    # Build a table that carries a unique constraint to remove
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )
    await db_connection.conn.execute(
        "ALTER TABLE test_table ADD CONSTRAINT test_unique_constraint UNIQUE (test_column)"
    )

    # Remove the constraint under test
    await db_backed_actions.drop_constraint("test_table", "test_unique_constraint")

    insert_sql = "INSERT INTO test_table (test_column) VALUES ($1)"

    # Duplicates should now be accepted without error
    await db_connection.conn.execute(insert_sql, "test_value")
    await db_connection.conn.execute(insert_sql, "test_value")
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
@pytest.mark.asyncio
async def test_add_not_null(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Start from a nullable column (the default for a bare VARCHAR)
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    # Tighten the column through the action under test
    await db_backed_actions.add_not_null("test_table", "test_column")

    # A NULL insert should now be rejected by the new constraint
    with pytest.raises(asyncpg.exceptions.NotNullViolationError):
        await db_connection.conn.execute(
            "INSERT INTO test_table (test_column) VALUES ($1)",
            None,
        )
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
@pytest.mark.asyncio
async def test_drop_not_null(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Begin with a NOT NULL column so there is a constraint to remove
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR NOT NULL)"
    )

    # Loosen the column through the action under test
    await db_backed_actions.drop_not_null("test_table", "test_column")

    # NULLs are accepted once the constraint has been dropped
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column) VALUES ($1)",
        None,
    )
|
|
785
|
+
|
|
786
|
+
|
|
787
|
+
@pytest.mark.asyncio
async def test_add_type(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Create the enum through the action under test
    await db_backed_actions.add_type("test_type", ["A", "B"])

    # The freshly created enum should be usable as a column type
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
    )

    # Declared members of the enum insert cleanly
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column) VALUES ($1)",
        "A",
    )

    # Anything outside the declared members is rejected by postgres
    with pytest.raises(asyncpg.exceptions.InvalidTextRepresentationError):
        await db_connection.conn.execute(
            "INSERT INTO test_table (test_column) VALUES ($1)",
            "C",
        )
|
|
811
|
+
|
|
812
|
+
|
|
813
|
+
@pytest.mark.asyncio
async def test_add_type_values(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Seed an enum with a single member
    await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A')")

    # Reference the enum from a table
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
    )

    # Extend the enum through the action under test
    await db_backed_actions.add_type_values("test_type", ["B"])

    # The newly added member should now be insertable
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column) VALUES ($1)",
        "B",
    )
|
|
834
|
+
|
|
835
|
+
|
|
836
|
+
@pytest.mark.asyncio
async def test_drop_type_values_no_existing_references(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Seed an enum carrying two members
    await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A', 'B')")

    # Remove one member; no columns reference the type at this point,
    # hence the empty target_columns list
    await db_backed_actions.drop_type_values("test_type", ["B"], target_columns=[])

    # The type should still be usable for new tables afterwards
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
    )

    # Introspect the surviving enum members
    rows = await db_connection.conn.fetch(
        "SELECT array_agg(unnest) AS value FROM unnest(enum_range(NULL::test_type))"
    )
    first_row = rows[0]
    assert first_row["value"] == ["A"]
|
|
858
|
+
|
|
859
|
+
|
|
860
|
+
@pytest.mark.asyncio
async def test_drop_type_values(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Seed an enum carrying two members
    await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A', 'B')")

    # Reference the enum from a table so the drop has a column to migrate
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
    )

    # Remove one member, pointing the action at the referencing column
    await db_backed_actions.drop_type_values(
        "test_type", ["B"], target_columns=[("test_table", "test_column")]
    )

    # Only the surviving member should remain on the type
    rows = await db_connection.conn.fetch(
        "SELECT array_agg(unnest) AS value FROM unnest(enum_range(NULL::test_type))"
    )
    first_row = rows[0]
    assert first_row["value"] == ["A"]

    # Inserting the surviving member works...
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column) VALUES ($1)",
        "A",
    )

    # ...while the dropped member is now rejected
    with pytest.raises(asyncpg.exceptions.InvalidTextRepresentationError):
        await db_connection.conn.execute(
            "INSERT INTO test_table (test_column) VALUES ($1)",
            "B",
        )
|
|
896
|
+
|
|
897
|
+
|
|
898
|
+
@pytest.mark.asyncio
async def test_drop_type(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Seed a type to remove
    await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A')")

    # Remove it through the action under test
    await db_backed_actions.drop_type("test_type")

    # Referencing the dropped type must now fail
    with pytest.raises(asyncpg.exceptions.UndefinedObjectError):
        await db_connection.conn.execute(
            "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
        )
|
|
914
|
+
|
|
915
|
+
|
|
916
|
+
@pytest.mark.asyncio
async def test_add_index_single_column(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # A table with one indexable column
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )

    # Index the single column through the action under test
    await db_backed_actions.add_index(
        "test_table",
        ["test_column"],
        "test_single_column_index",
    )

    # The system catalog should now report the index and its definition
    index_rows = await db_connection.conn.fetch(
        """
        SELECT indexname, indexdef
        FROM pg_indexes
        WHERE tablename = 'test_table'
        AND indexname = 'test_single_column_index'
        """
    )
    assert len(index_rows) == 1
    assert (
        "CREATE INDEX test_single_column_index ON public.test_table USING btree (test_column)"
        in index_rows[0]["indexdef"]
    )
|
|
947
|
+
|
|
948
|
+
|
|
949
|
+
@pytest.mark.asyncio
async def test_add_index_multiple_columns(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # A table with two indexable columns
    await db_connection.conn.execute(
        """
        CREATE TABLE test_table (
            id SERIAL PRIMARY KEY,
            first_column VARCHAR,
            second_column INTEGER
        )
        """
    )

    # Build a composite index through the action under test
    await db_backed_actions.add_index(
        "test_table",
        ["first_column", "second_column"],
        "test_multi_column_index",
    )

    # The catalog definition should list both columns, in order
    index_rows = await db_connection.conn.fetch(
        """
        SELECT indexname, indexdef
        FROM pg_indexes
        WHERE tablename = 'test_table'
        AND indexname = 'test_multi_column_index'
        """
    )
    assert len(index_rows) == 1
    assert (
        "CREATE INDEX test_multi_column_index ON public.test_table USING btree (first_column, second_column)"
        in index_rows[0]["indexdef"]
    )
|
|
986
|
+
|
|
987
|
+
|
|
988
|
+
@pytest.mark.asyncio
async def test_drop_index(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # Build a table plus an index by hand
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
    )
    await db_connection.conn.execute(
        "CREATE INDEX test_index ON test_table (test_column)"
    )

    # Sanity check: the index is present before we drop it
    found = await db_connection.conn.fetch(
        """
        SELECT indexname
        FROM pg_indexes
        WHERE tablename = 'test_table'
        AND indexname = 'test_index'
        """
    )
    assert len(found) == 1

    # Drop it through the action under test
    await db_backed_actions.drop_index("test_table", "test_index")

    # The catalog entry should be gone
    found = await db_connection.conn.fetch(
        """
        SELECT indexname
        FROM pg_indexes
        WHERE tablename = 'test_table'
        AND indexname = 'test_index'
        """
    )
    assert len(found) == 0
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
@pytest.mark.asyncio
async def test_add_constraint_foreign_key_actions(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    # A referenced table plus a referencing table
    await db_connection.conn.execute(
        "CREATE TABLE external_table (id SERIAL PRIMARY KEY, external_column VARCHAR)"
    )
    await db_connection.conn.execute(
        "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column_id INTEGER)"
    )

    # Seed the referenced table with a row to point at
    await db_connection.conn.execute(
        "INSERT INTO external_table (id, external_column) VALUES ($1, $2)",
        1,
        "test_value",
    )

    # Install the foreign key with ON UPDATE / ON DELETE CASCADE
    await db_backed_actions.add_constraint(
        "test_table",
        ["test_column_id"],
        ConstraintType.FOREIGN_KEY,
        "test_foreign_key_constraint",
        constraint_args=ForeignKeyConstraint(
            target_table="external_table",
            target_columns=frozenset({"id"}),
            on_delete="CASCADE",
            on_update="CASCADE",
        ),
    )

    # Reference the external row from test_table
    await db_connection.conn.execute(
        "INSERT INTO test_table (test_column_id) VALUES ($1)",
        1,
    )

    # Changing the referenced id should cascade into test_table
    await db_connection.conn.execute(
        "UPDATE external_table SET id = $1 WHERE id = $2",
        2,
        1,
    )
    after_update = await db_connection.conn.fetch(
        "SELECT test_column_id FROM test_table WHERE id = 1"
    )
    assert after_update[0]["test_column_id"] == 2

    # Deleting the referenced row should cascade as well
    await db_connection.conn.execute(
        "DELETE FROM external_table WHERE id = $1",
        2,
    )
    remaining = await db_connection.conn.fetch("SELECT COUNT(*) FROM test_table")
    assert remaining[0]["count"] == 0
|
|
1089
|
+
|
|
1090
|
+
|
|
1091
|
+
@pytest.mark.asyncio
async def test_modify_column_type_uuid_conversion(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test UUID type conversions specifically.
    """
    table_name = "test_table_uuid"
    column_name = "test_column"
    uuid_string = "550e8400-e29b-41d4-a716-446655440000"

    # Start from a VARCHAR column holding a UUID-shaped string
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
    )
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        uuid_string,
    )

    # Convert to UUID; autocast requests an explicit USING cast for the
    # VARCHAR -> UUID transition
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        explicit_data_type=ColumnType.UUID,
        autocast=True,
    )

    # Existing data should have survived the conversion. UUID columns come
    # back as UUID objects, so compare through str().
    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    converted = rows[0]
    assert converted
    assert str(converted[column_name]) == uuid_string
|
|
1130
|
+
|
|
1131
|
+
|
|
1132
|
+
@pytest.mark.asyncio
async def test_modify_column_type_date_to_timestamp(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test date to timestamp conversions.
    """
    from datetime import date, datetime

    table_name = "test_table_date"
    column_name = "test_column"
    test_date = date(2023, 1, 1)

    # Start from a DATE column with one stored value
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} DATE)"
    )
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        test_date,
    )

    # DATE -> TIMESTAMP is a compatible conversion, so autocast stays off
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        explicit_data_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
        autocast=False,
    )

    # The stored value should now come back as a datetime whose date part
    # matches what we originally inserted
    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    converted = rows[0]
    assert converted
    value = converted[column_name]
    assert isinstance(value, datetime)
    assert value.date() == test_date
|
|
1174
|
+
|
|
1175
|
+
|
|
1176
|
+
@pytest.mark.asyncio
async def test_modify_column_type_char_to_varchar(
    db_backed_actions: DatabaseActions,
    db_connection: DBConnection,
):
    """
    Test CHAR to VARCHAR conversion with proper length handling.
    """
    table_name = "test_table_char"
    column_name = "test_column"
    test_value = "test"

    # Start from a fixed-width CHAR(10) column
    await db_connection.conn.execute(
        f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} CHAR(10))"
    )
    await db_connection.conn.execute(
        f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
        test_value,
    )

    # Switch the column over to VARCHAR
    await db_backed_actions.modify_column_type(
        table_name,
        column_name,
        explicit_data_type=ColumnType.VARCHAR,
        autocast=False,
    )

    # CHAR right-pads with spaces, so strip before comparing
    rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
    converted = rows[0]
    assert converted
    assert converted[column_name].strip() == test_value
|
|
1215
|
+
|
|
1216
|
+
|
|
1217
|
+
@pytest.mark.asyncio
|
|
1218
|
+
async def test_modify_column_type_scalar_to_array(
|
|
1219
|
+
db_backed_actions: DatabaseActions,
|
|
1220
|
+
db_connection: DBConnection,
|
|
1221
|
+
):
|
|
1222
|
+
"""
|
|
1223
|
+
Test converting a scalar column to an array column with autocast.
|
|
1224
|
+
"""
|
|
1225
|
+
table_name = "test_table_array_conversion"
|
|
1226
|
+
column_name = "test_column"
|
|
1227
|
+
|
|
1228
|
+
# Create table with INTEGER
|
|
1229
|
+
await db_connection.conn.execute(
|
|
1230
|
+
f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} INTEGER)"
|
|
1231
|
+
)
|
|
1232
|
+
|
|
1233
|
+
# Insert a scalar value
|
|
1234
|
+
await db_connection.conn.execute(
|
|
1235
|
+
f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
|
|
1236
|
+
42,
|
|
1237
|
+
)
|
|
1238
|
+
|
|
1239
|
+
# Convert to INTEGER[] using autocast
|
|
1240
|
+
await db_backed_actions.modify_column_type(
|
|
1241
|
+
table_name,
|
|
1242
|
+
column_name,
|
|
1243
|
+
explicit_data_type=ColumnType.INTEGER,
|
|
1244
|
+
explicit_data_is_list=True,
|
|
1245
|
+
autocast=True,
|
|
1246
|
+
)
|
|
1247
|
+
|
|
1248
|
+
# Verify the scalar value was converted to a single-element array
|
|
1249
|
+
rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
|
|
1250
|
+
row = rows[0]
|
|
1251
|
+
assert row
|
|
1252
|
+
assert row[column_name] == [42]
|
|
1253
|
+
|
|
1254
|
+
# Verify the column type is now an array by checking the PostgreSQL catalog
|
|
1255
|
+
type_info = await db_connection.conn.fetch(
|
|
1256
|
+
"""
|
|
1257
|
+
SELECT data_type, udt_name
|
|
1258
|
+
FROM information_schema.columns
|
|
1259
|
+
WHERE table_name = $1 AND column_name = $2
|
|
1260
|
+
""",
|
|
1261
|
+
table_name,
|
|
1262
|
+
column_name,
|
|
1263
|
+
)
|
|
1264
|
+
assert type_info[0]["data_type"] == "ARRAY"
|