iceaxe-0.8.3-cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of iceaxe might be problematic.

Files changed (75)
  1. iceaxe/__init__.py +20 -0
  2. iceaxe/__tests__/__init__.py +0 -0
  3. iceaxe/__tests__/benchmarks/__init__.py +0 -0
  4. iceaxe/__tests__/benchmarks/test_bulk_insert.py +45 -0
  5. iceaxe/__tests__/benchmarks/test_select.py +114 -0
  6. iceaxe/__tests__/conf_models.py +133 -0
  7. iceaxe/__tests__/conftest.py +204 -0
  8. iceaxe/__tests__/docker_helpers.py +208 -0
  9. iceaxe/__tests__/helpers.py +268 -0
  10. iceaxe/__tests__/migrations/__init__.py +0 -0
  11. iceaxe/__tests__/migrations/conftest.py +36 -0
  12. iceaxe/__tests__/migrations/test_action_sorter.py +237 -0
  13. iceaxe/__tests__/migrations/test_generator.py +140 -0
  14. iceaxe/__tests__/migrations/test_generics.py +91 -0
  15. iceaxe/__tests__/mountaineer/__init__.py +0 -0
  16. iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
  17. iceaxe/__tests__/mountaineer/dependencies/test_core.py +76 -0
  18. iceaxe/__tests__/schemas/__init__.py +0 -0
  19. iceaxe/__tests__/schemas/test_actions.py +1265 -0
  20. iceaxe/__tests__/schemas/test_cli.py +25 -0
  21. iceaxe/__tests__/schemas/test_db_memory_serializer.py +1571 -0
  22. iceaxe/__tests__/schemas/test_db_serializer.py +435 -0
  23. iceaxe/__tests__/schemas/test_db_stubs.py +190 -0
  24. iceaxe/__tests__/test_alias.py +83 -0
  25. iceaxe/__tests__/test_base.py +52 -0
  26. iceaxe/__tests__/test_comparison.py +383 -0
  27. iceaxe/__tests__/test_field.py +11 -0
  28. iceaxe/__tests__/test_helpers.py +9 -0
  29. iceaxe/__tests__/test_modifications.py +151 -0
  30. iceaxe/__tests__/test_queries.py +764 -0
  31. iceaxe/__tests__/test_queries_str.py +173 -0
  32. iceaxe/__tests__/test_session.py +1511 -0
  33. iceaxe/__tests__/test_text_search.py +287 -0
  34. iceaxe/alias_values.py +67 -0
  35. iceaxe/base.py +351 -0
  36. iceaxe/comparison.py +560 -0
  37. iceaxe/field.py +263 -0
  38. iceaxe/functions.py +1432 -0
  39. iceaxe/generics.py +140 -0
  40. iceaxe/io.py +107 -0
  41. iceaxe/logging.py +91 -0
  42. iceaxe/migrations/__init__.py +5 -0
  43. iceaxe/migrations/action_sorter.py +98 -0
  44. iceaxe/migrations/cli.py +228 -0
  45. iceaxe/migrations/client_io.py +62 -0
  46. iceaxe/migrations/generator.py +404 -0
  47. iceaxe/migrations/migration.py +86 -0
  48. iceaxe/migrations/migrator.py +101 -0
  49. iceaxe/modifications.py +176 -0
  50. iceaxe/mountaineer/__init__.py +10 -0
  51. iceaxe/mountaineer/cli.py +74 -0
  52. iceaxe/mountaineer/config.py +46 -0
  53. iceaxe/mountaineer/dependencies/__init__.py +6 -0
  54. iceaxe/mountaineer/dependencies/core.py +67 -0
  55. iceaxe/postgres.py +133 -0
  56. iceaxe/py.typed +0 -0
  57. iceaxe/queries.py +1459 -0
  58. iceaxe/queries_str.py +294 -0
  59. iceaxe/schemas/__init__.py +0 -0
  60. iceaxe/schemas/actions.py +864 -0
  61. iceaxe/schemas/cli.py +30 -0
  62. iceaxe/schemas/db_memory_serializer.py +711 -0
  63. iceaxe/schemas/db_serializer.py +347 -0
  64. iceaxe/schemas/db_stubs.py +529 -0
  65. iceaxe/session.py +860 -0
  66. iceaxe/session_optimized.c +12207 -0
  67. iceaxe/session_optimized.cpython-313-darwin.so +0 -0
  68. iceaxe/session_optimized.pyx +212 -0
  69. iceaxe/sql_types.py +149 -0
  70. iceaxe/typing.py +73 -0
  71. iceaxe-0.8.3.dist-info/METADATA +262 -0
  72. iceaxe-0.8.3.dist-info/RECORD +75 -0
  73. iceaxe-0.8.3.dist-info/WHEEL +6 -0
  74. iceaxe-0.8.3.dist-info/licenses/LICENSE +21 -0
  75. iceaxe-0.8.3.dist-info/top_level.txt +1 -0
iceaxe/__tests__/schemas/test_actions.py
@@ -0,0 +1,1265 @@
+ from unittest.mock import AsyncMock
+
+ import asyncpg
+ import pytest
+
+ from iceaxe.schemas.actions import (
+     CheckConstraint,
+     ColumnType,
+     ConstraintType,
+     DatabaseActions,
+     DryRunAction,
+     ForeignKeyConstraint,
+     assert_is_safe_sql_identifier,
+     format_sql_values,
+ )
+ from iceaxe.session import DBConnection
+
+
+ @pytest.fixture
+ def db_backed_actions(
+     db_connection: DBConnection,
+     clear_all_database_objects,
+ ):
+     """
+     Fixture that should be used for actions that should actually be executed
+     against a database. We will clear all database objects before and after
+     the test, so no backed objects will be available.
+
+     """
+     return DatabaseActions(dry_run=False, db_connection=db_connection)
+
+
+ def example_action_fn(arg_1: str):
+     pass
+
+
+ @pytest.mark.asyncio
+ async def test_record_signature_dry_run():
+     database_actions = DatabaseActions(dry_run=True)
+
+     await database_actions._record_signature(
+         example_action_fn, {"arg_1": "test"}, "SQL"
+     )
+
+     assert database_actions.dry_run_actions == [
+         DryRunAction(fn=example_action_fn, kwargs={"arg_1": "test"})
+     ]
+     assert database_actions.prod_sqls == []
+
+
+ @pytest.mark.asyncio
+ async def test_record_signature_prod():
+     database_actions = DatabaseActions(dry_run=False, db_connection=AsyncMock())
+
+     await database_actions._record_signature(
+         example_action_fn, {"arg_1": "test"}, "SQL"
+     )
+
+     assert database_actions.dry_run_actions == []
+     assert database_actions.prod_sqls == ["SQL"]
+
+
+ @pytest.mark.asyncio
+ async def test_record_signature_incorrect_kwarg():
+     database_actions = DatabaseActions(dry_run=False, db_connection=AsyncMock())
+
+     # An extra, non-existent kwarg is provided
+     with pytest.raises(ValueError):
+         await database_actions._record_signature(
+             example_action_fn, {"arg_1": "test", "arg_2": "test"}, "SQL"
+         )
+
+     # A required kwarg is missing
+     with pytest.raises(ValueError):
+         await database_actions._record_signature(example_action_fn, {}, "SQL")
+
+
+ @pytest.mark.parametrize(
+     "identifier, expected_is_valid",
+     [
+         # Valid identifiers
+         ("validTableName", True),
+         ("_valid_table_name", True),
+         ("Table123", True),
+         ("_", True),
+         ("t", True),
+         # Invalid identifiers
+         ("123table", False),
+         ("table-name", False),
+         ("table name", False),
+         ("table$name", False),
+         ("table!name", False),
+         ("table@name", False),
+         ("table#name", False),
+         ("", False),
+         (" ", False),
+         (" table", False),
+         ("table ", False),
+         ("table\n", False),
+         # SQL injection attempts
+         ("table; DROP TABLE users;", False),
+         ("table; SELECT * FROM users", False),
+         ("1;1", False),
+         (";", False),
+         ("--comment", False),
+         ("' OR '1'='1", False),
+         ('" OR "1"="1', False),
+         ("table`", False),
+         ("[table]", False),
+         ("{table}", False),
+         ("<script>", False),
+         ('"; DROP TABLE users; --', False),
+         ("'; DROP TABLE users; --", False),
+     ],
+ )
+ def test_is_safe_sql_identifier(identifier: str, expected_is_valid: bool):
+     if expected_is_valid:
+         assert_is_safe_sql_identifier(identifier)
+     else:
+         with pytest.raises(ValueError):
+             assert_is_safe_sql_identifier(identifier)
+
+
+ @pytest.mark.parametrize(
+     "values, expected",
+     [
+         # Simple strings without special characters
+         (["single"], "'single'"),
+         ([], ""),
+         (["apple", "banana"], "'apple', 'banana'"),
+         # Strings with single quotes that need escaping
+         (["O'Neill", "d'Artagnan"], "'O''Neill', 'd''Artagnan'"),
+         # Mixed strings, no special characters and with special characters
+         (["hello", "it's a test"], "'hello', 'it''s a test'"),
+         # Strings that contain SQL-like syntax
+         (
+             ["SELECT * FROM users;", "DROP TABLE students;"],
+             "'SELECT * FROM users;', 'DROP TABLE students;'",
+         ),
+         # Empty strings and spaces
+         (["", " ", " "], "'', ' ', ' '"),
+     ],
+ )
+ def test_format_sql_values(values, expected):
+     assert format_sql_values(values) == expected
+
+
+ @pytest.mark.asyncio
+ async def test_add_table(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     await db_backed_actions.add_table("test_table")
+
+     # We should have a table in the database
+     assert await db_connection.conn.execute("SELECT * FROM test_table")
+
+
+ @pytest.mark.asyncio
+ async def test_add_table_reserved_keyword(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Confirm that table migrations will wrap the table name in double quotes
+     to avoid conflicts with reserved keywords.
+
+     """
+     await db_backed_actions.add_table("user")
+
+     # We should have a table in the database
+     assert await db_connection.conn.execute("SELECT * FROM user")
+
+
+ @pytest.mark.asyncio
+ async def test_drop_table(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up a table for us to drop first
+     await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")
+
+     await db_backed_actions.drop_table("test_table")
+
+     # We should not have a table in the database
+     with pytest.raises(asyncpg.exceptions.UndefinedTableError):
+         await db_connection.conn.execute("SELECT * FROM test_table")
+
+
+ @pytest.mark.asyncio
+ async def test_add_column(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up a table for us to drop first
+     await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")
+
+     # Standard type
+     await db_backed_actions.add_column(
+         "test_table",
+         "test_column",
+         explicit_data_type=ColumnType.VARCHAR,
+     )
+
+     # Standard, list type
+     await db_backed_actions.add_column(
+         "test_table",
+         "test_column_list",
+         explicit_data_type=ColumnType.VARCHAR,
+         explicit_data_is_list=True,
+     )
+
+     # We should now have columns in the table
+     # Insert an object with the expected columns
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column, test_column_list) VALUES ($1, $2)",
+         "test_value",
+         ["value_1", "value_2"],
+     )
+
+     # Make sure that we can retrieve the object
+     rows = await db_connection.conn.fetch("SELECT * FROM test_table")
+     row = rows[0]
+     assert row
+     assert row["test_column"] == "test_value"
+     assert row["test_column_list"] == ["value_1", "value_2"]
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize("enum_value", [value for value in ColumnType])
+ async def test_add_column_any_type(
+     enum_value: ColumnType,
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Simple test that all our known type enum values are formatted properly
+     to be inserted into the database, since we don't otherwise validate insertion
+     values here.
+
+     """
+     # Set up a table for us to drop first
+     await db_connection.conn.execute("CREATE TABLE test_table (id SERIAL PRIMARY KEY)")
+
+     await db_backed_actions.add_column(
+         "test_table",
+         "test_column",
+         explicit_data_type=enum_value,
+     )
+
+     # Query the postgres index to see if the column was created
+     rows = await db_connection.conn.fetch(
+         "SELECT data_type FROM information_schema.columns WHERE table_name = 'test_table' AND column_name = 'test_column'"
+     )
+     row = rows[0]
+
+     # Some values are shortcuts for other values when inserted without
+     # additional parameters. We keep track of that mapping here so we allow
+     # some flexibility when checking the expected value.
+     # (inserted, allowed alternative value in database)
+     known_equivalents = (
+         (ColumnType.DECIMAL, ColumnType.NUMERIC),
+         (ColumnType.SERIAL, ColumnType.INTEGER),
+         (ColumnType.BIGSERIAL, ColumnType.BIGINT),
+         (ColumnType.SMALLSERIAL, ColumnType.SMALLINT),
+         (ColumnType.CHAR, "character"),
+         (ColumnType.TIME_WITHOUT_TIME_ZONE, "time without time zone"),
+         (ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE, "timestamp without time zone"),
+     )
+
+     allowed_values = {enum_value.value}
+     for known_value, alternative in known_equivalents:
+         if enum_value == known_value:
+             allowed_values.add(alternative)
+
+     assert row
+     assert row["data_type"] in allowed_values
+
+
+ @pytest.mark.asyncio
+ async def test_drop_column(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up a table for us to drop first
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     await db_backed_actions.drop_column("test_table", "test_column")
+
+     # We should not have a column in the table
+     with pytest.raises(asyncpg.exceptions.UndefinedColumnError):
+         await db_connection.conn.execute("SELECT test_column FROM test_table")
+
+
+ @pytest.mark.asyncio
+ async def test_rename_column(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up a table for us to drop first
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     await db_backed_actions.rename_column("test_table", "test_column", "new_column")
+
+     # We should have a column in the table
+     assert await db_connection.conn.execute("SELECT new_column FROM test_table")
+
+     # We should not have a column in the table
+     with pytest.raises(asyncpg.exceptions.UndefinedColumnError):
+         await db_connection.conn.execute("SELECT test_column FROM test_table")
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up a table with the old types
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     # Modify the column type from VARCHAR to TEXT, which is a compatible change
+     # that doesn't require explicit casting
+     await db_backed_actions.modify_column_type(
+         "test_table", "test_column", ColumnType.TEXT
+     )
+
+     # We should now be able to inject a text value
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "test_string_value",
+     )
+
+     # Make sure that we can retrieve the object
+     rows = await db_connection.conn.fetch("SELECT * FROM test_table")
+     row = rows[0]
+     assert row
+     assert row["test_column"] == "test_string_value"
+
+
+ @pytest.mark.asyncio
+ @pytest.mark.parametrize(
+     "from_type,to_type,test_value,expected_value,requires_autocast",
+     [
+         # String conversions
+         (ColumnType.VARCHAR, ColumnType.TEXT, "test", "test", False),
+         (ColumnType.TEXT, ColumnType.VARCHAR, "test", "test", False),
+         # Numeric conversions - these require autocast
+         (ColumnType.VARCHAR, ColumnType.INTEGER, "123", 123, True),
+         (ColumnType.TEXT, ColumnType.INTEGER, "456", 456, True),
+         (ColumnType.INTEGER, ColumnType.BIGINT, 123, 123, False),
+         (ColumnType.INTEGER, ColumnType.SMALLINT, 50, 50, False),
+         (ColumnType.SMALLINT, ColumnType.INTEGER, 50, 50, False),
+         (ColumnType.INTEGER, ColumnType.REAL, 123, 123.0, False),
+         (ColumnType.REAL, ColumnType.DOUBLE_PRECISION, 123.5, 123.5, False),
+         # Boolean conversions - require autocast
+         (ColumnType.VARCHAR, ColumnType.BOOLEAN, "true", True, True),
+         (ColumnType.TEXT, ColumnType.BOOLEAN, "false", False, True),
+         (ColumnType.INTEGER, ColumnType.BOOLEAN, 1, True, True),
+         # Timestamp conversions - require autocast for string sources
+         (ColumnType.VARCHAR, ColumnType.DATE, "2023-01-01", "2023-01-01", True),
+         (
+             ColumnType.TEXT,
+             ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
+             "2023-01-01 12:00:00",
+             "2023-01-01 12:00:00",
+             True,
+         ),
+         # JSON conversions - require autocast, return as strings
+         (
+             ColumnType.TEXT,
+             ColumnType.JSON,
+             '{"key": "value"}',
+             '{"key": "value"}',
+             True,
+         ),
+         (
+             ColumnType.VARCHAR,
+             ColumnType.JSONB,
+             '{"key": "value"}',
+             '{"key": "value"}',
+             True,
+         ),
+         (
+             ColumnType.JSON,
+             ColumnType.JSONB,
+             '{"key": "value"}',
+             '{"key": "value"}',
+             False,
+         ),
+     ],
+ )
+ async def test_modify_column_type_with_autocast(
+     from_type: ColumnType,
+     to_type: ColumnType,
+     test_value,
+     expected_value,
+     requires_autocast: bool,
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test column type modifications with autocast for various type conversions.
+     """
+     table_name = "test_table_autocast"
+     column_name = "test_column"
+
+     # Create table with source type - handle special cases
+     if from_type == ColumnType.CHAR:
+         # CHAR needs a length specifier
+         type_spec = f"{from_type.value}(10)"
+     else:
+         type_spec = from_type.value
+
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} {type_spec})"
+     )
+
+     # Insert test data
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         test_value,
+     )
+
+     # Modify column type with autocast if required
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=to_type,
+         autocast=requires_autocast,
+     )
+
+     # Verify the conversion worked
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+
+     # Handle different expected value types
+     actual_value = row[column_name]
+     if isinstance(expected_value, str) and to_type in [
+         ColumnType.DATE,
+         ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
+     ]:
+         # For date/timestamp, convert to string for comparison
+         actual_value = str(actual_value)
+
+     assert actual_value == expected_value
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_autocast_without_data(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test that autocast works even when there's no data in the column.
+     """
+     table_name = "test_table_empty"
+     column_name = "test_column"
+
+     # Create table with VARCHAR
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
+     )
+
+     # Convert to INTEGER with autocast (should work even with no data)
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=ColumnType.INTEGER,
+         autocast=True,
+     )
+
+     # Insert integer data to verify it works
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         42,
+     )
+
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+     assert row[column_name] == 42
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_incompatible_without_autocast_fails(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test that incompatible type changes fail without autocast.
+     """
+     table_name = "test_table_fail"
+     column_name = "test_column"
+
+     # Create table with VARCHAR containing non-numeric data
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
+     )
+
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         "not_a_number",
+     )
+
+     # Attempt to convert to INTEGER without autocast should fail
+     with pytest.raises(Exception):  # Should be DatatypeMismatchError
+         await db_backed_actions.modify_column_type(
+             table_name,
+             column_name,
+             explicit_data_type=ColumnType.INTEGER,
+             autocast=False,
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_custom_type_with_autocast(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test autocast with custom types (enums).
+     """
+     table_name = "test_table_custom"
+     column_name = "test_column"
+     enum_name = "test_enum"
+
+     # Create enum type
+     await db_connection.conn.execute(f"CREATE TYPE {enum_name} AS ENUM ('A', 'B', 'C')")
+
+     # Create table with VARCHAR
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
+     )
+
+     # Insert enum-compatible string
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         "A",
+     )
+
+     # Convert to custom enum type with autocast
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         custom_data_type=enum_name,
+         autocast=True,
+     )
+
+     # Verify the conversion worked
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+     assert row[column_name] == "A"
+
+     # Verify the column type is now the custom enum
+     type_info = await db_connection.conn.fetch(
+         """
+         SELECT data_type, udt_name
+         FROM information_schema.columns
+         WHERE table_name = $1 AND column_name = $2
+         """,
+         table_name,
+         column_name,
+     )
+     assert type_info[0]["udt_name"] == enum_name
+
+
+ @pytest.mark.asyncio
+ async def test_add_constraint_foreign_key(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up two tables since we need a table target
+     await db_connection.conn.execute(
+         "CREATE TABLE external_table (id SERIAL PRIMARY KEY, external_column VARCHAR)"
+     )
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column_id INTEGER)"
+     )
+
+     # Insert an existing object into the external table
+     await db_connection.conn.execute(
+         "INSERT INTO external_table (id, external_column) VALUES ($1, $2)",
+         1,
+         "test_value",
+     )
+
+     # Add a foreign_key
+     await db_backed_actions.add_constraint(
+         "test_table",
+         ["test_column_id"],
+         ConstraintType.FOREIGN_KEY,
+         "test_foreign_key_constraint",
+         constraint_args=ForeignKeyConstraint(
+             target_table="external_table",
+             target_columns=frozenset({"id"}),
+         ),
+     )
+
+     # We should now have a foreign key constraint
+     # Insert an object that links to our known external object
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column_id) VALUES ($1)",
+         1,
+     )
+
+     # We should not be able to insert an object that does not link to the external object
+     with pytest.raises(asyncpg.exceptions.ForeignKeyViolationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column_id) VALUES ($1)",
+             2,
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_add_constraint_unique(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Add the table that should have a unique column
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     # Add a unique constraint
+     await db_backed_actions.add_constraint(
+         "test_table",
+         ["test_column"],
+         ConstraintType.UNIQUE,
+         "test_unique_constraint",
+     )
+
+     # We should now have a unique constraint, make sure that we can't
+     # insert the same value twice
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "test_value",
+     )
+
+     with pytest.raises(asyncpg.exceptions.UniqueViolationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column) VALUES ($1)",
+             "test_value",
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_add_constraint_primary_key(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create an empty table to simulate one just created
+     await db_connection.conn.execute("CREATE TABLE test_table ()")
+
+     # Add a new column
+     await db_backed_actions.add_column("test_table", "test_column", ColumnType.INTEGER)
+
+     # Promote the column to a primary key
+     await db_backed_actions.add_constraint(
+         "test_table",
+         ["test_column"],
+         ConstraintType.PRIMARY_KEY,
+         "test_primary_key_constraint",
+     )
+
+     # We should now have a primary key constraint, make sure that we can insert
+     # a value into the column
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         1,
+     )
+
+     # We should not be able to insert a duplicate primary key value
+     with pytest.raises(asyncpg.exceptions.UniqueViolationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column) VALUES ($1)",
+             1,
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_add_constraint_check(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table with a integer price column
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, price INTEGER)"
+     )
+
+     # Now we add a check constraint that this price column should be positive
+     await db_backed_actions.add_constraint(
+         "test_table",
+         [],
+         ConstraintType.CHECK,
+         "test_check_constraint",
+         constraint_args=CheckConstraint(check_condition="price > 0"),
+     )
+
+     # Make sure that we can insert a positive value
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (price) VALUES ($1)",
+         1,
+     )
+
+     # We expect negative values to fail
+     with pytest.raises(asyncpg.exceptions.CheckViolationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (price) VALUES ($1)",
+             -1,
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_drop_constraint(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Manually create a table with a unique constraint
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+     await db_connection.conn.execute(
+         "ALTER TABLE test_table ADD CONSTRAINT test_unique_constraint UNIQUE (test_column)"
+     )
+
+     # Drop the unique constraint
+     await db_backed_actions.drop_constraint("test_table", "test_unique_constraint")
+
+     # We should now be able to insert the same value twice
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "test_value",
+     )
+
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "test_value",
+     )
+
+
+ @pytest.mark.asyncio
+ async def test_add_not_null(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table with a nullable column (default behavior for fields)
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     await db_backed_actions.add_not_null("test_table", "test_column")
+
+     # We should now have a not null constraint, make sure that we can't insert a null value
+     with pytest.raises(asyncpg.exceptions.NotNullViolationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column) VALUES ($1)",
+             None,
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_drop_not_null(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table with a not null column
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR NOT NULL)"
+     )
+
+     await db_backed_actions.drop_not_null("test_table", "test_column")
+
+     # We should now be able to insert a null value
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         None,
+     )
+
+
+ @pytest.mark.asyncio
+ async def test_add_type(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     await db_backed_actions.add_type("test_type", ["A", "B"])
+
+     # Create a new table with a column of the new type
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
+     )
+
+     # We should be able to insert values that match this type
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "A",
+     )
+
+     # Values not in the enum type definition should fail during insertion
+     with pytest.raises(asyncpg.exceptions.InvalidTextRepresentationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column) VALUES ($1)",
+             "C",
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_add_type_values(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create an existing enum
+     await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A')")
+
+     # Create a table that uses this enum
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
+     )
+
+     # Add a new value to this enum
+     await db_backed_actions.add_type_values("test_type", ["B"])
+
+     # We should be able to insert values that match this type
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "B",
+     )
+
+
+ @pytest.mark.asyncio
+ async def test_drop_type_values_no_existing_references(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create an existing enum with two values
+     await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A', 'B')")
+
+     # Drop a value from this enum
+     await db_backed_actions.drop_type_values("test_type", ["B"], target_columns=[])
+
+     # Create a table that uses this enum
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
+     )
+
+     # Fetch the values for the enum that are currently in use
+     result = await db_connection.conn.fetch(
+         "SELECT array_agg(unnest) AS value FROM unnest(enum_range(NULL::test_type))"
+     )
+     current_values = result[0]
+     assert current_values["value"] == ["A"]
+
+
+ @pytest.mark.asyncio
+ async def test_drop_type_values(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create an existing enum with two values
+     await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A', 'B')")
+
+     # Create a table that uses this enum
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
+     )
+
+     # Drop a value from this enum
+     await db_backed_actions.drop_type_values(
+         "test_type", ["B"], target_columns=[("test_table", "test_column")]
+     )
+
+     # Fetch the values for the enum that are currently in use
+     result = await db_connection.conn.fetch(
+         "SELECT array_agg(unnest) AS value FROM unnest(enum_range(NULL::test_type))"
+     )
+     current_values = result[0]
+     assert current_values["value"] == ["A"]
+
+     # We should be able to insert values that match A but not B
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column) VALUES ($1)",
+         "A",
+     )
+
+     with pytest.raises(asyncpg.exceptions.InvalidTextRepresentationError):
+         await db_connection.conn.execute(
+             "INSERT INTO test_table (test_column) VALUES ($1)",
+             "B",
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_drop_type(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a new type
+     await db_connection.conn.execute("CREATE TYPE test_type AS ENUM ('A')")
+
+     # Drop this type
+     await db_backed_actions.drop_type("test_type")
+
+     # We shouldn't be able to create a table with this type
+     with pytest.raises(asyncpg.exceptions.UndefinedObjectError):
+         await db_connection.conn.execute(
+             "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column test_type)"
+         )
+
+
+ @pytest.mark.asyncio
+ async def test_add_index_single_column(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table with a column to index
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+
+     # Add an index on a single column
+     await db_backed_actions.add_index(
+         "test_table",
+         ["test_column"],
+         "test_single_column_index",
+     )
+
+     # Verify the index exists by querying the system catalog
+     result = await db_connection.conn.fetch(
+         """
+         SELECT indexname, indexdef
+         FROM pg_indexes
+         WHERE tablename = 'test_table'
+         AND indexname = 'test_single_column_index'
+         """
+     )
+     assert len(result) == 1
+     assert (
+         "CREATE INDEX test_single_column_index ON public.test_table USING btree (test_column)"
+         in result[0]["indexdef"]
+     )
+
+
+ @pytest.mark.asyncio
+ async def test_add_index_multiple_columns(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table with multiple columns to index
+     await db_connection.conn.execute(
+         """
+         CREATE TABLE test_table (
+             id SERIAL PRIMARY KEY,
+             first_column VARCHAR,
+             second_column INTEGER
+         )
+         """
+     )
+
+     # Add an index on multiple columns
+     await db_backed_actions.add_index(
+         "test_table",
+         ["first_column", "second_column"],
+         "test_multi_column_index",
+     )
+
+     # Verify the index exists and includes both columns
+     result = await db_connection.conn.fetch(
+         """
+         SELECT indexname, indexdef
+         FROM pg_indexes
+         WHERE tablename = 'test_table'
+         AND indexname = 'test_multi_column_index'
+         """
+     )
+     assert len(result) == 1
+     assert (
+         "CREATE INDEX test_multi_column_index ON public.test_table USING btree (first_column, second_column)"
+         in result[0]["indexdef"]
+     )
+
+
+ @pytest.mark.asyncio
+ async def test_drop_index(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Create a table and add an index
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column VARCHAR)"
+     )
+     await db_connection.conn.execute(
+         "CREATE INDEX test_index ON test_table (test_column)"
+     )
+
+     # Verify the index exists before dropping
+     result = await db_connection.conn.fetch(
+         """
+         SELECT indexname
+         FROM pg_indexes
+         WHERE tablename = 'test_table'
+         AND indexname = 'test_index'
+         """
+     )
+     assert len(result) == 1
+
+     # Drop the index
+     await db_backed_actions.drop_index("test_table", "test_index")
+
+     # Verify the index no longer exists
+     result = await db_connection.conn.fetch(
+         """
+         SELECT indexname
+         FROM pg_indexes
+         WHERE tablename = 'test_table'
+         AND indexname = 'test_index'
+         """
+     )
+     assert len(result) == 0
+
+
+ @pytest.mark.asyncio
+ async def test_add_constraint_foreign_key_actions(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     # Set up two tables since we need a table target
+     await db_connection.conn.execute(
+         "CREATE TABLE external_table (id SERIAL PRIMARY KEY, external_column VARCHAR)"
+     )
+     await db_connection.conn.execute(
+         "CREATE TABLE test_table (id SERIAL PRIMARY KEY, test_column_id INTEGER)"
+     )
+
+     # Insert an existing object into the external table
+     await db_connection.conn.execute(
+         "INSERT INTO external_table (id, external_column) VALUES ($1, $2)",
+         1,
+         "test_value",
+     )
+
+     # Add a foreign_key with CASCADE actions
+     await db_backed_actions.add_constraint(
+         "test_table",
+         ["test_column_id"],
+         ConstraintType.FOREIGN_KEY,
+         "test_foreign_key_constraint",
+         constraint_args=ForeignKeyConstraint(
+             target_table="external_table",
+             target_columns=frozenset({"id"}),
+             on_delete="CASCADE",
+             on_update="CASCADE",
+         ),
+     )
+
+     # Insert a row that references the external table
+     await db_connection.conn.execute(
+         "INSERT INTO test_table (test_column_id) VALUES ($1)",
+         1,
+     )
+
+     # Update the external table id - should cascade to test_table
+     await db_connection.conn.execute(
+         "UPDATE external_table SET id = $1 WHERE id = $2",
+         2,
+         1,
+     )
+
+     # Verify the update cascaded
+     result = await db_connection.conn.fetch(
+         "SELECT test_column_id FROM test_table WHERE id = 1"
+     )
+     assert result[0]["test_column_id"] == 2
+
+     # Delete from external table - should cascade to test_table
+     await db_connection.conn.execute(
+         "DELETE FROM external_table WHERE id = $1",
+         2,
+     )
+
+     # Verify the delete cascaded
+     result = await db_connection.conn.fetch("SELECT COUNT(*) FROM test_table")
+     assert result[0]["count"] == 0
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_uuid_conversion(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test UUID type conversions specifically.
+     """
+     table_name = "test_table_uuid"
+     column_name = "test_column"
+     uuid_string = "550e8400-e29b-41d4-a716-446655440000"
+
+     # Create table with VARCHAR
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} VARCHAR)"
+     )
+
+     # Insert UUID string
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         uuid_string,
+     )
+
+     # Convert to UUID with autocast
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=ColumnType.UUID,
+         autocast=True,
+     )
+
+     # Verify the conversion worked
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+
+     # UUID columns return UUID objects
+     actual_value = row[column_name]
+     assert str(actual_value) == uuid_string
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_date_to_timestamp(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test date to timestamp conversions.
+     """
+     from datetime import date, datetime
+
+     table_name = "test_table_date"
+     column_name = "test_column"
+     test_date = date(2023, 1, 1)
+
+     # Create table with DATE
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} DATE)"
+     )
+
+     # Insert date value
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         test_date,
+     )
+
+     # Convert to TIMESTAMP (no autocast needed for compatible types)
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
+         autocast=False,
+     )
+
+     # Verify the conversion worked
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+
+     # Should be a datetime object now
+     actual_value = row[column_name]
+     assert isinstance(actual_value, datetime)
+     assert actual_value.date() == test_date
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_char_to_varchar(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test CHAR to VARCHAR conversion with proper length handling.
+     """
+     table_name = "test_table_char"
+     column_name = "test_column"
+     test_value = "test"
+
+     # Create table with CHAR(10)
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} CHAR(10))"
+     )
+
+     # Insert test value
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         test_value,
+     )
+
+     # Convert to VARCHAR
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=ColumnType.VARCHAR,
+         autocast=False,
+     )
+
+     # Verify the conversion worked
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+
+     # CHAR pads with spaces, VARCHAR should trim them
+     actual_value = row[column_name]
+     assert actual_value.strip() == test_value
+
+
+ @pytest.mark.asyncio
+ async def test_modify_column_type_scalar_to_array(
+     db_backed_actions: DatabaseActions,
+     db_connection: DBConnection,
+ ):
+     """
+     Test converting a scalar column to an array column with autocast.
+     """
+     table_name = "test_table_array_conversion"
+     column_name = "test_column"
+
+     # Create table with INTEGER
+     await db_connection.conn.execute(
+         f"CREATE TABLE {table_name} (id SERIAL PRIMARY KEY, {column_name} INTEGER)"
+     )
+
+     # Insert a scalar value
+     await db_connection.conn.execute(
+         f"INSERT INTO {table_name} ({column_name}) VALUES ($1)",
+         42,
+     )
+
+     # Convert to INTEGER[] using autocast
+     await db_backed_actions.modify_column_type(
+         table_name,
+         column_name,
+         explicit_data_type=ColumnType.INTEGER,
+         explicit_data_is_list=True,
+         autocast=True,
+     )
+
+     # Verify the scalar value was converted to a single-element array
+     rows = await db_connection.conn.fetch(f"SELECT * FROM {table_name}")
+     row = rows[0]
+     assert row
+     assert row[column_name] == [42]
+
+     # Verify the column type is now an array by checking the PostgreSQL catalog
+     type_info = await db_connection.conn.fetch(
+         """
+         SELECT data_type, udt_name
+         FROM information_schema.columns
+         WHERE table_name = $1 AND column_name = $2
+         """,
+         table_name,
+         column_name,
+     )
+     assert type_info[0]["data_type"] == "ARRAY"
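
For orientation, the tests above drive the DatabaseActions API directly against a live PostgreSQL database. Below is a minimal sketch of the same calls outside of pytest. It assumes that DBConnection wraps an asyncpg connection (as the db_connection.conn usage in the tests suggests); the DSN, table name, and constraint name are hypothetical placeholders, not part of the package.

import asyncio

import asyncpg

from iceaxe.schemas.actions import ColumnType, ConstraintType, DatabaseActions
from iceaxe.session import DBConnection


async def main() -> None:
    # Hypothetical DSN for illustration only; point this at a disposable database.
    conn = DBConnection(
        await asyncpg.connect("postgresql://user:password@localhost:5432/iceaxe_demo")
    )

    # Dry-run mode records the planned actions without touching the database,
    # mirroring test_record_signature_dry_run above.
    planner = DatabaseActions(dry_run=True)
    await planner.add_table("demo_table")
    print(planner.dry_run_actions)

    # Execution mode applies the same calls against the live connection,
    # mirroring the db_backed_actions fixture used by the tests.
    actions = DatabaseActions(dry_run=False, db_connection=conn)
    await actions.add_table("demo_table")
    await actions.add_column(
        "demo_table", "name", explicit_data_type=ColumnType.VARCHAR
    )
    await actions.add_constraint(
        "demo_table",
        ["name"],
        ConstraintType.UNIQUE,
        "demo_table_name_unique",
    )


if __name__ == "__main__":
    asyncio.run(main())

In dry-run mode the planned calls are only collected on dry_run_actions (prod_sqls stays empty), while with dry_run=False the generated SQL is executed immediately and recorded on prod_sqls, which is exactly the behavior the tests in this file verify.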