iceaxe 0.7.0.dev1__tar.gz → 0.7.0.dev3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of iceaxe might be problematic.

Files changed (81)
  1. {iceaxe-0.7.0.dev1/iceaxe.egg-info → iceaxe-0.7.0.dev3}/PKG-INFO +2 -3
  2. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/README.md +1 -2
  3. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/schemas/test_actions.py +5 -5
  4. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/schemas/test_db_memory_serializer.py +63 -2
  5. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/schemas/test_db_serializer.py +46 -0
  6. iceaxe-0.7.0.dev3/iceaxe/__tests__/schemas/test_db_stubs.py +190 -0
  7. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_comparison.py +27 -0
  8. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/base.py +1 -1
  9. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/actions.py +4 -4
  10. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/db_memory_serializer.py +89 -7
  11. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/db_stubs.py +113 -0
  12. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/session.py +1 -1
  13. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/sql_types.py +33 -4
  14. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3/iceaxe.egg-info}/PKG-INFO +2 -3
  15. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/pyproject.toml +1 -1
  16. iceaxe-0.7.0.dev1/iceaxe/__tests__/schemas/test_db_stubs.py +0 -26
  17. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/LICENSE +0 -0
  18. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/MANIFEST.in +0 -0
  19. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__init__.py +0 -0
  20. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/__init__.py +0 -0
  21. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/benchmarks/__init__.py +0 -0
  22. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/benchmarks/test_bulk_insert.py +0 -0
  23. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/benchmarks/test_select.py +0 -0
  24. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/conf_models.py +0 -0
  25. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/conftest.py +0 -0
  26. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/helpers.py +0 -0
  27. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/migrations/__init__.py +0 -0
  28. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/migrations/conftest.py +0 -0
  29. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/migrations/test_action_sorter.py +0 -0
  30. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/migrations/test_generator.py +0 -0
  31. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/migrations/test_generics.py +0 -0
  32. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/mountaineer/__init__.py +0 -0
  33. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
  34. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/mountaineer/dependencies/test_core.py +0 -0
  35. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/schemas/__init__.py +0 -0
  36. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/schemas/test_cli.py +0 -0
  37. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_alias.py +0 -0
  38. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_base.py +0 -0
  39. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_field.py +0 -0
  40. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_helpers.py +0 -0
  41. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_modifications.py +0 -0
  42. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_queries.py +0 -0
  43. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_queries_str.py +0 -0
  44. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_session.py +0 -0
  45. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/__tests__/test_text_search.py +0 -0
  46. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/alias_values.py +0 -0
  47. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/comparison.py +0 -0
  48. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/field.py +0 -0
  49. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/functions.py +0 -0
  50. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/generics.py +0 -0
  51. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/io.py +0 -0
  52. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/logging.py +0 -0
  53. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/__init__.py +0 -0
  54. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/action_sorter.py +0 -0
  55. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/cli.py +0 -0
  56. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/client_io.py +0 -0
  57. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/generator.py +0 -0
  58. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/migration.py +0 -0
  59. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/migrations/migrator.py +0 -0
  60. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/modifications.py +0 -0
  61. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/mountaineer/__init__.py +0 -0
  62. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/mountaineer/cli.py +0 -0
  63. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/mountaineer/config.py +0 -0
  64. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/mountaineer/dependencies/__init__.py +0 -0
  65. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/mountaineer/dependencies/core.py +0 -0
  66. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/postgres.py +0 -0
  67. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/py.typed +0 -0
  68. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/queries.py +0 -0
  69. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/queries_str.py +0 -0
  70. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/__init__.py +0 -0
  71. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/cli.py +0 -0
  72. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/schemas/db_serializer.py +0 -0
  73. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/session_optimized.c +0 -0
  74. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/session_optimized.pyx +0 -0
  75. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe/typing.py +0 -0
  76. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe.egg-info/SOURCES.txt +0 -0
  77. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe.egg-info/dependency_links.txt +0 -0
  78. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe.egg-info/requires.txt +0 -0
  79. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/iceaxe.egg-info/top_level.txt +0 -0
  80. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/setup.cfg +0 -0
  81. {iceaxe-0.7.0.dev1 → iceaxe-0.7.0.dev3}/setup.py +0 -0

--- iceaxe-0.7.0.dev1/iceaxe.egg-info/PKG-INFO
+++ iceaxe-0.7.0.dev3/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: iceaxe
-Version: 0.7.0.dev1
+Version: 0.7.0.dev3
 Summary: A modern, fast ORM for Python.
 Author-email: Pierce Freeman <pierce@freeman.vc>
 Requires-Python: >=3.11
@@ -15,8 +15,7 @@ Dynamic: license-file
 
 ![Iceaxe Logo](https://raw.githubusercontent.com/piercefreeman/iceaxe/main/media/header.png)
 
-![PyPI - Python Version](https://img.shields.io/pypi/pyversions/iceaxe)
-[![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
+![Python Version](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2Fpiercefreeman%2Ficeaxe%2Frefs%2Fheads%2Fmain%2Fpyproject.toml) [![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
 
 A modern, fast ORM for Python. We have the following goals:
 

--- iceaxe-0.7.0.dev1/README.md
+++ iceaxe-0.7.0.dev3/README.md
@@ -2,8 +2,7 @@
 
 ![Iceaxe Logo](https://raw.githubusercontent.com/piercefreeman/iceaxe/main/media/header.png)
 
-![PyPI - Python Version](https://img.shields.io/pypi/pyversions/iceaxe)
-[![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
+![Python Version](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2Fpiercefreeman%2Ficeaxe%2Frefs%2Fheads%2Fmain%2Fpyproject.toml) [![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
 
 A modern, fast ORM for Python. We have the following goals:
 

--- iceaxe-0.7.0.dev1/iceaxe/__tests__/schemas/test_actions.py
+++ iceaxe-0.7.0.dev3/iceaxe/__tests__/schemas/test_actions.py
@@ -263,8 +263,8 @@ async def test_add_column_any_type(
         (ColumnType.SERIAL, ColumnType.INTEGER),
         (ColumnType.BIGSERIAL, ColumnType.BIGINT),
         (ColumnType.CHAR, "character"),
-        (ColumnType.TIME, "time without time zone"),
-        (ColumnType.TIMESTAMP, "timestamp without time zone"),
+        (ColumnType.TIME_WITHOUT_TIME_ZONE, "time without time zone"),
+        (ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE, "timestamp without time zone"),
     )
 
     allowed_values = {enum_value.value}
@@ -365,7 +365,7 @@ async def test_modify_column_type(
         (ColumnType.VARCHAR, ColumnType.DATE, "2023-01-01", "2023-01-01", True),
         (
             ColumnType.TEXT,
-            ColumnType.TIMESTAMP,
+            ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
             "2023-01-01 12:00:00",
             "2023-01-01 12:00:00",
             True,
@@ -443,7 +443,7 @@ async def test_modify_column_type_with_autocast(
     actual_value = row[column_name]
     if isinstance(expected_value, str) and to_type in [
         ColumnType.DATE,
-        ColumnType.TIMESTAMP,
+        ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
     ]:
         # For date/timestamp, convert to string for comparison
        actual_value = str(actual_value)
@@ -1158,7 +1158,7 @@ async def test_modify_column_type_date_to_timestamp(
     await db_backed_actions.modify_column_type(
         table_name,
         column_name,
-        explicit_data_type=ColumnType.TIMESTAMP,
+        explicit_data_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
         autocast=False,
     )
 

--- iceaxe-0.7.0.dev1/iceaxe/__tests__/schemas/test_db_memory_serializer.py
+++ iceaxe-0.7.0.dev3/iceaxe/__tests__/schemas/test_db_memory_serializer.py
@@ -1,3 +1,4 @@
+import warnings
 from datetime import date, datetime, time, timedelta
 from enum import Enum, IntEnum, StrEnum
 from typing import Generic, Sequence, TypeVar
@@ -20,6 +21,7 @@ from iceaxe.schemas.actions import (
     DryRunComment,
 )
 from iceaxe.schemas.db_memory_serializer import (
+    CompositePrimaryKeyConstraintError,
     DatabaseHandler,
     DatabaseMemorySerializer,
 )
@@ -748,7 +750,7 @@ def test_enum_column_assignment(clear_all_database_objects):
         DBColumn(
             table_name="exampledbmodel",
             column_name="standard_datetime",
-            column_type=ColumnType.TIMESTAMP,
+            column_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
             column_is_list=False,
             nullable=False,
         ),
@@ -808,7 +810,7 @@ def test_enum_column_assignment(clear_all_database_objects):
         DBColumn(
             table_name="exampledbmodel",
            column_name="standard_time",
-            column_type=ColumnType.TIME,
+            column_type=ColumnType.TIME_WITHOUT_TIME_ZONE,
             column_is_list=False,
             nullable=False,
         ),
@@ -1462,3 +1464,62 @@ def test_foreign_key_actions():
     assert fk_constraint.foreign_key_constraint.target_columns == frozenset({"id"})
     assert fk_constraint.foreign_key_constraint.on_delete == "CASCADE"
     assert fk_constraint.foreign_key_constraint.on_update == "CASCADE"
+
+
+def test_multiple_primary_keys_foreign_key_error():
+    """
+    Test that when a model has multiple primary keys and foreign key constraints,
+    we get a helpful error message explaining the issue.
+    """
+
+    class User(TableBase):
+        id: int = Field(primary_key=True)
+        tenant_id: int = Field(primary_key=True)  # Composite primary key
+        name: str
+
+    class Topic(TableBase):
+        id: str = Field(primary_key=True)
+        tenant_id: int = Field(primary_key=True)  # Composite primary key
+        title: str
+
+    class Rec(TableBase):
+        id: int = Field(primary_key=True, default=None)
+        creator_id: int = Field(
+            foreign_key="user.id"
+        )  # This will fail because user is leveraging our synthetic primary key
+        topic_id: str = Field(
+            foreign_key="topic.id"
+        )  # This will fail because topic is leveraging our synthetic primary key
+
+    migrator = DatabaseMemorySerializer()
+
+    with pytest.raises(CompositePrimaryKeyConstraintError) as exc_info:
+        db_objects = list(migrator.delegate([User, Topic, Rec]))
+        migrator.order_db_objects(db_objects)
+
+    # Check that the exception has the expected attributes
+    assert exc_info.value.missing_constraints == [("user", "id")]
+
+
+def test_multiple_primary_keys_warning():
+    """
+    Test that when a model has multiple primary keys, we get a warning.
+    """
+
+    class ExampleModel(TableBase):
+        value_a: int = Field(primary_key=True)
+        value_b: int = Field(primary_key=True)
+
+    migrator = DatabaseMemorySerializer()
+
+    with warnings.catch_warnings(record=True) as w:
+        warnings.simplefilter("always")
+        list(migrator.delegate([ExampleModel]))
+
+    # Check that a warning was issued
+    assert len(w) == 1
+    assert issubclass(w[0].category, UserWarning)
+    warning_message = str(w[0].message)
+    assert "multiple fields marked as primary_key=True" in warning_message
+    assert "composite primary key constraint" in warning_message
+    assert "Consider using only one primary key field" in warning_message

--- iceaxe-0.7.0.dev1/iceaxe/__tests__/schemas/test_db_serializer.py
+++ iceaxe-0.7.0.dev3/iceaxe/__tests__/schemas/test_db_serializer.py
@@ -143,6 +143,52 @@ class ValueEnumInt(IntEnum):
                 )
             ],
         ),
+        # Test PostgreSQL's storage format for timestamp without timezone
+        (
+            """
+            CREATE TABLE exampledbmodel (
+                id SERIAL PRIMARY KEY,
+                created_at TIMESTAMP NOT NULL
+            );
+            """,
+            [
+                (
+                    DBColumn(
+                        table_name="exampledbmodel",
+                        column_name="created_at",
+                        column_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
+                        column_is_list=False,
+                        nullable=False,
+                    ),
+                    [
+                        DBTable(table_name="exampledbmodel"),
+                    ],
+                )
+            ],
+        ),
+        # Test PostgreSQL's storage format for timestamp with timezone
+        (
+            """
+            CREATE TABLE exampledbmodel (
+                id SERIAL PRIMARY KEY,
+                created_at TIMESTAMPTZ NOT NULL
+            );
+            """,
+            [
+                (
+                    DBColumn(
+                        table_name="exampledbmodel",
+                        column_name="created_at",
+                        column_type=ColumnType.TIMESTAMP_WITH_TIME_ZONE,
+                        column_is_list=False,
+                        nullable=False,
+                    ),
+                    [
+                        DBTable(table_name="exampledbmodel"),
+                    ],
+                )
+            ],
+        ),
     ],
 )
 async def test_simple_db_serializer(
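
Both new cases pin down how PostgreSQL reports these columns back to the serializer: a column declared as TIMESTAMP is read back as "timestamp without time zone", while TIMESTAMPTZ comes back as "timestamp with time zone". The sketch below is not part of this release; it assumes asyncpg and a placeholder DSN, and simply shows where those strings come from by querying information_schema:

    import asyncio

    import asyncpg  # assumption: asyncpg is available, as iceaxe targets PostgreSQL

    async def show_reported_types(dsn: str) -> None:
        # `dsn` is a placeholder connection string, not something from this diff.
        conn = await asyncpg.connect(dsn)
        try:
            rows = await conn.fetch(
                "SELECT column_name, data_type "
                "FROM information_schema.columns "
                "WHERE table_name = 'exampledbmodel'"
            )
            # PostgreSQL reports the expanded names, e.g.
            #   created_at -> 'timestamp without time zone' for TIMESTAMP
            #   created_at -> 'timestamp with time zone' for TIMESTAMPTZ
            print([(r["column_name"], r["data_type"]) for r in rows])
        finally:
            await conn.close()

    asyncio.run(show_reported_types("postgresql://localhost/example"))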

--- /dev/null
+++ iceaxe-0.7.0.dev3/iceaxe/__tests__/schemas/test_db_stubs.py
@@ -0,0 +1,190 @@
+import pytest
+
+from iceaxe.schemas.db_stubs import ConstraintPointerInfo, DBObjectPointer, DBType
+
+
+class MockDBObjectPointer(DBObjectPointer):
+    """Mock implementation of DBObjectPointer for testing parser methods."""
+
+    representation_str: str
+
+    def representation(self) -> str:
+        return self.representation_str
+
+
+@pytest.mark.parametrize(
+    "representation_str,expected_result",
+    [
+        # Valid constraint pointer formats
+        (
+            "users.['id'].PRIMARY KEY",
+            ConstraintPointerInfo("users", ["id"], "PRIMARY KEY"),
+        ),
+        (
+            "orders.['user_id', 'product_id'].UNIQUE",
+            ConstraintPointerInfo("orders", ["user_id", "product_id"], "UNIQUE"),
+        ),
+        (
+            "products.['name'].INDEX",
+            ConstraintPointerInfo("products", ["name"], "INDEX"),
+        ),
+        (
+            "table_name.['col1', 'col2', 'col3'].FOREIGN KEY",
+            ConstraintPointerInfo(
+                "table_name", ["col1", "col2", "col3"], "FOREIGN KEY"
+            ),
+        ),
+        # Single quotes
+        ("users.['email'].UNIQUE", ConstraintPointerInfo("users", ["email"], "UNIQUE")),
+        # Double quotes
+        ('users.["email"].UNIQUE', ConstraintPointerInfo("users", ["email"], "UNIQUE")),
+        # Mixed quotes
+        (
+            "users.[\"col1\", 'col2'].UNIQUE",
+            ConstraintPointerInfo("users", ["col1", "col2"], "UNIQUE"),
+        ),
+        # Extra whitespace
+        (
+            "users.[ 'col1' , 'col2' ].UNIQUE",
+            ConstraintPointerInfo("users", ["col1", "col2"], "UNIQUE"),
+        ),
+        # Empty column list
+        ("users.[].CHECK", ConstraintPointerInfo("users", [], "CHECK")),
+        # Schema-qualified table names (dots in table names are valid when representing schema.table)
+        (
+            "public.users.['column'].PRIMARY KEY",
+            ConstraintPointerInfo("public.users", ["column"], "PRIMARY KEY"),
+        ),
+        # Complex constraint types
+        (
+            "users.['id'].PRIMARY KEY AUTOINCREMENT",
+            ConstraintPointerInfo("users", ["id"], "PRIMARY KEY AUTOINCREMENT"),
+        ),
+        # Table names with underscores and numbers (valid PostgreSQL identifiers)
+        (
+            "user_table_2.['id'].PRIMARY KEY",
+            ConstraintPointerInfo("user_table_2", ["id"], "PRIMARY KEY"),
+        ),
+        # Column names with underscores and numbers
+        (
+            "users.['user_id_2', 'created_at'].UNIQUE",
+            ConstraintPointerInfo("users", ["user_id_2", "created_at"], "UNIQUE"),
+        ),
+        # Invalid formats that should return None
+        ("users.column.UNIQUE", None),  # Missing brackets
+        ("users.['column']", None),  # Missing constraint type
+        ("['column'].UNIQUE", None),  # Missing table name
+        ("users", None),  # Just table name
+        ("", None),  # Empty string
+        ("users.column", None),  # Simple table.column format
+        ("invalid_format", None),  # Random string
+        # Malformed bracket syntax
+        ("users.[column].UNIQUE", None),  # Missing quotes in brackets
+        ("users.['column.UNIQUE", None),  # Unclosed bracket
+        ("users.column'].UNIQUE", None),  # Missing opening bracket
+    ],
+)
+def test_parse_constraint_pointer(
+    representation_str: str, expected_result: ConstraintPointerInfo | None
+):
+    """Test parsing of constraint pointer representations."""
+    pointer = MockDBObjectPointer(representation_str=representation_str)
+    result = pointer.parse_constraint_pointer()
+
+    if expected_result is None:
+        assert result is None
+    else:
+        assert result is not None
+        assert result.table_name == expected_result.table_name
+        assert result.column_names == expected_result.column_names
+        assert result.constraint_type == expected_result.constraint_type
+
+
+@pytest.mark.parametrize(
+    "representation_str,expected_table_name",
+    [
+        # Constraint pointer formats
+        ("users.['id'].PRIMARY KEY", "users"),
+        ("orders.['user_id', 'product_id'].UNIQUE", "orders"),
+        ("public.users.['column'].INDEX", "public.users"),
+        # Simple table.column formats
+        ("users.email", "users"),
+        ("products.name", "products"),
+        ("public.users.column", "public.users"),  # Schema.table.column format
+        # Edge cases
+        ("table_only", "table_only"),
+        ("", None),  # Empty string should return None
+        ("users.['id'].PRIMARY KEY", "users"),  # Constraint format takes precedence
+        # Complex table names with underscores and numbers
+        ("user_table_123.column", "user_table_123"),
+        ("schema_1.table_2.column", "schema_1.table_2"),
+        # Multiple dots in representation (should extract the table part correctly)
+        ("very.long.schema.table.['col'].UNIQUE", "very.long.schema.table"),
+    ],
+)
+def test_get_table_name(representation_str: str, expected_table_name: str | None):
+    """Test extraction of table names from pointer representations."""
+    pointer = MockDBObjectPointer(representation_str=representation_str)
+    result = pointer.get_table_name()
+    assert result == expected_table_name
+
+
+@pytest.mark.parametrize(
+    "representation_str,expected_column_names",
+    [
+        # Constraint pointer formats
+        ("users.['id'].PRIMARY KEY", ["id"]),
+        ("orders.['user_id', 'product_id'].UNIQUE", ["user_id", "product_id"]),
+        ("products.['name', 'category', 'price'].INDEX", ["name", "category", "price"]),
+        ("users.[].CHECK", []),  # Empty column list
+        # Simple table.column formats
+        ("users.email", ["email"]),
+        ("products.name", ["name"]),
+        ("public.users.column", ["column"]),  # Schema.table.column format
+        # Edge cases
+        ("table_only", []),  # No columns
+        ("", []),  # Empty string
+        # Whitespace handling
+        ("users.[ 'col1' , 'col2' ].UNIQUE", ["col1", "col2"]),
+        # Quote handling
+        ("users.[\"col1\", 'col2'].UNIQUE", ["col1", "col2"]),
+        # Column names with underscores and numbers
+        (
+            "users.['user_id_2', 'created_at_timestamp'].UNIQUE",
+            ["user_id_2", "created_at_timestamp"],
+        ),
+        # Complex schema.table.column cases
+        ("schema.table.column_name", ["column_name"]),
+        ("very.long.schema.table.column", ["column"]),
+    ],
+)
+def test_get_column_names(representation_str: str, expected_column_names: list[str]):
+    """Test extraction of column names from pointer representations."""
+    pointer = MockDBObjectPointer(representation_str=representation_str)
+    result = pointer.get_column_names()
+    assert result == expected_column_names
+
+
+def test_merge_type_columns():
+    """
+    Allow separately yielded type definitions to collect their reference columns. If an
+    enum is referenced in one place, this should build up to the full definition.
+
+    """
+    type_a = DBType(
+        name="type_a",
+        values=frozenset({"A"}),
+        reference_columns=frozenset({("table_a", "column_a")}),
+    )
+    type_b = DBType(
+        name="type_a",
+        values=frozenset({"A"}),
+        reference_columns=frozenset({("table_b", "column_b")}),
+    )
+
+    merged = type_a.merge(type_b)
+    assert merged.name == "type_a"
+    assert merged.values == frozenset({"A"})
+    assert merged.reference_columns == frozenset(
+        {("table_a", "column_a"), ("table_b", "column_b")}
+    )

--- iceaxe-0.7.0.dev1/iceaxe/__tests__/test_comparison.py
+++ iceaxe-0.7.0.dev3/iceaxe/__tests__/test_comparison.py
@@ -10,6 +10,7 @@ from iceaxe.base import TableBase
 from iceaxe.comparison import ComparisonType, FieldComparison
 from iceaxe.field import DBFieldClassDefinition, DBFieldInfo
 from iceaxe.queries_str import QueryLiteral
+from iceaxe.sql_types import ColumnType
 from iceaxe.typing import column
 
 
@@ -354,3 +355,29 @@ def test_force_join_constraints(
     )
     forced = comparison.force_join_constraints()
     assert forced.comparison == expected_comparison
+
+
+@pytest.mark.parametrize(
+    "sql_type_string, expected_column_type",
+    [
+        ("timestamp", ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE),  # Tests aliasing
+        ("timestamp without time zone", ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE),
+        ("timestamp with time zone", ColumnType.TIMESTAMP_WITH_TIME_ZONE),
+        ("time", ColumnType.TIME_WITHOUT_TIME_ZONE),  # Tests aliasing
+        ("time without time zone", ColumnType.TIME_WITHOUT_TIME_ZONE),
+        ("time with time zone", ColumnType.TIME_WITH_TIME_ZONE),
+    ],
+)
+def test_postgres_datetime_timezone_casting(
+    sql_type_string: str, expected_column_type: ColumnType
+):
+    """
+    Test that PostgresDateTime fields with different timezone configurations
+    are properly handled by the ColumnType enum, specifically testing that
+    PostgreSQL's storage format ('timestamp without time zone') can be parsed.
+    This also tests that SQL standard aliases like "timestamp" correctly map
+    to "timestamp without time zone".
+    """
+
+    # Test that ColumnType enum can handle PostgreSQL's storage formats and aliases
+    assert ColumnType(sql_type_string) == expected_column_type

--- iceaxe-0.7.0.dev1/iceaxe/base.py
+++ iceaxe-0.7.0.dev3/iceaxe/base.py
@@ -273,7 +273,7 @@ class TableBase(BaseModel, metaclass=DBModelMetaclass):
         :param name: Attribute name
         :param value: New value
         """
-        if name in self.model_fields:
+        if name in self.__class__.model_fields:
             self.modified_attrs[name] = value
             for callback in self.modified_attrs_callbacks:
                 callback(self)
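
In Pydantic v2, model_fields is class-level metadata, and recent Pydantic releases warn when it is read from an instance; routing the lookup through self.__class__ avoids the warning while behaving identically. The same substitution appears in iceaxe/session.py further down in this diff. A minimal illustration (plain Pydantic, not iceaxe code):

    from pydantic import BaseModel

    class Example(BaseModel):
        name: str

    obj = Example(name="iceaxe")
    # model_fields lives on the class; reading it via the class avoids the
    # instance-access deprecation warning emitted by newer Pydantic versions.
    assert "name" in obj.__class__.model_fields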

--- iceaxe-0.7.0.dev1/iceaxe/schemas/actions.py
+++ iceaxe-0.7.0.dev3/iceaxe/schemas/actions.py
@@ -299,7 +299,7 @@ class DatabaseActions:
         - Scalar to array types (INTEGER → INTEGER[])
         - Custom enum conversions (VARCHAR/TEXT → custom enum)
         - Compatible numeric conversions (INTEGER → BIGINT)
-
+
         When autocast=False, PostgreSQL will only allow the type change if it's
         compatible without explicit casting, which may fail for many conversions.
 
@@ -308,7 +308,7 @@ class DatabaseActions:
         await actor.modify_column_type(
             "products", "price", ColumnType.INTEGER, autocast=True
         )
-
+
         # Manual migration with custom control
         await actor.modify_column_type(
             "products", "price", ColumnType.INTEGER, autocast=False
@@ -408,8 +408,8 @@ class DatabaseActions:
            return f"{column_name}::boolean"
        elif explicit_data_type in [
            ColumnType.DATE,
-            ColumnType.TIMESTAMP,
-            ColumnType.TIME,
+            ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
+            ColumnType.TIME_WITHOUT_TIME_ZONE,
        ]:
            # Date/time conversions
            return f"{column_name}::{explicit_data_type.value}"

--- iceaxe-0.7.0.dev1/iceaxe/schemas/db_memory_serializer.py
+++ iceaxe-0.7.0.dev3/iceaxe/schemas/db_memory_serializer.py
@@ -1,3 +1,4 @@
+import warnings
 from dataclasses import dataclass
 from datetime import date, datetime, time, timedelta
 from inspect import isgenerator
@@ -54,6 +55,47 @@ from iceaxe.typing import (
 NodeYieldType = Union[DBObject, DBObjectPointer, "NodeDefinition"]
 
 
+class CompositePrimaryKeyConstraintError(ValueError):
+    """
+    Raised when foreign key constraints cannot be resolved due to composite primary keys.
+
+    This occurs when a table has multiple fields marked as primary_key=True, creating
+    a composite primary key constraint, but foreign key constraints expect individual
+    primary key constraints on the target columns.
+
+    """
+
+    def __init__(self, missing_constraints: list[tuple[str, str]], base_message: str):
+        self.missing_constraints = missing_constraints
+        self.base_message = base_message
+
+        # Construct the detailed error message
+        error_msg = base_message
+
+        if missing_constraints:
+            error_msg += "\n\nThis error commonly occurs when you have multiple fields marked as primary_key=True in your model."
+            error_msg += "\nIceaxe creates a single composite primary key constraint, but foreign key constraints"
+            error_msg += (
+                "\nexpect individual primary key constraints on the target columns."
+            )
+            error_msg += "\n\nFor a detailed explanation of why this happens and how to fix it, see:"
+            error_msg += "\nhttps://mountaineer.sh/iceaxe/guides/relationships#composite-primary-keys-and-foreign-key-constraints"
+            error_msg += "\n\nTo fix this issue, choose one of these approaches:"
+            error_msg += "\n\nRecommended: Modify the current table"
+            error_msg += (
+                "\n - Keep only one field as primary_key=True (e.g., just 'id')"
+            )
+            error_msg += "\n - Add a UniqueConstraint if you need uniqueness across multiple fields"
+            error_msg += "\n - This is usually the better design pattern"
+
+            # Show specific table/column combinations that are missing
+            error_msg += "\n\nCurrently missing individual primary key constraints:"
+            for table_name, column_name in missing_constraints:
+                error_msg += f"\n - Table '{table_name}' needs a primary key on column '{column_name}'"
+
+        super().__init__(error_msg)
+
+
 @dataclass
 class NodeDefinition:
     node: DBObject
@@ -125,9 +167,37 @@ class DatabaseMemorySerializer:
                         pointer.representation() in db_objects_by_name
                        for pointer in dep.pointers
                    ):
-                        raise ValueError(
-                            f"None of the OR pointers {[p.representation() for p in dep.pointers]} found in the defined database objects"
-                        )
+                        # Create a more helpful error message for common cases
+                        missing_pointers = [
+                            p.representation() for p in dep.pointers
+                        ]
+                        error_msg = f"None of the OR pointers {missing_pointers} found in the defined database objects"
+
+                        # Check if this is the common case of multiple primary keys causing foreign key issues
+                        primary_key_pointers = []
+                        for p in dep.pointers:
+                            parsed = p.parse_constraint_pointer()
+                            if parsed and parsed.constraint_type == "PRIMARY KEY":
+                                primary_key_pointers.append(p)
+
+                        if primary_key_pointers:
+                            # Extract table and column info from the primary key pointers
+                            primary_key_info: list[tuple[str, str]] = []
+                            for pointer in primary_key_pointers:
+                                table_name = pointer.get_table_name()
+                                column_names = pointer.get_column_names()
+
+                                if table_name and column_names:
+                                    for column_name in column_names:
+                                        primary_key_info.append(
+                                            (table_name, column_name)
+                                        )
+
+                            if primary_key_info:
+                                raise CompositePrimaryKeyConstraintError(
+                                    primary_key_info, error_msg
+                                )
+                        raise ValueError(error_msg)
                    elif dep.representation() not in db_objects_by_name:
                        raise ValueError(
                            f"Pointer {dep.representation()} not found in the defined database objects"
@@ -384,12 +454,12 @@ class DatabaseHandler:
                 primitive_type=(
                     ColumnType.TIMESTAMP_WITH_TIME_ZONE
                     if info.postgres_config.timezone
-                    else ColumnType.TIMESTAMP
+                    else ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE
                 )
             )
         # Assume no timezone if not specified
         return TypeDeclarationResponse(
-            primitive_type=ColumnType.TIMESTAMP,
+            primitive_type=ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
         )
     elif is_type_compatible(annotation, date):  # type: ignore
         return TypeDeclarationResponse(
@@ -401,11 +471,11 @@ class DatabaseHandler:
                 primitive_type=(
                     ColumnType.TIME_WITH_TIME_ZONE
                     if info.postgres_config.timezone
-                    else ColumnType.TIME
+                    else ColumnType.TIME_WITHOUT_TIME_ZONE
                 ),
             )
         return TypeDeclarationResponse(
-            primitive_type=ColumnType.TIME,
+            primitive_type=ColumnType.TIME_WITHOUT_TIME_ZONE,
         )
     elif is_type_compatible(annotation, timedelta):  # type: ignore
         return TypeDeclarationResponse(
@@ -544,6 +614,18 @@ class DatabaseHandler:
         if not keys:
             return
 
+        # Warn users about potential issues with multiple primary keys
+        if len(keys) > 1:
+            column_names = [key for key, _ in keys]
+            warnings.warn(
+                f"Table '{table.get_table_name()}' has multiple fields marked as primary_key=True: {column_names}. "
+                f"This creates a composite primary key constraint, which may cause issues with foreign key "
+                f"constraints that expect individual primary keys on target columns. "
+                f"Consider using only one primary key field and adding UniqueConstraint for uniqueness instead.",
+                UserWarning,
+                stacklevel=3,
+            )
+
         columns = [key for key, _ in keys]
         yield from self._yield_nodes(
             DBConstraint(
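
Taken together, the delegate step now warns as soon as it sees a composite primary key, and ordering the database objects raises CompositePrimaryKeyConstraintError with the offending table/column pairs when a foreign key cannot find an individual primary key constraint to target. Below is a minimal sketch of the layout the error message recommends, using only the TableBase/Field patterns shown in the new tests above; the UniqueConstraint mentioned in the error text is noted in a comment but its exact usage is not spelled out here:

    from iceaxe import Field, TableBase  # import path assumed from iceaxe's public API

    class User(TableBase):
        # Single primary key, so foreign keys can target "user.id" directly.
        # Uniqueness across (id, tenant_id) would be expressed with the
        # UniqueConstraint the error message mentions.
        id: int = Field(primary_key=True)
        tenant_id: int
        name: str

    class Rec(TableBase):
        id: int = Field(primary_key=True, default=None)
        creator_id: int = Field(foreign_key="user.id")  # resolves: user.id is the sole primary key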

--- iceaxe-0.7.0.dev1/iceaxe/schemas/db_stubs.py
+++ iceaxe-0.7.0.dev3/iceaxe/schemas/db_stubs.py
@@ -1,4 +1,6 @@
+import re
 from abc import abstractmethod
+from dataclasses import dataclass
 from typing import Self, Union
 
 from pydantic import BaseModel, Field, model_validator
@@ -12,6 +14,15 @@ from iceaxe.schemas.actions import (
 )
 
 
+@dataclass
+class ConstraintPointerInfo:
+    """Information parsed from a constraint pointer representation."""
+
+    table_name: str
+    column_names: list[str]
+    constraint_type: str
+
+
 class DBObject(BaseModel):
     """
     A subclass for all models that are intended to store
@@ -86,6 +97,108 @@ class DBObjectPointer(BaseModel):
     def representation(self) -> str:
         pass
 
+    def parse_constraint_pointer(self) -> ConstraintPointerInfo | None:
+        """
+        Parse a constraint pointer representation into its components.
+
+        Returns:
+            ConstraintPointerInfo | None: Parsed constraint information or None if not a constraint pointer
+
+        Examples:
+            "table.['column'].PRIMARY KEY" -> ConstraintPointerInfo("table", ["column"], "PRIMARY KEY")
+            "table.['col1', 'col2'].UNIQUE" -> ConstraintPointerInfo("table", ["col1", "col2"], "UNIQUE")
+        """
+        representation = self.representation()
+
+        # Pattern to match: table_name.[column_list].constraint_type
+        # where column_list can be ['col'] or ['col1', 'col2', ...]
+        # The table_name can contain dots (for schema.table), so we need to be more careful
+        # We look for the pattern .[...]. to identify where the column list starts
+        pattern = r"^(.+)\.(\[.*?\])\.(.+)$"
+        match = re.match(pattern, representation)
+
+        if not match:
+            return None
+
+        table_name, columns_part, constraint_type = match.groups()
+
+        # Validate that the column list contains properly quoted column names or is empty
+        # Remove brackets and check the content
+        columns_str = columns_part.strip("[]")
+        if not columns_str:
+            # Empty column list is valid
+            return ConstraintPointerInfo(table_name, [], constraint_type)
+
+        # Split by comma and validate each column name is properly quoted
+        columns = []
+        for col in columns_str.split(","):
+            col = col.strip()
+            # Check if the column is properly quoted (single or double quotes)
+            if (col.startswith("'") and col.endswith("'")) or (
+                col.startswith('"') and col.endswith('"')
+            ):
+                # Remove quotes and add to list
+                col_name = col[1:-1]
+                if col_name:  # Don't add empty column names
+                    columns.append(col_name)
+            else:
+                # Column is not properly quoted, this is not a valid constraint pointer
+                return None
+
+        return ConstraintPointerInfo(table_name, columns, constraint_type)
+
+    def get_table_name(self) -> str | None:
+        """
+        Extract the table name from the pointer representation.
+
+        Returns:
+            str | None: The table name if it can be parsed, None otherwise
+        """
+        # Try constraint pointer format first
+        parsed = self.parse_constraint_pointer()
+        if parsed is not None:
+            return parsed.table_name
+
+        # Try simple table.column format
+        representation = self.representation()
+        if not representation:
+            return None
+
+        parts = representation.split(".")
+        if len(parts) >= 2:
+            # For schema.table.column format, take all parts except the last one
+            return ".".join(parts[:-1])
+        elif len(parts) == 1:
+            # Just a table name
+            return parts[0]
+        else:
+            return None
+
+    def get_column_names(self) -> list[str]:
+        """
+        Extract column names from the pointer representation.
+
+        Returns:
+            list[str]: List of column names if they can be parsed, empty list otherwise
+        """
+        # Try constraint pointer format first
+        parsed = self.parse_constraint_pointer()
+        if parsed is not None:
+            return parsed.column_names
+
+        # Try simple table.column format
+        representation = self.representation()
+        if not representation:
+            return []
+
+        parts = representation.split(".")
+        if len(parts) >= 2:
+            # For schema.table.column format, take the last part as the column name
+            return [parts[-1]]
+        else:
+            # Just a table name, no columns
+            return []
+
 
 class DBTable(DBObject):
     table_name: str
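
These helpers let the serializer introspect a pointer's representation string without knowing which concrete pointer subclass produced it. A usage sketch mirroring MockDBObjectPointer from the new test file (DBObjectPointer is abstract, so a concrete representation() is required):

    from iceaxe.schemas.db_stubs import DBObjectPointer

    class StaticPointer(DBObjectPointer):
        # Concrete stand-in, mirroring MockDBObjectPointer in the new tests.
        representation_str: str

        def representation(self) -> str:
            return self.representation_str

    pointer = StaticPointer(representation_str="users.['id'].PRIMARY KEY")
    info = pointer.parse_constraint_pointer()
    assert info is not None
    assert (info.table_name, info.column_names, info.constraint_type) == (
        "users",
        ["id"],
        "PRIMARY KEY",
    )
    assert pointer.get_table_name() == "users"
    assert pointer.get_column_names() == ["id"]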

--- iceaxe-0.7.0.dev1/iceaxe/session.py
+++ iceaxe-0.7.0.dev3/iceaxe/session.py
@@ -593,7 +593,7 @@ class DBConnection:
            modified_attrs = frozenset(
                k
                for k, v in obj.get_modified_attributes().items()
-                if not obj.model_fields[k].exclude
+                if not obj.__class__.model_fields[k].exclude
            )
            if modified_attrs:
                updates_by_fields[modified_attrs].append(obj)

--- iceaxe-0.7.0.dev1/iceaxe/sql_types.py
+++ iceaxe-0.7.0.dev3/iceaxe/sql_types.py
@@ -8,6 +8,10 @@ class ColumnType(StrEnum):
     # the column they can be case-insensitive, but when we're casting from
     # the database to memory they must align with the on-disk representation
     # which is lowercase.
+    #
+    # Note: The SQL standard requires that writing just "timestamp" be equivalent
+    # to "timestamp without time zone", and PostgreSQL honors that behavior.
+    # Similarly, "time" is equivalent to "time without time zone".
 
     # Numeric Types
     SMALLINT = "smallint"
@@ -33,9 +37,9 @@ class ColumnType(StrEnum):
 
     # Date/Time Types
     DATE = "date"
-    TIME = "time"
+    TIME_WITHOUT_TIME_ZONE = "time without time zone"
     TIME_WITH_TIME_ZONE = "time with time zone"
-    TIMESTAMP = "timestamp"
+    TIMESTAMP_WITHOUT_TIME_ZONE = "timestamp without time zone"
     TIMESTAMP_WITH_TIME_ZONE = "timestamp with time zone"
     INTERVAL = "interval"
 
@@ -85,6 +89,31 @@ class ColumnType(StrEnum):
     # Object Identifier Type
     OID = "oid"
 
+    @classmethod
+    def _missing_(cls, value: object):
+        """
+        Handle SQL standard aliases when the exact enum value is not found.
+
+        The SQL standard requires that "timestamp" be equivalent to "timestamp without time zone"
+        and "time" be equivalent to "time without time zone".
+        """
+        # Only handle string values for SQL type aliases
+        if not isinstance(value, str):
+            return None
+
+        aliases = {
+            "timestamp": "timestamp without time zone",
+            "time": "time without time zone",
+        }
+
+        # Check if this is an alias we can resolve
+        if value in aliases:
+            # Return the actual enum member for the aliased value
+            return cls(aliases[value])
+
+        # If not an alias, let the default enum behavior handle it
+        return None
+
 
 class ConstraintType(StrEnum):
     PRIMARY_KEY = "PRIMARY KEY"
@@ -105,9 +134,9 @@ def get_python_to_sql_mapping():
        bool: ColumnType.BOOLEAN,
        bytes: ColumnType.BYTEA,
        UUID: ColumnType.UUID,
-        datetime: ColumnType.TIMESTAMP,
+        datetime: ColumnType.TIMESTAMP_WITHOUT_TIME_ZONE,
        date: ColumnType.DATE,
-        time: ColumnType.TIME,
+        time: ColumnType.TIME_WITHOUT_TIME_ZONE,
        timedelta: ColumnType.INTERVAL,
    }

--- iceaxe-0.7.0.dev1/PKG-INFO
+++ iceaxe-0.7.0.dev3/iceaxe.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: iceaxe
-Version: 0.7.0.dev1
+Version: 0.7.0.dev3
 Summary: A modern, fast ORM for Python.
 Author-email: Pierce Freeman <pierce@freeman.vc>
 Requires-Python: >=3.11
@@ -15,8 +15,7 @@ Dynamic: license-file
 
 ![Iceaxe Logo](https://raw.githubusercontent.com/piercefreeman/iceaxe/main/media/header.png)
 
-![PyPI - Python Version](https://img.shields.io/pypi/pyversions/iceaxe)
-[![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
+![Python Version](https://img.shields.io/python/required-version-toml?tomlFilePath=https%3A%2F%2Fraw.githubusercontent.com%2Fpiercefreeman%2Ficeaxe%2Frefs%2Fheads%2Fmain%2Fpyproject.toml) [![Test status](https://github.com/piercefreeman/iceaxe/actions/workflows/test.yml/badge.svg)](https://github.com/piercefreeman/iceaxe/actions)
 
 A modern, fast ORM for Python. We have the following goals:
 

--- iceaxe-0.7.0.dev1/pyproject.toml
+++ iceaxe-0.7.0.dev3/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "iceaxe"
-version = "0.7.0.dev1"
+version = "0.7.0.dev3"
 description = "A modern, fast ORM for Python."
 readme = "README.md"
 requires-python = ">=3.11"

--- iceaxe-0.7.0.dev1/iceaxe/__tests__/schemas/test_db_stubs.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from iceaxe.schemas.db_stubs import DBType
-
-
-def test_merge_type_columns():
-    """
-    Allow separately yielded type definitions to collect their reference columns. If an
-    enum is referenced in one place, this should build up to the full definition.
-
-    """
-    type_a = DBType(
-        name="type_a",
-        values=frozenset({"A"}),
-        reference_columns=frozenset({("table_a", "column_a")}),
-    )
-    type_b = DBType(
-        name="type_a",
-        values=frozenset({"A"}),
-        reference_columns=frozenset({("table_b", "column_b")}),
-    )
-
-    merged = type_a.merge(type_b)
-    assert merged.name == "type_a"
-    assert merged.values == frozenset({"A"})
-    assert merged.reference_columns == frozenset(
-        {("table_a", "column_a"), ("table_b", "column_b")}
-    )