iceaxe 0.6.0.dev1__tar.gz → 0.6.0.dev2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81) hide show
  1. {iceaxe-0.6.0.dev1/iceaxe.egg-info → iceaxe-0.6.0.dev2}/PKG-INFO +1 -1
  2. iceaxe-0.6.0.dev2/iceaxe/__init__.py +20 -0
  3. iceaxe-0.6.0.dev2/iceaxe/__tests__/__init__.py +0 -0
  4. iceaxe-0.6.0.dev2/iceaxe/__tests__/benchmarks/__init__.py +0 -0
  5. iceaxe-0.6.0.dev2/iceaxe/__tests__/benchmarks/test_bulk_insert.py +45 -0
  6. iceaxe-0.6.0.dev2/iceaxe/__tests__/benchmarks/test_select.py +114 -0
  7. iceaxe-0.6.0.dev2/iceaxe/__tests__/conf_models.py +133 -0
  8. iceaxe-0.6.0.dev2/iceaxe/__tests__/conftest.py +135 -0
  9. iceaxe-0.6.0.dev2/iceaxe/__tests__/helpers.py +268 -0
  10. iceaxe-0.6.0.dev2/iceaxe/__tests__/migrations/__init__.py +0 -0
  11. iceaxe-0.6.0.dev2/iceaxe/__tests__/migrations/conftest.py +36 -0
  12. iceaxe-0.6.0.dev2/iceaxe/__tests__/migrations/test_action_sorter.py +237 -0
  13. iceaxe-0.6.0.dev2/iceaxe/__tests__/migrations/test_generator.py +140 -0
  14. iceaxe-0.6.0.dev2/iceaxe/__tests__/migrations/test_generics.py +91 -0
  15. iceaxe-0.6.0.dev2/iceaxe/__tests__/mountaineer/__init__.py +0 -0
  16. iceaxe-0.6.0.dev2/iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
  17. iceaxe-0.6.0.dev2/iceaxe/__tests__/mountaineer/dependencies/test_core.py +76 -0
  18. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/__init__.py +0 -0
  19. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/test_actions.py +859 -0
  20. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/test_cli.py +25 -0
  21. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/test_db_memory_serializer.py +1463 -0
  22. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/test_db_serializer.py +352 -0
  23. iceaxe-0.6.0.dev2/iceaxe/__tests__/schemas/test_db_stubs.py +26 -0
  24. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_alias.py +83 -0
  25. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_base.py +32 -0
  26. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_comparison.py +296 -0
  27. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_field.py +11 -0
  28. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_helpers.py +9 -0
  29. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_modifications.py +151 -0
  30. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_queries.py +605 -0
  31. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_queries_str.py +173 -0
  32. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_session.py +1509 -0
  33. iceaxe-0.6.0.dev2/iceaxe/__tests__/test_text_search.py +287 -0
  34. iceaxe-0.6.0.dev2/iceaxe/alias_values.py +67 -0
  35. iceaxe-0.6.0.dev2/iceaxe/base.py +350 -0
  36. iceaxe-0.6.0.dev2/iceaxe/comparison.py +514 -0
  37. iceaxe-0.6.0.dev2/iceaxe/field.py +250 -0
  38. iceaxe-0.6.0.dev2/iceaxe/functions.py +906 -0
  39. iceaxe-0.6.0.dev2/iceaxe/generics.py +140 -0
  40. iceaxe-0.6.0.dev2/iceaxe/io.py +107 -0
  41. iceaxe-0.6.0.dev2/iceaxe/logging.py +91 -0
  42. iceaxe-0.6.0.dev2/iceaxe/migrations/__init__.py +5 -0
  43. iceaxe-0.6.0.dev2/iceaxe/migrations/action_sorter.py +98 -0
  44. iceaxe-0.6.0.dev2/iceaxe/migrations/cli.py +228 -0
  45. iceaxe-0.6.0.dev2/iceaxe/migrations/client_io.py +62 -0
  46. iceaxe-0.6.0.dev2/iceaxe/migrations/generator.py +404 -0
  47. iceaxe-0.6.0.dev2/iceaxe/migrations/migration.py +66 -0
  48. iceaxe-0.6.0.dev2/iceaxe/migrations/migrator.py +88 -0
  49. iceaxe-0.6.0.dev2/iceaxe/modifications.py +176 -0
  50. iceaxe-0.6.0.dev2/iceaxe/mountaineer/__init__.py +10 -0
  51. iceaxe-0.6.0.dev2/iceaxe/mountaineer/cli.py +74 -0
  52. iceaxe-0.6.0.dev2/iceaxe/mountaineer/config.py +46 -0
  53. iceaxe-0.6.0.dev2/iceaxe/mountaineer/dependencies/__init__.py +6 -0
  54. iceaxe-0.6.0.dev2/iceaxe/mountaineer/dependencies/core.py +67 -0
  55. iceaxe-0.6.0.dev2/iceaxe/postgres.py +133 -0
  56. iceaxe-0.6.0.dev2/iceaxe/py.typed +0 -0
  57. iceaxe-0.6.0.dev2/iceaxe/queries.py +1452 -0
  58. iceaxe-0.6.0.dev2/iceaxe/queries_str.py +294 -0
  59. iceaxe-0.6.0.dev2/iceaxe/schemas/__init__.py +0 -0
  60. iceaxe-0.6.0.dev2/iceaxe/schemas/actions.py +766 -0
  61. iceaxe-0.6.0.dev2/iceaxe/schemas/cli.py +30 -0
  62. iceaxe-0.6.0.dev2/iceaxe/schemas/db_memory_serializer.py +623 -0
  63. iceaxe-0.6.0.dev2/iceaxe/schemas/db_serializer.py +346 -0
  64. iceaxe-0.6.0.dev2/iceaxe/schemas/db_stubs.py +405 -0
  65. iceaxe-0.6.0.dev2/iceaxe/session.py +860 -0
  66. iceaxe-0.6.0.dev2/iceaxe/sql_types.py +119 -0
  67. iceaxe-0.6.0.dev2/iceaxe/typing.py +73 -0
  68. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2/iceaxe.egg-info}/PKG-INFO +1 -1
  69. iceaxe-0.6.0.dev2/iceaxe.egg-info/SOURCES.txt +78 -0
  70. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/pyproject.toml +4 -5
  71. iceaxe-0.6.0.dev1/iceaxe.egg-info/SOURCES.txt +0 -12
  72. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/LICENSE +0 -0
  73. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/MANIFEST.in +0 -0
  74. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/README.md +0 -0
  75. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/iceaxe/session_optimized.c +0 -0
  76. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/iceaxe/session_optimized.pyx +0 -0
  77. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/iceaxe.egg-info/dependency_links.txt +0 -0
  78. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/iceaxe.egg-info/requires.txt +0 -0
  79. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/iceaxe.egg-info/top_level.txt +0 -0
  80. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/setup.cfg +0 -0
  81. {iceaxe-0.6.0.dev1 → iceaxe-0.6.0.dev2}/setup.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: iceaxe
3
- Version: 0.6.0.dev1
3
+ Version: 0.6.0.dev2
4
4
  Summary: A modern, fast ORM for Python.
5
5
  Author-email: Pierce Freeman <pierce@freeman.vc>
6
6
  Requires-Python: >=3.11
@@ -0,0 +1,20 @@
1
+ from .alias_values import alias as alias
2
+ from .base import (
3
+ IndexConstraint as IndexConstraint,
4
+ TableBase as TableBase,
5
+ UniqueConstraint as UniqueConstraint,
6
+ )
7
+ from .field import Field as Field
8
+ from .functions import func as func
9
+ from .postgres import PostgresDateTime as PostgresDateTime, PostgresTime as PostgresTime
10
+ from .queries import (
11
+ QueryBuilder as QueryBuilder,
12
+ and_ as and_,
13
+ delete as delete,
14
+ or_ as or_,
15
+ select as select,
16
+ update as update,
17
+ )
18
+ from .queries_str import sql as sql
19
+ from .session import DBConnection as DBConnection
20
+ from .typing import column as column
File without changes
@@ -0,0 +1,45 @@
1
+ import time
2
+ from typing import Sequence
3
+
4
+ import pytest
5
+
6
+ from iceaxe.__tests__.conf_models import UserDemo
7
+ from iceaxe.logging import CONSOLE, LOGGER
8
+ from iceaxe.session import DBConnection
9
+
10
+
11
def generate_test_users(count: int) -> Sequence[UserDemo]:
    """
    Build a batch of synthetic users for the bulk-insert benchmark.

    :param count: Number of users to generate
    :return: Sequence of UserDemo instances with deterministic names/emails
    """
    users: list[UserDemo] = []
    for index in range(count):
        users.append(
            UserDemo(name=f"User {index}", email=f"user{index}@example.com")
        )
    return users
21
+
22
+
23
@pytest.mark.asyncio
@pytest.mark.integration_tests
async def test_bulk_insert_performance(db_connection: DBConnection):
    """
    Test the performance of bulk inserting 500k records.

    Inserts the batch through DBConnection.insert, reports timing, and then
    verifies via a raw COUNT(*) that every row actually landed.
    """
    NUM_USERS = 500_000
    users = generate_test_users(NUM_USERS)
    LOGGER.info(f"Generated {NUM_USERS} test users")

    insert_started = time.time()
    await db_connection.insert(users)
    elapsed = time.time() - insert_started
    throughput = NUM_USERS / elapsed

    CONSOLE.print("\nBulk Insert Performance:")
    CONSOLE.print(f"Total time: {elapsed:.2f} seconds")
    CONSOLE.print(f"Records per second: {throughput:.2f}")

    # Count rows directly with asyncpg to confirm the insert was complete.
    row_count = await db_connection.conn.fetchval("SELECT COUNT(*) FROM userdemo")
    assert row_count == NUM_USERS
@@ -0,0 +1,114 @@
1
+ from enum import Enum
2
+ from time import monotonic_ns
3
+ from typing import Any
4
+
5
+ import asyncpg
6
+ import pytest
7
+
8
+ from iceaxe.__tests__.conf_models import UserDemo, run_profile
9
+ from iceaxe.logging import CONSOLE, LOGGER
10
+ from iceaxe.queries import QueryBuilder
11
+ from iceaxe.session import DBConnection
12
+
13
+
14
class FetchType(Enum):
    # Which projection the benchmark fetches: primary keys only, or full rows.
    ID = "id"
    OBJ = "obj"
17
+
18
+
19
async def insert_users(conn: asyncpg.Connection, num_users: int):
    """Seed `num_users` rows into userdemo in one executemany batch."""
    rows: list[tuple[str, str]] = []
    for i in range(num_users):
        rows.append((f"User {i}", f"user{i}@example.com"))
    await conn.executemany("INSERT INTO userdemo (name, email) VALUES ($1, $2)", rows)
22
+
23
+
24
async def fetch_users_raw(conn: asyncpg.Connection, fetch_type: FetchType) -> list[Any]:
    """Run the raw asyncpg baseline query for the requested projection."""
    if fetch_type == FetchType.ID:
        return await conn.fetch("SELECT id FROM userdemo")  # type: ignore
    if fetch_type == FetchType.OBJ:
        return await conn.fetch("SELECT * FROM userdemo")  # type: ignore
    raise ValueError(f"Invalid run profile: {fetch_type}")
31
+
32
+
33
def build_iceaxe_query(fetch_type: FetchType):
    """Build the QueryBuilder equivalent of the raw baseline query."""
    if fetch_type == FetchType.ID:
        return QueryBuilder().select(UserDemo.id)
    if fetch_type == FetchType.OBJ:
        return QueryBuilder().select(UserDemo)
    raise ValueError(f"Invalid run profile: {fetch_type}")
40
+
41
+
42
@pytest.mark.asyncio
@pytest.mark.integration_tests
@pytest.mark.parametrize(
    "fetch_type, allowed_overhead",
    [
        (FetchType.ID, 10),
        (FetchType.OBJ, 800),
    ],
)
async def test_benchmark(
    db_connection: DBConnection, request, fetch_type: FetchType, allowed_overhead: float
):
    """
    Benchmark DBConnection.exec against an equivalent raw asyncpg fetch and
    assert the ORM overhead stays within `allowed_overhead` percent.

    :param fetch_type: Whether to fetch only ids or full row objects
    :param allowed_overhead: Maximum tolerated slowdown of exec vs raw, in %
    """
    # Flip to True to capture a pyinstrument profile of the exec loop. Kept
    # off by default (was a dead `if False:` block) because the profiling
    # instrumentation itself slows the measured run.
    enable_profiling = False

    num_users = 500_000
    num_loops = 100

    # Insert users using raw asyncpg
    await insert_users(db_connection.conn, num_users)

    # Benchmark raw asyncpg query
    start_time = monotonic_ns()
    raw_results: list[Any] = []
    for _ in range(num_loops):
        raw_results = await fetch_users_raw(db_connection.conn, fetch_type)
    raw_time = monotonic_ns() - start_time
    raw_time_seconds = raw_time / 1e9
    raw_time_per_query = (raw_time / num_loops) / 1e9

    # Same message goes to the logger and the console.
    raw_summary = f"Raw asyncpg query time: {raw_time_per_query:.4f} (total: {raw_time_seconds:.4f}) seconds"
    LOGGER.info(raw_summary)
    CONSOLE.print(raw_summary)

    # Benchmark DBConnection.exec query (query built once, outside the timer loop)
    start_time = monotonic_ns()
    query = build_iceaxe_query(fetch_type)
    db_results: list[UserDemo] | list[int] = []
    for _ in range(num_loops):
        db_results = await db_connection.exec(query)
    db_time = monotonic_ns() - start_time
    db_time_seconds = db_time / 1e9
    db_time_per_query = (db_time / num_loops) / 1e9

    db_summary = f"DBConnection.exec query time: {db_time_per_query:.4f} (total: {db_time_seconds:.4f}) seconds"
    LOGGER.info(db_summary)
    CONSOLE.print(db_summary)

    if enable_profiling:
        # Slower than the raw run since we need to run the performance instrumentation
        with run_profile(request):
            # Right now we don't cache results so we can run multiple times to get a better measure of samples
            for _ in range(num_loops):
                db_results = await db_connection.exec(build_iceaxe_query(fetch_type))

    # Compare results
    assert len(raw_results) == len(db_results) == num_users, "Result count mismatch"

    # Calculate performance difference (percent slower than the raw baseline)
    performance_diff = (db_time - raw_time) / raw_time * 100
    LOGGER.info(f"Performance difference: {performance_diff:.2f}%")
    CONSOLE.print(f"Performance difference: {performance_diff:.2f}%")

    # Assert that DBConnection.exec is at most X% slower than raw query
    assert performance_diff <= allowed_overhead, (
        f"DBConnection.exec is {performance_diff:.2f}% slower than raw query, which exceeds the {allowed_overhead}% threshold"
    )

    LOGGER.info("Benchmark completed successfully.")
@@ -0,0 +1,133 @@
1
+ from contextlib import contextmanager
2
+ from enum import StrEnum
3
+ from pathlib import Path
4
+ from typing import Any
5
+
6
+ from pyinstrument import Profiler
7
+
8
+ from iceaxe.base import Field, TableBase, UniqueConstraint
9
+
10
+
11
class UserDemo(TableBase):
    """Minimal user table used throughout the tests and benchmarks."""

    id: int = Field(primary_key=True, default=None)  # assigned by the DB (SERIAL)
    name: str
    email: str
15
+
16
+
17
class ArtifactDemo(TableBase):
    """Artifact row owned by a user; exercises the foreign-key field option."""

    id: int = Field(primary_key=True, default=None)
    title: str
    user_id: int = Field(foreign_key="userdemo.id")  # references UserDemo.id
21
+
22
+
23
class ComplexDemo(TableBase):
    """Model mixing an array column and a JSON column for serialization tests."""

    id: int = Field(primary_key=True, default=None)
    string_list: list[str]  # backed by a TEXT[] column in the test schema
    json_data: dict[str, str] = Field(is_json=True)
27
+
28
+
29
class Employee(TableBase):
    """Sample employee record; email is enforced unique at the column level."""

    id: int = Field(primary_key=True, default=None)
    email: str = Field(unique=True)
    first_name: str
    last_name: str
    department: str
    salary: float
36
+
37
+
38
class Department(TableBase):
    """Sample department record; name is enforced unique at the column level."""

    id: int = Field(primary_key=True, default=None)
    name: str = Field(unique=True)
    budget: float
    location: str
43
+
44
+
45
class ProjectAssignment(TableBase):
    """Link row assigning an employee to a project."""

    id: int = Field(primary_key=True, default=None)
    employee_id: int = Field(foreign_key="employee.id")  # references Employee.id
    project_name: str
    role: str
    start_date: str  # modeled as a plain string, not a date type
51
+
52
+
53
class EmployeeStatus(StrEnum):
    # String-valued enum; members compare equal to their string values.
    ACTIVE = "active"
    INACTIVE = "inactive"
    ON_LEAVE = "on_leave"
57
+
58
+
59
class EmployeeMetadata(TableBase):
    """Per-employee metadata mixing an enum column with two JSON columns."""

    id: int = Field(primary_key=True, default=None)
    employee_id: int = Field(foreign_key="employee.id")  # references Employee.id
    status: EmployeeStatus
    tags: list[str] = Field(is_json=True)
    additional_info: dict[str, Any] = Field(is_json=True)
65
+
66
+
67
class FunctionDemoModel(TableBase):
    """
    Wide scratch model providing one column of every name the SQL-function
    tests need: a numeric balance, date/time-like strings, and integer
    date-part / interval-component columns.
    """

    id: int = Field(primary_key=True, default=None)
    balance: float
    # Date/time-like values modeled as plain strings
    created_at: str
    birth_date: str
    start_date: str
    end_date: str
    # Singular date parts
    year: int
    month: int
    day: int
    hour: int
    minute: int
    second: int
    # Plural interval components
    years: int
    months: int
    days: int
    weeks: int
    hours: int
    minutes: int
    seconds: int
    name: str
    balance_str: str
    timestamp_str: str
90
+
91
+
92
class DemoModelA(TableBase):
    """Generic demo model A; `code` is unique. Mirrors DemoModelB's shape."""

    id: int = Field(primary_key=True, default=None)
    name: str
    description: str
    code: str = Field(unique=True)
97
+
98
+
99
class DemoModelB(TableBase):
    """Generic demo model B; `code` is unique. Mirrors DemoModelA's shape."""

    id: int = Field(primary_key=True, default=None)
    name: str
    category: str
    code: str = Field(unique=True)
104
+
105
+
106
class JsonDemo(TableBase):
    """
    Model for testing JSON field updates.

    Pairs a required and an optional JSON column with a table-level unique
    constraint on `unique_val`.
    """

    id: int | None = Field(primary_key=True, default=None)
    settings: dict[Any, Any] = Field(is_json=True)
    metadata: dict[Any, Any] | None = Field(is_json=True)  # nullable JSON column
    unique_val: str

    # Uniqueness declared as a table constraint rather than a column option.
    table_args = [UniqueConstraint(columns=["unique_val"])]
117
+
118
+
119
@contextmanager
def run_profile(request):
    """
    Profile the enclosed block with pyinstrument and write an HTML report to
    `.profiles/<test name>.html` under the current working directory.

    :param request: pytest `request` fixture; `request.node.name` names the report

    The profiler is stopped and the report written in a ``finally`` block, so
    a failing test still produces usable profiling output (previously an
    exception in the body left the profiler running and wrote no report).
    """
    TESTS_ROOT = Path.cwd()
    PROFILE_ROOT = TESTS_ROOT / ".profiles"

    # Turn profiling on
    profiler = Profiler()
    profiler.start()

    try:
        yield  # Run test
    finally:
        profiler.stop()
        PROFILE_ROOT.mkdir(exist_ok=True)
        results_file = PROFILE_ROOT / f"{request.node.name}.html"
        profiler.write_html(results_file)
@@ -0,0 +1,135 @@
1
+ import asyncpg
2
+ import pytest
3
+ import pytest_asyncio
4
+
5
+ from iceaxe.base import DBModelMetaclass
6
+ from iceaxe.session import DBConnection
7
+
8
+
9
@pytest_asyncio.fixture
async def db_connection():
    """
    Connect to the local test Postgres instance, (re)create the tables the
    integration tests use, and drop them again on teardown.

    NOTE(review): assumes a Postgres server reachable on localhost:5438 with
    the credentials below (presumably a docker-compose service — confirm).
    """
    conn = DBConnection(
        await asyncpg.connect(
            host="localhost",
            port=5438,
            user="iceaxe",
            password="mysecretpassword",
            database="iceaxe_test_db",
        )
    )

    # Drop all tables first to ensure clean state
    await conn.conn.execute("DROP TABLE IF EXISTS artifactdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS userdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS complexdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS article CASCADE")

    # Create tables
    await conn.conn.execute("""
        CREATE TABLE IF NOT EXISTS userdemo (
            id SERIAL PRIMARY KEY,
            name TEXT,
            email TEXT
        )
    """)

    # artifactdemo references userdemo, so it is created after it.
    await conn.conn.execute("""
        CREATE TABLE IF NOT EXISTS artifactdemo (
            id SERIAL PRIMARY KEY,
            title TEXT,
            user_id INT REFERENCES userdemo(id)
        )
    """)

    await conn.conn.execute("""
        CREATE TABLE IF NOT EXISTS complexdemo (
            id SERIAL PRIMARY KEY,
            string_list TEXT[],
            json_data JSON
        )
    """)

    await conn.conn.execute("""
        CREATE TABLE IF NOT EXISTS article (
            id SERIAL PRIMARY KEY,
            title TEXT,
            content TEXT,
            summary TEXT
        )
    """)

    # Create each index separately to handle errors better
    yield conn

    # Drop all tables after tests
    await conn.conn.execute("DROP TABLE IF EXISTS artifactdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS userdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS complexdemo CASCADE")
    await conn.conn.execute("DROP TABLE IF EXISTS article CASCADE")
    await conn.conn.close()
70
+
71
+
72
@pytest_asyncio.fixture()
async def indexed_db_connection(db_connection: DBConnection):
    """Layer full-text-search GIN indexes over the article table's columns."""
    index_statements = (
        "CREATE INDEX IF NOT EXISTS article_title_tsv_idx ON article USING GIN (to_tsvector('english', title))",
        "CREATE INDEX IF NOT EXISTS article_content_tsv_idx ON article USING GIN (to_tsvector('english', content))",
        "CREATE INDEX IF NOT EXISTS article_summary_tsv_idx ON article USING GIN (to_tsvector('english', summary))",
    )
    for statement in index_statements:
        await db_connection.conn.execute(statement)

    yield db_connection
85
+
86
+
87
@pytest_asyncio.fixture(autouse=True)
async def clear_table(db_connection):
    """
    Autouse fixture: empty userdemo and article (CASCADE reaches dependents
    such as artifactdemo) and reset their id sequences before every test.
    """
    truncate_sql = "TRUNCATE TABLE userdemo, article RESTART IDENTITY CASCADE"
    await db_connection.conn.execute(truncate_sql)
93
+
94
+
95
@pytest_asyncio.fixture
async def clear_all_database_objects(db_connection: DBConnection):
    """
    Clear all database objects.

    Drops every table and every enum type in the public schema via dynamic
    DO blocks, leaving a completely empty schema for migration tests.
    """
    # Step 1: Drop all tables in the public schema
    await db_connection.conn.execute(
        """
        DO $$ DECLARE
            r RECORD;
        BEGIN
            FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = 'public') LOOP
                EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
            END LOOP;
        END $$;
        """
    )

    # Step 2: Drop all custom types in the public schema
    # (typtype = 'e' restricts the sweep to enum types only)
    await db_connection.conn.execute(
        """
        DO $$ DECLARE
            r RECORD;
        BEGIN
            FOR r IN (SELECT typname FROM pg_type WHERE typtype = 'e' AND typnamespace = (SELECT oid FROM pg_namespace WHERE nspname = 'public')) LOOP
                EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
            END LOOP;
        END $$;
        """
    )
125
+
126
+
127
@pytest.fixture
def clear_registry():
    """
    Swap in an empty DBModelMetaclass registry for the duration of a test so
    throwaway model declarations do not pollute the global table registry.
    """
    saved_registry = DBModelMetaclass._registry
    DBModelMetaclass._registry = []
    try:
        yield
    finally:
        # Restore whatever was registered before the test ran.
        DBModelMetaclass._registry = saved_registry