iceaxe 0.7.0.dev3__tar.gz → 0.7.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of iceaxe might be problematic.

Files changed (81)
  1. {iceaxe-0.7.0.dev3/iceaxe.egg-info → iceaxe-0.7.2}/PKG-INFO +1 -1
  2. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/conftest.py +97 -28
  3. iceaxe-0.7.2/iceaxe/__tests__/docker_helpers.py +208 -0
  4. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/migrations/test_action_sorter.py +1 -1
  5. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_queries.py +159 -0
  6. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_session.py +32 -30
  7. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/functions.py +527 -1
  8. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/migration.py +22 -2
  9. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/migrator.py +13 -0
  10. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/queries.py +5 -1
  11. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/db_stubs.py +22 -16
  12. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/session_optimized.c +244 -68
  13. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2/iceaxe.egg-info}/PKG-INFO +1 -1
  14. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe.egg-info/SOURCES.txt +1 -0
  15. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/pyproject.toml +2 -2
  16. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/LICENSE +0 -0
  17. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/MANIFEST.in +0 -0
  18. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/README.md +0 -0
  19. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__init__.py +0 -0
  20. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/__init__.py +0 -0
  21. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/benchmarks/__init__.py +0 -0
  22. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/benchmarks/test_bulk_insert.py +0 -0
  23. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/benchmarks/test_select.py +0 -0
  24. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/conf_models.py +0 -0
  25. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/helpers.py +0 -0
  26. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/migrations/__init__.py +0 -0
  27. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/migrations/conftest.py +0 -0
  28. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/migrations/test_generator.py +0 -0
  29. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/migrations/test_generics.py +0 -0
  30. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/mountaineer/__init__.py +0 -0
  31. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
  32. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/mountaineer/dependencies/test_core.py +0 -0
  33. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/__init__.py +0 -0
  34. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/test_actions.py +0 -0
  35. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/test_cli.py +0 -0
  36. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/test_db_memory_serializer.py +0 -0
  37. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/test_db_serializer.py +0 -0
  38. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/schemas/test_db_stubs.py +0 -0
  39. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_alias.py +0 -0
  40. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_base.py +0 -0
  41. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_comparison.py +0 -0
  42. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_field.py +0 -0
  43. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_helpers.py +0 -0
  44. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_modifications.py +0 -0
  45. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_queries_str.py +0 -0
  46. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/__tests__/test_text_search.py +0 -0
  47. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/alias_values.py +0 -0
  48. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/base.py +0 -0
  49. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/comparison.py +0 -0
  50. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/field.py +0 -0
  51. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/generics.py +0 -0
  52. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/io.py +0 -0
  53. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/logging.py +0 -0
  54. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/__init__.py +0 -0
  55. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/action_sorter.py +0 -0
  56. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/cli.py +0 -0
  57. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/client_io.py +0 -0
  58. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/migrations/generator.py +0 -0
  59. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/modifications.py +0 -0
  60. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/mountaineer/__init__.py +0 -0
  61. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/mountaineer/cli.py +0 -0
  62. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/mountaineer/config.py +0 -0
  63. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/mountaineer/dependencies/__init__.py +0 -0
  64. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/mountaineer/dependencies/core.py +0 -0
  65. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/postgres.py +0 -0
  66. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/py.typed +0 -0
  67. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/queries_str.py +0 -0
  68. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/__init__.py +0 -0
  69. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/actions.py +0 -0
  70. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/cli.py +0 -0
  71. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/db_memory_serializer.py +0 -0
  72. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/schemas/db_serializer.py +0 -0
  73. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/session.py +0 -0
  74. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/session_optimized.pyx +0 -0
  75. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/sql_types.py +0 -0
  76. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe/typing.py +0 -0
  77. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe.egg-info/dependency_links.txt +0 -0
  78. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe.egg-info/requires.txt +0 -0
  79. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/iceaxe.egg-info/top_level.txt +0 -0
  80. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/setup.cfg +0 -0
  81. {iceaxe-0.7.0.dev3 → iceaxe-0.7.2}/setup.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: iceaxe
-Version: 0.7.0.dev3
+Version: 0.7.2
 Summary: A modern, fast ORM for Python.
 Author-email: Pierce Freeman <pierce@freeman.vc>
 Requires-Python: >=3.11
iceaxe/__tests__/conftest.py
@@ -1,84 +1,151 @@
+import logging
+
 import asyncpg
 import pytest
 import pytest_asyncio
 
+from iceaxe.__tests__ import docker_helpers
 from iceaxe.base import DBModelMetaclass
 from iceaxe.session import DBConnection
 
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+@pytest.fixture(scope="session")
+def docker_postgres():
+    """
+    Fixture that creates a PostgreSQL container using the Python Docker API.
+    This allows running individual tests without needing Docker Compose.
+    """
+    # Create and start a PostgreSQL container
+    postgres_container = docker_helpers.PostgresContainer()
+
+    # Start the container and yield connection details
+    connection_info = postgres_container.start()
+    yield connection_info
+
+    # Cleanup: stop the container
+    postgres_container.stop()
+
 
 @pytest_asyncio.fixture
-async def db_connection():
+async def db_connection(docker_postgres):
+    """
+    Create a database connection using the PostgreSQL container.
+    """
     conn = DBConnection(
         await asyncpg.connect(
-            host="localhost",
-            port=5438,
-            user="iceaxe",
-            password="mysecretpassword",
-            database="iceaxe_test_db",
+            host=docker_postgres["host"],
+            port=docker_postgres["port"],
+            user=docker_postgres["user"],
+            password=docker_postgres["password"],
+            database=docker_postgres["database"],
         )
     )
 
     # Drop all tables first to ensure clean state
-    await conn.conn.execute("DROP TABLE IF EXISTS artifactdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS userdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS complexdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS article CASCADE")
+    known_tables = [
+        "artifactdemo",
+        "userdemo",
+        "complexdemo",
+        "article",
+        "employee",
+        "department",
+        "projectassignment",
+        "employeemetadata",
+        "functiondemomodel",
+        "demomodela",
+        "demomodelb",
+        "jsondemo",
+        "complextypedemo",
+    ]
+    known_types = ["statusenum", "employeestatus"]
+
+    for table in known_tables:
+        await conn.conn.execute(f"DROP TABLE IF EXISTS {table} CASCADE", timeout=30.0)
+
+    for known_type in known_types:
+        await conn.conn.execute(
+            f"DROP TYPE IF EXISTS {known_type} CASCADE", timeout=30.0
+        )
 
     # Create tables
-    await conn.conn.execute("""
+    await conn.conn.execute(
+        """
         CREATE TABLE IF NOT EXISTS userdemo (
             id SERIAL PRIMARY KEY,
             name TEXT,
             email TEXT
         )
-    """)
+    """,
+        timeout=30.0,
+    )
 
-    await conn.conn.execute("""
+    await conn.conn.execute(
+        """
         CREATE TABLE IF NOT EXISTS artifactdemo (
             id SERIAL PRIMARY KEY,
             title TEXT,
             user_id INT REFERENCES userdemo(id)
         )
-    """)
+    """,
+        timeout=30.0,
+    )
 
-    await conn.conn.execute("""
+    await conn.conn.execute(
+        """
         CREATE TABLE IF NOT EXISTS complexdemo (
            id SERIAL PRIMARY KEY,
            string_list TEXT[],
            json_data JSON
        )
-    """)
+    """,
+        timeout=30.0,
+    )
 
-    await conn.conn.execute("""
+    await conn.conn.execute(
+        """
         CREATE TABLE IF NOT EXISTS article (
             id SERIAL PRIMARY KEY,
             title TEXT,
             content TEXT,
             summary TEXT
         )
-    """)
+    """,
+        timeout=30.0,
+    )
 
     # Create each index separately to handle errors better
     yield conn
 
     # Drop all tables after tests
-    await conn.conn.execute("DROP TABLE IF EXISTS artifactdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS userdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS complexdemo CASCADE")
-    await conn.conn.execute("DROP TABLE IF EXISTS article CASCADE")
+    for table in known_tables:
+        await conn.conn.execute(f"DROP TABLE IF EXISTS {table} CASCADE", timeout=30.0)
+
+    # Drop all types after tests
+    for known_type in known_types:
+        await conn.conn.execute(
+            f"DROP TYPE IF EXISTS {known_type} CASCADE", timeout=30.0
+        )
+
     await conn.conn.close()
 
 
 @pytest_asyncio.fixture()
 async def indexed_db_connection(db_connection: DBConnection):
     await db_connection.conn.execute(
-        "CREATE INDEX IF NOT EXISTS article_title_tsv_idx ON article USING GIN (to_tsvector('english', title))"
+        "CREATE INDEX IF NOT EXISTS article_title_tsv_idx ON article USING GIN (to_tsvector('english', title))",
+        timeout=30.0,
     )
     await db_connection.conn.execute(
-        "CREATE INDEX IF NOT EXISTS article_content_tsv_idx ON article USING GIN (to_tsvector('english', content))"
+        "CREATE INDEX IF NOT EXISTS article_content_tsv_idx ON article USING GIN (to_tsvector('english', content))",
+        timeout=30.0,
    )
     await db_connection.conn.execute(
-        "CREATE INDEX IF NOT EXISTS article_summary_tsv_idx ON article USING GIN (to_tsvector('english', summary))"
+        "CREATE INDEX IF NOT EXISTS article_summary_tsv_idx ON article USING GIN (to_tsvector('english', summary))",
+        timeout=30.0,
     )
 
     yield db_connection
@@ -88,7 +155,7 @@ async def indexed_db_connection(db_connection: DBConnection):
 async def clear_table(db_connection):
     # Clear all tables and reset sequences
     await db_connection.conn.execute(
-        "TRUNCATE TABLE userdemo, article RESTART IDENTITY CASCADE"
+        "TRUNCATE TABLE userdemo, article RESTART IDENTITY CASCADE", timeout=30.0
     )
 
 
@@ -107,7 +174,8 @@ async def clear_all_database_objects(db_connection: DBConnection):
             EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
         END LOOP;
     END $$;
-    """
+    """,
+        timeout=30.0,
     )
 
     # Step 2: Drop all custom types in the public schema
@@ -120,7 +188,8 @@ async def clear_all_database_objects(db_connection: DBConnection):
             EXECUTE 'DROP TYPE IF EXISTS ' || quote_ident(r.typname) || ' CASCADE';
         END LOOP;
     END $$;
-    """
+    """,
+        timeout=30.0,
     )
 
 
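The conftest changes above replace the hard-coded Docker Compose connection (localhost:5438) with connection details yielded by the new session-scoped docker_postgres fixture. A minimal sketch of how a test might consume the reworked db_connection fixture, assuming pytest-asyncio is configured for these async fixtures (auto mode or an explicit marker); the test name and data are hypothetical, and only the raw asyncpg connection exposed as db_connection.conn is used:

    import pytest


    @pytest.mark.asyncio
    async def test_userdemo_round_trip(db_connection):
        # The fixture already created the userdemo table against the
        # container started by docker_postgres for this session.
        await db_connection.conn.execute(
            "INSERT INTO userdemo (name, email) VALUES ($1, $2)",
            "Ada",
            "ada@example.com",
        )
        row = await db_connection.conn.fetchrow("SELECT name, email FROM userdemo")
        assert row["name"] == "Ada"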
iceaxe/__tests__/docker_helpers.py
@@ -0,0 +1,208 @@
+"""
+Docker helper utilities for testing.
+
+This module provides classes and functions to manage Docker containers for testing,
+particularly focusing on PostgreSQL database containers.
+"""
+
+import logging
+import socket
+import time
+import uuid
+from typing import Any, Dict, Optional, cast
+
+import docker
+from docker.errors import APIError
+
+# Configure logging
+logger = logging.getLogger(__name__)
+
+
+def get_free_port() -> int:
+    """Find a free port on the host machine."""
+    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+        s.bind(("", 0))
+        return s.getsockname()[1]
+
+
+class PostgresContainer:
+    """
+    A class that manages a PostgreSQL Docker container for testing.
+
+    This class handles the lifecycle of a PostgreSQL container, including:
+    - Starting the container with appropriate configuration
+    - Finding available ports
+    - Waiting for the container to be ready
+    - Providing connection information
+    - Cleaning up after tests
+    """
+
+    def __init__(
+        self,
+        pg_user: str = "iceaxe",
+        pg_password: str = "mysecretpassword",
+        pg_db: str = "iceaxe_test_db",
+        postgres_version: str = "16",
+    ):
+        self.pg_user = pg_user
+        self.pg_password = pg_password
+        self.pg_db = pg_db
+        self.postgres_version = postgres_version
+        self.port = get_free_port()
+        self.container: Optional[Any] = None
+        self.client = docker.from_env()
+        self.container_name = f"iceaxe-postgres-test-{uuid.uuid4().hex[:8]}"
+
+    def start(self) -> Dict[str, Any]:
+        """
+        Start the PostgreSQL container.
+
+        Returns:
+            Dict[str, Any]: Connection information for the PostgreSQL container
+
+        Raises:
+            RuntimeError: If the container fails to start or become ready
+        """
+        logger.info(f"Starting PostgreSQL container on port {self.port}")
+
+        max_attempts = 3
+        attempt = 0
+
+        while attempt < max_attempts:
+            attempt += 1
+            try:
+                self.container = self._run_container(self.port)
+                break
+            except APIError as e:
+                if "port is already allocated" in str(e) and attempt < max_attempts:
+                    logger.warning(
+                        f"Port {self.port} is still in use. Trying with a new port (attempt {attempt}/{max_attempts})."
+                    )
+                    self.port = get_free_port()
+                else:
+                    raise RuntimeError(f"Failed to start PostgreSQL container: {e}")
+
+        # Wait for PostgreSQL to be ready
+        if not self._wait_for_container_ready():
+            self.stop()
+            raise RuntimeError("Failed to connect to PostgreSQL container")
+
+        return self.get_connection_info()
+
+    def _run_container(
+        self, port: int
+    ) -> Any:  # Type as Any since docker.models.containers.Container isn't imported
+        """
+        Run the Docker container with the specified port.
+
+        Args:
+            port: The port to map PostgreSQL to on the host
+
+        Returns:
+            The Docker container object
+        """
+        return self.client.containers.run(
+            f"postgres:{self.postgres_version}",
+            name=self.container_name,
+            detach=True,
+            environment={
+                "POSTGRES_USER": self.pg_user,
+                "POSTGRES_PASSWORD": self.pg_password,
+                "POSTGRES_DB": self.pg_db,
+                # Additional settings for faster startup in testing
+                "POSTGRES_HOST_AUTH_METHOD": "trust",
+            },
+            ports={"5432/tcp": port},
+            remove=True,  # Auto-remove container when stopped
+        )
+
+    def _wait_for_container_ready(self) -> bool:
+        """
+        Wait for the PostgreSQL container to be ready.
+
+        Returns:
+            bool: True if the container is ready, False otherwise
+        """
+        max_retries = 30
+        retry_interval = 1
+
+        for i in range(max_retries):
+            try:
+                if self.container is None:
+                    logger.warning("Container is None, cannot proceed")
+                    return False
+
+                # We've already checked that self.container is not None
+                container = cast(Any, self.container)
+                container.reload()  # Refresh container status
+                if container.status != "running":
+                    logger.warning(f"Container status: {container.status}")
+                    return False
+
+                # Try to connect to PostgreSQL
+                conn = socket.create_connection(("localhost", self.port), timeout=1)
+                conn.close()
+                # Wait a bit more to ensure PostgreSQL is fully initialized
+                time.sleep(2)
+                logger.info(f"PostgreSQL container is ready after {i + 1} attempt(s)")
+                return True
+            except (socket.error, ConnectionRefusedError) as e:
+                if i == max_retries - 1:
+                    logger.warning(
+                        f"Failed to connect after {max_retries} attempts: {e}"
+                    )
+                    return False
+                time.sleep(retry_interval)
+            except Exception as e:
+                logger.warning(f"Unexpected error checking container readiness: {e}")
+                if i == max_retries - 1:
+                    return False
+                time.sleep(retry_interval)
+
+        return False
+
+    def stop(self) -> None:
+        """
+        Stop the PostgreSQL container.
+
+        This method ensures the container is properly stopped and removed.
+        """
+        if self.container is not None:
+            try:
+                logger.info(f"Stopping PostgreSQL container {self.container_name}")
+                # We've already checked that self.container is not None
+                container = cast(Any, self.container)
+                container.stop(timeout=10)  # Allow 10 seconds for graceful shutdown
+            except Exception as e:
+                logger.warning(f"Failed to stop container: {e}")
+                try:
+                    # Force remove as a fallback
+                    if self.container is not None:
+                        self.container.remove(force=True)
+                        logger.info("Forced container removal")
+                except Exception as e2:
+                    logger.warning(f"Failed to force remove container: {e2}")
+
+    def get_connection_info(self) -> Dict[str, Any]:
+        """
+        Get the connection information for the PostgreSQL container.
+
+        Returns:
+            Dict[str, Any]: A dictionary containing connection parameters
+        """
+        return {
+            "host": "localhost",
+            "port": self.port,
+            "user": self.pg_user,
+            "password": self.pg_password,
+            "database": self.pg_db,
+        }
+
+    def get_connection_string(self) -> str:
+        """
+        Get a PostgreSQL connection string.
+
+        Returns:
+            str: A connection string in the format 'postgresql://user:password@host:port/database'
+        """
+        return f"postgresql://{self.pg_user}:{self.pg_password}@localhost:{self.port}/{self.pg_db}"
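The new docker_helpers module can also be driven directly, outside the pytest fixtures. A sketch under the same assumptions the module itself makes (a reachable Docker daemon via docker.from_env(), asyncpg installed); the script and its main() wrapper are illustrative only:

    import asyncio

    import asyncpg

    from iceaxe.__tests__.docker_helpers import PostgresContainer


    async def main():
        container = PostgresContainer(postgres_version="16")
        info = container.start()  # blocks until the container accepts TCP connections
        try:
            conn = await asyncpg.connect(
                host=info["host"],
                port=info["port"],
                user=info["user"],
                password=info["password"],
                database=info["database"],
            )
            print(await conn.fetchval("SELECT version()"))
            await conn.close()
        finally:
            container.stop()  # container is auto-removed because it runs with remove=True


    if __name__ == "__main__":
        asyncio.run(main())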
iceaxe/__tests__/migrations/test_action_sorter.py
@@ -19,7 +19,7 @@ class MockNode(DBObject):
     async def create(self, actor: DatabaseActions):
         pass
 
-    async def migrate(self, previous: "MockNode", actor: DatabaseActions):
+    async def migrate(self, previous: DBObject, actor: DatabaseActions):
         pass
 
     async def destroy(self, actor: DatabaseActions):
iceaxe/__tests__/test_queries.py
@@ -5,6 +5,7 @@ import pytest
 
 from iceaxe.__tests__.conf_models import (
     ArtifactDemo,
+    ComplexDemo,
     Employee,
     FunctionDemoModel,
     UserDemo,
@@ -316,6 +317,13 @@ def test_function_transformations():
         [],
     )
 
+    # Test unnest function
+    new_query = QueryBuilder().select(func.unnest(ComplexDemo.string_list))
+    assert new_query.build() == (
+        'SELECT unnest("complexdemo"."string_list") AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
     # Test type conversion functions
     new_query = QueryBuilder().select(
         (
@@ -337,6 +345,157 @@ def test_function_transformations():
     )
 
 
+def test_array_operators():
+    # Test ANY operator
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(func.any(ComplexDemo.string_list) == "python")
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE \'python\' = ANY("complexdemo"."string_list")',
+        [],
+    )
+
+    # Test ALL operator
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(func.all(ComplexDemo.string_list) == "active")
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE \'active\' = ALL("complexdemo"."string_list")',
+        [],
+    )
+
+    # Test array_contains operator (@>)
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(
+            func.array_contains(ComplexDemo.string_list, ["python", "django"]) == True  # noqa: E712
+        )
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE "complexdemo"."string_list" @> ARRAY[\'python\',\'django\'] = $1',
+        [True],
+    )
+
+    # Test array_contained_by operator (<@)
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(
+            func.array_contained_by(  # noqa: E712
+                ComplexDemo.string_list, ["python", "java", "go", "rust"]
+            )
+            == True
+        )
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE "complexdemo"."string_list" <@ ARRAY[\'python\',\'java\',\'go\',\'rust\'] = $1',
+        [True],
+    )
+
+    # Test array_overlaps operator (&&)
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(
+            func.array_overlaps(  # noqa: E712
+                ComplexDemo.string_list, ["python", "data-science", "ml"]
+            )
+            == True
+        )
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE "complexdemo"."string_list" && ARRAY[\'python\',\'data-science\',\'ml\'] = $1',
+        [True],
+    )
+
+
+def test_array_comparison_operators():
+    # Test ANY with different operators
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(func.any(ComplexDemo.string_list) != "inactive")
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE \'inactive\' != ANY("complexdemo"."string_list")',
+        [],
+    )
+
+    # Test ALL with >= operator
+    new_query = (
+        QueryBuilder()
+        .select(ComplexDemo)
+        .where(func.all(ComplexDemo.string_list) >= "a")
+    )
+    assert new_query.build() == (
+        'SELECT "complexdemo"."id" AS "complexdemo_id", "complexdemo"."string_list" AS "complexdemo_string_list", '
+        '"complexdemo"."json_data" AS "complexdemo_json_data" FROM "complexdemo" WHERE \'a\' >= ALL("complexdemo"."string_list")',
+        [],
+    )
+
+
+def test_array_manipulation_functions():
+    # Test array_append
+    new_query = QueryBuilder().select(
+        func.array_append(ComplexDemo.string_list, "new-tag")
+    )
+    assert new_query.build() == (
+        'SELECT array_append("complexdemo"."string_list", \'new-tag\') AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
+    # Test array_prepend
+    new_query = QueryBuilder().select(
+        func.array_prepend("featured", ComplexDemo.string_list)
+    )
+    assert new_query.build() == (
+        'SELECT array_prepend(\'featured\', "complexdemo"."string_list") AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
+    # Test array_cat with field - this would require a join in practice
+    # For now, let's test with a simpler case using the same table
+    # or we could test array_cat with a literal array which is more common
+
+    # Test array_cat with literal array
+    new_query = QueryBuilder().select(
+        func.array_cat(ComplexDemo.string_list, ["admin", "superuser"])
+    )
+    assert new_query.build() == (
+        'SELECT array_cat("complexdemo"."string_list", ARRAY[\'admin\',\'superuser\']) AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
+    # Test array_position
+    new_query = QueryBuilder().select(
+        func.array_position(ComplexDemo.string_list, "python")
+    )
+    assert new_query.build() == (
+        'SELECT array_position("complexdemo"."string_list", \'python\') AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
+    # Test array_remove
+    new_query = QueryBuilder().select(
+        func.array_remove(ComplexDemo.string_list, "deprecated")
+    )
+    assert new_query.build() == (
+        'SELECT array_remove("complexdemo"."string_list", \'deprecated\') AS aggregate_0 FROM "complexdemo"',
+        [],
+    )
+
+
 def test_invalid_where_condition():
     with pytest.raises(ValueError):
         QueryBuilder().select(UserDemo.id).where("invalid condition")  # type: ignore
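The tests above exercise the array helpers added to iceaxe/functions.py in this release. A short sketch of the same helpers outside the test suite, assuming func and QueryBuilder resolve from iceaxe.functions and iceaxe.queries as the tests suggest, and reusing the ComplexDemo test model purely for illustration:

    from iceaxe.__tests__.conf_models import ComplexDemo
    from iceaxe.functions import func
    from iceaxe.queries import QueryBuilder

    # Rows whose string_list contains the value 'python' (ANY)
    by_membership = (
        QueryBuilder()
        .select(ComplexDemo)
        .where(func.any(ComplexDemo.string_list) == "python")
    )

    # Rows whose string_list contains both 'python' and 'django' (@>)
    by_containment = (
        QueryBuilder()
        .select(ComplexDemo)
        .where(func.array_contains(ComplexDemo.string_list, ["python", "django"]) == True)  # noqa: E712
    )

    print(by_membership.build())   # (SQL string, parameter list)
    print(by_containment.build())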