iceaxe 0.7.1__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of iceaxe might be problematic. Click here for more details.
- iceaxe/__init__.py +20 -0
- iceaxe/__tests__/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/__init__.py +0 -0
- iceaxe/__tests__/benchmarks/test_bulk_insert.py +45 -0
- iceaxe/__tests__/benchmarks/test_select.py +114 -0
- iceaxe/__tests__/conf_models.py +133 -0
- iceaxe/__tests__/conftest.py +204 -0
- iceaxe/__tests__/docker_helpers.py +208 -0
- iceaxe/__tests__/helpers.py +268 -0
- iceaxe/__tests__/migrations/__init__.py +0 -0
- iceaxe/__tests__/migrations/conftest.py +36 -0
- iceaxe/__tests__/migrations/test_action_sorter.py +237 -0
- iceaxe/__tests__/migrations/test_generator.py +140 -0
- iceaxe/__tests__/migrations/test_generics.py +91 -0
- iceaxe/__tests__/mountaineer/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/__init__.py +0 -0
- iceaxe/__tests__/mountaineer/dependencies/test_core.py +76 -0
- iceaxe/__tests__/schemas/__init__.py +0 -0
- iceaxe/__tests__/schemas/test_actions.py +1264 -0
- iceaxe/__tests__/schemas/test_cli.py +25 -0
- iceaxe/__tests__/schemas/test_db_memory_serializer.py +1525 -0
- iceaxe/__tests__/schemas/test_db_serializer.py +398 -0
- iceaxe/__tests__/schemas/test_db_stubs.py +190 -0
- iceaxe/__tests__/test_alias.py +83 -0
- iceaxe/__tests__/test_base.py +52 -0
- iceaxe/__tests__/test_comparison.py +383 -0
- iceaxe/__tests__/test_field.py +11 -0
- iceaxe/__tests__/test_helpers.py +9 -0
- iceaxe/__tests__/test_modifications.py +151 -0
- iceaxe/__tests__/test_queries.py +605 -0
- iceaxe/__tests__/test_queries_str.py +173 -0
- iceaxe/__tests__/test_session.py +1511 -0
- iceaxe/__tests__/test_text_search.py +287 -0
- iceaxe/alias_values.py +67 -0
- iceaxe/base.py +350 -0
- iceaxe/comparison.py +560 -0
- iceaxe/field.py +250 -0
- iceaxe/functions.py +906 -0
- iceaxe/generics.py +140 -0
- iceaxe/io.py +107 -0
- iceaxe/logging.py +91 -0
- iceaxe/migrations/__init__.py +5 -0
- iceaxe/migrations/action_sorter.py +98 -0
- iceaxe/migrations/cli.py +228 -0
- iceaxe/migrations/client_io.py +62 -0
- iceaxe/migrations/generator.py +404 -0
- iceaxe/migrations/migration.py +86 -0
- iceaxe/migrations/migrator.py +101 -0
- iceaxe/modifications.py +176 -0
- iceaxe/mountaineer/__init__.py +10 -0
- iceaxe/mountaineer/cli.py +74 -0
- iceaxe/mountaineer/config.py +46 -0
- iceaxe/mountaineer/dependencies/__init__.py +6 -0
- iceaxe/mountaineer/dependencies/core.py +67 -0
- iceaxe/postgres.py +133 -0
- iceaxe/py.typed +0 -0
- iceaxe/queries.py +1455 -0
- iceaxe/queries_str.py +294 -0
- iceaxe/schemas/__init__.py +0 -0
- iceaxe/schemas/actions.py +864 -0
- iceaxe/schemas/cli.py +30 -0
- iceaxe/schemas/db_memory_serializer.py +705 -0
- iceaxe/schemas/db_serializer.py +346 -0
- iceaxe/schemas/db_stubs.py +525 -0
- iceaxe/session.py +860 -0
- iceaxe/session_optimized.c +12035 -0
- iceaxe/session_optimized.cpython-313-darwin.so +0 -0
- iceaxe/session_optimized.pyx +212 -0
- iceaxe/sql_types.py +148 -0
- iceaxe/typing.py +73 -0
- iceaxe-0.7.1.dist-info/METADATA +261 -0
- iceaxe-0.7.1.dist-info/RECORD +75 -0
- iceaxe-0.7.1.dist-info/WHEEL +6 -0
- iceaxe-0.7.1.dist-info/licenses/LICENSE +21 -0
- iceaxe-0.7.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,346 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from typing import cast
|
|
3
|
+
|
|
4
|
+
from iceaxe.io import lru_cache_async
|
|
5
|
+
from iceaxe.postgres import ForeignKeyModifications
|
|
6
|
+
from iceaxe.schemas.actions import (
|
|
7
|
+
CheckConstraint,
|
|
8
|
+
ColumnType,
|
|
9
|
+
ConstraintType,
|
|
10
|
+
ForeignKeyConstraint,
|
|
11
|
+
)
|
|
12
|
+
from iceaxe.schemas.db_stubs import (
|
|
13
|
+
DBColumn,
|
|
14
|
+
DBColumnPointer,
|
|
15
|
+
DBConstraint,
|
|
16
|
+
DBObject,
|
|
17
|
+
DBTable,
|
|
18
|
+
DBType,
|
|
19
|
+
DBTypePointer,
|
|
20
|
+
)
|
|
21
|
+
from iceaxe.session import DBConnection
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class DatabaseSerializer:
    """
    Convert the current database state to the intermediary DBObject representations that
    represent its current configuration properties. Used for introspection
    and comparison to the in-code definitions.

    """

    def __init__(self):
        # Internal tables used for migration management, shouldn't be managed in-memory and therefore
        # won't be mirrored by our DBMemorySerializer. We exclude them from this serialization lest there
        # be a detected conflict and we try to remove the migration metadata.
        self.ignore_tables: list[str] = ["migration_info"]
|
|
37
|
+
|
|
38
|
+
@staticmethod
|
|
39
|
+
def _unwrap_db_str(value: str | bytes | bytearray | memoryview) -> str:
|
|
40
|
+
"""
|
|
41
|
+
Helper method to handle database values that might be bytes-like or strings.
|
|
42
|
+
PostgreSQL sometimes returns bytes-like objects for certain fields, this normalizes the output.
|
|
43
|
+
|
|
44
|
+
:param value: The value from the database, either string or bytes-like object
|
|
45
|
+
:return: The string representation of the value
|
|
46
|
+
"""
|
|
47
|
+
if isinstance(value, str):
|
|
48
|
+
return value
|
|
49
|
+
|
|
50
|
+
if isinstance(value, (bytes, bytearray, memoryview)):
|
|
51
|
+
return bytes(value).decode()
|
|
52
|
+
|
|
53
|
+
raise ValueError(f"Unexpected type for database value: {type(value)}")
|
|
54
|
+
|
|
55
|
+
async def get_objects(self, connection: DBConnection):
|
|
56
|
+
tables = []
|
|
57
|
+
async for table, dependencies in self.get_tables(connection):
|
|
58
|
+
tables.append(table)
|
|
59
|
+
yield table, dependencies
|
|
60
|
+
|
|
61
|
+
for table in tables:
|
|
62
|
+
async for column, dependencies in self.get_columns(
|
|
63
|
+
connection, table.table_name
|
|
64
|
+
):
|
|
65
|
+
yield column, dependencies + [table]
|
|
66
|
+
|
|
67
|
+
async for constraint, dependencies in self.get_constraints(
|
|
68
|
+
connection, table.table_name
|
|
69
|
+
):
|
|
70
|
+
yield constraint, dependencies + [table]
|
|
71
|
+
|
|
72
|
+
async for constraint, dependencies in self.get_indexes(
|
|
73
|
+
connection, table.table_name
|
|
74
|
+
):
|
|
75
|
+
yield constraint, dependencies + [table]
|
|
76
|
+
|
|
77
|
+
async def get_tables(self, session: DBConnection):
|
|
78
|
+
result = await session.conn.fetch(
|
|
79
|
+
"SELECT table_name FROM information_schema.tables WHERE table_schema='public'"
|
|
80
|
+
)
|
|
81
|
+
|
|
82
|
+
for row in result:
|
|
83
|
+
if row["table_name"] in self.ignore_tables:
|
|
84
|
+
continue
|
|
85
|
+
yield DBTable(table_name=row["table_name"]), []
|
|
86
|
+
|
|
87
|
+
    async def get_columns(self, session: DBConnection, table_name: str):
        """
        Yield the columns of *table_name* as ``DBColumn`` objects, preceded by
        any user-defined (enum) types they reference.

        For each row:
        - USER-DEFINED columns trigger a ``fetch_custom_type`` lookup; the
          resulting ``DBType`` is yielded first so it exists before the column.
        - ARRAY columns are flattened to their element type with
          ``column_is_list=True`` (element type resolved via the
          ``information_schema.element_types`` join below).

        :param session: open database connection wrapper
        :param table_name: table to introspect (public schema only)
        """
        query = """
        SELECT
            cols.column_name,
            cols.udt_name,
            cols.data_type,
            cols.is_nullable,
            CASE
                WHEN cols.data_type = 'ARRAY' THEN elem_type.data_type
                ELSE NULL
            END AS element_type
        FROM information_schema.columns AS cols
        LEFT JOIN information_schema.element_types AS elem_type
            ON cols.table_catalog = elem_type.object_catalog
            AND cols.table_schema = elem_type.object_schema
            AND cols.table_name = elem_type.object_name
            AND cols.dtd_identifier = elem_type.collection_type_identifier
        WHERE cols.table_name = $1
            AND cols.table_schema = 'public';

        """
        result = await session.conn.fetch(query, table_name)

        # NOTE(review): this list is shared across loop iterations, so a column
        # yielded later also lists the custom types of earlier columns as
        # dependencies. Over-constraining is harmless for ordering but looks
        # unintentional — confirm whether per-column deps were intended.
        column_dependencies: list[DBObject] = []
        for row in result:
            column_is_list = False

            if row["data_type"] == "USER-DEFINED":
                # Enum/custom type: yield the type itself before the column
                # that uses it so creation order is valid.
                column_type, column_type_deps = await self.fetch_custom_type(
                    session, row["udt_name"]
                )
                column_dependencies.append(column_type)
                yield column_type, column_type_deps
            elif row["data_type"] == "ARRAY":
                column_is_list = True
                column_type = ColumnType(row["element_type"])
            else:
                column_type = ColumnType(row["data_type"])

            yield (
                DBColumn(
                    table_name=table_name,
                    column_name=row["column_name"],
                    # Custom types are referenced by pointer so the column does
                    # not embed the full type definition.
                    column_type=(
                        DBTypePointer(name=column_type.name)
                        if isinstance(column_type, DBType)
                        else column_type
                    ),
                    column_is_list=column_is_list,
                    nullable=(row["is_nullable"] == "YES"),
                ),
                column_dependencies,
            )
|
|
140
|
+
|
|
141
|
+
async def get_constraints(self, session: DBConnection, table_name: str):
|
|
142
|
+
query = """
|
|
143
|
+
SELECT
|
|
144
|
+
conname,
|
|
145
|
+
contype,
|
|
146
|
+
conrelid,
|
|
147
|
+
confrelid,
|
|
148
|
+
conkey,
|
|
149
|
+
confkey,
|
|
150
|
+
confupdtype,
|
|
151
|
+
confdeltype
|
|
152
|
+
FROM pg_constraint
|
|
153
|
+
INNER JOIN pg_class ON pg_constraint.conrelid = pg_class.oid
|
|
154
|
+
WHERE pg_class.relname = $1
|
|
155
|
+
"""
|
|
156
|
+
result = await session.conn.fetch(query, table_name)
|
|
157
|
+
for row in result:
|
|
158
|
+
contype = self._unwrap_db_str(row["contype"])
|
|
159
|
+
# Determine type
|
|
160
|
+
if contype == "p":
|
|
161
|
+
ctype = ConstraintType.PRIMARY_KEY
|
|
162
|
+
elif contype == "f":
|
|
163
|
+
ctype = ConstraintType.FOREIGN_KEY
|
|
164
|
+
elif contype == "u":
|
|
165
|
+
ctype = ConstraintType.UNIQUE
|
|
166
|
+
elif contype == "c":
|
|
167
|
+
ctype = ConstraintType.CHECK
|
|
168
|
+
else:
|
|
169
|
+
raise ValueError(f"Unknown constraint type: {row['contype']}")
|
|
170
|
+
|
|
171
|
+
columns = await self.fetch_constraint_columns(
|
|
172
|
+
session, row["conkey"], table_name
|
|
173
|
+
)
|
|
174
|
+
|
|
175
|
+
# Handle foreign key specifics
|
|
176
|
+
fk_constraint: ForeignKeyConstraint | None = None
|
|
177
|
+
check_constraint: CheckConstraint | None = None
|
|
178
|
+
|
|
179
|
+
if ctype == ConstraintType.FOREIGN_KEY:
|
|
180
|
+
# Fetch target table
|
|
181
|
+
fk_query = "SELECT relname FROM pg_class WHERE oid = $1"
|
|
182
|
+
fk_result = await session.conn.fetch(fk_query, row["confrelid"])
|
|
183
|
+
target_table = fk_result[0]["relname"]
|
|
184
|
+
|
|
185
|
+
# Fetch target columns
|
|
186
|
+
target_columns_query = """
|
|
187
|
+
SELECT a.attname AS column_name
|
|
188
|
+
FROM pg_attribute a
|
|
189
|
+
WHERE a.attrelid = $1 AND a.attnum = ANY($2)
|
|
190
|
+
"""
|
|
191
|
+
target_columns_result = await session.conn.fetch(
|
|
192
|
+
target_columns_query,
|
|
193
|
+
row["confrelid"],
|
|
194
|
+
row["confkey"],
|
|
195
|
+
)
|
|
196
|
+
target_columns = {row["column_name"] for row in target_columns_result}
|
|
197
|
+
|
|
198
|
+
# Map PostgreSQL action codes to action strings
|
|
199
|
+
action_map = {
|
|
200
|
+
"a": "NO ACTION",
|
|
201
|
+
"r": "RESTRICT",
|
|
202
|
+
"c": "CASCADE",
|
|
203
|
+
"n": "SET NULL",
|
|
204
|
+
"d": "SET DEFAULT",
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
on_update = action_map.get(
|
|
208
|
+
self._unwrap_db_str(row["confupdtype"]),
|
|
209
|
+
"NO ACTION",
|
|
210
|
+
)
|
|
211
|
+
on_delete = action_map.get(
|
|
212
|
+
self._unwrap_db_str(row["confdeltype"]),
|
|
213
|
+
"NO ACTION",
|
|
214
|
+
)
|
|
215
|
+
|
|
216
|
+
on_update_mod = cast(ForeignKeyModifications, on_update)
|
|
217
|
+
on_delete_mod = cast(ForeignKeyModifications, on_delete)
|
|
218
|
+
|
|
219
|
+
fk_constraint = ForeignKeyConstraint(
|
|
220
|
+
target_table=target_table,
|
|
221
|
+
target_columns=frozenset(target_columns),
|
|
222
|
+
on_delete=on_delete_mod,
|
|
223
|
+
on_update=on_update_mod,
|
|
224
|
+
)
|
|
225
|
+
elif ctype == ConstraintType.CHECK:
|
|
226
|
+
# Retrieve the check constraint expression
|
|
227
|
+
check_query = """
|
|
228
|
+
SELECT pg_get_constraintdef(c.oid) AS consrc
|
|
229
|
+
FROM pg_constraint c
|
|
230
|
+
WHERE c.oid = $1
|
|
231
|
+
"""
|
|
232
|
+
check_result = await session.conn.fetch(check_query, row["oid"])
|
|
233
|
+
check_constraint_expr = check_result[0]["consrc"]
|
|
234
|
+
|
|
235
|
+
check_constraint = CheckConstraint(
|
|
236
|
+
check_condition=check_constraint_expr,
|
|
237
|
+
)
|
|
238
|
+
|
|
239
|
+
yield (
|
|
240
|
+
DBConstraint(
|
|
241
|
+
table_name=table_name,
|
|
242
|
+
constraint_name=row["conname"],
|
|
243
|
+
columns=frozenset(columns),
|
|
244
|
+
constraint_type=ctype,
|
|
245
|
+
foreign_key_constraint=fk_constraint,
|
|
246
|
+
check_constraint=check_constraint,
|
|
247
|
+
),
|
|
248
|
+
[
|
|
249
|
+
# We require the columns to be created first
|
|
250
|
+
DBColumnPointer(table_name=table_name, column_name=column)
|
|
251
|
+
for column in columns
|
|
252
|
+
],
|
|
253
|
+
)
|
|
254
|
+
|
|
255
|
+
async def get_indexes(self, session: DBConnection, table_name: str):
|
|
256
|
+
# Query for indexes, excluding primary keys
|
|
257
|
+
index_query = """
|
|
258
|
+
SELECT i.indexname, i.indexdef
|
|
259
|
+
FROM pg_indexes i
|
|
260
|
+
LEFT JOIN pg_constraint c ON c.conname = i.indexname
|
|
261
|
+
WHERE i.tablename = $1
|
|
262
|
+
AND c.conname IS NULL
|
|
263
|
+
AND i.indexdef NOT ILIKE '%UNIQUE INDEX%'
|
|
264
|
+
"""
|
|
265
|
+
index_result = await session.conn.fetch(index_query, table_name)
|
|
266
|
+
|
|
267
|
+
for row in index_result:
|
|
268
|
+
index_name = row["indexname"]
|
|
269
|
+
index_def = row["indexdef"]
|
|
270
|
+
|
|
271
|
+
# Extract columns from index definition
|
|
272
|
+
columns_match = re.search(r"\((.*?)\)", index_def)
|
|
273
|
+
if columns_match:
|
|
274
|
+
# Reserved names are quoted in the response body
|
|
275
|
+
columns = [
|
|
276
|
+
col.strip().strip('"') for col in columns_match.group(1).split(",")
|
|
277
|
+
]
|
|
278
|
+
else:
|
|
279
|
+
columns = []
|
|
280
|
+
|
|
281
|
+
yield (
|
|
282
|
+
DBConstraint(
|
|
283
|
+
table_name=table_name,
|
|
284
|
+
columns=frozenset(columns),
|
|
285
|
+
constraint_name=index_name,
|
|
286
|
+
constraint_type=ConstraintType.INDEX,
|
|
287
|
+
),
|
|
288
|
+
[
|
|
289
|
+
DBColumnPointer(table_name=table_name, column_name=column)
|
|
290
|
+
for column in columns
|
|
291
|
+
],
|
|
292
|
+
)
|
|
293
|
+
|
|
294
|
+
async def fetch_constraint_columns(self, session: DBConnection, conkey, table_name):
|
|
295
|
+
# Assume conkey is a list of column indices; this function would fetch actual column names
|
|
296
|
+
query = "SELECT attname FROM pg_attribute WHERE attnum = ANY($1) AND attrelid = (SELECT oid FROM pg_class WHERE relname = $2)"
|
|
297
|
+
return [
|
|
298
|
+
row["attname"]
|
|
299
|
+
for row in await session.conn.fetch(query, conkey, table_name)
|
|
300
|
+
]
|
|
301
|
+
|
|
302
|
+
# Enum values are not expected to change within one session, cache the same
|
|
303
|
+
# type if we see it within the same session
|
|
304
|
+
@lru_cache_async(maxsize=None)
|
|
305
|
+
async def fetch_custom_type(self, session: DBConnection, type_name: str):
|
|
306
|
+
# Get the values in this enum
|
|
307
|
+
values_query = """
|
|
308
|
+
SELECT enumlabel
|
|
309
|
+
FROM pg_enum
|
|
310
|
+
JOIN pg_type ON pg_enum.enumtypid = pg_type.oid
|
|
311
|
+
WHERE pg_type.typname = $1
|
|
312
|
+
"""
|
|
313
|
+
values = frozenset(
|
|
314
|
+
[
|
|
315
|
+
row["enumlabel"]
|
|
316
|
+
for row in await session.conn.fetch(values_query, type_name)
|
|
317
|
+
]
|
|
318
|
+
)
|
|
319
|
+
|
|
320
|
+
# Determine all the columns where this type is referenced
|
|
321
|
+
reference_columns_query = """
|
|
322
|
+
SELECT
|
|
323
|
+
n.nspname AS schema_name,
|
|
324
|
+
c.relname AS table_name,
|
|
325
|
+
a.attname AS column_name
|
|
326
|
+
FROM pg_catalog.pg_type t
|
|
327
|
+
JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
|
|
328
|
+
JOIN pg_catalog.pg_attribute a ON a.atttypid = t.oid
|
|
329
|
+
JOIN pg_catalog.pg_class c ON c.oid = a.attrelid
|
|
330
|
+
WHERE
|
|
331
|
+
t.typname = $1
|
|
332
|
+
AND a.attnum > 0
|
|
333
|
+
AND NOT a.attisdropped;
|
|
334
|
+
"""
|
|
335
|
+
reference_columns_results = await session.conn.fetch(
|
|
336
|
+
reference_columns_query, type_name
|
|
337
|
+
)
|
|
338
|
+
reference_columns = frozenset(
|
|
339
|
+
{
|
|
340
|
+
(row["table_name"], row["column_name"])
|
|
341
|
+
for row in reference_columns_results
|
|
342
|
+
}
|
|
343
|
+
)
|
|
344
|
+
return DBType(
|
|
345
|
+
name=type_name, values=values, reference_columns=reference_columns
|
|
346
|
+
), []
|