deepsel 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
deepsel-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Deepsel Inc.
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
deepsel-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,30 @@
1
+ Metadata-Version: 2.4
2
+ Name: deepsel
3
+ Version: 0.1.0
4
+ Summary: Database utilities and authentication framework for SQLAlchemy, FastAPI Python applications
5
+ Author-email: "Deepsel Inc." <info@deepsel.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/DeepselSystems/deepsel-core
8
+ Project-URL: Repository, https://github.com/DeepselSystems/deepsel-core
9
+ Project-URL: Issues, https://github.com/DeepselSystems/deepsel-core/issues
10
+ Keywords: sqlalchemy,database,migration,schema,postgres,deepsel
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Topic :: Database
17
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
18
+ Requires-Python: >=3.12
19
+ Description-Content-Type: text/markdown
20
+ License-File: LICENSE
21
+ Requires-Dist: sqlalchemy>=2.0.30
22
+ Provides-Extra: dev
23
+ Requires-Dist: pytest>=8.3.5; extra == "dev"
24
+ Requires-Dist: pytest-cov>=7.0.0; extra == "dev"
25
+ Requires-Dist: black>=25.0.0; extra == "dev"
26
+ Requires-Dist: flake8>=7.2.0; extra == "dev"
27
+ Requires-Dist: bandit>=1.8.0; extra == "dev"
28
+ Requires-Dist: build>=0.10.0; extra == "dev"
29
+ Requires-Dist: twine>=6.2.0; extra == "dev"
30
+ Dynamic: license-file
# Declare ``deepsel`` as a pkgutil-style namespace package so that other
# distributions can contribute additional ``deepsel.*`` subpackages.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
"""
Deepsel Core - Database schema migration and management utilities
"""

from .db_manager import DatabaseManager

# NOTE(review): keep in sync with the version declared in pyproject.toml.
__version__ = "0.1.0"
__all__ = ["DatabaseManager"]
@@ -0,0 +1,569 @@
1
+ import logging
2
+ from sqlalchemy import Enum, Table, inspect, text, Column
3
+ from sqlalchemy.engine import Connection
4
+ from sqlalchemy.orm import Session
5
+ from sqlalchemy.exc import IntegrityError
6
+ from sqlalchemy.orm.decl_api import DeclarativeBase
7
+
8
+ logger = logging.getLogger(__name__)
9
+
10
+
class DatabaseManager:
    """Reconcile SQLAlchemy model metadata with the live database schema.

    On construction it immediately compares the tables declared in
    ``models_pool`` against the database and emits DDL to align them.
    NOTE(review): the emitted DDL uses PostgreSQL-specific features
    (``SERIAL``, ``ALTER TYPE ... ADD VALUE``, ``inspector.get_enums()``),
    so this manager assumes a PostgreSQL backend — confirm before reuse.
    """

    def __init__(
        self,
        sqlalchemy_declarative_base: DeclarativeBase,
        db_session_factory,
        models_pool: dict,
    ):
        """
        :param sqlalchemy_declarative_base: declarative base whose ``metadata``
            holds the model ``Table`` objects.
        :param db_session_factory: zero-argument callable returning a Session
            usable as a context manager (e.g. a ``sessionmaker``).
        :param models_pool: mapping of table name -> model; its keys define
            the complete set of tables that should exist.
        """
        self.declarative_base = sqlalchemy_declarative_base
        self.db_session_factory = db_session_factory
        self.models_pool = models_pool
        # Migration runs eagerly as a constructor side effect.
        self.startup_database_update()

    def startup_database_update(self):
        """Run the schema comparison/migration once; log and re-raise failures."""
        logger.info("Database migration started...")
        try:
            with self.db_session_factory() as db:
                self.compare_and_update_schema(db)
            logger.info("Database migration completed successfully")
        except Exception as e:
            # Re-raise so callers cannot start with a half-migrated schema.
            logger.error(f"Database migration failed: {e}", exc_info=True)
            raise
32
+
    def compare_and_update_schema(self, db: Session):
        """Diff model tables against the reflected schema and apply DDL.

        Order of operations (all in one ``engine.begin()`` transaction):
        1. create/update every model table (foreign keys are deferred so a
           referenced table can be created later in the same pass);
        2. drop database tables that no longer have a model (``alembic_version``
           is preserved);
        3. add all deferred foreign keys once every table/column exists.
        """
        existing_schema: dict = self.reflect_database_schema(db)
        model_tables: list[str] = list(self.models_pool.keys())
        engine = db.bind
        # Filled by update_table_schema; flushed after all tables exist.
        deferred_foreign_keys = []

        with engine.begin() as connection:
            for table_name in model_tables:
                if table_name not in existing_schema:
                    # Create an empty shell; columns are added right after by
                    # update_table_schema (called with an empty existing schema).
                    command = text(f'CREATE TABLE "{table_name}" ();')
                    logger.info(
                        f"Detected new table {table_name}, creating... {command}"
                    )
                    connection.execute(command)
                    # Table() with an already-populated MetaData returns the
                    # model's existing Table definition.
                    table: Table = Table(table_name, self.declarative_base.metadata)
                    self.update_table_schema(
                        db, table, {}, connection, deferred_foreign_keys
                    )
                else:
                    table: Table = Table(table_name, self.declarative_base.metadata)
                    self.update_table_schema(
                        db,
                        table,
                        existing_schema[table_name],
                        connection,
                        deferred_foreign_keys,
                    )

            # Drop tables with no corresponding model, keeping Alembic's
            # bookkeeping table. CASCADE also removes dependent objects.
            for table_name in existing_schema:
                if table_name not in model_tables and table_name != "alembic_version":
                    command = text(f'DROP TABLE "{table_name}" CASCADE;')
                    logger.info(f"Detected removed table {table_name}: {command}")
                    connection.execute(command)

            # Now that every table/column exists, create the deferred FKs.
            for foreign_key in deferred_foreign_keys:
                table = foreign_key["table"]
                column = foreign_key["column"]
                referenced_table = foreign_key["foreign_key"].column.table.name
                referenced_column = foreign_key["foreign_key"].column.name
                command = text(
                    f'ALTER TABLE "{table}" ADD FOREIGN KEY ("{column}") REFERENCES "{referenced_table}" ("{referenced_column}");'
                )
                logger.info(
                    f'Adding foreign key for column "{column}" in table "{table}"... {command}'
                )
                connection.execute(command)

        logger.info("Database schema updated.")
81
+
82
+ def reflect_database_schema(self, db: Session):
83
+ engine = db.bind
84
+ inspector = inspect(engine)
85
+ existing_schema = {}
86
+ for table_name in inspector.get_table_names():
87
+ existing_schema[table_name] = {
88
+ col["name"]: col for col in inspector.get_columns(table_name)
89
+ }
90
+ return existing_schema
91
+
    def update_table_schema(
        self,
        db: Session,
        model_table: Table,
        existing_table_schema: dict,
        connection: Connection,
        deferred_foreign_keys=None,
    ):
        """Align one table's columns/constraints with its model definition.

        :param model_table: the model's ``Table`` (source of truth).
        :param existing_table_schema: ``{column name -> column info}`` for the
            live table; empty dict for a freshly created table.
        :param connection: open connection of the surrounding transaction.
        :param deferred_foreign_keys: shared list; FK additions are appended
            here and executed by the caller after all tables exist.
        """
        if deferred_foreign_keys is None:
            deferred_foreign_keys = []
        model_columns = {c.name: c for c in model_table.columns}
        existing_columns = existing_table_schema
        engine = db.bind
        inspector = inspect(engine)

        # Snapshot existing constraints/indexes so per-column diffs below can
        # decide what changed. Non-unique indexes only; unique ones are
        # handled via unique constraints.
        unique_constraints = inspector.get_unique_constraints(model_table.name)
        indexes = [
            index
            for index in inspector.get_indexes(model_table.name)
            if not index["unique"]
        ]
        # get_enums() is PostgreSQL-specific.
        enums = inspector.get_enums()
        foreign_key_constraints = inspector.get_foreign_keys(model_table.name)
        existing_foreign_keys = [
            column
            for constraint in foreign_key_constraints
            for column in constraint["constrained_columns"]
        ]

        # Primary-key diff: if the existing PK differs from the model's, drop
        # it now; a composite PK is re-added at the end of this method.
        existing_pk_constraint = inspector.get_pk_constraint(model_table.name)
        existing_primary_keys = existing_pk_constraint["constrained_columns"] or []
        model_primary_keys = [col.name for col in model_table.primary_key.columns]
        is_composite_primary_key = len(model_primary_keys) > 1
        is_existing_pk_removed = False
        if existing_primary_keys != model_primary_keys:
            if existing_primary_keys:
                command = text(
                    f'ALTER TABLE "{model_table.name}" DROP CONSTRAINT {existing_pk_constraint["name"]};'
                )
                connection.execute(command)
                is_existing_pk_removed = True

        # ---- Pass 1: diff columns that exist in BOTH database and model ----
        for col_name, existing_column in existing_columns.items():
            if col_name in model_columns:
                model_column = model_columns[col_name]
                changes = []
                nullable = model_column.nullable
                # NOTE(review): initialized to None (not False), so a column
                # with unique=False and no constraint compares False != None
                # below — confirm this asymmetry is intended.
                has_unique_constraint = None
                has_index = None

                # Only the first column of a unique constraint is considered.
                for constraint in unique_constraints:
                    if col_name == constraint["column_names"][0]:
                        has_unique_constraint = True

                for index in indexes:
                    if col_name in index["column_names"]:
                        has_index = True

                # Foreign-key diff: add missing FKs (deferred), rewrite FKs
                # whose target changed, drop FKs removed from the model.
                if model_column.foreign_keys:
                    if col_name not in existing_foreign_keys:
                        for foreign_key in model_column.foreign_keys:
                            deferred_foreign_keys.append(
                                {
                                    "table": model_table.name,
                                    "column": col_name,
                                    "foreign_key": foreign_key,
                                }
                            )
                    else:
                        # Take the last FK on the column (typically only one).
                        foreign_key = None
                        for fk in model_column.foreign_keys:
                            foreign_key = fk

                        existing_foreign_key_constraint = [
                            constraint
                            for constraint in foreign_key_constraints
                            if col_name in constraint["constrained_columns"]
                        ][0]
                        existing_referred_table = existing_foreign_key_constraint[
                            "referred_table"
                        ]
                        existing_referred_column = existing_foreign_key_constraint[
                            "referred_columns"
                        ][0]
                        new_referred_table = foreign_key.column.table.name
                        new_referred_column = foreign_key.column.name
                        if (
                            existing_referred_table != new_referred_table
                            or existing_referred_column != new_referred_column
                        ):
                            # Retarget: drop the old FK, add the new one now
                            # (target table already exists in this branch).
                            command = text(
                                f'ALTER TABLE "{model_table.name}" DROP CONSTRAINT {existing_foreign_key_constraint["name"]};'
                            )
                            logger.info(
                                f'Removing foreign key for column "{col_name}" in table "{model_table.name}"... {command}'
                            )
                            connection.execute(command)
                            command = text(
                                f'ALTER TABLE "{model_table.name}" ADD FOREIGN KEY ("{col_name}") REFERENCES "{new_referred_table}" ("{new_referred_column}");'
                            )
                            logger.info(
                                f'Adding foreign key for column "{col_name}" in table "{model_table.name}"... {command}'
                            )
                            connection.execute(command)
                else:
                    # Model no longer declares an FK on this column.
                    if col_name in existing_foreign_keys:
                        foreign_key_constraint_name = [
                            constraint["name"]
                            for constraint in foreign_key_constraints
                            if col_name in constraint["constrained_columns"]
                        ][0]
                        command = text(
                            f'ALTER TABLE "{model_table.name}" DROP CONSTRAINT {foreign_key_constraint_name};'
                        )
                        logger.info(
                            f'Removing foreign key for column "{col_name}" in table "{model_table.name}"... {command}'
                        )
                        connection.execute(command)

                # Compare compiled DDL type strings for this dialect.
                old_type = existing_column["type"].compile(engine.dialect)
                new_type = model_column.type.compile(engine.dialect)

                if old_type != new_type:
                    # FLOAT compiles to DOUBLE PRECISION on reflection; not a
                    # real change.
                    if old_type == "DOUBLE PRECISION" and new_type == "FLOAT":
                        pass
                    else:
                        changes.append("TYPE")
                if model_column.nullable != existing_column.get("nullable", True):
                    changes.append("NULLABLE")
                if model_column.unique != has_unique_constraint:
                    changes.append("UNIQUE")
                if model_column.index != has_index:
                    changes.append("INDEX")
                if hasattr(model_column.type, "enums") and isinstance(
                    existing_column["type"], Enum
                ):
                    if model_column.type.enums != existing_column["type"].enums:
                        changes.append("ENUM")

                # TYPE change is handled destructively: the old column is
                # dropped and re-created (with data loss) by pass 2 below.
                if "TYPE" in changes:
                    if not nullable and model_column.default is None:
                        # Cannot recreate as NOT NULL without a default; skip.
                        logger.info(
                            f'Column "{col_name}" in table "{model_table.name}" has nullable=False, and cannot change type without a default value.'
                        )
                    else:
                        logger.info(
                            f'Column "{col_name}" in table "{model_table.name}" has changed type, dropping old column...',
                        )
                        command = text(
                            f'ALTER TABLE "{model_table.name}" DROP COLUMN "{col_name}";'
                        )
                        connection.execute(command)
                        # Mark so pass 2 re-adds it as a "new" column.
                        existing_columns[col_name]["dropped"] = True
                        continue

                if "NULLABLE" in changes:
                    if not model_column.nullable:
                        if model_column.default is None:
                            # Best effort: fails if the column holds NULLs.
                            try:
                                command = text(
                                    f'ALTER TABLE "{model_table.name}" ALTER COLUMN "{col_name}" SET NOT NULL;'
                                )
                                logger.info(
                                    f'Column "{col_name}" in table "{model_table.name}" has changed to NOT NULL without default value, attempting... {command}'
                                )
                                connection.execute(command)
                            except Exception as e:
                                logger.warning(
                                    f'Column "{col_name}" in table "{model_table.name}" cannot be set to NOT NULL without a default value.'
                                )
                                logger.debug(e)
                        else:
                            # Render the model default as a SQL literal.
                            if isinstance(model_column.default.arg, str):
                                default = f"'{model_column.default.arg}'"
                            elif hasattr(model_column.type, "enums") and hasattr(
                                model_column.default.arg, "name"
                            ):
                                default = f"'{model_column.default.arg.name}'"
                            else:
                                default = model_column.default.arg

                            # Backfill NULLs with the default, then tighten.
                            command = text(
                                f"""
                                ALTER TABLE "{model_table.name}"
                                ALTER COLUMN "{col_name}" TYPE {model_column.type.compile(engine.dialect)} USING (COALESCE("{col_name}", {default})),
                                ALTER COLUMN "{col_name}" SET DEFAULT {default},
                                ALTER COLUMN "{col_name}" SET NOT NULL;
                                """
                            )
                            logger.info(
                                f'Column "{col_name}" in table "{model_table.name}" has changed to NOT NULL, setting default value... {command}'
                            )
                            connection.execute(command)
                    else:
                        command = text(
                            f'ALTER TABLE "{model_table.name}" ALTER COLUMN "{col_name}" DROP NOT NULL;'
                        )
                        logger.info(
                            f'Column "{col_name}" in table "{model_table.name}" has changed to NULL, dropping NOT NULL... {command}'
                        )
                        connection.execute(command)

                if "UNIQUE" in changes:
                    _update_existing_column_unique_constraints(
                        model_table,
                        unique_constraints,
                        connection,
                        model_columns,
                        col_name,
                        model_column,
                    )

                if "INDEX" in changes:
                    # Index naming convention: <table>_<column>_index.
                    if model_column.index:
                        command = text(
                            f'CREATE INDEX {model_table.name}_{col_name}_index ON {model_table.name} ("{col_name}");'
                        )
                        logger.info(
                            f'Column "{col_name}" in table "{model_table.name}" has added index, adding... {command}'
                        )
                        connection.execute(command)
                    else:
                        command = text(
                            f"DROP INDEX {model_table.name}_{col_name}_index;"
                        )
                        logger.info(
                            f'Column "{col_name}" in table "{model_table.name}" has dropped index, dropping... {command}'
                        )
                        connection.execute(command)

                if "ENUM" in changes:
                    # Enum values can only be ADDED in place (PostgreSQL does
                    # not support removing enum values via ALTER TYPE).
                    existing_enum_type = existing_column["type"].compile(engine.dialect)
                    command = ""
                    for value in model_column.type.enums:
                        if value not in existing_column["type"].enums:
                            command += (
                                f"ALTER TYPE {existing_enum_type} ADD VALUE '{value}';"
                            )
                    if command:
                        logger.info(
                            f'Updating enum type for column "{col_name}" in table "{model_table.name}": {command}'
                        )
                        connection.execute(text(command))
                    if existing_enum_type != model_column.type.compile(engine.dialect):
                        command = text(
                            f"ALTER TYPE {existing_enum_type} RENAME TO {model_column.type.compile(engine.dialect)};"
                        )
                        logger.info(
                            f'Renaming enum type for column "{col_name}" in table "{model_table.name}": {command}'
                        )
                        connection.execute(command)

        # ---- Pass 2: add columns that are new (or dropped above for a type
        # change) ----
        new_columns = []
        for col_name, model_column in model_columns.items():
            if col_name not in existing_columns or existing_columns[col_name].get(
                "dropped", False
            ):
                col_type = model_column.type.compile(engine.dialect)
                nullable = "NULL" if model_column.nullable else "NOT NULL"
                unique = "UNIQUE" if model_column.unique else ""
                default = ""
                autoincrement = ""
                if not is_composite_primary_key:
                    # Single integer PK becomes an auto-incrementing SERIAL.
                    col_type = (
                        "SERIAL PRIMARY KEY"
                        if model_column.primary_key and col_type == "INTEGER"
                        else col_type
                    )

                is_enum = hasattr(model_column.type, "enums")
                if is_enum:
                    # Create the enum type if unknown; otherwise append any
                    # new values to the existing type.
                    if col_type not in [enum["name"] for enum in enums]:
                        command = text(
                            f"CREATE TYPE {col_type} AS ENUM {tuple(model_column.type.enums)};"
                        )
                        logger.info(
                            f'Creating enum type for column "{col_name}" in table "{model_table.name}": {command}'
                        )
                        connection.execute(command)
                        # Track locally so a second column of the same enum
                        # type does not re-create it.
                        enums.append(
                            {"name": col_type, "labels": model_column.type.enums}
                        )
                    else:
                        command = ""
                        existing_enum_type = [
                            enum for enum in enums if enum["name"] == col_type
                        ][0]
                        existing_enum_values = existing_enum_type["labels"]
                        for value in model_column.type.enums:
                            if value not in existing_enum_values:
                                command += f"ALTER TYPE {col_type} ADD VALUE '{value}';"
                        if command:
                            logger.info(
                                f'Updating enum type for column "{col_name}" in table "{model_table.name}": {command}'
                            )
                            connection.execute(text(command))

                # Render the DEFAULT clause for supported literal types;
                # callables/server defaults are silently skipped.
                if model_column.default is not None:
                    default_val_type = type(model_column.default.arg)
                    if default_val_type == str:
                        default = f"DEFAULT '{model_column.default.arg}'"
                    elif (
                        default_val_type == int
                        or default_val_type == float
                        or default_val_type == bool
                    ):
                        default = f"DEFAULT {model_column.default.arg}"
                    elif is_enum:
                        default = f"DEFAULT '{model_column.default.arg.name}'"
                    elif default_val_type == dict or default_val_type == list:
                        default = f"DEFAULT '{model_column.default.arg}'"
                    else:
                        pass

                # BIGINT PKs use identity columns instead of SERIAL.
                if model_column.primary_key and col_type == "BIGINT":
                    autoincrement = "GENERATED ALWAYS AS IDENTITY"
                    nullable = ""

                command = text(
                    f'ALTER TABLE "{model_table.name}" ADD COLUMN "{col_name}" {col_type} {nullable} {unique} {default} {autoincrement};'
                )
                logger.info(
                    f'Adding column "{col_name}" to table "{model_table.name}": {command}'
                )
                new_columns.append(col_name)
                connection.execute(command)

                if model_column.index:
                    command = text(
                        f'CREATE INDEX {model_table.name}_{col_name}_index ON {model_table.name} ("{col_name}");'
                    )
                    logger.info(
                        f'Adding index for column "{col_name}" in table "{model_table.name}": {command}'
                    )
                    connection.execute(command)

                # Single-column unique constraint only for non-multi-tenant
                # tables; tenant tables get composite constraints later via
                # _create_table_composite_unique_constrains.
                if model_column.unique and "organization_id" not in model_columns:
                    single_unique_constraint = f"{model_table.name}_{col_name}_unique"
                    command = text(
                        f'ALTER TABLE "{model_table.name}" ADD CONSTRAINT {single_unique_constraint} UNIQUE ("{col_name}");'
                    )
                    logger.info(
                        f'Adding unique constraint for column "{col_name}" in table "{model_table.name}"... {command}'
                    )
                    connection.execute(command)

                # FKs for new columns are always deferred to the caller.
                if model_column.foreign_keys:
                    for foreign_key in model_column.foreign_keys:
                        deferred_foreign_keys.append(
                            {
                                "table": model_table.name,
                                "column": col_name,
                                "foreign_key": foreign_key,
                            }
                        )

        # Re-add the composite PK if the model defines one and the table has
        # none (either never had one, or it was dropped above).
        if is_composite_primary_key and (
            not existing_primary_keys or is_existing_pk_removed
        ):
            key_columns = ", ".join(model_primary_keys)
            command = text(
                f"ALTER TABLE {model_table.name} ADD PRIMARY KEY ({key_columns});"
            )
            logger.info(
                f'Adding composite primary key for columns "{key_columns}" in table "{model_table.name}"... {command}'
            )
            connection.execute(command)

        self._create_table_composite_unique_constrains(
            model_table,
            existing_table_schema,
            connection,
            model_columns,
            new_columns,
        )

        # ---- Pass 3: drop columns removed from the model ----
        for col_name in existing_columns:
            if col_name not in model_columns:
                command = text(
                    f'ALTER TABLE "{model_table.name}" DROP COLUMN "{col_name}";'
                )
                logger.info(
                    f"Detected removed column {col_name} in table {model_table.name}: {command}",
                )
                connection.execute(command)
477
+
478
+ def _create_table_composite_unique_constrains(
479
+ self,
480
+ model_table: Table,
481
+ existing_table_schema: dict,
482
+ connection: Connection,
483
+ model_columns: dict[str, Column],
484
+ new_columns: list,
485
+ ):
486
+ if "organization_id" not in model_columns:
487
+ return
488
+ for col_name, model_column in model_columns.items():
489
+ if col_name == "organization_id":
490
+ continue
491
+ if not model_column.unique:
492
+ continue
493
+
494
+ single_unique_constraint = f"{model_table.name}_{col_name}_unique"
495
+ if single_unique_constraint in existing_table_schema:
496
+ command = text(
497
+ f'ALTER TABLE "{model_table.name}" DROP CONSTRAINT {model_table.name}_{col_name}_unique;'
498
+ )
499
+ logger.info(
500
+ f'Column "{col_name}" in table "{model_table.name}" has changed to NOT UNIQUE, dropping unique constraint... {command}'
501
+ )
502
+ connection.execute(command)
503
+
504
+ composite_unique_constraint_name = (
505
+ f"{model_table.name}_{col_name}_organization_id_unique"
506
+ )
507
+ if col_name not in new_columns:
508
+ return
509
+ command = text(
510
+ f'ALTER TABLE "{model_table.name}" ADD CONSTRAINT {composite_unique_constraint_name} UNIQUE ("{col_name}", organization_id);'
511
+ )
512
+ logger.info(
513
+ f'Adding composite unique constraint for columns "{col_name}" and "organization_id" in table "{model_table.name}"... {command}'
514
+ )
515
+ connection.execute(command)
516
+
517
+
def _update_existing_column_unique_constraints(
    model_table: Table,
    existing_unique_constraints: list[dict],
    connection: Connection,
    model_columns: dict,
    col_name: str,
    model_column,
):
    """
    Updates the unique constraints for a specified column in a database table based on the column's current schema definition.

    This function handles both the addition and removal of unique constraints. If the column is intended to be unique and
    it's part of a composite unique key (involving `organization_id`), it adds or removes a composite constraint. Otherwise,
    it manages a single-column unique constraint.
    """

    if model_column.unique:
        # Multi-tenant tables get a composite (column, organization_id)
        # constraint; others a plain single-column one.
        if "organization_id" in model_columns:
            constraint_name = f"{model_table.name}_{col_name}_organization_id_unique"
            command = text(
                f'ALTER TABLE "{model_table.name}" ADD CONSTRAINT {constraint_name} UNIQUE ("{col_name}", organization_id);'
            )
        else:
            constraint_name = f"{model_table.name}_{col_name}_unique"
            command = text(
                f'ALTER TABLE "{model_table.name}" ADD CONSTRAINT {constraint_name} UNIQUE ("{col_name}");'
            )

        logger.info(
            f'Column "{col_name}" in table "{model_table.name}" has changed to UNIQUE, attempting to add unique constraint... {command}'
        )
        try:
            connection.execute(command)
        except IntegrityError as e:
            logger.warning(
                f'Column "{col_name}" in table "{model_table.name}" cannot be set to UNIQUE, it may contain duplicate values.'
            )
            message = str(e.orig)
            # BUG FIX: not every driver error message contains "DETAIL: ";
            # indexing [1] unconditionally raised IndexError and masked the
            # original IntegrityError. Fall back to logging the full message.
            parts = message.split("DETAIL: ")
            logger.warning(parts[1] if len(parts) > 1 else message)

    else:
        # Column is no longer unique: drop every unique constraint that
        # involves it.
        for constraint in existing_unique_constraints:
            if col_name in constraint["column_names"]:
                unique_constraint_name = constraint["name"]
                command = text(
                    f'ALTER TABLE "{model_table.name}" DROP CONSTRAINT {unique_constraint_name};'
                )
                logger.info(
                    f'Column "{col_name}" in table "{model_table.name}" has changed to NOT UNIQUE, dropping unique constraint... {command}'
                )
                connection.execute(command)
File without changes
@@ -0,0 +1,30 @@
1
+ Metadata-Version: 2.4
2
+ Name: deepsel
3
+ Version: 0.1.0
4
+ Summary: Database utilities and authentication framework for SQLAlchemy, FastAPI Python applications
5
+ Author-email: "Deepsel Inc." <info@deepsel.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/DeepselSystems/deepsel-core
8
+ Project-URL: Repository, https://github.com/DeepselSystems/deepsel-core
9
+ Project-URL: Issues, https://github.com/DeepselSystems/deepsel-core/issues
10
+ Keywords: sqlalchemy,database,migration,schema,postgres,deepsel
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Topic :: Database
17
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
18
+ Requires-Python: >=3.12
19
+ Description-Content-Type: text/markdown
20
+ License-File: LICENSE
21
+ Requires-Dist: sqlalchemy>=2.0.30
22
+ Provides-Extra: dev
23
+ Requires-Dist: pytest>=8.3.5; extra == "dev"
24
+ Requires-Dist: pytest-cov>=7.0.0; extra == "dev"
25
+ Requires-Dist: black>=25.0.0; extra == "dev"
26
+ Requires-Dist: flake8>=7.2.0; extra == "dev"
27
+ Requires-Dist: bandit>=1.8.0; extra == "dev"
28
+ Requires-Dist: build>=0.10.0; extra == "dev"
29
+ Requires-Dist: twine>=6.2.0; extra == "dev"
30
+ Dynamic: license-file
@@ -0,0 +1,13 @@
1
+ LICENSE
2
+ pyproject.toml
3
+ setup.py
4
+ deepsel/__init__.py
5
+ deepsel/py.typed
6
+ deepsel.egg-info/PKG-INFO
7
+ deepsel.egg-info/SOURCES.txt
8
+ deepsel.egg-info/dependency_links.txt
9
+ deepsel.egg-info/requires.txt
10
+ deepsel.egg-info/top_level.txt
11
+ deepsel/core/__init__.py
12
+ deepsel/core/db_manager.py
13
+ tests/test_db_manager.py
@@ -0,0 +1,10 @@
1
+ sqlalchemy>=2.0.30
2
+
3
+ [dev]
4
+ pytest>=8.3.5
5
+ pytest-cov>=7.0.0
6
+ black>=25.0.0
7
+ flake8>=7.2.0
8
+ bandit>=1.8.0
9
+ build>=0.10.0
10
+ twine>=6.2.0
@@ -0,0 +1 @@
1
+ deepsel
@@ -0,0 +1,60 @@
1
+ [build-system]
2
+ requires = ["setuptools>=80.9.0", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "deepsel"
7
+ version = "0.1.0"
8
+ description = "Database utilities and authentication framework for SQLAlchemy, FastAPI Python applications"
9
+ readme = "README.md"
10
+ authors = [
11
+ {name = "Deepsel Inc.", email = "info@deepsel.com"}
12
+ ]
13
+ license = {text = "MIT"}
14
+ classifiers = [
15
+ "Development Status :: 4 - Beta",
16
+ "Intended Audience :: Developers",
17
+ "License :: OSI Approved :: MIT License",
18
+ "Programming Language :: Python :: 3",
19
+ "Programming Language :: Python :: 3.12",
20
+ "Topic :: Database",
21
+ "Topic :: Software Development :: Libraries :: Python Modules",
22
+ ]
23
+ keywords = ["sqlalchemy", "database", "migration", "schema", "postgres", "deepsel"]
24
+ requires-python = ">=3.12"
25
+ dependencies = [
26
+ "sqlalchemy>=2.0.30",
27
+ ]
28
+
29
+ [project.optional-dependencies]
30
+ dev = [
31
+ "pytest>=8.3.5",
32
+ "pytest-cov>=7.0.0",
33
+ "black>=25.0.0",
34
+ "flake8>=7.2.0",
35
+ "bandit>=1.8.0",
36
+ "build>=0.10.0",
37
+ "twine>=6.2.0",
38
+ ]
39
+
40
+ [project.urls]
41
+ Homepage = "https://github.com/DeepselSystems/deepsel-core"
42
+ Repository = "https://github.com/DeepselSystems/deepsel-core"
43
+ Issues = "https://github.com/DeepselSystems/deepsel-core/issues"
44
+
45
+ [tool.setuptools.packages.find]
46
+ where = ["."]
47
+ include = ["deepsel*"]
48
+ exclude = ["tests*", "examples*"]
49
+
50
+ [tool.setuptools.package-data]
51
+ deepsel = ["py.typed"]
52
+
53
+ [tool.black]
54
+ line-length = 88
55
+ target-version = ['py312']
56
+
57
+ [tool.pytest.ini_options]
58
+ testpaths = ["tests"]
59
+ python_files = ["test_*.py"]
60
+ addopts = "-v --cov=deepsel --cov-report=term-missing"
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
from setuptools import setup

# All project metadata lives in pyproject.toml; this shim exists only for
# tooling that still expects a setup.py.
setup()
@@ -0,0 +1,53 @@
1
+ import pytest
2
+ from unittest.mock import Mock, MagicMock, patch
3
+ from sqlalchemy import Table, Column, Integer, String
4
+ from sqlalchemy.orm import declarative_base
5
+ from deepsel.core import DatabaseManager
6
+
7
+
@pytest.fixture
def mock_base():
    # Fresh declarative base per test so table metadata does not leak
    # between tests.
    return declarative_base()
11
+
12
+
@pytest.fixture
def mock_db_session():
    # Session stub exposing only .bind, which is all
    # reflect_database_schema reads.
    session = Mock()
    session.bind = Mock()
    return session
18
+
19
+
@pytest.fixture
def mock_models_pool():
    # Minimal models pool: one table name mapped to a dummy model.
    return {"test_table": Mock()}
23
+
24
+
def test_database_manager_initialization(mock_base, mock_models_pool):
    # __new__ bypasses __init__, which would otherwise trigger a real
    # migration as a constructor side effect.
    with patch.object(DatabaseManager, "startup_database_update"):
        db_manager = DatabaseManager.__new__(DatabaseManager)
        db_manager.declarative_base = mock_base
        db_manager.models_pool = mock_models_pool

        assert db_manager.declarative_base == mock_base
        assert db_manager.models_pool == mock_models_pool
33
+
34
+
def test_reflect_database_schema(mock_base, mock_db_session, mock_models_pool):
    """reflect_database_schema maps table name -> {column name -> column info}."""
    # __new__ bypasses __init__, which would otherwise trigger a real
    # migration as a constructor side effect.
    with patch.object(DatabaseManager, "startup_database_update"):
        db_manager = DatabaseManager.__new__(DatabaseManager)
        db_manager.declarative_base = mock_base
        db_manager.models_pool = mock_models_pool

        with patch("deepsel.core.db_manager.inspect") as mock_inspect:
            mock_inspector = Mock()
            mock_inspector.get_table_names.return_value = ["table1", "table2"]
            mock_inspector.get_columns.return_value = [
                {"name": "id", "type": Integer()},
                {"name": "name", "type": String()},
            ]
            mock_inspect.return_value = mock_inspector

            schema = db_manager.reflect_database_schema(mock_db_session)

            # Strengthened assertions: the original test only checked that the
            # table names were present, never the column mapping itself.
            assert set(schema) == {"table1", "table2"}
            assert set(schema["table1"]) == {"id", "name"}
            assert schema["table2"]["id"]["name"] == "id"
            # The inspector must be built from the session's bound engine.
            mock_inspect.assert_called_once_with(mock_db_session.bind)