maleo-database 0.0.1 (maleo_database-0.0.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- maleo/database/__init__.py +0 -0
- maleo/database/config/__init__.py +105 -0
- maleo/database/config/additional.py +36 -0
- maleo/database/config/connection.py +544 -0
- maleo/database/config/identifier.py +12 -0
- maleo/database/config/pooling.py +255 -0
- maleo/database/enums.py +85 -0
- maleo/database/managers/__init__.py +0 -0
- maleo/database/managers/clients/__init__.py +0 -0
- maleo/database/managers/clients/elasticsearch.py +66 -0
- maleo/database/managers/clients/mongodb.py +53 -0
- maleo/database/managers/clients/redis.py +57 -0
- maleo/database/managers/engines/__init__.py +0 -0
- maleo/database/managers/engines/mysql.py +56 -0
- maleo/database/managers/engines/postgresql.py +58 -0
- maleo/database/managers/engines/sqlite.py +55 -0
- maleo/database/managers/engines/sqlserver.py +63 -0
- maleo/database/managers/session.py +123 -0
- maleo/database/orm/__init__.py +0 -0
- maleo/database/orm/base.py +7 -0
- maleo/database/orm/models/__init__.py +0 -0
- maleo/database/orm/models/mixins/__init__.py +0 -0
- maleo/database/orm/models/mixins/identifier.py +11 -0
- maleo/database/orm/models/mixins/status.py +12 -0
- maleo/database/orm/models/mixins/timestamp.py +65 -0
- maleo/database/orm/models/table.py +17 -0
- maleo/database/orm/queries.py +234 -0
- maleo_database-0.0.1.dist-info/METADATA +93 -0
- maleo_database-0.0.1.dist-info/RECORD +32 -0
- maleo_database-0.0.1.dist-info/WHEEL +5 -0
- maleo_database-0.0.1.dist-info/licenses/LICENSE +57 -0
- maleo_database-0.0.1.dist-info/top_level.txt +1 -0
maleo/database/managers/engines/sqlite.py
@@ -0,0 +1,55 @@
from sqlalchemy.engine import create_engine, Engine
from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine
from typing import Literal, Tuple, Union, overload
from ...config import SQLiteDatabaseConfig
from ...enums import Connection


class SQLiteEngineManager:
    def __init__(self, config: SQLiteDatabaseConfig) -> None:
        self.config = config
        self._async_engine: AsyncEngine = self._init(Connection.ASYNC)
        self._sync_engine: Engine = self._init(Connection.SYNC)

    @overload
    def _init(self, connection: Literal[Connection.ASYNC]) -> AsyncEngine: ...
    @overload
    def _init(self, connection: Literal[Connection.SYNC]) -> Engine: ...
    def _init(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[AsyncEngine, Engine]:
        url = self.config.connection.make_url(connection)

        # SQLite pooling is limited, most params don't apply
        pooling_kwargs = self.config.pooling.model_dump(
            exclude={"wal_mode", "busy_timeout"},  # These go in URL options
            exclude_none=True,
        )

        engine_kwargs = {"echo": self.config.connection.echo, **pooling_kwargs}

        if connection is Connection.ASYNC:
            self._async_engine = create_async_engine(url, **engine_kwargs)
            return self._async_engine
        elif connection is Connection.SYNC:
            self._sync_engine = create_engine(url, **engine_kwargs)
            return self._sync_engine

    @overload
    def get(self, connection: Literal[Connection.ASYNC]) -> AsyncEngine: ...
    @overload
    def get(self, connection: Literal[Connection.SYNC]) -> Engine: ...
    def get(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[AsyncEngine, Engine]:
        if connection is Connection.ASYNC:
            return self._async_engine
        elif connection is Connection.SYNC:
            return self._sync_engine

    def get_all(self) -> Tuple[AsyncEngine, Engine]:
        return (self._async_engine, self._sync_engine)

    async def dispose(self):
        await self._async_engine.dispose()
        self._sync_engine.dispose()
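For orientation, a minimal usage sketch of the engine manager above. It is an editor illustration, not package code: the constructor fields of `SQLiteDatabaseConfig` live in `maleo/database/config/`, which is not reproduced in this excerpt, so the config call is left as a placeholder.

```python
# Editor's sketch (not package code). SQLiteDatabaseConfig's fields are not
# shown in this excerpt, so the constructor call is left as a placeholder.
from maleo.database.config import SQLiteDatabaseConfig
from maleo.database.enums import Connection
from maleo.database.managers.engines.sqlite import SQLiteEngineManager

config = SQLiteDatabaseConfig(...)            # fields defined in maleo/database/config
manager = SQLiteEngineManager(config)         # eagerly builds both async and sync engines

async_engine = manager.get(Connection.ASYNC)  # AsyncEngine
sync_engine = manager.get(Connection.SYNC)    # Engine

# On shutdown, dispose() closes both engines (await it from async code):
# await manager.dispose()
```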
maleo/database/managers/engines/sqlserver.py
@@ -0,0 +1,63 @@
from sqlalchemy.engine import create_engine, Engine
from sqlalchemy.ext.asyncio import create_async_engine, AsyncEngine
from typing import Literal, Tuple, Union, overload
from ...config import SQLServerDatabaseConfig
from ...enums import Connection


class SQLServerEngineManager:
    def __init__(self, config: SQLServerDatabaseConfig) -> None:
        self.config = config
        self._async_engine: AsyncEngine = self._init(Connection.ASYNC)
        self._sync_engine: Engine = self._init(Connection.SYNC)

    @overload
    def _init(self, connection: Literal[Connection.ASYNC]) -> AsyncEngine: ...
    @overload
    def _init(self, connection: Literal[Connection.SYNC]) -> Engine: ...
    def _init(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[AsyncEngine, Engine]:
        url = self.config.connection.make_url(connection)

        pooling_kwargs = self.config.pooling.model_dump(
            exclude={
                "strategy",
                "connection_timeout",  # Goes in URL
                "command_timeout",  # Goes in URL
                "packet_size",  # Goes in URL
                "trust_server_certificate",  # Goes in URL
            },
            exclude_none=True,
        )

        engine_kwargs = {
            "echo": self.config.connection.echo,
            **pooling_kwargs,  # This includes encrypt, which is valid
        }

        if connection is Connection.ASYNC:
            self._async_engine = create_async_engine(url, **engine_kwargs)
            return self._async_engine
        elif connection is Connection.SYNC:
            self._sync_engine = create_engine(url, **engine_kwargs)
            return self._sync_engine

    @overload
    def get(self, connection: Literal[Connection.ASYNC]) -> AsyncEngine: ...
    @overload
    def get(self, connection: Literal[Connection.SYNC]) -> Engine: ...
    def get(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[AsyncEngine, Engine]:
        if connection is Connection.ASYNC:
            return self._async_engine
        elif connection is Connection.SYNC:
            return self._sync_engine

    def get_all(self) -> Tuple[AsyncEngine, Engine]:
        return (self._async_engine, self._sync_engine)

    async def dispose(self):
        await self._async_engine.dispose()
        self._sync_engine.dispose()
maleo/database/managers/session.py
@@ -0,0 +1,123 @@
from contextlib import (
    AbstractAsyncContextManager,
    AbstractContextManager,
    asynccontextmanager,
    contextmanager,
)
from pydantic import ValidationError
from sqlalchemy.engine import Engine
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, AsyncSession
from sqlalchemy.orm import sessionmaker, Session
from typing import AsyncGenerator, Generator, Literal, Tuple, Union, overload
from ..enums import Connection


class SessionManager:
    def __init__(self, engines: Tuple[AsyncEngine, Engine]) -> None:
        self._async_engine, self._sync_engine = engines
        self._async_sessionmaker: async_sessionmaker[AsyncSession] = async_sessionmaker[
            AsyncSession
        ](bind=self._async_engine, expire_on_commit=True)
        self._sync_sessionmaker: sessionmaker[Session] = sessionmaker[Session](
            bind=self._sync_engine, expire_on_commit=True
        )

    async def _async_session_handler(self) -> AsyncGenerator[AsyncSession, None]:
        """Async session handler with proper error handling."""
        session = self._async_sessionmaker()
        try:
            yield session
            await session.commit()
        except (SQLAlchemyError, ValidationError, Exception):
            await session.rollback()
            raise
        finally:
            await session.close()

    def _sync_session_handler(self) -> Generator[Session, None, None]:
        """Sync session handler with proper error handling."""
        session = self._sync_sessionmaker()
        try:
            yield session
            session.commit()
        except (SQLAlchemyError, ValidationError, Exception):
            session.rollback()
            raise
        finally:
            session.close()

    # Overloaded inject methods
    @overload
    def inject(
        self, connection: Literal[Connection.ASYNC]
    ) -> AsyncGenerator[AsyncSession, None]: ...

    @overload
    def inject(
        self, connection: Literal[Connection.SYNC]
    ) -> Generator[Session, None, None]: ...

    def inject(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[AsyncGenerator[AsyncSession, None], Generator[Session, None, None]]:
        """Returns a generator for dependency injection."""
        if connection is Connection.ASYNC:
            return self._async_session_handler()
        else:
            return self._sync_session_handler()

    # Overloaded context manager methods
    @overload
    def get(
        self, connection: Literal[Connection.ASYNC]
    ) -> AbstractAsyncContextManager[AsyncSession]: ...

    @overload
    def get(
        self, connection: Literal[Connection.SYNC]
    ) -> AbstractContextManager[Session]: ...

    def get(
        self, connection: Connection = Connection.ASYNC
    ) -> Union[
        AbstractAsyncContextManager[AsyncSession], AbstractContextManager[Session]
    ]:
        """Context manager for manual session handling."""
        if connection is Connection.ASYNC:
            return self._async_context_manager()
        else:
            return self._sync_context_manager()

    @asynccontextmanager
    async def _async_context_manager(self) -> AsyncGenerator[AsyncSession, None]:
        """Async context manager implementation."""
        async for session in self._async_session_handler():
            yield session

    @contextmanager
    def _sync_context_manager(self) -> Generator[Session, None, None]:
        """Sync context manager implementation."""
        yield from self._sync_session_handler()

    # Alternative: More explicit methods
    @asynccontextmanager
    async def get_async(self) -> AsyncGenerator[AsyncSession, None]:
        """Explicit async context manager."""
        async for session in self._async_session_handler():
            yield session

    @contextmanager
    def get_sync(self) -> Generator[Session, None, None]:
        """Explicit sync context manager."""
        yield from self._sync_session_handler()
        # with self._sync_session_handler() as session:
        #     yield session

    def inject_async(self) -> AsyncGenerator[AsyncSession, None]:
        """Explicit async dependency injection."""
        return self._async_session_handler()

    def inject_sync(self) -> Generator[Session, None, None]:
        """Explicit sync dependency injection."""
        return self._sync_session_handler()
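The SessionManager above consumes the (AsyncEngine, Engine) pair that the engine managers expose through get_all(). A hedged end-to-end sketch (editor illustration, not package code; the config placeholder again stands in for fields not shown in this excerpt):

```python
# Editor's sketch (not package code): wiring an engine manager into SessionManager.
import asyncio

from maleo.database.config import SQLiteDatabaseConfig
from maleo.database.managers.engines.sqlite import SQLiteEngineManager
from maleo.database.managers.session import SessionManager


async def main() -> None:
    engine_manager = SQLiteEngineManager(SQLiteDatabaseConfig(...))  # config fields not shown here
    sessions = SessionManager(engines=engine_manager.get_all())

    # Explicit async context manager: commits on success, rolls back on error.
    async with sessions.get_async() as session:
        ...  # run queries with `session` (AsyncSession)

    # Sync counterpart built on the same handler logic.
    with sessions.get_sync() as session:
        ...  # run queries with `session` (Session)

    await engine_manager.dispose()


asyncio.run(main())
```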
File without changes
File without changes
File without changes
maleo/database/orm/models/mixins/identifier.py
@@ -0,0 +1,11 @@
from sqlalchemy.dialects.postgresql import UUID as PostgresUUID
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.types import Integer
from uuid import UUID as PythonUUID, uuid4


class DataIdentifier:
    id: Mapped[int] = mapped_column("id", Integer, primary_key=True)
    uuid: Mapped[PythonUUID] = mapped_column(
        "uuid", PostgresUUID(as_uuid=True), default=uuid4, unique=True, nullable=False
    )
maleo/database/orm/models/mixins/status.py
@@ -0,0 +1,12 @@
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.types import Enum
from maleo.enums.status import DataStatus as DataStatusEnum


class DataStatus:
    status: Mapped[DataStatusEnum] = mapped_column(
        "status",
        Enum(DataStatusEnum, name="statustype", create_constraints=True),
        default=DataStatusEnum.ACTIVE,
        nullable=False,
    )
maleo/database/orm/models/mixins/timestamp.py
@@ -0,0 +1,65 @@
from datetime import datetime
from sqlalchemy import func
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.types import TIMESTAMP
from maleo.types.base.datetime import OptionalDatetime


class CreateTimestamp:
    created_at: Mapped[datetime] = mapped_column(
        "created_at",
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
    )


class UpdateTimestamp:
    updated_at: Mapped[datetime] = mapped_column(
        "updated_at",
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        onupdate=func.now(),
        nullable=False,
    )


class LifecyleTimestamp(UpdateTimestamp, CreateTimestamp):
    pass


class DeleteTimestamp:
    deleted_at: Mapped[OptionalDatetime] = mapped_column(
        "deleted_at", TIMESTAMP(timezone=True)
    )


class RestoreTimestamp:
    restored_at: Mapped[OptionalDatetime] = mapped_column(
        "restored_at", TIMESTAMP(timezone=True)
    )


class DeactivateTimestamp:
    deactivated_at: Mapped[OptionalDatetime] = mapped_column(
        "deactivated_at", TIMESTAMP(timezone=True)
    )


class ActivateTimestamp:
    activated_at: Mapped[datetime] = mapped_column(
        "activated_at",
        TIMESTAMP(timezone=True),
        server_default=func.now(),
        nullable=False,
    )


class StatusTimestamp(
    ActivateTimestamp, DeactivateTimestamp, RestoreTimestamp, DeleteTimestamp
):
    pass


class DataTimestamp(StatusTimestamp, LifecyleTimestamp):
    pass
maleo/database/orm/models/table.py
@@ -0,0 +1,17 @@
from sqlalchemy.orm import declared_attr
from maleo.utils.formatters.case import to_snake
from .mixins.identifier import DataIdentifier
from .mixins.status import DataStatus
from .mixins.timestamp import DataTimestamp


class BaseTable:
    __abstract__ = True

    @declared_attr  # type: ignore
    def __tablename__(cls) -> str:
        return to_snake(cls.__name__)  # type: ignore


class DataTable(DataStatus, DataTimestamp, DataIdentifier):
    pass
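Taken together, BaseTable supplies a snake_case __tablename__ and DataTable bundles the identifier, status, and timestamp mixins. A hedged sketch of a concrete model (editor illustration, not package code; maleo/database/orm/base.py, listed above with 7 added lines, is not reproduced in this excerpt, so a plain SQLAlchemy DeclarativeBase stands in for it):

```python
# Editor's sketch (not package code). A plain DeclarativeBase is assumed here
# because orm/base.py is not reproduced in this excerpt.
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from sqlalchemy.types import String

from maleo.database.orm.models.table import BaseTable, DataTable


class Base(DeclarativeBase):
    pass


class UserProfile(BaseTable, DataTable, Base):
    # BaseTable derives __tablename__ = "user_profile" via to_snake.
    # DataTable contributes id, uuid, status, and the lifecycle/status timestamps.
    name: Mapped[str] = mapped_column("name", String(255), nullable=False)
```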
maleo/database/orm/queries.py
@@ -0,0 +1,234 @@
from sqlalchemy import Column, Table
from sqlalchemy.ext.declarative import DeclarativeMeta
from sqlalchemy.orm import Query, Session, aliased
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.sql.expression import or_, asc, cast, desc
from sqlalchemy.types import DATE, String, TEXT, TIMESTAMP
from typing import Sequence, Type
from maleo.enums.sort import Order
from maleo.mixins.general import DateFilter, SortColumn
from maleo.types.base.any import OptionalAny
from maleo.types.base.boolean import OptionalBoolean
from maleo.types.base.integer import OptionalListOfIntegers
from maleo.types.base.string import OptionalListOfStrings, OptionalString
from maleo.types.enums.status import OptionalListOfDataStatuses


def filter_column(
    query: Query,
    table: Type[DeclarativeMeta],
    column: str,
    value: OptionalAny = None,
    include_null: bool = False,
) -> Query:
    column_attr = getattr(table, column, None)
    if column_attr is None or not isinstance(column_attr, InstrumentedAttribute):
        return query

    value_filters = []
    if value is not None:
        value_filters.extend([column_attr == val for val in value])

    if value_filters:
        if include_null:
            value_filters.append(column_attr.is_(None))
        query = query.filter(or_(*value_filters))

    return query


def filter_ids(
    query: Query,
    table: Type[DeclarativeMeta],
    column: str,
    ids: OptionalListOfIntegers = None,
    include_null: bool = False,
) -> Query:
    column_attr = getattr(table, column, None)
    if column_attr is None or not isinstance(column_attr, InstrumentedAttribute):
        return query

    id_filters = []
    if ids is not None:
        id_filters.extend([column_attr == id for id in ids])

    if id_filters:
        if include_null:
            id_filters.append(column_attr.is_(None))
        query = query.filter(or_(*id_filters))

    return query


def filter_timestamps(
    query: Query,
    table: Type[DeclarativeMeta],  # type: ignore
    date_filters: Sequence[DateFilter],
) -> Query:
    if date_filters and len(date_filters) > 0:
        for date_filter in date_filters:
            try:
                table: Table = table.__table__  # type: ignore
                column: Column = table.columns[date_filter.name]
                column_attr: InstrumentedAttribute = getattr(table, date_filter.name)
                if isinstance(column.type, (TIMESTAMP, DATE)):
                    if date_filter.from_date and date_filter.to_date:
                        query = query.filter(
                            column_attr.between(
                                date_filter.from_date, date_filter.to_date
                            )
                        )
                    elif date_filter.from_date:
                        query = query.filter(column_attr >= date_filter.from_date)
                    elif date_filter.to_date:
                        query = query.filter(column_attr <= date_filter.to_date)
            except KeyError:
                continue
    return query


def filter_statuses(
    query: Query,
    table: Type[DeclarativeMeta],
    statuses: OptionalListOfDataStatuses,
) -> Query:
    if statuses is not None:
        status_filters = [table.status == status for status in statuses]  # type: ignore
        query = query.filter(or_(*status_filters))
    return query


def filter_is_root(
    query: Query,
    table: Type[DeclarativeMeta],
    parent_column: str = "parent_id",
    is_root: OptionalBoolean = None,
) -> Query:
    parent_attr = getattr(table, parent_column, None)
    if parent_attr is None or not isinstance(parent_attr, InstrumentedAttribute):
        return query
    if is_root is not None:
        query = query.filter(
            parent_attr.is_(None) if is_root else parent_attr.is_not(None)
        )
    return query


def filter_is_parent(
    session: Session,
    query: Query,
    table: Type[DeclarativeMeta],
    id_column: str = "id",
    parent_column: str = "parent_id",
    is_parent: OptionalBoolean = None,
) -> Query:
    id_attr = getattr(table, id_column, None)
    if id_attr is None or not isinstance(id_attr, InstrumentedAttribute):
        return query
    parent_attr = getattr(table, parent_column, None)
    if parent_attr is None or not isinstance(parent_attr, InstrumentedAttribute):
        return query
    if is_parent is not None:
        child_table = aliased(table)
        child_parent_attr = getattr(child_table, parent_column)
        subq = session.query(child_table).filter(child_parent_attr == id_attr).exists()
        query = query.filter(subq if is_parent else ~subq)
    return query


def filter_is_child(
    query: Query,
    table: Type[DeclarativeMeta],
    parent_column: str = "parent_id",
    is_child: OptionalBoolean = None,
) -> Query:
    parent_attr = getattr(table, parent_column, None)
    if parent_attr is None or not isinstance(parent_attr, InstrumentedAttribute):
        return query
    if is_child is not None:
        query = query.filter(
            parent_attr.is_not(None) if is_child else parent_attr.is_(None)
        )
    return query


def filter_is_leaf(
    session: Session,
    query: Query,
    table: Type[DeclarativeMeta],
    id_column: str = "id",
    parent_column: str = "parent_id",
    is_leaf: OptionalBoolean = None,
) -> Query:
    id_attr = getattr(table, id_column, None)
    if id_attr is None or not isinstance(id_attr, InstrumentedAttribute):
        return query
    parent_attr = getattr(table, parent_column, None)
    if parent_attr is None or not isinstance(parent_attr, InstrumentedAttribute):
        return query
    if is_leaf is not None:
        child_table = aliased(table)
        child_parent_attr = getattr(child_table, parent_column)
        subq = session.query(child_table).filter(child_parent_attr == id_attr).exists()
        query = query.filter(~subq if is_leaf else subq)
    return query


def search(
    query: Query,
    table: Type[DeclarativeMeta],
    search: OptionalString = None,
    columns: OptionalListOfStrings = None,
) -> Query:
    if search is None:
        return query

    search_term = f"%{search}%"
    sqla_table: Table = table.__table__  # type: ignore
    search_filters = []

    for name, attr in vars(table).items():
        # Only consider InstrumentedAttribute (mapped columns)
        if not isinstance(attr, InstrumentedAttribute):
            continue

        try:
            column: Column = sqla_table.columns[name]
        except KeyError:
            continue

        # Skip columns not in the user-provided list
        if columns is not None and name not in columns:
            continue

        # Only allow string/TEXT columns
        if isinstance(column.type, (String, TEXT)):
            search_filters.append(cast(attr, TEXT).ilike(search_term))

    if search_filters:
        query = query.filter(or_(*search_filters))

    return query


def sort(
    query: Query,
    table: Type[DeclarativeMeta],
    sort_columns: Sequence[SortColumn],
) -> Query:
    for sort_column in sort_columns:
        try:
            sort_col = getattr(table, sort_column.name)
            sort_col = (
                asc(sort_col) if sort_column.order is Order.ASC else desc(sort_col)
            )
            query = query.order_by(sort_col)
        except AttributeError:
            continue
    return query


def paginate(query: Query, page: int, limit: int) -> Query:
    offset: int = int((page - 1) * limit)  # Calculate offset based on page
    query = query.limit(limit=limit).offset(offset=offset)
    return query
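These helpers compose on a legacy-style Query object. A hedged sketch of chaining them (editor illustration, not package code; UserProfile is the hypothetical model from the previous sketch, and the SortColumn constructor is assumed to accept the name and order fields that sort() reads):

```python
# Editor's sketch (not package code). UserProfile would come from wherever the
# application defines its models; SortColumn's constructor arguments are assumed.
from sqlalchemy.orm import Session

from maleo.database.orm import queries
from maleo.enums.sort import Order
from maleo.mixins.general import SortColumn


def list_user_profiles(session: Session, page: int = 1, limit: int = 20, keyword=None):
    query = session.query(UserProfile)
    query = queries.search(query, UserProfile, search=keyword, columns=["name"])  # ILIKE on string columns
    query = queries.sort(
        query, UserProfile, sort_columns=[SortColumn(name="created_at", order=Order.ASC)]
    )
    query = queries.paginate(query, page=page, limit=limit)  # LIMIT/OFFSET from page number
    return query.all()
```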
maleo_database-0.0.1.dist-info/METADATA
@@ -0,0 +1,93 @@
Metadata-Version: 2.4
Name: maleo-database
Version: 0.0.1
Summary: Database package for MaleoSuite
Author-email: Agra Bima Yuda <agra@nexmedis.com>
License: Proprietary
Requires-Python: >=3.12
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: aioredis>=2.0.1
Requires-Dist: annotated-types>=0.7.0
Requires-Dist: anyio>=4.10.0
Requires-Dist: async-timeout>=5.0.1
Requires-Dist: black>=25.1.0
Requires-Dist: cachetools>=5.5.2
Requires-Dist: certifi>=2025.8.3
Requires-Dist: cffi>=1.17.1
Requires-Dist: cfgv>=3.4.0
Requires-Dist: click>=8.2.1
Requires-Dist: cryptography>=45.0.6
Requires-Dist: distlib>=0.4.0
Requires-Dist: dnspython>=2.7.0
Requires-Dist: elastic-transport>=9.1.0
Requires-Dist: elasticsearch>=9.1.0
Requires-Dist: fastapi>=0.116.1
Requires-Dist: filelock>=3.19.1
Requires-Dist: google-auth>=2.40.3
Requires-Dist: greenlet>=3.2.4
Requires-Dist: identify>=2.6.13
Requires-Dist: idna>=3.10
Requires-Dist: maleo-constants>=0.0.2
Requires-Dist: maleo-enums>=0.0.2
Requires-Dist: maleo-mixins>=0.0.2
Requires-Dist: maleo-types-base>=0.0.2
Requires-Dist: maleo-types-enums>=0.0.2
Requires-Dist: maleo-utils>=0.0.3
Requires-Dist: motor>=3.7.1
Requires-Dist: mypy_extensions>=1.1.0
Requires-Dist: nodeenv>=1.9.1
Requires-Dist: packaging>=25.0
Requires-Dist: pathspec>=0.12.1
Requires-Dist: platformdirs>=4.4.0
Requires-Dist: pre_commit>=4.3.0
Requires-Dist: pyasn1>=0.6.1
Requires-Dist: pyasn1_modules>=0.4.2
Requires-Dist: pycparser>=2.22
Requires-Dist: pycryptodome>=3.23.0
Requires-Dist: pydantic>=2.11.7
Requires-Dist: pydantic_core>=2.33.2
Requires-Dist: pymongo>=4.14.1
Requires-Dist: python-dateutil>=2.9.0.post0
Requires-Dist: PyYAML>=6.0.2
Requires-Dist: redis>=6.4.0
Requires-Dist: rsa>=4.9.1
Requires-Dist: six>=1.17.0
Requires-Dist: sniffio>=1.3.1
Requires-Dist: SQLAlchemy>=2.0.43
Requires-Dist: starlette>=0.47.3
Requires-Dist: typing-inspection>=0.4.1
Requires-Dist: typing_extensions>=4.15.0
Requires-Dist: urllib3>=2.5.0
Requires-Dist: virtualenv>=20.34.0
Dynamic: license-file

# README #

This README would normally document whatever steps are necessary to get your application up and running.

### What is this repository for? ###

* Quick summary
* Version
* [Learn Markdown](https://bitbucket.org/tutorials/markdowndemo)

### How do I get set up? ###

* Summary of set up
* Configuration
* Dependencies
* Database configuration
* How to run tests
* Deployment instructions

### Contribution guidelines ###

* Writing tests
* Code review
* Other guidelines

### Who do I talk to? ###

* Repo owner or admin
* Other community or team contact