forktex-core-py 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- forktex_core_py-0.1.0/PKG-INFO +14 -0
- forktex_core_py-0.1.0/README.md +35 -0
- forktex_core_py-0.1.0/pyproject.toml +34 -0
- forktex_core_py-0.1.0/src/forktex_core/__init__.py +3 -0
- forktex_core_py-0.1.0/src/forktex_core/psql/__init__.py +47 -0
- forktex_core_py-0.1.0/src/forktex_core/psql/connection.py +112 -0
- forktex_core_py-0.1.0/src/forktex_core/psql/crud.py +253 -0
- forktex_core_py-0.1.0/src/forktex_core/psql/models.py +167 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/__init__.py +40 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/connection.py +51 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/decorators.py +58 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/namespaces.py +31 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/ops.py +173 -0
- forktex_core_py-0.1.0/src/forktex_core/redis/serialization.py +22 -0
- forktex_core_py-0.1.0/tests/__init__.py +0 -0
- forktex_core_py-0.1.0/tests/conftest.py +54 -0
- forktex_core_py-0.1.0/tests/test_psql/__init__.py +0 -0
- forktex_core_py-0.1.0/tests/test_psql/test_connection.py +69 -0
- forktex_core_py-0.1.0/tests/test_psql/test_crud.py +76 -0
- forktex_core_py-0.1.0/tests/test_psql/test_models.py +67 -0
- forktex_core_py-0.1.0/tests/test_redis/__init__.py +0 -0
- forktex_core_py-0.1.0/tests/test_redis/test_cache.py +96 -0
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: forktex-core-py
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Shared database (PostgreSQL) and cache (Redis) primitives for the FORKTEX ecosystem
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Requires-Dist: asyncpg>=0.29
|
|
7
|
+
Requires-Dist: pydantic>=2.0
|
|
8
|
+
Requires-Dist: sqlalchemy[asyncio]>=2.0
|
|
9
|
+
Provides-Extra: dev
|
|
10
|
+
Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
|
|
11
|
+
Requires-Dist: pytest>=8.0; extra == 'dev'
|
|
12
|
+
Requires-Dist: ruff>=0.7.0; extra == 'dev'
|
|
13
|
+
Provides-Extra: redis
|
|
14
|
+
Requires-Dist: redis[hiredis]>=5.0; extra == 'redis'
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
# forktex-core
|
|
2
|
+
|
|
3
|
+
Shared database (PostgreSQL) and cache (Redis) primitives for the FORKTEX ecosystem.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install forktex-core-py              # psql only
|
|
9
|
+
pip install "forktex-core-py[redis]"     # psql + redis
|
|
10
|
+
```
|
|
11
|
+
|
|
12
|
+
## PostgreSQL
|
|
13
|
+
|
|
14
|
+
```python
|
|
15
|
+
from forktex_core.psql import init_engine, close_engine, get_session, BaseDBModel, TimestampMixin, AuditMixin
|
|
16
|
+
|
|
17
|
+
# Initialize in app lifespan
|
|
18
|
+
init_engine("postgresql+asyncpg://user:pass@localhost/db")
|
|
19
|
+
|
|
20
|
+
# Use in services
|
|
21
|
+
async with get_session() as session:
|
|
22
|
+
user = await session.get(User, user_id)
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## Redis
|
|
26
|
+
|
|
27
|
+
```python
|
|
28
|
+
from forktex_core.redis import init, close, cached
|
|
29
|
+
|
|
30
|
+
await init("redis://localhost:6379/0")
|
|
31
|
+
|
|
32
|
+
@cached(ttl=300)
|
|
33
|
+
async def get_profile(user_id: str):
|
|
34
|
+
...
|
|
35
|
+
```
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["hatchling"]
|
|
3
|
+
build-backend = "hatchling.build"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "forktex-core-py"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "Shared database (PostgreSQL) and cache (Redis) primitives for the FORKTEX ecosystem"
|
|
9
|
+
requires-python = ">=3.11"
|
|
10
|
+
dependencies = [
|
|
11
|
+
"sqlalchemy[asyncio]>=2.0",
|
|
12
|
+
"asyncpg>=0.29",
|
|
13
|
+
"pydantic>=2.0",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
[project.optional-dependencies]
|
|
17
|
+
redis = ["redis[hiredis]>=5.0"]
|
|
18
|
+
dev = [
|
|
19
|
+
"pytest>=8.0",
|
|
20
|
+
"pytest-asyncio>=0.24",
|
|
21
|
+
"ruff>=0.7.0",
|
|
22
|
+
]
|
|
23
|
+
|
|
24
|
+
[tool.hatch.build.targets.wheel]
|
|
25
|
+
packages = ["src/forktex_core"]
|
|
26
|
+
|
|
27
|
+
[tool.ruff]
|
|
28
|
+
target-version = "py311"
|
|
29
|
+
line-length = 120
|
|
30
|
+
|
|
31
|
+
[tool.pytest.ini_options]
|
|
32
|
+
asyncio_mode = "auto"
|
|
33
|
+
asyncio_default_fixture_loop_scope = "session"
|
|
34
|
+
asyncio_default_test_loop_scope = "session"
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""PostgreSQL async primitives: connection, base models, CRUD utilities."""
|
|
2
|
+
|
|
3
|
+
from forktex_core.psql.connection import (
|
|
4
|
+
init_engine,
|
|
5
|
+
close_engine,
|
|
6
|
+
get_session,
|
|
7
|
+
with_transactional_session,
|
|
8
|
+
)
|
|
9
|
+
from forktex_core.psql.models import (
|
|
10
|
+
BaseDBModel,
|
|
11
|
+
ReprMixin,
|
|
12
|
+
TimestampMixin,
|
|
13
|
+
AuditMixin,
|
|
14
|
+
JsonModelColumn,
|
|
15
|
+
)
|
|
16
|
+
from forktex_core.psql.crud import (
|
|
17
|
+
PageResponse,
|
|
18
|
+
ScrollResponse,
|
|
19
|
+
ConflictError,
|
|
20
|
+
get,
|
|
21
|
+
list_all,
|
|
22
|
+
paginate,
|
|
23
|
+
paginate_scroll,
|
|
24
|
+
find_one_by,
|
|
25
|
+
create,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
__all__ = [
|
|
29
|
+
"init_engine",
|
|
30
|
+
"close_engine",
|
|
31
|
+
"get_session",
|
|
32
|
+
"with_transactional_session",
|
|
33
|
+
"BaseDBModel",
|
|
34
|
+
"ReprMixin",
|
|
35
|
+
"TimestampMixin",
|
|
36
|
+
"AuditMixin",
|
|
37
|
+
"JsonModelColumn",
|
|
38
|
+
"PageResponse",
|
|
39
|
+
"ScrollResponse",
|
|
40
|
+
"ConflictError",
|
|
41
|
+
"get",
|
|
42
|
+
"list_all",
|
|
43
|
+
"paginate",
|
|
44
|
+
"paginate_scroll",
|
|
45
|
+
"find_one_by",
|
|
46
|
+
"create",
|
|
47
|
+
]
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""Async SQLAlchemy engine and session management for PostgreSQL.
|
|
2
|
+
|
|
3
|
+
Unified connection module for the FORKTEX ecosystem API archetypes.
|
|
4
|
+
Supports configurable engine kwargs for pool tuning.
|
|
5
|
+
|
|
6
|
+
Usage:
|
|
7
|
+
# Basic (network/cloud style — default pool settings)
|
|
8
|
+
init_engine("postgresql+asyncpg://user:pass@host/db")
|
|
9
|
+
|
|
10
|
+
# With pool tuning (intelligence style)
|
|
11
|
+
init_engine(
|
|
12
|
+
"postgresql+asyncpg://user:pass@host/db",
|
|
13
|
+
pool_size=20,
|
|
14
|
+
max_overflow=10,
|
|
15
|
+
pool_pre_ping=True,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
# FastAPI lifespan
|
|
19
|
+
@asynccontextmanager
|
|
20
|
+
async def lifespan(app):
|
|
21
|
+
init_engine(settings.db_url)
|
|
22
|
+
yield
|
|
23
|
+
await close_engine()
|
|
24
|
+
|
|
25
|
+
# Route handler
|
|
26
|
+
async def my_route(session: AsyncSession = Depends(get_session)):
|
|
27
|
+
...
|
|
28
|
+
|
|
29
|
+
# Service layer
|
|
30
|
+
@with_transactional_session
|
|
31
|
+
async def my_service(session: AsyncSession, ...):
|
|
32
|
+
...
|
|
33
|
+
"""
|
|
34
|
+
|
|
35
|
+
from contextlib import asynccontextmanager
|
|
36
|
+
import functools
|
|
37
|
+
from typing import AsyncGenerator, Optional
|
|
38
|
+
|
|
39
|
+
from sqlalchemy.ext.asyncio import (
|
|
40
|
+
async_sessionmaker,
|
|
41
|
+
create_async_engine,
|
|
42
|
+
AsyncSession,
|
|
43
|
+
AsyncEngine,
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# Module-level references (set by init_engine).
|
|
48
|
+
engine: Optional[AsyncEngine] = None
|
|
49
|
+
_async_sessionmaker: Optional[async_sessionmaker] = None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def init_engine(db_url: str, *, echo: bool = False, **engine_kwargs) -> async_sessionmaker:
    """Create the module-level async engine and session factory.

    Args:
        db_url: SQLAlchemy async database URL (e.g. postgresql+asyncpg://...).
        echo: Echo SQL statements to stdout.
        **engine_kwargs: Forwarded verbatim to create_async_engine
            (pool_size, max_overflow, pool_pre_ping, pool_recycle, ...).

    Returns:
        The configured async_sessionmaker.
    """
    global engine, _async_sessionmaker
    # Build both objects locally first, then publish them to module state.
    new_engine = create_async_engine(db_url, echo=echo, **engine_kwargs)
    factory = async_sessionmaker(bind=new_engine, expire_on_commit=False, class_=AsyncSession)
    engine = new_engine
    _async_sessionmaker = factory
    return factory
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
async def close_engine() -> None:
    """Dispose the engine and clear module state on app shutdown.

    Safe to call multiple times; a no-op when nothing is initialized.
    """
    global engine, _async_sessionmaker
    if engine is not None:
        await engine.dispose()
        engine = None
    # Bug fix: also drop the sessionmaker. The original left it bound to the
    # disposed engine, so get_session()'s "initialized" check still passed
    # after shutdown and handed out sessions backed by a closed pool.
    _async_sessionmaker = None
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@asynccontextmanager
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a transactional async session. Commits on success, rolls back on error.

    Intended for use as a plain async context manager in service code.

    NOTE(review): the original docstring claimed direct use via FastAPI
    ``Depends(get_session)``; because of the @asynccontextmanager wrapper,
    FastAPI would inject the context-manager object rather than the session.
    Confirm route handlers wrap this in an undecorated generator dependency.

    Raises:
        RuntimeError: if init_engine() has not been called.
    """
    if _async_sessionmaker is None:
        # Raise explicitly instead of `assert`: asserts are stripped under
        # `python -O`, which would turn this misconfiguration into an opaque
        # "'NoneType' object is not callable" instead of a clear message.
        raise RuntimeError("Engine/sessionmaker not initialized — call init_engine() first")
    async with _async_sessionmaker() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            # Any error raised in the caller's block (or during commit) rolls
            # the transaction back and propagates unchanged.
            await session.rollback()
            raise
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def with_transactional_session(func):
    """Decorator that injects a fresh session as the first arg when absent.

    If the caller already supplies a session — either as the first positional
    argument (an ``AsyncSession``) or as the ``session`` keyword — the wrapped
    function is invoked unchanged. Otherwise a new transactional session is
    opened via ``get_session()`` and passed as the first argument.
    """

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        caller_supplied = "session" in kwargs or (
            bool(args) and isinstance(args[0], AsyncSession)
        )
        if caller_supplied:
            return await func(*args, **kwargs)
        async with get_session() as fresh_session:
            return await func(fresh_session, *args, **kwargs)

    return wrapper
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""Generic async CRUD utilities for SQLAlchemy.
|
|
2
|
+
|
|
3
|
+
Provides paginated queries (page-based and cursor-based), single-record
|
|
4
|
+
lookups, and creation helpers with conflict detection.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from math import ceil
|
|
8
|
+
from typing import Any, Callable, Generic, List, Optional, Type, TypeVar
|
|
9
|
+
|
|
10
|
+
from pydantic import BaseModel
|
|
11
|
+
from sqlalchemy import ColumnElement, Select, func, select
|
|
12
|
+
from sqlalchemy.exc import IntegrityError
|
|
13
|
+
from sqlalchemy.ext.asyncio import AsyncSession
|
|
14
|
+
from sqlalchemy.orm import InstrumentedAttribute
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
T = TypeVar("T")
|
|
18
|
+
U = TypeVar("U")
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# ---------------------------------------------------------------------------
|
|
22
|
+
# Response containers
|
|
23
|
+
# ---------------------------------------------------------------------------
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class PageResponse(BaseModel, Generic[T]):
    """Envelope for page-based pagination results."""

    data: list[T]                       # the rows on this page
    has_more: bool                      # whether pages exist beyond this one
    limit: int                          # page size used for the query
    total_count: Optional[int] = None   # total rows across all pages
    current_page: Optional[int] = None  # 1-based page index
    total_pages: Optional[int] = None   # ceil(total_count / limit)

    def apply_to_page_data(self, map_func: Callable[[T], U]) -> "PageResponse[U]":
        """Return a new page with each item passed through ``map_func``;
        all paging metadata is carried over unchanged."""
        metadata = self.model_dump(exclude={"data"})
        return PageResponse[U](data=list(map(map_func, self.data)), **metadata)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class ScrollResponse(BaseModel, Generic[T]):
    """Envelope for cursor-based (scroll) pagination results."""

    data: list[T]                  # the rows in this scroll window
    limit: int                     # requested window size
    has_more: bool                 # whether more rows follow this window
    next_cursor: str | None = None  # opaque cursor for the next window, if any

    def apply_to_scroll_data(self, map_func: Callable[[T], U]) -> "ScrollResponse[U]":
        """Return a new response with each item passed through ``map_func``;
        limit/has_more/next_cursor are carried over unchanged."""
        metadata = self.model_dump(exclude={"data"})
        return ScrollResponse[U](data=list(map(map_func, self.data)), **metadata)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# ---------------------------------------------------------------------------
|
|
65
|
+
# Exceptions
|
|
66
|
+
# ---------------------------------------------------------------------------
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class ConflictError(Exception):
    """Signals that an INSERT/UPDATE violated a uniqueness/integrity constraint.

    Raised by ``create`` when the underlying ``IntegrityError`` fires; the
    original driver message is preserved as the exception text.
    """
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
# ---------------------------------------------------------------------------
|
|
76
|
+
# Single-record operations
|
|
77
|
+
# ---------------------------------------------------------------------------
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
async def get(
    session: AsyncSession,
    model: Type[T],
    value: Any,
    *,
    key: str = "id",
    options: list | None = None,
) -> T | None:
    """Fetch a single record where ``model.<key> == value``; None if absent.

    Args:
        session: Active async session.
        model: ORM model class to query.
        value: Value to match against the column named ``key``.
        key: Column/attribute name to filter on (defaults to primary key "id").
        options: Optional loader options (e.g. selectinload) applied to the query.

    Raises:
        AttributeError: if ``model`` has no attribute named ``key``.
    """
    try:
        column = getattr(model, key)
    except AttributeError:
        # Re-raise with the model name for a clearer message; `from None`
        # drops the redundant chained traceback.
        raise AttributeError(f"{model.__name__} has no attribute '{key}'") from None

    stmt = select(model).where(column == value)
    if options:
        stmt = stmt.options(*options)
    return (await session.execute(stmt)).scalar_one_or_none()
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
async def find_one_by(
    session: AsyncSession,
    model: Type[T],
    **filters,
) -> Optional[T]:
    """Return the single record matching every keyword filter, or None.

    Filters are passed to ``filter_by`` (equality on column names).
    """
    query = select(model).filter_by(**filters)
    return (await session.execute(query)).scalar_one_or_none()
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
async def list_all(
    session: AsyncSession,
    model: Type[T],
    *,
    options: list | None = None,
) -> List[T]:
    """Fetch every row of ``model`` as a list.

    Unbounded — intended for small lookup tables; avoid on large tables.
    ``options`` are loader options (e.g. selectinload) applied to the query.
    """
    query = select(model)
    if options:
        query = query.options(*options)
    rows = await session.execute(query)
    return list(rows.scalars().all())
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
async def create(
    session: AsyncSession,
    model: Type[T],
    **values,
) -> T:
    """Insert a new ``model`` row built from keyword values and return it.

    The row is flushed (so server defaults and generated keys are assigned)
    and refreshed, but NOT committed — the enclosing session/transaction
    owns the commit.

    Raises:
        ConflictError: when the INSERT violates an integrity constraint;
            the original driver message is preserved.
    """
    instance = model(**values)
    session.add(instance)
    try:
        await session.flush()
    except IntegrityError as exc:
        raise ConflictError(str(exc)) from exc
    # Re-read server-generated columns (defaults, PKs) onto the instance.
    await session.refresh(instance)
    return instance
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
# ---------------------------------------------------------------------------
|
|
141
|
+
# Paginated queries
|
|
142
|
+
# ---------------------------------------------------------------------------
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
async def paginate(
    session: AsyncSession,
    model: Type[T],
    page: int = 1,
    page_size: int = 100,
    conditions: Optional[List[ColumnElement]] = None,
    order_by: Optional[list[ColumnElement]] = None,
    joins: Optional[list[InstrumentedAttribute]] = None,
    options: list | None = None,
) -> "PageResponse[T]":
    """Page-based pagination over ``model`` with a total count.

    Args:
        page: 1-based page index.
        page_size: Rows per page.
        conditions: WHERE clauses, ANDed together.
        order_by: ORDER BY expressions.
        joins: Relationships to LEFT OUTER JOIN before filtering.
        options: Loader options (e.g. selectinload).
    """
    stmt = select(model)
    if options:
        stmt = stmt.options(*options)
    for relationship in joins or []:
        stmt = stmt.outerjoin(relationship)
    if conditions:
        stmt = stmt.where(*conditions)
    if order_by:
        stmt = stmt.order_by(*order_by)
    return await _paginate_query(session, stmt, page, page_size)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
async def paginate_scroll(
    session: AsyncSession,
    model: Type[T],
    limit: int = 20,
    conditions: Optional[List[ColumnElement]] = None,
    order_by: Optional[list[ColumnElement]] = None,
    joins: Optional[list[InstrumentedAttribute]] = None,
    options: list | None = None,
) -> "ScrollResponse[T]":
    """Cursor-style (scroll) pagination: fetches limit+1 rows to detect has_more.

    Args:
        limit: Maximum rows to return in this window.
        conditions: WHERE clauses, ANDed together.
        order_by: ORDER BY expressions.
        joins: Relationships to LEFT OUTER JOIN before filtering.
        options: Loader options (e.g. selectinload).
    """
    stmt = select(model)
    if options:
        stmt = stmt.options(*options)
    for relationship in joins or []:
        stmt = stmt.outerjoin(relationship)
    if conditions:
        stmt = stmt.where(*conditions)
    if order_by:
        stmt = stmt.order_by(*order_by)
    return await _paginate_scroll_query(session, stmt, limit)
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
# ---------------------------------------------------------------------------
|
|
197
|
+
# Internal helpers
|
|
198
|
+
# ---------------------------------------------------------------------------
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
async def _paginate_query(
    session: AsyncSession,
    query: Select[Any],
    page: int = 1,
    page_size: int = 100,
) -> "PageResponse[T]":
    """Run ``query`` with OFFSET/LIMIT paging plus a COUNT(*) over the query.

    Invalid inputs are clamped: page < 1 becomes 1, page_size < 1 becomes 10.
    """
    page = max(page, 1)
    if page_size < 1:
        page_size = 10

    # Count over the full (unpaged) query via a subquery so filters/joins apply.
    count_stmt = select(func.count()).select_from(query.subquery())
    total = (await session.execute(count_stmt)).scalar_one()

    window = query.offset((page - 1) * page_size).limit(page_size)
    items = list((await session.execute(window)).scalars().all())

    return PageResponse(
        data=items,
        has_more=page * page_size < total,
        limit=page_size,
        total_count=total,
        current_page=page,
        total_pages=ceil(total / page_size),
    )
|
231
|
+
|
|
232
|
+
|
|
233
|
+
async def _paginate_scroll_query(
    session: AsyncSession,
    query: Select[Any],
    limit: int = 20,
) -> "ScrollResponse[T]":
    """Fetch ``limit + 1`` rows; the sentinel extra row signals has_more.

    limit < 1 is clamped to 20.
    NOTE(review): ``next_cursor`` is never populated here — callers appear to
    derive their own cursors. Confirm that is intentional.
    """
    if limit < 1:
        limit = 20

    fetched = list((await session.execute(query.limit(limit + 1))).scalars().all())
    overflow = len(fetched) > limit

    return ScrollResponse(
        data=fetched[:limit] if overflow else fetched,
        has_more=overflow,
        limit=limit,
    )
|
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
"""SQLAlchemy base models and mixins.
|
|
2
|
+
|
|
3
|
+
Extracted from the FORKTEX ecosystem. All API projects (network, cloud,
|
|
4
|
+
workflow, intelligence) share these base classes.
|
|
5
|
+
|
|
6
|
+
- ``BaseDBModel``: DeclarativeBase with StrEnum auto-mapping and repr.
|
|
7
|
+
- ``TimestampMixin``: created_at / updated_at with server defaults.
|
|
8
|
+
- ``AuditMixin``: Extends TimestampMixin with created_by_id, updated_by_id,
|
|
9
|
+
soft delete (archived_at / is_active), and archive consistency constraints.
|
|
10
|
+
- ``JsonModelColumn``: Helper for storing Pydantic models in JSON columns.
|
|
11
|
+
|
|
12
|
+
Note: AuditMixin does NOT declare ForeignKey constraints on created_by_id /
|
|
13
|
+
updated_by_id. Each consumer project adds its own FK references via
|
|
14
|
+
``__table_args__`` if needed, since the user table name varies across projects.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import enum
|
|
18
|
+
import uuid
|
|
19
|
+
from datetime import datetime
|
|
20
|
+
from typing import TypeVar, Generic
|
|
21
|
+
|
|
22
|
+
import sqlalchemy as sa
|
|
23
|
+
from pydantic import BaseModel
|
|
24
|
+
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
|
25
|
+
from sqlalchemy.orm.decl_api import declared_attr
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ReprMixin:
    """Mixin providing a debug-friendly ``__repr__``.

    Renders every public entry of the instance ``__dict__`` (names not
    starting with "_") as ``key=value`` pairs, e.g. ``User(id=1, name='a')``.
    """

    def __repr__(self):
        pairs = (
            f"{name}={value!r}"
            for name, value in self.__dict__.items()
            if not name.startswith("_")
        )
        return f"{self.__class__.__name__}({', '.join(pairs)})"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class BaseDBModel(DeclarativeBase, ReprMixin):
    """Base for all SQLAlchemy ORM models.

    Automatically maps ``StrEnum`` to non-native string columns.
    """

    # Store StrEnum annotations as plain VARCHAR(64) (native_enum=False) so
    # adding enum members never requires a database-level ALTER TYPE migration.
    type_annotation_map = {
        enum.StrEnum: sa.Enum(enum.StrEnum, native_enum=False, length=64),
    }
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class TimestampMixin:
    """Adds created_at and updated_at with server-side defaults."""

    # Populated by the database at INSERT time (server_default), not by Python.
    created_at: Mapped[datetime] = mapped_column(
        server_default=sa.func.now(), nullable=False
    )
    # server_default covers INSERT; onupdate refreshes the value on each
    # SQLAlchemy-issued UPDATE. NOTE(review): onupdate is applied per
    # statement by SQLAlchemy, not by a DB trigger — raw SQL updates
    # bypass it; confirm that is acceptable.
    updated_at: Mapped[datetime] = mapped_column(
        server_default=sa.func.now(), onupdate=sa.func.now(), nullable=False
    )
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class AuditMixin(TimestampMixin):
    """Full audit trail with soft delete.

    Provides:
    - created_by_id / updated_by_id as plain UUID columns (no FK — add your own).
    - archived_at / is_active for soft delete.
    - A check constraint enforcing (is_active ⟺ archived_at IS NULL).
    - An optional partial unique index on ``unique_fields`` (active records only).

    Usage::

        class MyModel(BaseDBModel, AuditMixin):
            __tablename__ = "my_model"
            unique_fields = ("org_id", "name")  # optional: partial unique on active rows
            id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
            ...
    """

    # Actor UUIDs are deliberately FK-less; each consumer project points them
    # at its own user table via __table_args__ (see module docstring).
    created_by_id: Mapped[uuid.UUID | None] = mapped_column(
        sa.UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    updated_by_id: Mapped[uuid.UUID | None] = mapped_column(
        sa.UUID(as_uuid=True),
        nullable=True,
        index=True,
    )
    # Soft-delete pair: archived_at records when; is_active is the queryable flag.
    # Their consistency is enforced by the check constraint in __table_args__.
    archived_at: Mapped[datetime | None] = mapped_column(
        sa.DateTime(timezone=True), nullable=True, index=True
    )
    is_active: Mapped[bool] = mapped_column(
        sa.Boolean, nullable=False, default=True, index=True
    )

    def __init_subclass__(cls, **kwargs):
        # Fail fast at class-definition time if the mixin is misused.
        super().__init_subclass__(**kwargs)
        if cls is AuditMixin:
            return
        if not issubclass(cls, BaseDBModel):
            raise TypeError("AuditMixin can only be used with BaseDBModel subclasses.")
        # NOTE(review): this also rejects intermediate abstract bases that
        # defer __tablename__ to their own subclasses — confirm intended.
        if not hasattr(cls, "__tablename__") or not isinstance(cls.__tablename__, str):
            raise TypeError("Classes using AuditMixin must define __tablename__")

    @declared_attr
    def __table_args__(cls):
        table_args = []

        # Partial unique index for "active only" uniqueness: archived rows do
        # not block re-creating a row with the same unique_fields values.
        if getattr(cls, "unique_fields", None):
            table_args.append(
                sa.Index(
                    f"uq_{cls.__tablename__}_active",
                    *cls.unique_fields,
                    unique=True,
                    postgresql_where=sa.text("archived_at IS NULL"),
                )
            )

        # Validity constraint: active ⟺ not archived.
        table_args.append(
            sa.CheckConstraint(
                "(is_active AND archived_at IS NULL) OR "
                "(NOT is_active AND archived_at IS NOT NULL)",
                name=f"ck_{cls.__tablename__}_active_archive_consistency",
            )
        )

        return tuple(table_args)
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
T = TypeVar("T", bound=BaseModel)
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
class JsonModelColumn(Generic[T]):
    """Helpers for SQLAlchemy JSON columns holding lists of Pydantic models.

    ``serialize`` turns models (or plain dicts) into JSON-safe dicts before a
    write; ``deserialize`` rebuilds typed models after a read.

    Usage::

        # Serialize before writing to DB
        row.tags = JsonModelColumn.serialize(tag_models)

        # Deserialize after reading from DB
        tags = JsonModelColumn.deserialize(row.tags, TagModel)
    """

    @staticmethod
    def serialize(models: list[T] | list[dict]) -> list[dict]:
        """Convert each entry to a JSON-compatible dict.

        Pydantic models go through ``model_dump(mode="json")``; plain dicts
        get a shallow coercion of Enum and datetime values.
        """
        serialized: list[dict] = []
        for entry in models:
            if isinstance(entry, BaseModel):
                serialized.append(entry.model_dump(mode="json"))
            else:
                serialized.append(
                    {key: JsonModelColumn._coerce(val) for key, val in entry.items()}
                )
        return serialized

    @staticmethod
    def _coerce(value):
        """Shallow JSON coercion: Enum → .value, datetime → ISO-8601, else as-is."""
        if isinstance(value, enum.Enum):
            return value.value
        if isinstance(value, datetime):
            return value.isoformat()
        return value

    @staticmethod
    def deserialize(data: list[dict], model: type[T]) -> list[T]:
        """Rebuild typed models from stored dicts; None/empty input yields []."""
        return [model(**item) for item in (data or [])]
|