forktex-core 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,14 @@
1
+ Metadata-Version: 2.4
2
+ Name: forktex-core
3
+ Version: 0.1.0
4
+ Summary: Shared database (PostgreSQL) and cache (Redis) primitives for the FORKTEX ecosystem
5
+ Requires-Python: >=3.11
6
+ Requires-Dist: asyncpg>=0.29
7
+ Requires-Dist: pydantic>=2.0
8
+ Requires-Dist: sqlalchemy[asyncio]>=2.0
9
+ Provides-Extra: dev
10
+ Requires-Dist: pytest-asyncio>=0.24; extra == 'dev'
11
+ Requires-Dist: pytest>=8.0; extra == 'dev'
12
+ Requires-Dist: ruff>=0.7.0; extra == 'dev'
13
+ Provides-Extra: redis
14
+ Requires-Dist: redis[hiredis]>=5.0; extra == 'redis'
@@ -0,0 +1,183 @@
1
+ # forktex-core
2
+
3
+ Shared async database (PostgreSQL) and cache (Redis) primitives for the [FORKTEX](https://forktex.com) ecosystem.
4
+
5
+ Extracted from battle-tested patterns across 4 production APIs (Network, Cloud, Intelligence, Workflow) into a single reusable library.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ pip install forktex-core # PostgreSQL only
11
+ pip install forktex-core[redis] # PostgreSQL + Redis
12
+ ```
13
+
14
+ ## Architecture
15
+
16
+ ```
17
+ forktex-core
18
+ psql/ # PostgreSQL async primitives
19
+ connection.py # Engine init, session management, transactional decorator
20
+ models.py # BaseDBModel, TimestampMixin, AuditMixin, JsonModelColumn
21
+ crud.py # PageResponse, ScrollResponse, paginate, get, create, find_one_by
22
+ redis/ # Redis async caching
23
+ connection.py # Init, close, health check
24
+ ops.py # get/set/delete, fetch-or-set, stale-while-revalidate
25
+ decorators.py # @cached decorator with TTL + SWR support
26
+ serialization.py # Pydantic-aware JSON serialization
27
+ namespaces.py # Key prefix management
28
+ ```
29
+
30
+ ## PostgreSQL (`forktex_core.psql`)
31
+
32
+ ### Connection
33
+
34
+ ```python
35
+ from forktex_core.psql import init_engine, close_engine, get_session, with_transactional_session
36
+
37
+ # Initialize — accepts any SQLAlchemy engine kwargs
38
+ init_engine("postgresql+asyncpg://user:pass@localhost/db")
39
+
40
+ # With pool tuning (as used by Intelligence API)
41
+ init_engine("postgresql+asyncpg://...", pool_size=20, max_overflow=10, pool_pre_ping=True)
42
+
43
+ # FastAPI lifespan
44
+ @asynccontextmanager
45
+ async def lifespan(app):
46
+ init_engine(settings.db_url)
47
+ yield
48
+ await close_engine()
49
+
50
+ # Session — auto-commits on success, rolls back on error
51
+ async with get_session() as session:
52
+ user = await session.get(User, user_id)
53
+
54
+ # Decorator — auto-injects session if not provided
55
+ @with_transactional_session
56
+ async def create_user(session: AsyncSession, email: str) -> User:
57
+ ...
58
+ ```
59
+
60
+ ### Models
61
+
62
+ ```python
63
+ from forktex_core.psql import BaseDBModel, TimestampMixin, AuditMixin, JsonModelColumn
64
+
65
+ # Basic model with timestamps
66
+ class Project(BaseDBModel, TimestampMixin):
67
+ __tablename__ = "project"
68
+ id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
69
+ name: Mapped[str] = mapped_column(String(255))
70
+
71
+ # Full audit trail with soft delete
72
+ class Task(BaseDBModel, AuditMixin):
73
+ __tablename__ = "task"
74
+ unique_fields = ("project_id", "name") # partial unique index on active rows
75
+
76
+ id: Mapped[uuid.UUID] = mapped_column(primary_key=True, default=uuid.uuid4)
77
+ project_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("project.id"))
78
+ name: Mapped[str] = mapped_column(String(255))
79
+ ```
80
+
81
+ **AuditMixin** provides:
82
+ - `created_at` / `updated_at` (server defaults)
83
+ - `created_by_id` / `updated_by_id` (plain UUID, no FK — consumers add their own)
84
+ - `archived_at` / `is_active` (soft delete with consistency constraint)
85
+ - Partial unique index on `unique_fields` (active records only)
86
+
87
+ ### CRUD
88
+
89
+ ```python
90
+ from forktex_core.psql import paginate, paginate_scroll, get, find_one_by, create, list_all
91
+
92
+ # Paginated query with total count
93
+ page = await paginate(session, Task, page=1, page_size=20,
94
+ conditions=[Task.project_id == project_id],
95
+ order_by=[Task.created_at.desc()])
96
+ # page.data, page.has_more, page.total_count, page.total_pages
97
+
98
+ # Cursor-based scroll (no COUNT query)
99
+ scroll = await paginate_scroll(session, Task, limit=50)
100
+ # scroll.data, scroll.has_more
101
+
102
+ # Single record
103
+ task = await get(session, Task, task_id)
104
+ task = await find_one_by(session, Task, name="my-task", project_id=pid)
105
+
106
+ # Create with conflict detection
107
+ try:
108
+ task = await create(session, Task, name="new", project_id=pid)
109
+ except ConflictError:
110
+ ... # unique constraint violated
111
+ ```
112
+
113
+ ## Redis (`forktex_core.redis`)
114
+
115
+ ### Connection
116
+
117
+ ```python
118
+ from forktex_core.redis import init, close, available, get_client
119
+
120
+ await init("redis://localhost:6379/0")
121
+ assert available()
122
+ await close()
123
+ ```
124
+
125
+ ### Caching
126
+
127
+ ```python
128
+ from forktex_core.redis import cached, ops
129
+
130
+ # Simple TTL cache
131
+ @cached(ttl=300)
132
+ async def get_profile(user_id: str) -> dict:
133
+ ...
134
+
135
+ # Stale-while-revalidate (fast reads, background refresh)
136
+ @cached(ttl=60, stale_ttl=300)
137
+ async def get_feed(org_id: str) -> dict:
138
+ ...
139
+
140
+ # Manual ops
141
+ await ops.set("key", "value", ex=60)
142
+ value = await ops.get("key")
143
+ await ops.invalidate_prefix("user:")
144
+ ```
145
+
146
+ ### Namespaces
147
+
148
+ ```python
149
+ from enum import StrEnum
150
+ from forktex_core.redis.namespaces import key_for
151
+
152
+ class CachePrefix(StrEnum):
153
+ USER = "user"
154
+ FEED = "feed"
155
+
156
+ key = key_for(CachePrefix.USER, user_id) # "user:abc-123"
157
+ ```
158
+
159
+ ## Consumers
160
+
161
+ | Project | Uses |
162
+ |---------|------|
163
+ | [Network API](https://network.forktex.com) | psql (connection, models, AuditMixin, crud) + redis (all) |
164
+ | [Cloud API](https://cloud.forktex.com) | psql (connection, models, crud) |
165
+ | [Intelligence API](https://intelligence.forktex.com) | psql (connection with pool tuning, models) |
166
+ | [Workflow API](https://workflow.forktex.com) | psql (connection, models, crud) |
167
+ | [forktex-workflow SDK](../workflow/sdk) | psql (models as base for ORM) |
168
+
169
+ ## Testing
170
+
171
+ Tests use real PostgreSQL and Redis via [testcontainers](https://testcontainers-python.readthedocs.io/):
172
+
173
+ ```bash
174
+ make test-core # 23 tests — real PG + Redis containers
175
+ ```
176
+
177
+ ## Part of forktex-py
178
+
179
+ This package is maintained in the [forktex-py monorepo](https://github.com/forktex/forktex-python) alongside:
180
+ - `forktex` — CLI agent framework
181
+ - `forktex-documents` — Jinja2 + WeasyPrint rendering
182
+ - `forktex-intelligence` — Intelligence API SDK
183
+ - `forktex-cloud` — Cloud platform SDK
@@ -0,0 +1,34 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "forktex-core"
7
+ version = "0.1.0"
8
+ description = "Shared database (PostgreSQL) and cache (Redis) primitives for the FORKTEX ecosystem"
9
+ requires-python = ">=3.11"
10
+ dependencies = [
11
+ "sqlalchemy[asyncio]>=2.0",
12
+ "asyncpg>=0.29",
13
+ "pydantic>=2.0",
14
+ ]
15
+
16
+ [project.optional-dependencies]
17
+ redis = ["redis[hiredis]>=5.0"]
18
+ dev = [
19
+ "pytest>=8.0",
20
+ "pytest-asyncio>=0.24",
21
+ "ruff>=0.7.0",
22
+ ]
23
+
24
+ [tool.hatch.build.targets.wheel]
25
+ packages = ["src/forktex_core"]
26
+
27
+ [tool.ruff]
28
+ target-version = "py311"
29
+ line-length = 120
30
+
31
+ [tool.pytest.ini_options]
32
+ asyncio_mode = "auto"
33
+ asyncio_default_fixture_loop_scope = "session"
34
+ asyncio_default_test_loop_scope = "session"
@@ -0,0 +1,3 @@
1
+ """FORKTEX Core — shared database and cache primitives."""
2
+
3
+ __version__ = "0.1.0"
@@ -0,0 +1,47 @@
1
+ """PostgreSQL async primitives: connection, base models, CRUD utilities."""
2
+
3
+ from forktex_core.psql.connection import (
4
+ init_engine,
5
+ close_engine,
6
+ get_session,
7
+ with_transactional_session,
8
+ )
9
+ from forktex_core.psql.models import (
10
+ BaseDBModel,
11
+ ReprMixin,
12
+ TimestampMixin,
13
+ AuditMixin,
14
+ JsonModelColumn,
15
+ )
16
+ from forktex_core.psql.crud import (
17
+ PageResponse,
18
+ ScrollResponse,
19
+ ConflictError,
20
+ get,
21
+ list_all,
22
+ paginate,
23
+ paginate_scroll,
24
+ find_one_by,
25
+ create,
26
+ )
27
+
28
+ __all__ = [
29
+ "init_engine",
30
+ "close_engine",
31
+ "get_session",
32
+ "with_transactional_session",
33
+ "BaseDBModel",
34
+ "ReprMixin",
35
+ "TimestampMixin",
36
+ "AuditMixin",
37
+ "JsonModelColumn",
38
+ "PageResponse",
39
+ "ScrollResponse",
40
+ "ConflictError",
41
+ "get",
42
+ "list_all",
43
+ "paginate",
44
+ "paginate_scroll",
45
+ "find_one_by",
46
+ "create",
47
+ ]
@@ -0,0 +1,112 @@
1
+ """Async SQLAlchemy engine and session management for PostgreSQL.
2
+
3
+ Unified connection module for the FORKTEX ecosystem API archetypes.
4
+ Supports configurable engine kwargs for pool tuning.
5
+
6
+ Usage:
7
+ # Basic (network/cloud style — default pool settings)
8
+ init_engine("postgresql+asyncpg://user:pass@host/db")
9
+
10
+ # With pool tuning (intelligence style)
11
+ init_engine(
12
+ "postgresql+asyncpg://user:pass@host/db",
13
+ pool_size=20,
14
+ max_overflow=10,
15
+ pool_pre_ping=True,
16
+ )
17
+
18
+ # FastAPI lifespan
19
+ @asynccontextmanager
20
+ async def lifespan(app):
21
+ init_engine(settings.db_url)
22
+ yield
23
+ await close_engine()
24
+
25
+     # Route handler (NOTE: wrap get_session in a plain async-generator dependency first — an @asynccontextmanager factory cannot be used with Depends directly)
26
+ async def my_route(session: AsyncSession = Depends(get_session)):
27
+ ...
28
+
29
+ # Service layer
30
+ @with_transactional_session
31
+ async def my_service(session: AsyncSession, ...):
32
+ ...
33
+ """
34
+
35
+ from contextlib import asynccontextmanager
36
+ import functools
37
+ from typing import AsyncGenerator, Optional
38
+
39
+ from sqlalchemy.ext.asyncio import (
40
+ async_sessionmaker,
41
+ create_async_engine,
42
+ AsyncSession,
43
+ AsyncEngine,
44
+ )
45
+
46
+
47
+ # Module-level references (set by init_engine).
48
+ engine: Optional[AsyncEngine] = None
49
+ _async_sessionmaker: Optional[async_sessionmaker] = None
50
+
51
+
52
def init_engine(db_url: str, *, echo: bool = False, **engine_kwargs) -> async_sessionmaker:
    """Create the module-level async engine and session factory.

    Args:
        db_url: Async SQLAlchemy database URL (e.g. ``postgresql+asyncpg://...``).
        echo: When True, emitted SQL is echoed to stdout.
        **engine_kwargs: Passed straight through to ``create_async_engine``
            (``pool_size``, ``max_overflow``, ``pool_pre_ping``, ``pool_recycle``, ...).

    Returns:
        The newly configured ``async_sessionmaker``.

    NOTE(review): calling this a second time rebinds the globals without
    disposing the previous engine's pool — dispose first when re-initializing.
    """
    global engine, _async_sessionmaker
    engine = create_async_engine(db_url, echo=echo, **engine_kwargs)
    # expire_on_commit=False keeps ORM objects usable after the session commits.
    session_factory = async_sessionmaker(bind=engine, expire_on_commit=False, class_=AsyncSession)
    _async_sessionmaker = session_factory
    return session_factory
68
+
69
+
70
async def close_engine() -> None:
    """Dispose the engine and clear the session factory on app shutdown.

    Also resets ``_async_sessionmaker`` so a subsequent ``get_session()``
    fails fast instead of handing out sessions bound to a disposed engine
    (previously the stale factory survived shutdown).
    """
    global engine, _async_sessionmaker
    if engine is not None:
        await engine.dispose()
        engine = None
    # The factory holds a reference to the (now disposed) engine — drop it.
    _async_sessionmaker = None
76
+
77
+
78
@asynccontextmanager
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield a transactional async session: commit on success, roll back on error.

    Use as a plain async context manager in service code::

        async with get_session() as session:
            ...

    NOTE: because this is an ``@asynccontextmanager`` factory, it cannot be
    handed directly to FastAPI's ``Depends`` — ``Depends(get_session)`` would
    inject the context-manager object, not a session. Wrap it in a plain
    async-generator dependency instead.

    Raises:
        RuntimeError: If ``init_engine()`` has not been called.
    """
    if _async_sessionmaker is None:
        # Raise a real exception (not assert) so the guard survives `python -O`.
        raise RuntimeError("Engine/sessionmaker not initialized — call init_engine() first")
    async with _async_sessionmaker() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
93
+
94
+
95
def with_transactional_session(func):
    """Decorator that injects a transactional session when one is not supplied.

    Dispatch rules:
      * first positional arg is an ``AsyncSession``  -> call through unchanged
      * ``session=<AsyncSession>`` keyword           -> call through unchanged
      * otherwise (including an explicit ``session=None``) -> open a session
        via ``get_session()`` and pass it as the first positional argument.

    Fix: an explicit ``session=None`` keyword was previously forwarded as-is,
    so the wrapped function received ``None`` instead of a session; it now
    triggers injection like any other missing session.
    """

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        if args and isinstance(args[0], AsyncSession):
            return await func(*args, **kwargs)
        if kwargs.get("session") is not None:
            return await func(*args, **kwargs)
        # Drop a stray session=None so it cannot clash with the positional
        # session injected below.
        kwargs.pop("session", None)
        async with get_session() as session:
            return await func(session, *args, **kwargs)

    return wrapper
@@ -0,0 +1,253 @@
1
+ """Generic async CRUD utilities for SQLAlchemy.
2
+
3
+ Provides paginated queries (page-based and cursor-based), single-record
4
+ lookups, and creation helpers with conflict detection.
5
+ """
6
+
7
+ from math import ceil
8
+ from typing import Any, Callable, Generic, List, Optional, Type, TypeVar
9
+
10
+ from pydantic import BaseModel
11
+ from sqlalchemy import ColumnElement, Select, func, select
12
+ from sqlalchemy.exc import IntegrityError
13
+ from sqlalchemy.ext.asyncio import AsyncSession
14
+ from sqlalchemy.orm import InstrumentedAttribute
15
+
16
+
17
+ T = TypeVar("T")
18
+ U = TypeVar("U")
19
+
20
+
21
+ # ---------------------------------------------------------------------------
22
+ # Response containers
23
+ # ---------------------------------------------------------------------------
24
+
25
+
26
class PageResponse(BaseModel, Generic[T]):
    """Page-based pagination response.

    ``total_count`` / ``current_page`` / ``total_pages`` are optional so the
    container can also carry count-free result sets.

    (Types unified to the ``X | None`` style already used by ScrollResponse.)
    """

    data: list[T]
    has_more: bool
    limit: int
    total_count: int | None = None
    current_page: int | None = None
    total_pages: int | None = None

    def apply_to_page_data(self, map_func: Callable[[T], U]) -> "PageResponse[U]":
        """Return a copy with ``map_func`` applied to every item in ``data``;
        all pagination metadata is carried over unchanged."""
        return PageResponse[U](
            data=[map_func(item) for item in self.data],
            has_more=self.has_more,
            limit=self.limit,
            total_count=self.total_count,
            current_page=self.current_page,
            total_pages=self.total_pages,
        )
45
+
46
+
47
class ScrollResponse(BaseModel, Generic[T]):
    """Cursor-based (scroll) pagination response.

    NOTE(review): ``next_cursor`` is never populated by the helpers in this
    module — confirm whether callers are expected to set it themselves.
    """

    data: list[T]
    limit: int
    has_more: bool
    next_cursor: str | None = None

    def apply_to_scroll_data(self, map_func: Callable[[T], U]) -> "ScrollResponse[U]":
        """Return a copy with ``map_func`` applied to every item in ``data``;
        scroll metadata is carried over unchanged."""
        mapped_items = [map_func(item) for item in self.data]
        return ScrollResponse[U](
            data=mapped_items,
            limit=self.limit,
            has_more=self.has_more,
            next_cursor=self.next_cursor,
        )
62
+
63
+
64
+ # ---------------------------------------------------------------------------
65
+ # Exceptions
66
+ # ---------------------------------------------------------------------------
67
+
68
+
69
class ConflictError(Exception):
    """Raised when an INSERT/UPDATE violates a uniqueness constraint."""
73
+
74
+
75
+ # ---------------------------------------------------------------------------
76
+ # Single-record operations
77
+ # ---------------------------------------------------------------------------
78
+
79
+
80
async def get(
    session: AsyncSession,
    model: Type[T],
    value: Any,
    *,
    key: str = "id",
    options: list | None = None,
) -> T | None:
    """Fetch one record where ``model.<key> == value`` (default: primary key).

    Args:
        session: Active async session.
        model: ORM model class to query.
        value: Value to match against the ``key`` column.
        key: Column name to filter on.
        options: Optional loader options (e.g. ``selectinload``).

    Returns:
        The matching instance, or ``None`` if absent.

    Raises:
        AttributeError: If ``model`` has no attribute named ``key``.
    """
    if not hasattr(model, key):
        raise AttributeError(f"{model.__name__} has no attribute '{key}'")

    column = getattr(model, key)
    stmt = select(model).where(column == value)
    if options:
        stmt = stmt.options(*options)
    return (await session.execute(stmt)).scalar_one_or_none()
97
+
98
+
99
async def find_one_by(
    session: AsyncSession,
    model: Type[T],
    **filters,
) -> Optional[T]:
    """Return the single record matching every keyword filter, or ``None``.

    Filters are passed to ``Select.filter_by`` and therefore ANDed together.
    """
    result = await session.execute(select(model).filter_by(**filters))
    return result.scalar_one_or_none()
108
+
109
+
110
async def list_all(
    session: AsyncSession,
    model: Type[T],
    *,
    options: list | None = None,
) -> List[T]:
    """Return every record of ``model`` (avoid on large tables).

    Args:
        options: Optional loader options (e.g. ``selectinload``).
    """
    stmt = select(model)
    if options:
        stmt = stmt.options(*options)
    rows = await session.execute(stmt)
    return list(rows.scalars().all())
122
+
123
+
124
async def create(
    session: AsyncSession,
    model: Type[T],
    **values,
) -> T:
    """Insert a new ``model`` row and return it refreshed from the database.

    Raises:
        ConflictError: When the INSERT violates an integrity constraint
            (uniqueness, FK, etc.).
    """
    instance = model(**values)
    session.add(instance)
    try:
        await session.flush()
    except IntegrityError as exc:
        # Surface database integrity violations as a domain-level error.
        raise ConflictError(str(exc)) from exc
    # Pick up server-side defaults (timestamps, generated ids).
    await session.refresh(instance)
    return instance
138
+
139
+
140
+ # ---------------------------------------------------------------------------
141
+ # Paginated queries
142
+ # ---------------------------------------------------------------------------
143
+
144
+
145
async def paginate(
    session: AsyncSession,
    model: Type[T],
    page: int = 1,
    page_size: int = 100,
    conditions: Optional[List[ColumnElement]] = None,
    order_by: Optional[list[ColumnElement]] = None,
    joins: Optional[list[InstrumentedAttribute]] = None,
    options: list | None = None,
) -> "PageResponse[T]":
    """Page-based pagination with a total count.

    ``joins`` are applied as OUTER joins; ``conditions`` are ANDed together.
    """
    stmt = select(model)

    if options:
        stmt = stmt.options(*options)
    for join_target in joins or []:
        stmt = stmt.outerjoin(join_target)
    if conditions:
        stmt = stmt.where(*conditions)
    if order_by:
        stmt = stmt.order_by(*order_by)

    return await _paginate_query(session, stmt, page, page_size)
169
+
170
+
171
async def paginate_scroll(
    session: AsyncSession,
    model: Type[T],
    limit: int = 20,
    conditions: Optional[List[ColumnElement]] = None,
    order_by: Optional[list[ColumnElement]] = None,
    joins: Optional[list[InstrumentedAttribute]] = None,
    options: list | None = None,
) -> "ScrollResponse[T]":
    """Cursor-based (scroll) pagination: no COUNT query is issued.

    ``joins`` are applied as OUTER joins; ``conditions`` are ANDed together.
    Internally fetches ``limit + 1`` rows to detect ``has_more``.
    """
    stmt = select(model)

    if options:
        stmt = stmt.options(*options)
    for join_target in joins or []:
        stmt = stmt.outerjoin(join_target)
    if conditions:
        stmt = stmt.where(*conditions)
    if order_by:
        stmt = stmt.order_by(*order_by)

    return await _paginate_scroll_query(session, stmt, limit)
194
+
195
+
196
+ # ---------------------------------------------------------------------------
197
+ # Internal helpers
198
+ # ---------------------------------------------------------------------------
199
+
200
+
201
async def _paginate_query(
    session: AsyncSession,
    query: Select[Any],
    page: int = 1,
    page_size: int = 100,
) -> "PageResponse[T]":
    """Run ``query`` with OFFSET/LIMIT paging plus a COUNT over a subquery."""
    # Clamp nonsense inputs. NOTE(review): a page_size below 1 falls back to
    # 10, not the declared default of 100 — confirm this is intentional.
    page = max(page, 1)
    if page_size < 1:
        page_size = 10

    count_stmt = select(func.count()).select_from(query.subquery())
    total_count = (await session.execute(count_stmt)).scalar_one()

    page_stmt = query.offset((page - 1) * page_size).limit(page_size)
    rows = await session.execute(page_stmt)
    data = list(rows.scalars().all())

    return PageResponse(
        data=data,
        has_more=page * page_size < total_count,
        limit=page_size,
        total_count=total_count,
        current_page=page,
        # page_size is guaranteed >= 1 here, so no zero-division guard needed.
        total_pages=ceil(total_count / page_size),
    )
231
+
232
+
233
async def _paginate_scroll_query(
    session: AsyncSession,
    query: Select[Any],
    limit: int = 20,
) -> "ScrollResponse[T]":
    """Run ``query`` with ``LIMIT limit + 1``; the sentinel row signals has_more."""
    if limit < 1:
        limit = 20

    result = await session.execute(query.limit(limit + 1))
    rows = list(result.scalars().all())

    overflow = len(rows) > limit
    return ScrollResponse(
        data=rows[:limit] if overflow else rows,
        has_more=overflow,
        limit=limit,
    )