etchdb 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etchdb/__init__.py +9 -0
- etchdb/adapter.py +56 -0
- etchdb/aiosqlite/__init__.py +17 -0
- etchdb/aiosqlite/adapter.py +148 -0
- etchdb/asyncpg/__init__.py +17 -0
- etchdb/asyncpg/adapter.py +106 -0
- etchdb/db.py +179 -0
- etchdb/py.typed +0 -0
- etchdb/query.py +18 -0
- etchdb/row.py +29 -0
- etchdb/sql/__init__.py +209 -0
- etchdb-0.1.0.dist-info/METADATA +168 -0
- etchdb-0.1.0.dist-info/RECORD +15 -0
- etchdb-0.1.0.dist-info/WHEEL +4 -0
- etchdb-0.1.0.dist-info/licenses/LICENSE +21 -0
etchdb/__init__.py
ADDED
etchdb/adapter.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"""Abstract DB adapter; the boundary DB sits on top of."""
|
|
2
|
+
|
|
3
|
+
from abc import ABC, abstractmethod
|
|
4
|
+
from contextlib import AbstractAsyncContextManager
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class AdapterBase(ABC):
|
|
9
|
+
"""Abstract DB adapter implemented by each driver.
|
|
10
|
+
|
|
11
|
+
The four raw-SQL methods mirror asyncpg's vocabulary:
|
|
12
|
+
`execute / fetch / fetchrow / fetchval`. All take positional
|
|
13
|
+
`*params` bound through the driver's parameterised query API.
|
|
14
|
+
|
|
15
|
+
`placeholder(i)` converts a 0-indexed parameter position to the
|
|
16
|
+
driver's placeholder syntax. Postgres returns `$1, $2, ...`; SQLite
|
|
17
|
+
returns `?`.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
@staticmethod
|
|
21
|
+
@abstractmethod
|
|
22
|
+
def placeholder(i: int) -> str:
|
|
23
|
+
"""Return the placeholder for the i-th parameter (0-indexed)."""
|
|
24
|
+
...
|
|
25
|
+
|
|
26
|
+
@abstractmethod
|
|
27
|
+
async def execute(self, sql: str, *params: Any) -> Any: ...
|
|
28
|
+
|
|
29
|
+
@abstractmethod
|
|
30
|
+
async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]: ...
|
|
31
|
+
|
|
32
|
+
@abstractmethod
|
|
33
|
+
async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None: ...
|
|
34
|
+
|
|
35
|
+
@abstractmethod
|
|
36
|
+
async def fetchval(self, sql: str, *params: Any) -> Any: ...
|
|
37
|
+
|
|
38
|
+
@abstractmethod
|
|
39
|
+
def transaction(self) -> AbstractAsyncContextManager["AdapterBase"]:
|
|
40
|
+
"""Return an async context manager yielding a transaction-scoped adapter.
|
|
41
|
+
|
|
42
|
+
Inside the `async with` block, all calls on the yielded adapter run
|
|
43
|
+
on the same connection within a single transaction. The transaction
|
|
44
|
+
commits on a clean exit and rolls back on any exception.
|
|
45
|
+
"""
|
|
46
|
+
...
|
|
47
|
+
|
|
48
|
+
@abstractmethod
|
|
49
|
+
async def close(self) -> None:
|
|
50
|
+
"""Release resources owned by this adapter.
|
|
51
|
+
|
|
52
|
+
For pool-owning adapters created via `from_url`, this closes the
|
|
53
|
+
pool. For adapters wrapping an externally-managed pool (`from_pool`),
|
|
54
|
+
this is a no-op; the caller owns the pool's lifecycle.
|
|
55
|
+
"""
|
|
56
|
+
...
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"""aiosqlite adapter for etchdb.
|
|
2
|
+
|
|
3
|
+
Importing this subpackage requires aiosqlite to be installed. The
|
|
4
|
+
top-level `etchdb` namespace does NOT depend on aiosqlite; only this
|
|
5
|
+
subpackage does.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
# Probe for the optional driver up front so users get an actionable
# install hint instead of a bare ModuleNotFoundError deeper in the import.
try:
    import aiosqlite as _probe  # noqa: F401
except ImportError as exc:
    raise ImportError(
        "etchdb.aiosqlite requires the aiosqlite package. Install with: pip install etchdb[sqlite]"
    ) from exc

from etchdb.aiosqlite.adapter import AiosqliteAdapter

__all__ = ["AiosqliteAdapter"]
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
"""aiosqlite adapter implementation.
|
|
2
|
+
|
|
3
|
+
aiosqlite has no pool concept; it wraps a single sqlite3 connection
|
|
4
|
+
that runs on its own background thread. The adapter therefore holds
|
|
5
|
+
one connection rather than a pool. Concurrent calls serialise through
|
|
6
|
+
aiosqlite's internal queue, which is the correct sqlite3 model.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from collections.abc import AsyncIterator
|
|
12
|
+
from contextlib import asynccontextmanager
|
|
13
|
+
from typing import Any
|
|
14
|
+
from urllib.parse import urlparse
|
|
15
|
+
|
|
16
|
+
import aiosqlite
|
|
17
|
+
|
|
18
|
+
from etchdb.adapter import AdapterBase
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _path_from_url(url: str) -> str:
|
|
22
|
+
"""Extract the SQLite database path from a URL.
|
|
23
|
+
|
|
24
|
+
Supported forms:
|
|
25
|
+
sqlite:///:memory: -> ":memory:"
|
|
26
|
+
sqlite+aiosqlite:///:memory: -> ":memory:"
|
|
27
|
+
sqlite:///relative.db -> "relative.db"
|
|
28
|
+
sqlite:////absolute/path.db -> "/absolute/path.db"
|
|
29
|
+
"""
|
|
30
|
+
parsed = urlparse(url)
|
|
31
|
+
path = parsed.path
|
|
32
|
+
if path.startswith("/"):
|
|
33
|
+
path = path[1:]
|
|
34
|
+
return path or ":memory:"
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class AiosqliteAdapter(AdapterBase):
    """AdapterBase implementation backed by a single aiosqlite connection.

    Construct via `from_connection(conn)` to wrap an externally-managed
    connection (etchdb will not close it), or `await from_url(url)` to
    let etchdb create and own the connection.
    """

    def __init__(self, conn: aiosqlite.Connection, *, owns_conn: bool = False):
        self._conn = conn
        self._owns_conn = owns_conn

    @staticmethod
    def placeholder(i: int) -> str:
        # SQLite placeholders are positional `?`; the index is unused.
        return "?"

    @classmethod
    def from_connection(cls, conn: aiosqlite.Connection) -> AiosqliteAdapter:
        """Wrap an externally-managed aiosqlite connection. The caller closes it."""
        return cls(conn, owns_conn=False)

    @classmethod
    async def from_url(cls, url: str) -> AiosqliteAdapter:
        """Open an aiosqlite connection from `url` and wrap it.

        etchdb owns the connection; `close()` will close it.
        """
        path = _path_from_url(url)
        conn = await aiosqlite.connect(path)
        conn.row_factory = aiosqlite.Row
        return cls(conn, owns_conn=True)

    async def execute(self, sql: str, *params: Any) -> Any:
        """Run a statement and commit it.

        Fix over the original: the cursor is closed deterministically
        instead of being left for GC, and the statement's rowcount is
        returned (previously dropped — the method always returned None
        despite AdapterBase.execute's `-> Any` contract).
        """
        cursor = await self._conn.execute(sql, params)
        rowcount = cursor.rowcount
        await cursor.close()
        await self._conn.commit()
        return rowcount

    async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]:
        """Run a query and return all rows as plain dicts."""
        async with self._conn.execute(sql, params) as cursor:
            rows = await cursor.fetchall()
            return [dict(r) for r in rows]

    async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None:
        """Run a query and return the first row as a dict, or None."""
        async with self._conn.execute(sql, params) as cursor:
            row = await cursor.fetchone()
            return dict(row) if row is not None else None

    async def fetchval(self, sql: str, *params: Any) -> Any:
        """Run a query and return the first column of the first row, or None."""
        async with self._conn.execute(sql, params) as cursor:
            row = await cursor.fetchone()
            return row[0] if row is not None else None

    @asynccontextmanager
    async def transaction(self) -> AsyncIterator[AdapterBase]:
        """Yield a tx-scoped adapter; commit on clean exit, rollback on error."""
        # sqlite3's default isolation_level auto-injects BEGIN before DML,
        # so we don't emit one explicitly; we just commit on clean exit
        # and rollback on any exception.
        try:
            yield _AiosqliteTxAdapter(self._conn)
        except BaseException:
            await self._conn.rollback()
            raise
        else:
            await self._conn.commit()

    async def close(self) -> None:
        """Close the connection if this adapter opened it (from_url)."""
        # from_connection leaves lifecycle to the caller.
        if self._owns_conn:
            await self._conn.close()
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
class _AiosqliteTxAdapter(AdapterBase):
    """Tx-scoped aiosqlite adapter; does not commit on each statement."""

    def __init__(self, conn: aiosqlite.Connection):
        self._conn = conn

    @staticmethod
    def placeholder(i: int) -> str:
        # SQLite placeholders are positional `?`; the index is unused.
        return "?"

    async def execute(self, sql: str, *params: Any) -> Any:
        """Run a statement inside the open transaction (no commit here).

        Fix over the original: close the cursor deterministically and
        return the statement's rowcount (previously the cursor leaked
        until GC and the result was dropped).
        """
        cursor = await self._conn.execute(sql, params)
        rowcount = cursor.rowcount
        await cursor.close()
        return rowcount

    async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]:
        """Run a query and return all rows as plain dicts."""
        async with self._conn.execute(sql, params) as cursor:
            rows = await cursor.fetchall()
            return [dict(r) for r in rows]

    async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None:
        """Run a query and return the first row as a dict, or None."""
        async with self._conn.execute(sql, params) as cursor:
            row = await cursor.fetchone()
            return dict(row) if row is not None else None

    async def fetchval(self, sql: str, *params: Any) -> Any:
        """Run a query and return the first column of the first row, or None."""
        async with self._conn.execute(sql, params) as cursor:
            row = await cursor.fetchone()
            return row[0] if row is not None else None

    @asynccontextmanager
    async def transaction(self) -> AsyncIterator[AdapterBase]:
        """Nested transaction via SAVEPOINT: partial rollback on failure.

        SQLite stacks same-named savepoints, so re-entrant nesting still
        resolves to the innermost "etchdb_sp" on ROLLBACK/RELEASE.
        """
        sp = "etchdb_sp"
        await self._conn.execute(f"SAVEPOINT {sp}")
        try:
            yield self
        except BaseException:
            await self._conn.execute(f"ROLLBACK TO SAVEPOINT {sp}")
            await self._conn.execute(f"RELEASE SAVEPOINT {sp}")
            raise
        else:
            await self._conn.execute(f"RELEASE SAVEPOINT {sp}")

    async def close(self) -> None:
        """No-op: the outer adapter owns the connection's lifecycle."""
        return None
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
"""asyncpg adapter for etchdb.
|
|
2
|
+
|
|
3
|
+
Importing this subpackage requires asyncpg to be installed. The
|
|
4
|
+
top-level `etchdb` namespace does NOT depend on asyncpg; only this
|
|
5
|
+
subpackage does.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
# Probe for the optional driver up front so users get an actionable
# install hint instead of a bare ModuleNotFoundError deeper in the import.
try:
    import asyncpg as _probe  # noqa: F401
except ImportError as exc:
    raise ImportError(
        "etchdb.asyncpg requires the asyncpg package. Install with: pip install etchdb[asyncpg]"
    ) from exc

from etchdb.asyncpg.adapter import AsyncpgAdapter

__all__ = ["AsyncpgAdapter"]
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"""asyncpg adapter implementation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import AsyncIterator
|
|
6
|
+
from contextlib import asynccontextmanager
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
import asyncpg
|
|
10
|
+
|
|
11
|
+
from etchdb.adapter import AdapterBase
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class AsyncpgAdapter(AdapterBase):
    """AdapterBase implementation backed by an asyncpg pool.

    Use `from_pool(pool)` to wrap a pool the caller manages (etchdb will
    not close it), or `await from_url(url)` to have etchdb create and
    own one; `owns_pool` records which case applies.

    `from_url` is deliberately minimal: for custom pool settings
    (init=, min_size=, max_size=, codecs) build the pool yourself with
    `asyncpg.create_pool(...)` and hand it to `from_pool`.
    """

    def __init__(self, pool: asyncpg.Pool, *, owns_pool: bool = False):
        self._pool = pool
        self._owns_pool = owns_pool

    @staticmethod
    def placeholder(i: int) -> str:
        # Postgres numbers placeholders from 1: $1, $2, ...
        return f"${i + 1}"

    @classmethod
    def from_pool(cls, pool: asyncpg.Pool) -> AsyncpgAdapter:
        """Wrap an externally-managed asyncpg pool. The caller closes it."""
        return cls(pool, owns_pool=False)

    @classmethod
    async def from_url(cls, url: str) -> AsyncpgAdapter:
        """Create an asyncpg pool from `url` and wrap it.

        etchdb owns the pool; `close()` will close it.
        """
        created = await asyncpg.create_pool(url)
        return cls(created, owns_pool=True)

    async def execute(self, sql: str, *params: Any) -> Any:
        """Run a statement on a pooled connection; returns asyncpg's status tag."""
        async with self._pool.acquire() as conn:
            return await conn.execute(sql, *params)

    async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]:
        """Run a query and return every row as a plain dict."""
        async with self._pool.acquire() as conn:
            fetched = await conn.fetch(sql, *params)
        return [dict(rec) for rec in fetched]

    async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None:
        """Run a query and return the first row as a dict, or None."""
        async with self._pool.acquire() as conn:
            rec = await conn.fetchrow(sql, *params)
        if rec is None:
            return None
        return dict(rec)

    async def fetchval(self, sql: str, *params: Any) -> Any:
        """Run a query and return the first column of the first row, or None."""
        async with self._pool.acquire() as conn:
            return await conn.fetchval(sql, *params)

    @asynccontextmanager
    async def transaction(self) -> AsyncIterator[AdapterBase]:
        """Pin one pooled connection for the whole block.

        asyncpg's own transaction() handles BEGIN/COMMIT/ROLLBACK.
        """
        async with self._pool.acquire() as conn, conn.transaction():
            yield _AsyncpgConnAdapter(conn)

    async def close(self) -> None:
        """Close the pool if this adapter created it (from_url)."""
        # from_pool leaves lifecycle to the caller.
        if self._owns_pool:
            await self._pool.close()
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class _AsyncpgConnAdapter(AdapterBase):
    """Single-connection adapter, used inside a transaction."""

    def __init__(self, conn: asyncpg.Connection):
        self._conn = conn

    @staticmethod
    def placeholder(i: int) -> str:
        # Postgres numbers placeholders from 1: $1, $2, ...
        return f"${i + 1}"

    async def execute(self, sql: str, *params: Any) -> Any:
        """Run a statement on the pinned connection; returns the status tag."""
        return await self._conn.execute(sql, *params)

    async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]:
        """Run a query and return every row as a plain dict."""
        return [dict(rec) for rec in await self._conn.fetch(sql, *params)]

    async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None:
        """Run a query and return the first row as a dict, or None."""
        rec = await self._conn.fetchrow(sql, *params)
        if rec is None:
            return None
        return dict(rec)

    async def fetchval(self, sql: str, *params: Any) -> Any:
        """Run a query and return the first column of the first row, or None."""
        return await self._conn.fetchval(sql, *params)

    @asynccontextmanager
    async def transaction(self) -> AsyncIterator[AdapterBase]:
        """Nested transaction; asyncpg implements nesting via SAVEPOINTs."""
        async with self._conn.transaction():
            yield self

    async def close(self) -> None:
        """No-op: the pool / outer adapter owns the connection's lifecycle."""
        return None
|
etchdb/db.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
"""DB facade: the user-facing entry point.
|
|
2
|
+
|
|
3
|
+
DB sits on top of an AdapterBase and exposes the user-facing API:
|
|
4
|
+
typed CRUD over Row, raw SQL passthrough mirroring asyncpg's
|
|
5
|
+
vocabulary, typed-result helpers `fetch_models / fetch_model`, a
|
|
6
|
+
transaction context manager, and a `compose` inspector for previewing
|
|
7
|
+
the SQL of a typed op without executing it. Construct directly with
|
|
8
|
+
an adapter, or via the URL-scheme dispatcher `DB.from_url`.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
from contextlib import asynccontextmanager
|
|
14
|
+
from typing import TYPE_CHECKING, Any, Literal
|
|
15
|
+
from urllib.parse import urlparse
|
|
16
|
+
|
|
17
|
+
from etchdb import sql
|
|
18
|
+
|
|
19
|
+
if TYPE_CHECKING:
|
|
20
|
+
from collections.abc import AsyncIterator
|
|
21
|
+
|
|
22
|
+
from etchdb.adapter import AdapterBase
|
|
23
|
+
from etchdb.query import SqlQuery
|
|
24
|
+
from etchdb.row import Row
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _hydrate(model_or_row: type[Row] | Row, row: dict[str, Any] | None) -> Row | None:
|
|
28
|
+
"""Build a Row from a fetchrow result, or return None when there is no row.
|
|
29
|
+
|
|
30
|
+
Centralises the dict-to-Row construction and the None-handling that
|
|
31
|
+
fetchrow-based methods (`fetch_model`, `insert`, `update`) all share.
|
|
32
|
+
`model_or_row` may be either a Row class (for `fetch_model`) or a Row
|
|
33
|
+
instance whose class should be used (for `insert` and `update`).
|
|
34
|
+
"""
|
|
35
|
+
if row is None:
|
|
36
|
+
return None
|
|
37
|
+
cls = model_or_row if isinstance(model_or_row, type) else type(model_or_row)
|
|
38
|
+
return cls(**row)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# Maps each `DB.compose` op name to the etchdb.sql builder that produces
# its SqlQuery. Keys mirror the Literal accepted by `DB.compose`.
_COMPOSE_OPS = {
    "get": sql.select_one,
    "query": sql.select_many,
    "insert": sql.insert,
    "update": sql.update,
    "delete": sql.delete,
}
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class DB:
    """User-facing DB facade.

    Wraps an AdapterBase and exposes typed CRUD over Row, raw SQL
    passthrough in asyncpg's vocabulary, typed-result helpers, a
    transaction context manager, and the `compose` inspector.
    """

    def __init__(self, adapter: AdapterBase):
        self._adapter = adapter

    @classmethod
    async def from_url(cls, url: str) -> DB:
        """Open a DB from a URL, dispatching on the URL scheme.

        Supported schemes:
            postgresql://, postgres://, postgresql+asyncpg:// -> asyncpg
            sqlite:///, sqlite+aiosqlite:/// -> aiosqlite
            postgresql+psycopg:// -> NotImplementedError

        Driver subpackages are imported lazily so users only need the
        driver they actually use installed.
        """
        scheme = urlparse(url).scheme.lower()

        adapter: AdapterBase
        if scheme in {"sqlite", "sqlite+aiosqlite"}:
            from etchdb.aiosqlite import AiosqliteAdapter

            adapter = await AiosqliteAdapter.from_url(url)
        elif scheme in {"postgresql", "postgres", "postgresql+asyncpg"}:
            from etchdb.asyncpg import AsyncpgAdapter

            if scheme == "postgresql+asyncpg":
                # asyncpg only accepts the bare postgresql:// scheme.
                url = "postgresql://" + url.split("://", 1)[1]
            adapter = await AsyncpgAdapter.from_url(url)
        elif scheme == "postgresql+psycopg":
            raise NotImplementedError("psycopg adapter not yet shipped")
        else:
            raise ValueError(f"Unsupported URL scheme: {scheme!r}")

        return cls(adapter)

    async def execute(self, sql: str, *params: Any) -> Any:
        """Raw statement passthrough to the adapter."""
        return await self._adapter.execute(sql, *params)

    async def fetch(self, sql: str, *params: Any) -> list[dict[str, Any]]:
        """Raw query passthrough; rows come back as plain dicts."""
        return await self._adapter.fetch(sql, *params)

    async def fetchrow(self, sql: str, *params: Any) -> dict[str, Any] | None:
        """Raw query passthrough for a single row (or None)."""
        return await self._adapter.fetchrow(sql, *params)

    async def fetchval(self, sql: str, *params: Any) -> Any:
        """Raw query passthrough for a single scalar (or None)."""
        return await self._adapter.fetchval(sql, *params)

    @asynccontextmanager
    async def transaction(self) -> AsyncIterator[DB]:
        """Open a transaction. Commits on clean exit, rolls back on exception.

        The yielded DB is bound to the transaction's connection, so
        calls on it share the surrounding block's connection.
        """
        async with self._adapter.transaction() as scoped:
            yield DB(scoped)

    async def close(self) -> None:
        """Release adapter resources (pool/connection, if owned)."""
        await self._adapter.close()

    async def fetch_models(self, model: type[Row], sql: str, *params: Any) -> list[Row]:
        """Run raw SQL and hydrate every returned row into `model`."""
        fetched = await self._adapter.fetch(sql, *params)
        return [model(**record) for record in fetched]

    async def fetch_model(self, model: type[Row], sql: str, *params: Any) -> Row | None:
        """Run raw SQL and hydrate the first row into `model`, or None."""
        record = await self._adapter.fetchrow(sql, *params)
        return _hydrate(model, record)

    async def get(self, model: type[Row], **filters: Any) -> Row | None:
        """Fetch at most one row of `model` matching the AND-ed filters."""
        q = sql.select_one(model, placeholder=self._adapter.placeholder, **filters)
        return await self.fetch_model(model, q.sql, *q.params)

    async def query(
        self,
        model: type[Row],
        *,
        limit: int | None = None,
        offset: int | None = None,
        order_by: str | list[str] | None = None,
        **filters: Any,
    ) -> list[Row]:
        """Fetch rows of `model` matching the AND-ed filters."""
        q = sql.select_many(
            model,
            placeholder=self._adapter.placeholder,
            limit=limit,
            offset=offset,
            order_by=order_by,
            **filters,
        )
        return await self.fetch_models(model, q.sql, *q.params)

    async def insert(self, row: Row) -> Row:
        """Insert `row`; returns the stored row (via RETURNING) when available."""
        q = sql.insert(row, placeholder=self._adapter.placeholder)
        stored = await self._adapter.fetchrow(q.sql, *q.params)
        # Drivers without RETURNING support yield no row; fall back to the input.
        return _hydrate(row, stored) or row

    async def update(self, row: Row) -> Row | None:
        """Update `row` keyed by its primary key. Returns the updated row,
        or None if no row matched."""
        q = sql.update(row, placeholder=self._adapter.placeholder, returning="*")
        stored = await self._adapter.fetchrow(q.sql, *q.params)
        return _hydrate(row, stored)

    async def delete(self, row: Row) -> None:
        """Delete `row` keyed by its primary key."""
        q = sql.delete(row, placeholder=self._adapter.placeholder)
        await self._adapter.execute(q.sql, *q.params)

    def compose(
        self,
        op: Literal["get", "query", "insert", "update", "delete"],
        *args: Any,
        **kwargs: Any,
    ) -> SqlQuery:
        """Return the SqlQuery a typed op would produce, without executing it.

        Lets callers inspect or test SQL before it touches the DB. The
        placeholder style follows the underlying adapter ($N for asyncpg,
        ? for aiosqlite).

            q = db.compose("get", User, id=1)
            print(q.sql, q.params)
        """
        try:
            builder = _COMPOSE_OPS[op]
        except KeyError as err:
            raise ValueError(f"Unknown op {op!r}. Expected one of: {sorted(_COMPOSE_OPS)}") from err
        return builder(*args, placeholder=self._adapter.placeholder, **kwargs)
|
etchdb/py.typed
ADDED
|
File without changes
|
etchdb/query.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Inspectable SQL value type."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, NamedTuple
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class SqlQuery(NamedTuple):
    """A SQL string and its bound parameters.

    Returned by every typed operation in etchdb. Useful for testing,
    debugging, and copy-pasting into psql.

        q = pg.insert(user)
        print(q.sql)     # INSERT INTO users (id, name) VALUES ($1, $2) RETURNING *
        print(q.params)  # [1, "Alice"]
    """

    # Parameterised SQL text, in the owning adapter's placeholder style.
    sql: str
    # Positional bind values, ordered to match the placeholders in `sql`.
    params: list[Any]
|
etchdb/row.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Base class for typed table rows."""
|
|
2
|
+
|
|
3
|
+
from typing import ClassVar
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class Row(BaseModel):
    """Base class for typed table rows.

    Subclass to declare a table:

        class User(Row):
            __table__ = "users"
            id: int
            name: str

    `__table__` is required. `__pk__` defaults to `("id",)`; override
    if your primary key is composite or named differently.

        class UserRole(Row):
            __table__ = "user_roles"
            __pk__ = ("user_id", "role_id")
            user_id: int
            role_id: int
    """

    # Name of the database table this model maps to (required on subclasses).
    __table__: ClassVar[str]
    # Primary-key field name(s); the WHERE key for update/delete.
    __pk__: ClassVar[tuple[str, ...]] = ("id",)
|
etchdb/sql/__init__.py
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
"""Dialect-neutral SQL emitter.
|
|
2
|
+
|
|
3
|
+
Generates SqlQuery values for typed Row operations. Driver-free: knows
|
|
4
|
+
nothing about asyncpg, psycopg, or aiosqlite. Each adapter passes its
|
|
5
|
+
own `placeholder` callable so the same emitter works for both Postgres
|
|
6
|
+
($1, $2, ...) and SQLite (?, ?, ...).
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
from etchdb.query import SqlQuery
|
|
13
|
+
from etchdb.row import Row
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def insert(
    row: Row,
    *,
    placeholder: Callable[[int], str],
    returning: str | list[str] | None = "*",
) -> SqlQuery:
    """Build an INSERT for `row`, emitting only fields in `model_fields_set`.

    Fields left at their default are omitted so the database applies its
    own DEFAULT (or sequence); an explicit `None` counts as set. With no
    fields set at all, emits `INSERT INTO ... DEFAULT VALUES`.
    `returning="*"` by default; pass `None` or a column list to override.
    """
    table = _table_name(row)
    set_fields = [name for name in type(row).model_fields if name in row.model_fields_set]

    returning_sql = "" if returning is None else f" RETURNING {_format_columns(returning)}"

    if not set_fields:
        # Nothing explicitly set: let the database fill every column.
        return SqlQuery(sql=f"INSERT INTO {table} DEFAULT VALUES{returning_sql}", params=[])

    column_sql = ", ".join(set_fields)
    marker_sql = ", ".join(placeholder(pos) for pos in range(len(set_fields)))
    stmt = f"INSERT INTO {table} ({column_sql}) VALUES ({marker_sql}){returning_sql}"
    return SqlQuery(sql=stmt, params=[getattr(row, name) for name in set_fields])
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def select_one(
    row_class: type[Row],
    *,
    placeholder: Callable[[int], str],
    **filters: Any,
) -> SqlQuery:
    """Build a SELECT returning at most one row matching `filters`.

    Filter keywords are ANDed together. With no filters the query grabs
    an arbitrary first row (handy for tests and single-row tables).
    """
    column_sql = ", ".join(row_class.model_fields)
    parts = [f"SELECT {column_sql} FROM {_table_name(row_class)}"]

    condition = _eq_clauses(list(filters), placeholder=placeholder)
    if condition:
        parts.append(f"WHERE {condition}")
    parts.append("LIMIT 1")

    return SqlQuery(sql=" ".join(parts), params=list(filters.values()))
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def select_many(
    row_class: type[Row],
    *,
    placeholder: Callable[[int], str],
    limit: int | None = None,
    offset: int | None = None,
    order_by: str | list[str] | None = None,
    **filters: Any,
) -> SqlQuery:
    """Build a SELECT for multiple rows.

    Filter keywords are ANDed together; `limit`, `offset`, and
    `order_by` are keyword-only. `limit` and `offset` are bound as
    parameters; `order_by` is interpolated as a raw SQL fragment, so do
    not pass user-controlled values to it.
    """
    column_sql = ", ".join(row_class.model_fields)
    condition = _eq_clauses(list(filters), placeholder=placeholder)
    bound: list[Any] = list(filters.values())

    stmt = f"SELECT {column_sql} FROM {_table_name(row_class)}"
    if condition:
        stmt += f" WHERE {condition}"
    if order_by:
        stmt += f" ORDER BY {_format_columns(order_by)}"
    # LIMIT/OFFSET are appended after the filters, so their placeholder
    # index is simply the current end of the parameter list.
    for keyword, value in (("LIMIT", limit), ("OFFSET", offset)):
        if value is not None:
            bound.append(int(value))
            stmt += f" {keyword} {placeholder(len(bound) - 1)}"

    return SqlQuery(sql=stmt, params=bound)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def update(
    row: Row,
    *,
    placeholder: Callable[[int], str],
    returning: str | list[str] | None = None,
) -> SqlQuery:
    """Build an UPDATE for `row` keyed by its primary key.

    Only fields in `model_fields_set` enter the SET clause, giving
    partial-update semantics: untouched columns are preserved. WHERE
    covers every field in `__pk__`. Raises ValueError if any PK field
    is unset (no row to identify) or no non-PK field is set (nothing
    to update). Pass `returning="*"` to add RETURNING.
    """
    table = _table_name(row)
    _ensure_pk_set(row, "update")

    pk_fields = list(row.__pk__)
    pk_lookup = set(pk_fields)
    touched = row.model_fields_set
    set_fields = [
        name
        for name in type(row).model_fields
        if name in touched and name not in pk_lookup
    ]
    if not set_fields:
        raise ValueError(f"{type(row).__name__} has no non-PK fields to update")

    assignments = _eq_clauses(set_fields, placeholder=placeholder, sep=", ")
    # PK placeholders continue numbering after the SET parameters.
    predicate = _eq_clauses(pk_fields, placeholder=placeholder, start=len(set_fields))

    stmt = f"UPDATE {table} SET {assignments} WHERE {predicate}"
    if returning is not None:
        stmt += f" RETURNING {_format_columns(returning)}"

    values = [getattr(row, name) for name in set_fields]
    values += [getattr(row, name) for name in pk_fields]
    return SqlQuery(sql=stmt, params=values)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def delete(row: Row, *, placeholder: Callable[[int], str]) -> SqlQuery:
    """Build a DELETE for `row` keyed by its primary key.

    Raises ValueError if any PK field is unset (no row to identify).
    """
    table = _table_name(row)
    _ensure_pk_set(row, "delete")

    key_fields = list(row.__pk__)
    predicate = _eq_clauses(key_fields, placeholder=placeholder)
    return SqlQuery(
        sql=f"DELETE FROM {table} WHERE {predicate}",
        params=[getattr(row, name) for name in key_fields],
    )
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
# --- helpers ----------------------------------------------------------
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def _ensure_pk_set(row: Row, op: str) -> None:
|
|
164
|
+
"""Raise ValueError if any field in __pk__ is not in model_fields_set.
|
|
165
|
+
|
|
166
|
+
Without this check, an unset PK field would yield `WHERE id = NULL`,
|
|
167
|
+
which matches no rows: the caller's update or delete becomes a
|
|
168
|
+
silent no-op. Failing loudly catches "I forgot to set the PK".
|
|
169
|
+
"""
|
|
170
|
+
unset = [f for f in row.__pk__ if f not in row.model_fields_set]
|
|
171
|
+
if unset:
|
|
172
|
+
raise ValueError(
|
|
173
|
+
f"Cannot {op}: primary key field(s) {unset} not set on this "
|
|
174
|
+
f"{type(row).__name__}. Set them so the row can be identified."
|
|
175
|
+
)
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _table_name(row_or_class: Row | type[Row]) -> str:
|
|
179
|
+
cls = row_or_class if isinstance(row_or_class, type) else type(row_or_class)
|
|
180
|
+
table = getattr(cls, "__table__", None)
|
|
181
|
+
if not table:
|
|
182
|
+
raise ValueError(
|
|
183
|
+
f"{cls.__name__} has no __table__ attribute. "
|
|
184
|
+
"Set `__table__ = 'your_table_name'` on the Row subclass."
|
|
185
|
+
)
|
|
186
|
+
return table
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def _eq_clauses(
|
|
190
|
+
fields: list[str],
|
|
191
|
+
*,
|
|
192
|
+
placeholder: Callable[[int], str],
|
|
193
|
+
start: int = 0,
|
|
194
|
+
sep: str = " AND ",
|
|
195
|
+
) -> str:
|
|
196
|
+
"""Build `field1 = ? AND field2 = ? ...` clauses with the given placeholder style.
|
|
197
|
+
|
|
198
|
+
`start` is the 0-indexed position in the surrounding parameter list at
|
|
199
|
+
which this clause's parameters begin (matters for Postgres `$N` numbering).
|
|
200
|
+
"""
|
|
201
|
+
if not fields:
|
|
202
|
+
return ""
|
|
203
|
+
return sep.join(f"{f} = {placeholder(start + i)}" for i, f in enumerate(fields))
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def _format_columns(cols: str | list[str]) -> str:
|
|
207
|
+
if isinstance(cols, str):
|
|
208
|
+
return cols
|
|
209
|
+
return ", ".join(cols)
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: etchdb
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Minimal async DB layer for Python. Typed CRUD over Pydantic, raw SQL when you need it
|
|
5
|
+
Project-URL: Homepage, https://github.com/varjoranta/etchdb
|
|
6
|
+
Project-URL: Repository, https://github.com/varjoranta/etchdb
|
|
7
|
+
Project-URL: Issues, https://github.com/varjoranta/etchdb/issues
|
|
8
|
+
Author-email: Hannu Varjoranta <hannu@varjosoft.com>
|
|
9
|
+
License: MIT License
|
|
10
|
+
|
|
11
|
+
Copyright (c) 2026 Hannu Varjoranta
|
|
12
|
+
|
|
13
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
14
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
15
|
+
in the Software without restriction, including without limitation the rights
|
|
16
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
17
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
18
|
+
furnished to do so, subject to the following conditions:
|
|
19
|
+
|
|
20
|
+
The above copyright notice and this permission notice shall be included in all
|
|
21
|
+
copies or substantial portions of the Software.
|
|
22
|
+
|
|
23
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
24
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
25
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
26
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
27
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
28
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
29
|
+
SOFTWARE.
|
|
30
|
+
License-File: LICENSE
|
|
31
|
+
Keywords: async,asyncpg,database,orm,postgresql,psycopg,pydantic,sqlite
|
|
32
|
+
Classifier: Development Status :: 3 - Alpha
|
|
33
|
+
Classifier: Framework :: AsyncIO
|
|
34
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
35
|
+
Classifier: Programming Language :: Python :: 3
|
|
36
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
37
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
38
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
39
|
+
Classifier: Topic :: Database
|
|
40
|
+
Classifier: Topic :: Database :: Database Engines/Servers
|
|
41
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
42
|
+
Classifier: Typing :: Typed
|
|
43
|
+
Requires-Python: >=3.12
|
|
44
|
+
Requires-Dist: pydantic>=2.0.0
|
|
45
|
+
Provides-Extra: all
|
|
46
|
+
Requires-Dist: aiosqlite>=0.19.0; extra == 'all'
|
|
47
|
+
Requires-Dist: asyncpg>=0.30.0; extra == 'all'
|
|
48
|
+
Requires-Dist: psycopg[binary]>=3.2; extra == 'all'
|
|
49
|
+
Provides-Extra: asyncpg
|
|
50
|
+
Requires-Dist: asyncpg>=0.30.0; extra == 'asyncpg'
|
|
51
|
+
Provides-Extra: dev
|
|
52
|
+
Requires-Dist: pytest-asyncio>=0.21.0; extra == 'dev'
|
|
53
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
|
|
54
|
+
Requires-Dist: pytest>=8.0.0; extra == 'dev'
|
|
55
|
+
Requires-Dist: ruff>=0.5.0; extra == 'dev'
|
|
56
|
+
Requires-Dist: ty; extra == 'dev'
|
|
57
|
+
Provides-Extra: psycopg
|
|
58
|
+
Requires-Dist: psycopg[binary]>=3.2; extra == 'psycopg'
|
|
59
|
+
Provides-Extra: sqlite
|
|
60
|
+
Requires-Dist: aiosqlite>=0.19.0; extra == 'sqlite'
|
|
61
|
+
Description-Content-Type: text/markdown
|
|
62
|
+
|
|
63
|
+
# etchdb
|
|
64
|
+
|
|
65
|
+
Minimal async DB layer for Python. Typed CRUD over Pydantic. Raw SQL when you need it.
|
|
66
|
+
|
|
67
|
+
## Status
|
|
68
|
+
|
|
69
|
+
Alpha. v0.1.0 on PyPI. Built in public from day one; expect tightening between alpha releases.
|
|
70
|
+
|
|
71
|
+
## Example
|
|
72
|
+
|
|
73
|
+
```python
|
|
74
|
+
from etchdb import DB, Row
|
|
75
|
+
|
|
76
|
+
class User(Row):
|
|
77
|
+
__table__ = "users"
|
|
78
|
+
id: int | None = None # leave unset and the DB allocates it (SERIAL / INTEGER PK)
|
|
79
|
+
name: str
|
|
80
|
+
email: str | None = None
|
|
81
|
+
|
|
82
|
+
# Connect (driver inferred from URL scheme)
|
|
83
|
+
db = await DB.from_url("postgresql+asyncpg://user@host/db")
|
|
84
|
+
|
|
85
|
+
# Typed CRUD
|
|
86
|
+
alice = await db.insert(User(name="Alice")) # alice.id is now populated by the DB
|
|
87
|
+
user = await db.get(User, id=alice.id) # one row or None
|
|
88
|
+
users = await db.query(User) # list of rows
|
|
89
|
+
await db.update(User(id=alice.id, name="Alice B")) # partial: email is preserved
|
|
90
|
+
await db.delete(alice)
|
|
91
|
+
|
|
92
|
+
# Typed-result raw SQL (covers most joins)
|
|
93
|
+
users = await db.fetch_models(User, """
|
|
94
|
+
SELECT u.* FROM users u JOIN orders o ON o.user_id = u.id
|
|
95
|
+
WHERE o.created_at > $1
|
|
96
|
+
""", since)
|
|
97
|
+
|
|
98
|
+
# Untyped raw SQL (mirrors asyncpg)
|
|
99
|
+
rows = await db.fetch("SELECT count(*) FROM events WHERE site_id = $1", site_id)
|
|
100
|
+
val = await db.fetchval("SELECT count(*) FROM users")
|
|
101
|
+
await db.execute("UPDATE users SET active = false WHERE id = $1", uid)
|
|
102
|
+
|
|
103
|
+
# Transactions
|
|
104
|
+
async with db.transaction() as tx:
|
|
105
|
+
await tx.insert(User(name="Carol"))
|
|
106
|
+
await tx.execute("INSERT INTO audit_log (...) VALUES (...)")
|
|
107
|
+
|
|
108
|
+
# Inspect SQL before executing (etchdb's defining feature)
|
|
109
|
+
q = db.compose("get", User, id=1)
|
|
110
|
+
print(q.sql) # SELECT id, name, email FROM users WHERE id = $1
|
|
111
|
+
print(q.params) # [1]
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
`insert` only emits the columns you actually set, so an unset `id` lets the database allocate one (SERIAL or INTEGER PRIMARY KEY). `update` does the same: a column you didn't touch keeps its current value rather than being clobbered. An explicit `None` counts as set in both cases.
|
|
115
|
+
|
|
116
|
+
## Install
|
|
117
|
+
|
|
118
|
+
Drivers are optional extras. Install only what you use:
|
|
119
|
+
|
|
120
|
+
```bash
|
|
121
|
+
pip install etchdb[asyncpg] # asyncpg + Postgres
|
|
122
|
+
pip install etchdb[psycopg] # psycopg3 + Postgres
|
|
123
|
+
pip install etchdb[sqlite] # aiosqlite + SQLite
|
|
124
|
+
pip install etchdb[all] # everything
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
The top-level `etchdb` namespace depends only on Pydantic. Driver subpackages import their driver eagerly with a clear error if it is not installed.
|
|
128
|
+
|
|
129
|
+
```python
|
|
130
|
+
from etchdb import DB, Row # always safe
|
|
131
|
+
from etchdb.asyncpg import AsyncpgAdapter # requires asyncpg
|
|
132
|
+
|
|
133
|
+
# Bring your own pool
|
|
134
|
+
db = DB(AsyncpgAdapter.from_pool(my_pool))
|
|
135
|
+
```
|
|
136
|
+
|
|
137
|
+
## Why
|
|
138
|
+
|
|
139
|
+
Most Python ORMs are heavy, opinionated, and leak at the seams when you reach for pgvector or PostGIS. Raw asyncpg works, but every project ends up writing the same Pydantic-bridge code. etchdb closes that gap without becoming a framework.
|
|
140
|
+
|
|
141
|
+
The design also targets AI-assisted development: predictable verbs, no metaclass magic, no implicit context vars, no lazy loading, every typed operation produces inspectable SQL. Code an LLM can write correctly on the first attempt.
|
|
142
|
+
|
|
143
|
+
## Goals
|
|
144
|
+
|
|
145
|
+
- Driver-agnostic (asyncpg or psycopg3, swap freely)
|
|
146
|
+
- Multi-dialect (Postgres primary, SQLite secondary, MySQL maybe)
|
|
147
|
+
- Async native, no sync wrappers
|
|
148
|
+
- Typed CRUD via Pydantic; raw SQL as first-class escape valve
|
|
149
|
+
- Inspectable SQL: every typed op exposes its `(sql, params)` without executing
|
|
150
|
+
|
|
151
|
+
## Non-goals
|
|
152
|
+
|
|
153
|
+
- Query builder beyond simple CRUD (use raw SQL for joins)
|
|
154
|
+
- Implicit relationships, lazy loading, eager loading
|
|
155
|
+
- Sync support
|
|
156
|
+
- A second canonical way to do anything
|
|
157
|
+
|
|
158
|
+
## Migrations
|
|
159
|
+
|
|
160
|
+
Out of scope for v0.1. A small forward-only, file-based migration helper (no autogenerate, no rollback, no DAG) is planned for a later release. etchdb owns no schema state today, so any external tool slots in fine in the meantime: Alembic if you also use SQLAlchemy, dbmate or sqitch if you don't, or a few `db.execute` calls in your bootstrap path.
|
|
161
|
+
|
|
162
|
+
## Built with AI assistance
|
|
163
|
+
|
|
164
|
+
Built with Claude Code as the primary development assistant. Design, code, and commits are reviewed and shipped by Hannu Varjoranta. Building in public, openly using AI tooling, is part of the project's premise.
|
|
165
|
+
|
|
166
|
+
## License
|
|
167
|
+
|
|
168
|
+
MIT.
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
etchdb/__init__.py,sha256=BVNM4Hn-QZ5NBc90fjvFDbyCjDpqYIpquMEyTqDY1Q4,211
|
|
2
|
+
etchdb/adapter.py,sha256=C-T1c8lYsdFeDLDyBtE_Dh_tr_Hi5jbIcn6cPOpeVqE,1907
|
|
3
|
+
etchdb/db.py,sha256=RVGlX4aGZF9ruqnQ6m1QSSNQM8qD3WnsaZIzy-gii7g,6607
|
|
4
|
+
etchdb/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
5
|
+
etchdb/query.py,sha256=OWtbNKnYjBW0eBVuYLvW9ytUSoIKWyjM9kNRUZeZiBc,456
|
|
6
|
+
etchdb/row.py,sha256=A87TOBdLbNpe_EwH3_3364x00VkYJvL2q4aHodNOGyE,675
|
|
7
|
+
etchdb/aiosqlite/__init__.py,sha256=NJ3vJ8WZxRbZymhKUiAMY08DWgl70__2gwuKEwapPZ0,493
|
|
8
|
+
etchdb/aiosqlite/adapter.py,sha256=at9vu2Zpj7ythdb6MiLFZshDbdJx34tcYgqYoA9eGTM,5140
|
|
9
|
+
etchdb/asyncpg/__init__.py,sha256=fEB__qclkBuluXGiVVMxuy33GT3RexSB5WDeNoZhKro,474
|
|
10
|
+
etchdb/asyncpg/adapter.py,sha256=mqyq0ayAD2CT-QV8oV54azAxUzy1t30RsKghAh5Q-h8,3632
|
|
11
|
+
etchdb/sql/__init__.py,sha256=CA9ALtQeRgot3FSoFrj--Mk6ezbet98sMewukm55hVM,6976
|
|
12
|
+
etchdb-0.1.0.dist-info/METADATA,sha256=l9gU_wq0VT-CN0oLXv5-12qFFuLo04-HFJh4Ykmjjs4,7167
|
|
13
|
+
etchdb-0.1.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
14
|
+
etchdb-0.1.0.dist-info/licenses/LICENSE,sha256=RnIp7FS8GthGT8lKJ1TorahoQuzqoVVkMIdfNtLh788,1073
|
|
15
|
+
etchdb-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Hannu Varjoranta
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|