AxiomQuery 0.2.0.tar.gz → 0.3.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {axiomquery-0.2.0 → axiomquery-0.3.0}/CHANGELOG.md +16 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/PKG-INFO +1 -1
- {axiomquery-0.2.0 → axiomquery-0.3.0}/pyproject.toml +1 -1
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/__init__.py +1 -1
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/engine.py +76 -29
- axiomquery-0.3.0/tests/test_asearch.py +82 -0
- axiomquery-0.3.0/tests/test_search.py +97 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/.github/workflows/python-publish.yml +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/.gitignore +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/CONTRIBUTING.md +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/LICENSE +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/README.md +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/examples/example_async.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/examples/example_sync.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/aggregation.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/aggregation_parser.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/ast.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/compiler.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/compiler_aggregate.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/errors.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/operators.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/parser.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/py.typed +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/schema.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/tests/conftest.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/tests/test_async.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/tests/test_list.py +0 -0
- {axiomquery-0.2.0 → axiomquery-0.3.0}/tests/test_read_group.py +0 -0
{axiomquery-0.2.0 → axiomquery-0.3.0}/CHANGELOG.md

@@ -7,6 +7,22 @@ Versioning follows [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ---
 
+## [0.3.0] — 2026-04-26
+
+### Added
+
+- `search()` / `asearch()` — streaming iteration over large result sets via SQLAlchemy server-side cursors with `yield_per=1000`
+- Sync `search()` returns a Python iterator (consume with `for`); async `asearch()` returns an `AsyncScalarResult` (consume with `async for`)
+- Single-pass; no `limit` / `offset` (use `list()` / `alist()` for paginated/materialised access)
+- `DEFAULT_PREFETCH = 1000` module-level constant in `engine.py`
+- Internal `_build_stmt()` helper consolidating the `select + where + order_by + limit + offset` block shared by `list` / `alist` / `search` / `asearch` / `read_group`
+
+### Changed
+
+- `list()` / `alist()` continue to return a materialised `list` — behaviour and signature unchanged from 0.2.0 callers' perspective
+
+---
+
 ## [0.2.0] — 2026-04-13
 
 ### Added
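Taken together, the 0.3.0 entries describe two consumption patterns. A minimal sketch of both, assuming the `Order` model from the engine docstring, pre-configured `session` / `async_session` objects, and a hypothetical `archive()` handler (none of these are defined by the diff itself):

```python
from axiom_query.engine import QueryEngine

engine = QueryEngine(Order)

# Sync: search() returns a plain iterator; single pass, no limit/offset.
for order in engine.search(session, domain=[["status", "=", "CONFIRMED"]]):
    archive(order)  # hypothetical per-row handler

# Async: asearch() is awaited to open the stream, then consumed with async for.
async def archive_confirmed() -> None:
    result = await engine.asearch(async_session, domain=[["status", "=", "CONFIRMED"]])
    async for order in result:
        archive(order)
```

Callers that need pagination, `len()`, or indexing should stay on the materialised `list()` / `alist()` path, which 0.3.0 leaves unchanged.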
{axiomquery-0.2.0 → axiomquery-0.3.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: AxiomQuery
-Version: 0.2.0
+Version: 0.3.0
 Summary: Specification-based query and aggregation engine for SQLAlchemy 2.0 ORM models
 Project-URL: Source Code, https://github.com/Axiom-Dev-Labs/AxiomQuery
 Project-URL: Bug Tracker, https://github.com/Axiom-Dev-Labs/AxiomQuery/issues
{axiomquery-0.2.0 → axiomquery-0.3.0}/src/axiom_query/engine.py

@@ -15,6 +15,9 @@ from axiom_query.parser import parse_domain
 from axiom_query.schema import ModelSchema, derive_schema
 
 
+DEFAULT_PREFETCH = 1000
+
+
 def _get_dialect_name(session) -> str:
     """Extract dialect name from a sync or async SQLAlchemy session."""
     # AsyncSession has a .bind attribute (AsyncEngine)
@@ -38,7 +41,14 @@ class QueryEngine:
     Usage::
 
         engine = QueryEngine(Order)
 
+        # Materialised page
+        page = engine.list(session, domain=[["status", "=", "CONFIRMED"]], limit=20)
+
+        # Streaming iteration over every match (no len/indexing)
+        for order in engine.search(session, domain=[["status", "=", "CONFIRMED"]]):
+            process(order)
+
         groups, total = engine.read_group(session, groupby=["status"], aggregates=["__count"])
     """
 
@@ -46,6 +56,21 @@ class QueryEngine:
         self._model = model_class
         self._schema: ModelSchema = derive_schema(model_class)
 
+    # ── Statement builder ────────────────────────────────────────────────
+
+    def _build_stmt(self, domain, order_by, limit, offset):
+        stmt = select(self._model)
+        if domain is not None:
+            spec = parse_domain(domain)
+            stmt = stmt.where(compile_domain(spec, self._schema))
+        if order_by is not None:
+            stmt = self._apply_order_by(stmt, order_by)
+        if limit is not None:
+            stmt = stmt.limit(limit)
+        if offset is not None:
+            stmt = stmt.offset(offset)
+        return stmt
+
     # ── Sync API ─────────────────────────────────────────────────────────
 
     def list(
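For orientation, `_build_stmt()` chains the standard SQLAlchemy 2.0 builders; nothing engine-specific happens until the domain is parsed and compiled. A sketch of roughly what it evaluates to for one concrete input, reusing the `Order` model and the `status` / `total` columns that appear in the class docstring and tests:

```python
from sqlalchemy import select

# Approximate expansion of
#   _build_stmt(domain=[["status", "=", "CONFIRMED"]],
#               order_by=[["total", "desc"]], limit=20, offset=0)
stmt = (
    select(Order)
    .where(Order.status == "CONFIRMED")  # compiled from the parsed domain
    .order_by(Order.total.desc())
    .limit(20)
    .offset(0)
)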
@@ -56,24 +81,36 @@ class QueryEngine:
         offset: int | None = None,
         order_by: list | None = None,
     ) -> list:
-        """Return all records matching the optional domain filter."""
-        stmt = select(self._model)
-        if domain is not None:
-            spec = parse_domain(domain)
-            stmt = stmt.where(compile_domain(spec, self._schema))
-        if order_by is not None:
-            stmt = self._apply_order_by(stmt, order_by)
-        if limit is not None:
-            stmt = stmt.limit(limit)
-        if offset is not None:
-            stmt = stmt.offset(offset)
-        result = session.execute(stmt)
-        return list(result.scalars().all())
+        """Return all records matching the optional domain filter as a list.
+
+        Materialises the full result. Use ``search()`` for streaming over large
+        result sets.
+        """
+        stmt = self._build_stmt(domain, order_by, limit, offset)
+        return list(session.execute(stmt).scalars().all())
 
+    def search(
+        self,
+        session: Session,
+        domain: Any = None,
+        order_by: list | None = None,
+    ):
+        """Stream records for batch processing.
+
+        Returns an iterator yielding ORM instances from a server-side cursor,
+        fetched in batches of ``DEFAULT_PREFETCH`` (1000) rows. Single-pass —
+        iterate once and don't store the iterator for re-use.
+
+        No ``limit`` / ``offset``: this method is for processing every matching
+        row. Use ``list()`` if you need pagination, ``len()``, or indexing.
+
+        Driver note: true streaming requires a database driver with server-side
+        cursor support (psycopg2, asyncpg). SQLite degrades to client-side
+        iteration but remains correct.
+        """
+        stmt = self._build_stmt(domain, order_by, limit=None, offset=None)
+        streaming_stmt = stmt.execution_options(yield_per=DEFAULT_PREFETCH)
+        return iter(session.scalars(streaming_stmt))
 
     def read_group(
         self,
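The sync path leans on SQLAlchemy's `yield_per` execution option, which enables `stream_results` on drivers that support server-side cursors. A minimal end-to-end sketch, assuming a PostgreSQL DSN and the `Order` model (both placeholders, not part of the package):

```python
from sqlalchemy import create_engine
from sqlalchemy.orm import Session

from axiom_query.engine import QueryEngine

# psycopg2 supports the server-side cursors search() relies on;
# the same code runs on SQLite, just with client-side fetching.
db = create_engine("postgresql+psycopg2://user:pass@localhost/shop")

with Session(db) as session:
    q = QueryEngine(Order)
    total = 0
    # Rows arrive in DEFAULT_PREFETCH-sized batches, so memory stays
    # flat no matter how many orders match the domain.
    for order in q.search(session, domain=[["status", "=", "CONFIRMED"]]):
        total += order.total
    print(total)
```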
@@ -128,24 +165,34 @@ class QueryEngine:
         offset: int | None = None,
         order_by: list | None = None,
     ) -> list:
-        """Async variant of list()."""
-        stmt = select(self._model)
-        if domain is not None:
-            spec = parse_domain(domain)
-            stmt = stmt.where(compile_domain(spec, self._schema))
-        if order_by is not None:
-            stmt = self._apply_order_by(stmt, order_by)
-        if limit is not None:
-            stmt = stmt.limit(limit)
-        if offset is not None:
-            stmt = stmt.offset(offset)
-        result = await session.execute(stmt)
-        return list(result.scalars().all())
+        """Async variant of ``list()`` — returns a materialised list."""
+        stmt = self._build_stmt(domain, order_by, limit, offset)
+        result = await session.execute(stmt)
+        return list(result.scalars().all())
 
+    async def asearch(
+        self,
+        session,
+        domain: Any = None,
+        order_by: list | None = None,
+    ):
+        """Async variant of ``search()`` — returns an async iterator.
+
+        Consume with ``async for``::
+
+            async for record in await engine.asearch(session, domain=...):
+                process(record)
+
+        Streams ORM instances in batches of ``DEFAULT_PREFETCH`` (1000) rows
+        from a server-side cursor. Single-pass; no ``limit`` / ``offset``.
+
+        Driver note: true streaming requires a server-side-cursor-capable
+        driver (asyncpg). aiosqlite iterates correctly but without driver-level
+        streaming.
+        """
+        stmt = self._build_stmt(domain, order_by, limit=None, offset=None)
+        streaming_stmt = stmt.execution_options(yield_per=DEFAULT_PREFETCH)
+        return await session.stream_scalars(streaming_stmt)
 
     async def aread_group(
         self,
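The async variant has the same shape plus one extra `await`: `asearch()` is awaited to open the stream, and the returned result is consumed with `async for`. A sketch under matching assumptions (asyncpg DSN and `Order` placeholder):

```python
import asyncio

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine

from axiom_query.engine import QueryEngine

# asyncpg gives driver-level streaming; per the docstring above,
# aiosqlite iterates correctly but without true server-side cursors.
adb = create_async_engine("postgresql+asyncpg://user:pass@localhost/shop")

async def main() -> None:
    async with AsyncSession(adb) as session:
        q = QueryEngine(Order)
        result = await q.asearch(session, order_by=[["total", "desc"]])
        async for order in result:  # batches of DEFAULT_PREFETCH rows
            print(order.id, order.total)

asyncio.run(main())
```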
axiomquery-0.3.0/tests/test_asearch.py

@@ -0,0 +1,82 @@
+"""Tests for QueryEngine.asearch() — async streaming iteration."""
+
+from __future__ import annotations
+
+import pytest
+from sqlalchemy import event
+
+from axiom_query.engine import DEFAULT_PREFETCH
+from conftest import Order
+
+
+@pytest.mark.asyncio
+async def test_asearch_iterates_all_records(async_session, engine):
+    result = await engine.asearch(async_session)
+    records = [r async for r in result]
+    assert len(records) == 3
+    assert all(isinstance(r, Order) for r in records)
+
+
+@pytest.mark.asyncio
+async def test_asearch_with_domain(async_session, engine):
+    result = await engine.asearch(async_session, domain=[["status", "=", "CONFIRMED"]])
+    records = [r async for r in result]
+    assert len(records) == 2
+    assert all(r.status == "CONFIRMED" for r in records)
+
+
+@pytest.mark.asyncio
+async def test_asearch_with_m2o_domain(async_session, engine):
+    result = await engine.asearch(async_session, domain=[["customer.name", "=", "Alice"]])
+    records = [r async for r in result]
+    assert len(records) == 1
+    assert records[0].id == 1
+
+
+@pytest.mark.asyncio
+async def test_asearch_supports_order_by(async_session, engine):
+    result = await engine.asearch(async_session, order_by=[["total", "desc"]])
+    records = [r async for r in result]
+    assert [r.total for r in records] == [200, 100, 50]
+
+
+@pytest.mark.asyncio
+async def test_asearch_empty_result(async_session, engine):
+    result = await engine.asearch(async_session, domain=[["status", "=", "NONEXISTENT"]])
+    records = [r async for r in result]
+    assert records == []
+
+
+@pytest.mark.asyncio
+async def test_asearch_no_pagination_args(async_session, engine):
+    with pytest.raises(TypeError):
+        await engine.asearch(async_session, limit=10)
+    with pytest.raises(TypeError):
+        await engine.asearch(async_session, offset=5)
+
+
+@pytest.mark.asyncio
+async def test_asearch_uses_yield_per(async_session, engine, seeded_async_engine):
+    """Verify the SQL is issued with yield_per=DEFAULT_PREFETCH."""
+    captured = []
+
+    def listener(conn, cursor, statement, parameters, context, executemany):
+        captured.append(context.execution_options)
+
+    # AsyncEngine wraps a sync Engine; events attach to the sync engine
+    sync_engine = seeded_async_engine.sync_engine
+    event.listen(sync_engine, "before_cursor_execute", listener)
+    try:
+        result = await engine.asearch(async_session)
+        records = [r async for r in result]
+        assert len(records) == 3
+    finally:
+        event.remove(sync_engine, "before_cursor_execute", listener)
+
+    assert len(captured) >= 1, f"expected >=1 statement, got {len(captured)}"
+    # Find our SELECT statement (there may be other queries on the connection)
+    yield_per_stmts = [opts for opts in captured if opts.get("yield_per") == DEFAULT_PREFETCH]
+    assert len(yield_per_stmts) == 1, (
+        f"expected 1 statement with yield_per={DEFAULT_PREFETCH}, "
+        f"got {len(yield_per_stmts)}"
+    )
axiomquery-0.3.0/tests/test_search.py

@@ -0,0 +1,97 @@
+"""Tests for QueryEngine.search() — streaming iteration."""
+
+from __future__ import annotations
+
+from collections.abc import Iterator
+
+import pytest
+from sqlalchemy import event
+
+from axiom_query.engine import DEFAULT_PREFETCH
+from conftest import Order
+
+
+def test_search_returns_iterator(session, engine):
+    result = engine.search(session)
+    assert isinstance(result, Iterator)
+
+
+def test_search_iterates_all_records(session, engine):
+    records = list(engine.search(session))
+    assert len(records) == 3
+    assert all(isinstance(r, Order) for r in records)
+
+
+def test_search_with_domain(session, engine):
+    records = list(engine.search(session, domain=[["status", "=", "CONFIRMED"]]))
+    assert len(records) == 2
+    assert all(r.status == "CONFIRMED" for r in records)
+
+
+def test_search_with_or_domain(session, engine):
+    records = list(
+        engine.search(
+            session,
+            domain={"or": [["status", "=", "CONFIRMED"], ["status", "=", "DRAFT"]]},
+        )
+    )
+    assert len(records) == 3
+
+
+def test_search_with_m2o_domain(session, engine):
+    records = list(engine.search(session, domain=[["customer.name", "=", "Alice"]]))
+    assert len(records) == 1
+    assert records[0].id == 1
+
+
+def test_search_supports_order_by(session, engine):
+    records = list(engine.search(session, order_by=[["total", "desc"]]))
+    assert [r.total for r in records] == [200, 100, 50]
+
+
+def test_search_empty_result(session, engine):
+    records = list(engine.search(session, domain=[["status", "=", "NONEXISTENT"]]))
+    assert records == []
+
+
+def test_search_no_pagination_args(session, engine):
+    with pytest.raises(TypeError):
+        engine.search(session, limit=10)
+    with pytest.raises(TypeError):
+        engine.search(session, offset=5)
+
+
+def test_search_uses_yield_per(session, engine, seeded_engine):
+    """Verify the SQL is issued with yield_per=DEFAULT_PREFETCH."""
+    captured = []
+
+    def listener(conn, cursor, statement, parameters, context, executemany):
+        captured.append(context.execution_options)
+
+    event.listen(seeded_engine, "before_cursor_execute", listener)
+    try:
+        list(engine.search(session))
+    finally:
+        event.remove(seeded_engine, "before_cursor_execute", listener)
+
+    assert len(captured) == 1, f"expected 1 statement, got {len(captured)}"
+    assert captured[0].get("yield_per") == DEFAULT_PREFETCH
+
+
+def test_search_is_single_pass(session, engine):
+    """Iterating the same result a second time yields nothing (iterator exhausted)."""
+    result = engine.search(session)
+    first_pass = list(result)
+    second_pass = list(result)
+    assert len(first_pass) == 3
+    assert second_pass == []
+
+
+def test_search_break_does_not_block_session(session, engine):
+    """Breaking out of iteration should not leave the session in a bad state."""
+    for record in engine.search(session):
+        if record.id == 1:
+            break
+    # Session should still be usable
+    records = engine.list(session)
+    assert len(records) == 3