AxiomQuery 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- axiom_query/__init__.py +22 -0
- axiom_query/aggregation.py +111 -0
- axiom_query/aggregation_parser.py +186 -0
- axiom_query/ast.py +64 -0
- axiom_query/compiler.py +207 -0
- axiom_query/compiler_aggregate.py +358 -0
- axiom_query/engine.py +210 -0
- axiom_query/errors.py +12 -0
- axiom_query/operators.py +30 -0
- axiom_query/parser.py +102 -0
- axiom_query/py.typed +0 -0
- axiom_query/schema.py +55 -0
- axiomquery-0.1.0.dist-info/METADATA +232 -0
- axiomquery-0.1.0.dist-info/RECORD +16 -0
- axiomquery-0.1.0.dist-info/WHEEL +4 -0
- axiomquery-0.1.0.dist-info/licenses/LICENSE +21 -0
axiom_query/__init__.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""axiom_query — standalone specification-based query engine for SQLAlchemy ORM models."""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.1.0"
|
|
4
|
+
|
|
5
|
+
from axiom_query.engine import QueryEngine
|
|
6
|
+
from axiom_query.errors import QueryError
|
|
7
|
+
from axiom_query.operators import Op
|
|
8
|
+
from axiom_query.ast import Condition, And, Or, Not, Bool, QuerySpec
|
|
9
|
+
from axiom_query.parser import parse_domain
|
|
10
|
+
|
|
11
|
+
__all__ = [
|
|
12
|
+
"QueryEngine",
|
|
13
|
+
"QueryError",
|
|
14
|
+
"Op",
|
|
15
|
+
"Condition",
|
|
16
|
+
"And",
|
|
17
|
+
"Or",
|
|
18
|
+
"Not",
|
|
19
|
+
"Bool",
|
|
20
|
+
"QuerySpec",
|
|
21
|
+
"parse_domain",
|
|
22
|
+
]
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""Immutable AST nodes for read_group (GROUP BY + aggregation) queries."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from enum import Enum
|
|
7
|
+
|
|
8
|
+
from axiom_query.ast import QuerySpec
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class AggFunc(str, Enum):
    """Supported SQL aggregate functions."""

    COUNT = "count"
    SUM = "sum"
    AVG = "avg"
    MIN = "min"
    MAX = "max"

    @classmethod
    def from_str(cls, s: str) -> AggFunc:
        """Return the member whose value matches *s*, case-insensitively.

        Raises:
            ValueError: if *s* names no known aggregate function; the message
                lists every valid name.
        """
        try:
            return cls(s.lower())
        except ValueError:
            valid = ", ".join(m.value for m in cls)
            # Suppress the implicit exception context (PEP 409): the internal
            # "'x' is not a valid AggFunc" message adds nothing over ours.
            raise ValueError(
                f"Unknown aggregate function '{s}'. Valid: {valid}"
            ) from None
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class DateGranularity(str, Enum):
    """Date truncation granularities for GROUP BY on date/datetime fields."""

    DAY = "day"
    WEEK = "week"
    MONTH = "month"
    QUARTER = "quarter"
    YEAR = "year"

    @classmethod
    def from_str(cls, s: str) -> DateGranularity:
        """Return the member whose value matches *s*, case-insensitively.

        Raises:
            ValueError: if *s* names no known granularity; the message lists
                every valid name.
        """
        try:
            return cls(s.lower())
        except ValueError:
            valid = ", ".join(m.value for m in cls)
            # Suppress the implicit exception context (PEP 409): our message
            # already lists the valid values.
            raise ValueError(
                f"Unknown date granularity '{s}'. Valid: {valid}"
            ) from None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
@dataclass(frozen=True)
class AggregateSpec:
    """One aggregate expression in the SELECT list."""

    # Field to aggregate — a plain name or a dotted child path
    # ("lines.qty"); None for the bare row-count spec ("__count").
    field_path: str | None
    # Aggregate function to apply.
    function: AggFunc
    # Output column name used in result rows (and referencable from
    # HAVING / order_by).
    alias: str
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass(frozen=True)
class GroupBySpec:
    """One GROUP BY expression."""

    field_path: str
    granularity: DateGranularity | None = None

    @property
    def alias(self) -> str:
        """Result-column name: dots become '__'; granularity is appended if set."""
        segments = self.field_path.split(".")
        if self.granularity is not None:
            segments = segments + [self.granularity.value]
        return "__".join(segments)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass(frozen=True)
class Pagination:
    """Presentation concerns for grouped queries."""

    # (result alias, "asc"|"desc") pairs, applied in order; None means no
    # explicit ordering was requested.
    order_by: list[tuple[str, str]] | None = None
    # Maximum number of groups to return, or None for no limit.
    limit: int | None = None
    # Number of leading groups to skip, or None for no offset.
    offset: int | None = None
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
@dataclass(frozen=True)
class ReadGroupQuery:
    """Complete read_group query specification."""

    groupby: list[GroupBySpec]
    aggregates: list[AggregateSpec]
    domain: QuerySpec | None = None
    having: QuerySpec | None = None

    @property
    def alias_map(self) -> dict[str, AggregateSpec | GroupBySpec]:
        """Map each output alias to the spec that produces it.

        Aggregates are inserted after groupby specs, so on an alias collision
        the aggregate wins (same as the original insertion order).
        """
        mapping: dict[str, AggregateSpec | GroupBySpec] = {
            spec.alias: spec for spec in self.groupby
        }
        mapping.update((spec.alias, spec) for spec in self.aggregates)
        return mapping

    @property
    def referenced_child_entities(self) -> set[str]:
        """Names of child relations referenced by any dotted field path."""
        paths = [g.field_path for g in self.groupby]
        paths += [a.field_path for a in self.aggregates if a.field_path]
        return {p.split(".", 1)[0] for p in paths if "." in p}
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"""Parse frontend JSON read_group requests into ReadGroupQuery AST nodes."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from axiom_query.aggregation import (
|
|
8
|
+
AggFunc,
|
|
9
|
+
AggregateSpec,
|
|
10
|
+
DateGranularity,
|
|
11
|
+
GroupBySpec,
|
|
12
|
+
Pagination,
|
|
13
|
+
ReadGroupQuery,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def parse_read_group(raw: dict[str, Any]) -> tuple[ReadGroupQuery, Pagination]:
    """Parse a JSON read_group request body into a (ReadGroupQuery, Pagination) tuple.

    Recognized keys: ``groupby`` (list of spec strings), ``aggregates``
    (list of spec strings, default ``["__count"]``), ``domain`` / ``having``
    (nested domain expressions), ``order_by`` (list of alias / [alias, dir]
    items), ``limit`` and ``offset`` (non-negative ints).

    Raises:
        QueryError: code "INVALID_READ_GROUP" (or "INVALID_AGGREGATION" from
            the spec parsers) for any malformed input.
    """
    from axiom_query.errors import QueryError

    if not isinstance(raw, dict):
        raise QueryError(
            "INVALID_READ_GROUP",
            f"read_group body must be a dict, got {type(raw).__name__}",
        )

    raw_groupby = raw.get("groupby", [])
    if not isinstance(raw_groupby, list):
        raise QueryError("INVALID_READ_GROUP", "groupby must be a list of strings")
    groupby = [parse_groupby_spec(s) for s in raw_groupby]

    raw_aggregates = raw.get("aggregates", ["__count"])
    if not isinstance(raw_aggregates, list):
        raise QueryError("INVALID_READ_GROUP", "aggregates must be a list of strings")
    if not raw_aggregates:
        raise QueryError(
            "INVALID_READ_GROUP",
            "aggregates must contain at least one spec (e.g. '__count')",
        )
    aggregates = [parse_aggregate_spec(s) for s in raw_aggregates]

    # Deferred import (parser depends on sibling modules); done once for
    # both domain and having instead of once per branch.
    from axiom_query.parser import parse_domain

    raw_domain = raw.get("domain")
    domain = parse_domain(raw_domain) if raw_domain is not None else None

    raw_having = raw.get("having")
    having = parse_domain(raw_having) if raw_having is not None else None

    order_by = None
    raw_order = raw.get("order_by")
    if raw_order is not None:
        if not isinstance(raw_order, list):
            raise QueryError(
                "INVALID_READ_GROUP",
                "order_by must be a list of [alias, direction] pairs",
            )
        order_by = _parse_order_by(raw_order)

    limit = raw.get("limit")
    # bool is a subclass of int, so reject it explicitly: a JSON
    # ``"limit": true`` would otherwise silently behave as limit=1.
    if limit is not None and (
        isinstance(limit, bool) or not isinstance(limit, int) or limit < 0
    ):
        raise QueryError("INVALID_READ_GROUP", "limit must be a non-negative integer")

    offset = raw.get("offset")
    if offset is not None and (
        isinstance(offset, bool) or not isinstance(offset, int) or offset < 0
    ):
        raise QueryError("INVALID_READ_GROUP", "offset must be a non-negative integer")

    query = ReadGroupQuery(
        groupby=groupby,
        aggregates=aggregates,
        domain=domain,
        having=having,
    )

    pagination = Pagination(
        order_by=order_by,
        limit=limit,
        offset=offset,
    )

    return query, pagination
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def parse_aggregate_spec(spec: str) -> AggregateSpec:
    """Parse a single aggregate spec string.

    Accepted forms:
      * ``"__count"``        — row count, aliased ``__count``.
      * ``"field:function"`` — e.g. ``"amount:sum"``; dotted child paths are
        allowed (``"lines.qty:sum"``). The alias is the field path with dots
        replaced by ``__``, suffixed with ``__<function>``.

    Raises:
        QueryError: code "INVALID_AGGREGATION" for malformed specs or
            unknown aggregate functions.
    """
    from axiom_query.errors import QueryError

    if not isinstance(spec, str) or not spec.strip():
        raise QueryError(
            "INVALID_AGGREGATION",
            f"Aggregate spec must be a non-empty string, got {spec!r}",
        )

    spec = spec.strip()

    # Special case: bare row count with no target field.
    if spec == "__count":
        return AggregateSpec(field_path=None, function=AggFunc.COUNT, alias="__count")

    if ":" not in spec:
        raise QueryError(
            "INVALID_AGGREGATION",
            f"Aggregate spec must be 'field:function' or '__count', got '{spec}'",
        )

    # rsplit so dotted field paths keep their dots; only the last ':' splits.
    parts = spec.rsplit(":", 1)
    if len(parts) != 2 or not parts[0] or not parts[1]:
        raise QueryError(
            "INVALID_AGGREGATION",
            f"Invalid aggregate spec format: '{spec}'. Expected 'field:function'.",
        )

    field_path, func_str = parts

    try:
        func = AggFunc.from_str(func_str)
    except ValueError as e:
        # Chain the cause so tracebacks show the original validation error.
        raise QueryError("INVALID_AGGREGATION", str(e)) from e

    alias = f"{field_path.replace('.', '__')}__{func.value}"

    return AggregateSpec(field_path=field_path, function=func, alias=alias)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def parse_groupby_spec(spec: str) -> GroupBySpec:
    """Parse a single groupby spec string.

    Accepted forms:
      * ``"field"``              — plain (or dotted child) field path.
      * ``"field:granularity"``  — date/datetime field truncated to a
        DateGranularity, e.g. ``"created_at:month"``.

    Raises:
        QueryError: code "INVALID_AGGREGATION" for malformed specs or
            unknown granularities.
    """
    from axiom_query.errors import QueryError

    if not isinstance(spec, str) or not spec.strip():
        raise QueryError(
            "INVALID_AGGREGATION",
            f"Groupby spec must be a non-empty string, got {spec!r}",
        )

    spec = spec.strip()

    # No ':' means a plain field path with no date truncation.
    if ":" not in spec:
        return GroupBySpec(field_path=spec)

    # rsplit so dotted field paths keep their dots; only the last ':' splits.
    parts = spec.rsplit(":", 1)
    if len(parts) != 2 or not parts[0] or not parts[1]:
        raise QueryError(
            "INVALID_AGGREGATION",
            f"Invalid groupby spec format: '{spec}'. Expected 'field:granularity'.",
        )

    field_path, granularity_str = parts

    try:
        granularity = DateGranularity.from_str(granularity_str)
    except ValueError as e:
        # Chain the cause so tracebacks show the original validation error.
        raise QueryError("INVALID_AGGREGATION", str(e)) from e

    return GroupBySpec(field_path=field_path, granularity=granularity)
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def _parse_order_by(raw: list[Any]) -> list[tuple[str, str]]:
    """Normalize raw order_by items into (alias, direction) tuples."""
    from axiom_query.errors import QueryError

    normalized: list[tuple[str, str]] = []
    for entry in raw:
        # A bare string means ascending order on that alias.
        if isinstance(entry, str):
            normalized.append((entry, "asc"))
            continue

        # Otherwise it must be a two-item [alias, direction] pair.
        if not isinstance(entry, (list, tuple)) or len(entry) != 2:
            raise QueryError(
                "INVALID_READ_GROUP",
                "order_by items must be 'alias' or ['alias', 'asc'|'desc']",
            )

        alias, direction = entry
        if not isinstance(alias, str):
            raise QueryError(
                "INVALID_READ_GROUP",
                f"order_by alias must be a string, got {type(alias).__name__}",
            )
        direction = str(direction).lower()
        if direction not in ("asc", "desc"):
            raise QueryError(
                "INVALID_READ_GROUP",
                f"order_by direction must be 'asc' or 'desc', got '{direction}'",
            )
        normalized.append((alias, direction))
    return normalized
|
axiom_query/ast.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
"""Immutable AST nodes for the query specification DSL."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Any, Union
|
|
7
|
+
|
|
8
|
+
from axiom_query.operators import Op
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass(frozen=True)
class _Composable:
    """Mixin giving every node ``&``, ``|`` and ``~`` composition."""

    def __and__(self, other: QuerySpec) -> And:
        return And(self, other)

    def __or__(self, other: QuerySpec) -> Or:
        return Or(self, other)

    def __invert__(self) -> Not:
        return Not(self)


@dataclass(frozen=True)
class Condition(_Composable):
    """Leaf node: ``field_path <operator> value``."""

    field_path: str
    operator: Op
    value: Any


@dataclass(frozen=True)
class And(_Composable):
    """Logical AND of two sub-specs."""

    left: QuerySpec
    right: QuerySpec


@dataclass(frozen=True)
class Or(_Composable):
    """Logical OR of two sub-specs."""

    left: QuerySpec
    right: QuerySpec


@dataclass(frozen=True)
class Not(_Composable):
    """Logical negation of a sub-spec."""

    operand: QuerySpec


@dataclass(frozen=True)
class Bool(_Composable):
    """Constant true/false leaf."""

    value: bool


# Closed union of every node type a spec tree may contain.
QuerySpec = Union[Condition, And, Or, Not, Bool]
|
axiom_query/compiler.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
"""Compile a QuerySpec AST into SQLAlchemy WHERE clause expressions."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from datetime import date, datetime
|
|
6
|
+
from typing import Any, Callable
|
|
7
|
+
|
|
8
|
+
from sqlalchemy import Date, DateTime, and_, exists, not_, or_, select, true, false
|
|
9
|
+
from sqlalchemy.sql.expression import ColumnElement
|
|
10
|
+
|
|
11
|
+
from axiom_query.ast import And, Bool, Condition, Not, Or, QuerySpec
|
|
12
|
+
from axiom_query.operators import Op
|
|
13
|
+
from axiom_query.schema import ModelSchema
|
|
14
|
+
|
|
15
|
+
SAResolver = Callable[[str, Op, Any], ColumnElement]
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _walk_ast(spec: QuerySpec, resolver: SAResolver) -> ColumnElement:
    """Recursively compile a QuerySpec AST using the given resolver.

    Boolean connectives (And/Or/Not) map one-to-one onto SQLAlchemy's
    ``and_``/``or_``/``not_``; Condition leaves are delegated to *resolver*,
    which decides how a field path becomes a column expression (this is what
    distinguishes WHERE compilation from HAVING compilation).
    """
    match spec:
        case Bool(value=True):
            # Constant TRUE leaf.
            return true()
        case Bool(value=False):
            # Constant FALSE leaf.
            return false()
        case And(left=left, right=right):
            return and_(
                _walk_ast(left, resolver),
                _walk_ast(right, resolver),
            )
        case Or(left=left, right=right):
            return or_(
                _walk_ast(left, resolver),
                _walk_ast(right, resolver),
            )
        case Not(operand=operand):
            return not_(_walk_ast(operand, resolver))
        case Condition(field_path=fp, operator=op, value=val):
            return resolver(fp, op, val)
        case _:
            # Defensive: unreachable for well-formed QuerySpec values.
            raise TypeError(f"Unknown QuerySpec node: {type(spec)}")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _make_alias_resolver(alias_map: dict[str, ColumnElement]) -> SAResolver:
    """Create a HAVING resolver that resolves field paths against aggregate alias expressions."""
    from axiom_query.errors import QueryError

    def resolve(alias: str, op: Op, val: Any) -> ColumnElement:
        expr = alias_map.get(alias)
        if expr is None:
            # Unknown alias: report every valid choice, sorted for stability.
            raise QueryError(
                "INVALID_HAVING_FIELD",
                f"HAVING field '{alias}' is not a groupby or aggregate alias. "
                f"Available: {', '.join(sorted(alias_map))}",
            )
        return _apply_having_operator(expr, op, val)

    return resolve
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _resolve_column(schema: ModelSchema, field_path: str) -> ColumnElement:
    """Resolve a field path to its SA column, or raise QueryError."""
    from axiom_query.errors import QueryError

    # Plain (non-dotted) path: look up directly on the root table.
    if "." not in field_path:
        if schema.columns.get(field_path) is None:
            raise QueryError(
                "INVALID_FILTER_FIELD",
                f"No field '{field_path}' on {schema.model_class.__name__}. "
                f"Available: {', '.join(schema.columns.keys())}",
            )
        return schema.table.c[field_path]

    # Dotted path: first segment names a child relation, the rest a field on it.
    child_name, field_name = field_path.split(".", 1)
    child = schema.children.get(child_name)
    if child is None:
        raise QueryError(
            "INVALID_FILTER_FIELD",
            f"No child relation '{child_name}' on {schema.model_class.__name__}. "
            f"Available: {', '.join(schema.children.keys()) or 'none'}",
        )
    if child.columns.get(field_name) is None:
        raise QueryError(
            "INVALID_FILTER_FIELD",
            f"No field '{field_name}' on child '{child_name}'",
        )
    return child.table.c[field_name]
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _make_table_resolver(schema: ModelSchema) -> SAResolver:
    """Create a WHERE resolver that resolves field paths against table columns."""

    def resolve(fp: str, op: Op, val: Any) -> ColumnElement:
        if "." in fp:
            # Dotted path targets a child relation: compile to an EXISTS
            # subquery so a parent row matches when ANY of its child rows
            # satisfies the condition.
            child_name, field_name = fp.split(".", 1)
            from axiom_query.errors import QueryError

            child = schema.children.get(child_name)
            if child is None:
                raise QueryError(
                    "INVALID_FILTER_FIELD",
                    f"No child relation '{child_name}' on {schema.model_class.__name__}",
                )
            fk_col = child.table.c[child.fk_field]
            # NOTE(review): an unknown child field raises a bare KeyError here
            # rather than the QueryError that _resolve_column would give —
            # confirm whether that asymmetry is intended.
            field_col = child.table.c[field_name]
            condition = _apply_operator(field_col, op, val)
            # Correlate on the parent PK; assumes the parent table's primary
            # key column is named 'id' — TODO confirm against ModelSchema.
            return exists(
                select(1)
                .select_from(child.table)
                .where(and_(fk_col == schema.table.c.id, condition))
            )
        else:
            # Plain field on the root table; validation lives in
            # _resolve_column.
            col = _resolve_column(schema, fp)
            return _apply_operator(col, op, val)

    return resolve
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def compile_domain(spec: QuerySpec, schema: ModelSchema) -> ColumnElement:
    """Compile a QuerySpec AST into a SQLAlchemy WHERE clause."""
    # Walk the tree with a resolver bound to this schema's tables.
    return _walk_ast(spec, _make_table_resolver(schema))
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _coerce_value(col: Any, value: Any) -> Any:
|
|
128
|
+
"""Coerce a string value to the column's Python type when needed."""
|
|
129
|
+
if not isinstance(value, str):
|
|
130
|
+
return value
|
|
131
|
+
col_type = getattr(col, "type", None)
|
|
132
|
+
if col_type is None:
|
|
133
|
+
return value
|
|
134
|
+
if isinstance(col_type, Date) and not isinstance(col_type, DateTime):
|
|
135
|
+
try:
|
|
136
|
+
return date.fromisoformat(value)
|
|
137
|
+
except ValueError:
|
|
138
|
+
try:
|
|
139
|
+
return datetime.fromisoformat(value).date()
|
|
140
|
+
except ValueError:
|
|
141
|
+
return value
|
|
142
|
+
if isinstance(col_type, DateTime):
|
|
143
|
+
try:
|
|
144
|
+
return datetime.fromisoformat(value)
|
|
145
|
+
except ValueError:
|
|
146
|
+
return value
|
|
147
|
+
return value
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _apply_operator(col: Any, op: Op, value: Any) -> ColumnElement:
    """Apply a comparison operator to a SQLAlchemy column.

    String values are first coerced to the column's date/datetime type where
    applicable (element-wise for IN / NOT_IN lists).
    """
    value = _coerce_value(col, value)
    if op == Op.EQ:
        return col == value
    if op == Op.NE:
        return col != value
    if op == Op.GT:
        return col > value
    if op == Op.LT:
        return col < value
    if op == Op.GTE:
        return col >= value
    if op == Op.LTE:
        return col <= value
    if op == Op.IN:
        if isinstance(value, list):
            value = [_coerce_value(col, v) for v in value]
        return col.in_(value)
    if op == Op.NOT_IN:
        if isinstance(value, list):
            value = [_coerce_value(col, v) for v in value]
        return col.not_in(value)
    if op == Op.LIKE:
        return col.like(value)
    if op == Op.ILIKE:
        return col.ilike(value)
    if op == Op.IS_NULL:
        # Truthy value asks for IS NULL, falsy for IS NOT NULL.
        return col.is_(None) if value else col.is_not(None)
    raise ValueError(f"Unsupported operator: {op}")
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def _apply_having_operator(expr: ColumnElement, op: Op, value: Any) -> ColumnElement:
    """Apply a comparison operator in HAVING context.

    Only the comparison and membership operators are allowed here; pattern
    matching and NULL tests on aggregate expressions are rejected.
    """
    if op == Op.EQ:
        return expr == value
    if op == Op.NE:
        return expr != value
    if op == Op.GT:
        return expr > value
    if op == Op.LT:
        return expr < value
    if op == Op.GTE:
        return expr >= value
    if op == Op.LTE:
        return expr <= value
    if op == Op.IN:
        # Wrap a scalar into a one-element list for in_().
        return expr.in_(value if isinstance(value, list) else [value])
    if op == Op.NOT_IN:
        return expr.not_in(value if isinstance(value, list) else [value])

    from axiom_query.errors import QueryError

    raise QueryError(
        "INVALID_HAVING_OPERATOR",
        f"Operator '{op.value}' is not supported in HAVING clauses",
    )
|