TypeDAL 3.17.3__py3-none-any.whl → 4.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of TypeDAL might be problematic.
- typedal/__about__.py +1 -1
- typedal/__init__.py +9 -9
- typedal/caching.py +36 -33
- typedal/config.py +15 -16
- typedal/constants.py +25 -0
- typedal/core.py +176 -3168
- typedal/define.py +188 -0
- typedal/fields.py +254 -29
- typedal/for_web2py.py +1 -1
- typedal/helpers.py +268 -61
- typedal/mixins.py +21 -25
- typedal/query_builder.py +1059 -0
- typedal/relationships.py +264 -0
- typedal/rows.py +524 -0
- typedal/serializers/as_json.py +9 -10
- typedal/tables.py +1122 -0
- typedal/types.py +183 -177
- typedal/web2py_py4web_shared.py +1 -1
- {typedal-3.17.3.dist-info → typedal-4.0.0.dist-info}/METADATA +8 -7
- typedal-4.0.0.dist-info/RECORD +25 -0
- typedal-3.17.3.dist-info/RECORD +0 -19
- {typedal-3.17.3.dist-info → typedal-4.0.0.dist-info}/WHEEL +0 -0
- {typedal-3.17.3.dist-info → typedal-4.0.0.dist-info}/entry_points.txt +0 -0
typedal/core.py
CHANGED
@@ -4,387 +4,138 @@ Core functionality of TypeDAL.
 
 from __future__ import annotations
 
-import contextlib
-import copy
-import csv
-import datetime as dt
-import functools
-import inspect
-import json
-import math
-import re
 import sys
-import types
-import typing
-import uuid
+import typing as t
 import warnings
-from collections import defaultdict
-from decimal import Decimal
 from pathlib import Path
-from typing import …
+from typing import Optional
 
 import pydal
-from pydal._globals import DEFAULT
-
-# from pydal.objects import Field as _Field
-# from pydal.objects import Query as _Query
-from pydal.objects import Row
-
-# from pydal.objects import Table as _Table
-from typing_extensions import Self, Unpack
 
 from .config import TypeDALConfig, load_config
 from .helpers import (
-    …
-    as_lambda,
-    classproperty,
-    extract_type_optional,
-    filter_out,
-    instanciate,
-    is_union,
-    looks_like,
-    mktable,
-    origin_is_subclass,
+    SYSTEM_SUPPORTS_TEMPLATES,
+    default_representer,
+    sql_escape_template,
     sql_expression,
     to_snake,
-    unwrap_type,
 )
-from .…
-from .types import (
-    AnyDict,
-    CacheMetadata,
-    Expression,
-    Field,
-    FieldSettings,
-    Metadata,
-    OpRow,
-    OrderBy,
-    PaginateDict,
-    Pagination,
-    Query,
-    Reference,
-    Rows,
-    SelectKwargs,
-    Set,
-    Table,
-    Validator,
-    _Types,
-)
-
-# use typing.cast(type, ...) to make mypy happy with unions
-T_annotation = Type[Any] | types.UnionType
-T_Query = typing.Union["Table", Query, bool, None, "TypedTable", Type["TypedTable"], Expression]
-T_Value = typing.TypeVar("T_Value")  # actual type of the Field (via Generic)
-T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable")  # bound="TypedTable"; bound="TableMeta"
-T = typing.TypeVar("T")
-
-BASIC_MAPPINGS: dict[T_annotation, str] = {
-    str: "string",
-    int: "integer",
-    bool: "boolean",
-    bytes: "blob",
-    float: "double",
-    object: "json",
-    Decimal: "decimal(10,2)",
-    dt.date: "date",
-    dt.time: "time",
-    dt.datetime: "datetime",
-}
-
-
-def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
-    """
-    Is `cls` an instance or subclass of TypedField?
-
-    Deprecated
-    """
-    return isinstance(cls, TypedField) or (
-        isinstance(typing.get_origin(cls), type) and issubclass(typing.get_origin(cls), TypedField)
-    )
+from .types import Field, T, Template  # type: ignore
 
+try:
+    # python 3.14+
+    from annotationlib import ForwardRef
+except ImportError:  # pragma: no cover
+    # python 3.13-
+    from typing import ForwardRef
 
-# table-ish paramter:
-P_Table = typing.Union[Type["TypedTable"], pydal.objects.Table]
-
-…
-        # self, other -> Query
-        [P_Table, P_Table],
-        Query | bool,
-    ]
-]
-
-OnQuery: typing.TypeAlias = typing.Optional[
-    typing.Callable[
-        # self, other -> list of .on statements
-        [P_Table, P_Table],
-        list[Expression],
-    ]
-]
-
-To_Type = typing.TypeVar("To_Type")
-
-
-class Relationship(typing.Generic[To_Type]):
-    _type: Type[To_Type]
-    table: Type["TypedTable"] | type | str
-    condition: Condition
-    condition_and: Condition
-    on: OnQuery
-    multiple: bool
-    join: JOIN_OPTIONS
-
-    def __init__(self, _type: Type[To_Type], condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None, condition_and: Condition = None): …
-    def clone(self, **update: Any) -> "Relationship[To_Type]": …
-    def __repr__(self) -> str: …
-    def get_table(self, db: "TypeDAL") -> Type["TypedTable"]: …
-    def get_table_name(self) -> str: …
-    def __get__(self, instance: Any, owner: Any) -> "typing.Optional[list[Any]] | Relationship[To_Type]": …
-
-
-def relationship(_type: typing.Type[To_Type], condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None) -> To_Type: …
-
-
-def _generate_relationship_condition(_: Type["TypedTable"], key: str, field: T_Field) -> Condition: …
-
-
-def to_relationship(cls: Type["TypedTable"] | type[Any], key: str, field: T_Field) -> typing.Optional[Relationship[Any]]: …
-
-
-def evaluate_forward_reference(…): …
+if t.TYPE_CHECKING:
+    from .fields import TypedField
+    from .types import AnyDict, Expression, T_Query, Table
 
+
+# note: these functions can not be moved to a different file,
+# because then they will have different globals and it breaks!
+
+
+def evaluate_forward_reference_312(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
+    """
+    Extract the original type from a forward reference string.
+
+    Variant for python 3.12 and below
+    """
+    return t.cast(
+        type,
+        fw_ref._evaluate(
+            localns=locals(),
+            globalns=globals() | namespace,
+            recursive_guard=frozenset(),
+        ),
+    )
+
+
+def evaluate_forward_reference_313(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
+    """
+    Extract the original type from a forward reference string.
+
+    Variant for python 3.13
+    """
+    return t.cast(
+        type,
+        fw_ref._evaluate(
+            localns=locals(),
+            globalns=globals() | namespace,
+            recursive_guard=frozenset(),
+            type_params=(),  # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
+        ),
+    )
+
+
+def evaluate_forward_reference_314(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
+    """
+    Extract the original type from a forward reference string.
+
+    Variant for python 3.14 (and hopefully above)
+    """
+    return t.cast(
+        type,
+        fw_ref.evaluate(
+            locals=locals(),
+            globals=globals() | namespace,
+            type_params=(),
+        ),
+    )
+
+
+def evaluate_forward_reference(
+    fw_ref: ForwardRef,
+    namespace: dict[str, type] | None = None,
+) -> type:  # pragma: no cover
+    """
+    Extract the original type from a forward reference string.
+
+    Automatically chooses strategy based on current Python version.
+    """
+    if sys.version_info.minor < 13:
+        return evaluate_forward_reference_312(fw_ref, namespace=namespace or {})
+    elif sys.version_info.minor == 13:
+        return evaluate_forward_reference_313(fw_ref, namespace=namespace or {})
+    else:
+        return evaluate_forward_reference_314(fw_ref, namespace=namespace or {})
+
+
+def resolve_annotation_313(ftype: str) -> type:  # pragma: no cover
+    """
+    Resolve an annotation that's in string representation.
+
+    Variant for Python 3.13
+    """
+    fw_ref: ForwardRef = t.get_args(t.Type[ftype])[0]
+    return evaluate_forward_reference(fw_ref)
+
+
+def resolve_annotation_314(ftype: str) -> type:  # pragma: no cover
+    """
+    Resolve an annotation that's in string representation.
+
+    Variant for Python 3.14 + using annotationlib
+    """
+    fw_ref = ForwardRef(ftype)
+    return evaluate_forward_reference(fw_ref)
+
+
+def resolve_annotation(ftype: str) -> type:  # pragma: no cover
+    """
+    Resolve an annotation that's in string representation.
+
+    Automatically chooses strategy based on current Python version.
+    """
+    if sys.version_info.major != 3:
+        raise EnvironmentError("Only python 3 is supported.")
+    elif sys.version_info.minor <= 13:
+        return resolve_annotation_313(ftype)
+    else:
+        return resolve_annotation_314(ftype)
 
 
 class TypeDAL(pydal.DAL):  # type: ignore
@@ -393,6 +144,7 @@ class TypeDAL(pydal.DAL): # type: ignore
     """
 
     _config: TypeDALConfig
+    _builder: TableDefinitionBuilder
 
     def __init__(
         self,
@@ -414,7 +166,7 @@ class TypeDAL(pydal.DAL): # type: ignore
         debug: bool = False,
         lazy_tables: bool = False,
         db_uid: Optional[str] = None,
-        after_connection: …
+        after_connection: t.Callable[..., t.Any] = None,
         tables: Optional[list[str]] = None,
         ignore_field_case: bool = True,
         entity_quoting: bool = True,
@@ -443,6 +195,7 @@ class TypeDAL(pydal.DAL): # type: ignore
 
         self._config = config
         self.db = self
+        self._builder = TableDefinitionBuilder(self)
 
         if config.folder:
             Path(config.folder).mkdir(exist_ok=True)
@@ -477,7 +230,7 @@ class TypeDAL(pydal.DAL): # type: ignore
         self.try_define(_TypedalCache)
         self.try_define(_TypedalCacheDependency)
 
-    def try_define(self, model: Type[T], verbose: bool = False) -> Type[T]:
+    def try_define(self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
         """
         Try to define a model with migrate or fall back to fake migrate.
         """
@@ -495,125 +248,13 @@ class TypeDAL(pydal.DAL): # type: ignore
         # try again:
         return self.define(model, migrate=True, fake_migrate=True, redefine=True)
 
-    default_kwargs: …
+    default_kwargs: t.ClassVar[AnyDict] = {
         # fields are 'required' (notnull) by default:
         "notnull": True,
     }
 
-    def _define(self, cls: Type[T], **kwargs: Any) -> Type[T]:
-        # todo: new relationship item added should also invalidate (previously unrelated) cache result
-
-        # todo: option to enable/disable cache dependency behavior:
-        # - don't set _before_update and _before_delete
-        # - don't add TypedalCacheDependency entry
-        # - don't invalidate other item on new row of this type
-
-        # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
-        # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
-        # typing.get_type_hints(cls, globalns=None, localns=None)
-        # -> ERR e.g. `pytest -svxk cli` -> name 'BestFriend' is not defined
-
-        # dirty way (with evil eval):
-        # [eval(v) for k, v in cls.__annotations__.items()]
-        # this however also stops working when variables outside this scope or even references to other
-        # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
-        # and might break in the future, when this annotations behavior is enabled by default.
-
-        # non-annotated variables have to be passed to define_table as kwargs
-        full_dict = all_dict(cls)  # includes properties from parents (e.g. useful for mixins)
-
-        tablename = self.to_snake(cls.__name__)
-        # grab annotations of cls and it's parents:
-        annotations = all_annotations(cls)
-        # extend with `prop = TypedField()` 'annotations':
-        annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
-        # remove internal stuff:
-        annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
-
-        typedfields: dict[str, TypedField[Any]] = {
-            k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
-        }
-
-        relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
-
-        fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
-
-        # ! dont' use full_dict here:
-        other_kwargs = kwargs | {
-            k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
-        }  # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
-        # now it's only used to extract relationships from the object.
-        # other properties of the class (incl methods) should not be touched
-
-        # for key in typedfields.keys() - full_dict.keys():
-        #     # typed fields that don't haven't been added to the object yet
-        #     setattr(cls, key, typedfields[key])
-
-        for key, field in typedfields.items():
-            # clone every property so it can be re-used across mixins:
-            clone = copy.copy(field)
-            setattr(cls, key, clone)
-            typedfields[key] = clone
-
-        # start with base classes and overwrite with current class:
-        relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
-
-        # DEPRECATED: Relationship as annotation is currently not supported!
-        # ensure they are all instances and
-        # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
-        # relationships = {
-        #     k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
-        # }
-
-        # keys of implicit references (also relationships):
-        reference_field_keys = [
-            k for k, v in fields.items() if str(v.type).split(" ")[0] in ("list:reference", "reference")
-        ]
-
-        # add implicit relationships:
-        # User; list[User]; TypedField[User]; TypedField[list[User]]; TypedField(User); TypedField(list[User])
-        relationships |= {
-            k: new_relationship
-            for k in reference_field_keys
-            if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
-        }
-
-        # fixme: list[Reference] is recognized as relationship,
-        #  TypedField(list[Reference]) is NOT recognized!!!
-
-        cache_dependency = self._config.caching and kwargs.pop("cache_dependency", True)
-
-        table: Table = self.define_table(tablename, *fields.values(), **kwargs)
-
-        for name, typed_field in typedfields.items():
-            field = fields[name]
-            typed_field.bind(field, table)
-
-        if issubclass(cls, TypedTable):
-            cls.__set_internals__(
-                db=self,
-                table=table,
-                # by now, all relationships should be instances!
-                relationships=typing.cast(dict[str, Relationship[Any]], relationships),
-            )
-            # map both name and rname:
-            self._class_map[str(table)] = cls
-            self._class_map[table._rname] = cls
-            cls.__on_define__(self)
-        else:
-            warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
-
-        if not tablename.startswith("typedal_") and cache_dependency:
-            table._before_update.append(lambda s, _: _remove_cache(s, tablename))
-            table._before_delete.append(lambda s: _remove_cache(s, tablename))
-
-        return cls
-
-    @typing.overload
-    def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[Type[T]], Type[T]]:
+    @t.overload
+    def define(self, maybe_cls: None = None, **kwargs: t.Any) -> t.Callable[[t.Type[T]], t.Type[T]]:
         """
         Typing Overload for define without a class.
 
@@ -621,8 +262,8 @@ class TypeDAL(pydal.DAL): # type: ignore
             class MyTable(TypedTable): ...
         """
 
-    @…
-    def define(self, maybe_cls: Type[T], **kwargs: Any) -> Type[T]:
+    @t.overload
+    def define(self, maybe_cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
         """
         Typing Overload for define with a class.
 
@@ -630,7 +271,11 @@ class TypeDAL(pydal.DAL): # type: ignore
             class MyTable(TypedTable): ...
         """
 
-    def define(…
+    def define(
+        self,
+        maybe_cls: t.Type[T] | None = None,
+        **kwargs: t.Any,
+    ) -> t.Type[T] | t.Callable[[t.Type[T]], t.Type[T]]:
         """
         Can be used as a decorator on a class that inherits `TypedTable`, \
         or as a regular method if you need to define your classes before you have access to a 'db' instance.
@@ -653,39 +298,15 @@ class TypeDAL(pydal.DAL): # type: ignore
            the result of pydal.define_table
         """
 
-        def wrapper(cls: Type[T]) -> Type[T]:
-            return self.…
+        def wrapper(cls: t.Type[T]) -> t.Type[T]:
+            return self._builder.define(cls, **kwargs)
 
         if maybe_cls:
             return wrapper(maybe_cls)
 
         return wrapper
 
-
-    # """
-    # Remove a table by name (both on the database level and the typedal level).
-    # """
-    # # drop calls TypedTable.drop() and removes it from the `_class_map`
-    # if cls := self._class_map.pop(table_name, None):
-    #     cls.drop()
-
-    # def drop_all(self, max_retries: int = None) -> None:
-    #     """
-    #     Remove all tables and keep doing so until everything is gone!
-    #     """
-    #     retries = 0
-    #     if max_retries is None:
-    #         max_retries = len(self.tables)
-    #
-    #     while self.tables:
-    #         retries += 1
-    #         for table in self.tables:
-    #             self.drop(table)
-    #
-    #         if retries > max_retries:
-    #             raise RuntimeError("Could not delete all tables")
-
-    def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
+    def __call__(self, *_args: T_Query, **kwargs: t.Any) -> "TypedSet":
         """
         A db instance can be called directly to perform a query.
 
@@ -703,11 +324,11 @@ class TypeDAL(pydal.DAL): # type: ignore
 
         if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
             # table defined without @db.define decorator!
-            _cls: Type[TypedTable] = cls
+            _cls: t.Type[TypedTable] = cls
            args[0] = _cls.id != None
 
         _set = super().__call__(*args, **kwargs)
-        return …
+        return t.cast(TypedSet, _set)
 
     def __getitem__(self, key: str) -> "Table":
         """
@@ -718,9 +339,9 @@ class TypeDAL(pydal.DAL): # type: ignore
         Example:
            db['users'] -> user
         """
-        return …
+        return t.cast(Table, super().__getitem__(str(key)))
 
-    def find_model(self, table_name: str) -> Type["TypedTable"] | None:
+    def find_model(self, table_name: str) -> t.Type["TypedTable"] | None:
         """
         Retrieves a mapped table class by its name.
 
@@ -735,96 +356,12 @@ class TypeDAL(pydal.DAL): # type: ignore
         Returns:
             The mapped table class if it exists, otherwise None.
         """
-        return self.…
+        return self._builder.class_map.get(table_name, None)
 
-    @classmethod
-    def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
-        # return Field(name, _type, **{**cls.default_kwargs, **kw})
-        kw_combined = cls.default_kwargs | kw
-        return Field(name, _type, **kw_combined)
-
-    @classmethod
-    def _annotation_to_pydal_fieldtype(
-        cls,
-        _ftype: T_annotation,
-        mut_kw: typing.MutableMapping[str, Any],
-    ) -> Optional[str]:
-        # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
-        ftype = typing.cast(type, _ftype)  # cast from Type to type to make mypy happy)
-
-        if isinstance(ftype, str):
-            # extract type from string
-            fw_ref: typing.ForwardRef = typing.get_args(Type[ftype])[0]
-            ftype = evaluate_forward_reference(fw_ref)
-
-        if mapping := BASIC_MAPPINGS.get(ftype):
-            # basi types
-            return mapping
-        elif isinstance(ftype, pydal.objects.Table):
-            # db.table
-            return f"reference {ftype._tablename}"
-        elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
-            # SomeTable
-            snakename = cls.to_snake(ftype.__name__)
-            return f"reference {snakename}"
-        elif isinstance(ftype, TypedField):
-            # FieldType(type, ...)
-            return ftype._to_field(mut_kw)
-        elif origin_is_subclass(ftype, TypedField):
-            # TypedField[int]
-            return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
-        elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
-            # list[str] -> str -> string -> list:string
-            _child_type = typing.get_args(ftype)[0]
-            _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
-            return f"list:{_child_type}"
-        elif is_union(ftype):
-            # str | int -> UnionType
-            # typing.Union[str | int] -> typing._UnionGenericAlias
-
-            # Optional[type] == type | None
-
-            match typing.get_args(ftype):
-                case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
-                    # good union of Nullable
-
-                    # if a field is optional, it is nullable:
-                    mut_kw["notnull"] = False
-                    return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
-                case _:
-                    # two types is not supported by the db!
-                    return None
-        else:
-            return None
-
-    @classmethod
-    def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
-        """
-        Convert a annotation into a pydal Field.
-
-        Args:
-            fname: name of the property
-            ftype: annotation of the property
-            kw: when using TypedField or a function returning it (e.g. StringField),
-                keyword args can be used to pass any other settings you would normally to a pydal Field
-
-        -> pydal.Field(fname, ftype, **kw)
-
-        Example:
-            class MyTable:
-                fname: ftype
-                id: int
-                name: str
-                reference: Table
-                other: TypedField(str, default="John Doe")  # default will be in kwargs
-        """
-        fname = cls.to_snake(fname)
-        …
-        raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
+    @property
+    def _class_map(self) -> dict[str, t.Type["TypedTable"]]:
+        # alias for backward-compatibility
+        return self._builder.class_map
 
     @staticmethod
     def to_snake(camel: str) -> str:
@@ -833,18 +370,71 @@ class TypeDAL(pydal.DAL): # type: ignore
         """
         return to_snake(camel)
 
+    def executesql(
+        self,
+        query: str | Template,
+        placeholders: t.Iterable[str] | dict[str, str] | None = None,
+        as_dict: bool = False,
+        fields: t.Iterable[Field | TypedField[t.Any]] | None = None,
+        colnames: t.Iterable[str] | None = None,
+        as_ordered_dict: bool = False,
+    ) -> list[t.Any]:
+        """
+        Executes a raw SQL statement or a TypeDAL template query.
+
+        If `query` is provided as a `Template` and the system supports template
+        rendering, it will be processed with `sql_escape_template` before being
+        executed. Otherwise, the query is passed to the underlying DAL as-is.
+
+        Args:
+            query (str | Template): The SQL query to execute, either a plain
+                string or a `Template` (created via the `t""` syntax).
+            placeholders (Iterable[str] | dict[str, str] | None, optional):
+                Parameters to substitute into the SQL statement. Can be a sequence
+                (for positional parameters) or a dictionary (for named parameters).
+                Usually not applicable when using a t-string, since template
+                expressions handle interpolation directly.
+            as_dict (bool, optional): If True, return rows as dictionaries keyed by
+                column name. Defaults to False.
+            fields (Iterable[Field | TypedField] | None, optional): Explicit set of
+                fields to map results onto. Defaults to None.
+            colnames (Iterable[str] | None, optional): Explicit column names to use
+                in the result set. Defaults to None.
+            as_ordered_dict (bool, optional): If True, return rows as `OrderedDict`s
+                preserving column order. Defaults to False.
+
+        Returns:
+            list[t.Any]: The query result set. Typically a list of tuples if
+                `as_dict` and `as_ordered_dict` are False, or a list of dict-like
+                objects if those flags are enabled.
+        """
+        if SYSTEM_SUPPORTS_TEMPLATES and isinstance(query, Template):  # pragma: no cover
+            query = sql_escape_template(self, query)
+
+        rows: list[t.Any] = super().executesql(
+            query,
+            placeholders=placeholders,
+            as_dict=as_dict,
+            fields=fields,
+            colnames=colnames,
+            as_ordered_dict=as_ordered_dict,
+        )
+
+        return rows
+
     def sql_expression(
         self,
-        sql_fragment: str,
-        *raw_args: Any,
+        sql_fragment: str | Template,
+        *raw_args: t.Any,
         output_type: str | None = None,
-        **raw_kwargs: Any,
+        **raw_kwargs: t.Any,
     ) -> Expression:
         """
         Creates a pydal Expression object representing a raw SQL fragment.
 
         Args:
            sql_fragment: The raw SQL fragment.
+                In python 3.14+, this can also be a t-string. In that case, don't pass other args or kwargs.
            *raw_args: Arguments to be interpolated into the SQL fragment.
            output_type: The expected output type of the expression.
            **raw_kwargs: Keyword arguments to be interpolated into the SQL fragment.
@@ -855,2598 +445,16 @@ class TypeDAL(pydal.DAL): # type: ignore
|
|
|
855
445
|
return sql_expression(self, sql_fragment, *raw_args, output_type=output_type, **raw_kwargs)
|
|
856
446
|
|
|
857
447
|
|
|
858
|
-
def default_representer(field: TypedField[T], value: T, table: Type[TypedTable]) -> str:
|
|
859
|
-
"""
|
|
860
|
-
Simply call field.represent on the value.
|
|
861
|
-
"""
|
|
862
|
-
if represent := getattr(field, "represent", None):
|
|
863
|
-
return str(represent(value, table))
|
|
864
|
-
else:
|
|
865
|
-
return repr(value)
|
|
866
|
-
|
|
867
|
-
|
|
868
448
|
TypeDAL.representers.setdefault("rows_render", default_representer)
|
|
869
449
|
|
|
870
|
-
|
|
871
|
-
R = typing.TypeVar("R")
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
def reorder_fields(
|
|
875
|
-
table: pydal.objects.Table,
|
|
876
|
-
fields: typing.Iterable[str | Field | TypedField[Any]],
|
|
877
|
-
keep_others: bool = True,
|
|
878
|
-
) -> None:
|
|
879
|
-
"""
|
|
880
|
-
Reorder fields of a pydal table.
|
|
881
|
-
|
|
882
|
-
Args:
|
|
883
|
-
table: The pydal table object (e.g., db.mytable).
|
|
884
|
-
fields: List of field names (str) or Field objects in desired order.
|
|
885
|
-
keep_others (bool):
|
|
886
|
-
- True (default): keep other fields at the end, in their original order.
|
|
887
|
-
- False: remove other fields (only keep what's specified).
|
|
888
|
-
"""
|
|
889
|
-
# Normalize input to field names
|
|
890
|
-
desired = [f.name if isinstance(f, (TypedField, Field, pydal.objects.Field)) else str(f) for f in fields]
|
|
891
|
-
|
|
892
|
-
new_order = [f for f in desired if f in table._fields]
|
|
893
|
-
|
|
894
|
-
if keep_others:
|
|
895
|
-
# Start with desired fields, then append the rest
|
|
896
|
-
new_order.extend(f for f in table._fields if f not in desired)
|
|
897
|
-
|
|
898
|
-
table._fields = new_order
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
class TableMeta(type):
|
|
902
|
-
"""
|
|
903
|
-
This metaclass contains functionality on table classes, that doesn't exist on its instances.
|
|
904
|
-
|
|
905
|
-
Example:
|
|
906
|
-
class MyTable(TypedTable):
|
|
907
|
-
some_field: TypedField[int]
|
|
908
|
-
|
|
909
|
-
MyTable.update_or_insert(...) # should work
|
|
910
|
-
|
|
911
|
-
MyTable.some_field # -> Field, can be used to query etc.
|
|
912
|
-
|
|
913
|
-
row = MyTable.first() # returns instance of MyTable
|
|
914
|
-
|
|
915
|
-
# row.update_or_insert(...) # shouldn't work!
|
|
916
|
-
|
|
917
|
-
row.some_field # -> int, with actual data
|
|
918
|
-
|
|
919
|
-
"""
|
|
920
|
-
|
|
921
|
-
# set up by db.define:
|
|
922
|
-
# _db: TypeDAL | None = None
|
|
923
|
-
# _table: Table | None = None
|
|
924
|
-
_db: TypeDAL | None = None
|
|
925
|
-
_table: Table | None = None
|
|
926
|
-
_relationships: dict[str, Relationship[Any]] | None = None
|
|
927
|
-
|
|
928
|
-
#########################
|
|
929
|
-
# TypeDAL custom logic: #
|
|
930
|
-
#########################
|
|
931
|
-
|
|
932
|
-
def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
|
|
933
|
-
"""
|
|
934
|
-
Store the related database and pydal table for later usage.
|
|
935
|
-
"""
|
|
936
|
-
self._db = db
|
|
937
|
-
self._table = table
|
|
938
|
-
self._relationships = relationships
|
|
939
|
-
|
|
940
|
-
def __getattr__(self, col: str) -> Optional[Field]:
|
|
941
|
-
"""
|
|
942
|
-
Magic method used by TypedTableMeta to get a database field with dot notation on a class.
|
|
943
|
-
|
|
944
|
-
Example:
|
|
945
|
-
SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__)
|
|
946
|
-
|
|
947
|
-
"""
|
|
948
|
-
if self._table:
|
|
949
|
-
return getattr(self._table, col, None)
|
|
950
|
-
|
|
951
|
-
return None
|
|
952
|
-
|
|
953
|
-
def _ensure_table_defined(self) -> Table:
|
|
954
|
-
if not self._table:
|
|
955
|
-
raise EnvironmentError("@define or db.define is not called on this class yet!")
|
|
956
|
-
return self._table
|
|
957
|
-
|
|
958
|
-
def __iter__(self) -> typing.Generator[Field, None, None]:
|
|
959
|
-
"""
|
|
960
|
-
Loop through the columns of this model.
|
|
961
|
-
"""
|
|
962
|
-
table = self._ensure_table_defined()
|
|
963
|
-
yield from iter(table)
|
|
964
|
-
|
|
965
|
-
def __getitem__(self, item: str) -> Field:
|
|
966
|
-
"""
|
|
967
|
-
Allow dict notation to get a column of this table (-> Field instance).
|
|
968
|
-
"""
|
|
969
|
-
table = self._ensure_table_defined()
|
|
970
|
-
return table[item]
|
|
971
|
-
|
|
972
|
-
def __str__(self) -> str:
|
|
973
|
-
"""
|
|
974
|
-
Normally, just returns the underlying table name, but with a fallback if the model is unbound.
|
|
975
|
-
"""
|
|
976
|
-
if self._table:
|
|
977
|
-
return str(self._table)
|
|
978
|
-
else:
|
|
979
|
-
return f"<unbound table {self.__name__}>"
|
|
980
|
-
|
|
981
|
-
def from_row(self: Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
|
|
982
|
-
"""
|
|
983
|
-
Create a model instance from a pydal row.
|
|
984
|
-
"""
|
|
985
|
-
return self(row)
|
|
986
|
-
|
|
987
|
-
def all(self: Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
|
|
988
|
-
"""
|
|
989
|
-
Return all rows for this model.
|
|
990
|
-
"""
|
|
991
|
-
return self.collect()
|
|
992
|
-
|
|
993
|
-
def get_relationships(self) -> dict[str, Relationship[Any]]:
|
|
994
|
-
"""
|
|
995
|
-
Return the registered relationships of the current model.
|
|
996
|
-
"""
|
|
997
|
-
return self._relationships or {}
|
|
998
|
-
|
|
999
|
-
##########################
|
|
1000
|
-
# TypeDAL Modified Logic #
|
|
1001
|
-
##########################
|
|
1002
|
-
|
|
1003
|
-
def insert(self: Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
|
|
1004
|
-
"""
|
|
1005
|
-
This is only called when db.define is not used as a decorator.
|
|
1006
|
-
|
|
1007
|
-
cls.__table functions as 'self'
|
|
1008
|
-
|
|
1009
|
-
Args:
|
|
1010
|
-
**fields: anything you want to insert in the database
|
|
1011
|
-
|
|
1012
|
-
Returns: the ID of the new row.
|
|
1013
|
-
|
|
1014
|
-
"""
|
|
1015
|
-
table = self._ensure_table_defined()
|
|
1016
|
-
|
|
1017
|
-
result = table.insert(**fields)
|
|
1018
|
-
# it already is an int but mypy doesn't understand that
|
|
1019
|
-
return self(result)
|
|
1020
|
-
|
|
1021
|
-
def _insert(self, **fields: Any) -> str:
|
|
1022
|
-
table = self._ensure_table_defined()
|
|
1023
|
-
|
|
1024
|
-
return str(table._insert(**fields))
|
|
1025
|
-
|
|
1026
|
-
def bulk_insert(self: Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
|
|
1027
|
-
"""
|
|
1028
|
-
Insert multiple rows, returns a TypedRows set of new instances.
|
|
1029
|
-
"""
|
|
1030
|
-
table = self._ensure_table_defined()
|
|
1031
|
-
result = table.bulk_insert(items)
|
|
1032
|
-
return self.where(lambda row: row.id.belongs(result)).collect()
|
|
1033
|
-
|
|
1034
|
-
def update_or_insert(
|
|
1035
|
-
self: Type[T_MetaInstance],
|
|
1036
|
-
query: T_Query | AnyDict = DEFAULT,
|
|
1037
|
-
**values: Any,
|
|
1038
|
-
) -> T_MetaInstance:
|
|
1039
|
-
"""
|
|
1040
|
-
Update a row if query matches, else insert a new one.
|
|
1041
|
-
|
|
1042
|
-
Returns the created or updated instance.
|
|
1043
|
-
"""
|
|
1044
|
-
table = self._ensure_table_defined()
|
|
1045
|
-
|
|
1046
|
-
if query is DEFAULT:
|
|
1047
|
-
record = table(**values)
|
|
1048
|
-
elif isinstance(query, dict):
|
|
1049
|
-
record = table(**query)
|
|
1050
|
-
else:
|
|
1051
|
-
record = table(query)
|
|
1052
|
-
|
|
1053
|
-
if not record:
|
|
1054
|
-
return self.insert(**values)
|
|
1055
|
-
|
|
1056
|
-
record.update_record(**values)
|
|
1057
|
-
return self(record)
|
|
1058
|
-
|
|
1059
|
-
def validate_and_insert(
|
|
1060
|
-
self: Type[T_MetaInstance],
|
|
1061
|
-
**fields: Any,
|
|
1062
|
-
) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
|
|
1063
|
-
"""
|
|
1064
|
-
Validate input data and then insert a row.
|
|
1065
|
-
|
|
1066
|
-
Returns a tuple of (the created instance, a dict of errors).
|
|
1067
|
-
"""
|
|
1068
|
-
table = self._ensure_table_defined()
|
|
1069
|
-
result = table.validate_and_insert(**fields)
|
|
1070
|
-
if row_id := result.get("id"):
|
|
1071
|
-
return self(row_id), None
|
|
1072
|
-
else:
|
|
1073
|
-
return None, result.get("errors")
|
|
1074
|
-
|
|
1075
|
-
def validate_and_update(
|
|
1076
|
-
self: Type[T_MetaInstance],
|
|
1077
|
-
query: Query,
|
|
1078
|
-
**fields: Any,
|
|
1079
|
-
) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
|
|
1080
|
-
"""
|
|
1081
|
-
Validate input data and then update max 1 row.
|
|
1082
|
-
|
|
1083
|
-
Returns a tuple of (the updated instance, a dict of errors).
|
|
1084
|
-
"""
|
|
1085
|
-
table = self._ensure_table_defined()
|
|
1086
|
-
|
|
1087
|
-
result = table.validate_and_update(query, **fields)
|
|
1088
|
-
|
|
1089
|
-
if errors := result.get("errors"):
|
|
1090
|
-
return None, errors
|
|
1091
|
-
elif row_id := result.get("id"):
|
|
1092
|
-
return self(row_id), None
|
|
1093
|
-
else: # pragma: no cover
|
|
1094
|
-
# update on query without result (shouldnt happen)
|
|
1095
|
-
return None, None
|
|
1096
|
-
|
|
1097
|
-
def validate_and_update_or_insert(
|
|
1098
|
-
self: Type[T_MetaInstance],
|
|
1099
|
-
query: Query,
|
|
1100
|
-
**fields: Any,
|
|
1101
|
-
) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
|
|
1102
|
-
"""
|
|
1103
|
-
Validate input data and then update_and_insert (on max 1 row).
|
|
1104
|
-
|
|
1105
|
-
Returns a tuple of (the updated/created instance, a dict of errors).
|
|
1106
|
-
"""
|
|
1107
|
-
table = self._ensure_table_defined()
|
|
1108
|
-
result = table.validate_and_update_or_insert(query, **fields)
|
|
1109
|
-
|
|
1110
|
-
if errors := result.get("errors"):
|
|
1111
|
-
return None, errors
|
|
1112
|
-
elif row_id := result.get("id"):
|
|
1113
|
-
return self(row_id), None
|
|
1114
|
-
else: # pragma: no cover
|
|
1115
|
-
# update on query without result (shouldnt happen)
|
|
1116
|
-
return None, None
|
|
1117
|
-
|
|
1118
|
-
def select(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
|
|
1119
|
-
"""
|
|
1120
|
-
See QueryBuilder.select!
|
|
1121
|
-
"""
|
|
1122
|
-
return QueryBuilder(self).select(*a, **kw)
|
|
1123
|
-
|
|
1124
|
-
def column(self: Type[T_MetaInstance], field: "TypedField[T] | T", **options: Unpack[SelectKwargs]) -> list[T]:
|
|
1125
|
-
"""
|
|
1126
|
-
Get all values in a specific column.
|
|
1127
|
-
|
|
1128
|
-
Shortcut for `.select(field).execute().column(field)`.
|
|
1129
|
-
"""
|
|
1130
|
-
return QueryBuilder(self).select(field, **options).execute().column(field)
|
|
1131
|
-
|
|
1132
|
-
def paginate(self: Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
|
|
1133
|
-
"""
|
|
1134
|
-
See QueryBuilder.paginate!
|
|
1135
|
-
"""
|
|
1136
|
-
return QueryBuilder(self).paginate(limit=limit, page=page)
|
|
1137
|
-
|
|
1138
|
-
def chunk(self: Type[T_MetaInstance], chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
|
|
1139
|
-
"""
|
|
1140
|
-
See QueryBuilder.chunk!
|
|
1141
|
-
"""
|
|
1142
|
-
return QueryBuilder(self).chunk(chunk_size)
|
|
1143
|
-
|
|
1144
|
-
def where(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
|
|
1145
|
-
"""
|
|
1146
|
-
See QueryBuilder.where!
|
|
1147
|
-
"""
|
|
1148
|
-
return QueryBuilder(self).where(*a, **kw)
|
|
1149
|
-
|
|
1150
|
-
def orderby(self: Type[T_MetaInstance], *fields: OrderBy) -> "QueryBuilder[T_MetaInstance]":
|
|
1151
|
-
"""
|
|
1152
|
-
See QueryBuilder.orderby!
|
|
1153
|
-
"""
|
|
1154
|
-
return QueryBuilder(self).orderby(*fields)
|
|
1155
|
-
|
|
1156
|
-
def cache(self: Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
|
|
1157
|
-
"""
|
|
1158
|
-
See QueryBuilder.cache!
|
|
1159
|
-
"""
|
|
1160
|
-
return QueryBuilder(self).cache(*deps, **kwargs)
|
|
1161
|
-
|
|
1162
|
-
def count(self: Type[T_MetaInstance]) -> int:
|
|
1163
|
-
"""
|
|
1164
|
-
See QueryBuilder.count!
|
|
1165
|
-
"""
|
|
1166
|
-
return QueryBuilder(self).count()
|
|
1167
|
-
|
|
1168
|
-
def exists(self: Type[T_MetaInstance]) -> bool:
|
|
1169
|
-
"""
|
|
1170
|
-
See QueryBuilder.exists!
|
|
1171
|
-
"""
|
|
1172
|
-
return QueryBuilder(self).exists()
|
|
1173
|
-
|
|
1174
|
-
def first(self: Type[T_MetaInstance]) -> T_MetaInstance | None:
|
|
1175
|
-
"""
|
|
1176
|
-
See QueryBuilder.first!
|
|
1177
|
-
"""
|
|
1178
|
-
return QueryBuilder(self).first()
|
|
1179
|
-
|
|
1180
|
-
def first_or_fail(self: Type[T_MetaInstance]) -> T_MetaInstance:
|
|
1181
|
-
"""
|
|
1182
|
-
See QueryBuilder.first_or_fail!
|
|
1183
|
-
"""
|
|
1184
|
-
return QueryBuilder(self).first_or_fail()
|
|
1185
|
-
|
|
1186
|
-
def join(
|
|
1187
|
-
self: Type[T_MetaInstance],
|
|
1188
|
-
*fields: str | Type["TypedTable"],
|
|
1189
|
-
method: JOIN_OPTIONS = None,
|
|
1190
|
-
on: OnQuery | list[Expression] | Expression = None,
|
|
1191
|
-
condition: Condition = None,
|
|
1192
|
-
condition_and: Condition = None,
|
|
1193
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
1194
|
-
"""
|
|
1195
|
-
See QueryBuilder.join!
|
|
1196
|
-
"""
|
|
1197
|
-
return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method, condition_and=condition_and)
|
|
1198
|
-
|
|
1199
|
-
def collect(self: Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
|
|
1200
|
-
"""
|
|
1201
|
-
See QueryBuilder.collect!
|
|
1202
|
-
"""
|
|
1203
|
-
return QueryBuilder(self).collect(verbose=verbose)
|
|
1204
|
-
|
|
1205
|
-
@property
|
|
1206
|
-
def ALL(cls) -> pydal.objects.SQLALL:
|
|
1207
|
-
"""
|
|
1208
|
-
Select all fields for this table.
|
|
1209
|
-
"""
|
|
1210
|
-
table = cls._ensure_table_defined()
|
|
1211
|
-
|
|
1212
|
-
return table.ALL
|
|
1213
|
-
|
|
1214
|
-
##########################
|
|
1215
|
-
# TypeDAL Shadowed Logic #
|
|
1216
|
-
##########################
|
|
1217
|
-
fields: list[str]
|
|
1218
|
-
|
|
1219
|
-
# other table methods:
|
|
450
|
+
# note: these imports exist at the bottom of this file to prevent circular import issues:
|
|
1220
451
|
|
|
1221
|
-
|
|
1222
|
-
|
|
1223
|
-
|
|
1224
|
-
|
|
1225
|
-
table = self._ensure_table_defined()
|
|
1226
|
-
table.truncate(mode)
|
|
1227
|
-
|
|
1228
|
-
def drop(self, mode: str = "") -> None:
|
|
1229
|
-
"""
|
|
1230
|
-
Remove the underlying table.
|
|
1231
|
-
"""
|
|
1232
|
-
table = self._ensure_table_defined()
|
|
1233
|
-
table.drop(mode)
|
|
1234
|
-
|
|
1235
|
-
def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
|
|
1236
|
-
"""
|
|
1237
|
-
Add an index on some columns of this table.
|
|
1238
|
-
"""
|
|
1239
|
-
table = self._ensure_table_defined()
|
|
1240
|
-
result = table.create_index(name, *fields, **kwargs)
|
|
1241
|
-
return typing.cast(bool, result)
|
|
1242
|
-
|
|
1243
|
-
def drop_index(self, name: str, if_exists: bool = False) -> bool:
|
|
1244
|
-
"""
|
|
1245
|
-
Remove an index from this table.
|
|
1246
|
-
"""
|
|
1247
|
-
table = self._ensure_table_defined()
|
|
1248
|
-
result = table.drop_index(name, if_exists)
|
|
1249
|
-
return typing.cast(bool, result)
|
|
1250
|
-
|
|
1251
|
-
def import_from_csv_file(
|
|
1252
|
-
self,
|
|
1253
|
-
csvfile: typing.TextIO,
|
|
1254
|
-
id_map: dict[str, str] = None,
|
|
1255
|
-
null: Any = "<NULL>",
|
|
1256
|
-
unique: str = "uuid",
|
|
1257
|
-
id_offset: dict[str, int] = None, # id_offset used only when id_map is None
|
|
1258
|
-
transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
|
|
1259
|
-
validate: bool = False,
|
|
1260
|
-
encoding: str = "utf-8",
|
|
1261
|
-
delimiter: str = ",",
|
|
1262
|
-
quotechar: str = '"',
|
|
1263
|
-
quoting: int = csv.QUOTE_MINIMAL,
|
|
1264
|
-
restore: bool = False,
|
|
1265
|
-
**kwargs: Any,
|
|
1266
|
-
) -> None:
|
|
1267
|
-
"""
|
|
1268
|
-
Load a csv file into the database.
|
|
1269
|
-
"""
|
|
1270
|
-
table = self._ensure_table_defined()
|
|
1271
|
-
table.import_from_csv_file(
|
|
1272
|
-
csvfile,
|
|
1273
|
-
id_map=id_map,
|
|
1274
|
-
null=null,
|
|
1275
|
-
unique=unique,
|
|
1276
|
-
id_offset=id_offset,
|
|
1277
|
-
transform=transform,
|
|
1278
|
-
validate=validate,
|
|
1279
|
-
encoding=encoding,
|
|
1280
|
-
delimiter=delimiter,
|
|
1281
|
-
quotechar=quotechar,
|
|
1282
|
-
quoting=quoting,
|
|
1283
|
-
restore=restore,
|
|
1284
|
-
**kwargs,
|
|
1285
|
-
)
|
|
1286
|
-
|
|
1287
|
-
def on(self, query: Query | bool) -> Expression:
|
|
1288
|
-
"""
|
|
1289
|
-
Shadow Table.on.
|
|
1290
|
-
|
|
1291
|
-
Used for joins.
|
|
1292
|
-
|
|
1293
|
-
See Also:
|
|
1294
|
-
http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
|
|
1295
|
-
"""
|
|
1296
|
-
table = self._ensure_table_defined()
|
|
1297
|
-
return typing.cast(Expression, table.on(query))
|
|
1298
|
-
|
|
1299
|
-
def with_alias(self: Type[T_MetaInstance], alias: str) -> Type[T_MetaInstance]:
|
|
1300
|
-
"""
|
|
1301
|
-
Shadow Table.with_alias.
|
|
452
|
+
from .fields import * # noqa: E402 F403 # isort: skip ; to fill globals() scope
|
|
453
|
+
from .define import TableDefinitionBuilder # noqa: E402
|
|
454
|
+
from .rows import TypedSet # noqa: E402
|
|
455
|
+
from .tables import TypedTable # noqa: E402
|
|
1302
456
|
|
|
1303
|
-
|
|
1304
|
-
|
|
1305
|
-
See Also:
|
|
1306
|
-
http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#One-to-many-relation
|
|
1307
|
-
"""
|
|
1308
|
-
table = self._ensure_table_defined()
|
|
1309
|
-
return typing.cast(Type[T_MetaInstance], table.with_alias(alias))
|
|
1310
|
-
|
|
1311
|
-
def unique_alias(self: Type[T_MetaInstance]) -> Type[T_MetaInstance]:
|
|
1312
|
-
"""
|
|
1313
|
-
Generates a unique alias for this table.
|
|
1314
|
-
|
|
1315
|
-
Useful for joins when joining the same table multiple times
|
|
1316
|
-
and you don't want to keep track of aliases yourself.
|
|
1317
|
-
"""
|
|
1318
|
-
key = f"{self.__name__.lower()}_{hash(uuid.uuid4())}"
|
|
1319
|
-
return self.with_alias(key)
|
|
1320
|
-
|
|
1321
|
-
-    # hooks:
-    def _hook_once(
-        cls: Type[T_MetaInstance],
-        hooks: list[typing.Callable[P, R]],
-        fn: typing.Callable[P, R],
-    ) -> Type[T_MetaInstance]:
-        @functools.wraps(fn)
-        def wraps(*a: P.args, **kw: P.kwargs) -> R:
-            try:
-                return fn(*a, **kw)
-            finally:
-                hooks.remove(wraps)
-
-        hooks.append(wraps)
-        return cls
-
-    def before_insert(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add a before insert hook.
-        """
-        if fn not in cls._before_insert:
-            cls._before_insert.append(fn)
-        return cls
-
-    def before_insert_once(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add a before insert hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._before_insert, fn)  # type: ignore
-
-    def after_insert(
-        cls: Type[T_MetaInstance],
-        fn: (
-            typing.Callable[[T_MetaInstance, Reference], Optional[bool]]
-            | typing.Callable[[OpRow, Reference], Optional[bool]]
-        ),
-    ) -> Type[T_MetaInstance]:
-        """
-        Add an after insert hook.
-        """
-        if fn not in cls._after_insert:
-            cls._after_insert.append(fn)
-        return cls
-
-    def after_insert_once(
-        cls: Type[T_MetaInstance],
-        fn: (
-            typing.Callable[[T_MetaInstance, Reference], Optional[bool]]
-            | typing.Callable[[OpRow, Reference], Optional[bool]]
-        ),
-    ) -> Type[T_MetaInstance]:
-        """
-        Add an after insert hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._after_insert, fn)  # type: ignore
-
-    def before_update(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add a before update hook.
-        """
-        if fn not in cls._before_update:
-            cls._before_update.append(fn)
-        return cls
-
-    def before_update_once(
-        cls,
-        fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add a before update hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._before_update, fn)  # type: ignore
-
-    def after_update(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add an after update hook.
-        """
-        if fn not in cls._after_update:
-            cls._after_update.append(fn)
-        return cls
-
-    def after_update_once(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add an after update hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._after_update, fn)  # type: ignore
-
-    def before_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
-        """
-        Add a before delete hook.
-        """
-        if fn not in cls._before_delete:
-            cls._before_delete.append(fn)
-        return cls
-
-    def before_delete_once(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[Set], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add a before delete hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._before_delete, fn)
-
-    def after_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
-        """
-        Add an after delete hook.
-        """
-        if fn not in cls._after_delete:
-            cls._after_delete.append(fn)
-        return cls
-
-    def after_delete_once(
-        cls: Type[T_MetaInstance],
-        fn: typing.Callable[[Set], Optional[bool]],
-    ) -> Type[T_MetaInstance]:
-        """
-        Add an after delete hook that only fires once and then removes itself.
-        """
-        return cls._hook_once(cls._after_delete, fn)
-
-    def reorder_fields(cls, *fields: str | Field | TypedField[Any], keep_others: bool = True) -> None:
-        """
-        Reorder fields of a typedal table.
-
-        Args:
-            fields: List of field names (str) or Field objects in desired order.
-            keep_others (bool):
-                - True (default): keep other fields at the end, in their original order.
-                - False: remove other fields (only keep what's specified).
-        """
-        return reorder_fields(cls._table, fields, keep_others=keep_others)
-
-
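A minimal usage sketch of the hook API removed above (the `Article` model and callback are illustrative assumptions, not part of this diff):

    # register a hook that runs on every insert, and one that removes itself after firing once
    def log_insert(row):
        print("about to insert", row)

    Article.before_insert(log_insert)
    Article.after_insert_once(lambda row, ref: print("first insert got id", ref))

Each registration method returns the class itself, so calls can be chained.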
-class TypedField(Expression, typing.Generic[T_Value]):  # pragma: no cover
-    """
-    Typed version of pydal.Field, which will be converted to a normal Field in the background.
-    """
-
-    # will be set by .bind on db.define
-    name = ""
-    _db: Optional[pydal.DAL] = None
-    _rname: Optional[str] = None
-    _table: Optional[Table] = None
-    _field: Optional[Field] = None
-
-    _type: T_annotation
-    kwargs: Any
-
-    requires: Validator | typing.Iterable[Validator]
-
-    # NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
-
-    def __init__(
-        self,
-        _type: Type[T_Value] | types.UnionType = str,  # type: ignore
-        /,
-        **settings: Unpack[FieldSettings],
-    ) -> None:
-        """
-        Typed version of pydal.Field, which will be converted to a normal Field in the background.
-
-        Provide the Python type for this field as the first positional argument
-        and any other settings to Field() as keyword parameters.
-        """
-        self._type = _type
-        self.kwargs = settings
-        # super().__init__()
-
-    @typing.overload
-    def __get__(self, instance: T_MetaInstance, owner: Type[T_MetaInstance]) -> T_Value:  # pragma: no cover
-        """
-        row.field -> (actual data).
-        """
-
-    @typing.overload
-    def __get__(self, instance: None, owner: "Type[TypedTable]") -> "TypedField[T_Value]":  # pragma: no cover
-        """
-        Table.field -> Field.
-        """
-
-    def __get__(
-        self,
-        instance: T_MetaInstance | None,
-        owner: Type[T_MetaInstance],
-    ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
-        """
-        Since this class is a Descriptor field, \
-        it returns something else depending on if it's called on a class or instance.
-
-        (this is mostly for mypy/typing)
-        """
-        if instance:
-            # this is only reached in a very specific case:
-            # an instance of the object was created with a specific set of fields selected (excluding the current one)
-            # in that case, no value was stored in the owner -> return None (since the field was not selected)
-            return typing.cast(T_Value, None)  # cast as T_Value so mypy understands it for selected fields
-        else:
-            # getting as class -> return actual field so pydal understands it when using in query etc.
-            return typing.cast(TypedField[T_Value], self._field)  # pretend it's still typed for IDE support
-
-    def __str__(self) -> str:
-        """
-        String representation of a Typed Field.
-
-        If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
-        otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
-        """
-        return str(self._field) if self._field else ""
-
-    def __repr__(self) -> str:
-        """
-        More detailed string representation of a Typed Field.
-
-        Uses __str__ and adds the provided extra options (kwargs) in the representation.
-        """
-        s = self.__str__()
-
-        if "type" in self.kwargs:
-            # manual type in kwargs supplied
-            t = self.kwargs["type"]
-        elif issubclass(type, type(self._type)):
-            # normal type, str.__name__ = 'str'
-            t = getattr(self._type, "__name__", str(self._type))
-        elif t_args := typing.get_args(self._type):
-            # list[str] -> 'str'
-            t = t_args[0].__name__
-        else:  # pragma: no cover
-            # fallback - something else, may not even happen, I'm not sure
-            t = self._type
-
-        s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
-
-        kw = self.kwargs.copy()
-        kw.pop("type", None)
-        return f"<{s} with options {kw}>"
-
-    def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
-        """
-        Convert a Typed Field instance to a pydal.Field.
-
-        Actual logic in TypeDAL._to_field but this function creates the pydal type name and updates the kwarg settings.
-        """
-        other_kwargs = self.kwargs.copy()
-        extra_kwargs.update(other_kwargs)  # <- modifies and overwrites the default kwargs with user-specified ones
-        return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
-
-    def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
-        """
-        Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
-        """
-        self._table = table
-        self._field = field
-
-    def __getattr__(self, key: str) -> Any:
-        """
-        If the regular getattribute does not work, try to get info from the related Field.
-        """
-        with contextlib.suppress(AttributeError):
-            return super().__getattribute__(key)
-
-        # try on actual field:
-        return getattr(self._field, key)
-
-    def __eq__(self, other: Any) -> Query:
-        """
-        Performing == on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field == other)
-
-    def __ne__(self, other: Any) -> Query:
-        """
-        Performing != on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field != other)
-
-    def __gt__(self, other: Any) -> Query:
-        """
-        Performing > on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field > other)
-
-    def __lt__(self, other: Any) -> Query:
-        """
-        Performing < on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field < other)
-
-    def __ge__(self, other: Any) -> Query:
-        """
-        Performing >= on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field >= other)
-
-    def __le__(self, other: Any) -> Query:
-        """
-        Performing <= on a Field will result in a Query.
-        """
-        return typing.cast(Query, self._field <= other)
-
-    def __hash__(self) -> int:
-        """
-        Shadow Field.__hash__.
-        """
-        return hash(self._field)
-
-    def __invert__(self) -> Expression:
-        """
-        Performing ~ on a Field will result in an Expression.
-        """
-        if not self._field:  # pragma: no cover
-            raise ValueError("Unbound Field can not be inverted!")
-
-        return typing.cast(Expression, ~self._field)
-
-    def lower(self) -> Expression:
-        """
-        For string-fields: compare lowercased values.
-        """
-        if not self._field:  # pragma: no cover
-            raise ValueError("Unbound Field can not be lowered!")
-
-        return typing.cast(Expression, self._field.lower())
-
-    # ... etc
-
-
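A sketch of how the comparison operators on a bound field behave, per the docstrings above (the `Person` model and `db` instance are assumed, not part of this diff):

    # == / >= on a bound TypedField produce pydal Queries, lower() produces an Expression
    adults = db(Person.age >= 18).select()
    case_insensitive = db(Person.name.lower() == "alice").select()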
-class _TypedTable:
-    """
-    This class is a final shared parent between TypedTable and Mixins.
-
-    This needs to exist because otherwise the __on_define__ of Mixins are not executed.
-    Notably, this class exists at a level ABOVE the `metaclass=TableMeta`,
-    because otherwise typing gets confused when Mixins are used and multiple types could satisfy
-    generic 'T subclass of TypedTable'
-    -> Setting 'TypedTable' as the parent for Mixin does not work at runtime (and works semi at type check time)
-    """
-
-    id: "TypedField[int]"
-
-    _before_insert: list[typing.Callable[[Self], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]]]
-    _after_insert: list[
-        typing.Callable[[Self, Reference], Optional[bool]] | typing.Callable[[OpRow, Reference], Optional[bool]]
-    ]
-    _before_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
-    _after_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
-    _before_delete: list[typing.Callable[[Set], Optional[bool]]]
-    _after_delete: list[typing.Callable[[Set], Optional[bool]]]
-
-    @classmethod
-    def __on_define__(cls, db: TypeDAL) -> None:
-        """
-        Method that can be implemented by tables to do an action after db.define is completed.
-
-        This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
-        where you need a reference to the current database, which may not exist yet when defining the model.
-        """
-
-    @classproperty
-    def _hooks(cls) -> dict[str, list[typing.Callable[..., Optional[bool]]]]:
-        return {
-            "before_insert": cls._before_insert,
-            "after_insert": cls._after_insert,
-            "before_update": cls._before_update,
-            "after_update": cls._after_update,
-            "before_delete": cls._before_delete,
-            "after_delete": cls._after_delete,
-        }
-
-
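A minimal sketch of the `__on_define__` callback described in the docstring above (the `User` model is an assumption; `IS_NOT_IN_DB` comes from pydal's validators):

    from pydal.validators import IS_NOT_IN_DB

    class User(TypedTable):
        email: str

        @classmethod
        def __on_define__(cls, db):
            # db only exists once db.define has run, which is why this runs in the hook
            cls.email.requires = IS_NOT_IN_DB(db, "user.email")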
-class TypedTable(_TypedTable, metaclass=TableMeta):
-    """
-    Enhanded modeling system on top of pydal's Table that adds typing and additional functionality.
-    """
-
-    # set up by 'new':
-    _row: Row | None = None
-    _rows: tuple[Row, ...] = ()
-
-    _with: list[str]
-
-    def _setup_instance_methods(self) -> None:
-        self.as_dict = self._as_dict  # type: ignore
-        self.__json__ = self.as_json = self._as_json  # type: ignore
-        # self.as_yaml = self._as_yaml  # type: ignore
-        self.as_xml = self._as_xml  # type: ignore
-
-        self.update = self._update  # type: ignore
-
-        self.delete_record = self._delete_record  # type: ignore
-        self.update_record = self._update_record  # type: ignore
-
-    def __new__(
-        cls,
-        row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None,
-        **filters: Any,
-    ) -> Self:
-        """
-        Create a Typed Rows model instance from an existing row, ID or query.
-
-        Examples:
-            MyTable(1)
-            MyTable(id=1)
-            MyTable(MyTable.id == 1)
-        """
-        table = cls._ensure_table_defined()
-        inst = super().__new__(cls)
-
-        if isinstance(row_or_id, TypedTable):
-            # existing typed table instance!
-            return typing.cast(Self, row_or_id)
-
-        elif isinstance(row_or_id, pydal.objects.Row):
-            row = row_or_id
-        elif row_or_id is not None:
-            row = table(row_or_id, **filters)
-        elif filters:
-            row = table(**filters)
-        else:
-            # dummy object
-            return inst
-
-        if not row:
-            return None  # type: ignore
-
-        inst._row = row
-
-        if hasattr(row, "id"):
-            inst.__dict__.update(row)
-        else:
-            # deal with _extra (and possibly others?)
-            # Row <{actual: {}, _extra: ...}>
-            inst.__dict__.update(row[str(cls)])
-
-        inst._setup_instance_methods()
-        return inst
-
-    def __iter__(self) -> typing.Generator[Any, None, None]:
-        """
-        Allows looping through the columns.
-        """
-        row = self._ensure_matching_row()
-        yield from iter(row)
-
-    def __getitem__(self, item: str) -> Any:
-        """
-        Allows dictionary notation to get columns.
-        """
-        if item in self.__dict__:
-            return self.__dict__.get(item)
-
-        # fallback to lookup in row
-        if self._row:
-            return self._row[item]
-
-        # nothing found!
-        raise KeyError(item)
-
-    def __getattr__(self, item: str) -> Any:
-        """
-        Allows dot notation to get columns.
-        """
-        if value := self.get(item):
-            return value
-
-        raise AttributeError(item)
-
-    def keys(self) -> list[str]:
-        """
-        Return the combination of row + relationship keys.
-
-        Used by dict(row).
-        """
-        return list(self._row.keys() if self._row else ()) + getattr(self, "_with", [])
-
-    def get(self, item: str, default: Any = None) -> Any:
-        """
-        Try to get a column from this instance, else return default.
-        """
-        try:
-            return self.__getitem__(item)
-        except KeyError:
-            return default
-
-    def __setitem__(self, key: str, value: Any) -> None:
-        """
-        Data can both be updated via dot and dict notation.
-        """
-        return setattr(self, key, value)
-
-    def __int__(self) -> int:
-        """
-        Calling int on a model instance will return its id.
-        """
-        return getattr(self, "id", 0)
-
-    def __bool__(self) -> bool:
-        """
-        If the instance has an underlying row with data, it is truthy.
-        """
-        return bool(getattr(self, "_row", False))
-
-    def _ensure_matching_row(self) -> Row:
-        if not getattr(self, "_row", None):
-            raise EnvironmentError("Trying to access non-existant row. Maybe it was deleted or not yet initialized?")
-        return self._row
-
-    def __repr__(self) -> str:
-        """
-        String representation of the model instance.
-        """
-        model_name = self.__class__.__name__
-        model_data = {}
-
-        if self._row:
-            model_data = self._row.as_json()
-
-        details = model_name
-        details += f"({model_data})"
-
-        if relationships := getattr(self, "_with", []):
-            details += f" + {relationships}"
-
-        return f"<{details}>"
-
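The constructor forms listed in the `__new__` docstring above, spelled out (the `MyTable` model is a placeholder):

    row = MyTable(1)                # look up by id
    row = MyTable(id=1)             # look up by keyword filter
    row = MyTable(MyTable.id == 1)  # look up by query
    print(int(row), bool(row))      # int() returns the id, bool() is False when no row matched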
-    # serialization
-    # underscore variants work for class instances (set up by _setup_instance_methods)
-
-    @classmethod
-    def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
-        """
-        Dump the object to a plain dict.
-
-        Can be used as both a class or instance method:
-        - dumps the table info if it's a class
-        - dumps the row info if it's an instance (see _as_dict)
-        """
-        table = cls._ensure_table_defined()
-        result = table.as_dict(flat, sanitize)
-        return typing.cast(AnyDict, result)
-
-    @classmethod
-    def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
-        """
-        Dump the object to json.
-
-        Can be used as both a class or instance method:
-        - dumps the table info if it's a class
-        - dumps the row info if it's an instance (see _as_json)
-        """
-        data = cls.as_dict(sanitize=sanitize)
-        return as_json.encode(data, indent=indent, **kwargs)
-
-    @classmethod
-    def as_xml(cls, sanitize: bool = True) -> str:  # pragma: no cover
-        """
-        Dump the object to xml.
-
-        Can be used as both a class or instance method:
-        - dumps the table info if it's a class
-        - dumps the row info if it's an instance (see _as_xml)
-        """
-        table = cls._ensure_table_defined()
-        return typing.cast(str, table.as_xml(sanitize))
-
-    @classmethod
-    def as_yaml(cls, sanitize: bool = True) -> str:
-        """
-        Dump the object to yaml.
-
-        Can be used as both a class or instance method:
-        - dumps the table info if it's a class
-        - dumps the row info if it's an instance (see _as_yaml)
-        """
-        table = cls._ensure_table_defined()
-        return typing.cast(str, table.as_yaml(sanitize))
-
-    def _as_dict(
-        self,
-        datetime_to_str: bool = False,
-        custom_types: typing.Iterable[type] | type | None = None,
-    ) -> AnyDict:
-        row = self._ensure_matching_row()
-
-        result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
-
-        def asdict_method(obj: Any) -> Any:  # pragma: no cover
-            if hasattr(obj, "_as_dict"):  # typedal
-                return obj._as_dict()
-            elif hasattr(obj, "as_dict"):  # pydal
-                return obj.as_dict()
-            else:  # something else??
-                return obj.__dict__
-
-        if _with := getattr(self, "_with", None):
-            for relationship in _with:
-                data = self.get(relationship)
-
-                if isinstance(data, list):
-                    data = [asdict_method(_) for _ in data]
-                elif data:
-                    data = asdict_method(data)
-
-                result[relationship] = data
-
-        return typing.cast(AnyDict, result)
-
-    def _as_json(
-        self,
-        default: typing.Callable[[Any], Any] = None,
-        indent: Optional[int] = None,
-        **kwargs: Any,
-    ) -> str:
-        data = self._as_dict()
-        return as_json.encode(data, default=default, indent=indent, **kwargs)
-
-    def _as_xml(self, sanitize: bool = True) -> str:  # pragma: no cover
-        row = self._ensure_matching_row()
-        return typing.cast(str, row.as_xml(sanitize))
-
-    # def _as_yaml(self, sanitize: bool = True) -> str:
-    #     row = self._ensure_matching_row()
-    #     return typing.cast(str, row.as_yaml(sanitize))
-
-    def __setattr__(self, key: str, value: Any) -> None:
-        """
-        When setting a property on a Typed Table model instance, also update the underlying row.
-        """
-        if self._row and key in self._row.__dict__ and not callable(value):
-            # enables `row.key = value; row.update_record()`
-            self._row[key] = value
-
-        super().__setattr__(key, value)
-
-    @classmethod
-    def update(cls: Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
-        """
-        Update one record.
-
-        Example:
-            MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
-        """
-        # todo: update multiple?
-        if record := cls(query):
-            return record.update_record(**fields)
-        else:
-            return None
-
-    def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
-        row = self._ensure_matching_row()
-        row.update(**fields)
-        self.__dict__.update(**fields)
-        return self
-
-    def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
-        row = self._ensure_matching_row()
-        new_row = row.update_record(**fields)
-        self.update(**new_row)
-        return self
-
-    def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:  # pragma: no cover
-        """
-        Here as a placeholder for _update_record.
-
-        Will be replaced on instance creation!
-        """
-        return self._update_record(**fields)
-
-    def _delete_record(self) -> int:
-        """
-        Actual logic in `pydal.helpers.classes.RecordDeleter`.
-        """
-        row = self._ensure_matching_row()
-        result = row.delete_record()
-        self.__dict__ = {}  # empty self, since row is no more.
-        self._row = None  # just to be sure
-        self._setup_instance_methods()
-        # ^ instance methods might've been deleted by emptying dict,
-        # but we still want .as_dict to show an error, not the table's as_dict.
-        return typing.cast(int, result)
-
-    def delete_record(self) -> int:  # pragma: no cover
-        """
-        Here as a placeholder for _delete_record.
-
-        Will be replaced on instance creation!
-        """
-        return self._delete_record()
-
-    # __del__ is also called on the end of a scope so don't remove records on every del!!
-
-    # pickling:
-
-    def __getstate__(self) -> AnyDict:
-        """
-        State to save when pickling.
-
-        Prevents db connection from being pickled.
-        Similar to as_dict but without changing the data of the relationships (dill does that recursively)
-        """
-        row = self._ensure_matching_row()
-        result: AnyDict = row.as_dict()
-
-        if _with := getattr(self, "_with", None):
-            result["_with"] = _with
-            for relationship in _with:
-                data = self.get(relationship)
-
-                result[relationship] = data
-
-        result["_row"] = self._row.as_json() if self._row else ""
-        return result
-
-    def __setstate__(self, state: AnyDict) -> None:
-        """
-        Used by dill when loading from a bytestring.
-        """
-        # as_dict also includes table info, so dump as json to only get the actual row data
-        # then create a new (more empty) row object:
-        state["_row"] = Row(json.loads(state["_row"]))
-        self.__dict__ |= state
-
-    @classmethod
-    def _sql(cls) -> str:
-        """
-        Generate SQL Schema for this table via pydal2sql (if 'migrations' extra is installed).
-        """
-        try:
-            import pydal2sql
-        except ImportError as e:  # pragma: no cover
-            raise RuntimeError("Can not generate SQL without the 'migration' extra or `pydal2sql` installed!") from e
-
-        return pydal2sql.generate_sql(cls)
-
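A small round-trip based on the update/delete docstrings above (the `MyTable` model is a placeholder):

    record = MyTable.update(MyTable.id == 1, name="NewName")  # classmethod: fetch the row, then update_record
    if record:
        record.name = "Another"
        record.update_record()   # persists the attribute change to the underlying row
        record.delete_record()   # deletes the row and empties the instance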
-    def render(self, fields: list[Field] = None, compact: bool = False) -> Self:
-        """
-        Renders a copy of the object with potentially modified values.
-
-        Args:
-            fields: A list of fields to render. Defaults to all representable fields in the table.
-            compact: Whether to return only the value of the first field if there is only one field.
-
-        Returns:
-            A copy of the object with potentially modified values.
-        """
-        row = copy.deepcopy(self)
-        keys = list(row)
-        if not fields:
-            fields = [self._table[f] for f in self._table._fields]
-        fields = [f for f in fields if isinstance(f, Field) and f.represent]
-
-        for field in fields:
-            if field._table == self._table:
-                row[field.name] = self._db.represent(
-                    "rows_render",
-                    field,
-                    row[field.name],
-                    row,
-                )
-            # else: relationship, different logic:
-
-        for relation_name in getattr(row, "_with", []):
-            if relation := self._relationships.get(relation_name):
-                relation_table = relation.table
-                if isinstance(relation_table, str):
-                    relation_table = self._db[relation_table]
-
-                relation_row = row[relation_name]
-
-                if isinstance(relation_row, list):
-                    # list of rows
-                    combined = []
-
-                    for related_og in relation_row:
-                        related = copy.deepcopy(related_og)
-                        for fieldname in related:
-                            field = relation_table[fieldname]
-                            related[field.name] = self._db.represent(
-                                "rows_render",
-                                field,
-                                related[field.name],
-                                related,
-                            )
-                        combined.append(related)
-
-                    row[relation_name] = combined
-                else:
-                    # 1 row
-                    for fieldname in relation_row:
-                        field = relation_table[fieldname]
-                        row[relation_name][fieldname] = self._db.represent(
-                            "rows_render",
-                            field,
-                            relation_row[field.name],
-                            relation_row,
-                        )
-
-        if compact and len(keys) == 1 and keys[0] != "_extra":  # pragma: no cover
-            return typing.cast(Self, row[keys[0]])
-        return row
-
-
-# backwards compat:
-TypedRow = TypedTable
-
-
-class TypedRows(typing.Collection[T_MetaInstance], Rows):
-    """
-    Slighly enhaned and typed functionality on top of pydal Rows (the result of a select).
-    """
-
-    records: dict[int, T_MetaInstance]
-    # _rows: Rows
-    model: Type[T_MetaInstance]
-    metadata: Metadata
-
-    # pseudo-properties: actually stored in _rows
-    db: TypeDAL
-    colnames: list[str]
-    fields: list[Field]
-    colnames_fields: list[Field]
-    response: list[tuple[Any, ...]]
-
-    def __init__(
-        self,
-        rows: Rows,
-        model: Type[T_MetaInstance],
-        records: dict[int, T_MetaInstance] = None,
-        metadata: Metadata = None,
-        raw: dict[int, list[Row]] = None,
-    ) -> None:
-        """
-        Should not be called manually!
-
-        Normally, the `records` from an existing `Rows` object are used
-        but these can be overwritten with a `records` dict.
-        `metadata` can be any (un)structured data
-        `model` is a Typed Table class
-        """
-
-        def _get_id(row: Row) -> int:
-            """
-            Try to find the id field in a row.
-
-            If _extra exists, the row changes:
-            <Row {'test_relationship': {'id': 1}, '_extra': {'COUNT("test_relationship"."querytable")': 8}}>
-            """
-            if idx := getattr(row, "id", None):
-                return typing.cast(int, idx)
-            elif main := getattr(row, str(model), None):
-                return typing.cast(int, main.id)
-            else:  # pragma: no cover
-                raise NotImplementedError(f"`id` could not be found for {row}")
-
-        records = records or {_get_id(row): model(row) for row in rows}
-        raw = raw or {}
-
-        for idx, entity in records.items():
-            entity._rows = tuple(raw.get(idx, []))
-
-        super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
-        self.model = model
-        self.metadata = metadata or {}
-        self.colnames = rows.colnames
-
-    def __len__(self) -> int:
-        """
-        Return the count of rows.
-        """
-        return len(self.records)
-
-    def __iter__(self) -> typing.Iterator[T_MetaInstance]:
-        """
-        Loop through the rows.
-        """
-        yield from self.records.values()
-
-    def __contains__(self, ind: Any) -> bool:
-        """
-        Check if an id exists in this result set.
-        """
-        return ind in self.records
-
-    def first(self) -> T_MetaInstance | None:
-        """
-        Get the row with the lowest id.
-        """
-        if not self.records:
-            return None
-
-        return next(iter(self))
-
-    def last(self) -> T_MetaInstance | None:
-        """
-        Get the row with the highest id.
-        """
-        if not self.records:
-            return None
-
-        max_id = max(self.records.keys())
-        return self[max_id]
-
-    def find(
-        self,
-        f: typing.Callable[[T_MetaInstance], Query],
-        limitby: tuple[int, int] = None,
-    ) -> "TypedRows[T_MetaInstance]":
-        """
-        Returns a new Rows object, a subset of the original object, filtered by the function `f`.
-        """
-        if not self.records:
-            return self.__class__(self, self.model, {})
-
-        records = {}
-        if limitby:
-            _min, _max = limitby
-        else:
-            _min, _max = 0, len(self)
-        count = 0
-        for i, row in self.records.items():
-            if f(row):
-                if _min <= count:
-                    records[i] = row
-                count += 1
-                if count == _max:
-                    break
-
-        return self.__class__(self, self.model, records)
-
-    def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
-        """
-        Removes elements from the calling Rows object, filtered by the function `f`, \
-        and returns a new Rows object containing the removed elements.
-        """
-        if not self.records:
-            return self.__class__(self, self.model, {})
-        removed = {}
-        to_remove = []
-        for i in self.records:
-            row = self[i]
-            if f(row):
-                removed[i] = self.records[i]
-                to_remove.append(i)
-
-        [self.records.pop(i) for i in to_remove]
-
-        return self.__class__(
-            self,
-            self.model,
-            removed,
-        )
-
-    def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
-        """
-        Returns a list of sorted elements (not sorted in place).
-        """
-        return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
-
-    def __str__(self) -> str:
-        """
-        Simple string representation.
-        """
-        return f"<TypedRows with {len(self)} records>"
-
-    def __repr__(self) -> str:
-        """
-        Print a table on repr().
-        """
-        data = self.as_dict()
-        try:
-            headers = list(next(iter(data.values())).keys())
-        except StopIteration:
-            headers = []
-
-        return mktable(data, headers)
-
-    def group_by_value(
-        self,
-        *fields: "str | Field | TypedField[T]",
-        one_result: bool = False,
-        **kwargs: Any,
-    ) -> dict[T, list[T_MetaInstance]]:
-        """
-        Group the rows by a specific field (which will be the dict key).
-        """
-        kwargs["one_result"] = one_result
-        result = super().group_by_value(*fields, **kwargs)
-        return typing.cast(dict[T, list[T_MetaInstance]], result)
-
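A short sketch of filtering a result set with the methods above (here `rows` is assumed to be a TypedRows[Person] obtained from a select; `Person` is a placeholder model):

    youngest = rows.first()                               # lowest id
    adults = rows.find(lambda p: p.age >= 18, limitby=(0, 10))
    minors = rows.exclude(lambda p: p.age < 18)           # removed rows are returned, rows is modified
    by_age = rows.group_by_value(Person.age)              # dict keyed by the field value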
-    def as_csv(self) -> str:
-        """
-        Dump the data to csv.
-        """
-        return typing.cast(str, super().as_csv())
-
-    def as_dict(
-        self,
-        key: str | Field | None = None,
-        compact: bool = False,
-        storage_to_dict: bool = False,
-        datetime_to_str: bool = False,
-        custom_types: list[type] | None = None,
-    ) -> dict[int, AnyDict]:
-        """
-        Get the data in a dict of dicts.
-        """
-        if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
-            # functionality not guaranteed
-            if isinstance(key, Field):
-                key = key.name
-
-            return typing.cast(
-                dict[int, AnyDict],
-                super().as_dict(
-                    key or "id",
-                    compact,
-                    storage_to_dict,
-                    datetime_to_str,
-                    custom_types,
-                ),
-            )
-
-        return {k: v.as_dict() for k, v in self.records.items()}
-
-    def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
-        """
-        Turn the data into a dict and then dump to JSON.
-        """
-        data = self.as_list()
-
-        return as_json.encode(data, default=default, indent=indent, **kwargs)
-
-    def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
-        """
-        Turn the data into a dict and then dump to JSON.
-        """
-        return self.as_json(default=default, indent=indent, **kwargs)
-
-    def as_list(
-        self,
-        compact: bool = False,
-        storage_to_dict: bool = False,
-        datetime_to_str: bool = False,
-        custom_types: list[type] = None,
-    ) -> list[AnyDict]:
-        """
-        Get the data in a list of dicts.
-        """
-        if any([compact, storage_to_dict, datetime_to_str, custom_types]):
-            return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
-
-        return [_.as_dict() for _ in self.records.values()]
-
-    def __getitem__(self, item: int) -> T_MetaInstance:
-        """
-        You can get a specific row by ID from a typedrows by using rows[idx] notation.
-
-        Since pydal's implementation differs (they expect a list instead of a dict with id keys),
-        using rows[0] will return the first row, regardless of its id.
-        """
-        try:
-            return self.records[item]
-        except KeyError as e:
-            if item == 0 and (row := self.first()):
-                # special case: pydal internals think Rows.records is a list, not a dict
-                return row
-
-            raise e
-
-    def get(self, item: int) -> typing.Optional[T_MetaInstance]:
-        """
-        Get a row by ID, or receive None if it isn't in this result set.
-        """
-        return self.records.get(item)
-
-    def update(self, **new_values: Any) -> bool:
-        """
-        Update the current rows in the database with new_values.
-        """
-        # cast to make mypy understand .id is a TypedField and not an int!
-        table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
-
-        ids = set(self.column("id"))
-        query = table.id.belongs(ids)
-        return bool(self.db(query).update(**new_values))
-
-    def delete(self) -> bool:
-        """
-        Delete the currently selected rows from the database.
-        """
-        # cast to make mypy understand .id is a TypedField and not an int!
-        table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
-
-        ids = set(self.column("id"))
-        query = table.id.belongs(ids)
-        return bool(self.db(query).delete())
-
-    def join(
-        self,
-        field: "Field | TypedField[Any]",
-        name: str = None,
-        constraint: Query = None,
-        fields: list[str | Field] = None,
-        orderby: Optional[str | Field] = None,
-    ) -> T_MetaInstance:
-        """
-        This can be used to JOIN with some relationships after the initial select.
-
-        Using the querybuilder's .join() method is prefered!
-        """
-        result = super().join(field, name, constraint, fields or [], orderby)
-        return typing.cast(T_MetaInstance, result)
-
-    def export_to_csv_file(
-        self,
-        ofile: typing.TextIO,
-        null: Any = "<NULL>",
-        delimiter: str = ",",
-        quotechar: str = '"',
-        quoting: int = csv.QUOTE_MINIMAL,
-        represent: bool = False,
-        colnames: list[str] = None,
-        write_colnames: bool = True,
-        *args: Any,
-        **kwargs: Any,
-    ) -> None:
-        """
-        Shadow export_to_csv_file from Rows, but with typing.
-
-        See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
-        """
-        super().export_to_csv_file(
-            ofile,
-            null,
-            *args,
-            delimiter=delimiter,
-            quotechar=quotechar,
-            quoting=quoting,
-            represent=represent,
-            colnames=colnames or self.colnames,
-            write_colnames=write_colnames,
-            **kwargs,
-        )
-
-    @classmethod
-    def from_rows(
-        cls,
-        rows: Rows,
-        model: Type[T_MetaInstance],
-        metadata: Metadata = None,
-    ) -> "TypedRows[T_MetaInstance]":
-        """
-        Internal method to convert a Rows object to a TypedRows.
-        """
-        return cls(rows, model, metadata=metadata)
-
-    def __getstate__(self) -> AnyDict:
-        """
-        Used by dill to dump to bytes (exclude db connection etc).
-        """
-        return {
-            "metadata": json.dumps(self.metadata, default=str),
-            "records": self.records,
-            "model": str(self.model._table),
-            "colnames": self.colnames,
-        }
-
-    def __setstate__(self, state: AnyDict) -> None:
-        """
-        Used by dill when loading from a bytestring.
-        """
-        state["metadata"] = json.loads(state["metadata"])
-        self.__dict__.update(state)
-        # db etc. set after undill by caching.py
-
-    def render(
-        self, i: int | None = None, fields: list[Field] | None = None
-    ) -> typing.Generator[T_MetaInstance, None, None]:
-        """
-        Takes an index and returns a copy of the indexed row with values \
-        transformed via the "represent" attributes of the associated fields.
-
-        Args:
-            i: index. If not specified, a generator is returned for iteration
-                over all the rows.
-            fields: a list of fields to transform (if None, all fields with
-                "represent" attributes will be transformed)
-        """
-        if i is None:
-            # difference: uses .keys() instead of index
-            return (self.render(i, fields=fields) for i in self.records)
-
-        if not self.db.has_representer("rows_render"):  # pragma: no cover
-            raise RuntimeError(
-                "Rows.render() needs a `rows_render` representer in DAL instance",
-            )
-
-        row = self.records[i]
-        return row.render(fields, compact=self.compact)
-
-
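A sketch of the id-keyed access and bulk operations defined above (here `rows` is assumed to be a TypedRows result set):

    row = rows[42]               # record with id 42; rows[0] falls back to the first record
    maybe = rows.get(999)        # None if id 999 is not in this result set
    rows.update(active=False)    # bulk UPDATE for all selected ids
    with open("dump.csv", "w") as f:
        rows.export_to_csv_file(f)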
-from .caching import (  # noqa: E402
-    _remove_cache,
+from .caching import (  # isort: skip # noqa: E402
     _TypedalCache,
     _TypedalCacheDependency,
-    create_and_hash_cache_key,
-    get_expire,
-    load_from_cache,
-    save_to_cache,
 )
-
-
-def normalize_table_keys(row: Row, pattern: re.Pattern[str] = re.compile(r"^([a-zA-Z_]+)_(\d{5,})$")) -> Row:
-    """
-    Normalize table keys in a PyDAL Row object by stripping numeric hash suffixes from table names, \
-    only if the suffix is 5 or more digits.
-
-    For example:
-        Row({'articles_12345': {...}}) -> Row({'articles': {...}})
-        Row({'articles_123': {...}}) -> unchanged
-
-    Returns:
-        Row: A new Row object with normalized keys.
-    """
-    new_data: dict[str, Any] = {}
-    for key, value in row.items():
-        if match := pattern.match(key):
-            base, _suffix = match.groups()
-            normalized_key = base
-            new_data[normalized_key] = value
-        else:
-            new_data[key] = value
-    return Row(new_data)
-
-
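The behaviour documented in the normalize_table_keys docstring above, as a concrete call:

    from pydal.objects import Row

    raw = Row({"articles_12345": {"id": 1}, "articles_123": {"id": 2}})
    clean = normalize_table_keys(raw)
    # -> {"articles": {"id": 1}, "articles_123": {"id": 2}}  (only suffixes of 5+ digits are stripped)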
-class QueryBuilder(typing.Generic[T_MetaInstance]):
-    """
-    Abstration on top of pydal's query system.
-    """
-
-    model: Type[T_MetaInstance]
-    query: Query
-    select_args: list[Any]
-    select_kwargs: SelectKwargs
-    relationships: dict[str, Relationship[Any]]
-    metadata: Metadata
-
-    def __init__(
-        self,
-        model: Type[T_MetaInstance],
-        add_query: Optional[Query] = None,
-        select_args: Optional[list[Any]] = None,
-        select_kwargs: Optional[SelectKwargs] = None,
-        relationships: dict[str, Relationship[Any]] = None,
-        metadata: Metadata = None,
-    ):
-        """
-        Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable.
-
-        Example:
-            MyTable.where(...) -> QueryBuilder[MyTable]
-        """
-        self.model = model
-        table = model._ensure_table_defined()
-        default_query = typing.cast(Query, table.id > 0)
-        self.query = add_query or default_query
-        self.select_args = select_args or []
-        self.select_kwargs = select_kwargs or {}
-        self.relationships = relationships or {}
-        self.metadata = metadata or {}
-
-    def __str__(self) -> str:
-        """
-        Simple string representation for the query builder.
-        """
-        return f"QueryBuilder for {self.model}"
-
-    def __repr__(self) -> str:
-        """
-        Advanced string representation for the query builder.
-        """
-        return (
-            f"<QueryBuilder for {self.model} with "
-            f"{len(self.select_args)} select args; "
-            f"{len(self.select_kwargs)} select kwargs; "
-            f"{len(self.relationships)} relationships; "
-            f"query: {bool(self.query)}; "
-            f"metadata: {self.metadata}; "
-            f">"
-        )
-
-    def __bool__(self) -> bool:
-        """
-        Querybuilder is truthy if it has any conditions.
-        """
-        table = self.model._ensure_table_defined()
-        default_query = typing.cast(Query, table.id > 0)
-        return any(
-            [
-                self.query != default_query,
-                self.select_args,
-                self.select_kwargs,
-                self.relationships,
-                self.metadata,
-            ],
-        )
-
-    def _extend(
-        self,
-        add_query: Optional[Query] = None,
-        overwrite_query: Optional[Query] = None,
-        select_args: Optional[list[Any]] = None,
-        select_kwargs: Optional[SelectKwargs] = None,
-        relationships: dict[str, Relationship[Any]] = None,
-        metadata: Metadata = None,
-    ) -> "QueryBuilder[T_MetaInstance]":
-        return QueryBuilder(
-            self.model,
-            (add_query & self.query) if add_query else overwrite_query or self.query,
-            (self.select_args + select_args) if select_args else self.select_args,
-            (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
-            (self.relationships | relationships) if relationships else self.relationships,
-            (self.metadata | (metadata or {})) if metadata else self.metadata,
-        )
-
-    def select(self, *fields: Any, **options: Unpack[SelectKwargs]) -> "QueryBuilder[T_MetaInstance]":
-        """
-        Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
-
-        Options:
-            paraphrased from the web2py pydal docs,
-            For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
-
-            orderby: field(s) to order by. Supported:
-                table.name - sort by name, ascending
-                ~table.name - sort by name, descending
-                <random> - sort randomly
-                table.name|table.id - sort by two fields (first name, then id)
-
-            groupby, having: together with orderby:
-                groupby can be a field (e.g. table.name) to group records by
-                having can be a query, only those `having` the condition are grouped
-
-            limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
-            distinct: bool/field. Only select rows that differ
-            orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
-            join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
-            left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
-            cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
-        """
-        return self._extend(select_args=list(fields), select_kwargs=options)
-
-    def orderby(self, *fields: OrderBy) -> "QueryBuilder[T_MetaInstance]":
-        """
-        Order the query results by specified fields.
-
-        Args:
-            fields: field(s) to order by. Supported:
-                table.name - sort by name, ascending
-                ~table.name - sort by name, descending
-                <random> - sort randomly
-                table.name|table.id - sort by two fields (first name, then id)
-
-        Returns:
-            QueryBuilder: A new QueryBuilder instance with the ordering applied.
-        """
-        return self.select(orderby=fields)
-
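A sketch combining the select and orderby options described above (the `Person` model is an assumption; the builder is started via `.where()` as the docstring suggests):

    builder = Person.where(Person.age >= 18).select(Person.id, Person.name, distinct=True)
    builder = builder.orderby(~Person.name)            # descending by name
    builder = builder.orderby(Person.name | Person.id) # name first, then id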
def where(
|
|
2705
|
-
self,
|
|
2706
|
-
*queries_or_lambdas: Query | typing.Callable[[Type[T_MetaInstance]], Query] | dict[str, Any],
|
|
2707
|
-
**filters: Any,
|
|
2708
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2709
|
-
"""
|
|
2710
|
-
Extend the builder's query.
|
|
2711
|
-
|
|
2712
|
-
Can be used in multiple ways:
|
|
2713
|
-
.where(Query) -> with a direct query such as `Table.id == 5`
|
|
2714
|
-
.where(lambda table: table.id == 5) -> with a query via a lambda
|
|
2715
|
-
.where(id=5) -> via keyword arguments
|
|
2716
|
-
|
|
2717
|
-
When using multiple where's, they will be ANDed:
|
|
2718
|
-
.where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id=6)
|
|
2719
|
-
When passing multiple queries to a single .where, they will be ORed:
|
|
2720
|
-
.where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id=6)
|
|
2721
|
-
"""
|
|
2722
|
-
new_query = self.query
|
|
2723
|
-
table = self.model._ensure_table_defined()
|
|
2724
|
-
|
|
2725
|
-
queries_or_lambdas = (
|
|
2726
|
-
*queries_or_lambdas,
|
|
2727
|
-
filters,
|
|
2728
|
-
)
|
|
2729
|
-
|
|
2730
|
-
subquery = typing.cast(Query, DummyQuery())
|
|
2731
|
-
for query_part in queries_or_lambdas:
|
|
2732
|
-
if isinstance(query_part, (Field, pydal.objects.Field)) or is_typed_field(query_part):
|
|
2733
|
-
subquery |= typing.cast(Query, query_part != None)
|
|
2734
|
-
elif isinstance(query_part, (pydal.objects.Query, Expression, pydal.objects.Expression)):
|
|
2735
|
-
subquery |= typing.cast(Query, query_part)
|
|
2736
|
-
elif callable(query_part):
|
|
2737
|
-
if result := query_part(self.model):
|
|
2738
|
-
subquery |= result
|
|
2739
|
-
elif isinstance(query_part, dict):
|
|
2740
|
-
subsubquery = DummyQuery()
|
|
2741
|
-
for field, value in query_part.items():
|
|
2742
|
-
subsubquery &= table[field] == value
|
|
2743
|
-
if subsubquery:
|
|
2744
|
-
subquery |= subsubquery
|
|
2745
|
-
else:
|
|
2746
|
-
raise ValueError(f"Unexpected query type ({type(query_part)}).")
|
|
2747
|
-
|
|
2748
|
-
if subquery:
|
|
2749
|
-
new_query &= subquery
|
|
2750
|
-
|
|
2751
|
-
return self._extend(overwrite_query=new_query)
|
|
2752
|
-
|
|
2753
|
-
def join(
|
|
2754
|
-
self,
|
|
2755
|
-
*fields: str | Type[TypedTable],
|
|
2756
|
-
method: JOIN_OPTIONS = None,
|
|
2757
|
-
on: OnQuery | list[Expression] | Expression = None,
|
|
2758
|
-
condition: Condition = None,
|
|
2759
|
-
condition_and: Condition = None,
|
|
2760
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2761
|
-
"""
|
|
2762
|
-
Include relationship fields in the result.
|
|
2763
|
-
|
|
2764
|
-
`fields` can be names of Relationships on the current model.
|
|
2765
|
-
If no fields are passed, all will be used.
|
|
2766
|
-
|
|
2767
|
-
By default, the `method` defined in the relationship is used.
|
|
2768
|
-
This can be overwritten with the `method` keyword argument (left or inner)
|
|
2769
|
-
|
|
2770
|
-
`condition_and` can be used to add extra conditions to an inner join.
|
|
2771
|
-
"""
|
|
2772
|
-
# todo: allow limiting amount of related rows returned for join?
|
|
2773
|
-
# todo: it would be nice if 'fields' could be an actual relationship
|
|
2774
|
-
# (Article.tags = list[Tag]) and you could change the .condition and .on
|
|
2775
|
-
# this could deprecate condition_and
|
|
2776
|
-
|
|
2777
|
-
relationships = self.model.get_relationships()
|
|
2778
|
-
|
|
2779
|
-
if condition and on:
|
|
2780
|
-
raise ValueError("condition and on can not be used together!")
|
|
2781
|
-
elif condition:
|
|
2782
|
-
if len(fields) != 1:
|
|
2783
|
-
raise ValueError("join(field, condition=...) can only be used with exactly one field!")
|
|
2784
|
-
|
|
2785
|
-
if isinstance(condition, pydal.objects.Query):
|
|
2786
|
-
condition = as_lambda(condition)
|
|
2787
|
-
|
|
2788
|
-
to_field = typing.cast(Type[TypedTable], fields[0])
|
|
2789
|
-
relationships = {
|
|
2790
|
-
str(to_field): Relationship(to_field, condition=condition, join=method, condition_and=condition_and)
|
|
2791
|
-
}
|
|
2792
|
-
elif on:
|
|
2793
|
-
if len(fields) != 1:
|
|
2794
|
-
raise ValueError("join(field, on=...) can only be used with exactly one field!")
|
|
2795
|
-
|
|
2796
|
-
if isinstance(on, pydal.objects.Expression):
|
|
2797
|
-
on = [on]
|
|
2798
|
-
|
|
2799
|
-
if isinstance(on, list):
|
|
2800
|
-
on = as_lambda(on)
|
|
2801
|
-
|
|
2802
|
-
to_field = typing.cast(Type[TypedTable], fields[0])
|
|
2803
|
-
relationships = {str(to_field): Relationship(to_field, on=on, join=method, condition_and=condition_and)}
|
|
2804
|
-
|
|
2805
|
-
else:
|
|
2806
|
-
if fields:
|
|
2807
|
-
# only join on the specified relationships
|
|
2808
|
-
relationships = {str(k): relationships[str(k)].clone(condition_and=condition_and) for k in fields}
|
|
2809
|
-
|
|
2810
|
-
if method:
|
|
2811
|
-
relationships = {
|
|
2812
|
-
str(k): r.clone(join=method, condition_and=condition_and) for k, r in relationships.items()
|
|
2813
|
-
}
|
|
2814
|
-
|
|
2815
|
-
return self._extend(relationships=relationships)
|
|
2816
|
-
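A usage sketch, assuming hypothetical `Article` and `Tag` models where `Article` defines `author` and `tags` relationships and a `main_tag` reference (all names are illustrative only):
```
# join on named relationships defined on the model (join method defaults per relationship):
articles = Article.where(Article.id > 0).join("author", "tags").collect()

# force a left join for every relationship on the model:
articles = Article.where(Article.id > 0).join(method="left").collect()

# ad-hoc join on another table with an explicit condition (exactly one target allowed):
articles = (
    Article.where(Article.id > 0)
    .join(Tag, condition=lambda article, tag: article.main_tag == tag.id)
    .collect()
)
```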
|
|
2817
|
-
def cache(
|
|
2818
|
-
self,
|
|
2819
|
-
*deps: Any,
|
|
2820
|
-
expires_at: Optional[dt.datetime] = None,
|
|
2821
|
-
ttl: Optional[int | dt.timedelta] = None,
|
|
2822
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2823
|
-
"""
|
|
2824
|
-
Enable caching for this query so that repeated calls are loaded from a dill-pickled cache row \
|
|
2825
|
-
instead of executing the SQL and collecting matching rows again.
|
|
2826
|
-
"""
|
|
2827
|
-
existing = self.metadata.get("cache", {})
|
|
2828
|
-
|
|
2829
|
-
metadata: Metadata = {}
|
|
2830
|
-
|
|
2831
|
-
cache_meta = typing.cast(
|
|
2832
|
-
CacheMetadata,
|
|
2833
|
-
self.metadata.get("cache", {})
|
|
2834
|
-
| {
|
|
2835
|
-
"enabled": True,
|
|
2836
|
-
"depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
|
|
2837
|
-
"expires_at": get_expire(expires_at=expires_at, ttl=ttl),
|
|
2838
|
-
},
|
|
2839
|
-
)
|
|
2840
|
-
|
|
2841
|
-
metadata["cache"] = cache_meta
|
|
2842
|
-
return self._extend(metadata=metadata)
|
|
2843
|
-
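A caching sketch under the same assumptions (hypothetical `Person` model); `ttl` and `expires_at` map directly onto the parameters above:
```
import datetime as dt

# cache the collected rows for five minutes:
people = Person.where(Person.age >= 18).cache(ttl=dt.timedelta(minutes=5)).collect()

# or keep them until an absolute moment:
people = Person.where(Person.age >= 18).cache(expires_at=dt.datetime(2030, 1, 1)).collect()
```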
|
|
2844
|
-
def _get_db(self) -> TypeDAL:
|
|
2845
|
-
if db := self.model._db:
|
|
2846
|
-
return db
|
|
2847
|
-
else: # pragma: no cover
|
|
2848
|
-
raise EnvironmentError("@define or db.define is not called on this class yet!")
|
|
2849
|
-
|
|
2850
|
-
def _select_arg_convert(self, arg: Any) -> Any:
|
|
2851
|
-
# TypedFields are not really used at runtime anymore, but leave this in for safety:
|
|
2852
|
-
if isinstance(arg, TypedField): # pragma: no cover
|
|
2853
|
-
arg = arg._field
|
|
2854
|
-
|
|
2855
|
-
return arg
|
|
2856
|
-
|
|
2857
|
-
def delete(self) -> list[int]:
|
|
2858
|
-
"""
|
|
2859
|
-
Based on the current query, delete rows and return a list of deleted IDs.
|
|
2860
|
-
"""
|
|
2861
|
-
db = self._get_db()
|
|
2862
|
-
removed_ids = [_.id for _ in db(self.query).select("id")]
|
|
2863
|
-
if db(self.query).delete():
|
|
2864
|
-
# success!
|
|
2865
|
-
return removed_ids
|
|
2866
|
-
|
|
2867
|
-
return []
|
|
2868
|
-
|
|
2869
|
-
def _delete(self) -> str:
|
|
2870
|
-
db = self._get_db()
|
|
2871
|
-
return str(db(self.query)._delete())
|
|
2872
|
-
|
|
2873
|
-
def update(self, **fields: Any) -> list[int]:
|
|
2874
|
-
"""
|
|
2875
|
-
Based on the current query, update `fields` and return a list of updated IDs.
|
|
2876
|
-
"""
|
|
2877
|
-
# todo: limit?
|
|
2878
|
-
db = self._get_db()
|
|
2879
|
-
updated_ids = db(self.query).select("id").column("id")
|
|
2880
|
-
if db(self.query).update(**fields):
|
|
2881
|
-
# success!
|
|
2882
|
-
return updated_ids
|
|
2883
|
-
|
|
2884
|
-
return []
|
|
2885
|
-
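A sketch of bulk updates and deletes against a hypothetical `Person` model with an assumed `minor` field; both helpers return the affected ids, or an empty list when nothing changed:
```
# set a flag on all matching rows; returns the updated ids:
updated_ids = Person.where(Person.age < 18).update(minor=True)

# remove matching rows; returns the deleted ids:
deleted_ids = Person.where(Person.name == "temp").delete()

# ._update(...) and ._delete() return the generated SQL without executing it.
```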
|
|
2886
|
-
def _update(self, **fields: Any) -> str:
|
|
2887
|
-
db = self._get_db()
|
|
2888
|
-
return str(db(self.query)._update(**fields))
|
|
2889
|
-
|
|
2890
|
-
def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], SelectKwargs]:
|
|
2891
|
-
select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
|
|
2892
|
-
select_kwargs = self.select_kwargs.copy()
|
|
2893
|
-
query = self.query
|
|
2894
|
-
model = self.model
|
|
2895
|
-
mut_metadata["query"] = query
|
|
2896
|
-
# require at least id of main table:
|
|
2897
|
-
select_fields = ", ".join([str(_) for _ in select_args])
|
|
2898
|
-
tablename = str(model)
|
|
2899
|
-
|
|
2900
|
-
if add_id and f"{tablename}.id" not in select_fields:
|
|
2901
|
-
# some fields were selected, but the required id of the main table is missing.
|
|
2902
|
-
select_args.append(model.id)
|
|
2903
|
-
|
|
2904
|
-
if self.relationships:
|
|
2905
|
-
query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
|
|
2906
|
-
|
|
2907
|
-
return query, select_args, select_kwargs
|
|
2908
|
-
|
|
2909
|
-
def to_sql(self, add_id: bool = False) -> str:
|
|
2910
|
-
"""
|
|
2911
|
-
Generate the SQL for the built query.
|
|
2912
|
-
"""
|
|
2913
|
-
db = self._get_db()
|
|
2914
|
-
|
|
2915
|
-
query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
|
|
2916
|
-
|
|
2917
|
-
return str(db(query)._select(*select_args, **select_kwargs))
|
|
2918
|
-
|
|
2919
|
-
def _collect(self) -> str:
|
|
2920
|
-
"""
|
|
2921
|
-
Alias for to_sql, pydal-like syntax.
|
|
2922
|
-
"""
|
|
2923
|
-
return self.to_sql()
|
|
2924
|
-
|
|
2925
|
-
def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
|
|
2926
|
-
expires_at = metadata["cache"].get("expires_at")
|
|
2927
|
-
metadata["cache"] |= {
|
|
2928
|
-
# the key is partly dependent on cache metadata, but not on these:
|
|
2929
|
-
"key": None,
|
|
2930
|
-
"status": None,
|
|
2931
|
-
"cached_at": None,
|
|
2932
|
-
"expires_at": None,
|
|
2933
|
-
}
|
|
2934
|
-
|
|
2935
|
-
_, key = create_and_hash_cache_key(
|
|
2936
|
-
self.model,
|
|
2937
|
-
metadata,
|
|
2938
|
-
self.query,
|
|
2939
|
-
self.select_args,
|
|
2940
|
-
self.select_kwargs,
|
|
2941
|
-
self.relationships.keys(),
|
|
2942
|
-
)
|
|
2943
|
-
|
|
2944
|
-
# re-set after creating key:
|
|
2945
|
-
metadata["cache"]["expires_at"] = expires_at
|
|
2946
|
-
metadata["cache"]["key"] = key
|
|
2947
|
-
|
|
2948
|
-
return load_from_cache(key, self._get_db())
|
|
2949
|
-
|
|
2950
|
-
def execute(self, add_id: bool = False) -> Rows:
|
|
2951
|
-
"""
|
|
2952
|
-
Raw version of .collect which only executes the SQL, without performing any magic afterwards.
|
|
2953
|
-
"""
|
|
2954
|
-
db = self._get_db()
|
|
2955
|
-
metadata = typing.cast(Metadata, self.metadata.copy())
|
|
2956
|
-
|
|
2957
|
-
query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
|
|
2958
|
-
|
|
2959
|
-
return db(query).select(*select_args, **select_kwargs)
|
|
2960
|
-
|
|
2961
|
-
def collect(
|
|
2962
|
-
self,
|
|
2963
|
-
verbose: bool = False,
|
|
2964
|
-
_to: Type["TypedRows[Any]"] = None,
|
|
2965
|
-
add_id: bool = True,
|
|
2966
|
-
) -> "TypedRows[T_MetaInstance]":
|
|
2967
|
-
"""
|
|
2968
|
-
Execute the built query and turn it into model instances, while handling relationships.
|
|
2969
|
-
"""
|
|
2970
|
-
if _to is None:
|
|
2971
|
-
_to = TypedRows
|
|
2972
|
-
|
|
2973
|
-
db = self._get_db()
|
|
2974
|
-
metadata = typing.cast(Metadata, self.metadata.copy())
|
|
2975
|
-
|
|
2976
|
-
if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
|
|
2977
|
-
return result
|
|
2978
|
-
|
|
2979
|
-
query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
|
|
2980
|
-
|
|
2981
|
-
metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
|
|
2982
|
-
|
|
2983
|
-
if verbose: # pragma: no cover
|
|
2984
|
-
print(metadata["sql"])
|
|
2985
|
-
|
|
2986
|
-
rows: Rows = db(query).select(*select_args, **select_kwargs)
|
|
2987
|
-
|
|
2988
|
-
metadata["final_query"] = str(query)
|
|
2989
|
-
metadata["final_args"] = [str(_) for _ in select_args]
|
|
2990
|
-
metadata["final_kwargs"] = select_kwargs
|
|
2991
|
-
|
|
2992
|
-
if verbose: # pragma: no cover
|
|
2993
|
-
print(rows)
|
|
2994
|
-
|
|
2995
|
-
if not self.relationships:
|
|
2996
|
-
# easy
|
|
2997
|
-
typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
|
|
2998
|
-
|
|
2999
|
-
else:
|
|
3000
|
-
# harder: try to match rows to the belonging objects
|
|
3001
|
-
# assume structure of {'table': <data>} per row.
|
|
3002
|
-
# if that's not the case, fall back to the default behavior
|
|
3003
|
-
typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
|
|
3004
|
-
|
|
3005
|
-
# only saves if requested in metadata:
|
|
3006
|
-
return save_to_cache(typed_rows, rows)
|
|
3007
|
-
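Putting the builder together, `.collect()` is typically the final call in the chain; a sketch assuming a hypothetical `Person` model with a `pets` relationship:
```
rows = (
    Person.where(Person.age >= 18)
    .join("pets")            # relationship data is attached to every instance
    .select(Person.ALL)
    .collect()
)

for person in rows:          # a TypedRows of Person instances
    print(person.name, len(person.pets))
```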
|
|
3008
|
-
@typing.overload
|
|
3009
|
-
def column(self, field: TypedField[T], **options: Unpack[SelectKwargs]) -> list[T]:
|
|
3010
|
-
"""
|
|
3011
|
-
If a TypedField is passed, the output type can be safely determined.
|
|
3012
|
-
"""
|
|
3013
|
-
|
|
3014
|
-
@typing.overload
|
|
3015
|
-
def column(self, field: T, **options: Unpack[SelectKwargs]) -> list[T]:
|
|
3016
|
-
"""
|
|
3017
|
-
Otherwise, the output type is loosely determined (assumes `field: type` or Any).
|
|
3018
|
-
"""
|
|
3019
|
-
|
|
3020
|
-
def column(self, field: TypedField[T] | T, **options: Unpack[SelectKwargs]) -> list[T]:
|
|
3021
|
-
"""
|
|
3022
|
-
Get all values in a specific column.
|
|
3023
|
-
|
|
3024
|
-
Shortcut for `.select(field).execute().column(field)`.
|
|
3025
|
-
"""
|
|
3026
|
-
return self.select(field, **options).execute().column(field)
|
|
3027
|
-
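A short sketch of `.column()`; assuming `Person.name` is a `TypedField[str]`, the return type is inferred as `list[str]`:
```
names: list[str] = Person.where(Person.age >= 18).column(Person.name)

# equivalent long form:
names = Person.where(Person.age >= 18).select(Person.name).execute().column(Person.name)
```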
|
|
3028
|
-
def _handle_relationships_pre_select(
|
|
3029
|
-
self,
|
|
3030
|
-
query: Query,
|
|
3031
|
-
select_args: list[Any],
|
|
3032
|
-
select_kwargs: SelectKwargs,
|
|
3033
|
-
metadata: Metadata,
|
|
3034
|
-
) -> tuple[Query, list[Any]]:
|
|
3035
|
-
db = self._get_db()
|
|
3036
|
-
model = self.model
|
|
3037
|
-
|
|
3038
|
-
metadata["relationships"] = set(self.relationships.keys())
|
|
3039
|
-
|
|
3040
|
-
join = []
|
|
3041
|
-
for key, relation in self.relationships.items():
|
|
3042
|
-
if not relation.condition or relation.join != "inner":
|
|
3043
|
-
continue
|
|
3044
|
-
|
|
3045
|
-
other = relation.get_table(db)
|
|
3046
|
-
other = other.with_alias(f"{key}_{hash(relation)}")
|
|
3047
|
-
condition = relation.condition(model, other)
|
|
3048
|
-
if callable(relation.condition_and):
|
|
3049
|
-
condition &= relation.condition_and(model, other)
|
|
3050
|
-
|
|
3051
|
-
join.append(other.on(condition))
|
|
3052
|
-
|
|
3053
|
-
if limitby := select_kwargs.pop("limitby", ()):
|
|
3054
|
-
# if limitby + relationships:
|
|
3055
|
-
# 1. get IDs of main table entries that match 'query'
|
|
3056
|
-
# 2. change query to .belongs(id)
|
|
3057
|
-
# 3. add joins etc
|
|
3058
|
-
|
|
3059
|
-
kwargs: SelectKwargs = select_kwargs | {"limitby": limitby}
|
|
3060
|
-
# if orderby := select_kwargs.get("orderby"):
|
|
3061
|
-
# kwargs["orderby"] = orderby
|
|
3062
|
-
|
|
3063
|
-
if join:
|
|
3064
|
-
kwargs["join"] = join
|
|
3065
|
-
|
|
3066
|
-
ids = db(query)._select(model.id, **kwargs)
|
|
3067
|
-
query = model.id.belongs(ids)
|
|
3068
|
-
metadata["ids"] = ids
|
|
3069
|
-
|
|
3070
|
-
if join:
|
|
3071
|
-
select_kwargs["join"] = join
|
|
3072
|
-
|
|
3073
|
-
left = []
|
|
3074
|
-
|
|
3075
|
-
for key, relation in self.relationships.items():
|
|
3076
|
-
other = relation.get_table(db)
|
|
3077
|
-
method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
|
|
3078
|
-
|
|
3079
|
-
select_fields = ", ".join([str(_) for _ in select_args])
|
|
3080
|
-
pre_alias = str(other)
|
|
3081
|
-
|
|
3082
|
-
if f"{other}." not in select_fields:
|
|
3083
|
-
# no fields of other selected. add .ALL:
|
|
3084
|
-
select_args.append(other.ALL)
|
|
3085
|
-
elif f"{other}.id" not in select_fields:
|
|
3086
|
-
# fields of other selected, but required ID is missing.
|
|
3087
|
-
select_args.append(other.id)
|
|
3088
|
-
|
|
3089
|
-
if relation.on:
|
|
3090
|
-
# if it has a .on, it's always a left join!
|
|
3091
|
-
on = relation.on(model, other)
|
|
3092
|
-
if not isinstance(on, list): # pragma: no cover
|
|
3093
|
-
on = [on]
|
|
3094
|
-
|
|
3095
|
-
on = [
|
|
3096
|
-
_
|
|
3097
|
-
for _ in on
|
|
3098
|
-
# only allow Expressions (query and such):
|
|
3099
|
-
if isinstance(_, pydal.objects.Expression)
|
|
3100
|
-
]
|
|
3101
|
-
left.extend(on)
|
|
3102
|
-
elif method == "left":
|
|
3103
|
-
# .on not given, generate it:
|
|
3104
|
-
other = other.with_alias(f"{key}_{hash(relation)}")
|
|
3105
|
-
condition = typing.cast(Query, relation.condition(model, other))
|
|
3106
|
-
if callable(relation.condition_and):
|
|
3107
|
-
condition &= relation.condition_and(model, other)
|
|
3108
|
-
left.append(other.on(condition))
|
|
3109
|
-
else:
|
|
3110
|
-
# else: inner join (handled earlier)
|
|
3111
|
-
other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
|
|
3112
|
-
|
|
3113
|
-
# if no fields of 'other' are included, add other.ALL
|
|
3114
|
-
# else: only add other.id if missing
|
|
3115
|
-
select_fields = ", ".join([str(_) for _ in select_args])
|
|
3116
|
-
|
|
3117
|
-
post_alias = str(other).split(" AS ")[-1]
|
|
3118
|
-
if pre_alias != post_alias:
|
|
3119
|
-
# replace .select's with aliased:
|
|
3120
|
-
select_fields = select_fields.replace(
|
|
3121
|
-
f"{pre_alias}.",
|
|
3122
|
-
f"{post_alias}.",
|
|
3123
|
-
)
|
|
3124
|
-
|
|
3125
|
-
select_args = select_fields.split(", ")
|
|
3126
|
-
|
|
3127
|
-
select_kwargs["left"] = left
|
|
3128
|
-
return query, select_args
|
|
3129
|
-
|
|
3130
|
-
def _collect_with_relationships(
|
|
3131
|
-
self,
|
|
3132
|
-
rows: Rows,
|
|
3133
|
-
metadata: Metadata,
|
|
3134
|
-
_to: Type["TypedRows[Any]"],
|
|
3135
|
-
) -> "TypedRows[T_MetaInstance]":
|
|
3136
|
-
"""
|
|
3137
|
-
Transform the raw rows into Typed Table model instances.
|
|
3138
|
-
"""
|
|
3139
|
-
db = self._get_db()
|
|
3140
|
-
main_table = self.model._ensure_table_defined()
|
|
3141
|
-
|
|
3142
|
-
# id: Model
|
|
3143
|
-
records = {}
|
|
3144
|
-
|
|
3145
|
-
# id: [Row]
|
|
3146
|
-
raw_per_id = defaultdict(list)
|
|
3147
|
-
|
|
3148
|
-
seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
|
|
3149
|
-
|
|
3150
|
-
for row in rows:
|
|
3151
|
-
main = row[main_table]
|
|
3152
|
-
main_id = main.id
|
|
3153
|
-
|
|
3154
|
-
raw_per_id[main_id].append(normalize_table_keys(row))
|
|
3155
|
-
|
|
3156
|
-
if main_id not in records:
|
|
3157
|
-
records[main_id] = self.model(main)
|
|
3158
|
-
records[main_id]._with = list(self.relationships.keys())
|
|
3159
|
-
|
|
3160
|
-
# set up all relationship defaults (once)
|
|
3161
|
-
for col, relationship in self.relationships.items():
|
|
3162
|
-
records[main_id][col] = [] if relationship.multiple else None
|
|
3163
|
-
|
|
3164
|
-
# now add other relationship data
|
|
3165
|
-
for column, relation in self.relationships.items():
|
|
3166
|
-
relationship_column = f"{column}_{hash(relation)}"
|
|
3167
|
-
|
|
3168
|
-
# relationship_column works for aliases with the same target column.
|
|
3169
|
-
# if col + relationship not in the row, just use the regular name.
|
|
3170
|
-
|
|
3171
|
-
relation_data = (
|
|
3172
|
-
row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
|
|
3173
|
-
)
|
|
3174
|
-
|
|
3175
|
-
if relation_data.id is None:
|
|
3176
|
-
# always skip None ids
|
|
3177
|
-
continue
|
|
3178
|
-
|
|
3179
|
-
if f"{column}-{relation_data.id}" in seen_relations[main_id]:
|
|
3180
|
-
# speed up duplicates
|
|
3181
|
-
continue
|
|
3182
|
-
else:
|
|
3183
|
-
seen_relations[main_id].add(f"{column}-{relation_data.id}")
|
|
3184
|
-
|
|
3185
|
-
relation_table = relation.get_table(db)
|
|
3186
|
-
# an instance of a typed table where possible, a regular row otherwise:
|
|
3187
|
-
instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
|
|
3188
|
-
|
|
3189
|
-
if relation.multiple:
|
|
3190
|
-
# create list of T
|
|
3191
|
-
if not isinstance(records[main_id].get(column), list): # pragma: no cover
|
|
3192
|
-
# should already be set up before!
|
|
3193
|
-
setattr(records[main_id], column, [])
|
|
3194
|
-
|
|
3195
|
-
records[main_id][column].append(instance)
|
|
3196
|
-
else:
|
|
3197
|
-
# create single T
|
|
3198
|
-
records[main_id][column] = instance
|
|
3199
|
-
|
|
3200
|
-
return _to(rows, self.model, records, metadata=metadata, raw=raw_per_id)
|
|
3201
|
-
|
|
3202
|
-
def collect_or_fail(self, exception: typing.Optional[Exception] = None) -> "TypedRows[T_MetaInstance]":
|
|
3203
|
-
"""
|
|
3204
|
-
Call .collect() and raise an error if nothing found.
|
|
3205
|
-
|
|
3206
|
-
Basically unwraps Optional type.
|
|
3207
|
-
"""
|
|
3208
|
-
if result := self.collect():
|
|
3209
|
-
return result
|
|
3210
|
-
|
|
3211
|
-
if not exception:
|
|
3212
|
-
exception = ValueError("Nothing found!")
|
|
3213
|
-
|
|
3214
|
-
raise exception
|
|
3215
|
-
|
|
3216
|
-
def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
|
|
3217
|
-
"""
|
|
3218
|
-
For ease of use, you can iterate a QueryBuilder object directly without calling .collect() first.
|
|
3219
|
-
"""
|
|
3220
|
-
yield from self.collect()
|
|
3221
|
-
|
|
3222
|
-
def __count(self, db: TypeDAL, distinct: typing.Optional[bool] = None) -> Query:
|
|
3223
|
-
# internal, shared logic between .count and ._count
|
|
3224
|
-
model = self.model
|
|
3225
|
-
query = self.query
|
|
3226
|
-
for key, relation in self.relationships.items():
|
|
3227
|
-
if (not relation.condition or relation.join != "inner") and not distinct:
|
|
3228
|
-
continue
|
|
3229
|
-
|
|
3230
|
-
other = relation.get_table(db)
|
|
3231
|
-
if not distinct:
|
|
3232
|
-
# todo: can this lead to other issues?
|
|
3233
|
-
other = other.with_alias(f"{key}_{hash(relation)}")
|
|
3234
|
-
query &= relation.condition(model, other)
|
|
3235
|
-
|
|
3236
|
-
return query
|
|
3237
|
-
|
|
3238
|
-
def count(self, distinct: typing.Optional[bool] = None) -> int:
|
|
3239
|
-
"""
|
|
3240
|
-
Return the number of rows matching the current query.
|
|
3241
|
-
"""
|
|
3242
|
-
db = self._get_db()
|
|
3243
|
-
query = self.__count(db, distinct=distinct)
|
|
3244
|
-
|
|
3245
|
-
return db(query).count(distinct)
|
|
3246
|
-
|
|
3247
|
-
def _count(self, distinct: typing.Optional[bool] = None) -> str:
|
|
3248
|
-
"""
|
|
3249
|
-
Return the SQL for .count().
|
|
3250
|
-
"""
|
|
3251
|
-
db = self._get_db()
|
|
3252
|
-
query = self.__count(db, distinct=distinct)
|
|
3253
|
-
|
|
3254
|
-
return typing.cast(str, db(query)._count(distinct))
|
|
3255
|
-
|
|
3256
|
-
def exists(self) -> bool:
|
|
3257
|
-
"""
|
|
3258
|
-
Determine whether any records exist matching the current query.
|
|
3259
|
-
|
|
3260
|
-
Returns True if one or more records exist; otherwise, False.
|
|
3261
|
-
|
|
3262
|
-
Returns:
|
|
3263
|
-
bool: A boolean indicating whether any records exist.
|
|
3264
|
-
"""
|
|
3265
|
-
return bool(self.count())
|
|
3266
|
-
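Counting and existence checks reuse the built query; a sketch against a hypothetical `Person` model:
```
total = Person.where(Person.age >= 18).count()
distinct_total = Person.where(Person.age >= 18).count(distinct=True)

if Person.where(name="John").exists():
    print("at least one John exists")
```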
|
|
3267
|
-
def __paginate(
|
|
3268
|
-
self,
|
|
3269
|
-
limit: int,
|
|
3270
|
-
page: int = 1,
|
|
3271
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
3272
|
-
available = self.count()
|
|
3273
|
-
|
|
3274
|
-
_from = limit * (page - 1)
|
|
3275
|
-
_to = (limit * page) if limit else available
|
|
3276
|
-
|
|
3277
|
-
metadata: Metadata = {}
|
|
3278
|
-
|
|
3279
|
-
metadata["pagination"] = {
|
|
3280
|
-
"limit": limit,
|
|
3281
|
-
"current_page": page,
|
|
3282
|
-
"max_page": math.ceil(available / limit) if limit else 1,
|
|
3283
|
-
"rows": available,
|
|
3284
|
-
"min_max": (_from, _to),
|
|
3285
|
-
}
|
|
3286
|
-
|
|
3287
|
-
return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
|
|
3288
|
-
|
|
3289
|
-
def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
|
|
3290
|
-
"""
|
|
3291
|
-
Paginate transforms the more readable `page` and `limit` into pydal's internal limit and offset.
|
|
3292
|
-
|
|
3293
|
-
Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
|
|
3294
|
-
can be loaded with relationship data!
|
|
3295
|
-
"""
|
|
3296
|
-
builder = self.__paginate(limit, page)
|
|
3297
|
-
|
|
3298
|
-
rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
|
|
3299
|
-
|
|
3300
|
-
rows._query_builder = builder
|
|
3301
|
-
return rows
|
|
3302
|
-
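A pagination sketch (hypothetical `Person` model); `.paginate()` returns `PaginatedRows`, whose helpers are defined further below:
```
page = Person.where(Person.age >= 18).paginate(limit=20, page=2)

info = page.pagination
print(info["current_page"], "of", info["total_pages"])

if info["has_next_page"]:
    next_page = page.next()
```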
|
|
3303
|
-
def _paginate(
|
|
3304
|
-
self,
|
|
3305
|
-
limit: int,
|
|
3306
|
-
page: int = 1,
|
|
3307
|
-
) -> str:
|
|
3308
|
-
builder = self.__paginate(limit, page)
|
|
3309
|
-
return builder._collect()
|
|
3310
|
-
|
|
3311
|
-
def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
|
|
3312
|
-
"""
|
|
3313
|
-
Generator that yields rows from a paginated source in chunks.
|
|
3314
|
-
|
|
3315
|
-
This function retrieves rows from a paginated data source in chunks of the
|
|
3316
|
-
specified `chunk_size` and yields them as TypedRows.
|
|
3317
|
-
|
|
3318
|
-
Example:
|
|
3319
|
-
```
|
|
3320
|
-
for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
|
|
3321
|
-
for row in chunk_of_rows:
|
|
3322
|
-
# Process each row within the chunk.
|
|
3323
|
-
pass
|
|
3324
|
-
```
|
|
3325
|
-
"""
|
|
3326
|
-
page = 1
|
|
3327
|
-
|
|
3328
|
-
while rows := self.__paginate(chunk_size, page).collect():
|
|
3329
|
-
yield rows
|
|
3330
|
-
page += 1
|
|
3331
|
-
|
|
3332
|
-
def first(self, verbose: bool = False) -> T_MetaInstance | None:
|
|
3333
|
-
"""
|
|
3334
|
-
Get the first row matching the currently built query.
|
|
3335
|
-
|
|
3336
|
-
Also applies pagination (limit 1), since it would be a waste to select more rows than needed.
|
|
3337
|
-
"""
|
|
3338
|
-
if row := self.paginate(page=1, limit=1, verbose=verbose).first():
|
|
3339
|
-
return self.model.from_row(row)
|
|
3340
|
-
else:
|
|
3341
|
-
return None
|
|
3342
|
-
|
|
3343
|
-
def _first(self) -> str:
|
|
3344
|
-
return self._paginate(page=1, limit=1)
|
|
3345
|
-
|
|
3346
|
-
def first_or_fail(self, exception: typing.Optional[BaseException] = None, verbose: bool = False) -> T_MetaInstance:
|
|
3347
|
-
"""
|
|
3348
|
-
Call .first() and raise an error if nothing found.
|
|
3349
|
-
|
|
3350
|
-
Basically unwraps Optional type.
|
|
3351
|
-
"""
|
|
3352
|
-
if inst := self.first(verbose=verbose):
|
|
3353
|
-
return inst
|
|
3354
|
-
|
|
3355
|
-
if not exception:
|
|
3356
|
-
exception = ValueError("Nothing found!")
|
|
3357
|
-
|
|
3358
|
-
raise exception
|
|
3359
|
-
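A sketch of the single-row helpers against a hypothetical `Person` model:
```
person = Person.where(name="John").first()    # Person | None
if person is None:
    ...  # handle the missing row

# raise instead of returning None (a custom exception is optional):
person = Person.where(name="John").first_or_fail(ValueError("no John found"))
```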
|
|
3360
|
-
|
|
3361
|
-
S = typing.TypeVar("S")
|
|
3362
|
-
|
|
3363
|
-
|
|
3364
|
-
class PaginatedRows(TypedRows[T_MetaInstance]):
|
|
3365
|
-
"""
|
|
3366
|
-
Extension on top of rows that is used when calling .paginate() instead of .collect().
|
|
3367
|
-
"""
|
|
3368
|
-
|
|
3369
|
-
_query_builder: QueryBuilder[T_MetaInstance]
|
|
3370
|
-
|
|
3371
|
-
@property
|
|
3372
|
-
def data(self) -> list[T_MetaInstance]:
|
|
3373
|
-
"""
|
|
3374
|
-
Get the underlying data.
|
|
3375
|
-
"""
|
|
3376
|
-
return list(self.records.values())
|
|
3377
|
-
|
|
3378
|
-
@property
|
|
3379
|
-
def pagination(self) -> Pagination:
|
|
3380
|
-
"""
|
|
3381
|
-
Get all page info.
|
|
3382
|
-
"""
|
|
3383
|
-
pagination_data = self.metadata["pagination"]
|
|
3384
|
-
|
|
3385
|
-
has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
|
|
3386
|
-
has_prev_page = pagination_data["current_page"] > 1
|
|
3387
|
-
return {
|
|
3388
|
-
"total_items": pagination_data["rows"],
|
|
3389
|
-
"current_page": pagination_data["current_page"],
|
|
3390
|
-
"per_page": pagination_data["limit"],
|
|
3391
|
-
"total_pages": pagination_data["max_page"],
|
|
3392
|
-
"has_next_page": has_next_page,
|
|
3393
|
-
"has_prev_page": has_prev_page,
|
|
3394
|
-
"next_page": pagination_data["current_page"] + 1 if has_next_page else None,
|
|
3395
|
-
"prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
|
|
3396
|
-
}
|
|
3397
|
-
|
|
3398
|
-
def next(self) -> Self:
|
|
3399
|
-
"""
|
|
3400
|
-
Get the next page.
|
|
3401
|
-
"""
|
|
3402
|
-
data = self.metadata["pagination"]
|
|
3403
|
-
if data["current_page"] >= data["max_page"]:
|
|
3404
|
-
raise StopIteration("Final Page")
|
|
3405
|
-
|
|
3406
|
-
return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
|
|
3407
|
-
|
|
3408
|
-
def previous(self) -> Self:
|
|
3409
|
-
"""
|
|
3410
|
-
Get the previous page.
|
|
3411
|
-
"""
|
|
3412
|
-
data = self.metadata["pagination"]
|
|
3413
|
-
if data["current_page"] <= 1:
|
|
3414
|
-
raise StopIteration("First Page")
|
|
3415
|
-
|
|
3416
|
-
return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
|
|
3417
|
-
|
|
3418
|
-
def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
|
|
3419
|
-
"""
|
|
3420
|
-
Convert to a dictionary with pagination info and original data.
|
|
3421
|
-
|
|
3422
|
-
All arguments are ignored!
|
|
3423
|
-
"""
|
|
3424
|
-
return {"data": super().as_dict(), "pagination": self.pagination}
|
|
3425
|
-
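The resulting structure is convenient for JSON APIs; a sketch of its shape, with the pagination keys taken from the `pagination` property above (the exact row serialization depends on `TypedRows.as_dict`):
```
payload = Person.where(Person.id > 0).paginate(limit=10, page=1).as_dict()
# {
#     "data": ...,                  # serialized rows, shape depends on TypedRows.as_dict
#     "pagination": {
#         "total_items": 42,
#         "current_page": 1,
#         "per_page": 10,
#         "total_pages": 5,
#         "has_next_page": True,
#         "has_prev_page": False,
#         "next_page": 2,
#         "prev_page": None,
#     },
# }
```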
|
|
3426
|
-
|
|
3427
|
-
class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
|
|
3428
|
-
"""
|
|
3429
|
-
Used to make pydal Set more typed.
|
|
3430
|
-
|
|
3431
|
-
This class is not actually used directly; it is only 'cast' to by TypeDAL.__call__.
|
|
3432
|
-
"""
|
|
3433
|
-
|
|
3434
|
-
def count(self, distinct: typing.Optional[bool] = None, cache: AnyDict = None) -> int:
|
|
3435
|
-
"""
|
|
3436
|
-
Count returns an int.
|
|
3437
|
-
"""
|
|
3438
|
-
result = super().count(distinct, cache)
|
|
3439
|
-
return typing.cast(int, result)
|
|
3440
|
-
|
|
3441
|
-
def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
|
|
3442
|
-
"""
|
|
3443
|
-
Select returns a TypedRows of a user-defined table.
|
|
3444
|
-
|
|
3445
|
-
Example:
|
|
3446
|
-
result: TypedRows[MyTable] = db(MyTable.id > 0).select()
|
|
3447
|
-
|
|
3448
|
-
for row in result:
|
|
3449
|
-
reveal_type(row) # MyTable
|
|
3450
|
-
"""
|
|
3451
|
-
rows = super().select(*fields, **attributes)
|
|
3452
|
-
return typing.cast(TypedRows[T_MetaInstance], rows)
|