TypeDAL 3.16.4__py3-none-any.whl → 4.2.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- typedal/__about__.py +1 -1
- typedal/__init__.py +21 -3
- typedal/caching.py +37 -34
- typedal/config.py +18 -16
- typedal/constants.py +25 -0
- typedal/core.py +188 -3115
- typedal/define.py +188 -0
- typedal/fields.py +293 -34
- typedal/for_py4web.py +1 -1
- typedal/for_web2py.py +1 -1
- typedal/helpers.py +329 -40
- typedal/mixins.py +23 -27
- typedal/query_builder.py +1119 -0
- typedal/relationships.py +390 -0
- typedal/rows.py +524 -0
- typedal/serializers/as_json.py +9 -10
- typedal/tables.py +1131 -0
- typedal/types.py +187 -179
- typedal/web2py_py4web_shared.py +1 -1
- {typedal-3.16.4.dist-info → typedal-4.2.0.dist-info}/METADATA +8 -7
- typedal-4.2.0.dist-info/RECORD +25 -0
- {typedal-3.16.4.dist-info → typedal-4.2.0.dist-info}/WHEEL +1 -1
- typedal-3.16.4.dist-info/RECORD +0 -19
- {typedal-3.16.4.dist-info → typedal-4.2.0.dist-info}/entry_points.txt +0 -0
typedal/core.py
CHANGED
@@ -4,391 +4,147 @@ Core functionality of TypeDAL.

  from __future__ import annotations

- import contextlib
- import copy
- import csv
- import datetime as dt
- import functools
- import inspect
- import json
- import math
- import re
  import sys
- import types
- import typing
- import uuid
+ import typing as t
  import warnings
- from collections import defaultdict
- from decimal import Decimal
  from pathlib import Path
- from typing import
+ from typing import Optional

  import pydal
- from pydal.objects import Query as _Query
- from pydal.objects import Row
- from pydal.objects import Table as _Table
- from typing_extensions import Self, Unpack
-
- from .config import TypeDALConfig, load_config
+
+ from .config import LazyPolicy, TypeDALConfig, load_config
  from .helpers import (
-     classproperty,
-     extract_type_optional,
-     filter_out,
-     instanciate,
-     is_union,
-     looks_like,
-     mktable,
-     origin_is_subclass,
+     SYSTEM_SUPPORTS_TEMPLATES,
+     default_representer,
+     sql_escape_template,
+     sql_expression,
      to_snake,
-     unwrap_type,
- )
- from .serializers import as_json
- from .types import (
-     AnyDict,
-     CacheMetadata,
-     Expression,
-     Field,
-     FieldSettings,
-     Metadata,
-     OpRow,
-     PaginateDict,
-     Pagination,
-     Query,
-     Reference,
-     Rows,
-     SelectKwargs,
-     Set,
-     Table,
-     Validator,
-     _Types,
  )
+ from .types import Field, T, Template  # type: ignore

- BASIC_MAPPINGS: dict[T_annotation, str] = {
-     str: "string",
-     int: "integer",
-     bool: "boolean",
-     bytes: "blob",
-     float: "double",
-     object: "json",
-     Decimal: "decimal(10,2)",
-     dt.date: "date",
-     dt.time: "time",
-     dt.datetime: "datetime",
- }
-
-
- def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
-     """
-     Is `cls` an instance or subclass of TypedField?
-
-     Deprecated
-     """
-     return isinstance(cls, TypedField) or (
-         isinstance(typing.get_origin(cls), type) and issubclass(typing.get_origin(cls), TypedField)
-     )
-
-
- JOIN_OPTIONS = typing.Literal["left", "inner", None]
- DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
+ try:
+     # python 3.14+
+     from annotationlib import ForwardRef
+ except ImportError:  # pragma: no cover
+     # python 3.13-
+     from typing import ForwardRef

+ if t.TYPE_CHECKING:
+     from .fields import TypedField
+     from .types import AnyDict, Expression, T_Query, Table

- Condition: typing.TypeAlias = typing.Optional[
-     typing.Callable[
-         # self, other -> Query
-         [P_Table, P_Table],
-         Query | bool,
-     ]
- ]
-         # self, other -> list of .on statements
-         [P_Table, P_Table],
-         list[Expression],
-     ]
- ]
+ # note: these functions can not be moved to a different file,
+ # because then they will have different globals and it breaks!

- # To_Type = typing.TypeVar("To_Type", type[Any], Type[Any], str)
- To_Type = typing.TypeVar("To_Type")

- class Relationship(typing.Generic[To_Type]):
-     """
-     Define a relationship to another table.
+ def evaluate_forward_reference_312(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
      """
+     Extract the original type from a forward reference string.

-     table: Type["TypedTable"] | type | str
-     condition: Condition
-     condition_and: Condition
-     on: OnQuery
-     multiple: bool
-     join: JOIN_OPTIONS
-
-     def __init__(
-         self,
-         _type: To_Type,
-         condition: Condition = None,
-         join: JOIN_OPTIONS = None,
-         on: OnQuery = None,
-         condition_and: Condition = None,
-     ):
-         """
-         Should not be called directly, use relationship() instead!
-         """
-         if condition and on:
-             warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
-             raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
-
-         self._type = _type
-         self.condition = condition
-         self.join = "left" if on else join  # .on is always left join!
-         self.on = on
-         self.condition_and = condition_and
-
-         if args := typing.get_args(_type):
-             self.table = unwrap_type(args[0])
-             self.multiple = True
-         else:
-             self.table = _type
-             self.multiple = False
-
-         if isinstance(self.table, str):
-             self.table = TypeDAL.to_snake(self.table)
-
-     def clone(self, **update: Any) -> "Relationship[To_Type]":
-         """
-         Create a copy of the relationship, possibly updated.
-         """
-         return self.__class__(
-             update.get("_type") or self._type,
-             update.get("condition") or self.condition,
-             update.get("join") or self.join,
-             update.get("on") or self.on,
-             update.get("condition_and") or self.condition_and,
-         )
-
-     def __repr__(self) -> str:
-         """
-         Representation of the relationship.
-         """
-         if callback := self.condition or self.on:
-             src_code = inspect.getsource(callback).strip()
-
-             if c_and := self.condition_and:
-                 and_code = inspect.getsource(c_and).strip()
-                 src_code += " AND " + and_code
-         else:
-             cls_name = self._type if isinstance(self._type, str) else self._type.__name__
-             src_code = f"to {cls_name} (missing condition)"
-
-         join = f":{self.join}" if self.join else ""
-         return f"<Relationship{join} {src_code}>"
-
-     def get_table(self, db: "TypeDAL") -> Type["TypedTable"]:
-         """
-         Get the table this relationship is bound to.
-         """
-         table = self.table  # can be a string because db wasn't available yet
-
-         if isinstance(table, str):
-             if mapped := db._class_map.get(table):
-                 # yay
-                 return mapped
-
-             # boo, fall back to untyped table but pretend it is typed:
-             return typing.cast(Type["TypedTable"], db[table])  # eh close enough!
-
-         return table
-
-     def get_table_name(self) -> str:
-         """
-         Get the name of the table this relationship is bound to.
-         """
-         if isinstance(self.table, str):
-             return self.table
-
-         if isinstance(self.table, pydal.objects.Table):
-             return str(self.table)
-
-         # else: typed table
-         try:
-             table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
-         except Exception:  # pragma: no cover
-             table = self.table
-
-         return str(table)
-
-     def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
-         """
-         Relationship is a descriptor class, which can be returned from a class but not an instance.
-
-         For an instance, using .join() will replace the Relationship with the actual data.
-         If you forgot to join, a warning will be shown and empty data will be returned.
-         """
-         if not instance:
-             # relationship queried on class, that's allowed
-             return self
-
-         warnings.warn(
-             "Trying to get data from a relationship object! Did you forget to join it?",
-             category=RuntimeWarning,
-         )
-         if self.multiple:
-             return []
-         else:
-             return None
-
-
- def relationship(
-     _type: typing.Type[To_Type],
-     condition: Condition = None,
-     join: JOIN_OPTIONS = None,
-     on: OnQuery = None,
- ) -> To_Type:
+     Variant for python 3.12 and below
      """
-     class Post(TypedTable):
-         title: str
-         author: User
-
-     User.join("posts").first()  # User instance with list[Post] in .posts
-
-     Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
-     In this case, the join strategy is set to LEFT so users without posts are also still selected.
+     return t.cast(
+         type,
+         fw_ref._evaluate(
+             localns=locals(),
+             globalns=globals() | namespace,
+             recursive_guard=frozenset(),
+         ),
+     )

-     For complex queries with a pivot table, a `on` can be set insteaad of `condition`:
-     class User(TypedTable):
-         ...
+ def evaluate_forward_reference_313(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
+     """
+     Extract the original type from a forward reference string.

+     Variant for python 3.13
      """
+     return t.cast(
+         type,
+         fw_ref._evaluate(
+             localns=locals(),
+             globalns=globals() | namespace,
+             recursive_guard=frozenset(),
+             type_params=(),  # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
+         ),
+     )


+ def evaluate_forward_reference_314(fw_ref: ForwardRef, namespace: dict[str, type]) -> type:  # pragma: no cover
+     """
+     Extract the original type from a forward reference string.

+     Variant for python 3.14 (and hopefully above)
+     """
+     return t.cast(
+         type,
+         fw_ref.evaluate(
+             locals=locals(),
+             globals=globals() | namespace,
+             type_params=(),
+         ),
+     )

- def _generate_relationship_condition(_: Type["TypedTable"], key: str, field: T_Field) -> Condition:
-     origin = typing.get_origin(field)
-     # else: generic

+ def evaluate_forward_reference(
+     fw_ref: ForwardRef,
+     namespace: dict[str, type] | None = None,
+ ) -> type:  # pragma: no cover
+     """
+     Extract the original type from a forward reference string.

+     Automatically chooses strategy based on current Python version.
+     """
+     if sys.version_info.minor < 13:
+         return evaluate_forward_reference_312(fw_ref, namespace=namespace or {})
+     elif sys.version_info.minor == 13:
+         return evaluate_forward_reference_313(fw_ref, namespace=namespace or {})
      else:
-         # return lambda _self, _other: cls[key] == field.id
-         return lambda _self, _other: _self[key] == _other.id
+         return evaluate_forward_reference_314(fw_ref, namespace=namespace or {})


- def to_relationship(
-     cls: Type["TypedTable"] | type[Any],
-     key: str,
-     field: T_Field,
- ) -> typing.Optional[Relationship[Any]]:
+ def resolve_annotation_313(ftype: str) -> type:  # pragma: no cover
      """
-     Example:
-         class MyTable(TypedTable):
-             reference: OtherTable
-
-     `reference` contains the id of an Other Table row.
-     MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
-
-     This function will automatically perform this logic (called in db.define):
-     to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
+     Resolve an annotation that's in string representation.

+     Variant for Python 3.13
      """
-     if args := typing.get_args(field):
-         # TypedField[SomeType] -> SomeType
-         field = args[0]
-     elif hasattr(field, "_type"):
-         # TypedField(SomeType) -> SomeType
-         field = typing.cast(T_Field, field._type)
-     else:  # pragma: no cover
-         # weird
-         return None
+     fw_ref: ForwardRef = t.get_args(t.Type[ftype])[0]
+     return evaluate_forward_reference(fw_ref)

-     field, optional = extract_type_optional(field)

-         warnings.warn("Could not generate Relationship condition", source=e)
-         condition = None
-
-     if not condition:  # pragma: no cover
-         # something went wrong, not a valid relationship
-         warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
-         return None
-
-     join = "left" if optional or typing.get_origin(field) is list else "inner"
+ def resolve_annotation_314(ftype: str) -> type:  # pragma: no cover
+     """
+     Resolve an annotation that's in string representation.

+     Variant for Python 3.14 + using annotationlib
+     """
+     fw_ref = ForwardRef(ftype)
+     return evaluate_forward_reference(fw_ref)


- def evaluate_forward_reference(
+ def resolve_annotation(ftype: str) -> type:  # pragma: no cover
      """
-     """
-     kwargs = dict(
-         localns=locals(),
-         globalns=globals(),
-         recursive_guard=frozenset(),
-     )
-     if sys.version_info >= (3, 13):  # pragma: no cover
-         # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
-         kwargs["type_params"] = ()
+     Resolve an annotation that's in string representation.

+     Automatically chooses strategy based on current Python version.
+     """
+     if sys.version_info.major != 3:
+         raise EnvironmentError("Only python 3 is supported.")
+     elif sys.version_info.minor <= 13:
+         return resolve_annotation_313(ftype)
+     else:
+         return resolve_annotation_314(ftype)


- class TypeDAL(pydal.DAL):
+ class TypeDAL(pydal.DAL):
      """
      Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables.
      """

      _config: TypeDALConfig
+     _builder: TableDefinitionBuilder

      def __init__(
          self,
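The hunk above replaces the single forward-reference evaluator from 3.x with per-version variants plus a `resolve_annotation` dispatcher. The following is a minimal, standalone sketch (standard library only) of the string-annotation trick those helpers use for Python 3.13 and below; on 3.14+ the diff switches to `annotationlib.ForwardRef.evaluate` instead. `resolve` and `Author` are hypothetical names used only for this illustration.

```python
import sys
import typing as t


def resolve(annotation: str, namespace: dict[str, type]) -> type:
    # typing.Type["X"] wraps the string in a ForwardRef, mirroring resolve_annotation_313 above.
    fw_ref: t.ForwardRef = t.get_args(t.Type[annotation])[0]
    kwargs: dict[str, t.Any] = {
        "globalns": dict(namespace),
        "localns": None,
        "recursive_guard": frozenset(),
    }
    if sys.version_info >= (3, 13):
        kwargs["type_params"] = ()  # expected as a keyword on 3.13
    return t.cast(type, fw_ref._evaluate(**kwargs))


class Author:  # hypothetical class, used only for this demo
    pass


assert resolve("Author", {"Author": Author}) is Author
assert resolve("list[int]", {}) == list[int]
```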
@@ -410,7 +166,7 @@ class TypeDAL(pydal.DAL): # type: ignore
          debug: bool = False,
          lazy_tables: bool = False,
          db_uid: Optional[str] = None,
-         after_connection: typing.Callable[..., Any] = None,
+         after_connection: t.Callable[..., t.Any] = None,
          tables: Optional[list[str]] = None,
          ignore_field_case: bool = True,
          entity_quoting: bool = True,
@@ -420,6 +176,7 @@ class TypeDAL(pydal.DAL): # type: ignore
          use_env: bool | str = True,
          connection: Optional[str] = None,
          config: Optional[TypeDALConfig] = None,
+         lazy_policy: LazyPolicy | None = None,
      ) -> None:
          """
          Adds some internal tables after calling pydal's default init.
@@ -435,10 +192,12 @@ class TypeDAL(pydal.DAL): # type: ignore
              fake_migrate=fake_migrate,
              caching=enable_typedal_caching,
              pool_size=pool_size,
+             lazy_policy=lazy_policy,
          )

          self._config = config
          self.db = self
+         self._builder = TableDefinitionBuilder(self)

          if config.folder:
              Path(config.folder).mkdir(exist_ok=True)
@@ -473,7 +232,7 @@ class TypeDAL(pydal.DAL): # type: ignore
          self.try_define(_TypedalCache)
          self.try_define(_TypedalCacheDependency)

-     def try_define(self, model: Type[T], verbose: bool = False) -> Type[T]:
+     def try_define(self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
          """
          Try to define a model with migrate or fall back to fake migrate.
          """
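As shown above, the constructor gains a `lazy_policy` keyword that is forwarded into the generated config. A hedged usage sketch, assuming the package's top-level re-export of `TypeDAL`; the accepted values are defined by `typedal.config.LazyPolicy` and are not visible in this diff, so only the default is shown:

```python
from typedal import TypeDAL  # assuming the top-level re-export

# The URI is illustrative; lazy_policy=None simply keeps the default behaviour.
db = TypeDAL("sqlite://storage.sqlite", lazy_policy=None)
```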
@@ -491,125 +250,13 @@ class TypeDAL(pydal.DAL): # type: ignore
          # try again:
          return self.define(model, migrate=True, fake_migrate=True, redefine=True)

-     default_kwargs: typing.ClassVar[AnyDict] = {
+     default_kwargs: t.ClassVar[AnyDict] = {
          # fields are 'required' (notnull) by default:
          "notnull": True,
      }

-     def _define(self, cls: Type[T], **kwargs: Any) -> Type[T]:
-         # todo: new relationship item added should also invalidate (previously unrelated) cache result
-         # todo: option to enable/disable cache dependency behavior
-
-         # non-annotated variables have to be passed to define_table as kwargs
-         full_dict = all_dict(cls)  # includes properties from parents (e.g. useful for mixins)
-
-         tablename = self.to_snake(cls.__name__)
-         # grab annotations of cls and it's parents:
-         annotations = all_annotations(cls)
-         # extend with `prop = TypedField()` 'annotations':
-         annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
-         # remove internal stuff:
-         annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
-
-         typedfields: dict[str, TypedField[Any]] = {
-             k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
-         }
-
-         relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
-
-         fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
-
-         other_kwargs = kwargs | {
-             k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
-         }  # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
-         # now it's only used to extract relationships from the object.
-         # other properties of the class (incl methods) should not be touched
-
-         for key, field in typedfields.items():
-             # clone every property so it can be re-used across mixins:
-             clone = copy.copy(field)
-             setattr(cls, key, clone)
-             typedfields[key] = clone
-
-         # start with base classes and overwrite with current class:
-         relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
-
-         # keys of implicit references (also relationships):
-         reference_field_keys = [
-             k for k, v in fields.items() if str(v.type).split(" ")[0] in ("list:reference", "reference")
-         ]
-
-         # add implicit relationships:
-         # User; list[User]; TypedField[User]; TypedField[list[User]]; TypedField(User); TypedField(list[User])
-         relationships |= {
-             k: new_relationship
-             for k in reference_field_keys
-             if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
-         }
-
-         cache_dependency = self._config.caching and kwargs.pop("cache_dependency", True)
-
-         table: Table = self.define_table(tablename, *fields.values(), **kwargs)
-
-         for name, typed_field in typedfields.items():
-             field = fields[name]
-             typed_field.bind(field, table)
-
-         if issubclass(cls, TypedTable):
-             cls.__set_internals__(
-                 db=self,
-                 table=table,
-                 # by now, all relationships should be instances!
-                 relationships=typing.cast(dict[str, Relationship[Any]], relationships),
-             )
-             # map both name and rname:
-             self._class_map[str(table)] = cls
-             self._class_map[table._rname] = cls
-             cls.__on_define__(self)
-         else:
-             warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
-
-         if not tablename.startswith("typedal_") and cache_dependency:
-             table._before_update.append(lambda s, _: _remove_cache(s, tablename))
-             table._before_delete.append(lambda s: _remove_cache(s, tablename))
-
-         return cls
-
-     @typing.overload
-     def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[Type[T]], Type[T]]:
+     @t.overload
+     def define(self, maybe_cls: None = None, **kwargs: t.Any) -> t.Callable[[t.Type[T]], t.Type[T]]:
          """
          Typing Overload for define without a class.

@@ -617,8 +264,8 @@ class TypeDAL(pydal.DAL): # type: ignore
              class MyTable(TypedTable): ...
          """

-     @typing.overload
-     def define(self, maybe_cls: Type[T], **kwargs: Any) -> Type[T]:
+     @t.overload
+     def define(self, maybe_cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
          """
          Typing Overload for define with a class.

@@ -626,7 +273,11 @@ class TypeDAL(pydal.DAL): # type: ignore
              class MyTable(TypedTable): ...
          """

-     def define(
+     def define(
+         self,
+         maybe_cls: t.Type[T] | None = None,
+         **kwargs: t.Any,
+     ) -> t.Type[T] | t.Callable[[t.Type[T]], t.Type[T]]:
          """
          Can be used as a decorator on a class that inherits `TypedTable`, \
          or as a regular method if you need to define your classes before you have access to a 'db' instance.
@@ -649,39 +300,15 @@ class TypeDAL(pydal.DAL): # type: ignore
              the result of pydal.define_table
          """

-         def wrapper(cls: Type[T]) -> Type[T]:
-             return self._define(cls, **kwargs)
+         def wrapper(cls: t.Type[T]) -> t.Type[T]:
+             return self._builder.define(cls, **kwargs)

          if maybe_cls:
              return wrapper(maybe_cls)

          return wrapper

-     # """
-     # Remove a table by name (both on the database level and the typedal level).
-     # """
-     # # drop calls TypedTable.drop() and removes it from the `_class_map`
-     # if cls := self._class_map.pop(table_name, None):
-     #     cls.drop()
-
-     # def drop_all(self, max_retries: int = None) -> None:
-     #     """
-     #     Remove all tables and keep doing so until everything is gone!
-     #     """
-     #     retries = 0
-     #     if max_retries is None:
-     #         max_retries = len(self.tables)
-     #
-     #     while self.tables:
-     #         retries += 1
-     #         for table in self.tables:
-     #             self.drop(table)
-     #
-     #         if retries > max_retries:
-     #             raise RuntimeError("Could not delete all tables")
-
-     def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
+     def __call__(self, *_args: T_Query, **kwargs: t.Any) -> "TypedSet":
          """
          A db instance can be called directly to perform a query.

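The public `define` API is unchanged; only its implementation now lives in `TableDefinitionBuilder` (typedal/define.py), as the wrapper above shows. A short usage sketch consistent with the docstring; the model name and connection URI are illustrative, and the top-level imports assume the package's usual re-exports:

```python
from typedal import TypeDAL, TypedTable  # assuming the top-level re-exports

db = TypeDAL("sqlite:memory")  # in-memory pydal URI, for illustration


@db.define  # delegates to TableDefinitionBuilder.define() in typedal/define.py
class Person(TypedTable):  # hypothetical model
    name: str
    age: int | None  # Optional -> nullable column


row = Person.insert(name="Alice", age=30)
print(row.name)
```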
@@ -699,11 +326,11 @@ class TypeDAL(pydal.DAL): # type: ignore

          if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
              # table defined without @db.define decorator!
-             _cls: Type[TypedTable] = cls
+             _cls: t.Type[TypedTable] = cls
              args[0] = _cls.id != None

          _set = super().__call__(*args, **kwargs)
-         return typing.cast(TypedSet, _set)
+         return t.cast(TypedSet, _set)

      def __getitem__(self, key: str) -> "Table":
          """
@@ -714,9 +341,9 @@ class TypeDAL(pydal.DAL): # type: ignore
          Example:
              db['users'] -> user
          """
-         return typing.cast(Table, super().__getitem__(str(key)))
+         return t.cast(Table, super().__getitem__(str(key)))

-     def find_model(self, table_name: str) -> Type["TypedTable"] | None:
+     def find_model(self, table_name: str) -> t.Type["TypedTable"] | None:
          """
          Retrieves a mapped table class by its name.

@@ -731,96 +358,12 @@ class TypeDAL(pydal.DAL): # type: ignore
          Returns:
              The mapped table class if it exists, otherwise None.
          """
-         return self._class_map.get(table_name)
-
-     @classmethod
-     def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
-         # return Field(name, _type, **{**cls.default_kwargs, **kw})
-         kw_combined = cls.default_kwargs | kw
-         return Field(name, _type, **kw_combined)
-
-     @classmethod
-     def _annotation_to_pydal_fieldtype(
-         cls,
-         _ftype: T_annotation,
-         mut_kw: typing.MutableMapping[str, Any],
-     ) -> Optional[str]:
-         # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
-         ftype = typing.cast(type, _ftype)  # cast from Type to type to make mypy happy)
-
-         if isinstance(ftype, str):
-             # extract type from string
-             fw_ref: typing.ForwardRef = typing.get_args(Type[ftype])[0]
-             ftype = evaluate_forward_reference(fw_ref)
-
-         if mapping := BASIC_MAPPINGS.get(ftype):
-             # basi types
-             return mapping
-         elif isinstance(ftype, _Table):
-             # db.table
-             return f"reference {ftype._tablename}"
-         elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
-             # SomeTable
-             snakename = cls.to_snake(ftype.__name__)
-             return f"reference {snakename}"
-         elif isinstance(ftype, TypedField):
-             # FieldType(type, ...)
-             return ftype._to_field(mut_kw)
-         elif origin_is_subclass(ftype, TypedField):
-             # TypedField[int]
-             return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
-         elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
-             # list[str] -> str -> string -> list:string
-             _child_type = typing.get_args(ftype)[0]
-             _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
-             return f"list:{_child_type}"
-         elif is_union(ftype):
-             # str | int -> UnionType
-             # typing.Union[str | int] -> typing._UnionGenericAlias
-
-             # Optional[type] == type | None
-
-             match typing.get_args(ftype):
-                 case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
-                     # good union of Nullable
-
-                     # if a field is optional, it is nullable:
-                     mut_kw["notnull"] = False
-                     return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
-                 case _:
-                     # two types is not supported by the db!
-                     return None
-         else:
-             return None
-
-     @classmethod
-     def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
-         """
-         Convert a annotation into a pydal Field.
-
-         Example:
-             class MyTable:
-                 fname: ftype
-                 id: int
-                 name: str
-                 reference: Table
-                 other: TypedField(str, default="John Doe")  # default will be in kwargs
-         """
-         fname = cls.to_snake(fname)
+         return self._builder.class_map.get(table_name, None)

-         raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
+     @property
+     def _class_map(self) -> dict[str, t.Type["TypedTable"]]:
+         # alias for backward-compatibility
+         return self._builder.class_map

      @staticmethod
      def to_snake(camel: str) -> str:
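With the field-mapping helpers moved out of core.py, the name-to-class mapping now lives on the builder; `find_model` reads from it and `_class_map` stays available as a backward-compatible alias, as the hunk above shows. A small self-contained sketch (hypothetical model, assumed top-level imports):

```python
from typedal import TypeDAL, TypedTable  # assuming the top-level re-exports

db = TypeDAL("sqlite:memory")


@db.define
class Person(TypedTable):  # hypothetical model; table name becomes "person"
    name: str


# find_model looks the class up in the builder's class_map;
# db._class_map remains usable thanks to the alias property above.
assert db.find_model("person") is Person
assert db._class_map["person"] is Person
```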
@@ -829,2561 +372,91 @@ class TypeDAL(pydal.DAL): # type: ignore
          """
          return to_snake(camel)

- TypeDAL.representers.setdefault("rows_render", default_representer)
-
- P = typing.ParamSpec("P")
- R = typing.TypeVar("R")
-
-
- def reorder_fields(
-     table: pydal.objects.Table,
-     fields: typing.Iterable[str | Field | TypedField],
-     keep_others: bool = True,
- ) -> None:
-     """
-     Reorder fields of a pydal table.
-
-     Args:
-         table: The pydal table object (e.g., db.mytable).
-         fields: List of field names (str) or Field objects in desired order.
-         keep_others (bool):
-             - True (default): keep other fields at the end, in their original order.
-             - False: remove other fields (only keep what's specified).
-     """
-     # Normalize input to field names
-     desired = [f.name if isinstance(f, (TypedField, Field, pydal.objects.Field)) else str(f) for f in fields]
-
-     new_order = [f for f in desired if f in table._fields]
-
-     if keep_others:
-         # Start with desired fields, then append the rest
-         new_order.extend(f for f in table._fields if f not in desired)
-
-     table._fields = new_order
-
-
- class TableMeta(type):
-     """
-     This metaclass contains functionality on table classes, that doesn't exist on its instances.
-
-     Example:
-         class MyTable(TypedTable):
-             some_field: TypedField[int]
-
-         MyTable.update_or_insert(...)  # should work
-         MyTable.some_field  # -> Field, can be used to query etc.
-         row = MyTable.first()  # returns instance of MyTable
-         # row.update_or_insert(...)  # shouldn't work!
-         row.some_field  # -> int, with actual data
-     """
-
-     # set up by db.define:
-     _db: TypeDAL | None = None
-     _table: Table | None = None
-     _relationships: dict[str, Relationship[Any]] | None = None
-
-     #########################
-     # TypeDAL custom logic: #
-     #########################
-
-     def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
-         """
-         Store the related database and pydal table for later usage.
-         """
-         self._db = db
-         self._table = table
-         self._relationships = relationships
-
-     def __getattr__(self, col: str) -> Optional[Field]:
-         """
-         Magic method used by TypedTableMeta to get a database field with dot notation on a class.
-         """
-         if self._table:
-             return getattr(self._table, col, None)
-
-         return None
-
-     def _ensure_table_defined(self) -> Table:
-         if not self._table:
-             raise EnvironmentError("@define or db.define is not called on this class yet!")
-         return self._table
-
-     def __iter__(self) -> typing.Generator[Field, None, None]:
-         """
-         Loop through the columns of this model.
-         """
-         table = self._ensure_table_defined()
-         yield from iter(table)
-
-     def __getitem__(self, item: str) -> Field:
-         """
-         Allow dict notation to get a column of this table (-> Field instance).
-         """
-         table = self._ensure_table_defined()
-         return table[item]
-
-     def __str__(self) -> str:
-         """
-         Normally, just returns the underlying table name, but with a fallback if the model is unbound.
-         """
-         if self._table:
-             return str(self._table)
-         else:
-             return f"<unbound table {self.__name__}>"
-
-     def from_row(self: Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
-         """
-         Create a model instance from a pydal row.
-         """
-         return self(row)
-
-     def all(self: Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
-         """
-         Return all rows for this model.
-         """
-         return self.collect()
-
-     def get_relationships(self) -> dict[str, Relationship[Any]]:
-         """
-         Return the registered relationships of the current model.
-         """
-         return self._relationships or {}
-
-     ##########################
-     # TypeDAL Modified Logic #
-     ##########################
-
-     def insert(self: Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
+     def executesql(
+         self,
+         query: str | Template,
+         placeholders: t.Iterable[str] | dict[str, str] | None = None,
+         as_dict: bool = False,
+         fields: t.Iterable[Field | TypedField[t.Any]] | None = None,
+         colnames: t.Iterable[str] | None = None,
+         as_ordered_dict: bool = False,
+     ) -> list[t.Any]:
          """
+         Executes a raw SQL statement or a TypeDAL template query.

+         If `query` is provided as a `Template` and the system supports template
+         rendering, it will be processed with `sql_escape_template` before being
+         executed. Otherwise, the query is passed to the underlying DAL as-is.

          Args:
-     def bulk_insert(self: Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
-         """
-         Insert multiple rows, returns a TypedRows set of new instances.
-         """
-         table = self._ensure_table_defined()
-         result = table.bulk_insert(items)
-         return self.where(lambda row: row.id.belongs(result)).collect()
-
-     def update_or_insert(
-         self: Type[T_MetaInstance],
-         query: T_Query | AnyDict = DEFAULT,
-         **values: Any,
-     ) -> T_MetaInstance:
-         """
-         Update a row if query matches, else insert a new one.
-
-         Returns the created or updated instance.
-         """
-         table = self._ensure_table_defined()
-
-         if query is DEFAULT:
-             record = table(**values)
-         elif isinstance(query, dict):
-             record = table(**query)
-         else:
-             record = table(query)
-
-         if not record:
-             return self.insert(**values)
-
-         record.update_record(**values)
-         return self(record)
-
-     def validate_and_insert(
-         self: Type[T_MetaInstance],
-         **fields: Any,
-     ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
-         """
-         Validate input data and then insert a row.
-
-         Returns a tuple of (the created instance, a dict of errors).
-         """
-         table = self._ensure_table_defined()
-         result = table.validate_and_insert(**fields)
-         if row_id := result.get("id"):
-             return self(row_id), None
-         else:
-             return None, result.get("errors")
-
-     def validate_and_update(
-         self: Type[T_MetaInstance],
-         query: Query,
-         **fields: Any,
-     ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
-         """
-         Validate input data and then update max 1 row.
-
-         Returns a tuple of (the updated instance, a dict of errors).
-         """
-         table = self._ensure_table_defined()
-
-         result = table.validate_and_update(query, **fields)
-
-         if errors := result.get("errors"):
-             return None, errors
-         elif row_id := result.get("id"):
-             return self(row_id), None
-         else:  # pragma: no cover
-             # update on query without result (shouldnt happen)
-             return None, None
-
-     def validate_and_update_or_insert(
-         self: Type[T_MetaInstance],
-         query: Query,
-         **fields: Any,
-     ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
-         """
-         Validate input data and then update_and_insert (on max 1 row).
-
-         Returns a tuple of (the updated/created instance, a dict of errors).
-         """
-         table = self._ensure_table_defined()
-         result = table.validate_and_update_or_insert(query, **fields)
-
-         if errors := result.get("errors"):
-             return None, errors
-         elif row_id := result.get("id"):
-             return self(row_id), None
-         else:  # pragma: no cover
-             # update on query without result (shouldnt happen)
-             return None, None
-
-     def select(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
-         """
-         See QueryBuilder.select!
-         """
-         return QueryBuilder(self).select(*a, **kw)
-
-     def column(self: Type[T_MetaInstance], field: "TypedField[T] | T", **options: Unpack[SelectKwargs]) -> list[T]:
-         """
-         Get all values in a specific column.
-
-         Shortcut for `.select(field).execute().column(field)`.
-         """
-         return QueryBuilder(self).select(field, **options).execute().column(field)
-
-     def paginate(self: Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
-         """
-         See QueryBuilder.paginate!
-         """
-         return QueryBuilder(self).paginate(limit=limit, page=page)
-
-     def chunk(self: Type[T_MetaInstance], chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
-         """
-         See QueryBuilder.chunk!
-         """
-         return QueryBuilder(self).chunk(chunk_size)
-
-     def where(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
-         """
-         See QueryBuilder.where!
-         """
-         return QueryBuilder(self).where(*a, **kw)
-
-     def cache(self: Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
-         """
-         See QueryBuilder.cache!
-         """
-         return QueryBuilder(self).cache(*deps, **kwargs)
-
-     def count(self: Type[T_MetaInstance]) -> int:
-         """
-         See QueryBuilder.count!
-         """
-         return QueryBuilder(self).count()
-
-     def exists(self: Type[T_MetaInstance]) -> bool:
-         """
-         See QueryBuilder.exists!
-         """
-         return QueryBuilder(self).exists()
-
-     def first(self: Type[T_MetaInstance]) -> T_MetaInstance | None:
-         """
-         See QueryBuilder.first!
-         """
-         return QueryBuilder(self).first()
-
-     def first_or_fail(self: Type[T_MetaInstance]) -> T_MetaInstance:
-         """
-         See QueryBuilder.first_or_fail!
-         """
-         return QueryBuilder(self).first_or_fail()
-
-     def join(
-         self: Type[T_MetaInstance],
-         *fields: str | Type["TypedTable"],
-         method: JOIN_OPTIONS = None,
-         on: OnQuery | list[Expression] | Expression = None,
-         condition: Condition = None,
-         condition_and: Condition = None,
-     ) -> "QueryBuilder[T_MetaInstance]":
-         """
-         See QueryBuilder.join!
-         """
-         return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method, condition_and=condition_and)
-
-     def collect(self: Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
-         """
-         See QueryBuilder.collect!
-         """
-         return QueryBuilder(self).collect(verbose=verbose)
-
-     @property
-     def ALL(cls) -> pydal.objects.SQLALL:
-         """
-         Select all fields for this table.
-         """
-         table = cls._ensure_table_defined()
-
-         return table.ALL
-
-     ##########################
-     # TypeDAL Shadowed Logic #
-     ##########################
-     fields: list[str]
+             query (str | Template): The SQL query to execute, either a plain
+                 string or a `Template` (created via the `t""` syntax).
+             placeholders (Iterable[str] | dict[str, str] | None, optional):
+                 Parameters to substitute into the SQL statement. Can be a sequence
+                 (for positional parameters) or a dictionary (for named parameters).
+                 Usually not applicable when using a t-string, since template
+                 expressions handle interpolation directly.
+             as_dict (bool, optional): If True, return rows as dictionaries keyed by
+                 column name. Defaults to False.
+             fields (Iterable[Field | TypedField] | None, optional): Explicit set of
+                 fields to map results onto. Defaults to None.
+             colnames (Iterable[str] | None, optional): Explicit column names to use
+                 in the result set. Defaults to None.
+             as_ordered_dict (bool, optional): If True, return rows as `OrderedDict`s
+                 preserving column order. Defaults to False.

-     def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
-         """
-         Add an index on some columns of this table.
-         """
-         table = self._ensure_table_defined()
-         result = table.create_index(name, *fields, **kwargs)
-         return typing.cast(bool, result)
-
-     def drop_index(self, name: str, if_exists: bool = False) -> bool:
-         """
-         Remove an index from this table.
-         """
-         table = self._ensure_table_defined()
-         result = table.drop_index(name, if_exists)
-         return typing.cast(bool, result)
-
-     def import_from_csv_file(
-         self,
-         csvfile: typing.TextIO,
-         id_map: dict[str, str] = None,
-         null: Any = "<NULL>",
-         unique: str = "uuid",
-         id_offset: dict[str, int] = None,  # id_offset used only when id_map is None
-         transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
-         validate: bool = False,
-         encoding: str = "utf-8",
-         delimiter: str = ",",
-         quotechar: str = '"',
-         quoting: int = csv.QUOTE_MINIMAL,
-         restore: bool = False,
-         **kwargs: Any,
-     ) -> None:
-         """
-         Load a csv file into the database.
-         """
-         table = self._ensure_table_defined()
-         table.import_from_csv_file(
-             csvfile,
-             id_map=id_map,
-             null=null,
-             unique=unique,
-             id_offset=id_offset,
-             transform=transform,
-             validate=validate,
-             encoding=encoding,
-             delimiter=delimiter,
-             quotechar=quotechar,
-             quoting=quoting,
-             restore=restore,
-             **kwargs,
+         Returns:
+             list[t.Any]: The query result set. Typically a list of tuples if
+             `as_dict` and `as_ordered_dict` are False, or a list of dict-like
+             objects if those flags are enabled.
+         """
+         if SYSTEM_SUPPORTS_TEMPLATES and isinstance(query, Template):  # pragma: no cover
+             query = sql_escape_template(self, query)
+
+         rows: list[t.Any] = super().executesql(
+             query,
+             placeholders=placeholders,
+             as_dict=as_dict,
+             fields=fields,
+             colnames=colnames,
+             as_ordered_dict=as_ordered_dict,
          )

-         """
-         Shadow Table.on.
-
-         Used for joins.
-
-         See Also:
-             http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
-         """
-         table = self._ensure_table_defined()
-         return typing.cast(Expression, table.on(query))
+         return rows

|
-
def
|
|
427
|
+
def sql_expression(
|
|
428
|
+
self,
|
|
429
|
+
sql_fragment: str | Template,
|
|
430
|
+
*raw_args: t.Any,
|
|
431
|
+
output_type: str | None = None,
|
|
432
|
+
**raw_kwargs: t.Any,
|
|
433
|
+
) -> Expression:
|
|
1269
434
|
"""
|
|
1270
|
-
|
|
435
|
+
Creates a pydal Expression object representing a raw SQL fragment.
|
|
1271
436
|
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
return typing.cast(Type[T_MetaInstance], table.with_alias(alias))
|
|
437
|
+
Args:
|
|
438
|
+
sql_fragment: The raw SQL fragment.
|
|
439
|
+
In python 3.14+, this can also be a t-string. In that case, don't pass other args or kwargs.
|
|
440
|
+
*raw_args: Arguments to be interpolated into the SQL fragment.
|
|
441
|
+
output_type: The expected output type of the expression.
|
|
442
|
+
**raw_kwargs: Keyword arguments to be interpolated into the SQL fragment.
|
|
1279
443
|
|
|
1280
|
-
|
|
444
|
+
Returns:
|
|
445
|
+
A pydal Expression object.
|
|
1281
446
|
"""
|
|
1282
|
-
|
|
447
|
+
return sql_expression(self, sql_fragment, *raw_args, output_type=output_type, **raw_kwargs)
|
|
1283
448
|
|
|
1284
|
-
Useful for joins when joining the same table multiple times
|
|
1285
|
-
and you don't want to keep track of aliases yourself.
|
|
1286
|
-
"""
|
|
1287
|
-
key = f"{self.__name__.lower()}_{hash(uuid.uuid4())}"
|
|
1288
|
-
return self.with_alias(key)
|
|
1289
|
-
|
|
1290
|
-
# hooks:
|
|
1291
|
-
def _hook_once(
|
|
1292
|
-
cls: Type[T_MetaInstance],
|
|
1293
|
-
hooks: list[typing.Callable[P, R]],
|
|
1294
|
-
fn: typing.Callable[P, R],
|
|
1295
|
-
) -> Type[T_MetaInstance]:
|
|
1296
|
-
@functools.wraps(fn)
|
|
1297
|
-
def wraps(*a: P.args, **kw: P.kwargs) -> R:
|
|
1298
|
-
try:
|
|
1299
|
-
return fn(*a, **kw)
|
|
1300
|
-
finally:
|
|
1301
|
-
hooks.remove(wraps)
|
|
1302
|
-
|
|
1303
|
-
hooks.append(wraps)
|
|
1304
|
-
return cls
|
|
1305
|
-
|
|
1306
|
-
def before_insert(
|
|
1307
|
-
cls: Type[T_MetaInstance],
|
|
1308
|
-
fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]],
|
|
1309
|
-
) -> Type[T_MetaInstance]:
|
|
1310
|
-
"""
|
|
1311
|
-
Add a before insert hook.
|
|
1312
|
-
"""
|
|
1313
|
-
if fn not in cls._before_insert:
|
|
1314
|
-
cls._before_insert.append(fn)
|
|
1315
|
-
return cls
|
|
1316
|
-
|
|
1317
|
-
def before_insert_once(
|
|
1318
|
-
cls: Type[T_MetaInstance],
|
|
1319
|
-
fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]],
|
|
1320
|
-
) -> Type[T_MetaInstance]:
|
|
1321
|
-
"""
|
|
1322
|
-
Add a before insert hook that only fires once and then removes itself.
|
|
1323
|
-
"""
|
|
1324
|
-
return cls._hook_once(cls._before_insert, fn) # type: ignore
|
|
1325
449
|
|
|
1326
|
-
|
|
1327
|
-
cls: Type[T_MetaInstance],
|
|
1328
|
-
fn: (
|
|
1329
|
-
typing.Callable[[T_MetaInstance, Reference], Optional[bool]]
|
|
1330
|
-
| typing.Callable[[OpRow, Reference], Optional[bool]]
|
|
1331
|
-
),
|
|
1332
|
-
) -> Type[T_MetaInstance]:
|
|
1333
|
-
"""
|
|
1334
|
-
Add an after insert hook.
|
|
1335
|
-
"""
|
|
1336
|
-
if fn not in cls._after_insert:
|
|
1337
|
-
cls._after_insert.append(fn)
|
|
1338
|
-
return cls
|
|
1339
|
-
|
|
1340
|
-
def after_insert_once(
|
|
1341
|
-
cls: Type[T_MetaInstance],
|
|
1342
|
-
fn: (
|
|
1343
|
-
typing.Callable[[T_MetaInstance, Reference], Optional[bool]]
|
|
1344
|
-
| typing.Callable[[OpRow, Reference], Optional[bool]]
|
|
1345
|
-
),
|
|
1346
|
-
) -> Type[T_MetaInstance]:
|
|
1347
|
-
"""
|
|
1348
|
-
Add an after insert hook that only fires once and then removes itself.
|
|
1349
|
-
"""
|
|
1350
|
-
return cls._hook_once(cls._after_insert, fn) # type: ignore
|
|
450
|
+
TypeDAL.representers.setdefault("rows_render", default_representer)
|
|
1351
451
|
|
|
1352
|
-
|
|
1353
|
-
cls: Type[T_MetaInstance],
|
|
1354
|
-
fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
|
|
1355
|
-
) -> Type[T_MetaInstance]:
|
|
1356
|
-
"""
|
|
1357
|
-
Add a before update hook.
|
|
1358
|
-
"""
|
|
1359
|
-
if fn not in cls._before_update:
|
|
1360
|
-
cls._before_update.append(fn)
|
|
1361
|
-
return cls
|
|
1362
|
-
|
|
1363
|
-
def before_update_once(
|
|
1364
|
-
cls,
|
|
1365
|
-
fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
|
|
1366
|
-
) -> Type[T_MetaInstance]:
|
|
1367
|
-
"""
|
|
1368
|
-
Add a before update hook that only fires once and then removes itself.
|
|
1369
|
-
"""
|
|
1370
|
-
return cls._hook_once(cls._before_update, fn) # type: ignore
|
|
452
|
+
# note: these imports exist at the bottom of this file to prevent circular import issues:
|
|
1371
453
|
|
|
1372
|
-
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
|
|
1376
|
-
"""
|
|
1377
|
-
Add an after update hook.
|
|
1378
|
-
"""
|
|
1379
|
-
if fn not in cls._after_update:
|
|
1380
|
-
cls._after_update.append(fn)
|
|
1381
|
-
return cls
|
|
1382
|
-
|
|
1383
|
-
def after_update_once(
|
|
1384
|
-
cls: Type[T_MetaInstance],
|
|
1385
|
-
fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
|
|
1386
|
-
) -> Type[T_MetaInstance]:
|
|
1387
|
-
"""
|
|
1388
|
-
Add an after update hook that only fires once and then removes itself.
|
|
1389
|
-
"""
|
|
1390
|
-
return cls._hook_once(cls._after_update, fn) # type: ignore
|
|
454
|
+
from .fields import * # noqa: E402 F403 # isort: skip ; to fill globals() scope
|
|
455
|
+
from .define import TableDefinitionBuilder # noqa: E402
|
|
456
|
+
from .rows import TypedSet # noqa: E402
|
|
457
|
+
from .tables import TypedTable # noqa: E402
|
|
1391
458
|
|
|
1392
|
-
|
|
1393
|
-
"""
|
|
1394
|
-
Add a before delete hook.
|
|
1395
|
-
"""
|
|
1396
|
-
if fn not in cls._before_delete:
|
|
1397
|
-
cls._before_delete.append(fn)
|
|
1398
|
-
return cls
|
|
1399
|
-
|
|
1400
|
-
def before_delete_once(
|
|
1401
|
-
cls: Type[T_MetaInstance],
|
|
1402
|
-
fn: typing.Callable[[Set], Optional[bool]],
|
|
1403
|
-
) -> Type[T_MetaInstance]:
|
|
1404
|
-
"""
|
|
1405
|
-
Add a before delete hook that only fires once and then removes itself.
|
|
1406
|
-
"""
|
|
1407
|
-
return cls._hook_once(cls._before_delete, fn)
|
|
1408
|
-
|
|
1409
|
-
def after_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
|
|
1410
|
-
"""
|
|
1411
|
-
Add an after delete hook.
|
|
1412
|
-
"""
|
|
1413
|
-
if fn not in cls._after_delete:
|
|
1414
|
-
cls._after_delete.append(fn)
|
|
1415
|
-
return cls
|
|
1416
|
-
|
|
1417
|
-
def after_delete_once(
|
|
1418
|
-
cls: Type[T_MetaInstance],
|
|
1419
|
-
fn: typing.Callable[[Set], Optional[bool]],
|
|
1420
|
-
) -> Type[T_MetaInstance]:
|
|
1421
|
-
"""
|
|
1422
|
-
Add an after delete hook that only fires once and then removes itself.
|
|
1423
|
-
"""
|
|
1424
|
-
return cls._hook_once(cls._after_delete, fn)
|
|
1425
|
-
|
|
1426
|
-
def reorder_fields(cls, *fields: str | Field | TypedField, keep_others: bool = True):
|
|
1427
|
-
"""
|
|
1428
|
-
Reorder fields of a typedal table.
|
|
1429
|
-
|
|
1430
|
-
Args:
|
|
1431
|
-
fields: List of field names (str) or Field objects in desired order.
|
|
1432
|
-
keep_others (bool):
|
|
1433
|
-
- True (default): keep other fields at the end, in their original order.
|
|
1434
|
-
- False: remove other fields (only keep what's specified).
|
|
1435
|
-
"""
|
|
1436
|
-
|
|
1437
|
-
return reorder_fields(cls._table, fields, keep_others=keep_others)
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
class TypedField(Expression, typing.Generic[T_Value]): # pragma: no cover
|
|
1441
|
-
"""
|
|
1442
|
-
Typed version of pydal.Field, which will be converted to a normal Field in the background.
|
|
1443
|
-
"""
|
|
1444
|
-
|
|
1445
|
-
# will be set by .bind on db.define
|
|
1446
|
-
name = ""
|
|
1447
|
-
_db: Optional[pydal.DAL] = None
|
|
1448
|
-
_rname: Optional[str] = None
|
|
1449
|
-
_table: Optional[Table] = None
|
|
1450
|
-
_field: Optional[Field] = None
|
|
1451
|
-
|
|
1452
|
-
_type: T_annotation
|
|
1453
|
-
kwargs: Any
|
|
1454
|
-
|
|
1455
|
-
requires: Validator | typing.Iterable[Validator]
|
|
1456
|
-
|
|
1457
|
-
# NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
|
|
1458
|
-
|
|
1459
|
-
def __init__(
|
|
1460
|
-
self,
|
|
1461
|
-
_type: Type[T_Value] | types.UnionType = str, # type: ignore
|
|
1462
|
-
/,
|
|
1463
|
-
**settings: Unpack[FieldSettings],
|
|
1464
|
-
) -> None:
|
|
1465
|
-
"""
|
|
1466
|
-
Typed version of pydal.Field, which will be converted to a normal Field in the background.
|
|
1467
|
-
|
|
1468
|
-
Provide the Python type for this field as the first positional argument
|
|
1469
|
-
and any other settings to Field() as keyword parameters.
|
|
1470
|
-
"""
|
|
1471
|
-
self._type = _type
|
|
1472
|
-
self.kwargs = settings
|
|
1473
|
-
# super().__init__()
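A hedged model sketch using the TypedField signature above (not part of the diff; the `Person` model and its fields are made up):

class Person(TypedTable):
    # first positional argument is the Python type, keyword arguments are passed on to pydal's Field()
    name: TypedField[str] = TypedField(str, notnull=True)
    age: TypedField[int] = TypedField(int, default=0)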
|
|
1474
|
-
|
|
1475
|
-
@typing.overload
|
|
1476
|
-
def __get__(self, instance: T_MetaInstance, owner: Type[T_MetaInstance]) -> T_Value: # pragma: no cover
|
|
1477
|
-
"""
|
|
1478
|
-
row.field -> (actual data).
|
|
1479
|
-
"""
|
|
1480
|
-
|
|
1481
|
-
@typing.overload
|
|
1482
|
-
def __get__(self, instance: None, owner: "Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
|
|
1483
|
-
"""
|
|
1484
|
-
Table.field -> Field.
|
|
1485
|
-
"""
|
|
1486
|
-
|
|
1487
|
-
def __get__(
|
|
1488
|
-
self,
|
|
1489
|
-
instance: T_MetaInstance | None,
|
|
1490
|
-
owner: Type[T_MetaInstance],
|
|
1491
|
-
) -> typing.Union[T_Value, "TypedField[T_Value]"]:
|
|
1492
|
-
"""
|
|
1493
|
-
Since this class is a Descriptor field, \
|
|
1494
|
-
it returns something else depending on if it's called on a class or instance.
|
|
1495
|
-
|
|
1496
|
-
(this is mostly for mypy/typing)
|
|
1497
|
-
"""
|
|
1498
|
-
if instance:
|
|
1499
|
-
# this is only reached in a very specific case:
|
|
1500
|
-
# an instance of the object was created with a specific set of fields selected (excluding the current one)
|
|
1501
|
-
# in that case, no value was stored in the owner -> return None (since the field was not selected)
|
|
1502
|
-
return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
|
|
1503
|
-
else:
|
|
1504
|
-
# getting as class -> return actual field so pydal understands it when using in query etc.
|
|
1505
|
-
return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
|
|
1506
|
-
|
|
1507
|
-
def __str__(self) -> str:
|
|
1508
|
-
"""
|
|
1509
|
-
String representation of a Typed Field.
|
|
1510
|
-
|
|
1511
|
-
If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
|
|
1512
|
-
otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
|
|
1513
|
-
"""
|
|
1514
|
-
return str(self._field) if self._field else ""
|
|
1515
|
-
|
|
1516
|
-
def __repr__(self) -> str:
|
|
1517
|
-
"""
|
|
1518
|
-
More detailed string representation of a Typed Field.
|
|
1519
|
-
|
|
1520
|
-
Uses __str__ and adds the provided extra options (kwargs) in the representation.
|
|
1521
|
-
"""
|
|
1522
|
-
s = self.__str__()
|
|
1523
|
-
|
|
1524
|
-
if "type" in self.kwargs:
|
|
1525
|
-
# manual type in kwargs supplied
|
|
1526
|
-
t = self.kwargs["type"]
|
|
1527
|
-
elif issubclass(type, type(self._type)):
|
|
1528
|
-
# normal type, str.__name__ = 'str'
|
|
1529
|
-
t = getattr(self._type, "__name__", str(self._type))
|
|
1530
|
-
elif t_args := typing.get_args(self._type):
|
|
1531
|
-
# list[str] -> 'str'
|
|
1532
|
-
t = t_args[0].__name__
|
|
1533
|
-
else: # pragma: no cover
|
|
1534
|
-
# fallback - something else, may not even happen, I'm not sure
|
|
1535
|
-
t = self._type
|
|
1536
|
-
|
|
1537
|
-
s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
|
|
1538
|
-
|
|
1539
|
-
kw = self.kwargs.copy()
|
|
1540
|
-
kw.pop("type", None)
|
|
1541
|
-
return f"<{s} with options {kw}>"
|
|
1542
|
-
|
|
1543
|
-
def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
|
|
1544
|
-
"""
|
|
1545
|
-
Convert a Typed Field instance to a pydal.Field.
|
|
1546
|
-
|
|
1547
|
-
Actual logic in TypeDAL._to_field but this function creates the pydal type name and updates the kwarg settings.
|
|
1548
|
-
"""
|
|
1549
|
-
other_kwargs = self.kwargs.copy()
|
|
1550
|
-
extra_kwargs.update(other_kwargs) # <- modifies and overwrites the default kwargs with user-specified ones
|
|
1551
|
-
return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
|
|
1552
|
-
|
|
1553
|
-
def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
|
|
1554
|
-
"""
|
|
1555
|
-
Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
|
|
1556
|
-
"""
|
|
1557
|
-
self._table = table
|
|
1558
|
-
self._field = field
|
|
1559
|
-
|
|
1560
|
-
def __getattr__(self, key: str) -> Any:
|
|
1561
|
-
"""
|
|
1562
|
-
If the regular getattribute does not work, try to get info from the related Field.
|
|
1563
|
-
"""
|
|
1564
|
-
with contextlib.suppress(AttributeError):
|
|
1565
|
-
return super().__getattribute__(key)
|
|
1566
|
-
|
|
1567
|
-
# try on actual field:
|
|
1568
|
-
return getattr(self._field, key)
|
|
1569
|
-
|
|
1570
|
-
def __eq__(self, other: Any) -> Query:
|
|
1571
|
-
"""
|
|
1572
|
-
Performing == on a Field will result in a Query.
|
|
1573
|
-
"""
|
|
1574
|
-
return typing.cast(Query, self._field == other)
|
|
1575
|
-
|
|
1576
|
-
def __ne__(self, other: Any) -> Query:
|
|
1577
|
-
"""
|
|
1578
|
-
Performing != on a Field will result in a Query.
|
|
1579
|
-
"""
|
|
1580
|
-
return typing.cast(Query, self._field != other)
|
|
1581
|
-
|
|
1582
|
-
def __gt__(self, other: Any) -> Query:
|
|
1583
|
-
"""
|
|
1584
|
-
Performing > on a Field will result in a Query.
|
|
1585
|
-
"""
|
|
1586
|
-
return typing.cast(Query, self._field > other)
|
|
1587
|
-
|
|
1588
|
-
def __lt__(self, other: Any) -> Query:
|
|
1589
|
-
"""
|
|
1590
|
-
Performing < on a Field will result in a Query.
|
|
1591
|
-
"""
|
|
1592
|
-
return typing.cast(Query, self._field < other)
|
|
1593
|
-
|
|
1594
|
-
def __ge__(self, other: Any) -> Query:
|
|
1595
|
-
"""
|
|
1596
|
-
Performing >= on a Field will result in a Query.
|
|
1597
|
-
"""
|
|
1598
|
-
return typing.cast(Query, self._field >= other)
|
|
1599
|
-
|
|
1600
|
-
def __le__(self, other: Any) -> Query:
|
|
1601
|
-
"""
|
|
1602
|
-
Performing <= on a Field will result in a Query.
|
|
1603
|
-
"""
|
|
1604
|
-
return typing.cast(Query, self._field <= other)
|
|
1605
|
-
|
|
1606
|
-
def __hash__(self) -> int:
|
|
1607
|
-
"""
|
|
1608
|
-
Shadow Field.__hash__.
|
|
1609
|
-
"""
|
|
1610
|
-
return hash(self._field)
|
|
1611
|
-
|
|
1612
|
-
def __invert__(self) -> Expression:
|
|
1613
|
-
"""
|
|
1614
|
-
Performing ~ on a Field will result in an Expression.
|
|
1615
|
-
"""
|
|
1616
|
-
if not self._field: # pragma: no cover
|
|
1617
|
-
raise ValueError("Unbound Field can not be inverted!")
|
|
1618
|
-
|
|
1619
|
-
return typing.cast(Expression, ~self._field)
|
|
1620
|
-
|
|
1621
|
-
def lower(self) -> Expression:
|
|
1622
|
-
"""
|
|
1623
|
-
For string-fields: compare lowercased values.
|
|
1624
|
-
"""
|
|
1625
|
-
if not self._field: # pragma: no cover
|
|
1626
|
-
raise ValueError("Unbound Field can not be lowered!")
|
|
1627
|
-
|
|
1628
|
-
return typing.cast(Expression, self._field.lower())
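A hedged sketch of what these comparison and expression overloads enable (not part of the diff; `Person` and `db` are assumed to exist):

query = (Person.age >= 18) & (Person.name.lower() == "bob")  # class-level access yields pydal Fields/Queries
adults = db(query).select()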
|
|
1629
|
-
|
|
1630
|
-
# ... etc
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
class _TypedTable:
|
|
1634
|
-
"""
|
|
1635
|
-
This class is a final shared parent between TypedTable and Mixins.
|
|
1636
|
-
|
|
1637
|
-
This needs to exist because otherwise the __on_define__ of Mixins are not executed.
|
|
1638
|
-
Notably, this class exists at a level ABOVE the `metaclass=TableMeta`,
|
|
1639
|
-
because otherwise typing gets confused when Mixins are used and multiple types could satisfy
|
|
1640
|
-
generic 'T subclass of TypedTable'
|
|
1641
|
-
-> Setting 'TypedTable' as the parent for Mixin does not work at runtime (and only partially works at type-check time)
|
|
1642
|
-
"""
|
|
1643
|
-
|
|
1644
|
-
id: "TypedField[int]"
|
|
1645
|
-
|
|
1646
|
-
_before_insert: list[typing.Callable[[Self], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]]]
|
|
1647
|
-
_after_insert: list[
|
|
1648
|
-
typing.Callable[[Self, Reference], Optional[bool]] | typing.Callable[[OpRow, Reference], Optional[bool]]
|
|
1649
|
-
]
|
|
1650
|
-
_before_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
|
|
1651
|
-
_after_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
|
|
1652
|
-
_before_delete: list[typing.Callable[[Set], Optional[bool]]]
|
|
1653
|
-
_after_delete: list[typing.Callable[[Set], Optional[bool]]]
|
|
1654
|
-
|
|
1655
|
-
@classmethod
|
|
1656
|
-
def __on_define__(cls, db: TypeDAL) -> None:
|
|
1657
|
-
"""
|
|
1658
|
-
Method that can be implemented by tables to do an action after db.define is completed.
|
|
1659
|
-
|
|
1660
|
-
This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
|
|
1661
|
-
where you need a reference to the current database, which may not exist yet when defining the model.
|
|
1662
|
-
"""
|
|
1663
|
-
|
|
1664
|
-
@classproperty
|
|
1665
|
-
def _hooks(cls) -> dict[str, list[typing.Callable[..., Optional[bool]]]]:
|
|
1666
|
-
return {
|
|
1667
|
-
"before_insert": cls._before_insert,
|
|
1668
|
-
"after_insert": cls._after_insert,
|
|
1669
|
-
"before_update": cls._before_update,
|
|
1670
|
-
"after_update": cls._after_update,
|
|
1671
|
-
"before_delete": cls._before_delete,
|
|
1672
|
-
"after_delete": cls._after_delete,
|
|
1673
|
-
}
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
class TypedTable(_TypedTable, metaclass=TableMeta):
|
|
1677
|
-
"""
|
|
1678
|
-
Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
|
|
1679
|
-
"""
|
|
1680
|
-
|
|
1681
|
-
# set up by 'new':
|
|
1682
|
-
_row: Row | None = None
|
|
1683
|
-
_rows: tuple[Row, ...] = ()
|
|
1684
|
-
|
|
1685
|
-
_with: list[str]
|
|
1686
|
-
|
|
1687
|
-
def _setup_instance_methods(self) -> None:
|
|
1688
|
-
self.as_dict = self._as_dict # type: ignore
|
|
1689
|
-
self.__json__ = self.as_json = self._as_json # type: ignore
|
|
1690
|
-
# self.as_yaml = self._as_yaml # type: ignore
|
|
1691
|
-
self.as_xml = self._as_xml # type: ignore
|
|
1692
|
-
|
|
1693
|
-
self.update = self._update # type: ignore
|
|
1694
|
-
|
|
1695
|
-
self.delete_record = self._delete_record # type: ignore
|
|
1696
|
-
self.update_record = self._update_record # type: ignore
|
|
1697
|
-
|
|
1698
|
-
def __new__(
|
|
1699
|
-
cls,
|
|
1700
|
-
row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None,
|
|
1701
|
-
**filters: Any,
|
|
1702
|
-
) -> Self:
|
|
1703
|
-
"""
|
|
1704
|
-
Create a Typed Rows model instance from an existing row, ID or query.
|
|
1705
|
-
|
|
1706
|
-
Examples:
|
|
1707
|
-
MyTable(1)
|
|
1708
|
-
MyTable(id=1)
|
|
1709
|
-
MyTable(MyTable.id == 1)
|
|
1710
|
-
"""
|
|
1711
|
-
table = cls._ensure_table_defined()
|
|
1712
|
-
inst = super().__new__(cls)
|
|
1713
|
-
|
|
1714
|
-
if isinstance(row_or_id, TypedTable):
|
|
1715
|
-
# existing typed table instance!
|
|
1716
|
-
return typing.cast(Self, row_or_id)
|
|
1717
|
-
|
|
1718
|
-
elif isinstance(row_or_id, pydal.objects.Row):
|
|
1719
|
-
row = row_or_id
|
|
1720
|
-
elif row_or_id is not None:
|
|
1721
|
-
row = table(row_or_id, **filters)
|
|
1722
|
-
elif filters:
|
|
1723
|
-
row = table(**filters)
|
|
1724
|
-
else:
|
|
1725
|
-
# dummy object
|
|
1726
|
-
return inst
|
|
1727
|
-
|
|
1728
|
-
if not row:
|
|
1729
|
-
return None # type: ignore
|
|
1730
|
-
|
|
1731
|
-
inst._row = row
|
|
1732
|
-
|
|
1733
|
-
if hasattr(row, "id"):
|
|
1734
|
-
inst.__dict__.update(row)
|
|
1735
|
-
else:
|
|
1736
|
-
# deal with _extra (and possibly others?)
|
|
1737
|
-
# Row <{actual: {}, _extra: ...}>
|
|
1738
|
-
inst.__dict__.update(row[str(cls)])
|
|
1739
|
-
|
|
1740
|
-
inst._setup_instance_methods()
|
|
1741
|
-
return inst
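Spelled out as a hedged sketch, mirroring the constructor forms listed in the docstring above (assuming a `Person` model):

row_a = Person(1)               # by id
row_b = Person(id=1)            # by keyword filter
row_c = Person(Person.id == 1)  # by query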
|
|
1742
|
-
|
|
1743
|
-
def __iter__(self) -> typing.Generator[Any, None, None]:
|
|
1744
|
-
"""
|
|
1745
|
-
Allows looping through the columns.
|
|
1746
|
-
"""
|
|
1747
|
-
row = self._ensure_matching_row()
|
|
1748
|
-
yield from iter(row)
|
|
1749
|
-
|
|
1750
|
-
def __getitem__(self, item: str) -> Any:
|
|
1751
|
-
"""
|
|
1752
|
-
Allows dictionary notation to get columns.
|
|
1753
|
-
"""
|
|
1754
|
-
if item in self.__dict__:
|
|
1755
|
-
return self.__dict__.get(item)
|
|
1756
|
-
|
|
1757
|
-
# fallback to lookup in row
|
|
1758
|
-
if self._row:
|
|
1759
|
-
return self._row[item]
|
|
1760
|
-
|
|
1761
|
-
# nothing found!
|
|
1762
|
-
raise KeyError(item)
|
|
1763
|
-
|
|
1764
|
-
def __getattr__(self, item: str) -> Any:
|
|
1765
|
-
"""
|
|
1766
|
-
Allows dot notation to get columns.
|
|
1767
|
-
"""
|
|
1768
|
-
if value := self.get(item):
|
|
1769
|
-
return value
|
|
1770
|
-
|
|
1771
|
-
raise AttributeError(item)
|
|
1772
|
-
|
|
1773
|
-
def keys(self):
|
|
1774
|
-
"""
|
|
1775
|
-
Return the combination of row + relationship keys.
|
|
1776
|
-
|
|
1777
|
-
Used by dict(row).
|
|
1778
|
-
"""
|
|
1779
|
-
return list(self._row.keys()) + getattr(self, "_with", [])
|
|
1780
|
-
|
|
1781
|
-
def get(self, item: str, default: Any = None) -> Any:
|
|
1782
|
-
"""
|
|
1783
|
-
Try to get a column from this instance, else return default.
|
|
1784
|
-
"""
|
|
1785
|
-
try:
|
|
1786
|
-
return self.__getitem__(item)
|
|
1787
|
-
except KeyError:
|
|
1788
|
-
return default
|
|
1789
|
-
|
|
1790
|
-
def __setitem__(self, key: str, value: Any) -> None:
|
|
1791
|
-
"""
|
|
1792
|
-
Data can both be updated via dot and dict notation.
|
|
1793
|
-
"""
|
|
1794
|
-
return setattr(self, key, value)
|
|
1795
|
-
|
|
1796
|
-
def __int__(self) -> int:
|
|
1797
|
-
"""
|
|
1798
|
-
Calling int on a model instance will return its id.
|
|
1799
|
-
"""
|
|
1800
|
-
return getattr(self, "id", 0)
|
|
1801
|
-
|
|
1802
|
-
def __bool__(self) -> bool:
|
|
1803
|
-
"""
|
|
1804
|
-
If the instance has an underlying row with data, it is truthy.
|
|
1805
|
-
"""
|
|
1806
|
-
return bool(getattr(self, "_row", False))
|
|
1807
|
-
|
|
1808
|
-
def _ensure_matching_row(self) -> Row:
|
|
1809
|
-
if not getattr(self, "_row", None):
|
|
1810
|
-
raise EnvironmentError("Trying to access non-existant row. Maybe it was deleted or not yet initialized?")
|
|
1811
|
-
return self._row
|
|
1812
|
-
|
|
1813
|
-
def __repr__(self) -> str:
|
|
1814
|
-
"""
|
|
1815
|
-
String representation of the model instance.
|
|
1816
|
-
"""
|
|
1817
|
-
model_name = self.__class__.__name__
|
|
1818
|
-
model_data = {}
|
|
1819
|
-
|
|
1820
|
-
if self._row:
|
|
1821
|
-
model_data = self._row.as_json()
|
|
1822
|
-
|
|
1823
|
-
details = model_name
|
|
1824
|
-
details += f"({model_data})"
|
|
1825
|
-
|
|
1826
|
-
if relationships := getattr(self, "_with", []):
|
|
1827
|
-
details += f" + {relationships}"
|
|
1828
|
-
|
|
1829
|
-
return f"<{details}>"
|
|
1830
|
-
|
|
1831
|
-
# serialization
|
|
1832
|
-
# underscore variants work for class instances (set up by _setup_instance_methods)
|
|
1833
|
-
|
|
1834
|
-
@classmethod
|
|
1835
|
-
def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
|
|
1836
|
-
"""
|
|
1837
|
-
Dump the object to a plain dict.
|
|
1838
|
-
|
|
1839
|
-
Can be used as both a class or instance method:
|
|
1840
|
-
- dumps the table info if it's a class
|
|
1841
|
-
- dumps the row info if it's an instance (see _as_dict)
|
|
1842
|
-
"""
|
|
1843
|
-
table = cls._ensure_table_defined()
|
|
1844
|
-
result = table.as_dict(flat, sanitize)
|
|
1845
|
-
return typing.cast(AnyDict, result)
|
|
1846
|
-
|
|
1847
|
-
@classmethod
|
|
1848
|
-
def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
|
|
1849
|
-
"""
|
|
1850
|
-
Dump the object to json.
|
|
1851
|
-
|
|
1852
|
-
Can be used as both a class or instance method:
|
|
1853
|
-
- dumps the table info if it's a class
|
|
1854
|
-
- dumps the row info if it's an instance (see _as_json)
|
|
1855
|
-
"""
|
|
1856
|
-
data = cls.as_dict(sanitize=sanitize)
|
|
1857
|
-
return as_json.encode(data, indent=indent, **kwargs)
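A hedged sketch of the dual class/instance behaviour described above (not part of the diff; `Person` is an assumed model):

schema_json = Person.as_json()     # called on the class: dumps the table definition
row_json = Person(1).as_json()     # called on an instance: dumps the row data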
|
|
1858
|
-
|
|
1859
|
-
@classmethod
|
|
1860
|
-
def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
|
|
1861
|
-
"""
|
|
1862
|
-
Dump the object to xml.
|
|
1863
|
-
|
|
1864
|
-
Can be used as both a class or instance method:
|
|
1865
|
-
- dumps the table info if it's a class
|
|
1866
|
-
- dumps the row info if it's an instance (see _as_xml)
|
|
1867
|
-
"""
|
|
1868
|
-
table = cls._ensure_table_defined()
|
|
1869
|
-
return typing.cast(str, table.as_xml(sanitize))
|
|
1870
|
-
|
|
1871
|
-
@classmethod
|
|
1872
|
-
def as_yaml(cls, sanitize: bool = True) -> str:
|
|
1873
|
-
"""
|
|
1874
|
-
Dump the object to yaml.
|
|
1875
|
-
|
|
1876
|
-
Can be used as both a class or instance method:
|
|
1877
|
-
- dumps the table info if it's a class
|
|
1878
|
-
- dumps the row info if it's an instance (see _as_yaml)
|
|
1879
|
-
"""
|
|
1880
|
-
table = cls._ensure_table_defined()
|
|
1881
|
-
return typing.cast(str, table.as_yaml(sanitize))
|
|
1882
|
-
|
|
1883
|
-
def _as_dict(
|
|
1884
|
-
self,
|
|
1885
|
-
datetime_to_str: bool = False,
|
|
1886
|
-
custom_types: typing.Iterable[type] | type | None = None,
|
|
1887
|
-
) -> AnyDict:
|
|
1888
|
-
row = self._ensure_matching_row()
|
|
1889
|
-
|
|
1890
|
-
result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
|
|
1891
|
-
|
|
1892
|
-
def asdict_method(obj: Any) -> Any: # pragma: no cover
|
|
1893
|
-
if hasattr(obj, "_as_dict"): # typedal
|
|
1894
|
-
return obj._as_dict()
|
|
1895
|
-
elif hasattr(obj, "as_dict"): # pydal
|
|
1896
|
-
return obj.as_dict()
|
|
1897
|
-
else: # something else??
|
|
1898
|
-
return obj.__dict__
|
|
1899
|
-
|
|
1900
|
-
if _with := getattr(self, "_with", None):
|
|
1901
|
-
for relationship in _with:
|
|
1902
|
-
data = self.get(relationship)
|
|
1903
|
-
|
|
1904
|
-
if isinstance(data, list):
|
|
1905
|
-
data = [asdict_method(_) for _ in data]
|
|
1906
|
-
elif data:
|
|
1907
|
-
data = asdict_method(data)
|
|
1908
|
-
|
|
1909
|
-
result[relationship] = data
|
|
1910
|
-
|
|
1911
|
-
return typing.cast(AnyDict, result)
|
|
1912
|
-
|
|
1913
|
-
def _as_json(
|
|
1914
|
-
self,
|
|
1915
|
-
default: typing.Callable[[Any], Any] = None,
|
|
1916
|
-
indent: Optional[int] = None,
|
|
1917
|
-
**kwargs: Any,
|
|
1918
|
-
) -> str:
|
|
1919
|
-
data = self._as_dict()
|
|
1920
|
-
return as_json.encode(data, default=default, indent=indent, **kwargs)
|
|
1921
|
-
|
|
1922
|
-
def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
|
|
1923
|
-
row = self._ensure_matching_row()
|
|
1924
|
-
return typing.cast(str, row.as_xml(sanitize))
|
|
1925
|
-
|
|
1926
|
-
# def _as_yaml(self, sanitize: bool = True) -> str:
|
|
1927
|
-
# row = self._ensure_matching_row()
|
|
1928
|
-
# return typing.cast(str, row.as_yaml(sanitize))
|
|
1929
|
-
|
|
1930
|
-
def __setattr__(self, key: str, value: Any) -> None:
|
|
1931
|
-
"""
|
|
1932
|
-
When setting a property on a Typed Table model instance, also update the underlying row.
|
|
1933
|
-
"""
|
|
1934
|
-
if self._row and key in self._row.__dict__ and not callable(value):
|
|
1935
|
-
# enables `row.key = value; row.update_record()`
|
|
1936
|
-
self._row[key] = value
|
|
1937
|
-
|
|
1938
|
-
super().__setattr__(key, value)
|
|
1939
|
-
|
|
1940
|
-
@classmethod
|
|
1941
|
-
def update(cls: Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
|
|
1942
|
-
"""
|
|
1943
|
-
Update one record.
|
|
1944
|
-
|
|
1945
|
-
Example:
|
|
1946
|
-
MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
|
|
1947
|
-
"""
|
|
1948
|
-
# todo: update multiple?
|
|
1949
|
-
if record := cls(query):
|
|
1950
|
-
return record.update_record(**fields)
|
|
1951
|
-
else:
|
|
1952
|
-
return None
|
|
1953
|
-
|
|
1954
|
-
def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
|
|
1955
|
-
row = self._ensure_matching_row()
|
|
1956
|
-
row.update(**fields)
|
|
1957
|
-
self.__dict__.update(**fields)
|
|
1958
|
-
return self
|
|
1959
|
-
|
|
1960
|
-
def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
|
|
1961
|
-
row = self._ensure_matching_row()
|
|
1962
|
-
new_row = row.update_record(**fields)
|
|
1963
|
-
self.update(**new_row)
|
|
1964
|
-
return self
|
|
1965
|
-
|
|
1966
|
-
def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
|
|
1967
|
-
"""
|
|
1968
|
-
Here as a placeholder for _update_record.
|
|
1969
|
-
|
|
1970
|
-
Will be replaced on instance creation!
|
|
1971
|
-
"""
|
|
1972
|
-
return self._update_record(**fields)
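A hedged sketch of the two update paths above (not part of the diff; `Person` is an assumed model, and the persistence behaviour follows the __setattr__ note earlier):

Person.update(Person.id == 1, name="New Name")  # classmethod form: query plus new values

row = Person(1)
row.name = "Other Name"
row.update_record()                             # persists the changed attribute via pydal's update_record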
|
|
1973
|
-
|
|
1974
|
-
def _delete_record(self) -> int:
|
|
1975
|
-
"""
|
|
1976
|
-
Actual logic in `pydal.helpers.classes.RecordDeleter`.
|
|
1977
|
-
"""
|
|
1978
|
-
row = self._ensure_matching_row()
|
|
1979
|
-
result = row.delete_record()
|
|
1980
|
-
self.__dict__ = {} # empty self, since row is no more.
|
|
1981
|
-
self._row = None # just to be sure
|
|
1982
|
-
self._setup_instance_methods()
|
|
1983
|
-
# ^ instance methods might've been deleted by emptying dict,
|
|
1984
|
-
# but we still want .as_dict to show an error, not the table's as_dict.
|
|
1985
|
-
return typing.cast(int, result)
|
|
1986
|
-
|
|
1987
|
-
def delete_record(self) -> int: # pragma: no cover
|
|
1988
|
-
"""
|
|
1989
|
-
Here as a placeholder for _delete_record.
|
|
1990
|
-
|
|
1991
|
-
Will be replaced on instance creation!
|
|
1992
|
-
"""
|
|
1993
|
-
return self._delete_record()
|
|
1994
|
-
|
|
1995
|
-
# __del__ is also called on the end of a scope so don't remove records on every del!!
|
|
1996
|
-
|
|
1997
|
-
# pickling:
|
|
1998
|
-
|
|
1999
|
-
def __getstate__(self) -> AnyDict:
|
|
2000
|
-
"""
|
|
2001
|
-
State to save when pickling.
|
|
2002
|
-
|
|
2003
|
-
Prevents db connection from being pickled.
|
|
2004
|
-
Similar to as_dict but without changing the data of the relationships (dill does that recursively)
|
|
2005
|
-
"""
|
|
2006
|
-
row = self._ensure_matching_row()
|
|
2007
|
-
result: AnyDict = row.as_dict()
|
|
2008
|
-
|
|
2009
|
-
if _with := getattr(self, "_with", None):
|
|
2010
|
-
result["_with"] = _with
|
|
2011
|
-
for relationship in _with:
|
|
2012
|
-
data = self.get(relationship)
|
|
2013
|
-
|
|
2014
|
-
result[relationship] = data
|
|
2015
|
-
|
|
2016
|
-
result["_row"] = self._row.as_json() if self._row else ""
|
|
2017
|
-
return result
|
|
2018
|
-
|
|
2019
|
-
def __setstate__(self, state: AnyDict) -> None:
|
|
2020
|
-
"""
|
|
2021
|
-
Used by dill when loading from a bytestring.
|
|
2022
|
-
"""
|
|
2023
|
-
# as_dict also includes table info, so dump as json to only get the actual row data
|
|
2024
|
-
# then create a new (more empty) row object:
|
|
2025
|
-
state["_row"] = Row(json.loads(state["_row"]))
|
|
2026
|
-
self.__dict__ |= state
|
|
2027
|
-
|
|
2028
|
-
@classmethod
|
|
2029
|
-
def _sql(cls) -> str:
|
|
2030
|
-
"""
|
|
2031
|
-
Generate SQL Schema for this table via pydal2sql (if 'migrations' extra is installed).
|
|
2032
|
-
"""
|
|
2033
|
-
try:
|
|
2034
|
-
import pydal2sql
|
|
2035
|
-
except ImportError as e: # pragma: no cover
|
|
2036
|
-
raise RuntimeError("Can not generate SQL without the 'migration' extra or `pydal2sql` installed!") from e
|
|
2037
|
-
|
|
2038
|
-
return pydal2sql.generate_sql(cls)
|
|
2039
|
-
|
|
2040
|
-
def render(self, fields=None, compact=False) -> Self:
|
|
2041
|
-
row = copy.deepcopy(self)
|
|
2042
|
-
keys = list(row)
|
|
2043
|
-
if not fields:
|
|
2044
|
-
fields = [self._table[f] for f in self._table._fields]
|
|
2045
|
-
fields = [f for f in fields if isinstance(f, Field) and f.represent]
|
|
2046
|
-
|
|
2047
|
-
for field in fields:
|
|
2048
|
-
if field._table == self._table:
|
|
2049
|
-
row[field.name] = self._db.represent(
|
|
2050
|
-
"rows_render",
|
|
2051
|
-
field,
|
|
2052
|
-
row[field.name],
|
|
2053
|
-
row,
|
|
2054
|
-
)
|
|
2055
|
-
# else: relationship, different logic:
|
|
2056
|
-
|
|
2057
|
-
for relation_name in getattr(row, "_with", []):
|
|
2058
|
-
if relation := self._relationships.get(relation_name):
|
|
2059
|
-
relation_table = relation.table
|
|
2060
|
-
|
|
2061
|
-
relation_row = row[relation_name]
|
|
2062
|
-
|
|
2063
|
-
if isinstance(relation_row, list):
|
|
2064
|
-
# list of rows
|
|
2065
|
-
combined = []
|
|
2066
|
-
|
|
2067
|
-
for related_og in relation_row:
|
|
2068
|
-
related = copy.deepcopy(related_og)
|
|
2069
|
-
for fieldname in related:
|
|
2070
|
-
field = relation_table[fieldname]
|
|
2071
|
-
related[field.name] = self._db.represent(
|
|
2072
|
-
"rows_render",
|
|
2073
|
-
field,
|
|
2074
|
-
related[field.name],
|
|
2075
|
-
related,
|
|
2076
|
-
)
|
|
2077
|
-
combined.append(related)
|
|
2078
|
-
|
|
2079
|
-
row[relation_name] = combined
|
|
2080
|
-
else:
|
|
2081
|
-
# 1 row
|
|
2082
|
-
for fieldname in relation_row:
|
|
2083
|
-
field = relation_table[fieldname]
|
|
2084
|
-
row[relation_name][fieldname] = self._db.represent(
|
|
2085
|
-
"rows_render",
|
|
2086
|
-
field,
|
|
2087
|
-
relation_row[field.name],
|
|
2088
|
-
relation_row,
|
|
2089
|
-
)
|
|
2090
|
-
|
|
2091
|
-
if compact and len(keys) == 1 and keys[0] != "_extra": # pragma: no cover
|
|
2092
|
-
return row[keys[0]]
|
|
2093
|
-
return row
|
|
2094
|
-
|
|
2095
|
-
|
|
2096
|
-
# backwards compat:
|
|
2097
|
-
TypedRow = TypedTable
|
|
2098
|
-
|
|
2099
|
-
|
|
2100
|
-
class TypedRows(typing.Collection[T_MetaInstance], Rows):
|
|
2101
|
-
"""
|
|
2102
|
-
Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
|
|
2103
|
-
"""
|
|
2104
|
-
|
|
2105
|
-
records: dict[int, T_MetaInstance]
|
|
2106
|
-
# _rows: Rows
|
|
2107
|
-
model: Type[T_MetaInstance]
|
|
2108
|
-
metadata: Metadata
|
|
2109
|
-
|
|
2110
|
-
# pseudo-properties: actually stored in _rows
|
|
2111
|
-
db: TypeDAL
|
|
2112
|
-
colnames: list[str]
|
|
2113
|
-
fields: list[Field]
|
|
2114
|
-
colnames_fields: list[Field]
|
|
2115
|
-
response: list[tuple[Any, ...]]
|
|
2116
|
-
|
|
2117
|
-
def __init__(
|
|
2118
|
-
self,
|
|
2119
|
-
rows: Rows,
|
|
2120
|
-
model: Type[T_MetaInstance],
|
|
2121
|
-
records: dict[int, T_MetaInstance] = None,
|
|
2122
|
-
metadata: Metadata = None,
|
|
2123
|
-
raw: dict[int, list[Row]] = None,
|
|
2124
|
-
) -> None:
|
|
2125
|
-
"""
|
|
2126
|
-
Should not be called manually!
|
|
2127
|
-
|
|
2128
|
-
Normally, the `records` from an existing `Rows` object are used
|
|
2129
|
-
but these can be overwritten with a `records` dict.
|
|
2130
|
-
`metadata` can be any (un)structured data
|
|
2131
|
-
`model` is a Typed Table class
|
|
2132
|
-
"""
|
|
2133
|
-
|
|
2134
|
-
def _get_id(row: Row) -> int:
|
|
2135
|
-
"""
|
|
2136
|
-
Try to find the id field in a row.
|
|
2137
|
-
|
|
2138
|
-
If _extra exists, the row changes:
|
|
2139
|
-
<Row {'test_relationship': {'id': 1}, '_extra': {'COUNT("test_relationship"."querytable")': 8}}>
|
|
2140
|
-
"""
|
|
2141
|
-
if idx := getattr(row, "id", None):
|
|
2142
|
-
return typing.cast(int, idx)
|
|
2143
|
-
elif main := getattr(row, str(model), None):
|
|
2144
|
-
return typing.cast(int, main.id)
|
|
2145
|
-
else: # pragma: no cover
|
|
2146
|
-
raise NotImplementedError(f"`id` could not be found for {row}")
|
|
2147
|
-
|
|
2148
|
-
records = records or {_get_id(row): model(row) for row in rows}
|
|
2149
|
-
raw = raw or {}
|
|
2150
|
-
|
|
2151
|
-
for idx, entity in records.items():
|
|
2152
|
-
entity._rows = tuple(raw.get(idx, []))
|
|
2153
|
-
|
|
2154
|
-
super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
|
|
2155
|
-
self.model = model
|
|
2156
|
-
self.metadata = metadata or {}
|
|
2157
|
-
self.colnames = rows.colnames
|
|
2158
|
-
|
|
2159
|
-
def __len__(self) -> int:
|
|
2160
|
-
"""
|
|
2161
|
-
Return the count of rows.
|
|
2162
|
-
"""
|
|
2163
|
-
return len(self.records)
|
|
2164
|
-
|
|
2165
|
-
def __iter__(self) -> typing.Iterator[T_MetaInstance]:
|
|
2166
|
-
"""
|
|
2167
|
-
Loop through the rows.
|
|
2168
|
-
"""
|
|
2169
|
-
yield from self.records.values()
|
|
2170
|
-
|
|
2171
|
-
def __contains__(self, ind: Any) -> bool:
|
|
2172
|
-
"""
|
|
2173
|
-
Check if an id exists in this result set.
|
|
2174
|
-
"""
|
|
2175
|
-
return ind in self.records
|
|
2176
|
-
|
|
2177
|
-
def first(self) -> T_MetaInstance | None:
|
|
2178
|
-
"""
|
|
2179
|
-
Get the row with the lowest id.
|
|
2180
|
-
"""
|
|
2181
|
-
if not self.records:
|
|
2182
|
-
return None
|
|
2183
|
-
|
|
2184
|
-
return next(iter(self))
|
|
2185
|
-
|
|
2186
|
-
def last(self) -> T_MetaInstance | None:
|
|
2187
|
-
"""
|
|
2188
|
-
Get the row with the highest id.
|
|
2189
|
-
"""
|
|
2190
|
-
if not self.records:
|
|
2191
|
-
return None
|
|
2192
|
-
|
|
2193
|
-
max_id = max(self.records.keys())
|
|
2194
|
-
return self[max_id]
|
|
2195
|
-
|
|
2196
|
-
def find(
|
|
2197
|
-
self,
|
|
2198
|
-
f: typing.Callable[[T_MetaInstance], Query],
|
|
2199
|
-
limitby: tuple[int, int] = None,
|
|
2200
|
-
) -> "TypedRows[T_MetaInstance]":
|
|
2201
|
-
"""
|
|
2202
|
-
Returns a new Rows object, a subset of the original object, filtered by the function `f`.
|
|
2203
|
-
"""
|
|
2204
|
-
if not self.records:
|
|
2205
|
-
return self.__class__(self, self.model, {})
|
|
2206
|
-
|
|
2207
|
-
records = {}
|
|
2208
|
-
if limitby:
|
|
2209
|
-
_min, _max = limitby
|
|
2210
|
-
else:
|
|
2211
|
-
_min, _max = 0, len(self)
|
|
2212
|
-
count = 0
|
|
2213
|
-
for i, row in self.records.items():
|
|
2214
|
-
if f(row):
|
|
2215
|
-
if _min <= count:
|
|
2216
|
-
records[i] = row
|
|
2217
|
-
count += 1
|
|
2218
|
-
if count == _max:
|
|
2219
|
-
break
|
|
2220
|
-
|
|
2221
|
-
return self.__class__(self, self.model, records)
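A hedged sketch of filtering an already-collected result (not part of the diff; `rows` is an assumed TypedRows[Person] result):

first = rows.first()                       # row with the lowest id, or None
adults = rows.find(lambda p: p.age >= 18)  # filtered subset, still a TypedRows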
|
|
2222
|
-
|
|
2223
|
-
def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
|
|
2224
|
-
"""
|
|
2225
|
-
Removes elements from the calling Rows object, filtered by the function `f`, \
|
|
2226
|
-
and returns a new Rows object containing the removed elements.
|
|
2227
|
-
"""
|
|
2228
|
-
if not self.records:
|
|
2229
|
-
return self.__class__(self, self.model, {})
|
|
2230
|
-
removed = {}
|
|
2231
|
-
to_remove = []
|
|
2232
|
-
for i in self.records:
|
|
2233
|
-
row = self[i]
|
|
2234
|
-
if f(row):
|
|
2235
|
-
removed[i] = self.records[i]
|
|
2236
|
-
to_remove.append(i)
|
|
2237
|
-
|
|
2238
|
-
[self.records.pop(i) for i in to_remove]
|
|
2239
|
-
|
|
2240
|
-
return self.__class__(
|
|
2241
|
-
self,
|
|
2242
|
-
self.model,
|
|
2243
|
-
removed,
|
|
2244
|
-
)
|
|
2245
|
-
|
|
2246
|
-
def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
|
|
2247
|
-
"""
|
|
2248
|
-
Returns a list of sorted elements (not sorted in place).
|
|
2249
|
-
"""
|
|
2250
|
-
return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
|
|
2251
|
-
|
|
2252
|
-
def __str__(self) -> str:
|
|
2253
|
-
"""
|
|
2254
|
-
Simple string representation.
|
|
2255
|
-
"""
|
|
2256
|
-
return f"<TypedRows with {len(self)} records>"
|
|
2257
|
-
|
|
2258
|
-
def __repr__(self) -> str:
|
|
2259
|
-
"""
|
|
2260
|
-
Print a table on repr().
|
|
2261
|
-
"""
|
|
2262
|
-
data = self.as_dict()
|
|
2263
|
-
try:
|
|
2264
|
-
headers = list(next(iter(data.values())).keys())
|
|
2265
|
-
except StopIteration:
|
|
2266
|
-
headers = []
|
|
2267
|
-
|
|
2268
|
-
return mktable(data, headers)
|
|
2269
|
-
|
|
2270
|
-
def group_by_value(
|
|
2271
|
-
self,
|
|
2272
|
-
*fields: "str | Field | TypedField[T]",
|
|
2273
|
-
one_result: bool = False,
|
|
2274
|
-
**kwargs: Any,
|
|
2275
|
-
) -> dict[T, list[T_MetaInstance]]:
|
|
2276
|
-
"""
|
|
2277
|
-
Group the rows by a specific field (which will be the dict key).
|
|
2278
|
-
"""
|
|
2279
|
-
kwargs["one_result"] = one_result
|
|
2280
|
-
result = super().group_by_value(*fields, **kwargs)
|
|
2281
|
-
return typing.cast(dict[T, list[T_MetaInstance]], result)
|
|
2282
|
-
|
|
2283
|
-
def as_csv(self) -> str:
|
|
2284
|
-
"""
|
|
2285
|
-
Dump the data to csv.
|
|
2286
|
-
"""
|
|
2287
|
-
return typing.cast(str, super().as_csv())
|
|
2288
|
-
|
|
2289
|
-
def as_dict(
|
|
2290
|
-
self,
|
|
2291
|
-
key: str | Field = None,
|
|
2292
|
-
compact: bool = False,
|
|
2293
|
-
storage_to_dict: bool = False,
|
|
2294
|
-
datetime_to_str: bool = False,
|
|
2295
|
-
custom_types: list[type] = None,
|
|
2296
|
-
) -> dict[int, AnyDict]:
|
|
2297
|
-
"""
|
|
2298
|
-
Get the data in a dict of dicts.
|
|
2299
|
-
"""
|
|
2300
|
-
if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
|
|
2301
|
-
# functionality not guaranteed
|
|
2302
|
-
if isinstance(key, Field):
|
|
2303
|
-
key = key.name
|
|
2304
|
-
|
|
2305
|
-
return typing.cast(
|
|
2306
|
-
dict[int, AnyDict],
|
|
2307
|
-
super().as_dict(
|
|
2308
|
-
key or "id",
|
|
2309
|
-
compact,
|
|
2310
|
-
storage_to_dict,
|
|
2311
|
-
datetime_to_str,
|
|
2312
|
-
custom_types,
|
|
2313
|
-
),
|
|
2314
|
-
)
|
|
2315
|
-
|
|
2316
|
-
return {k: v.as_dict() for k, v in self.records.items()}
|
|
2317
|
-
|
|
2318
|
-
def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
|
|
2319
|
-
"""
|
|
2320
|
-
Turn the data into a dict and then dump to JSON.
|
|
2321
|
-
"""
|
|
2322
|
-
data = self.as_list()
|
|
2323
|
-
|
|
2324
|
-
return as_json.encode(data, default=default, indent=indent, **kwargs)
|
|
2325
|
-
|
|
2326
|
-
def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
|
|
2327
|
-
"""
|
|
2328
|
-
Turn the data into a dict and then dump to JSON.
|
|
2329
|
-
"""
|
|
2330
|
-
return self.as_json(default=default, indent=indent, **kwargs)
|
|
2331
|
-
|
|
2332
|
-
def as_list(
|
|
2333
|
-
self,
|
|
2334
|
-
compact: bool = False,
|
|
2335
|
-
storage_to_dict: bool = False,
|
|
2336
|
-
datetime_to_str: bool = False,
|
|
2337
|
-
custom_types: list[type] = None,
|
|
2338
|
-
) -> list[AnyDict]:
|
|
2339
|
-
"""
|
|
2340
|
-
Get the data in a list of dicts.
|
|
2341
|
-
"""
|
|
2342
|
-
if any([compact, storage_to_dict, datetime_to_str, custom_types]):
|
|
2343
|
-
return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
|
|
2344
|
-
|
|
2345
|
-
return [_.as_dict() for _ in self.records.values()]
|
|
2346
|
-
|
|
2347
|
-
def __getitem__(self, item: int) -> T_MetaInstance:
|
|
2348
|
-
"""
|
|
2349
|
-
You can get a specific row by ID from a typedrows by using rows[idx] notation.
|
|
2350
|
-
|
|
2351
|
-
Since pydal's implementation differs (they expect a list instead of a dict with id keys),
|
|
2352
|
-
using rows[0] will return the first row, regardless of its id.
|
|
2353
|
-
"""
|
|
2354
|
-
try:
|
|
2355
|
-
return self.records[item]
|
|
2356
|
-
except KeyError as e:
|
|
2357
|
-
if item == 0 and (row := self.first()):
|
|
2358
|
-
# special case: pydal internals think Rows.records is a list, not a dict
|
|
2359
|
-
return row
|
|
2360
|
-
|
|
2361
|
-
raise e
|
|
2362
|
-
|
|
2363
|
-
def get(self, item: int) -> typing.Optional[T_MetaInstance]:
|
|
2364
|
-
"""
|
|
2365
|
-
Get a row by ID, or receive None if it isn't in this result set.
|
|
2366
|
-
"""
|
|
2367
|
-
return self.records.get(item)
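A hedged sketch of id-based access (not part of the diff; `rows` is an assumed TypedRows result):

row = rows[42]      # looks up record id 42; rows[0] falls back to .first()
row = rows.get(42)  # returns None instead of raising when the id is absent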
|
|
2368
|
-
|
|
2369
|
-
def update(self, **new_values: Any) -> bool:
|
|
2370
|
-
"""
|
|
2371
|
-
Update the current rows in the database with new_values.
|
|
2372
|
-
"""
|
|
2373
|
-
# cast to make mypy understand .id is a TypedField and not an int!
|
|
2374
|
-
table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
|
|
2375
|
-
|
|
2376
|
-
ids = set(self.column("id"))
|
|
2377
|
-
query = table.id.belongs(ids)
|
|
2378
|
-
return bool(self.db(query).update(**new_values))
|
|
2379
|
-
|
|
2380
|
-
def delete(self) -> bool:
|
|
2381
|
-
"""
|
|
2382
|
-
Delete the currently selected rows from the database.
|
|
2383
|
-
"""
|
|
2384
|
-
# cast to make mypy understand .id is a TypedField and not an int!
|
|
2385
|
-
table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
|
|
2386
|
-
|
|
2387
|
-
ids = set(self.column("id"))
|
|
2388
|
-
query = table.id.belongs(ids)
|
|
2389
|
-
return bool(self.db(query).delete())
|
|
2390
|
-
|
|
2391
|
-
def join(
|
|
2392
|
-
self,
|
|
2393
|
-
field: "Field | TypedField[Any]",
|
|
2394
|
-
name: str = None,
|
|
2395
|
-
constraint: Query = None,
|
|
2396
|
-
fields: list[str | Field] = None,
|
|
2397
|
-
orderby: Optional[str | Field] = None,
|
|
2398
|
-
) -> T_MetaInstance:
|
|
2399
|
-
"""
|
|
2400
|
-
This can be used to JOIN with some relationships after the initial select.
|
|
2401
|
-
|
|
2402
|
-
Using the querybuilder's .join() method is preferred!
|
|
2403
|
-
"""
|
|
2404
|
-
result = super().join(field, name, constraint, fields or [], orderby)
|
|
2405
|
-
return typing.cast(T_MetaInstance, result)
|
|
2406
|
-
|
|
2407
|
-
def export_to_csv_file(
|
|
2408
|
-
self,
|
|
2409
|
-
ofile: typing.TextIO,
|
|
2410
|
-
null: Any = "<NULL>",
|
|
2411
|
-
delimiter: str = ",",
|
|
2412
|
-
quotechar: str = '"',
|
|
2413
|
-
quoting: int = csv.QUOTE_MINIMAL,
|
|
2414
|
-
represent: bool = False,
|
|
2415
|
-
colnames: list[str] = None,
|
|
2416
|
-
write_colnames: bool = True,
|
|
2417
|
-
*args: Any,
|
|
2418
|
-
**kwargs: Any,
|
|
2419
|
-
) -> None:
|
|
2420
|
-
"""
|
|
2421
|
-
Shadow export_to_csv_file from Rows, but with typing.
|
|
2422
|
-
|
|
2423
|
-
See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
|
|
2424
|
-
"""
|
|
2425
|
-
super().export_to_csv_file(
|
|
2426
|
-
ofile,
|
|
2427
|
-
null,
|
|
2428
|
-
*args,
|
|
2429
|
-
delimiter=delimiter,
|
|
2430
|
-
quotechar=quotechar,
|
|
2431
|
-
quoting=quoting,
|
|
2432
|
-
represent=represent,
|
|
2433
|
-
colnames=colnames or self.colnames,
|
|
2434
|
-
write_colnames=write_colnames,
|
|
2435
|
-
**kwargs,
|
|
2436
|
-
)
|
|
2437
|
-
|
|
2438
|
-
@classmethod
|
|
2439
|
-
def from_rows(
|
|
2440
|
-
cls,
|
|
2441
|
-
rows: Rows,
|
|
2442
|
-
model: Type[T_MetaInstance],
|
|
2443
|
-
metadata: Metadata = None,
|
|
2444
|
-
) -> "TypedRows[T_MetaInstance]":
|
|
2445
|
-
"""
|
|
2446
|
-
Internal method to convert a Rows object to a TypedRows.
|
|
2447
|
-
"""
|
|
2448
|
-
return cls(rows, model, metadata=metadata)
|
|
2449
|
-
|
|
2450
|
-
def __getstate__(self) -> AnyDict:
|
|
2451
|
-
"""
|
|
2452
|
-
Used by dill to dump to bytes (exclude db connection etc).
|
|
2453
|
-
"""
|
|
2454
|
-
return {
|
|
2455
|
-
"metadata": json.dumps(self.metadata, default=str),
|
|
2456
|
-
"records": self.records,
|
|
2457
|
-
"model": str(self.model._table),
|
|
2458
|
-
"colnames": self.colnames,
|
|
2459
|
-
}
|
|
2460
|
-
|
|
2461
|
-
def __setstate__(self, state: AnyDict) -> None:
|
|
2462
|
-
"""
|
|
2463
|
-
Used by dill when loading from a bytestring.
|
|
2464
|
-
"""
|
|
2465
|
-
state["metadata"] = json.loads(state["metadata"])
|
|
2466
|
-
self.__dict__.update(state)
|
|
2467
|
-
# db etc. set after undill by caching.py
|
|
2468
|
-
|
|
2469
|
-
def render(self, i=None, fields=None) -> typing.Generator[T_MetaInstance, None, None]:
|
|
2470
|
-
"""
|
|
2471
|
-
Takes an index and returns a copy of the indexed row with values
|
|
2472
|
-
transformed via the "represent" attributes of the associated fields.
|
|
2473
|
-
|
|
2474
|
-
Args:
|
|
2475
|
-
i: index. If not specified, a generator is returned for iteration
|
|
2476
|
-
over all the rows.
|
|
2477
|
-
fields: a list of fields to transform (if None, all fields with
|
|
2478
|
-
"represent" attributes will be transformed)
|
|
2479
|
-
"""
|
|
2480
|
-
if i is None:
|
|
2481
|
-
# difference: uses .keys() instead of index
|
|
2482
|
-
return (self.render(i, fields=fields) for i in self.records.keys())
|
|
2483
|
-
|
|
2484
|
-
if not self.db.has_representer("rows_render"): # pragma: no cover
|
|
2485
|
-
raise RuntimeError(
|
|
2486
|
-
"Rows.render() needs a `rows_render` representer in DAL instance",
|
|
2487
|
-
)
|
|
2488
|
-
|
|
2489
|
-
row = self.records[i]
|
|
2490
|
-
return row.render(fields, compact=self.compact)
|
|
2491
|
-
|
|
2492
|
-
|
|
2493
|
-
from .caching import ( # noqa: E402
|
|
2494
|
-
_remove_cache,
|
|
459
|
+
from .caching import ( # isort: skip # noqa: E402
|
|
2495
460
|
_TypedalCache,
|
|
2496
461
|
_TypedalCacheDependency,
|
|
2497
|
-
create_and_hash_cache_key,
|
|
2498
|
-
get_expire,
|
|
2499
|
-
load_from_cache,
|
|
2500
|
-
save_to_cache,
|
|
2501
462
|
)
|
|
2502
|
-
|
|
2503
|
-
|
|
2504
|
-
def normalize_table_keys(row: Row, pattern: re.Pattern = re.compile(r"^([a-zA-Z_]+)_(\d{5,})$")) -> Row:
|
|
2505
|
-
"""
|
|
2506
|
-
Normalize table keys in a PyDAL Row object by stripping numeric hash suffixes
|
|
2507
|
-
from table names, only if the suffix is 5 or more digits.
|
|
2508
|
-
|
|
2509
|
-
For example:
|
|
2510
|
-
Row({'articles_12345': {...}}) -> Row({'articles': {...}})
|
|
2511
|
-
Row({'articles_123': {...}}) -> unchanged
|
|
2512
|
-
|
|
2513
|
-
Returns:
|
|
2514
|
-
Row: A new Row object with normalized keys.
|
|
2515
|
-
"""
|
|
2516
|
-
new_data: dict[str, Any] = {}
|
|
2517
|
-
for key, value in row.items():
|
|
2518
|
-
if match := pattern.match(key):
|
|
2519
|
-
base, _suffix = match.groups()
|
|
2520
|
-
normalized_key = base
|
|
2521
|
-
new_data[normalized_key] = value
|
|
2522
|
-
else:
|
|
2523
|
-
new_data[key] = value
|
|
2524
|
-
return Row(new_data)
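A hedged sketch of the normalization described in the docstring (not part of the diff):

row = Row({"articles_12345": {"id": 1}, "articles_123": {"id": 2}})
normalize_table_keys(row)  # -> Row({"articles": {"id": 1}, "articles_123": {"id": 2}})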
|
|
2525
|
-
|
|
2526
|
-
|
|
2527
|
-
class QueryBuilder(typing.Generic[T_MetaInstance]):
|
|
2528
|
-
"""
|
|
2529
|
-
Abstraction on top of pydal's query system.
|
|
2530
|
-
"""
|
|
2531
|
-
|
|
2532
|
-
model: Type[T_MetaInstance]
|
|
2533
|
-
query: Query
|
|
2534
|
-
select_args: list[Any]
|
|
2535
|
-
select_kwargs: SelectKwargs
|
|
2536
|
-
relationships: dict[str, Relationship[Any]]
|
|
2537
|
-
metadata: Metadata
|
|
2538
|
-
|
|
2539
|
-
def __init__(
|
|
2540
|
-
self,
|
|
2541
|
-
model: Type[T_MetaInstance],
|
|
2542
|
-
add_query: Optional[Query] = None,
|
|
2543
|
-
select_args: Optional[list[Any]] = None,
|
|
2544
|
-
select_kwargs: Optional[SelectKwargs] = None,
|
|
2545
|
-
relationships: dict[str, Relationship[Any]] = None,
|
|
2546
|
-
metadata: Metadata = None,
|
|
2547
|
-
):
|
|
2548
|
-
"""
|
|
2549
|
-
Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable.
|
|
2550
|
-
|
|
2551
|
-
Example:
|
|
2552
|
-
MyTable.where(...) -> QueryBuilder[MyTable]
|
|
2553
|
-
"""
|
|
2554
|
-
self.model = model
|
|
2555
|
-
table = model._ensure_table_defined()
|
|
2556
|
-
default_query = typing.cast(Query, table.id > 0)
|
|
2557
|
-
self.query = add_query or default_query
|
|
2558
|
-
self.select_args = select_args or []
|
|
2559
|
-
self.select_kwargs = select_kwargs or {}
|
|
2560
|
-
self.relationships = relationships or {}
|
|
2561
|
-
self.metadata = metadata or {}
|
|
2562
|
-
|
|
2563
|
-
def __str__(self) -> str:
|
|
2564
|
-
"""
|
|
2565
|
-
Simple string representation for the query builder.
|
|
2566
|
-
"""
|
|
2567
|
-
return f"QueryBuilder for {self.model}"
|
|
2568
|
-
|
|
2569
|
-
def __repr__(self) -> str:
|
|
2570
|
-
"""
|
|
2571
|
-
Advanced string representation for the query builder.
|
|
2572
|
-
"""
|
|
2573
|
-
return (
|
|
2574
|
-
f"<QueryBuilder for {self.model} with "
|
|
2575
|
-
f"{len(self.select_args)} select args; "
|
|
2576
|
-
f"{len(self.select_kwargs)} select kwargs; "
|
|
2577
|
-
f"{len(self.relationships)} relationships; "
|
|
2578
|
-
f"query: {bool(self.query)}; "
|
|
2579
|
-
f"metadata: {self.metadata}; "
|
|
2580
|
-
f">"
|
|
2581
|
-
)
|
|
2582
|
-
|
|
2583
|
-
def __bool__(self) -> bool:
|
|
2584
|
-
"""
|
|
2585
|
-
Querybuilder is truthy if it has any conditions.
|
|
2586
|
-
"""
|
|
2587
|
-
table = self.model._ensure_table_defined()
|
|
2588
|
-
default_query = typing.cast(Query, table.id > 0)
|
|
2589
|
-
return any(
|
|
2590
|
-
[
|
|
2591
|
-
self.query != default_query,
|
|
2592
|
-
self.select_args,
|
|
2593
|
-
self.select_kwargs,
|
|
2594
|
-
self.relationships,
|
|
2595
|
-
self.metadata,
|
|
2596
|
-
],
|
|
2597
|
-
)
|
|
2598
|
-
|
|
2599
|
-
def _extend(
|
|
2600
|
-
self,
|
|
2601
|
-
add_query: Optional[Query] = None,
|
|
2602
|
-
overwrite_query: Optional[Query] = None,
|
|
2603
|
-
select_args: Optional[list[Any]] = None,
|
|
2604
|
-
select_kwargs: Optional[SelectKwargs] = None,
|
|
2605
|
-
relationships: dict[str, Relationship[Any]] = None,
|
|
2606
|
-
metadata: Metadata = None,
|
|
2607
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2608
|
-
return QueryBuilder(
|
|
2609
|
-
self.model,
|
|
2610
|
-
(add_query & self.query) if add_query else overwrite_query or self.query,
|
|
2611
|
-
(self.select_args + select_args) if select_args else self.select_args,
|
|
2612
|
-
(self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
|
|
2613
|
-
(self.relationships | relationships) if relationships else self.relationships,
|
|
2614
|
-
(self.metadata | (metadata or {})) if metadata else self.metadata,
|
|
2615
|
-
)
|
|
2616
|
-
|
|
2617
|
-
def select(self, *fields: Any, **options: Unpack[SelectKwargs]) -> "QueryBuilder[T_MetaInstance]":
|
|
2618
|
-
"""
|
|
2619
|
-
Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
|
|
2620
|
-
|
|
2621
|
-
Options:
|
|
2622
|
-
paraphrased from the web2py pydal docs,
|
|
2623
|
-
For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
|
|
2624
|
-
|
|
2625
|
-
orderby: field(s) to order by. Supported:
|
|
2626
|
-
table.name - sort by name, ascending
|
|
2627
|
-
~table.name - sort by name, descending
|
|
2628
|
-
<random> - sort randomly
|
|
2629
|
-
table.name|table.id - sort by two fields (first name, then id)
|
|
2630
|
-
|
|
2631
|
-
groupby, having: together with orderby:
|
|
2632
|
-
groupby can be a field (e.g. table.name) to group records by
|
|
2633
|
-
having can be a query, only those `having` the condition are grouped
|
|
2634
|
-
|
|
2635
|
-
limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
|
|
2636
|
-
distinct: bool/field. Only select rows that differ
|
|
2637
|
-
orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
|
|
2638
|
-
join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
|
|
2639
|
-
left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
|
|
2640
|
-
cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
|
|
2641
|
-
"""
|
|
2642
|
-
return self._extend(select_args=list(fields), select_kwargs=options)
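A hedged sketch of .select() with a few of the options listed above (not part of the diff; `Person` is an assumed model):

builder = Person.select(Person.id, Person.name, orderby=~Person.name, distinct=True)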
|
|
2643
|
-
|
|
2644
|
-
def where(
|
|
2645
|
-
self,
|
|
2646
|
-
*queries_or_lambdas: Query | typing.Callable[[Type[T_MetaInstance]], Query] | dict,
|
|
2647
|
-
**filters: Any,
|
|
2648
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2649
|
-
"""
|
|
2650
|
-
Extend the builder's query.
|
|
2651
|
-
|
|
2652
|
-
Can be used in multiple ways:
|
|
2653
|
-
.where(Query) -> with a direct query such as `Table.id == 5`
|
|
2654
|
-
.where(lambda table: table.id == 5) -> with a query via a lambda
|
|
2655
|
-
.where(id=5) -> via keyword arguments
|
|
2656
|
-
|
|
2657
|
-
When using multiple where's, they will be ANDed:
|
|
2658
|
-
.where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
|
|
2659
|
-
When passing multiple queries to a single .where, they will be ORed:
|
|
2660
|
-
.where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
|
|
2661
|
-
"""
|
|
2662
|
-
new_query = self.query
|
|
2663
|
-
table = self.model._ensure_table_defined()
|
|
2664
|
-
|
|
2665
|
-
queries_or_lambdas = (
|
|
2666
|
-
*queries_or_lambdas,
|
|
2667
|
-
filters,
|
|
2668
|
-
)
|
|
2669
|
-
|
|
2670
|
-
subquery: DummyQuery | Query = DummyQuery()
|
|
2671
|
-
for query_part in queries_or_lambdas:
|
|
2672
|
-
if isinstance(query_part, _Query):
|
|
2673
|
-
subquery |= typing.cast(Query, query_part)
|
|
2674
|
-
elif callable(query_part):
|
|
2675
|
-
if result := query_part(self.model):
|
|
2676
|
-
subquery |= result
|
|
2677
|
-
elif isinstance(query_part, (Field, _Field)) or is_typed_field(query_part):
|
|
2678
|
-
subquery |= typing.cast(Query, query_part != None)
|
|
2679
|
-
elif isinstance(query_part, dict):
|
|
2680
|
-
subsubquery = DummyQuery()
|
|
2681
|
-
for field, value in query_part.items():
|
|
2682
|
-
subsubquery &= table[field] == value
|
|
2683
|
-
if subsubquery:
|
|
2684
|
-
subquery |= subsubquery
|
|
2685
|
-
else:
|
|
2686
|
-
raise ValueError(f"Unexpected query type ({type(query_part)}).")
|
|
2687
|
-
|
|
2688
|
-
if subquery:
|
|
2689
|
-
new_query &= subquery
|
|
2690
|
-
|
|
2691
|
-
return self._extend(overwrite_query=new_query)
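A hedged sketch of the three .where() forms described in the docstring above (not part of the diff; `Person` is an assumed model):

builder = Person.where(Person.age >= 18)         # direct query
builder = builder.where(lambda t: t.name != "")  # lambda form, ANDed onto the existing query
builder = builder.where(name="Bob")              # keyword form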
|
|
2692
|
-
|
|
2693
|
-
def join(
|
|
2694
|
-
self,
|
|
2695
|
-
*fields: str | Type[TypedTable],
|
|
2696
|
-
method: JOIN_OPTIONS = None,
|
|
2697
|
-
on: OnQuery | list[Expression] | Expression = None,
|
|
2698
|
-
condition: Condition = None,
|
|
2699
|
-
condition_and: Condition = None,
|
|
2700
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2701
|
-
"""
|
|
2702
|
-
Include relationship fields in the result.
|
|
2703
|
-
|
|
2704
|
-
`fields` can be names of Relationships on the current model.
|
|
2705
|
-
If no fields are passed, all will be used.
|
|
2706
|
-
|
|
2707
|
-
By default, the `method` defined in the relationship is used.
|
|
2708
|
-
This can be overwritten with the `method` keyword argument (left or inner)
|
|
2709
|
-
|
|
2710
|
-
`condition_and` can be used to add extra conditions to an inner join.
|
|
2711
|
-
"""
|
|
2712
|
-
# todo: allow limiting amount of related rows returned for join?
|
|
2713
|
-
# todo: it would be nice if 'fields' could be an actual relationship
|
|
2714
|
-
# (Article.tags = list[Tag]) and you could change the .condition and .on
|
|
2715
|
-
# this could deprecate condition_and
|
|
2716
|
-
|
|
2717
|
-
relationships = self.model.get_relationships()
|
|
2718
|
-
|
|
2719
|
-
if condition and on:
|
|
2720
|
-
raise ValueError("condition and on can not be used together!")
|
|
2721
|
-
elif condition:
|
|
2722
|
-
if len(fields) != 1:
|
|
2723
|
-
raise ValueError("join(field, condition=...) can only be used with exactly one field!")
|
|
2724
|
-
|
|
2725
|
-
if isinstance(condition, pydal.objects.Query):
|
|
2726
|
-
condition = as_lambda(condition)
|
|
2727
|
-
|
|
2728
|
-
relationships = {
|
|
2729
|
-
str(fields[0]): Relationship(fields[0], condition=condition, join=method, condition_and=condition_and)
|
|
2730
|
-
}
|
|
2731
|
-
elif on:
|
|
2732
|
-
if len(fields) != 1:
|
|
2733
|
-
raise ValueError("join(field, on=...) can only be used with exactly one field!")
|
|
2734
|
-
|
|
2735
|
-
if isinstance(on, pydal.objects.Expression):
|
|
2736
|
-
on = [on]
|
|
2737
|
-
|
|
2738
|
-
if isinstance(on, list):
|
|
2739
|
-
on = as_lambda(on)
|
|
2740
|
-
relationships = {str(fields[0]): Relationship(fields[0], on=on, join=method, condition_and=condition_and)}
|
|
2741
|
-
|
|
2742
|
-
else:
|
|
2743
|
-
if fields:
|
|
2744
|
-
# join on every relationship
|
|
2745
|
-
relationships = {str(k): relationships[str(k)].clone(condition_and=condition_and) for k in fields}
|
|
2746
|
-
|
|
2747
|
-
if method:
|
|
2748
|
-
relationships = {
|
|
2749
|
-
str(k): r.clone(join=method, condition_and=condition_and) for k, r in relationships.items()
|
|
2750
|
-
}
|
|
2751
|
-
|
|
2752
|
-
return self._extend(relationships=relationships)
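A hedged sketch of .join() (not part of the diff; `Person` and the "pets" relationship are assumed):

builder = Person.where(Person.age >= 18).join()                       # join on every defined relationship
builder = Person.where(Person.age >= 18).join("pets", method="left")  # or a specific one, overriding the join method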
|
|
2753
|
-
|
|
2754
|
-
def cache(
|
|
2755
|
-
self,
|
|
2756
|
-
*deps: Any,
|
|
2757
|
-
expires_at: Optional[dt.datetime] = None,
|
|
2758
|
-
ttl: Optional[int | dt.timedelta] = None,
|
|
2759
|
-
) -> "QueryBuilder[T_MetaInstance]":
|
|
2760
|
-
"""
|
|
2761
|
-
Enable caching for this query to load repeated calls from a dill row \
|
|
2762
|
-
instead of executing the sql and collecting matching rows again.
|
|
2763
|
-
"""
|
|
2764
|
-
existing = self.metadata.get("cache", {})
|
|
2765
|
-
|
|
2766
|
-
metadata: Metadata = {}
|
|
2767
|
-
|
|
2768
|
-
cache_meta = typing.cast(
|
|
2769
|
-
CacheMetadata,
|
|
2770
|
-
self.metadata.get("cache", {})
|
|
2771
|
-
| {
|
|
2772
|
-
"enabled": True,
|
|
2773
|
-
"depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
|
|
2774
|
-
"expires_at": get_expire(expires_at=expires_at, ttl=ttl),
|
|
2775
|
-
},
|
|
2776
|
-
)
|
|
2777
|
-
|
|
2778
|
-
metadata["cache"] = cache_meta
|
|
2779
|
-
return self._extend(metadata=metadata)
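A hedged sketch of enabling the cache (not part of the diff; `Person` is an assumed model):

builder = Person.where(Person.age >= 18).cache(ttl=3600)  # serve repeated calls from the cached rows for an hour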
|
|
2780
|
-
|
|
2781
|
-
-    def _get_db(self) -> TypeDAL:
-        if db := self.model._db:
-            return db
-        else:  # pragma: no cover
-            raise EnvironmentError("@define or db.define is not called on this class yet!")
-
-    def _select_arg_convert(self, arg: Any) -> Any:
-        # typedfield are not really used at runtime anymore, but leave it in for safety:
-        if isinstance(arg, TypedField):  # pragma: no cover
-            arg = arg._field
-
-        return arg
-
-    def delete(self) -> list[int]:
-        """
-        Based on the current query, delete rows and return a list of deleted IDs.
-        """
-        db = self._get_db()
-        removed_ids = [_.id for _ in db(self.query).select("id")]
-        if db(self.query).delete():
-            # success!
-            return removed_ids
-
-        return []
-
-    def _delete(self) -> str:
-        db = self._get_db()
-        return str(db(self.query)._delete())
-
-    def update(self, **fields: Any) -> list[int]:
-        """
-        Based on the current query, update `fields` and return a list of updated IDs.
-        """
-        # todo: limit?
-        db = self._get_db()
-        updated_ids = db(self.query).select("id").column("id")
-        if db(self.query).update(**fields):
-            # success!
-            return updated_ids
-
-        return []
-
-    def _update(self, **fields: Any) -> str:
-        db = self._get_db()
-        return str(db(self.query)._update(**fields))
-
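Both mutations return the affected ids rather than a row count, as the docstrings above state. A short sketch with the hypothetical `Article` model from the earlier example:

```
# delete(): ids of the removed rows, or [] when nothing matched
removed_ids = Article.where(Article.title == "draft").delete()

# update(): ids of the changed rows, or [] when nothing matched
updated_ids = Article.where(Article.title == "old title").update(title="new title")
```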
-    def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], SelectKwargs]:
-        select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
-        select_kwargs = self.select_kwargs.copy()
-        query = self.query
-        model = self.model
-        mut_metadata["query"] = query
-        # require at least id of main table:
-        select_fields = ", ".join([str(_) for _ in select_args])
-        tablename = str(model)
-
-        if add_id and f"{tablename}.id" not in select_fields:
-            # fields of other selected, but required ID is missing.
-            select_args.append(model.id)
-
-        if self.relationships:
-            query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
-
-        return query, select_args, select_kwargs
-
-    def to_sql(self, add_id: bool = False) -> str:
-        """
-        Generate the SQL for the built query.
-        """
-        db = self._get_db()
-
-        query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
-
-        return str(db(query)._select(*select_args, **select_kwargs))
-
-    def _collect(self) -> str:
-        """
-        Alias for to_sql, pydal-like syntax.
-        """
-        return self.to_sql()
-
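`to_sql()` (and its pydal-flavoured alias `_collect()`) only renders the statement. A sketch with the hypothetical `Article` model:

```
sql = Article.where(Article.id > 0).to_sql()
print(sql)  # the SELECT statement pydal would execute, without running it
```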
-    def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
-        expires_at = metadata["cache"].get("expires_at")
-        metadata["cache"] |= {
-            # key is partly dependant on cache metadata but not these:
-            "key": None,
-            "status": None,
-            "cached_at": None,
-            "expires_at": None,
-        }
-
-        _, key = create_and_hash_cache_key(
-            self.model,
-            metadata,
-            self.query,
-            self.select_args,
-            self.select_kwargs,
-            self.relationships.keys(),
-        )
-
-        # re-set after creating key:
-        metadata["cache"]["expires_at"] = expires_at
-        metadata["cache"]["key"] = key
-
-        return load_from_cache(key, self._get_db())
-
-    def execute(self, add_id: bool = False) -> Rows:
-        """
-        Raw version of .collect which only executes the SQL, without performing any magic afterwards.
-        """
-        db = self._get_db()
-        metadata = typing.cast(Metadata, self.metadata.copy())
-
-        query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
-
-        return db(query).select(*select_args, **select_kwargs)
-
-    def collect(
-        self,
-        verbose: bool = False,
-        _to: Type["TypedRows[Any]"] = None,
-        add_id: bool = True,
-    ) -> "TypedRows[T_MetaInstance]":
-        """
-        Execute the built query and turn it into model instances, while handling relationships.
-        """
-        if _to is None:
-            _to = TypedRows
-
-        db = self._get_db()
-        metadata = typing.cast(Metadata, self.metadata.copy())
-
-        if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
-            return result
-
-        query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
-
-        metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
-
-        if verbose:  # pragma: no cover
-            print(metadata["sql"])
-
-        rows: Rows = db(query).select(*select_args, **select_kwargs)
-
-        metadata["final_query"] = str(query)
-        metadata["final_args"] = [str(_) for _ in select_args]
-        metadata["final_kwargs"] = select_kwargs
-
-        if verbose:  # pragma: no cover
-            print(rows)
-
-        if not self.relationships:
-            # easy
-            typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
-
-        else:
-            # harder: try to match rows to the belonging objects
-            # assume structure of {'table': <data>} per row.
-            # if that's not the case, return default behavior again
-            typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
-
-        # only saves if requested in metadata:
-        return save_to_cache(typed_rows, rows)
-
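A sketch of the happy path through `collect()`, reusing the hypothetical `Article`/`Author` models from the earlier sketch; that the joined data lands on an `author` attribute is an assumption based on the relationship handling above, not something this diff states explicitly.

```
rows = (
    Article.where(Article.id > 0)
    .join(Author, condition=lambda article, author: article.author == author.id)
    .collect()
)

for article in rows:          # typed model instances, not bare pydal rows
    print(article.id, article.title, article.author)  # joined data attached per instance
```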
-    @typing.overload
-    def column(self, field: TypedField[T], **options: Unpack[SelectKwargs]) -> list[T]:
-        """
-        If a typedfield is passed, the output type can be safely determined.
-        """
-
-    @typing.overload
-    def column(self, field: T, **options: Unpack[SelectKwargs]) -> list[T]:
-        """
-        Otherwise, the output type is loosely determined (assumes `field: type` or Any).
-        """
-
-    def column(self, field: TypedField[T] | T, **options: Unpack[SelectKwargs]) -> list[T]:
-        """
-        Get all values in a specific column.
-
-        Shortcut for `.select(field).execute().column(field)`.
-        """
-        return self.select(field, **options).execute().column(field)
-
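`column()` is a convenience wrapper, per the docstring above; a one-line sketch with the hypothetical `Article` model:

```
titles: list[str] = Article.where(Article.id > 0).column(Article.title)
```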
-    def _handle_relationships_pre_select(
-        self,
-        query: Query,
-        select_args: list[Any],
-        select_kwargs: SelectKwargs,
-        metadata: Metadata,
-    ) -> tuple[Query, list[Any]]:
-        db = self._get_db()
-        model = self.model
-
-        metadata["relationships"] = set(self.relationships.keys())
-
-        join = []
-        for key, relation in self.relationships.items():
-            if not relation.condition or relation.join != "inner":
-                continue
-
-            other = relation.get_table(db)
-            other = other.with_alias(f"{key}_{hash(relation)}")
-            condition = relation.condition(model, other)
-            if callable(relation.condition_and):
-                condition &= relation.condition_and(model, other)
-
-            join.append(other.on(condition))
-
-        if limitby := select_kwargs.pop("limitby", ()):
-            # if limitby + relationships:
-            # 1. get IDs of main table entries that match 'query'
-            # 2. change query to .belongs(id)
-            # 3. add joins etc
-
-            kwargs: SelectKwargs = select_kwargs | {"limitby": limitby}
-            # if orderby := select_kwargs.get("orderby"):
-            #     kwargs["orderby"] = orderby
-
-            if join:
-                kwargs["join"] = join
-
-            ids = db(query)._select(model.id, **kwargs)
-            query = model.id.belongs(ids)
-            metadata["ids"] = ids
-
-        if join:
-            select_kwargs["join"] = join
-
-        left = []
-
-        for key, relation in self.relationships.items():
-            other = relation.get_table(db)
-            method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
-
-            select_fields = ", ".join([str(_) for _ in select_args])
-            pre_alias = str(other)
-
-            if f"{other}." not in select_fields:
-                # no fields of other selected. add .ALL:
-                select_args.append(other.ALL)
-            elif f"{other}.id" not in select_fields:
-                # fields of other selected, but required ID is missing.
-                select_args.append(other.id)
-
-            if relation.on:
-                # if it has a .on, it's always a left join!
-                on = relation.on(model, other)
-                if not isinstance(on, list):  # pragma: no cover
-                    on = [on]
-
-                on = [
-                    _
-                    for _ in on
-                    # only allow Expressions (query and such):
-                    if isinstance(_, pydal.objects.Expression)
-                ]
-                left.extend(on)
-            elif method == "left":
-                # .on not given, generate it:
-                other = other.with_alias(f"{key}_{hash(relation)}")
-                condition = typing.cast(Query, relation.condition(model, other))
-                if callable(relation.condition_and):
-                    condition &= relation.condition_and(model, other)
-                left.append(other.on(condition))
-            else:
-                # else: inner join (handled earlier)
-                other = other.with_alias(f"{key}_{hash(relation)}")  # only for replace
-
-            # if no fields of 'other' are included, add other.ALL
-            # else: only add other.id if missing
-            select_fields = ", ".join([str(_) for _ in select_args])
-
-            post_alias = str(other).split(" AS ")[-1]
-            if pre_alias != post_alias:
-                # replace .select's with aliased:
-                select_fields = select_fields.replace(
-                    f"{pre_alias}.",
-                    f"{post_alias}.",
-                )
-
-                select_args = select_fields.split(", ")
-
-        select_kwargs["left"] = left
-        return query, select_args
-
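The `limitby` branch above paginates in two steps so that joined rows do not eat into the page size. A rough pydal-level equivalent of that strategy, using the hypothetical `article`/`author` tables from the earlier sketch (this is illustrative, not code from the package):

```
# 1. a page of ids from the main table only
page_ids = db(db.article).select(db.article.id, limitby=(0, 10)).column("id")

# 2. re-query with `belongs` plus the joins; the page size is now unaffected
#    by how many joined rows each article produces
rows = db(db.article.id.belongs(page_ids)).select(
    db.article.ALL,
    db.author.ALL,
    left=db.author.on(db.article.author == db.author.id),
)
```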
-    def _collect_with_relationships(
-        self,
-        rows: Rows,
-        metadata: Metadata,
-        _to: Type["TypedRows[Any]"],
-    ) -> "TypedRows[T_MetaInstance]":
-        """
-        Transform the raw rows into Typed Table model instances.
-        """
-        db = self._get_db()
-        main_table = self.model._ensure_table_defined()
-
-        # id: Model
-        records = {}
-
-        # id: [Row]
-        raw_per_id = defaultdict(list)
-
-        seen_relations: dict[str, set[str]] = defaultdict(set)  # main id -> set of col + id for relation
-
-        for row in rows:
-            main = row[main_table]
-            main_id = main.id
-
-            raw_per_id[main_id].append(normalize_table_keys(row))
-
-            if main_id not in records:
-                records[main_id] = self.model(main)
-                records[main_id]._with = list(self.relationships.keys())
-
-                # setup up all relationship defaults (once)
-                for col, relationship in self.relationships.items():
-                    records[main_id][col] = [] if relationship.multiple else None
-
-            # now add other relationship data
-            for column, relation in self.relationships.items():
-                relationship_column = f"{column}_{hash(relation)}"
-
-                # relationship_column works for aliases with the same target column.
-                # if col + relationship not in the row, just use the regular name.
-
-                relation_data = (
-                    row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
-                )
-
-                if relation_data.id is None:
-                    # always skip None ids
-                    continue
-
-                if f"{column}-{relation_data.id}" in seen_relations[main_id]:
-                    # speed up duplicates
-                    continue
-                else:
-                    seen_relations[main_id].add(f"{column}-{relation_data.id}")
-
-                relation_table = relation.get_table(db)
-                # hopefully an instance of a typed table and a regular row otherwise:
-                instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
-
-                if relation.multiple:
-                    # create list of T
-                    if not isinstance(records[main_id].get(column), list):  # pragma: no cover
-                        # should already be set up before!
-                        setattr(records[main_id], column, [])
-
-                    records[main_id][column].append(instance)
-                else:
-                    # create single T
-                    records[main_id][column] = instance
-
-        return _to(rows, self.model, records, metadata=metadata, raw=raw_per_id)
-
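Stripped of the TypeDAL types, the loop above is a group-by on the main table's id. The same idea in isolation, in plain Python with made-up data:

```
from collections import defaultdict

# flat join output as (article_id, author_name) pairs, None where the LEFT JOIN found nothing
joined = [(1, "alice"), (1, "alice"), (2, None), (3, "bob")]

authors_per_article: dict[int, list[str]] = defaultdict(list)
for article_id, author in joined:
    if author is None:                             # mirrors the "always skip None ids" check
        continue
    if author in authors_per_article[article_id]:  # mirrors the seen_relations dedup
        continue
    authors_per_article[article_id].append(author)

assert dict(authors_per_article) == {1: ["alice"], 3: ["bob"]}
```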
-    def collect_or_fail(self, exception: typing.Optional[Exception] = None) -> "TypedRows[T_MetaInstance]":
-        """
-        Call .collect() and raise an error if nothing found.
-
-        Basically unwraps Optional type.
-        """
-        if result := self.collect():
-            return result
-
-        if not exception:
-            exception = ValueError("Nothing found!")
-
-        raise exception
-
-    def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
-        """
-        You can start iterating a Query Builder object before calling collect, for ease of use.
-        """
-        yield from self.collect()
-
-    def __count(self, db: TypeDAL, distinct: typing.Optional[bool] = None) -> Query:
-        # internal, shared logic between .count and ._count
-        model = self.model
-        query = self.query
-        for key, relation in self.relationships.items():
-            if (not relation.condition or relation.join != "inner") and not distinct:
-                continue
-
-            other = relation.get_table(db)
-            if not distinct:
-                # todo: can this lead to other issues?
-                other = other.with_alias(f"{key}_{hash(relation)}")
-            query &= relation.condition(model, other)
-
-        return query
-
-    def count(self, distinct: typing.Optional[bool] = None) -> int:
-        """
-        Return the amount of rows matching the current query.
-        """
-        db = self._get_db()
-        query = self.__count(db, distinct=distinct)
-
-        return db(query).count(distinct)
-
-    def _count(self, distinct: typing.Optional[bool] = None) -> str:
-        """
-        Return the SQL for .count().
-        """
-        db = self._get_db()
-        query = self.__count(db, distinct=distinct)
-
-        return typing.cast(str, db(query)._count(distinct))
-
-    def exists(self) -> bool:
-        """
-        Determines if any records exist matching the current query.
-
-        Returns True if one or more records exist; otherwise, False.
-
-        Returns:
-            bool: A boolean indicating whether any records exist.
-        """
-        return bool(self.count())
-
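`count()` and `exists()` as declared above, with the hypothetical `Article` model:

```
total = Article.where(Article.id > 0).count()
has_drafts = Article.where(Article.title == "draft").exists()  # simply bool(count())
```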
-    def __paginate(
-        self,
-        limit: int,
-        page: int = 1,
-    ) -> "QueryBuilder[T_MetaInstance]":
-        available = self.count()
-
-        _from = limit * (page - 1)
-        _to = (limit * page) if limit else available
-
-        metadata: Metadata = {}
-
-        metadata["pagination"] = {
-            "limit": limit,
-            "current_page": page,
-            "max_page": math.ceil(available / limit) if limit else 1,
-            "rows": available,
-            "min_max": (_from, _to),
-        }
-
-        return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
-
-    def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
-        """
-        Paginate transforms the more readable `page` and `limit` to pydals internal limit and offset.
-
-        Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
-        can be loaded with relationship data!
-        """
-        builder = self.__paginate(limit, page)
-
-        rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
-
-        rows._query_builder = builder
-        return rows
-
-    def _paginate(
-        self,
-        limit: int,
-        page: int = 1,
-    ) -> str:
-        builder = self.__paginate(limit, page)
-        return builder._collect()
-
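The private `__paginate()` translates `page`/`limit` into pydal's `limitby` window, e.g. `limit=20, page=3` becomes `limitby=(40, 60)`. A sketch of the public call, again with the hypothetical `Article` model:

```
page3 = Article.where(Article.id > 0).paginate(limit=20, page=3)

info = page3.pagination          # property of PaginatedRows, defined further below
print(info["current_page"], info["total_pages"], info["total_items"])
```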
-    def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
-        """
-        Generator that yields rows from a paginated source in chunks.
-
-        This function retrieves rows from a paginated data source in chunks of the
-        specified `chunk_size` and yields them as TypedRows.
-
-        Example:
-            ```
-            for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
-                for row in chunk_of_rows:
-                    # Process each row within the chunk.
-                    pass
-            ```
-        """
-        page = 1
-
-        while rows := self.__paginate(chunk_size, page).collect():
-            yield rows
-            page += 1
-
-    def first(self, verbose: bool = False) -> T_MetaInstance | None:
-        """
-        Get the first row matching the currently built query.
-
-        Also adds paginate, since it would be a waste to select more rows than needed.
-        """
-        if row := self.paginate(page=1, limit=1, verbose=verbose).first():
-            return self.model.from_row(row)
-        else:
-            return None
-
-    def _first(self) -> str:
-        return self._paginate(page=1, limit=1)
-
-    def first_or_fail(self, exception: typing.Optional[Exception] = None, verbose: bool = False) -> T_MetaInstance:
-        """
-        Call .first() and raise an error if nothing found.
-
-        Basically unwraps Optional type.
-        """
-        if inst := self.first(verbose=verbose):
-            return inst
-
-        if not exception:
-            exception = ValueError("Nothing found!")
-
-        raise exception
-
-
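`first()` and `first_or_fail()` with the hypothetical `Article` model:

```
maybe = Article.where(Article.title == "hello").first()         # Article instance or None
must = Article.where(Article.title == "hello").first_or_fail()  # raises ValueError("Nothing found!") if empty
must = Article.where(Article.title == "hello").first_or_fail(exception=KeyError("no such article"))
```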
-S = typing.TypeVar("S")
-
-
-class PaginatedRows(TypedRows[T_MetaInstance]):
-    """
-    Extension on top of rows that is used when calling .paginate() instead of .collect().
-    """
-
-    _query_builder: QueryBuilder[T_MetaInstance]
-
-    @property
-    def data(self) -> list[T_MetaInstance]:
-        """
-        Get the underlying data.
-        """
-        return list(self.records.values())
-
-    @property
-    def pagination(self) -> Pagination:
-        """
-        Get all page info.
-        """
-        pagination_data = self.metadata["pagination"]
-
-        has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
-        has_prev_page = pagination_data["current_page"] > 1
-        return {
-            "total_items": pagination_data["rows"],
-            "current_page": pagination_data["current_page"],
-            "per_page": pagination_data["limit"],
-            "total_pages": pagination_data["max_page"],
-            "has_next_page": has_next_page,
-            "has_prev_page": has_prev_page,
-            "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
-            "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
-        }
-
-    def next(self) -> Self:
-        """
-        Get the next page.
-        """
-        data = self.metadata["pagination"]
-        if data["current_page"] >= data["max_page"]:
-            raise StopIteration("Final Page")
-
-        return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
-
-    def previous(self) -> Self:
-        """
-        Get the previous page.
-        """
-        data = self.metadata["pagination"]
-        if data["current_page"] <= 1:
-            raise StopIteration("First Page")
-
-        return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
-
-    def as_dict(self, *_: Any, **__: Any) -> PaginateDict:  # type: ignore
-        """
-        Convert to a dictionary with pagination info and original data.
-
-        All arguments are ignored!
-        """
-        return {"data": super().as_dict(), "pagination": self.pagination}
-
-
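A sketch of walking pages with `PaginatedRows`, using the hypothetical `Article` model; `next()` raising `StopIteration` on the final page follows directly from the code above.

```
page = Article.where(Article.id > 0).paginate(limit=25, page=1)

while True:
    for article in page:
        print(article.id)
    try:
        page = page.next()
    except StopIteration:
        break

payload = page.as_dict()  # {"data": [...], "pagination": {...}}
```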
-class TypedSet(pydal.objects.Set):  # type: ignore # pragma: no cover
-    """
-    Used to make pydal Set more typed.
-
-    This class is not actually used, only 'cast' by TypeDAL.__call__
-    """
-
-    def count(self, distinct: typing.Optional[bool] = None, cache: AnyDict = None) -> int:
-        """
-        Count returns an int.
-        """
-        result = super().count(distinct, cache)
-        return typing.cast(int, result)
-
-    def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
-        """
-        Select returns a TypedRows of a user defined table.
-
-        Example:
-            result: TypedRows[MyTable] = db(MyTable.id > 0).select()
-
-            for row in result:
-                reveal_type(row)  # MyTable
-        """
-        rows = super().select(*fields, **attributes)
-        return typing.cast(TypedRows[T_MetaInstance], rows)