TypeDAL 3.12.1__py3-none-any.whl → 4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
typedal/core.py CHANGED
@@ -2,382 +2,149 @@
2
2
  Core functionality of TypeDAL.
3
3
  """
4
4
 
5
- import contextlib
6
- import csv
7
- import datetime as dt
8
- import inspect
9
- import json
10
- import math
5
+ from __future__ import annotations
6
+
11
7
  import sys
12
- import types
13
- import typing
14
- import uuid
8
+ import typing as t
15
9
  import warnings
16
- from collections import defaultdict
17
- from copy import copy
18
- from decimal import Decimal
19
10
  from pathlib import Path
20
- from typing import Any, Optional, Type
11
+ from typing import Optional
21
12
 
22
13
  import pydal
23
- from pydal._globals import DEFAULT
24
- from pydal.objects import Field as _Field
25
- from pydal.objects import Query as _Query
26
- from pydal.objects import Row
27
- from pydal.objects import Table as _Table
28
- from typing_extensions import Self, Unpack
29
-
30
- from .config import TypeDALConfig, load_config
14
+
15
+ from .config import LazyPolicy, TypeDALConfig, load_config
31
16
  from .helpers import (
32
- DummyQuery,
33
- all_annotations,
34
- all_dict,
35
- as_lambda,
36
- extract_type_optional,
37
- filter_out,
38
- instanciate,
39
- is_union,
40
- looks_like,
41
- mktable,
42
- origin_is_subclass,
17
+ SYSTEM_SUPPORTS_TEMPLATES,
18
+ default_representer,
19
+ sql_escape_template,
20
+ sql_expression,
43
21
  to_snake,
44
- unwrap_type,
45
22
  )
46
- from .serializers import as_json
47
- from .types import (
48
- AnyDict,
49
- CacheMetadata,
50
- Expression,
51
- Field,
52
- FieldSettings,
53
- Metadata,
54
- OpRow,
55
- PaginateDict,
56
- Pagination,
57
- Query,
58
- Reference,
59
- Rows,
60
- SelectKwargs,
61
- Set,
62
- Table,
63
- Validator,
64
- _Types,
65
- )
66
-
67
- # use typing.cast(type, ...) to make mypy happy with unions
68
- T_annotation = Type[Any] | types.UnionType
69
- T_Query = typing.Union["Table", Query, bool, None, "TypedTable", Type["TypedTable"]]
70
- T_Value = typing.TypeVar("T_Value") # actual type of the Field (via Generic)
71
- T_MetaInstance = typing.TypeVar("T_MetaInstance", bound="TypedTable") # bound="TypedTable"; bound="TableMeta"
72
- T = typing.TypeVar("T")
73
-
74
- BASIC_MAPPINGS: dict[T_annotation, str] = {
75
- str: "string",
76
- int: "integer",
77
- bool: "boolean",
78
- bytes: "blob",
79
- float: "double",
80
- object: "json",
81
- Decimal: "decimal(10,2)",
82
- dt.date: "date",
83
- dt.time: "time",
84
- dt.datetime: "datetime",
85
- }
86
-
87
-
88
- def is_typed_field(cls: Any) -> typing.TypeGuard["TypedField[Any]"]:
89
- """
90
- Is `cls` an instance or subclass of TypedField?
91
-
92
- Deprecated
93
- """
94
- return (
95
- isinstance(cls, TypedField)
96
- or isinstance(typing.get_origin(cls), type)
97
- and issubclass(typing.get_origin(cls), TypedField)
98
- )
23
+ from .types import Field, T, Template # type: ignore
99
24
 
25
+ try:
26
+ # python 3.14+
27
+ from annotationlib import ForwardRef
28
+ except ImportError: # pragma: no cover
29
+ # python 3.13-
30
+ from typing import ForwardRef
100
31
 
101
- JOIN_OPTIONS = typing.Literal["left", "inner", None]
102
- DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
32
+ if t.TYPE_CHECKING:
33
+ from .fields import TypedField
34
+ from .types import AnyDict, Expression, T_Query, Table
103
35
 
104
- # table-ish parameter:
105
- P_Table = typing.Union[Type["TypedTable"], pydal.objects.Table]
106
36
 
107
- Condition: typing.TypeAlias = typing.Optional[
108
- typing.Callable[
109
- # self, other -> Query
110
- [P_Table, P_Table],
111
- Query | bool,
112
- ]
113
- ]
37
+ # note: these functions can not be moved to a different file,
38
+ # because then they will have different globals and it breaks!
114
39
 
115
- OnQuery: typing.TypeAlias = typing.Optional[
116
- typing.Callable[
117
- # self, other -> list of .on statements
118
- [P_Table, P_Table],
119
- list[Expression],
120
- ]
121
- ]
122
40
 
123
- To_Type = typing.TypeVar("To_Type", type[Any], Type[Any], str)
124
-
125
-
126
- class Relationship(typing.Generic[To_Type]):
41
+ def evaluate_forward_reference_312(fw_ref: ForwardRef, namespace: dict[str, type]) -> type: # pragma: no cover
127
42
  """
128
- Define a relationship to another table.
129
- """
130
-
131
- _type: To_Type
132
- table: Type["TypedTable"] | type | str
133
- condition: Condition
134
- condition_and: Condition
135
- on: OnQuery
136
- multiple: bool
137
- join: JOIN_OPTIONS
138
-
139
- def __init__(
140
- self,
141
- _type: To_Type,
142
- condition: Condition = None,
143
- join: JOIN_OPTIONS = None,
144
- on: OnQuery = None,
145
- condition_and: Condition = None,
146
- ):
147
- """
148
- Should not be called directly, use relationship() instead!
149
- """
150
- if condition and on:
151
- warnings.warn(f"Relation | Both specified! {condition=} {on=} {_type=}")
152
- raise ValueError("Please specify either a condition or an 'on' statement for this relationship!")
153
-
154
- self._type = _type
155
- self.condition = condition
156
- self.join = "left" if on else join # .on is always left join!
157
- self.on = on
158
- self.condition_and = condition_and
159
-
160
- if args := typing.get_args(_type):
161
- self.table = unwrap_type(args[0])
162
- self.multiple = True
163
- else:
164
- self.table = _type
165
- self.multiple = False
166
-
167
- if isinstance(self.table, str):
168
- self.table = TypeDAL.to_snake(self.table)
169
-
170
- def clone(self, **update: Any) -> "Relationship[To_Type]":
171
- """
172
- Create a copy of the relationship, possibly updated.
173
- """
174
- return self.__class__(
175
- update.get("_type") or self._type,
176
- update.get("condition") or self.condition,
177
- update.get("join") or self.join,
178
- update.get("on") or self.on,
179
- update.get("condition_and") or self.condition_and,
180
- )
181
-
182
- def __repr__(self) -> str:
183
- """
184
- Representation of the relationship.
185
- """
186
- if callback := self.condition or self.on:
187
- src_code = inspect.getsource(callback).strip()
188
-
189
- if c_and := self.condition_and:
190
- and_code = inspect.getsource(c_and).strip()
191
- src_code += " AND " + and_code
192
- else:
193
- cls_name = self._type if isinstance(self._type, str) else self._type.__name__
194
- src_code = f"to {cls_name} (missing condition)"
195
-
196
- join = f":{self.join}" if self.join else ""
197
- return f"<Relationship{join} {src_code}>"
198
-
199
- def get_table(self, db: "TypeDAL") -> Type["TypedTable"]:
200
- """
201
- Get the table this relationship is bound to.
202
- """
203
- table = self.table # can be a string because db wasn't available yet
204
- if isinstance(table, str):
205
- if mapped := db._class_map.get(table):
206
- # yay
207
- return mapped
208
-
209
- # boo, fall back to untyped table but pretend it is typed:
210
- return typing.cast(Type["TypedTable"], db[table]) # eh close enough!
211
-
212
- return table
213
-
214
- def get_table_name(self) -> str:
215
- """
216
- Get the name of the table this relationship is bound to.
217
- """
218
- if isinstance(self.table, str):
219
- return self.table
220
-
221
- if isinstance(self.table, pydal.objects.Table):
222
- return str(self.table)
223
-
224
- # else: typed table
225
- try:
226
- table = self.table._ensure_table_defined() if issubclass(self.table, TypedTable) else self.table
227
- except Exception: # pragma: no cover
228
- table = self.table
229
-
230
- return str(table)
231
-
232
- def __get__(self, instance: Any, owner: Any) -> typing.Optional[list[Any]] | "Relationship[To_Type]":
233
- """
234
- Relationship is a descriptor class, which can be returned from a class but not an instance.
235
-
236
- For an instance, using .join() will replace the Relationship with the actual data.
237
- If you forgot to join, a warning will be shown and empty data will be returned.
238
- """
239
- if not instance:
240
- # relationship queried on class, that's allowed
241
- return self
242
-
243
- warnings.warn(
244
- "Trying to get data from a relationship object! Did you forget to join it?", category=RuntimeWarning
245
- )
246
- if self.multiple:
247
- return []
248
- else:
249
- return None
250
-
43
+ Extract the original type from a forward reference string.
251
44
 
252
- def relationship(_type: To_Type, condition: Condition = None, join: JOIN_OPTIONS = None, on: OnQuery = None) -> To_Type:
45
+ Variant for python 3.12 and below
253
46
  """
254
- Define a relationship to another table, when its id is not stored in the current table.
255
-
256
- Example:
257
- class User(TypedTable):
258
- name: str
259
-
260
- posts = relationship(list["Post"], condition=lambda self, post: self.id == post.author, join='left')
261
-
262
- class Post(TypedTable):
263
- title: str
264
- author: User
265
-
266
- User.join("posts").first() # User instance with list[Post] in .posts
267
-
268
- Here, Post stores the User ID, but `relationship(list["Post"])` still allows you to get the user's posts.
269
- In this case, the join strategy is set to LEFT so users without posts are also still selected.
47
+ return t.cast(
48
+ type,
49
+ fw_ref._evaluate(
50
+ localns=locals(),
51
+ globalns=globals() | namespace,
52
+ recursive_guard=frozenset(),
53
+ ),
54
+ )
270
55
 
271
- For complex queries with a pivot table, an `on` can be set instead of `condition`:
272
- class User(TypedTable):
273
- ...
274
56
 
275
- tags = relationship(list["Tag"], on=lambda self, tag: [
276
- Tagged.on(Tagged.entity == entity.gid),
277
- Tag.on((Tagged.tag == tag.id)),
278
- ])
57
+ def evaluate_forward_reference_313(fw_ref: ForwardRef, namespace: dict[str, type]) -> type: # pragma: no cover
58
+ """
59
+ Extract the original type from a forward reference string.
279
60
 
280
- If you'd try to capture this in a single 'condition', pydal would create a cross join which is much less efficient.
61
+ Variant for python 3.13
281
62
  """
282
- return typing.cast(
283
- # note: The descriptor `Relationship[To_Type]` is more correct, but pycharm doesn't really get that.
284
- # so for ease of use, just cast to the referred type for now!
285
- # e.g. x = relationship(Author) -> x: Author
286
- To_Type,
287
- Relationship(_type, condition, join, on),
63
+ return t.cast(
64
+ type,
65
+ fw_ref._evaluate(
66
+ localns=locals(),
67
+ globalns=globals() | namespace,
68
+ recursive_guard=frozenset(),
69
+ type_params=(), # suggested since 3.13 (warning) and not supported before. Mandatory after 3.15!
70
+ ),
288
71
  )
289
72
 
290
73
 
291
- T_Field: typing.TypeAlias = typing.Union["TypedField[Any]", "Table", Type["TypedTable"]]
74
+ def evaluate_forward_reference_314(fw_ref: ForwardRef, namespace: dict[str, type]) -> type: # pragma: no cover
75
+ """
76
+ Extract the original type from a forward reference string.
292
77
 
78
+ Variant for python 3.14 (and hopefully above)
79
+ """
80
+ return t.cast(
81
+ type,
82
+ fw_ref.evaluate(
83
+ locals=locals(),
84
+ globals=globals() | namespace,
85
+ type_params=(),
86
+ ),
87
+ )
293
88
 
294
- def _generate_relationship_condition(_: Type["TypedTable"], key: str, field: T_Field) -> Condition:
295
- origin = typing.get_origin(field)
296
- # else: generic
297
89
 
298
- if origin is list:
299
- # field = typing.get_args(field)[0] # actual field
300
- # return lambda _self, _other: cls[key].contains(field)
90
+ def evaluate_forward_reference(
91
+ fw_ref: ForwardRef,
92
+ namespace: dict[str, type] | None = None,
93
+ ) -> type: # pragma: no cover
94
+ """
95
+ Extract the original type from a forward reference string.
301
96
 
302
- return lambda _self, _other: _self[key].contains(_other.id)
97
+ Automatically chooses strategy based on current Python version.
98
+ """
99
+ if sys.version_info.minor < 13:
100
+ return evaluate_forward_reference_312(fw_ref, namespace=namespace or {})
101
+ elif sys.version_info.minor == 13:
102
+ return evaluate_forward_reference_313(fw_ref, namespace=namespace or {})
303
103
  else:
304
- # normal reference
305
- # return lambda _self, _other: cls[key] == field.id
306
- return lambda _self, _other: _self[key] == _other.id
104
+ return evaluate_forward_reference_314(fw_ref, namespace=namespace or {})
307
105
 
308
106
 
309
- def to_relationship(
310
- cls: Type["TypedTable"] | type[Any],
311
- key: str,
312
- field: T_Field,
313
- ) -> typing.Optional[Relationship[Any]]:
107
+ def resolve_annotation_313(ftype: str) -> type: # pragma: no cover
314
108
  """
315
- Used to automatically create relationship instance for reference fields.
316
-
317
- Example:
318
- class MyTable(TypedTable):
319
- reference: OtherTable
109
+ Resolve an annotation that's in string representation.
320
110
 
321
- `reference` contains the id of an Other Table row.
322
- MyTable.relationships should have 'reference' as a relationship, so `MyTable.join('reference')` should work.
323
-
324
- This function will automatically perform this logic (called in db.define):
325
- to_relationship(MyTable, 'reference', OtherTable) -> Relationship[OtherTable]
326
-
327
- Also works for list:reference (list[OtherTable]) and TypedField[OtherTable].
111
+ Variant for Python 3.13
328
112
  """
329
- if looks_like(field, TypedField):
330
- # typing.get_args works for list[str] but not for TypedField[role] :(
331
- if args := typing.get_args(field):
332
- # TypedField[SomeType] -> SomeType
333
- field = args[0]
334
- elif hasattr(field, "_type"):
335
- # TypedField(SomeType) -> SomeType
336
- field = typing.cast(T_Field, field._type)
337
- else: # pragma: no cover
338
- # weird
339
- return None
340
-
341
- field, optional = extract_type_optional(field)
342
-
343
- try:
344
- condition = _generate_relationship_condition(cls, key, field)
345
- except Exception as e: # pragma: no cover
346
- warnings.warn("Could not generate Relationship condition", source=e)
347
- condition = None
113
+ fw_ref: ForwardRef = t.get_args(t.Type[ftype])[0]
114
+ return evaluate_forward_reference(fw_ref)
348
115
 
349
- if not condition: # pragma: no cover
350
- # something went wrong, not a valid relationship
351
- warnings.warn(f"Invalid relationship for {cls.__name__}.{key}: {field}")
352
- return None
353
116
 
354
- join = "left" if optional or typing.get_origin(field) is list else "inner"
117
+ def resolve_annotation_314(ftype: str) -> type: # pragma: no cover
118
+ """
119
+ Resolve an annotation that's in string representation.
355
120
 
356
- return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))
121
+ Variant for Python 3.14 + using annotationlib
122
+ """
123
+ fw_ref = ForwardRef(ftype)
124
+ return evaluate_forward_reference(fw_ref)
357
125
 
358
126
 
359
- def evaluate_forward_reference(fw_ref: typing.ForwardRef) -> type:
360
- """
361
- Extract the original type from a forward reference string.
127
+ def resolve_annotation(ftype: str) -> type: # pragma: no cover
362
128
  """
363
- kwargs = dict(
364
- localns=locals(),
365
- globalns=globals(),
366
- recursive_guard=frozenset(),
367
- )
368
- if sys.version_info >= (3, 13): # pragma: no cover
369
- # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
370
- kwargs["type_params"] = ()
129
+ Resolve an annotation that's in string representation.
371
130
 
372
- return fw_ref._evaluate(**kwargs) # type: ignore
131
+ Automatically chooses strategy based on current Python version.
132
+ """
133
+ if sys.version_info.major != 3:
134
+ raise EnvironmentError("Only python 3 is supported.")
135
+ elif sys.version_info.minor <= 13:
136
+ return resolve_annotation_313(ftype)
137
+ else:
138
+ return resolve_annotation_314(ftype)
373
139
 
374
140
 
375
- class TypeDAL(pydal.DAL): # type: ignore
141
+ class TypeDAL(pydal.DAL):
376
142
  """
377
143
  Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables.
378
144
  """
379
145
 
380
146
  _config: TypeDALConfig
147
+ _builder: TableDefinitionBuilder
381
148
 
382
149
  def __init__(
383
150
  self,
@@ -399,7 +166,7 @@ class TypeDAL(pydal.DAL): # type: ignore
399
166
  debug: bool = False,
400
167
  lazy_tables: bool = False,
401
168
  db_uid: Optional[str] = None,
402
- after_connection: typing.Callable[..., Any] = None,
169
+ after_connection: t.Callable[..., t.Any] = None,
403
170
  tables: Optional[list[str]] = None,
404
171
  ignore_field_case: bool = True,
405
172
  entity_quoting: bool = True,
@@ -409,6 +176,7 @@ class TypeDAL(pydal.DAL): # type: ignore
409
176
  use_env: bool | str = True,
410
177
  connection: Optional[str] = None,
411
178
  config: Optional[TypeDALConfig] = None,
179
+ lazy_policy: LazyPolicy | None = None,
412
180
  ) -> None:
413
181
  """
414
182
  Adds some internal tables after calling pydal's default init.
@@ -424,10 +192,12 @@ class TypeDAL(pydal.DAL): # type: ignore
424
192
  fake_migrate=fake_migrate,
425
193
  caching=enable_typedal_caching,
426
194
  pool_size=pool_size,
195
+ lazy_policy=lazy_policy,
427
196
  )
428
197
 
429
198
  self._config = config
430
199
  self.db = self
200
+ self._builder = TableDefinitionBuilder(self)
431
201
 
432
202
  if config.folder:
433
203
  Path(config.folder).mkdir(exist_ok=True)
@@ -462,7 +232,7 @@ class TypeDAL(pydal.DAL): # type: ignore
462
232
  self.try_define(_TypedalCache)
463
233
  self.try_define(_TypedalCacheDependency)
464
234
 
465
- def try_define(self, model: Type[T], verbose: bool = False) -> Type[T]:
235
+ def try_define(self, model: t.Type[T], verbose: bool = False) -> t.Type[T]:
466
236
  """
467
237
  Try to define a model with migrate or fall back to fake migrate.
468
238
  """
@@ -480,123 +250,13 @@ class TypeDAL(pydal.DAL): # type: ignore
480
250
  # try again:
481
251
  return self.define(model, migrate=True, fake_migrate=True, redefine=True)
482
252
 
483
- default_kwargs: typing.ClassVar[AnyDict] = {
253
+ default_kwargs: t.ClassVar[AnyDict] = {
484
254
  # fields are 'required' (notnull) by default:
485
255
  "notnull": True,
486
256
  }
487
257
 
488
- # maps table name to typedal class, for resolving future references
489
- _class_map: typing.ClassVar[dict[str, Type["TypedTable"]]] = {}
490
-
491
- def _define(self, cls: Type[T], **kwargs: Any) -> Type[T]:
492
- # todo: new relationship item added should also invalidate (previously unrelated) cache result
493
-
494
- # todo: option to enable/disable cache dependency behavior:
495
- # - don't set _before_update and _before_delete
496
- # - don't add TypedalCacheDependency entry
497
- # - don't invalidate other item on new row of this type
498
-
499
- # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
500
- # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
501
- # typing.get_type_hints(cls, globalns=None, localns=None)
502
- # -> ERR e.g. `pytest -svxk cli` -> name 'BestFriend' is not defined
503
-
504
- # dirty way (with evil eval):
505
- # [eval(v) for k, v in cls.__annotations__.items()]
506
- # this however also stops working when variables outside this scope or even references to other
507
- # objects are used. So for now, this package will NOT work when from __future__ import annotations is used,
508
- # and might break in the future, when this annotations behavior is enabled by default.
509
-
510
- # non-annotated variables have to be passed to define_table as kwargs
511
- full_dict = all_dict(cls) # includes properties from parents (e.g. useful for mixins)
512
-
513
- tablename = self.to_snake(cls.__name__)
514
- # grab annotations of cls and its parents:
515
- annotations = all_annotations(cls)
516
- # extend with `prop = TypedField()` 'annotations':
517
- annotations |= {k: typing.cast(type, v) for k, v in full_dict.items() if is_typed_field(v)}
518
- # remove internal stuff:
519
- annotations = {k: v for k, v in annotations.items() if not k.startswith("_")}
520
-
521
- typedfields: dict[str, TypedField[Any]] = {
522
- k: instanciate(v, True) for k, v in annotations.items() if is_typed_field(v)
523
- }
524
-
525
- relationships: dict[str, type[Relationship[Any]]] = filter_out(annotations, Relationship)
526
-
527
- fields = {fname: self._to_field(fname, ftype) for fname, ftype in annotations.items()}
528
-
529
- # ! don't use full_dict here:
530
- other_kwargs = kwargs | {
531
- k: v for k, v in cls.__dict__.items() if k not in annotations and not k.startswith("_")
532
- } # other_kwargs was previously used to pass kwargs to typedal, but use @define(**kwargs) for that.
533
- # now it's only used to extract relationships from the object.
534
- # other properties of the class (incl methods) should not be touched
535
-
536
- # for key in typedfields.keys() - full_dict.keys():
537
- # # typed fields that don't haven't been added to the object yet
538
- # setattr(cls, key, typedfields[key])
539
-
540
- for key, field in typedfields.items():
541
- # clone every property so it can be re-used across mixins:
542
- clone = copy(field)
543
- setattr(cls, key, clone)
544
- typedfields[key] = clone
545
-
546
- # start with base classes and overwrite with current class:
547
- relationships = filter_out(full_dict, Relationship) | relationships | filter_out(other_kwargs, Relationship)
548
-
549
- # DEPRECATED: Relationship as annotation is currently not supported!
550
- # ensure they are all instances and
551
- # not mix of instances (`= relationship()`) and classes (`: Relationship[...]`):
552
- # relationships = {
553
- # k: v if isinstance(v, Relationship) else to_relationship(cls, k, v) for k, v in relationships.items()
554
- # }
555
-
556
- # keys of implicit references (also relationships):
557
- reference_field_keys = [
558
- k for k, v in fields.items() if str(v.type).split(" ")[0] in ("list:reference", "reference")
559
- ]
560
-
561
- # add implicit relationships:
562
- # User; list[User]; TypedField[User]; TypedField[list[User]]; TypedField(User); TypedField(list[User])
563
- relationships |= {
564
- k: new_relationship
565
- for k in reference_field_keys
566
- if k not in relationships and (new_relationship := to_relationship(cls, k, annotations[k]))
567
- }
568
-
569
- # fixme: list[Reference] is recognized as relationship,
570
- # TypedField(list[Reference]) is NOT recognized!!!
571
-
572
- cache_dependency = self._config.caching and kwargs.pop("cache_dependency", True)
573
-
574
- table: Table = self.define_table(tablename, *fields.values(), **kwargs)
575
-
576
- for name, typed_field in typedfields.items():
577
- field = fields[name]
578
- typed_field.bind(field, table)
579
-
580
- if issubclass(cls, TypedTable):
581
- cls.__set_internals__(
582
- db=self,
583
- table=table,
584
- # by now, all relationships should be instances!
585
- relationships=typing.cast(dict[str, Relationship[Any]], relationships),
586
- )
587
- self._class_map[str(table)] = cls
588
- cls.__on_define__(self)
589
- else:
590
- warnings.warn("db.define used without inheriting TypedTable. This could lead to strange problems!")
591
-
592
- if not tablename.startswith("typedal_") and cache_dependency:
593
- table._before_update.append(lambda s, _: _remove_cache(s, tablename))
594
- table._before_delete.append(lambda s: _remove_cache(s, tablename))
595
-
596
- return cls
597
-
598
- @typing.overload
599
- def define(self, maybe_cls: None = None, **kwargs: Any) -> typing.Callable[[Type[T]], Type[T]]:
258
+ @t.overload
259
+ def define(self, maybe_cls: None = None, **kwargs: t.Any) -> t.Callable[[t.Type[T]], t.Type[T]]:
600
260
  """
601
261
  Typing Overload for define without a class.
602
262
 
@@ -604,8 +264,8 @@ class TypeDAL(pydal.DAL): # type: ignore
604
264
  class MyTable(TypedTable): ...
605
265
  """
606
266
 
607
- @typing.overload
608
- def define(self, maybe_cls: Type[T], **kwargs: Any) -> Type[T]:
267
+ @t.overload
268
+ def define(self, maybe_cls: t.Type[T], **kwargs: t.Any) -> t.Type[T]:
609
269
  """
610
270
  Typing Overload for define with a class.
611
271
 
@@ -613,7 +273,11 @@ class TypeDAL(pydal.DAL): # type: ignore
613
273
  class MyTable(TypedTable): ...
614
274
  """
615
275
 
616
- def define(self, maybe_cls: Type[T] | None = None, **kwargs: Any) -> Type[T] | typing.Callable[[Type[T]], Type[T]]:
276
+ def define(
277
+ self,
278
+ maybe_cls: t.Type[T] | None = None,
279
+ **kwargs: t.Any,
280
+ ) -> t.Type[T] | t.Callable[[t.Type[T]], t.Type[T]]:
617
281
  """
618
282
  Can be used as a decorator on a class that inherits `TypedTable`, \
619
283
  or as a regular method if you need to define your classes before you have access to a 'db' instance.
@@ -636,39 +300,15 @@ class TypeDAL(pydal.DAL): # type: ignore
636
300
  the result of pydal.define_table
637
301
  """
638
302
 
639
- def wrapper(cls: Type[T]) -> Type[T]:
640
- return self._define(cls, **kwargs)
303
+ def wrapper(cls: t.Type[T]) -> t.Type[T]:
304
+ return self._builder.define(cls, **kwargs)
641
305
 
642
306
  if maybe_cls:
643
307
  return wrapper(maybe_cls)
644
308
 
645
309
  return wrapper
646
310
 
647
- # def drop(self, table_name: str) -> None:
648
- # """
649
- # Remove a table by name (both on the database level and the typedal level).
650
- # """
651
- # # drop calls TypedTable.drop() and removes it from the `_class_map`
652
- # if cls := self._class_map.pop(table_name, None):
653
- # cls.drop()
654
-
655
- # def drop_all(self, max_retries: int = None) -> None:
656
- # """
657
- # Remove all tables and keep doing so until everything is gone!
658
- # """
659
- # retries = 0
660
- # if max_retries is None:
661
- # max_retries = len(self.tables)
662
- #
663
- # while self.tables:
664
- # retries += 1
665
- # for table in self.tables:
666
- # self.drop(table)
667
- #
668
- # if retries > max_retries:
669
- # raise RuntimeError("Could not delete all tables")
670
-
671
- def __call__(self, *_args: T_Query, **kwargs: Any) -> "TypedSet":
311
+ def __call__(self, *_args: T_Query, **kwargs: t.Any) -> "TypedSet":
672
312
  """
673
313
  A db instance can be called directly to perform a query.
674
314
 
@@ -686,107 +326,44 @@ class TypeDAL(pydal.DAL): # type: ignore
686
326
 
687
327
  if isinstance(cls, type) and issubclass(type(cls), type) and issubclass(cls, TypedTable):
688
328
  # table defined without @db.define decorator!
689
- _cls: Type[TypedTable] = cls
329
+ _cls: t.Type[TypedTable] = cls
690
330
  args[0] = _cls.id != None
691
331
 
692
332
  _set = super().__call__(*args, **kwargs)
693
- return typing.cast(TypedSet, _set)
333
+ return t.cast(TypedSet, _set)
694
334
 
695
335
  def __getitem__(self, key: str) -> "Table":
696
336
  """
697
337
  Allows dynamically accessing a table by its name as a string.
698
338
 
339
+ If you need the TypedTable class instead of the pydal table, use find_model.
340
+
699
341
  Example:
700
342
  db['users'] -> user
701
343
  """
702
- return typing.cast(Table, super().__getitem__(str(key)))
703
-
704
- @classmethod
705
- def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
706
- # return Field(name, _type, **{**cls.default_kwargs, **kw})
707
- kw_combined = cls.default_kwargs | kw
708
- return Field(name, _type, **kw_combined)
709
-
710
- @classmethod
711
- def _annotation_to_pydal_fieldtype(
712
- cls, _ftype: T_annotation, mut_kw: typing.MutableMapping[str, Any]
713
- ) -> Optional[str]:
714
- # ftype can be a union or type. typing.cast is sometimes used to tell mypy when it's not a union.
715
- ftype = typing.cast(type, _ftype) # cast from Type to type to make mypy happy)
716
-
717
- if isinstance(ftype, str):
718
- # extract type from string
719
- fw_ref: typing.ForwardRef = typing.get_args(Type[ftype])[0]
720
- ftype = evaluate_forward_reference(fw_ref)
721
-
722
- if mapping := BASIC_MAPPINGS.get(ftype):
723
- # basic types
724
- return mapping
725
- elif isinstance(ftype, _Table):
726
- # db.table
727
- return f"reference {ftype._tablename}"
728
- elif issubclass(type(ftype), type) and issubclass(ftype, TypedTable):
729
- # SomeTable
730
- snakename = cls.to_snake(ftype.__name__)
731
- return f"reference {snakename}"
732
- elif isinstance(ftype, TypedField):
733
- # FieldType(type, ...)
734
- return ftype._to_field(mut_kw)
735
- elif origin_is_subclass(ftype, TypedField):
736
- # TypedField[int]
737
- return cls._annotation_to_pydal_fieldtype(typing.get_args(ftype)[0], mut_kw)
738
- elif isinstance(ftype, types.GenericAlias) and typing.get_origin(ftype) in (list, TypedField):
739
- # list[str] -> str -> string -> list:string
740
- _child_type = typing.get_args(ftype)[0]
741
- _child_type = cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
742
- return f"list:{_child_type}"
743
- elif is_union(ftype):
744
- # str | int -> UnionType
745
- # typing.Union[str | int] -> typing._UnionGenericAlias
746
-
747
- # Optional[type] == type | None
748
-
749
- match typing.get_args(ftype):
750
- case (_child_type, _Types.NONETYPE) | (_Types.NONETYPE, _child_type):
751
- # good union of Nullable
752
-
753
- # if a field is optional, it is nullable:
754
- mut_kw["notnull"] = False
755
- return cls._annotation_to_pydal_fieldtype(_child_type, mut_kw)
756
- case _:
757
- # two types is not supported by the db!
758
- return None
759
- else:
760
- return None
761
-
762
- @classmethod
763
- def _to_field(cls, fname: str, ftype: type, **kw: Any) -> Field:
344
+ return t.cast(Table, super().__getitem__(str(key)))
345
+
346
+ def find_model(self, table_name: str) -> t.Type["TypedTable"] | None:
764
347
  """
765
- Convert an annotation into a pydal Field.
348
+ Retrieves a mapped table class by its name.
766
349
 
767
- Args:
768
- fname: name of the property
769
- ftype: annotation of the property
770
- kw: when using TypedField or a function returning it (e.g. StringField),
771
- keyword args can be used to pass any other settings you would normally to a pydal Field
350
+ This method searches for a table class matching the given table name
351
+ in the defined class map dictionary. If a match is found, the corresponding
352
+ table class is returned; otherwise, None is returned, indicating that no
353
+ table class matches the input name.
772
354
 
773
- -> pydal.Field(fname, ftype, **kw)
355
+ Args:
356
+ table_name: The name of the table to retrieve the mapped class for.
774
357
 
775
- Example:
776
- class MyTable:
777
- fname: ftype
778
- id: int
779
- name: str
780
- reference: Table
781
- other: TypedField(str, default="John Doe") # default will be in kwargs
358
+ Returns:
359
+ The mapped table class if it exists, otherwise None.
782
360
  """
783
- fname = cls.to_snake(fname)
361
+ return self._builder.class_map.get(table_name, None)
784
362
 
785
- # note: 'kw' is updated in `_annotation_to_pydal_fieldtype` by the kwargs provided to the TypedField(...)
786
- if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
787
- return cls._build_field(fname, converted_type, **kw)
788
- else:
789
- raise NotImplementedError(f"Unsupported type {ftype}/{type(ftype)}")
363
+ @property
364
+ def _class_map(self) -> dict[str, t.Type["TypedTable"]]:
365
+ # alias for backward-compatibility
366
+ return self._builder.class_map
790
367
 
791
368
  @staticmethod
792
369
  def to_snake(camel: str) -> str:
@@ -795,2250 +372,91 @@ class TypeDAL(pydal.DAL): # type: ignore
795
372
  """
796
373
  return to_snake(camel)
797
374
 
798
-
799
- class TableMeta(type):
800
- """
801
- This metaclass contains functionality on table classes, that doesn't exist on its instances.
802
-
803
- Example:
804
- class MyTable(TypedTable):
805
- some_field: TypedField[int]
806
-
807
- MyTable.update_or_insert(...) # should work
808
-
809
- MyTable.some_field # -> Field, can be used to query etc.
810
-
811
- row = MyTable.first() # returns instance of MyTable
812
-
813
- # row.update_or_insert(...) # shouldn't work!
814
-
815
- row.some_field # -> int, with actual data
816
-
817
- """
818
-
819
- # set up by db.define:
820
- # _db: TypeDAL | None = None
821
- # _table: Table | None = None
822
- _db: TypeDAL | None = None
823
- _table: Table | None = None
824
- _relationships: dict[str, Relationship[Any]] | None = None
825
-
826
- #########################
827
- # TypeDAL custom logic: #
828
- #########################
829
-
830
- def __set_internals__(self, db: pydal.DAL, table: Table, relationships: dict[str, Relationship[Any]]) -> None:
831
- """
832
- Store the related database and pydal table for later usage.
833
- """
834
- self._db = db
835
- self._table = table
836
- self._relationships = relationships
837
-
838
- def __getattr__(self, col: str) -> Optional[Field]:
839
- """
840
- Magic method used by TypedTableMeta to get a database field with dot notation on a class.
841
-
842
- Example:
843
- SomeTypedTable.col -> db.table.col (via TypedTableMeta.__getattr__)
844
-
845
- """
846
- if self._table:
847
- return getattr(self._table, col, None)
848
-
849
- return None
850
-
851
- def _ensure_table_defined(self) -> Table:
852
- if not self._table:
853
- raise EnvironmentError("@define or db.define is not called on this class yet!")
854
- return self._table
855
-
856
- def __iter__(self) -> typing.Generator[Field, None, None]:
857
- """
858
- Loop through the columns of this model.
859
- """
860
- table = self._ensure_table_defined()
861
- yield from iter(table)
862
-
863
- def __getitem__(self, item: str) -> Field:
864
- """
865
- Allow dict notation to get a column of this table (-> Field instance).
866
- """
867
- table = self._ensure_table_defined()
868
- return table[item]
869
-
870
- def __str__(self) -> str:
871
- """
872
- Normally, just returns the underlying table name, but with a fallback if the model is unbound.
873
- """
874
- if self._table:
875
- return str(self._table)
876
- else:
877
- return f"<unbound table {self.__name__}>"
878
-
879
- def from_row(self: Type[T_MetaInstance], row: pydal.objects.Row) -> T_MetaInstance:
880
- """
881
- Create a model instance from a pydal row.
882
- """
883
- return self(row)
884
-
885
- def all(self: Type[T_MetaInstance]) -> "TypedRows[T_MetaInstance]":
886
- """
887
- Return all rows for this model.
888
- """
889
- return self.collect()
890
-
891
- def get_relationships(self) -> dict[str, Relationship[Any]]:
892
- """
893
- Return the registered relationships of the current model.
894
- """
895
- return self._relationships or {}
896
-
897
- ##########################
898
- # TypeDAL Modified Logic #
899
- ##########################
900
-
901
- def insert(self: Type[T_MetaInstance], **fields: Any) -> T_MetaInstance:
375
+ def executesql(
376
+ self,
377
+ query: str | Template,
378
+ placeholders: t.Iterable[str] | dict[str, str] | None = None,
379
+ as_dict: bool = False,
380
+ fields: t.Iterable[Field | TypedField[t.Any]] | None = None,
381
+ colnames: t.Iterable[str] | None = None,
382
+ as_ordered_dict: bool = False,
383
+ ) -> list[t.Any]:
902
384
  """
903
- This is only called when db.define is not used as a decorator.
385
+ Executes a raw SQL statement or a TypeDAL template query.
904
386
 
905
- cls.__table functions as 'self'
387
+ If `query` is provided as a `Template` and the system supports template
388
+ rendering, it will be processed with `sql_escape_template` before being
389
+ executed. Otherwise, the query is passed to the underlying DAL as-is.
906
390
 
907
391
  Args:
908
- **fields: anything you want to insert in the database
909
-
910
- Returns: the ID of the new row.
392
+ query (str | Template): The SQL query to execute, either a plain
393
+ string or a `Template` (created via the `t""` syntax).
394
+ placeholders (Iterable[str] | dict[str, str] | None, optional):
395
+ Parameters to substitute into the SQL statement. Can be a sequence
396
+ (for positional parameters) or a dictionary (for named parameters).
397
+ Usually not applicable when using a t-string, since template
398
+ expressions handle interpolation directly.
399
+ as_dict (bool, optional): If True, return rows as dictionaries keyed by
400
+ column name. Defaults to False.
401
+ fields (Iterable[Field | TypedField] | None, optional): Explicit set of
402
+ fields to map results onto. Defaults to None.
403
+ colnames (Iterable[str] | None, optional): Explicit column names to use
404
+ in the result set. Defaults to None.
405
+ as_ordered_dict (bool, optional): If True, return rows as `OrderedDict`s
406
+ preserving column order. Defaults to False.
911
407
 
912
- """
913
- table = self._ensure_table_defined()
914
-
915
- result = table.insert(**fields)
916
- # it already is an int but mypy doesn't understand that
917
- return self(result)
918
-
919
- def _insert(self, **fields: Any) -> str:
920
- table = self._ensure_table_defined()
921
-
922
- return str(table._insert(**fields))
923
-
924
- def bulk_insert(self: Type[T_MetaInstance], items: list[AnyDict]) -> "TypedRows[T_MetaInstance]":
925
- """
926
- Insert multiple rows, returns a TypedRows set of new instances.
927
- """
928
- table = self._ensure_table_defined()
929
- result = table.bulk_insert(items)
930
- return self.where(lambda row: row.id.belongs(result)).collect()
931
-
932
- def update_or_insert(
933
- self: Type[T_MetaInstance], query: T_Query | AnyDict = DEFAULT, **values: Any
934
- ) -> T_MetaInstance:
935
- """
936
- Update a row if query matches, else insert a new one.
937
-
938
- Returns the created or updated instance.
939
- """
940
- table = self._ensure_table_defined()
941
-
942
- if query is DEFAULT:
943
- record = table(**values)
944
- elif isinstance(query, dict):
945
- record = table(**query)
946
- else:
947
- record = table(query)
948
-
949
- if not record:
950
- return self.insert(**values)
951
-
952
- record.update_record(**values)
953
- return self(record)
954
-
955
- def validate_and_insert(
956
- self: Type[T_MetaInstance], **fields: Any
957
- ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
958
- """
959
- Validate input data and then insert a row.
960
-
961
- Returns a tuple of (the created instance, a dict of errors).
962
- """
963
- table = self._ensure_table_defined()
964
- result = table.validate_and_insert(**fields)
965
- if row_id := result.get("id"):
966
- return self(row_id), None
967
- else:
968
- return None, result.get("errors")
969
-
970
- def validate_and_update(
971
- self: Type[T_MetaInstance], query: Query, **fields: Any
972
- ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
973
- """
974
- Validate input data and then update max 1 row.
975
-
976
- Returns a tuple of (the updated instance, a dict of errors).
977
- """
978
- table = self._ensure_table_defined()
979
-
980
- result = table.validate_and_update(query, **fields)
981
-
982
- if errors := result.get("errors"):
983
- return None, errors
984
- elif row_id := result.get("id"):
985
- return self(row_id), None
986
- else: # pragma: no cover
987
- # update on query without result (shouldn't happen)
988
- return None, None
989
-
990
- def validate_and_update_or_insert(
991
- self: Type[T_MetaInstance], query: Query, **fields: Any
992
- ) -> tuple[Optional[T_MetaInstance], Optional[dict[str, str]]]:
993
- """
994
- Validate input data and then update_and_insert (on max 1 row).
995
-
996
- Returns a tuple of (the updated/created instance, a dict of errors).
997
- """
998
- table = self._ensure_table_defined()
999
- result = table.validate_and_update_or_insert(query, **fields)
1000
-
1001
- if errors := result.get("errors"):
1002
- return None, errors
1003
- elif row_id := result.get("id"):
1004
- return self(row_id), None
1005
- else: # pragma: no cover
1006
- # update on query without result (shouldn't happen)
1007
- return None, None
1008
-
1009
- def select(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
1010
- """
1011
- See QueryBuilder.select!
1012
- """
1013
- return QueryBuilder(self).select(*a, **kw)
1014
-
1015
- def paginate(self: Type[T_MetaInstance], limit: int, page: int = 1) -> "PaginatedRows[T_MetaInstance]":
1016
- """
1017
- See QueryBuilder.paginate!
1018
- """
1019
- return QueryBuilder(self).paginate(limit=limit, page=page)
1020
-
1021
- def chunk(self: Type[T_MetaInstance], chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
1022
- """
1023
- See QueryBuilder.chunk!
1024
- """
1025
- return QueryBuilder(self).chunk(chunk_size)
1026
-
1027
- def where(self: Type[T_MetaInstance], *a: Any, **kw: Any) -> "QueryBuilder[T_MetaInstance]":
1028
- """
1029
- See QueryBuilder.where!
1030
- """
1031
- return QueryBuilder(self).where(*a, **kw)
1032
-
1033
- def cache(self: Type[T_MetaInstance], *deps: Any, **kwargs: Any) -> "QueryBuilder[T_MetaInstance]":
1034
- """
1035
- See QueryBuilder.cache!
1036
- """
1037
- return QueryBuilder(self).cache(*deps, **kwargs)
1038
-
1039
- def count(self: Type[T_MetaInstance]) -> int:
1040
- """
1041
- See QueryBuilder.count!
1042
- """
1043
- return QueryBuilder(self).count()
1044
-
1045
- def exists(self: Type[T_MetaInstance]) -> bool:
1046
- """
1047
- See QueryBuilder.exists!
1048
- """
1049
- return QueryBuilder(self).exists()
1050
-
1051
- def first(self: Type[T_MetaInstance]) -> T_MetaInstance | None:
1052
- """
1053
- See QueryBuilder.first!
1054
- """
1055
- return QueryBuilder(self).first()
1056
-
1057
- def first_or_fail(self: Type[T_MetaInstance]) -> T_MetaInstance:
1058
- """
1059
- See QueryBuilder.first_or_fail!
1060
- """
1061
- return QueryBuilder(self).first_or_fail()
1062
-
1063
- def join(
1064
- self: Type[T_MetaInstance],
1065
- *fields: str | Type["TypedTable"],
1066
- method: JOIN_OPTIONS = None,
1067
- on: OnQuery | list[Expression] | Expression = None,
1068
- condition: Condition = None,
1069
- condition_and: Condition = None,
1070
- ) -> "QueryBuilder[T_MetaInstance]":
1071
- """
1072
- See QueryBuilder.join!
1073
- """
1074
- return QueryBuilder(self).join(*fields, on=on, condition=condition, method=method, condition_and=condition_and)
1075
-
1076
- def collect(self: Type[T_MetaInstance], verbose: bool = False) -> "TypedRows[T_MetaInstance]":
1077
- """
1078
- See QueryBuilder.collect!
1079
- """
1080
- return QueryBuilder(self).collect(verbose=verbose)
1081
-
1082
- @property
1083
- def ALL(cls) -> pydal.objects.SQLALL:
1084
- """
1085
- Select all fields for this table.
1086
- """
1087
- table = cls._ensure_table_defined()
1088
-
1089
- return table.ALL
1090
-
1091
- ##########################
1092
- # TypeDAL Shadowed Logic #
1093
- ##########################
1094
- fields: list[str]
1095
-
1096
- # other table methods:
1097
-
1098
- def truncate(self, mode: str = "") -> None:
1099
- """
1100
- Remove all data and reset index.
1101
- """
1102
- table = self._ensure_table_defined()
1103
- table.truncate(mode)
1104
-
1105
- def drop(self, mode: str = "") -> None:
1106
- """
1107
- Remove the underlying table.
1108
- """
1109
- table = self._ensure_table_defined()
1110
- table.drop(mode)
1111
-
1112
- def create_index(self, name: str, *fields: Field | str, **kwargs: Any) -> bool:
1113
- """
1114
- Add an index on some columns of this table.
1115
- """
1116
- table = self._ensure_table_defined()
1117
- result = table.create_index(name, *fields, **kwargs)
1118
- return typing.cast(bool, result)
1119
-
1120
- def drop_index(self, name: str, if_exists: bool = False) -> bool:
1121
- """
1122
- Remove an index from this table.
1123
- """
1124
- table = self._ensure_table_defined()
1125
- result = table.drop_index(name, if_exists)
1126
- return typing.cast(bool, result)
1127
-
1128
- def import_from_csv_file(
1129
- self,
1130
- csvfile: typing.TextIO,
1131
- id_map: dict[str, str] = None,
1132
- null: Any = "<NULL>",
1133
- unique: str = "uuid",
1134
- id_offset: dict[str, int] = None, # id_offset used only when id_map is None
1135
- transform: typing.Callable[[dict[Any, Any]], dict[Any, Any]] = None,
1136
- validate: bool = False,
1137
- encoding: str = "utf-8",
1138
- delimiter: str = ",",
1139
- quotechar: str = '"',
1140
- quoting: int = csv.QUOTE_MINIMAL,
1141
- restore: bool = False,
1142
- **kwargs: Any,
1143
- ) -> None:
1144
- """
1145
- Load a csv file into the database.
1146
- """
1147
- table = self._ensure_table_defined()
1148
- table.import_from_csv_file(
1149
- csvfile,
1150
- id_map=id_map,
1151
- null=null,
1152
- unique=unique,
1153
- id_offset=id_offset,
1154
- transform=transform,
1155
- validate=validate,
1156
- encoding=encoding,
1157
- delimiter=delimiter,
1158
- quotechar=quotechar,
1159
- quoting=quoting,
1160
- restore=restore,
1161
- **kwargs,
408
+ Returns:
409
+ list[t.Any]: The query result set. Typically a list of tuples if
410
+ `as_dict` and `as_ordered_dict` are False, or a list of dict-like
411
+ objects if those flags are enabled.
412
+ """
413
+ if SYSTEM_SUPPORTS_TEMPLATES and isinstance(query, Template): # pragma: no cover
414
+ query = sql_escape_template(self, query)
415
+
416
+ rows: list[t.Any] = super().executesql(
417
+ query,
418
+ placeholders=placeholders,
419
+ as_dict=as_dict,
420
+ fields=fields,
421
+ colnames=colnames,
422
+ as_ordered_dict=as_ordered_dict,
1162
423
  )
1163
424
 
1164
- def on(self, query: Query | bool) -> Expression:
1165
- """
1166
- Shadow Table.on.
1167
-
1168
- Used for joins.
1169
-
1170
- See Also:
1171
- http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#One-to-many-relation
1172
- """
1173
- table = self._ensure_table_defined()
1174
- return typing.cast(Expression, table.on(query))
1175
-
1176
- def with_alias(self: Type[T_MetaInstance], alias: str) -> Type[T_MetaInstance]:
1177
- """
1178
- Shadow Table.with_alias.
1179
-
1180
- Useful for joins when joining the same table multiple times.
1181
-
1182
- See Also:
1183
- http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#One-to-many-relation
1184
- """
1185
- table = self._ensure_table_defined()
1186
- return typing.cast(Type[T_MetaInstance], table.with_alias(alias))
1187
-
1188
- def unique_alias(self: Type[T_MetaInstance]) -> Type[T_MetaInstance]:
1189
- """
1190
- Generates a unique alias for this table.
425
+ return rows
1191
426
 
1192
- Useful for joins when joining the same table multiple times
1193
- and you don't want to keep track of aliases yourself.
1194
- """
1195
- key = f"{self.__name__.lower()}_{hash(uuid.uuid4())}"
1196
- return self.with_alias(key)
1197
-
1198
- # hooks:
1199
- def before_insert(
1200
- cls: Type[T_MetaInstance],
1201
- fn: typing.Callable[[T_MetaInstance], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]],
1202
- ) -> Type[T_MetaInstance]:
1203
- """
1204
- Add a before insert hook.
1205
- """
1206
- if fn not in cls._before_insert:
1207
- cls._before_insert.append(fn)
1208
- return cls
1209
-
1210
- def after_insert(
1211
- cls: Type[T_MetaInstance],
1212
- fn: (
1213
- typing.Callable[[T_MetaInstance, Reference], Optional[bool]]
1214
- | typing.Callable[[OpRow, Reference], Optional[bool]]
1215
- ),
1216
- ) -> Type[T_MetaInstance]:
1217
- """
1218
- Add an after insert hook.
1219
- """
1220
- if fn not in cls._after_insert:
1221
- cls._after_insert.append(fn)
1222
- return cls
1223
-
1224
- def before_update(
1225
- cls: Type[T_MetaInstance],
1226
- fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
1227
- ) -> Type[T_MetaInstance]:
1228
- """
1229
- Add a before update hook.
1230
- """
1231
- if fn not in cls._before_update:
1232
- cls._before_update.append(fn)
1233
- return cls
1234
-
1235
- def after_update(
1236
- cls: Type[T_MetaInstance],
1237
- fn: typing.Callable[[Set, T_MetaInstance], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]],
1238
- ) -> Type[T_MetaInstance]:
1239
- """
1240
- Add an after update hook.
427
+ def sql_expression(
428
+ self,
429
+ sql_fragment: str | Template,
430
+ *raw_args: t.Any,
431
+ output_type: str | None = None,
432
+ **raw_kwargs: t.Any,
433
+ ) -> Expression:
1241
434
  """
1242
- if fn not in cls._after_update:
1243
- cls._after_update.append(fn)
1244
- return cls
435
+ Creates a pydal Expression object representing a raw SQL fragment.
1245
436
 
1246
- def before_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
1247
- """
1248
- Add a before delete hook.
1249
- """
1250
- if fn not in cls._before_delete:
1251
- cls._before_delete.append(fn)
1252
- return cls
437
+ Args:
438
+ sql_fragment: The raw SQL fragment.
439
+ In python 3.14+, this can also be a t-string. In that case, don't pass other args or kwargs.
440
+ *raw_args: Arguments to be interpolated into the SQL fragment.
441
+ output_type: The expected output type of the expression.
442
+ **raw_kwargs: Keyword arguments to be interpolated into the SQL fragment.
1253
443
 
1254
- def after_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
1255
- """
1256
- Add an after delete hook.
444
+ Returns:
445
+ A pydal Expression object.
1257
446
  """
1258
- if fn not in cls._after_delete:
1259
- cls._after_delete.append(fn)
1260
- return cls
1261
-
1262
-
1263
- class TypedField(Expression, typing.Generic[T_Value]): # pragma: no cover
1264
- """
1265
- Typed version of pydal.Field, which will be converted to a normal Field in the background.
1266
- """
1267
-
1268
- # will be set by .bind on db.define
1269
- name = ""
1270
- _db: Optional[pydal.DAL] = None
1271
- _rname: Optional[str] = None
1272
- _table: Optional[Table] = None
1273
- _field: Optional[Field] = None
1274
-
1275
- _type: T_annotation
1276
- kwargs: Any
447
+ return sql_expression(self, sql_fragment, *raw_args, output_type=output_type, **raw_kwargs)
1277
448
 
1278
- requires: Validator | typing.Iterable[Validator]
1279
-
1280
- # NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
1281
-
1282
- def __init__(
1283
- self,
1284
- _type: Type[T_Value] | types.UnionType = str, # type: ignore
1285
- /,
1286
- **settings: Unpack[FieldSettings],
1287
- ) -> None:
1288
- """
1289
- Typed version of pydal.Field, which will be converted to a normal Field in the background.
1290
449
 
1291
- Provide the Python type for this field as the first positional argument
1292
- and any other settings to Field() as keyword parameters.
1293
- """
1294
- self._type = _type
1295
- self.kwargs = settings
1296
- # super().__init__()
450
+ TypeDAL.representers.setdefault("rows_render", default_representer)
1297
451
 
1298
- @typing.overload
1299
- def __get__(self, instance: T_MetaInstance, owner: Type[T_MetaInstance]) -> T_Value: # pragma: no cover
1300
- """
1301
- row.field -> (actual data).
1302
- """
452
+ # note: these imports exist at the bottom of this file to prevent circular import issues:
1303
453
 
1304
- @typing.overload
1305
- def __get__(self, instance: None, owner: "Type[TypedTable]") -> "TypedField[T_Value]": # pragma: no cover
1306
- """
1307
- Table.field -> Field.
1308
- """
454
+ from .fields import * # noqa: E402 F403 # isort: skip ; to fill globals() scope
455
+ from .define import TableDefinitionBuilder # noqa: E402
456
+ from .rows import TypedSet # noqa: E402
457
+ from .tables import TypedTable # noqa: E402
1309
458
 
1310
- def __get__(
1311
- self, instance: T_MetaInstance | None, owner: Type[T_MetaInstance]
1312
- ) -> typing.Union[T_Value, "TypedField[T_Value]"]:
1313
- """
1314
- Since this class is a Descriptor field, \
1315
- it returns something else depending on if it's called on a class or instance.
1316
-
1317
- (this is mostly for mypy/typing)
1318
- """
1319
- if instance:
1320
- # this is only reached in a very specific case:
1321
- # an instance of the object was created with a specific set of fields selected (excluding the current one)
1322
- # in that case, no value was stored in the owner -> return None (since the field was not selected)
1323
- return typing.cast(T_Value, None) # cast as T_Value so mypy understands it for selected fields
1324
- else:
1325
- # getting as class -> return actual field so pydal understands it when using in query etc.
1326
- return typing.cast(TypedField[T_Value], self._field) # pretend it's still typed for IDE support
1327
-
1328
- def __str__(self) -> str:
1329
- """
1330
- String representation of a Typed Field.
1331
-
1332
- If `type` is set explicitly (e.g. TypedField(str, type="text")), that type is used: `TypedField.text`,
1333
- otherwise the type annotation is used (e.g. TypedField(str) -> TypedField.str)
1334
- """
1335
- return str(self._field) if self._field else ""
1336
-
1337
- def __repr__(self) -> str:
1338
- """
1339
- More detailed string representation of a Typed Field.
1340
-
1341
- Uses __str__ and adds the provided extra options (kwargs) in the representation.
1342
- """
1343
- s = self.__str__()
1344
-
1345
- if "type" in self.kwargs:
1346
- # manual type in kwargs supplied
1347
- t = self.kwargs["type"]
1348
- elif issubclass(type, type(self._type)):
1349
- # normal type, str.__name__ = 'str'
1350
- t = getattr(self._type, "__name__", str(self._type))
1351
- elif t_args := typing.get_args(self._type):
1352
- # list[str] -> 'str'
1353
- t = t_args[0].__name__
1354
- else: # pragma: no cover
1355
- # fallback - something else, may not even happen, I'm not sure
1356
- t = self._type
1357
-
1358
- s = f"TypedField[{t}].{s}" if s else f"TypedField[{t}]"
1359
-
1360
- kw = self.kwargs.copy()
1361
- kw.pop("type", None)
1362
- return f"<{s} with options {kw}>"
1363
-
1364
- def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
1365
- """
1366
- Convert a Typed Field instance to a pydal.Field.
1367
-
1368
- Actual logic in TypeDAL._to_field but this function creates the pydal type name and updates the kwarg settings.
1369
- """
1370
- other_kwargs = self.kwargs.copy()
1371
- extra_kwargs.update(other_kwargs) # <- modifies and overwrites the default kwargs with user-specified ones
1372
- return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
1373
-
1374
- def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
1375
- """
1376
- Bind the right db/table/field info to this class, so queries can be made using `Class.field == ...`.
1377
- """
1378
- self._table = table
1379
- self._field = field
1380
-
1381
- def __getattr__(self, key: str) -> Any:
1382
- """
1383
- If the regular getattribute does not work, try to get info from the related Field.
1384
- """
1385
- with contextlib.suppress(AttributeError):
1386
- return super().__getattribute__(key)
1387
-
1388
- # try on actual field:
1389
- return getattr(self._field, key)
1390
-
1391
- def __eq__(self, other: Any) -> Query:
1392
- """
1393
- Performing == on a Field will result in a Query.
1394
- """
1395
- return typing.cast(Query, self._field == other)
1396
-
1397
- def __ne__(self, other: Any) -> Query:
1398
- """
1399
- Performing != on a Field will result in a Query.
1400
- """
1401
- return typing.cast(Query, self._field != other)
1402
-
1403
- def __gt__(self, other: Any) -> Query:
1404
- """
1405
- Performing > on a Field will result in a Query.
1406
- """
1407
- return typing.cast(Query, self._field > other)
1408
-
1409
- def __lt__(self, other: Any) -> Query:
1410
- """
1411
- Performing < on a Field will result in a Query.
1412
- """
1413
- return typing.cast(Query, self._field < other)
1414
-
1415
- def __ge__(self, other: Any) -> Query:
1416
- """
1417
- Performing >= on a Field will result in a Query.
1418
- """
1419
- return typing.cast(Query, self._field >= other)
1420
-
1421
- def __le__(self, other: Any) -> Query:
1422
- """
1423
- Performing <= on a Field will result in a Query.
1424
- """
1425
- return typing.cast(Query, self._field <= other)
1426
-
1427
- def __hash__(self) -> int:
1428
- """
1429
- Shadow Field.__hash__.
1430
- """
1431
- return hash(self._field)
1432
-
1433
- def __invert__(self) -> Expression:
1434
- """
1435
- Performing ~ on a Field will result in an Expression.
1436
- """
1437
- if not self._field: # pragma: no cover
1438
- raise ValueError("Unbound Field can not be inverted!")
1439
-
1440
- return typing.cast(Expression, ~self._field)
1441
-
1442
- def lower(self) -> Expression:
1443
- """
1444
- For string-fields: compare lowercased values.
1445
- """
1446
- if not self._field: # pragma: no cover
1447
- raise ValueError("Unbound Field can not be lowered!")
1448
-
1449
- return typing.cast(Expression, self._field.lower())
1450
-
1451
- # ... etc
1452
-
1453
-
1454
- class _TypedTable:
1455
- """
1456
- This class is a final shared parent between TypedTable and Mixins.
1457
-
1458
- This needs to exist because otherwise the __on_define__ of Mixins are not executed.
1459
- Notably, this class exists at a level ABOVE the `metaclass=TableMeta`,
1460
- because otherwise typing gets confused when Mixins are used and multiple types could satisfy
1461
- generic 'T subclass of TypedTable'
1462
- -> Setting 'TypedTable' as the parent for a Mixin does not work at runtime (and only partially works at type-check time)
1463
- """
1464
-
1465
- id: "TypedField[int]"
1466
-
1467
- _before_insert: list[typing.Callable[[Self], Optional[bool]] | typing.Callable[[OpRow], Optional[bool]]]
1468
- _after_insert: list[
1469
- typing.Callable[[Self, Reference], Optional[bool]] | typing.Callable[[OpRow, Reference], Optional[bool]]
1470
- ]
1471
- _before_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
1472
- _after_update: list[typing.Callable[[Set, Self], Optional[bool]] | typing.Callable[[Set, OpRow], Optional[bool]]]
1473
- _before_delete: list[typing.Callable[[Set], Optional[bool]]]
1474
- _after_delete: list[typing.Callable[[Set], Optional[bool]]]
1475
-
1476
- @classmethod
1477
- def __on_define__(cls, db: TypeDAL) -> None:
1478
- """
1479
- Method that can be implemented by tables to do an action after db.define is completed.
1480
-
1481
- This can be useful if you need to add something like requires=IS_NOT_IN_DB(db, "table.field"),
1482
- where you need a reference to the current database, which may not exist yet when defining the model.
1483
- """
1484
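A sketch of the hook described above: attaching a validator that needs a live database reference, which only becomes safe once `db.define` has finished. The table name and validator below are illustrative; the setup mirrors the first sketch:

```
from pydal.validators import IS_NOT_IN_DB
from typedal import TypeDAL, TypedTable

db = TypeDAL("sqlite:memory")

@db.define()
class User(TypedTable):
    email: str

    @classmethod
    def __on_define__(cls, db: TypeDAL) -> None:
        # runs after the pydal table exists, so db/table references are safe here
        db.user.email.requires = IS_NOT_IN_DB(db, "user.email")
```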
-
1485
-
1486
- class TypedTable(_TypedTable, metaclass=TableMeta):
1487
- """
1488
- Enhanced modeling system on top of pydal's Table that adds typing and additional functionality.
1489
- """
1490
-
1491
- # set up by 'new':
1492
- _row: Row | None = None
1493
-
1494
- _with: list[str]
1495
-
1496
- def _setup_instance_methods(self) -> None:
1497
- self.as_dict = self._as_dict # type: ignore
1498
- self.__json__ = self.as_json = self._as_json # type: ignore
1499
- # self.as_yaml = self._as_yaml # type: ignore
1500
- self.as_xml = self._as_xml # type: ignore
1501
-
1502
- self.update = self._update # type: ignore
1503
-
1504
- self.delete_record = self._delete_record # type: ignore
1505
- self.update_record = self._update_record # type: ignore
1506
-
1507
- def __new__(
1508
- cls, row_or_id: typing.Union[Row, Query, pydal.objects.Set, int, str, None, "TypedTable"] = None, **filters: Any
1509
- ) -> Self:
1510
- """
1511
- Create a Typed Rows model instance from an existing row, ID or query.
1512
-
1513
- Examples:
1514
- MyTable(1)
1515
- MyTable(id=1)
1516
- MyTable(MyTable.id == 1)
1517
- """
1518
- table = cls._ensure_table_defined()
1519
- inst = super().__new__(cls)
1520
-
1521
- if isinstance(row_or_id, TypedTable):
1522
- # existing typed table instance!
1523
- return typing.cast(Self, row_or_id)
1524
-
1525
- elif isinstance(row_or_id, pydal.objects.Row):
1526
- row = row_or_id
1527
- elif row_or_id is not None:
1528
- row = table(row_or_id, **filters)
1529
- elif filters:
1530
- row = table(**filters)
1531
- else:
1532
- # dummy object
1533
- return inst
1534
-
1535
- if not row:
1536
- return None # type: ignore
1537
-
1538
- inst._row = row
1539
-
1540
- if hasattr(row, "id"):
1541
- inst.__dict__.update(row)
1542
- else:
1543
- # deal with _extra (and possibly others?)
1544
- # Row <{actual: {}, _extra: ...}>
1545
- inst.__dict__.update(row[str(cls)])
1546
-
1547
- inst._setup_instance_methods()
1548
- return inst
1549
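The constructor above accepts an id, keyword filters, a query, an existing row, or another instance; a short sketch, reusing the hypothetical `Person` table from the first sketch:

```
# Person/db as in the earlier illustrative setup
Person.insert(name="Alice", age=30)

by_id = Person(1)                    # lookup by primary key (assuming id 1 exists)
by_kwargs = Person(name="Alice")     # lookup by keyword filter
by_query = Person(Person.age > 18)   # lookup by query (first match)
missing = Person(name="Bob")         # -> None when nothing matches
```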
-
1550
- def __iter__(self) -> typing.Generator[Any, None, None]:
1551
- """
1552
- Allows looping through the columns.
1553
- """
1554
- row = self._ensure_matching_row()
1555
- yield from iter(row)
1556
-
1557
- def __getitem__(self, item: str) -> Any:
1558
- """
1559
- Allows dictionary notation to get columns.
1560
- """
1561
- if item in self.__dict__:
1562
- return self.__dict__.get(item)
1563
-
1564
- # fallback to lookup in row
1565
- if self._row:
1566
- return self._row[item]
1567
-
1568
- # nothing found!
1569
- raise KeyError(item)
1570
-
1571
- def __getattr__(self, item: str) -> Any:
1572
- """
1573
- Allows dot notation to get columns.
1574
- """
1575
- if value := self.get(item):
1576
- return value
1577
-
1578
- raise AttributeError(item)
1579
-
1580
- def get(self, item: str, default: Any = None) -> Any:
1581
- """
1582
- Try to get a column from this instance, else return default.
1583
- """
1584
- try:
1585
- return self.__getitem__(item)
1586
- except KeyError:
1587
- return default
1588
-
1589
- def __setitem__(self, key: str, value: Any) -> None:
1590
- """
1591
- Data can both be updated via dot and dict notation.
1592
- """
1593
- return setattr(self, key, value)
1594
-
1595
- def __int__(self) -> int:
1596
- """
1597
- Calling int on a model instance will return its id.
1598
- """
1599
- return getattr(self, "id", 0)
1600
-
1601
- def __bool__(self) -> bool:
1602
- """
1603
- If the instance has an underlying row with data, it is truthy.
1604
- """
1605
- return bool(getattr(self, "_row", False))
1606
-
1607
- def _ensure_matching_row(self) -> Row:
1608
- if not getattr(self, "_row", None):
1609
- raise EnvironmentError("Trying to access non-existant row. Maybe it was deleted or not yet initialized?")
1610
- return self._row
1611
-
1612
- def __repr__(self) -> str:
1613
- """
1614
- String representation of the model instance.
1615
- """
1616
- model_name = self.__class__.__name__
1617
- model_data = {}
1618
-
1619
- if self._row:
1620
- model_data = self._row.as_json()
1621
-
1622
- details = model_name
1623
- details += f"({model_data})"
1624
-
1625
- if relationships := getattr(self, "_with", []):
1626
- details += f" + {relationships}"
1627
-
1628
- return f"<{details}>"
1629
-
1630
- # serialization
1631
- # underscore variants work for class instances (set up by _setup_instance_methods)
1632
-
1633
- @classmethod
1634
- def as_dict(cls, flat: bool = False, sanitize: bool = True) -> AnyDict:
1635
- """
1636
- Dump the object to a plain dict.
1637
-
1638
- Can be used as both a class or instance method:
1639
- - dumps the table info if it's a class
1640
- - dumps the row info if it's an instance (see _as_dict)
1641
- """
1642
- table = cls._ensure_table_defined()
1643
- result = table.as_dict(flat, sanitize)
1644
- return typing.cast(AnyDict, result)
1645
-
1646
- @classmethod
1647
- def as_json(cls, sanitize: bool = True, indent: Optional[int] = None, **kwargs: Any) -> str:
1648
- """
1649
- Dump the object to json.
1650
-
1651
- Can be used as both a class or instance method:
1652
- - dumps the table info if it's a class
1653
- - dumps the row info if it's an instance (see _as_json)
1654
- """
1655
- data = cls.as_dict(sanitize=sanitize)
1656
- return as_json.encode(data, indent=indent, **kwargs)
1657
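The `as_dict`/`as_json` pair above is deliberately dual-purpose; a sketch of both call styles on the hypothetical `Person` table:

```
# on the class: table/schema information
table_info = Person.as_dict()

# on an instance: the row's data (plus any joined relationship data)
if alice := Person(name="Alice"):
    row_data = alice.as_dict()
    row_json = alice.as_json(indent=2)
```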
-
1658
- @classmethod
1659
- def as_xml(cls, sanitize: bool = True) -> str: # pragma: no cover
1660
- """
1661
- Dump the object to xml.
1662
-
1663
- Can be used as both a class or instance method:
1664
- - dumps the table info if it's a class
1665
- - dumps the row info if it's an instance (see _as_xml)
1666
- """
1667
- table = cls._ensure_table_defined()
1668
- return typing.cast(str, table.as_xml(sanitize))
1669
-
1670
- @classmethod
1671
- def as_yaml(cls, sanitize: bool = True) -> str:
1672
- """
1673
- Dump the object to yaml.
1674
-
1675
- Can be used as both a class or instance method:
1676
- - dumps the table info if it's a class
1677
- - dumps the row info if it's an instance (see _as_yaml)
1678
- """
1679
- table = cls._ensure_table_defined()
1680
- return typing.cast(str, table.as_yaml(sanitize))
1681
-
1682
- def _as_dict(
1683
- self, datetime_to_str: bool = False, custom_types: typing.Iterable[type] | type | None = None
1684
- ) -> AnyDict:
1685
- row = self._ensure_matching_row()
1686
-
1687
- result = row.as_dict(datetime_to_str=datetime_to_str, custom_types=custom_types)
1688
-
1689
- def asdict_method(obj: Any) -> Any: # pragma: no cover
1690
- if hasattr(obj, "_as_dict"): # typedal
1691
- return obj._as_dict()
1692
- elif hasattr(obj, "as_dict"): # pydal
1693
- return obj.as_dict()
1694
- else: # something else??
1695
- return obj.__dict__
1696
-
1697
- if _with := getattr(self, "_with", None):
1698
- for relationship in _with:
1699
- data = self.get(relationship)
1700
-
1701
- if isinstance(data, list):
1702
- data = [asdict_method(_) for _ in data]
1703
- elif data:
1704
- data = asdict_method(data)
1705
-
1706
- result[relationship] = data
1707
-
1708
- return typing.cast(AnyDict, result)
1709
-
1710
- def _as_json(
1711
- self,
1712
- default: typing.Callable[[Any], Any] = None,
1713
- indent: Optional[int] = None,
1714
- **kwargs: Any,
1715
- ) -> str:
1716
- data = self._as_dict()
1717
- return as_json.encode(data, default=default, indent=indent, **kwargs)
1718
-
1719
- def _as_xml(self, sanitize: bool = True) -> str: # pragma: no cover
1720
- row = self._ensure_matching_row()
1721
- return typing.cast(str, row.as_xml(sanitize))
1722
-
1723
- # def _as_yaml(self, sanitize: bool = True) -> str:
1724
- # row = self._ensure_matching_row()
1725
- # return typing.cast(str, row.as_yaml(sanitize))
1726
-
1727
- def __setattr__(self, key: str, value: Any) -> None:
1728
- """
1729
- When setting a property on a Typed Table model instance, also update the underlying row.
1730
- """
1731
- if self._row and key in self._row.__dict__ and not callable(value):
1732
- # enables `row.key = value; row.update_record()`
1733
- self._row[key] = value
1734
-
1735
- super().__setattr__(key, value)
1736
-
1737
- @classmethod
1738
- def update(cls: Type[T_MetaInstance], query: Query, **fields: Any) -> T_MetaInstance | None:
1739
- """
1740
- Update one record.
1741
-
1742
- Example:
1743
- MyTable.update(MyTable.id == 1, name="NewName") -> MyTable
1744
- """
1745
- # todo: update multiple?
1746
- if record := cls(query):
1747
- return record.update_record(**fields)
1748
- else:
1749
- return None
1750
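A sketch of the single-record update path documented above, next to the instance-level `update_record` defined just below (hypothetical `Person` table):

```
# class-level: find one record by query, update it, return the refreshed instance (or None)
renamed = Person.update(Person.name == "Alice", name="Alicia")

# instance-level: mutate attributes, then persist them with update_record()
if person := Person(name="Alicia"):
    person.age = 31
    person.update_record()
```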
-
1751
- def _update(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1752
- row = self._ensure_matching_row()
1753
- row.update(**fields)
1754
- self.__dict__.update(**fields)
1755
- return self
1756
-
1757
- def _update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance:
1758
- row = self._ensure_matching_row()
1759
- new_row = row.update_record(**fields)
1760
- self.update(**new_row)
1761
- return self
1762
-
1763
- def update_record(self: T_MetaInstance, **fields: Any) -> T_MetaInstance: # pragma: no cover
1764
- """
1765
- Here as a placeholder for _update_record.
1766
-
1767
- Will be replaced on instance creation!
1768
- """
1769
- return self._update_record(**fields)
1770
-
1771
- def _delete_record(self) -> int:
1772
- """
1773
- Actual logic in `pydal.helpers.classes.RecordDeleter`.
1774
- """
1775
- row = self._ensure_matching_row()
1776
- result = row.delete_record()
1777
- self.__dict__ = {} # empty self, since row is no more.
1778
- self._row = None # just to be sure
1779
- self._setup_instance_methods()
1780
- # ^ instance methods might've been deleted by emptying dict,
1781
- # but we still want .as_dict to show an error, not the table's as_dict.
1782
- return typing.cast(int, result)
1783
-
1784
- def delete_record(self) -> int: # pragma: no cover
1785
- """
1786
- Here as a placeholder for _delete_record.
1787
-
1788
- Will be replaced on instance creation!
1789
- """
1790
- return self._delete_record()
1791
-
1792
- # __del__ is also called on the end of a scope so don't remove records on every del!!
1793
-
1794
- # pickling:
1795
-
1796
- def __getstate__(self) -> AnyDict:
1797
- """
1798
- State to save when pickling.
1799
-
1800
- Prevents db connection from being pickled.
1801
- Similar to as_dict but without changing the data of the relationships (dill does that recursively)
1802
- """
1803
- row = self._ensure_matching_row()
1804
- result: AnyDict = row.as_dict()
1805
-
1806
- if _with := getattr(self, "_with", None):
1807
- result["_with"] = _with
1808
- for relationship in _with:
1809
- data = self.get(relationship)
1810
-
1811
- result[relationship] = data
1812
-
1813
- result["_row"] = self._row.as_json() if self._row else ""
1814
- return result
1815
-
1816
- def __setstate__(self, state: AnyDict) -> None:
1817
- """
1818
- Used by dill when loading from a bytestring.
1819
- """
1820
- # as_dict also includes table info, so dump as json to only get the actual row data
1821
- # then create a new (more empty) row object:
1822
- state["_row"] = Row(json.loads(state["_row"]))
1823
- self.__dict__ |= state
1824
-
1825
- @classmethod
1826
- def _sql(cls) -> str:
1827
- """
1828
- Generate SQL Schema for this table via pydal2sql (if 'migrations' extra is installed).
1829
- """
1830
- try:
1831
- import pydal2sql
1832
- except ImportError as e: # pragma: no cover
1833
- raise RuntimeError("Can not generate SQL without the 'migration' extra or `pydal2sql` installed!") from e
1834
-
1835
- return pydal2sql.generate_sql(cls)
1836
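A sketch of generating the schema SQL through the optional `pydal2sql` integration wired up above (requires the 'migrations' extra; the table is hypothetical):

```
# requires: pip install typedal[migrations]  (or pydal2sql directly)
ddl = Person._sql()
print(ddl)  # CREATE TABLE statement for the hypothetical "person" table
```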
-
1837
-
1838
- # backwards compat:
1839
- TypedRow = TypedTable
1840
-
1841
-
1842
- class TypedRows(typing.Collection[T_MetaInstance], Rows):
1843
- """
1844
- Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
1845
- """
1846
-
1847
- records: dict[int, T_MetaInstance]
1848
- # _rows: Rows
1849
- model: Type[T_MetaInstance]
1850
- metadata: Metadata
1851
-
1852
- # pseudo-properties: actually stored in _rows
1853
- db: TypeDAL
1854
- colnames: list[str]
1855
- fields: list[Field]
1856
- colnames_fields: list[Field]
1857
- response: list[tuple[Any, ...]]
1858
-
1859
- def __init__(
1860
- self,
1861
- rows: Rows,
1862
- model: Type[T_MetaInstance],
1863
- records: dict[int, T_MetaInstance] = None,
1864
- metadata: Metadata = None,
1865
- ) -> None:
1866
- """
1867
- Should not be called manually!
1868
-
1869
- Normally, the `records` from an existing `Rows` object are used
1870
- but these can be overwritten with a `records` dict.
1871
- `metadata` can be any (un)structured data
1872
- `model` is a Typed Table class
1873
- """
1874
-
1875
- def _get_id(row: Row) -> int:
1876
- """
1877
- Try to find the id field in a row.
1878
-
1879
- If _extra exists, the row changes:
1880
- <Row {'test_relationship': {'id': 1}, '_extra': {'COUNT("test_relationship"."querytable")': 8}}>
1881
- """
1882
- if idx := getattr(row, "id", None):
1883
- return typing.cast(int, idx)
1884
- elif main := getattr(row, str(model), None):
1885
- return typing.cast(int, main.id)
1886
- else: # pragma: no cover
1887
- raise NotImplementedError(f"`id` could not be found for {row}")
1888
-
1889
- records = records or {_get_id(row): model(row) for row in rows}
1890
- super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
1891
- self.model = model
1892
- self.metadata = metadata or {}
1893
- self.colnames = rows.colnames
1894
-
1895
- def __len__(self) -> int:
1896
- """
1897
- Return the count of rows.
1898
- """
1899
- return len(self.records)
1900
-
1901
- def __iter__(self) -> typing.Iterator[T_MetaInstance]:
1902
- """
1903
- Loop through the rows.
1904
- """
1905
- yield from self.records.values()
1906
-
1907
- def __contains__(self, ind: Any) -> bool:
1908
- """
1909
- Check if an id exists in this result set.
1910
- """
1911
- return ind in self.records
1912
-
1913
- def first(self) -> T_MetaInstance | None:
1914
- """
1915
- Get the row with the lowest id.
1916
- """
1917
- if not self.records:
1918
- return None
1919
-
1920
- return next(iter(self))
1921
-
1922
- def last(self) -> T_MetaInstance | None:
1923
- """
1924
- Get the row with the highest id.
1925
- """
1926
- if not self.records:
1927
- return None
1928
-
1929
- max_id = max(self.records.keys())
1930
- return self[max_id]
1931
-
1932
- def find(
1933
- self, f: typing.Callable[[T_MetaInstance], Query], limitby: tuple[int, int] = None
1934
- ) -> "TypedRows[T_MetaInstance]":
1935
- """
1936
- Returns a new Rows object, a subset of the original object, filtered by the function `f`.
1937
- """
1938
- if not self.records:
1939
- return self.__class__(self, self.model, {})
1940
-
1941
- records = {}
1942
- if limitby:
1943
- _min, _max = limitby
1944
- else:
1945
- _min, _max = 0, len(self)
1946
- count = 0
1947
- for i, row in self.records.items():
1948
- if f(row):
1949
- if _min <= count:
1950
- records[i] = row
1951
- count += 1
1952
- if count == _max:
1953
- break
1954
-
1955
- return self.__class__(self, self.model, records)
1956
-
1957
- def exclude(self, f: typing.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
1958
- """
1959
- Removes elements from the calling Rows object, filtered by the function `f`, \
1960
- and returns a new Rows object containing the removed elements.
1961
- """
1962
- if not self.records:
1963
- return self.__class__(self, self.model, {})
1964
- removed = {}
1965
- to_remove = []
1966
- for i in self.records:
1967
- row = self[i]
1968
- if f(row):
1969
- removed[i] = self.records[i]
1970
- to_remove.append(i)
1971
-
1972
- [self.records.pop(i) for i in to_remove]
1973
-
1974
- return self.__class__(
1975
- self,
1976
- self.model,
1977
- removed,
1978
- )
1979
-
1980
- def sort(self, f: typing.Callable[[T_MetaInstance], Any], reverse: bool = False) -> list[T_MetaInstance]:
1981
- """
1982
- Returns a list of sorted elements (not sorted in place).
1983
- """
1984
- return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
1985
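A short sketch of the in-memory helpers on a result set defined above; the `Person` table is hypothetical and `.where()`/`.collect()` come from the query builder further below:

```
rows = Person.where(Person.age > 0).collect()

by_age = rows.sort(lambda p: p.age)            # returns a sorted list (not in place)
adults = rows.find(lambda p: p.age >= 18)      # new TypedRows subset
minors = rows.exclude(lambda p: p.age < 18)    # removed from `rows`, returned separately
first, last = rows.first(), rows.last()        # lowest / highest id, or None when empty
```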
-
1986
- def __str__(self) -> str:
1987
- """
1988
- Simple string representation.
1989
- """
1990
- return f"<TypedRows with {len(self)} records>"
1991
-
1992
- def __repr__(self) -> str:
1993
- """
1994
- Print a table on repr().
1995
- """
1996
- data = self.as_dict()
1997
- try:
1998
- headers = list(next(iter(data.values())).keys())
1999
- except StopIteration:
2000
- headers = []
2001
-
2002
- return mktable(data, headers)
2003
-
2004
- def group_by_value(
2005
- self, *fields: "str | Field | TypedField[T]", one_result: bool = False, **kwargs: Any
2006
- ) -> dict[T, list[T_MetaInstance]]:
2007
- """
2008
- Group the rows by a specific field (which will be the dict key).
2009
- """
2010
- kwargs["one_result"] = one_result
2011
- result = super().group_by_value(*fields, **kwargs)
2012
- return typing.cast(dict[T, list[T_MetaInstance]], result)
2013
-
2014
- def as_csv(self) -> str:
2015
- """
2016
- Dump the data to csv.
2017
- """
2018
- return typing.cast(str, super().as_csv())
2019
-
2020
- def as_dict(
2021
- self,
2022
- key: str = None,
2023
- compact: bool = False,
2024
- storage_to_dict: bool = False,
2025
- datetime_to_str: bool = False,
2026
- custom_types: list[type] = None,
2027
- ) -> dict[int, AnyDict]:
2028
- """
2029
- Get the data in a dict of dicts.
2030
- """
2031
- if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
2032
- # functionality not guaranteed
2033
- return typing.cast(
2034
- dict[int, AnyDict],
2035
- super().as_dict(
2036
- key or "id",
2037
- compact,
2038
- storage_to_dict,
2039
- datetime_to_str,
2040
- custom_types,
2041
- ),
2042
- )
2043
-
2044
- return {k: v.as_dict() for k, v in self.records.items()}
2045
-
2046
- def as_json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
2047
- """
2048
- Turn the data into a dict and then dump to JSON.
2049
- """
2050
- data = self.as_list()
2051
-
2052
- return as_json.encode(data, default=default, indent=indent, **kwargs)
2053
-
2054
- def json(self, default: typing.Callable[[Any], Any] = None, indent: Optional[int] = None, **kwargs: Any) -> str:
2055
- """
2056
- Turn the data into a dict and then dump to JSON.
2057
- """
2058
- return self.as_json(default=default, indent=indent, **kwargs)
2059
-
2060
- def as_list(
2061
- self,
2062
- compact: bool = False,
2063
- storage_to_dict: bool = False,
2064
- datetime_to_str: bool = False,
2065
- custom_types: list[type] = None,
2066
- ) -> list[AnyDict]:
2067
- """
2068
- Get the data in a list of dicts.
2069
- """
2070
- if any([compact, storage_to_dict, datetime_to_str, custom_types]):
2071
- return typing.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
2072
-
2073
- return [_.as_dict() for _ in self.records.values()]
2074
-
2075
- def __getitem__(self, item: int) -> T_MetaInstance:
2076
- """
2077
- You can get a specific row by ID from a TypedRows by using rows[idx] notation.
2078
-
2079
- Since pydal's implementation differs (they expect a list instead of a dict with id keys),
2080
- using rows[0] will return the first row, regardless of its id.
2081
- """
2082
- try:
2083
- return self.records[item]
2084
- except KeyError as e:
2085
- if item == 0 and (row := self.first()):
2086
- # special case: pydal internals think Rows.records is a list, not a dict
2087
- return row
2088
-
2089
- raise e
2090
-
2091
- def get(self, item: int) -> typing.Optional[T_MetaInstance]:
2092
- """
2093
- Get a row by ID, or receive None if it isn't in this result set.
2094
- """
2095
- return self.records.get(item)
2096
-
2097
- def update(self, **new_values: Any) -> bool:
2098
- """
2099
- Update the current rows in the database with new_values.
2100
- """
2101
- # cast to make mypy understand .id is a TypedField and not an int!
2102
- table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
2103
-
2104
- ids = set(self.column("id"))
2105
- query = table.id.belongs(ids)
2106
- return bool(self.db(query).update(**new_values))
2107
-
2108
- def delete(self) -> bool:
2109
- """
2110
- Delete the currently selected rows from the database.
2111
- """
2112
- # cast to make mypy understand .id is a TypedField and not an int!
2113
- table = typing.cast(Type[TypedTable], self.model._ensure_table_defined())
2114
-
2115
- ids = set(self.column("id"))
2116
- query = table.id.belongs(ids)
2117
- return bool(self.db(query).delete())
2118
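A sketch of the set-level `update`/`delete` above, which re-query the database for the ids currently held in the result set (hypothetical table):

```
rows = Person.where(Person.age < 0).collect()

rows.update(age=0)   # one UPDATE ... WHERE id IN (...) for all selected records
rows.delete()        # one DELETE for the same id set
```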
-
2119
- def join(
2120
- self,
2121
- field: "Field | TypedField[Any]",
2122
- name: str = None,
2123
- constraint: Query = None,
2124
- fields: list[str | Field] = None,
2125
- orderby: Optional[str | Field] = None,
2126
- ) -> T_MetaInstance:
2127
- """
2128
- This can be used to JOIN with some relationships after the initial select.
2129
-
2130
- Using the querybuilder's .join() method is preferred!
2131
- """
2132
- result = super().join(field, name, constraint, fields or [], orderby)
2133
- return typing.cast(T_MetaInstance, result)
2134
-
2135
- def export_to_csv_file(
2136
- self,
2137
- ofile: typing.TextIO,
2138
- null: Any = "<NULL>",
2139
- delimiter: str = ",",
2140
- quotechar: str = '"',
2141
- quoting: int = csv.QUOTE_MINIMAL,
2142
- represent: bool = False,
2143
- colnames: list[str] = None,
2144
- write_colnames: bool = True,
2145
- *args: Any,
2146
- **kwargs: Any,
2147
- ) -> None:
2148
- """
2149
- Shadow export_to_csv_file from Rows, but with typing.
2150
-
2151
- See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
2152
- """
2153
- super().export_to_csv_file(
2154
- ofile,
2155
- null,
2156
- *args,
2157
- delimiter=delimiter,
2158
- quotechar=quotechar,
2159
- quoting=quoting,
2160
- represent=represent,
2161
- colnames=colnames or self.colnames,
2162
- write_colnames=write_colnames,
2163
- **kwargs,
2164
- )
2165
-
2166
- @classmethod
2167
- def from_rows(
2168
- cls, rows: Rows, model: Type[T_MetaInstance], metadata: Metadata = None
2169
- ) -> "TypedRows[T_MetaInstance]":
2170
- """
2171
- Internal method to convert a Rows object to a TypedRows.
2172
- """
2173
- return cls(rows, model, metadata=metadata)
2174
-
2175
- def __getstate__(self) -> AnyDict:
2176
- """
2177
- Used by dill to dump to bytes (exclude db connection etc).
2178
- """
2179
- return {
2180
- "metadata": json.dumps(self.metadata, default=str),
2181
- "records": self.records,
2182
- "model": str(self.model._table),
2183
- "colnames": self.colnames,
2184
- }
2185
-
2186
- def __setstate__(self, state: AnyDict) -> None:
2187
- """
2188
- Used by dill when loading from a bytestring.
2189
- """
2190
- state["metadata"] = json.loads(state["metadata"])
2191
- self.__dict__.update(state)
2192
- # db etc. set after undill by caching.py
2193
-
2194
-
2195
- from .caching import ( # noqa: E402
2196
- _remove_cache,
459
+ from .caching import ( # isort: skip # noqa: E402
2197
460
  _TypedalCache,
2198
461
  _TypedalCacheDependency,
2199
- create_and_hash_cache_key,
2200
- get_expire,
2201
- load_from_cache,
2202
- save_to_cache,
2203
462
  )
2204
-
2205
-
2206
- class QueryBuilder(typing.Generic[T_MetaInstance]):
2207
- """
2208
- Abstraction on top of pydal's query system.
2209
- """
2210
-
2211
- model: Type[T_MetaInstance]
2212
- query: Query
2213
- select_args: list[Any]
2214
- select_kwargs: SelectKwargs
2215
- relationships: dict[str, Relationship[Any]]
2216
- metadata: Metadata
2217
-
2218
- def __init__(
2219
- self,
2220
- model: Type[T_MetaInstance],
2221
- add_query: Optional[Query] = None,
2222
- select_args: Optional[list[Any]] = None,
2223
- select_kwargs: Optional[SelectKwargs] = None,
2224
- relationships: dict[str, Relationship[Any]] = None,
2225
- metadata: Metadata = None,
2226
- ):
2227
- """
2228
- Normally, you wouldn't manually initialize a QueryBuilder but start using a method on a TypedTable.
2229
-
2230
- Example:
2231
- MyTable.where(...) -> QueryBuilder[MyTable]
2232
- """
2233
- self.model = model
2234
- table = model._ensure_table_defined()
2235
- default_query = typing.cast(Query, table.id > 0)
2236
- self.query = add_query or default_query
2237
- self.select_args = select_args or []
2238
- self.select_kwargs = select_kwargs or {}
2239
- self.relationships = relationships or {}
2240
- self.metadata = metadata or {}
2241
-
2242
- def __str__(self) -> str:
2243
- """
2244
- Simple string representation for the query builder.
2245
- """
2246
- return f"QueryBuilder for {self.model}"
2247
-
2248
- def __repr__(self) -> str:
2249
- """
2250
- Advanced string representation for the query builder.
2251
- """
2252
- return (
2253
- f"<QueryBuilder for {self.model} with "
2254
- f"{len(self.select_args)} select args; "
2255
- f"{len(self.select_kwargs)} select kwargs; "
2256
- f"{len(self.relationships)} relationships; "
2257
- f"query: {bool(self.query)}; "
2258
- f"metadata: {self.metadata}; "
2259
- f">"
2260
- )
2261
-
2262
- def __bool__(self) -> bool:
2263
- """
2264
- Querybuilder is truthy if it has any conditions.
2265
- """
2266
- table = self.model._ensure_table_defined()
2267
- default_query = typing.cast(Query, table.id > 0)
2268
- return any(
2269
- [
2270
- self.query != default_query,
2271
- self.select_args,
2272
- self.select_kwargs,
2273
- self.relationships,
2274
- self.metadata,
2275
- ]
2276
- )
2277
-
2278
- def _extend(
2279
- self,
2280
- add_query: Optional[Query] = None,
2281
- overwrite_query: Optional[Query] = None,
2282
- select_args: Optional[list[Any]] = None,
2283
- select_kwargs: Optional[SelectKwargs] = None,
2284
- relationships: dict[str, Relationship[Any]] = None,
2285
- metadata: Metadata = None,
2286
- ) -> "QueryBuilder[T_MetaInstance]":
2287
- return QueryBuilder(
2288
- self.model,
2289
- (add_query & self.query) if add_query else overwrite_query or self.query,
2290
- (self.select_args + select_args) if select_args else self.select_args,
2291
- (self.select_kwargs | select_kwargs) if select_kwargs else self.select_kwargs,
2292
- (self.relationships | relationships) if relationships else self.relationships,
2293
- (self.metadata | (metadata or {})) if metadata else self.metadata,
2294
- )
2295
-
2296
- def select(self, *fields: Any, **options: Unpack[SelectKwargs]) -> "QueryBuilder[T_MetaInstance]":
2297
- """
2298
- Fields: database columns by name ('id'), by field reference (table.id) or other (e.g. table.ALL).
2299
-
2300
- Options:
2301
- paraphrased from the web2py pydal docs,
2302
- For more info, see http://www.web2py.com/books/default/chapter/29/06/the-database-abstraction-layer#orderby-groupby-limitby-distinct-having-orderby_on_limitby-join-left-cache
2303
-
2304
- orderby: field(s) to order by. Supported:
2305
- table.name - sort by name, ascending
2306
- ~table.name - sort by name, descending
2307
- <random> - sort randomly
2308
- table.name|table.id - sort by two fields (first name, then id)
2309
-
2310
- groupby, having: together with orderby:
2311
- groupby can be a field (e.g. table.name) to group records by
2312
- having can be a query, only those `having` the condition are grouped
2313
-
2314
- limitby: tuple of min and max. When using the query builder, .paginate(limit, page) is recommended.
2315
- distinct: bool/field. Only select rows that differ
2316
- orderby_on_limitby (bool, default: True): by default, an implicit orderby is added when doing limitby.
2317
- join: othertable.on(query) - do an INNER JOIN. Using TypeDAL relationships with .join() is recommended!
2318
- left: othertable.on(query) - do a LEFT JOIN. Using TypeDAL relationships with .join() is recommended!
2319
- cache: cache the query result to speed up repeated queries; e.g. (cache=(cache.ram, 3600), cacheable=True)
2320
- """
2321
- return self._extend(select_args=list(fields), select_kwargs=options)
2322
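A sketch of the select options documented above, combined with the query builder (hypothetical `Person` table):

```
builder = Person.where(Person.age >= 18).select(
    Person.name,
    Person.age,
    orderby=~Person.age,  # descending
    distinct=True,
)
rows = builder.collect()
```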
-
2323
- def where(
2324
- self,
2325
- *queries_or_lambdas: Query | typing.Callable[[Type[T_MetaInstance]], Query],
2326
- **filters: Any,
2327
- ) -> "QueryBuilder[T_MetaInstance]":
2328
- """
2329
- Extend the builder's query.
2330
-
2331
- Can be used in multiple ways:
2332
- .where(Query) -> with a direct query such as `Table.id == 5`
2333
- .where(lambda table: table.id == 5) -> with a query via a lambda
2334
- .where(id=5) -> via keyword arguments
2335
-
2336
- When using multiple where's, they will be ANDed:
2337
- .where(lambda table: table.id == 5).where(lambda table: table.id == 6) == (table.id == 5) & (table.id == 6)
2338
- When passing multiple queries to a single .where, they will be ORed:
2339
- .where(lambda table: table.id == 5, lambda table: table.id == 6) == (table.id == 5) | (table.id == 6)
2340
- """
2341
- new_query = self.query
2342
- table = self.model._ensure_table_defined()
2343
-
2344
- for field, value in filters.items():
2345
- new_query &= table[field] == value
2346
-
2347
- subquery: DummyQuery | Query = DummyQuery()
2348
- for query_or_lambda in queries_or_lambdas:
2349
- if isinstance(query_or_lambda, _Query):
2350
- subquery |= typing.cast(Query, query_or_lambda)
2351
- elif callable(query_or_lambda):
2352
- if result := query_or_lambda(self.model):
2353
- subquery |= result
2354
- elif isinstance(query_or_lambda, (Field, _Field)) or is_typed_field(query_or_lambda):
2355
- subquery |= typing.cast(Query, query_or_lambda != None)
2356
- else:
2357
- raise ValueError(f"Unexpected query type ({type(query_or_lambda)}).")
2358
-
2359
- if subquery:
2360
- new_query &= subquery
2361
-
2362
- return self._extend(overwrite_query=new_query)
2363
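A sketch of the AND/OR semantics described in the docstring above (hypothetical `Person` table):

```
# chained .where() calls are ANDed together:
q1 = Person.where(lambda p: p.age >= 18).where(lambda p: p.name != "admin")

# multiple conditions passed to a single .where() are ORed:
q2 = Person.where(lambda p: p.name == "Alice", lambda p: p.name == "Bob")

# keyword arguments are plain equality filters:
q3 = Person.where(name="Alice")

rows = q1.collect()
```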
-
2364
- def join(
2365
- self,
2366
- *fields: str | Type[TypedTable],
2367
- method: JOIN_OPTIONS = None,
2368
- on: OnQuery | list[Expression] | Expression = None,
2369
- condition: Condition = None,
2370
- condition_and: Condition = None,
2371
- ) -> "QueryBuilder[T_MetaInstance]":
2372
- """
2373
- Include relationship fields in the result.
2374
-
2375
- `fields` can be names of Relationships on the current model.
2376
- If no fields are passed, all will be used.
2377
-
2378
- By default, the `method` defined in the relationship is used.
2379
- This can be overwritten with the `method` keyword argument (left or inner)
2380
-
2381
- `condition_and` can be used to add extra conditions to an inner join.
2382
- """
2383
- # todo: allow limiting amount of related rows returned for join?
2384
- # todo: it would be nice if 'fields' could be an actual relationship
2385
- # (Article.tags = list[Tag]) and you could change the .condition and .on
2386
- # this could deprecate condition_and
2387
-
2388
- relationships = self.model.get_relationships()
2389
-
2390
- if condition and on:
2391
- raise ValueError("condition and on can not be used together!")
2392
- elif condition:
2393
- if len(fields) != 1:
2394
- raise ValueError("join(field, condition=...) can only be used with exactly one field!")
2395
-
2396
- if isinstance(condition, pydal.objects.Query):
2397
- condition = as_lambda(condition)
2398
-
2399
- relationships = {
2400
- str(fields[0]): Relationship(fields[0], condition=condition, join=method, condition_and=condition_and)
2401
- }
2402
- elif on:
2403
- if len(fields) != 1:
2404
- raise ValueError("join(field, on=...) can only be used with exactly one field!")
2405
-
2406
- if isinstance(on, pydal.objects.Expression):
2407
- on = [on]
2408
-
2409
- if isinstance(on, list):
2410
- on = as_lambda(on)
2411
- relationships = {str(fields[0]): Relationship(fields[0], on=on, join=method, condition_and=condition_and)}
2412
-
2413
- else:
2414
- if fields:
2415
- # join on every relationship
2416
- relationships = {str(k): relationships[str(k)].clone(condition_and=condition_and) for k in fields}
2417
-
2418
- if method:
2419
- relationships = {
2420
- str(k): r.clone(join=method, condition_and=condition_and) for k, r in relationships.items()
2421
- }
2422
-
2423
- return self._extend(relationships=relationships)
2424
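A sketch of joining relationship data after building a query, per the docstring above. It assumes the model already defines relationships (an `articles` relationship on `Person` here); the names are purely illustrative:

```
# join every defined relationship using its default method (left/inner):
rows = Person.where(Person.age >= 18).join().collect()

# join a single relationship and force a LEFT JOIN:
rows = Person.where(Person.age >= 18).join("articles", method="left").collect()

for person in rows:
    print(person.name, person.articles)  # related rows are attached per instance
```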
-
2425
- def cache(
2426
- self, *deps: Any, expires_at: Optional[dt.datetime] = None, ttl: Optional[int | dt.timedelta] = None
2427
- ) -> "QueryBuilder[T_MetaInstance]":
2428
- """
2429
- Enable caching for this query to load repeated calls from a dill row \
2430
- instead of executing the SQL and collecting matching rows again.
2431
- """
2432
- existing = self.metadata.get("cache", {})
2433
-
2434
- metadata: Metadata = {}
2435
-
2436
- cache_meta = typing.cast(
2437
- CacheMetadata,
2438
- self.metadata.get("cache", {})
2439
- | {
2440
- "enabled": True,
2441
- "depends_on": existing.get("depends_on", []) + [str(_) for _ in deps],
2442
- "expires_at": get_expire(expires_at=expires_at, ttl=ttl),
2443
- },
2444
- )
2445
-
2446
- metadata["cache"] = cache_meta
2447
- return self._extend(metadata=metadata)
2448
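A sketch of the query-result cache enabled above, assuming the TypeDAL caching tables are available in the database (`ttl`/`expires_at` as in the signature):

```
from datetime import timedelta

rows = (
    Person.where(Person.age >= 18)
    .cache(ttl=timedelta(hours=1))  # identical queries within the hour load from cache
    .collect()
)
```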
-
2449
- def _get_db(self) -> TypeDAL:
2450
- if db := self.model._db:
2451
- return db
2452
- else: # pragma: no cover
2453
- raise EnvironmentError("@define or db.define is not called on this class yet!")
2454
-
2455
- def _select_arg_convert(self, arg: Any) -> Any:
2456
- # typedfield are not really used at runtime anymore, but leave it in for safety:
2457
- if isinstance(arg, TypedField): # pragma: no cover
2458
- arg = arg._field
2459
-
2460
- return arg
2461
-
2462
- def delete(self) -> list[int]:
2463
- """
2464
- Based on the current query, delete rows and return a list of deleted IDs.
2465
- """
2466
- db = self._get_db()
2467
- removed_ids = [_.id for _ in db(self.query).select("id")]
2468
- if db(self.query).delete():
2469
- # success!
2470
- return removed_ids
2471
-
2472
- return []
2473
-
2474
- def _delete(self) -> str:
2475
- db = self._get_db()
2476
- return str(db(self.query)._delete())
2477
-
2478
- def update(self, **fields: Any) -> list[int]:
2479
- """
2480
- Based on the current query, update `fields` and return a list of updated IDs.
2481
- """
2482
- # todo: limit?
2483
- db = self._get_db()
2484
- updated_ids = db(self.query).select("id").column("id")
2485
- if db(self.query).update(**fields):
2486
- # success!
2487
- return updated_ids
2488
-
2489
- return []
2490
-
2491
- def _update(self, **fields: Any) -> str:
2492
- db = self._get_db()
2493
- return str(db(self.query)._update(**fields))
2494
-
2495
- def _before_query(self, mut_metadata: Metadata, add_id: bool = True) -> tuple[Query, list[Any], SelectKwargs]:
2496
- select_args = [self._select_arg_convert(_) for _ in self.select_args] or [self.model.ALL]
2497
- select_kwargs = self.select_kwargs.copy()
2498
- query = self.query
2499
- model = self.model
2500
- mut_metadata["query"] = query
2501
- # require at least id of main table:
2502
- select_fields = ", ".join([str(_) for _ in select_args])
2503
- tablename = str(model)
2504
-
2505
- if add_id and f"{tablename}.id" not in select_fields:
2506
- # fields of other selected, but required ID is missing.
2507
- select_args.append(model.id)
2508
-
2509
- if self.relationships:
2510
- query, select_args = self._handle_relationships_pre_select(query, select_args, select_kwargs, mut_metadata)
2511
-
2512
- return query, select_args, select_kwargs
2513
-
2514
- def to_sql(self, add_id: bool = False) -> str:
2515
- """
2516
- Generate the SQL for the built query.
2517
- """
2518
- db = self._get_db()
2519
-
2520
- query, select_args, select_kwargs = self._before_query({}, add_id=add_id)
2521
-
2522
- return str(db(query)._select(*select_args, **select_kwargs))
2523
-
2524
- def _collect(self) -> str:
2525
- """
2526
- Alias for to_sql, pydal-like syntax.
2527
- """
2528
- return self.to_sql()
2529
-
2530
- def _collect_cached(self, metadata: Metadata) -> "TypedRows[T_MetaInstance] | None":
2531
- expires_at = metadata["cache"].get("expires_at")
2532
- metadata["cache"] |= {
2533
- # key is partly dependent on cache metadata but not these:
2534
- "key": None,
2535
- "status": None,
2536
- "cached_at": None,
2537
- "expires_at": None,
2538
- }
2539
-
2540
- _, key = create_and_hash_cache_key(
2541
- self.model,
2542
- metadata,
2543
- self.query,
2544
- self.select_args,
2545
- self.select_kwargs,
2546
- self.relationships.keys(),
2547
- )
2548
-
2549
- # re-set after creating key:
2550
- metadata["cache"]["expires_at"] = expires_at
2551
- metadata["cache"]["key"] = key
2552
-
2553
- return load_from_cache(key, self._get_db())
2554
-
2555
- def execute(self, add_id: bool = False) -> Rows:
2556
- """
2557
- Raw version of .collect which only executes the SQL, without performing any magic afterwards.
2558
- """
2559
- db = self._get_db()
2560
- metadata = typing.cast(Metadata, self.metadata.copy())
2561
-
2562
- query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2563
-
2564
- return db(query).select(*select_args, **select_kwargs)
2565
-
2566
- def collect(
2567
- self, verbose: bool = False, _to: Type["TypedRows[Any]"] = None, add_id: bool = True
2568
- ) -> "TypedRows[T_MetaInstance]":
2569
- """
2570
- Execute the built query and turn it into model instances, while handling relationships.
2571
- """
2572
- if _to is None:
2573
- _to = TypedRows
2574
-
2575
- db = self._get_db()
2576
- metadata = typing.cast(Metadata, self.metadata.copy())
2577
-
2578
- if metadata.get("cache", {}).get("enabled") and (result := self._collect_cached(metadata)):
2579
- return result
2580
-
2581
- query, select_args, select_kwargs = self._before_query(metadata, add_id=add_id)
2582
-
2583
- metadata["sql"] = db(query)._select(*select_args, **select_kwargs)
2584
-
2585
- if verbose: # pragma: no cover
2586
- print(metadata["sql"])
2587
-
2588
- rows: Rows = db(query).select(*select_args, **select_kwargs)
2589
-
2590
- metadata["final_query"] = str(query)
2591
- metadata["final_args"] = [str(_) for _ in select_args]
2592
- metadata["final_kwargs"] = select_kwargs
2593
-
2594
- if verbose: # pragma: no cover
2595
- print(rows)
2596
-
2597
- if not self.relationships:
2598
- # easy
2599
- typed_rows = _to.from_rows(rows, self.model, metadata=metadata)
2600
-
2601
- else:
2602
- # harder: try to match rows to the belonging objects
2603
- # assume structure of {'table': <data>} per row.
2604
- # if that's not the case, return default behavior again
2605
- typed_rows = self._collect_with_relationships(rows, metadata=metadata, _to=_to)
2606
-
2607
- # only saves if requested in metadata:
2608
- return save_to_cache(typed_rows, rows)
2609
-
2610
- @typing.overload
2611
- def column(self, field: TypedField[T]) -> list[T]:
2612
- """
2613
- If a TypedField is passed, the output type can be safely determined.
2614
- """
2615
-
2616
- @typing.overload
2617
- def column(self, field: T) -> list[T]:
2618
- """
2619
- Otherwise, the output type is loosely determined (assumes `field: type` or Any).
2620
- """
2621
-
2622
- def column(self, field: TypedField[T] | T) -> list[T]:
2623
- """
2624
- Get all values in a specific column.
2625
-
2626
- Shortcut for `.select(field).execute().column(field)`.
2627
- """
2628
- return self.select(field).execute().column(field)
2629
-
2630
- def _handle_relationships_pre_select(
2631
- self,
2632
- query: Query,
2633
- select_args: list[Any],
2634
- select_kwargs: SelectKwargs,
2635
- metadata: Metadata,
2636
- ) -> tuple[Query, list[Any]]:
2637
- db = self._get_db()
2638
- model = self.model
2639
-
2640
- metadata["relationships"] = set(self.relationships.keys())
2641
-
2642
- join = []
2643
- for key, relation in self.relationships.items():
2644
- if not relation.condition or relation.join != "inner":
2645
- continue
2646
-
2647
- other = relation.get_table(db)
2648
- other = other.with_alias(f"{key}_{hash(relation)}")
2649
- condition = relation.condition(model, other)
2650
- if callable(relation.condition_and):
2651
- condition &= relation.condition_and(model, other)
2652
-
2653
- join.append(other.on(condition))
2654
-
2655
- if limitby := select_kwargs.pop("limitby", ()):
2656
- # if limitby + relationships:
2657
- # 1. get IDs of main table entries that match 'query'
2658
- # 2. change query to .belongs(id)
2659
- # 3. add joins etc
2660
-
2661
- kwargs: SelectKwargs = select_kwargs | {"limitby": limitby}
2662
- # if orderby := select_kwargs.get("orderby"):
2663
- # kwargs["orderby"] = orderby
2664
-
2665
- if join:
2666
- kwargs["join"] = join
2667
-
2668
- ids = db(query)._select(model.id, **kwargs)
2669
- query = model.id.belongs(ids)
2670
- metadata["ids"] = ids
2671
-
2672
- if join:
2673
- select_kwargs["join"] = join
2674
-
2675
- left = []
2676
-
2677
- for key, relation in self.relationships.items():
2678
- other = relation.get_table(db)
2679
- method: JOIN_OPTIONS = relation.join or DEFAULT_JOIN_OPTION
2680
-
2681
- select_fields = ", ".join([str(_) for _ in select_args])
2682
- pre_alias = str(other)
2683
-
2684
- if f"{other}." not in select_fields:
2685
- # no fields of other selected. add .ALL:
2686
- select_args.append(other.ALL)
2687
- elif f"{other}.id" not in select_fields:
2688
- # fields of other selected, but required ID is missing.
2689
- select_args.append(other.id)
2690
-
2691
- if relation.on:
2692
- # if it has a .on, it's always a left join!
2693
- on = relation.on(model, other)
2694
- if not isinstance(on, list): # pragma: no cover
2695
- on = [on]
2696
-
2697
- on = [
2698
- _
2699
- for _ in on
2700
- # only allow Expressions (query and such):
2701
- if isinstance(_, pydal.objects.Expression)
2702
- ]
2703
- left.extend(on)
2704
- elif method == "left":
2705
- # .on not given, generate it:
2706
- other = other.with_alias(f"{key}_{hash(relation)}")
2707
- condition = typing.cast(Query, relation.condition(model, other))
2708
- if callable(relation.condition_and):
2709
- condition &= relation.condition_and(model, other)
2710
- left.append(other.on(condition))
2711
- else:
2712
- # else: inner join (handled earlier)
2713
- other = other.with_alias(f"{key}_{hash(relation)}") # only for replace
2714
-
2715
- # if no fields of 'other' are included, add other.ALL
2716
- # else: only add other.id if missing
2717
- select_fields = ", ".join([str(_) for _ in select_args])
2718
-
2719
- post_alias = str(other).split(" AS ")[-1]
2720
- if pre_alias != post_alias:
2721
- # replace .select's with aliased:
2722
- select_fields = select_fields.replace(
2723
- f"{pre_alias}.",
2724
- f"{post_alias}.",
2725
- )
2726
-
2727
- select_args = select_fields.split(", ")
2728
-
2729
- select_kwargs["left"] = left
2730
- return query, select_args
2731
-
2732
- def _collect_with_relationships(
2733
- self, rows: Rows, metadata: Metadata, _to: Type["TypedRows[Any]"]
2734
- ) -> "TypedRows[T_MetaInstance]":
2735
- """
2736
- Transform the raw rows into Typed Table model instances.
2737
- """
2738
- db = self._get_db()
2739
- main_table = self.model._ensure_table_defined()
2740
-
2741
- records = {}
2742
- seen_relations: dict[str, set[str]] = defaultdict(set) # main id -> set of col + id for relation
2743
-
2744
- for row in rows:
2745
- main = row[main_table]
2746
- main_id = main.id
2747
-
2748
- if main_id not in records:
2749
- records[main_id] = self.model(main)
2750
- records[main_id]._with = list(self.relationships.keys())
2751
-
2752
- # setup up all relationship defaults (once)
2753
- for col, relationship in self.relationships.items():
2754
- records[main_id][col] = [] if relationship.multiple else None
2755
-
2756
- # now add other relationship data
2757
- for column, relation in self.relationships.items():
2758
- relationship_column = f"{column}_{hash(relation)}"
2759
-
2760
- # relationship_column works for aliases with the same target column.
2761
- # if col + relationship not in the row, just use the regular name.
2762
-
2763
- relation_data = (
2764
- row[relationship_column] if relationship_column in row else row[relation.get_table_name()]
2765
- )
2766
-
2767
- if relation_data.id is None:
2768
- # always skip None ids
2769
- continue
2770
-
2771
- if f"{column}-{relation_data.id}" in seen_relations[main_id]:
2772
- # speed up duplicates
2773
- continue
2774
- else:
2775
- seen_relations[main_id].add(f"{column}-{relation_data.id}")
2776
-
2777
- relation_table = relation.get_table(db)
2778
- # hopefully an instance of a typed table and a regular row otherwise:
2779
- instance = relation_table(relation_data) if looks_like(relation_table, TypedTable) else relation_data
2780
-
2781
- if relation.multiple:
2782
- # create list of T
2783
- if not isinstance(records[main_id].get(column), list): # pragma: no cover
2784
- # should already be set up before!
2785
- setattr(records[main_id], column, [])
2786
-
2787
- records[main_id][column].append(instance)
2788
- else:
2789
- # create single T
2790
- records[main_id][column] = instance
2791
-
2792
- return _to(rows, self.model, records, metadata=metadata)
2793
-
2794
- def collect_or_fail(self, exception: Exception = None) -> "TypedRows[T_MetaInstance]":
2795
- """
2796
- Call .collect() and raise an error if nothing found.
2797
-
2798
- Basically unwraps Optional type.
2799
- """
2800
- if result := self.collect():
2801
- return result
2802
-
2803
- if not exception:
2804
- exception = ValueError("Nothing found!")
2805
-
2806
- raise exception
2807
-
2808
- def __iter__(self) -> typing.Generator[T_MetaInstance, None, None]:
2809
- """
2810
- You can start iterating a Query Builder object before calling collect, for ease of use.
2811
- """
2812
- yield from self.collect()
2813
-
2814
- def __count(self, db: TypeDAL, distinct: bool = None) -> Query:
2815
- # internal, shared logic between .count and ._count
2816
- model = self.model
2817
- query = self.query
2818
- for key, relation in self.relationships.items():
2819
- if (not relation.condition or relation.join != "inner") and not distinct:
2820
- continue
2821
-
2822
- other = relation.get_table(db)
2823
- if not distinct:
2824
- # todo: can this lead to other issues?
2825
- other = other.with_alias(f"{key}_{hash(relation)}")
2826
- query &= relation.condition(model, other)
2827
-
2828
- return query
2829
-
2830
- def count(self, distinct: bool = None) -> int:
2831
- """
2832
- Return the number of rows matching the current query.
2833
- """
2834
- db = self._get_db()
2835
- query = self.__count(db, distinct=distinct)
2836
-
2837
- return db(query).count(distinct)
2838
-
2839
- def _count(self, distinct: bool = None) -> str:
2840
- """
2841
- Return the SQL for .count().
2842
- """
2843
- db = self._get_db()
2844
- query = self.__count(db, distinct=distinct)
2845
-
2846
- return typing.cast(str, db(query)._count(distinct))
2847
-
2848
- def exists(self) -> bool:
2849
- """
2850
- Determines if any records exist matching the current query.
2851
-
2852
- Returns True if one or more records exist; otherwise, False.
2853
-
2854
- Returns:
2855
- bool: A boolean indicating whether any records exist.
2856
- """
2857
- return bool(self.count())
2858
-
2859
- def __paginate(
2860
- self,
2861
- limit: int,
2862
- page: int = 1,
2863
- ) -> "QueryBuilder[T_MetaInstance]":
2864
- available = self.count()
2865
-
2866
- _from = limit * (page - 1)
2867
- _to = (limit * page) if limit else available
2868
-
2869
- metadata: Metadata = {}
2870
-
2871
- metadata["pagination"] = {
2872
- "limit": limit,
2873
- "current_page": page,
2874
- "max_page": math.ceil(available / limit) if limit else 1,
2875
- "rows": available,
2876
- "min_max": (_from, _to),
2877
- }
2878
-
2879
- return self._extend(select_kwargs={"limitby": (_from, _to)}, metadata=metadata)
2880
-
2881
- def paginate(self, limit: int, page: int = 1, verbose: bool = False) -> "PaginatedRows[T_MetaInstance]":
2882
- """
2883
- Paginate transforms the more readable `page` and `limit` to pydals internal limit and offset.
2884
-
2885
- Note: when using relationships, this limit is only applied to the 'main' table and any number of extra rows \
2886
- can be loaded with relationship data!
2887
- """
2888
- builder = self.__paginate(limit, page)
2889
-
2890
- rows = typing.cast(PaginatedRows[T_MetaInstance], builder.collect(verbose=verbose, _to=PaginatedRows))
2891
-
2892
- rows._query_builder = builder
2893
- return rows
2894
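A sketch of pagination and the metadata exposed by `PaginatedRows` (defined further below); the table is hypothetical:

```
page = Person.where(Person.age >= 0).paginate(limit=10, page=1)

info = page.pagination
print(info["total_pages"], info["has_next_page"])

if info["has_next_page"]:
    page = page.next()  # raises StopIteration once past the final page
```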
-
2895
- def _paginate(
2896
- self,
2897
- limit: int,
2898
- page: int = 1,
2899
- ) -> str:
2900
- builder = self.__paginate(limit, page)
2901
- return builder._collect()
2902
-
2903
- def chunk(self, chunk_size: int) -> typing.Generator["TypedRows[T_MetaInstance]", Any, None]:
2904
- """
2905
- Generator that yields rows from a paginated source in chunks.
2906
-
2907
- This function retrieves rows from a paginated data source in chunks of the
2908
- specified `chunk_size` and yields them as TypedRows.
2909
-
2910
- Example:
2911
- ```
2912
- for chunk_of_rows in Table.where(SomeTable.id > 5).chunk(100):
2913
- for row in chunk_of_rows:
2914
- # Process each row within the chunk.
2915
- pass
2916
- ```
2917
- """
2918
- page = 1
2919
-
2920
- while rows := self.__paginate(chunk_size, page).collect():
2921
- yield rows
2922
- page += 1
2923
-
2924
- def first(self, verbose: bool = False) -> T_MetaInstance | None:
2925
- """
2926
- Get the first row matching the currently built query.
2927
-
2928
- Also adds paginate, since it would be a waste to select more rows than needed.
2929
- """
2930
- if row := self.paginate(page=1, limit=1, verbose=verbose).first():
2931
- return self.model.from_row(row)
2932
- else:
2933
- return None
2934
-
2935
- def _first(self) -> str:
2936
- return self._paginate(page=1, limit=1)
2937
-
2938
- def first_or_fail(self, exception: Exception = None, verbose: bool = False) -> T_MetaInstance:
2939
- """
2940
- Call .first() and raise an error if nothing found.
2941
-
2942
- Basically unwraps Optional type.
2943
- """
2944
- if inst := self.first(verbose=verbose):
2945
- return inst
2946
-
2947
- if not exception:
2948
- exception = ValueError("Nothing found!")
2949
-
2950
- raise exception
2951
-
2952
-
2953
- S = typing.TypeVar("S")
2954
-
2955
-
2956
- class PaginatedRows(TypedRows[T_MetaInstance]):
2957
- """
2958
- Extension on top of rows that is used when calling .paginate() instead of .collect().
2959
- """
2960
-
2961
- _query_builder: QueryBuilder[T_MetaInstance]
2962
-
2963
- @property
2964
- def data(self) -> list[T_MetaInstance]:
2965
- """
2966
- Get the underlying data.
2967
- """
2968
- return list(self.records.values())
2969
-
2970
- @property
2971
- def pagination(self) -> Pagination:
2972
- """
2973
- Get all page info.
2974
- """
2975
- pagination_data = self.metadata["pagination"]
2976
-
2977
- has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
2978
- has_prev_page = pagination_data["current_page"] > 1
2979
- return {
2980
- "total_items": pagination_data["rows"],
2981
- "current_page": pagination_data["current_page"],
2982
- "per_page": pagination_data["limit"],
2983
- "total_pages": pagination_data["max_page"],
2984
- "has_next_page": has_next_page,
2985
- "has_prev_page": has_prev_page,
2986
- "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
2987
- "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
2988
- }
2989
-
2990
- def next(self) -> Self:
2991
- """
2992
- Get the next page.
2993
- """
2994
- data = self.metadata["pagination"]
2995
- if data["current_page"] >= data["max_page"]:
2996
- raise StopIteration("Final Page")
2997
-
2998
- return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
2999
-
3000
- def previous(self) -> Self:
3001
- """
3002
- Get the previous page.
3003
- """
3004
- data = self.metadata["pagination"]
3005
- if data["current_page"] <= 1:
3006
- raise StopIteration("First Page")
3007
-
3008
- return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
3009
-
3010
- def as_dict(self, *_: Any, **__: Any) -> PaginateDict: # type: ignore
3011
- """
3012
- Convert to a dictionary with pagination info and original data.
3013
-
3014
- All arguments are ignored!
3015
- """
3016
- return {"data": super().as_dict(), "pagination": self.pagination}
3017
-
3018
-
3019
- class TypedSet(pydal.objects.Set): # type: ignore # pragma: no cover
3020
- """
3021
- Used to make pydal Set more typed.
3022
-
3023
- This class is not actually used, only 'cast' by TypeDAL.__call__
3024
- """
3025
-
3026
- def count(self, distinct: bool = None, cache: AnyDict = None) -> int:
3027
- """
3028
- Count returns an int.
3029
- """
3030
- result = super().count(distinct, cache)
3031
- return typing.cast(int, result)
3032
-
3033
- def select(self, *fields: Any, **attributes: Any) -> TypedRows[T_MetaInstance]:
3034
- """
3035
- Select returns a TypedRows of a user defined table.
3036
-
3037
- Example:
3038
- result: TypedRows[MyTable] = db(MyTable.id > 0).select()
3039
-
3040
- for row in result:
3041
- reveal_type(row) # MyTable
3042
- """
3043
- rows = super().select(*fields, **attributes)
3044
- return typing.cast(TypedRows[T_MetaInstance], rows)