TypeDAL 3.17.3__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of TypeDAL might be problematic.

typedal/rows.py ADDED
@@ -0,0 +1,524 @@
+ """
+ Contains base functionality related to Rows (raw result of a database query).
+ """
+
+ from __future__ import annotations
+
+ import csv
+ import json
+ import typing as t
+
+ import pydal.objects
+
+ from .core import TypeDAL
+ from .helpers import mktable
+ from .query_builder import QueryBuilder
+ from .serializers import as_json
+ from .tables import TypedTable
+ from .types import (
+     AnyDict,
+     Field,
+     Metadata,
+     PaginateDict,
+     Pagination,
+     Query,
+     Row,
+     Rows,
+     T,
+     T_MetaInstance,
+ )
+
+
+ class TypedRows(t.Collection[T_MetaInstance], Rows):
+     """
+     Slightly enhanced and typed functionality on top of pydal Rows (the result of a select).
+     """
+
+     records: dict[int, T_MetaInstance]
+     # _rows: Rows
+     model: t.Type[T_MetaInstance]
+     metadata: Metadata
+
+     # pseudo-properties: actually stored in _rows
+     db: TypeDAL
+     colnames: list[str]
+     fields: list[Field]
+     colnames_fields: list[Field]
+     response: list[tuple[t.Any, ...]]
+
+     def __init__(
+         self,
+         rows: Rows,
+         model: t.Type[T_MetaInstance],
+         records: dict[int, T_MetaInstance] = None,
+         metadata: Metadata = None,
+         raw: dict[int, list[Row]] = None,
+     ) -> None:
+         """
+         Should not be called manually!
+
+         Normally, the `records` from an existing `Rows` object are used
+         but these can be overwritten with a `records` dict.
+         `metadata` can be any (un)structured data
+         `model` is a TypedTable class
+         """
+
+         def _get_id(row: Row) -> int:
+             """
+             Try to find the id field in a row.
+
+             If _extra exists, the row changes:
+             <Row {'test_relationship': {'id': 1}, '_extra': {'COUNT("test_relationship"."querytable")': 8}}>
+             """
+             if idx := getattr(row, "id", None):
+                 return t.cast(int, idx)
+             elif main := getattr(row, str(model), None):
+                 return t.cast(int, main.id)
+             else:  # pragma: no cover
+                 raise NotImplementedError(f"`id` could not be found for {row}")
+
+         records = records or {_get_id(row): model(row) for row in rows}
+         raw = raw or {}
+
+         for idx, entity in records.items():
+             entity._rows = tuple(raw.get(idx, []))
+
+         super().__init__(rows.db, records, rows.colnames, rows.compact, rows.response, rows.fields)
+         self.model = model
+         self.metadata = metadata or {}
+         self.colnames = rows.colnames
+
+     def __len__(self) -> int:
+         """
+         Return the count of rows.
+         """
+         return len(self.records)
+
+     def __iter__(self) -> t.Iterator[T_MetaInstance]:
+         """
+         Loop through the rows.
+         """
+         yield from self.records.values()
+
+     def __contains__(self, ind: t.Any) -> bool:
+         """
+         Check if an id exists in this result set.
+         """
+         return ind in self.records
+
+     def first(self) -> T_MetaInstance | None:
+         """
+         Get the row with the lowest id.
+         """
+         if not self.records:
+             return None
+
+         return next(iter(self))
+
+     def last(self) -> T_MetaInstance | None:
+         """
+         Get the row with the highest id.
+         """
+         if not self.records:
+             return None
+
+         max_id = max(self.records.keys())
+         return self[max_id]
+
+     def find(
+         self,
+         f: t.Callable[[T_MetaInstance], Query],
+         limitby: tuple[int, int] = None,
+     ) -> "TypedRows[T_MetaInstance]":
+         """
+         Returns a new Rows object, a subset of the original object, filtered by the function `f`.
+         """
+         if not self.records:
+             return self.__class__(self, self.model, {})
+
+         records = {}
+         if limitby:
+             _min, _max = limitby
+         else:
+             _min, _max = 0, len(self)
+         count = 0
+         for i, row in self.records.items():
+             if f(row):
+                 if _min <= count:
+                     records[i] = row
+                 count += 1
+                 if count == _max:
+                     break
+
+         return self.__class__(self, self.model, records)
+
+     def exclude(self, f: t.Callable[[T_MetaInstance], Query]) -> "TypedRows[T_MetaInstance]":
+         """
+         Removes elements from the calling Rows object, filtered by the function `f`, \
+         and returns a new Rows object containing the removed elements.
+         """
+         if not self.records:
+             return self.__class__(self, self.model, {})
+         removed = {}
+         to_remove = []
+         for i in self.records:
+             row = self[i]
+             if f(row):
+                 removed[i] = self.records[i]
+                 to_remove.append(i)
+
+         [self.records.pop(i) for i in to_remove]
+
+         return self.__class__(
+             self,
+             self.model,
+             removed,
+         )
+
+     def sort(self, f: t.Callable[[T_MetaInstance], t.Any], reverse: bool = False) -> list[T_MetaInstance]:
+         """
+         Returns a list of sorted elements (not sorted in place).
+         """
+         return [r for (r, s) in sorted(zip(self.records.values(), self), key=lambda r: f(r[1]), reverse=reverse)]
+
+     def __str__(self) -> str:
+         """
+         Simple string representation.
+         """
+         return f"<TypedRows with {len(self)} records>"
+
+     def __repr__(self) -> str:
+         """
+         Print a table on repr().
+         """
+         data = self.as_dict()
+         try:
+             headers = list(next(iter(data.values())).keys())
+         except StopIteration:
+             headers = []
+
+         return mktable(data, headers)
+
+     def group_by_value(
+         self,
+         *fields: "str | Field | TypedField[T]",
+         one_result: bool = False,
+         **kwargs: t.Any,
+     ) -> dict[T, list[T_MetaInstance]]:
+         """
+         Group the rows by a specific field (which will be the dict key).
+         """
+         kwargs["one_result"] = one_result
+         result = super().group_by_value(*fields, **kwargs)
+         return t.cast(dict[T, list[T_MetaInstance]], result)
+
+     def as_csv(self) -> str:
+         """
+         Dump the data to csv.
+         """
+         return t.cast(str, super().as_csv())
+
+     def as_dict(
+         self,
+         key: str | Field | None = None,
+         compact: bool = False,
+         storage_to_dict: bool = False,
+         datetime_to_str: bool = False,
+         custom_types: list[type] | None = None,
+     ) -> dict[int, AnyDict]:
+         """
+         Get the data in a dict of dicts.
+         """
+         if any([key, compact, storage_to_dict, datetime_to_str, custom_types]):
+             # functionality not guaranteed
+             if isinstance(key, Field):
+                 key = key.name
+
+             return t.cast(
+                 dict[int, AnyDict],
+                 super().as_dict(
+                     key or "id",
+                     compact,
+                     storage_to_dict,
+                     datetime_to_str,
+                     custom_types,
+                 ),
+             )
+
+         return {k: v.as_dict() for k, v in self.records.items()}
+
+     def as_json(
+         self, default: t.Callable[[t.Any], t.Any] = None, indent: t.Optional[int] = None, **kwargs: t.Any
+     ) -> str:
+         """
+         Turn the data into a dict and then dump to JSON.
+         """
+         data = self.as_list()
+
+         return as_json.encode(data, default=default, indent=indent, **kwargs)
+
+     def json(self, default: t.Callable[[t.Any], t.Any] = None, indent: t.Optional[int] = None, **kwargs: t.Any) -> str:
+         """
+         Turn the data into a dict and then dump to JSON.
+         """
+         return self.as_json(default=default, indent=indent, **kwargs)
+
+     def as_list(
+         self,
+         compact: bool = False,
+         storage_to_dict: bool = False,
+         datetime_to_str: bool = False,
+         custom_types: list[type] = None,
+     ) -> list[AnyDict]:
+         """
+         Get the data in a list of dicts.
+         """
+         if any([compact, storage_to_dict, datetime_to_str, custom_types]):
+             return t.cast(list[AnyDict], super().as_list(compact, storage_to_dict, datetime_to_str, custom_types))
+
+         return [_.as_dict() for _ in self.records.values()]
+
+     def __getitem__(self, item: int) -> T_MetaInstance:
+         """
+         You can get a specific row by ID from a TypedRows by using rows[idx] notation.
+
+         Since pydal's implementation differs (they expect a list instead of a dict with id keys),
+         using rows[0] will return the first row, regardless of its id.
+         """
+         try:
+             return self.records[item]
+         except KeyError as e:
+             if item == 0 and (row := self.first()):
+                 # special case: pydal internals think Rows.records is a list, not a dict
+                 return row
+
+             raise e
+
+     def get(self, item: int) -> t.Optional[T_MetaInstance]:
+         """
+         Get a row by ID, or receive None if it isn't in this result set.
+         """
+         return self.records.get(item)
+
+     def update(self, **new_values: t.Any) -> bool:
+         """
+         Update the current rows in the database with new_values.
+         """
+         # cast to make mypy understand .id is a TypedField and not an int!
+         table = t.cast(t.Type[TypedTable], self.model._ensure_table_defined())
+
+         ids = set(self.column("id"))
+         query = table.id.belongs(ids)
+         return bool(self.db(query).update(**new_values))
+
+     def delete(self) -> bool:
+         """
+         Delete the currently selected rows from the database.
+         """
+         # cast to make mypy understand .id is a TypedField and not an int!
+         table = t.cast(t.Type[TypedTable], self.model._ensure_table_defined())
+
+         ids = set(self.column("id"))
+         query = table.id.belongs(ids)
+         return bool(self.db(query).delete())
+
+     def join(
+         self,
+         field: "Field | TypedField[t.Any]",
+         name: str = None,
+         constraint: Query = None,
+         fields: list[str | Field] = None,
+         orderby: t.Optional[str | Field] = None,
+     ) -> T_MetaInstance:
+         """
+         This can be used to JOIN with some relationships after the initial select.
+
+         Using the query builder's .join() method is preferred!
+         """
+         result = super().join(field, name, constraint, fields or [], orderby)
+         return t.cast(T_MetaInstance, result)
+
+     def export_to_csv_file(
+         self,
+         ofile: t.TextIO,
+         null: t.Any = "<NULL>",
+         delimiter: str = ",",
+         quotechar: str = '"',
+         quoting: int = csv.QUOTE_MINIMAL,
+         represent: bool = False,
+         colnames: list[str] = None,
+         write_colnames: bool = True,
+         *args: t.Any,
+         **kwargs: t.Any,
+     ) -> None:
+         """
+         Shadow export_to_csv_file from Rows, but with typing.
+
+         See http://web2py.com/books/default/chapter/29/06/the-database-abstraction-layer?search=export_to_csv_file#Exporting-and-importing-data
+         """
+         super().export_to_csv_file(
+             ofile,
+             null,
+             *args,
+             delimiter=delimiter,
+             quotechar=quotechar,
+             quoting=quoting,
+             represent=represent,
+             colnames=colnames or self.colnames,
+             write_colnames=write_colnames,
+             **kwargs,
+         )
+
+     @classmethod
+     def from_rows(
+         cls,
+         rows: Rows,
+         model: t.Type[T_MetaInstance],
+         metadata: Metadata = None,
+     ) -> "TypedRows[T_MetaInstance]":
+         """
+         Internal method to convert a Rows object to a TypedRows.
+         """
+         return cls(rows, model, metadata=metadata)
+
+     def __getstate__(self) -> AnyDict:
+         """
+         Used by dill to dump to bytes (exclude db connection etc).
+         """
+         return {
+             "metadata": json.dumps(self.metadata, default=str),
+             "records": self.records,
+             "model": str(self.model._table),
+             "colnames": self.colnames,
+         }
+
+     def __setstate__(self, state: AnyDict) -> None:
+         """
+         Used by dill when loading from a bytestring.
+         """
+         state["metadata"] = json.loads(state["metadata"])
+         self.__dict__.update(state)
+         # db etc. set after undill by caching.py
+
+     def render(
+         self,
+         i: int | None = None,
+         fields: list[Field] | None = None,
+     ) -> t.Generator[T_MetaInstance, None, None]:
+         """
+         Takes an index and returns a copy of the indexed row with values \
+         transformed via the "represent" attributes of the associated fields.
+
+         Args:
+             i: index. If not specified, a generator is returned for iteration
+                 over all the rows.
+             fields: a list of fields to transform (if None, all fields with
+                 "represent" attributes will be transformed)
+         """
+         if i is None:
+             # difference: uses .keys() instead of index
+             return (self.render(i, fields=fields) for i in self.records)
+
+         if not self.db.has_representer("rows_render"):  # pragma: no cover
+             raise RuntimeError(
+                 "Rows.render() needs a `rows_render` representer in DAL instance",
+             )
+
+         row = self.records[i]
+         return row.render(fields, compact=self.compact)
+
+
+ class PaginatedRows(TypedRows[T_MetaInstance]):
+     """
+     Extension on top of rows that is used when calling .paginate() instead of .collect().
+     """
+
+     _query_builder: QueryBuilder[T_MetaInstance]
+
+     @property
+     def data(self) -> list[T_MetaInstance]:
+         """
+         Get the underlying data.
+         """
+         return list(self.records.values())
+
+     @property
+     def pagination(self) -> Pagination:
+         """
+         Get all page info.
+         """
+         pagination_data = self.metadata["pagination"]
+
+         has_next_page = pagination_data["current_page"] < pagination_data["max_page"]
+         has_prev_page = pagination_data["current_page"] > 1
+         return {
+             "total_items": pagination_data["rows"],
+             "current_page": pagination_data["current_page"],
+             "per_page": pagination_data["limit"],
+             "total_pages": pagination_data["max_page"],
+             "has_next_page": has_next_page,
+             "has_prev_page": has_prev_page,
+             "next_page": pagination_data["current_page"] + 1 if has_next_page else None,
+             "prev_page": pagination_data["current_page"] - 1 if has_prev_page else None,
+         }
+
+     def next(self) -> t.Self:
+         """
+         Get the next page.
+         """
+         data = self.metadata["pagination"]
+         if data["current_page"] >= data["max_page"]:
+             raise StopIteration("Final Page")
+
+         return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] + 1)
+
+     def previous(self) -> t.Self:
+         """
+         Get the previous page.
+         """
+         data = self.metadata["pagination"]
+         if data["current_page"] <= 1:
+             raise StopIteration("First Page")
+
+         return self._query_builder.paginate(limit=data["limit"], page=data["current_page"] - 1)
+
+     def as_dict(self, *_: t.Any, **__: t.Any) -> PaginateDict:  # type: ignore
+         """
+         Convert to a dictionary with pagination info and original data.
+
+         All arguments are ignored!
+         """
+         return {"data": super().as_dict(), "pagination": self.pagination}
+
+
+ class TypedSet(pydal.objects.Set):  # type: ignore # pragma: no cover
+     """
+     Used to make pydal Set more typed.
+
+     This class is not actually used, only 'cast' by TypeDAL.__call__
+     """
+
+     def count(self, distinct: t.Optional[bool] = None, cache: AnyDict = None) -> int:
+         """
+         Count returns an int.
+         """
+         result = super().count(distinct, cache)
+         return t.cast(int, result)
+
+     def select(self, *fields: t.Any, **attributes: t.Any) -> TypedRows[T_MetaInstance]:
+         """
+         Select returns a TypedRows of a user defined table.
+
+         Example:
+             result: TypedRows[MyTable] = db(MyTable.id > 0).select()
+
+             for row in result:
+                 reveal_type(row)  # MyTable
+         """
+         rows = super().select(*fields, **attributes)
+         return t.cast(TypedRows[T_MetaInstance], rows)
+
+
+ # note: these imports exist at the bottom of this file to prevent circular import issues:
+
+ from .fields import TypedField  # noqa: E402
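
The file above is the whole of the new typedal/rows.py. For orientation, the sketch below shows how its classes are meant to be consumed. It is illustrative and not part of the diff: the `Person` model, the `db.define(...)` registration, and the query-builder calls `.where(...)` / `.paginate(...)` are assumptions based on the docstrings above, not code taken from this release.

    # Illustrative sketch only: `Person`, db.define() and .where()/.paginate()
    # are assumed; the TypedRows/PaginatedRows behaviour follows the code above.
    from typedal import TypeDAL, TypedTable  # assumed top-level exports

    db = TypeDAL("sqlite:memory")

    class Person(TypedTable):  # hypothetical model
        name: str
        age: int

    db.define(Person)  # assumed registration helper

    rows = db(Person.id > 0).select()           # TypedRows[Person], see TypedSet.select
    first = rows.first()                        # row with the lowest id, or None
    adults = rows.find(lambda p: p.age >= 18)   # filtered subset, still a TypedRows
    by_age = rows.group_by_value(Person.age)    # dict mapping age to a list of Person rows
    payload = rows.as_json(indent=2)            # delegates to serializers.as_json.encode

    page = Person.where(Person.age >= 18).paginate(limit=20, page=1)  # PaginatedRows
    if page.pagination["has_next_page"]:
        page = page.next()                      # re-runs the query for the following page
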
@@ -3,8 +3,7 @@ Replacement for pydal's json serializer.
      """

      import json
-     import typing
-     from typing import Any
+     import typing as t

      from configurablejson import ConfigurableJsonEncoder, JSONRule

@@ -14,7 +13,7 @@ class SerializedJson(ConfigurableJsonEncoder):
          Custom encoder class with slightly improved defaults.
          """

-         def _default(self, o: Any) -> Any:  # pragma: no cover
+         def _default(self, o: t.Any) -> t.Any:  # pragma: no cover
              if hasattr(o, "as_dict"):
                  return o.as_dict()
              elif hasattr(o, "asdict"):
@@ -41,25 +40,25 @@ class SerializedJson(ConfigurableJsonEncoder):

              return str(o)

-         @typing.overload
-         def rules(self, o: Any, with_default: typing.Literal[False]) -> JSONRule | None:
+         @t.overload
+         def rules(self, o: t.Any, with_default: t.Literal[False]) -> JSONRule | None:
              """
              If you pass with_default=False, you could get a None result.
              """

-         @typing.overload
-         def rules(self, o: Any, with_default: typing.Literal[True] = True) -> JSONRule:
+         @t.overload
+         def rules(self, o: t.Any, with_default: t.Literal[True] = True) -> JSONRule:
              """
              If you don't pass with_default=False, you will always get a JSONRule result.
              """

-         def rules(self, o: Any, with_default: bool = True) -> JSONRule | None:
+         def rules(self, o: t.Any, with_default: bool = True) -> JSONRule | None:
              """
              Custom rules, such as set to list and as_dict/__json__ etc. lookups.
              """
              _type = type(o)

-             _rules: dict[type[Any], JSONRule] = {
+             _rules: dict[type[t.Any], JSONRule] = {
                  # convert set to list
                  set: JSONRule(preprocess=lambda o: list(o)),
              }
@@ -68,7 +67,7 @@ class SerializedJson(ConfigurableJsonEncoder):
              return _rules.get(_type, JSONRule(transform=self._default) if with_default else None)


-     def encode(something: Any, indent: typing.Optional[int] = None, **kw: Any) -> str:
+     def encode(something: t.Any, indent: t.Optional[int] = None, **kw: t.Any) -> str:
          """
          Encode anything to JSON with some improved defaults.
          """