TypeDAL 3.17.3__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


typedal/__about__.py CHANGED
@@ -5,4 +5,4 @@ This file contains the Version info for this package.
  # SPDX-FileCopyrightText: 2023-present Robin van der Noord <robinvandernoord@gmail.com>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "3.17.3"
+ __version__ = "4.0.0"
typedal/__init__.py CHANGED
@@ -2,16 +2,15 @@
  TypeDAL Library.
  """

- from . import fields
- from .core import (
-     Relationship,
-     TypeDAL,
-     TypedField,
-     TypedRows,
-     TypedTable,
-     relationship,
- )
+ from .core import TypeDAL
+ from .fields import TypedField
  from .helpers import sql_expression
+ from .query_builder import QueryBuilder
+ from .relationships import Relationship, relationship
+ from .rows import TypedRows
+ from .tables import TypedTable
+
+ from . import fields  # isort: skip

  try:
      from .for_py4web import DAL as P4W_DAL
@@ -19,6 +18,7 @@ except ImportError:  # pragma: no cover
      P4W_DAL = None  # type: ignore

  __all__ = [
+     "QueryBuilder",
      "Relationship",
      "TypeDAL",
      "TypedField",
typedal/caching.py CHANGED
@@ -3,27 +3,28 @@ Helpers to facilitate db-based caching.
  """

  import contextlib
+ import datetime as dt
  import hashlib
  import json
- import typing
- from datetime import datetime, timedelta, timezone
- from typing import Any, Iterable, Mapping, Optional, TypeVar
+ import typing as t

  import dill  # nosec
  from pydal.objects import Field, Rows, Set

- from .core import TypedField, TypedRows, TypedTable
+ from .fields import TypedField
+ from .rows import TypedRows
+ from .tables import TypedTable
  from .types import Query

- if typing.TYPE_CHECKING:
+ if t.TYPE_CHECKING:
      from .core import TypeDAL


- def get_now(tz: timezone = timezone.utc) -> datetime:
+ def get_now(tz: dt.timezone = dt.timezone.utc) -> dt.datetime:
      """
      Get the default datetime, optionally in a specific timezone.
      """
-     return datetime.now(tz)
+     return dt.datetime.now(tz)


  class _TypedalCache(TypedTable):
@@ -33,8 +34,8 @@ class _TypedalCache(TypedTable):

      key: TypedField[str]
      data: TypedField[bytes]
-     cached_at = TypedField(datetime, default=get_now)
-     expires_at: TypedField[datetime | None]
+     cached_at = TypedField(dt.datetime, default=get_now)
+     expires_at: TypedField[dt.datetime | None]


  class _TypedalCacheDependency(TypedTable):
@@ -47,7 +48,7 @@ class _TypedalCacheDependency(TypedTable):
      idx: TypedField[int]


- def prepare(field: Any) -> str:
+ def prepare(field: t.Any) -> str:
      """
      Prepare data to be used in a cache key.

@@ -56,10 +57,10 @@ def prepare(field: Any) -> str:
      """
      if isinstance(field, str):
          return field
-     elif isinstance(field, (dict, Mapping)):
+     elif isinstance(field, (dict, t.Mapping)):
          data = {str(k): prepare(v) for k, v in field.items()}
          return json.dumps(data, sort_keys=True)
-     elif isinstance(field, Iterable):
+     elif isinstance(field, t.Iterable):
          return ",".join(sorted([prepare(_) for _ in field]))
      elif isinstance(field, bool):
          return str(int(field))
@@ -67,7 +68,7 @@ def prepare(field: Any) -> str:
          return str(field)


- def create_cache_key(*fields: Any) -> str:
+ def create_cache_key(*fields: t.Any) -> str:
      """
      Turn any fields of data into a string.
      """
@@ -83,7 +84,7 @@ def hash_cache_key(cache_key: str | bytes) -> str:
      return h.hexdigest()


- def create_and_hash_cache_key(*fields: Any) -> tuple[str, str]:
+ def create_and_hash_cache_key(*fields: t.Any) -> tuple[str, str]:
      """
      Combine the input fields into one key and hash it with SHA 256.
      """
@@ -112,7 +113,7 @@ def _get_dependency_ids(rows: Rows, dependency_keys: list[tuple[Field, str]]) ->
      return dependencies


- def _determine_dependencies_auto(_: TypedRows[Any], rows: Rows) -> DependencyTupleSet:
+ def _determine_dependencies_auto(_: TypedRows[t.Any], rows: Rows) -> DependencyTupleSet:
      dependency_keys = []
      for field in rows.fields:
          if str(field).endswith(".id"):
@@ -123,7 +124,7 @@ def _determine_dependencies_auto(_: TypedRows[Any], rows: Rows) -> DependencyTup
      return _get_dependency_ids(rows, dependency_keys)


- def _determine_dependencies(instance: TypedRows[Any], rows: Rows, depends_on: list[Any]) -> DependencyTupleSet:
+ def _determine_dependencies(instance: TypedRows[t.Any], rows: Rows, depends_on: list[t.Any]) -> DependencyTupleSet:
      if not depends_on:
          return _determine_dependencies_auto(instance, rows)

@@ -144,11 +145,11 @@ def _determine_dependencies(instance: TypedRows[Any], rows: Rows, depends_on: li
      return _get_dependency_ids(rows, dependency_keys)


- def remove_cache(idx: int | Iterable[int], table: str) -> None:
+ def remove_cache(idx: int | t.Iterable[int], table: str) -> None:
      """
      Remove any cache entries that are dependant on one or multiple indices of a table.
      """
-     if not isinstance(idx, Iterable):
+     if not isinstance(idx, t.Iterable):
          idx = [idx]

      related = (
@@ -184,12 +185,14 @@ def _remove_cache(s: Set, tablename: str) -> None:
      remove_cache(indeces, tablename)


- T_TypedTable = TypeVar("T_TypedTable", bound=TypedTable)
+ T_TypedTable = t.TypeVar("T_TypedTable", bound=TypedTable)


  def get_expire(
-     expires_at: Optional[datetime] = None, ttl: Optional[int | timedelta] = None, now: Optional[datetime] = None
- ) -> datetime | None:
+     expires_at: t.Optional[dt.datetime] = None,
+     ttl: t.Optional[int | dt.timedelta] = None,
+     now: t.Optional[dt.datetime] = None,
+ ) -> dt.datetime | None:
      """
      Based on an expires_at date or a ttl (in seconds or a time delta), determine the expire date.
      """
@@ -197,10 +200,10 @@ def get_expire(

      if expires_at and ttl:
          raise ValueError("Please only supply an `expired at` date or a `ttl` in seconds!")
-     elif isinstance(ttl, timedelta):
+     elif isinstance(ttl, dt.timedelta):
          return now + ttl
      elif ttl:
-         return now + timedelta(seconds=ttl)
+         return now + dt.timedelta(seconds=ttl)
      elif expires_at:
          return expires_at

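Apart from the dt. prefixes, get_expire keeps its logic. A short sketch of the behaviour implied by the hunk above, assuming typedal.caching.get_expire (the "no arguments" result is implied by the dt.datetime | None return annotation rather than shown):

    import datetime as dt
    from typedal.caching import get_expire

    moment = dt.datetime(2030, 1, 1, tzinfo=dt.timezone.utc)

    get_expire(ttl=60)                     # now + 60 seconds
    get_expire(ttl=dt.timedelta(hours=1))  # now + 1 hour
    get_expire(expires_at=moment)          # returned as-is
    get_expire()                           # no expiry (None)
    get_expire(expires_at=moment, ttl=60)  # raises ValueError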
@@ -210,8 +213,8 @@ def get_expire(
  def save_to_cache(
      instance: TypedRows[T_TypedTable],
      rows: Rows,
-     expires_at: Optional[datetime] = None,
-     ttl: Optional[int | timedelta] = None,
+     expires_at: t.Optional[dt.datetime] = None,
+     ttl: t.Optional[int | dt.timedelta] = None,
  ) -> TypedRows[T_TypedTable]:
      """
      Save a typedrows result to the database, and save dependencies from rows.
@@ -237,13 +240,13 @@ def save_to_cache(
      return instance


- def _load_from_cache(key: str, db: "TypeDAL") -> Any | None:
+ def _load_from_cache(key: str, db: "TypeDAL") -> t.Any | None:
      if not (row := _TypedalCache.where(key=key).first()):
          return None

      now = get_now()

-     expires = row.expires_at.replace(tzinfo=timezone.utc) if row.expires_at else None
+     expires = row.expires_at.replace(tzinfo=dt.timezone.utc) if row.expires_at else None

      if expires and now >= expires:
          row.delete_record()
@@ -261,7 +264,7 @@ def _load_from_cache(key: str, db: "TypeDAL") -> Any | None:
      return inst


- def load_from_cache(key: str, db: "TypeDAL") -> Any | None:
+ def load_from_cache(key: str, db: "TypeDAL") -> t.Any | None:
      """
      If 'key' matches a non-expired row in the database, try to load the dill.

@@ -302,10 +305,10 @@ def _expired_and_valid_query() -> tuple[str, str]:
      return expired_items, valid_items


- T = typing.TypeVar("T")
- Stats = typing.TypedDict("Stats", {"total": T, "valid": T, "expired": T})
+ T = t.TypeVar("T")
+ Stats = t.TypedDict("Stats", {"total": T, "valid": T, "expired": T})

- RowStats = typing.TypedDict(
+ RowStats = t.TypedDict(
      "RowStats",
      {
          "Dependent Cache Entries": int,
@@ -338,7 +341,7 @@ def row_stats(db: "TypeDAL", table: str, row_id: str) -> Stats[RowStats]:
      }


- TableStats = typing.TypedDict(
+ TableStats = t.TypedDict(
      "TableStats",
      {
          "Dependent Cache Entries": int,
@@ -371,7 +374,7 @@ def table_stats(db: "TypeDAL", table: str) -> Stats[TableStats]:
      }


- GenericStats = typing.TypedDict(
+ GenericStats = t.TypedDict(
      "GenericStats",
      {
          "entries": int,
typedal/config.py CHANGED
@@ -4,11 +4,10 @@ TypeDAL can be configured by a combination of pyproject.toml (static), env (dyna
  import os
  import re
- import typing
+ import typing as t
  import warnings
  from collections import defaultdict
  from pathlib import Path
- from typing import Any, Optional

  import tomli
  from configuraptor import TypedConfig, alias
@@ -17,7 +16,7 @@ from dotenv import dotenv_values, find_dotenv

  from .types import AnyDict

- if typing.TYPE_CHECKING:
+ if t.TYPE_CHECKING:
      from edwh_migrate import Config as MigrateConfig
      from pydal2sql.typer_support import Config as P2SConfig

@@ -41,15 +40,15 @@ class TypeDALConfig(TypedConfig):
      output: str = ""
      noop: bool = False
      magic: bool = True
-     tables: Optional[list[str]] = None
+     tables: t.Optional[list[str]] = None
      function: str = "define_tables"

      # edwh-migrate:
      # migrate uri = database
-     database_to_restore: Optional[str]
-     migrate_cat_command: Optional[str]
-     schema_version: Optional[str]
-     redis_host: Optional[str]
+     database_to_restore: t.Optional[str]
+     migrate_cat_command: t.Optional[str]
+     schema_version: t.Optional[str]
+     redis_host: t.Optional[str]
      migrate_table: str = "typedal_implemented_features"
      flag_location: str
      create_flag_location: bool = True
@@ -148,7 +147,7 @@ def _load_toml(path: str | bool | Path | None = True) -> tuple[str, AnyDict]:
          with open(toml_path, "rb") as f:
              data = tomli.load(f)

-         return str(toml_path) or "", typing.cast(AnyDict, data["tool"]["typedal"])
+         return str(toml_path) or "", t.cast(AnyDict, data["tool"]["typedal"])
      except Exception as e:
          warnings.warn(f"Could not load typedal config toml: {e}", source=e)
          return str(toml_path) or "", {}
@@ -194,7 +193,7 @@ def get_db_for_alias(db_name: str) -> str:
      return DB_ALIASES.get(db_name, db_name)


- DEFAULTS: dict[str, Any | typing.Callable[[AnyDict], Any]] = {
+ DEFAULTS: dict[str, t.Any | t.Callable[[AnyDict], t.Any]] = {
      "database": lambda data: data.get("db_uri") or "sqlite:memory",
      "dialect": lambda data: (
          get_db_for_alias(data["database"].split(":")[0]) if ":" in data["database"] else data.get("db_type")
@@ -208,7 +207,7 @@ DEFAULTS: dict[str, Any | typing.Callable[[AnyDict], Any]] = {
  }


- def _fill_defaults(data: AnyDict, prop: str, fallback: Any = None) -> None:
+ def _fill_defaults(data: AnyDict, prop: str, fallback: t.Any = None) -> None:
      default = DEFAULTS.get(prop, fallback)
      if callable(default):
          default = default(data)
@@ -223,7 +222,7 @@ def fill_defaults(data: AnyDict, prop: str) -> None:
      _fill_defaults(data, prop)


- TRANSFORMS: dict[str, typing.Callable[[AnyDict], Any]] = {
+ TRANSFORMS: dict[str, t.Callable[[AnyDict], t.Any]] = {
      "database": lambda data: (
          data["database"]
          if (":" in data["database"] or not data.get("dialect"))
@@ -264,7 +263,7 @@ def expand_posix_vars(posix_expr: str, context: dict[str, str]) -> str:
      # Regular expression to match "${VAR:default}" pattern
      pattern = r"\$\{([^}]+)\}"

-     def replace_var(match: re.Match[Any]) -> str:
+     def replace_var(match: re.Match[t.Any]) -> str:
          var_with_default = match.group(1)
          var_name, default_value = var_with_default.split(":") if ":" in var_with_default else (var_with_default, "")
          return env.get(var_name.lower(), default_value)
@@ -325,10 +324,10 @@ def expand_env_vars_into_toml_values(toml: AnyDict, env: AnyDict) -> None:


  def load_config(
-     connection_name: Optional[str] = None,
+     connection_name: t.Optional[str] = None,
      _use_pyproject: bool | str | None = True,
      _use_env: bool | str | None = True,
-     **fallback: Any,
+     **fallback: t.Any,
  ) -> TypeDALConfig:
      """
      Combines multiple sources of config into one config instance.
@@ -338,7 +337,7 @@ def load_config(
      # combine and fill with fallback values
      # load typedal config or fail
      toml_path, toml = _load_toml(_use_pyproject)
-     dotenv_path, dotenv = _load_dotenv(_use_env)
+     _dotenv_path, dotenv = _load_dotenv(_use_env)

      expand_env_vars_into_toml_values(toml, dotenv)

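The config changes are equally mechanical: typing is aliased to t and the unused dotenv_path becomes _dotenv_path. For orientation, a hedged sketch of the load_config entry point whose signature appears above; passing database as a fallback keyword is an illustrative assumption:

    from typedal.config import load_config

    config = load_config()                          # pyproject.toml [tool.typedal] + .env + defaults
    config = load_config(database="sqlite:memory")  # extra values supplied via **fallback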
typedal/constants.py ADDED
@@ -0,0 +1,25 @@
+ """
+ Constants values.
+ """
+
+ import datetime as dt
+ import typing as t
+ from decimal import Decimal
+
+ from .types import T_annotation
+
+ JOIN_OPTIONS = t.Literal["left", "inner", None]
+ DEFAULT_JOIN_OPTION: JOIN_OPTIONS = "left"
+
+ BASIC_MAPPINGS: dict[T_annotation, str] = {
+     str: "string",
+     int: "integer",
+     bool: "boolean",
+     bytes: "blob",
+     float: "double",
+     object: "json",
+     Decimal: "decimal(10,2)",
+     dt.date: "date",
+     dt.time: "time",
+     dt.datetime: "datetime",
+ }
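typedal/constants.py is a brand-new module in 4.0.0. A small sketch of how the values read (the lookups are illustrative; only the constants themselves come from the file above):

    import datetime as dt
    from typedal.constants import BASIC_MAPPINGS, DEFAULT_JOIN_OPTION

    BASIC_MAPPINGS[str]          # "string"
    BASIC_MAPPINGS[dt.datetime]  # "datetime"
    DEFAULT_JOIN_OPTION          # "left"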