TypeDAL 3.7.0.tar.gz → 3.8.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of TypeDAL might be problematic.

Files changed (59)
  1. {typedal-3.7.0 → typedal-3.8.0}/CHANGELOG.md +20 -0
  2. {typedal-3.7.0 → typedal-3.8.0}/PKG-INFO +6 -2
  3. {typedal-3.7.0 → typedal-3.8.0}/README.md +2 -0
  4. {typedal-3.7.0 → typedal-3.8.0}/example_new.py +2 -0
  5. {typedal-3.7.0 → typedal-3.8.0}/pyproject.toml +3 -1
  6. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/__about__.py +1 -1
  7. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/core.py +56 -12
  8. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/fields.py +79 -33
  9. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/types.py +65 -2
  10. typedal-3.8.0/tests/test_config.py +315 -0
  11. typedal-3.7.0/tests/test_config.py +0 -155
  12. {typedal-3.7.0 → typedal-3.8.0}/.github/workflows/su6.yml +0 -0
  13. {typedal-3.7.0 → typedal-3.8.0}/.gitignore +0 -0
  14. {typedal-3.7.0 → typedal-3.8.0}/.readthedocs.yml +0 -0
  15. {typedal-3.7.0 → typedal-3.8.0}/coverage.svg +0 -0
  16. {typedal-3.7.0 → typedal-3.8.0}/docs/1_getting_started.md +0 -0
  17. {typedal-3.7.0 → typedal-3.8.0}/docs/2_defining_tables.md +0 -0
  18. {typedal-3.7.0 → typedal-3.8.0}/docs/3_building_queries.md +0 -0
  19. {typedal-3.7.0 → typedal-3.8.0}/docs/4_relationships.md +0 -0
  20. {typedal-3.7.0 → typedal-3.8.0}/docs/5_py4web.md +0 -0
  21. {typedal-3.7.0 → typedal-3.8.0}/docs/6_migrations.md +0 -0
  22. {typedal-3.7.0 → typedal-3.8.0}/docs/7_mixins.md +0 -0
  23. {typedal-3.7.0 → typedal-3.8.0}/docs/css/code_blocks.css +0 -0
  24. {typedal-3.7.0 → typedal-3.8.0}/docs/index.md +0 -0
  25. {typedal-3.7.0 → typedal-3.8.0}/docs/requirements.txt +0 -0
  26. {typedal-3.7.0 → typedal-3.8.0}/example_old.py +0 -0
  27. {typedal-3.7.0 → typedal-3.8.0}/mkdocs.yml +0 -0
  28. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/__init__.py +0 -0
  29. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/caching.py +0 -0
  30. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/cli.py +0 -0
  31. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/config.py +0 -0
  32. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/for_py4web.py +0 -0
  33. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/for_web2py.py +0 -0
  34. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/helpers.py +0 -0
  35. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/mixins.py +0 -0
  36. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/py.typed +0 -0
  37. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/serializers/as_json.py +0 -0
  38. {typedal-3.7.0 → typedal-3.8.0}/src/typedal/web2py_py4web_shared.py +0 -0
  39. {typedal-3.7.0 → typedal-3.8.0}/tests/__init__.py +0 -0
  40. {typedal-3.7.0 → typedal-3.8.0}/tests/configs/simple.toml +0 -0
  41. {typedal-3.7.0 → typedal-3.8.0}/tests/configs/valid.env +0 -0
  42. {typedal-3.7.0 → typedal-3.8.0}/tests/configs/valid.toml +0 -0
  43. {typedal-3.7.0 → typedal-3.8.0}/tests/test_cli.py +0 -0
  44. {typedal-3.7.0 → typedal-3.8.0}/tests/test_docs_examples.py +0 -0
  45. {typedal-3.7.0 → typedal-3.8.0}/tests/test_helpers.py +0 -0
  46. {typedal-3.7.0 → typedal-3.8.0}/tests/test_json.py +0 -0
  47. {typedal-3.7.0 → typedal-3.8.0}/tests/test_main.py +0 -0
  48. {typedal-3.7.0 → typedal-3.8.0}/tests/test_mixins.py +0 -0
  49. {typedal-3.7.0 → typedal-3.8.0}/tests/test_mypy.py +0 -0
  50. {typedal-3.7.0 → typedal-3.8.0}/tests/test_orm.py +0 -0
  51. {typedal-3.7.0 → typedal-3.8.0}/tests/test_py4web.py +0 -0
  52. {typedal-3.7.0 → typedal-3.8.0}/tests/test_query_builder.py +0 -0
  53. {typedal-3.7.0 → typedal-3.8.0}/tests/test_relationships.py +0 -0
  54. {typedal-3.7.0 → typedal-3.8.0}/tests/test_row.py +0 -0
  55. {typedal-3.7.0 → typedal-3.8.0}/tests/test_stats.py +0 -0
  56. {typedal-3.7.0 → typedal-3.8.0}/tests/test_table.py +0 -0
  57. {typedal-3.7.0 → typedal-3.8.0}/tests/test_web2py.py +0 -0
  58. {typedal-3.7.0 → typedal-3.8.0}/tests/test_xx_others.py +0 -0
  59. {typedal-3.7.0 → typedal-3.8.0}/tests/timings.py +0 -0
{typedal-3.7.0 → typedal-3.8.0}/CHANGELOG.md

@@ -2,6 +2,26 @@

  <!--next-version-placeholder-->

+ ## v3.8.0 (2024-10-11)
+
+ ### Feature
+
+ * Add `_sql()` function to TypedTable to generate SQL Schema code. (only if 'migration' extra/pydal2sql is installed) ([`31f86de`](https://github.com/trialandsuccess/TypeDAL/commit/31f86de30cc53cf320f6231c27dd545103b50d10))
+ * Add FieldSettings typed dict for better hinting for options when creating a TypedField() or any of the fields using it ([`97a7c7a`](https://github.com/trialandsuccess/TypeDAL/commit/97a7c7ad6112a6098088c44bbc6ae438bbfc0040))
+ * Add custom TypedFields for timestamp, point and uuid (valid types in postgres and sqlite is okay with anything) ([`a7bc9d1`](https://github.com/trialandsuccess/TypeDAL/commit/a7bc9d1b7ab0c88d4937956a68305b4d61a0851f))
+ * Started on custom types (timestamp) ([`981da83`](https://github.com/trialandsuccess/TypeDAL/commit/981da83cc8f4fec442b2cf74e0b555ce0633f96a))
+
+ ## v3.7.1 (2024-10-09)
+
+ ### Fix
+
+ * Prepare for python 3.13 (-> cgi dependency, changes in forward reference evaluation); except psycopg2 ([`bbcca8f`](https://github.com/trialandsuccess/TypeDAL/commit/bbcca8f7a5d2f8a6ddc8caf3a1b05fde3ed2fdd2))
+ * Require legacy-cgi for python 3.13+ ([`7ba9489`](https://github.com/trialandsuccess/TypeDAL/commit/7ba94898cde600008a350e718783a4d0dbc05e45))
+
+ ### Documentation
+
+ * **readme:** Include `from typedal.helpers import get_db` in example ([`8853052`](https://github.com/trialandsuccess/TypeDAL/commit/8853052575b4576945901eb87da94bf709e99526))
+
  ## v3.7.0 (2024-08-17)

  ### Feature
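
Taken together, the changelog entries above translate to roughly the following usage. This is a minimal sketch based on the tests further down in this diff, assuming the installed package is imported as `typedal` (rather than the in-repo `src.typedal` path) and using a hypothetical `Event` table:

```python
# Sketch only: `Event` is a made-up table; TimestampField/PointField/UUIDField
# and TypedTable._sql() are the additions described in the v3.8.0 changelog.
import datetime as dt
import uuid

from typedal import TypeDAL, TypedTable
from typedal.fields import PointField, TimestampField, UUIDField

db = TypeDAL("sqlite:memory")


class Event(TypedTable):
    ts = TimestampField(default=dt.datetime.now)  # sub-second precision
    location = PointField()                       # native `point` column in postgres
    gid = UUIDField(default=uuid.uuid4)


db.define(Event)

# New in 3.8.0: generate the SQL schema (needs the 'migration' extra / pydal2sql):
print(Event._sql())
```
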
{typedal-3.7.0 → typedal-3.8.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: TypeDAL
- Version: 3.7.0
+ Version: 3.8.0
  Summary: Typing support for PyDAL
  Project-URL: Documentation, https://typedal.readthedocs.io/
  Project-URL: Issues, https://github.com/trialandsuccess/TypeDAL/issues
@@ -12,16 +12,18 @@ Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Programming Language :: Python :: Implementation :: CPython
  Classifier: Programming Language :: Python :: Implementation :: PyPy
  Requires-Python: >=3.10
  Requires-Dist: configurable-json
  Requires-Dist: configuraptor>=1.26.2
  Requires-Dist: dill
+ Requires-Dist: legacy-cgi; python_version >= '3.13'
  Requires-Dist: pydal
  Requires-Dist: python-slugify
  Provides-Extra: all
- Requires-Dist: edwh-migrate>=0.8.0; extra == 'all'
+ Requires-Dist: edwh-migrate[full]>=0.8.0; extra == 'all'
  Requires-Dist: py4web; extra == 'all'
  Requires-Dist: pydal2sql[all]>=1.2.0; extra == 'all'
  Requires-Dist: questionary; extra == 'all'
@@ -318,6 +320,8 @@ These helpers are useful for scenarios where direct access to the PyDAL objects
  An example of this is when you need to do a `db.commit()` but you can't import `db` directly:

  ```python
+ from typedal.helpers import get_db #, get_table, get_field
+
  MyTable.insert(...)
  db = get_db(MyTable)
  db.commit() # this is usually done automatically but sometimes you want to manually commit.
{typedal-3.7.0 → typedal-3.8.0}/README.md

@@ -264,6 +264,8 @@ These helpers are useful for scenarios where direct access to the PyDAL objects
  An example of this is when you need to do a `db.commit()` but you can't import `db` directly:

  ```python
+ from typedal.helpers import get_db #, get_table, get_field
+
  MyTable.insert(...)
  db = get_db(MyTable)
  db.commit() # this is usually done automatically but sometimes you want to manually commit.
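
The README patch above only shows the changed lines. A self-contained version of the documented snippet might look like the following sketch, with `MyTable` (the README's placeholder name) defined explicitly for illustration:

```python
from typedal import TypeDAL, TypedTable
from typedal.helpers import get_db  # , get_table, get_field

db = TypeDAL("sqlite:memory")


class MyTable(TypedTable):  # stand-in table for the README's placeholder
    name: str


db.define(MyTable)

MyTable.insert(name="example")
same_db = get_db(MyTable)  # recover the TypeDAL instance from the table class
same_db.commit()           # usually done automatically, but can be done manually
```
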
{typedal-3.7.0 → typedal-3.8.0}/example_new.py

@@ -19,6 +19,8 @@ class Person(TypedTable):
  age = TypedField(int, default=18)
  nicknames: list[str]

+ ts = TypedField(dt.datetime, type="timestamp")
+

  assert db.person._format == "%(name)s"

{typedal-3.7.0 → typedal-3.8.0}/pyproject.toml

@@ -19,6 +19,7 @@ classifiers = [
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
  "Programming Language :: Python :: Implementation :: CPython",
  "Programming Language :: Python :: Implementation :: PyPy",
  ]
@@ -28,6 +29,7 @@ dependencies = [
  "configuraptor >= 1.26.2", # config
  "Configurable-JSON", # json dumping
  "python-slugify",
+ "legacy-cgi; python_version >= '3.13'"
  ]

  [project.optional-dependencies]
@@ -49,7 +51,7 @@ all = [
  "typer",
  "tabulate",
  "pydal2sql[all]>=1.2.0",
- "edwh-migrate>=0.8.0",
+ "edwh-migrate[full]>=0.8.0",
  "questionary",
  "tomlkit",
  ]
{typedal-3.7.0 → typedal-3.8.0}/src/typedal/__about__.py

@@ -5,4 +5,4 @@ This file contains the Version info for this package.
  # SPDX-FileCopyrightText: 2023-present Robin van der Noord <robinvandernoord@gmail.com>
  #
  # SPDX-License-Identifier: MIT
- __version__ = "3.7.0"
+ __version__ = "3.8.0"
{typedal-3.7.0 → typedal-3.8.0}/src/typedal/core.py

@@ -8,6 +8,7 @@ import datetime as dt
  import inspect
  import json
  import math
+ import sys
  import types
  import typing
  import warnings
@@ -47,6 +48,7 @@ from .types import (
  CacheMetadata,
  Expression,
  Field,
+ FieldSettings,
  Metadata,
  OpRow,
  PaginateDict,
@@ -339,6 +341,22 @@ def to_relationship(
  return Relationship(typing.cast(type[TypedTable], field), condition, typing.cast(JOIN_OPTIONS, join))


+ def evaluate_forward_reference(fw_ref: typing.ForwardRef) -> type:
+ """
+ Extract the original type from a forward reference string.
+ """
+ kwargs = dict(
+ localns=locals(),
+ globalns=globals(),
+ recursive_guard=frozenset(),
+ )
+ if sys.version_info >= (3, 13): # pragma: no cover
+ # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
+ kwargs["type_params"] = ()
+
+ return fw_ref._evaluate(**kwargs) # type: ignore
+
+
  class TypeDAL(pydal.DAL): # type: ignore
  """
  Drop-in replacement for pyDAL with layer to convert class-based table definitions to classical pydal define_tables.
@@ -465,6 +483,7 @@ class TypeDAL(pydal.DAL): # type: ignore
  # when __future__.annotations is implemented, cls.__annotations__ will not work anymore as below.
  # proper way to handle this would be (but gives error right now due to Table implementing magic methods):
  # typing.get_type_hints(cls, globalns=None, localns=None)
+ # -> ERR e.g. `pytest -svxk cli` -> name 'BestFriend' is not defined

  # dirty way (with evil eval):
  # [eval(v) for k, v in cls.__annotations__.items()]
@@ -519,7 +538,9 @@ class TypeDAL(pydal.DAL): # type: ignore
  # }

  # keys of implicit references (also relationships):
- reference_field_keys = [k for k, v in fields.items() if v.type.split(" ")[0] in ("list:reference", "reference")]
+ reference_field_keys = [
+ k for k, v in fields.items() if str(v.type).split(" ")[0] in ("list:reference", "reference")
+ ]

  # add implicit relationships:
  # User; list[User]; TypedField[User]; TypedField[list[User]]
@@ -663,7 +684,9 @@ class TypeDAL(pydal.DAL): # type: ignore

  @classmethod
  def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
- return Field(name, _type, **{**cls.default_kwargs, **kw})
+ # return Field(name, _type, **{**cls.default_kwargs, **kw})
+ kw_combined = cls.default_kwargs | kw
+ return Field(name, _type, **kw_combined)

  @classmethod
  def _annotation_to_pydal_fieldtype(
@@ -674,9 +697,8 @@ class TypeDAL(pydal.DAL): # type: ignore

  if isinstance(ftype, str):
  # extract type from string
- ftype = typing.get_args(Type[ftype])[0]._evaluate(
- localns=locals(), globalns=globals(), recursive_guard=frozenset()
- )
+ fw_ref: typing.ForwardRef = typing.get_args(Type[ftype])[0]
+ ftype = evaluate_forward_reference(fw_ref)

  if mapping := BASIC_MAPPINGS.get(ftype):
  # basi types
@@ -741,6 +763,7 @@ class TypeDAL(pydal.DAL): # type: ignore
  """
  fname = cls.to_snake(fname)

+ # note: 'kw' is updated in `_annotation_to_pydal_fieldtype` by the kwargs provided to the TypedField(...)
  if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
  return cls._build_field(fname, converted_type, **kw)
  else:
@@ -1144,7 +1167,7 @@ class TableMeta(type):
  """
  Add a before insert hook.
  """
- cls._before_insert.append(fn) # type: ignore
+ cls._before_insert.append(fn)
  return cls

  def after_insert(
@@ -1157,7 +1180,7 @@ class TableMeta(type):
  """
  Add an after insert hook.
  """
- cls._after_insert.append(fn) # type: ignore
+ cls._after_insert.append(fn)
  return cls

  def before_update(
@@ -1167,7 +1190,7 @@ class TableMeta(type):
  """
  Add a before update hook.
  """
- cls._before_update.append(fn) # type: ignore
+ cls._before_update.append(fn)
  return cls

  def after_update(
@@ -1177,7 +1200,7 @@ class TableMeta(type):
  """
  Add an after update hook.
  """
- cls._after_update.append(fn) # type: ignore
+ cls._after_update.append(fn)
  return cls

  def before_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
@@ -1212,9 +1235,16 @@ class TypedField(Expression, typing.Generic[T_Value]): # pragma: no cover

  requires: Validator | typing.Iterable[Validator]

- def __init__(self, _type: Type[T_Value] | types.UnionType = str, /, **settings: Any) -> None: # type: ignore
+ # NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
+
+ def __init__(
+ self, _type: Type[T_Value] | types.UnionType = str, /, **settings: Unpack[FieldSettings] # type: ignore
+ ) -> None:
  """
- A TypedFieldType should not be inited manually, but TypedField (from `fields.py`) should be used!
+ Typed version of pydal.Field, which will be converted to a normal Field in the background.
+
+ Provide the Python type for this field as the first positional argument
+ and any other settings to Field() as keyword parameters.
  """
  self._type = _type
  self.kwargs = settings
@@ -1289,9 +1319,11 @@ class TypedField(Expression, typing.Generic[T_Value]): # pragma: no cover
  def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
  """
  Convert a Typed Field instance to a pydal.Field.
+
+ Actual logic in TypeDAL._to_field but this function creates the pydal type name and updates the kwarg settings.
  """
  other_kwargs = self.kwargs.copy()
- extra_kwargs.update(other_kwargs)
+ extra_kwargs.update(other_kwargs) # <- modifies and overwrites the default kwargs with user-specified ones
  return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)

  def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
@@ -1738,6 +1770,18 @@ class TypedTable(_TypedTable, metaclass=TableMeta):
  state["_row"] = Row(json.loads(state["_row"]))
  self.__dict__ |= state

+ @classmethod
+ def _sql(cls) -> str:
+ """
+ Generate SQL Schema for this table via pydal2sql (if 'migrations' extra is installed).
+ """
+ try:
+ import pydal2sql
+ except ImportError as e: # pragma: no cover
+ raise RuntimeError("Can not generate SQL without the 'migration' extra or `pydal2sql` installed!") from e
+
+ return pydal2sql.generate_sql(cls)
+

  # backwards compat:
  TypedRow = TypedTable
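
The `_build_field` change above replaces the `**{**defaults, **kw}` spread with the dict union operator. Both forms give the caller-supplied kwargs precedence over the class defaults; the names in this standalone check are illustrative, not TypeDAL internals:

```python
# Plain-Python check that `defaults | overrides` behaves like the old spread:
# on key collisions the right-hand operand wins.
default_kwargs = {"notnull": False, "label": "Default"}
user_kwargs = {"notnull": True}

merged_old = {**default_kwargs, **user_kwargs}   # pre-3.8.0 style
merged_new = default_kwargs | user_kwargs        # 3.8.0 style

assert merged_old == merged_new == {"notnull": True, "label": "Default"}
```
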
{typedal-3.7.0 → typedal-3.8.0}/src/typedal/fields.py

@@ -2,13 +2,18 @@
  This file contains available Field types.
  """

+ import ast
  import datetime as dt
  import decimal
  import typing
+ import uuid

+ from pydal.helpers.classes import SQLCustomType
  from pydal.objects import Table
+ from typing_extensions import Unpack

  from .core import TypeDAL, TypedField, TypedTable
+ from .types import FieldSettings

  T = typing.TypeVar("T", bound=typing.Any)

@@ -16,23 +21,8 @@ T = typing.TypeVar("T", bound=typing.Any)
  ## general


- # def TypedField(
- # _type: typing.Type[T] | types.UnionType,
- # **kwargs: typing.Any,
- # ) -> T:
- # """
- # sneaky: its a function and not a class, because there's a return type.
- #
- # and the return type (T) is the input type in _type
- #
- # Example:
- # age: TypedField(int, default=18)
- # """
- # return typing.cast(T, TypedFieldType(_type, **kwargs))
-
-
  ## specific
- def StringField(**kw: typing.Any) -> TypedField[str]:
+ def StringField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
  """
  Pydal type is string, Python type is str.
  """
@@ -43,7 +33,7 @@ def StringField(**kw: typing.Any) -> TypedField[str]:
  String = StringField


- def TextField(**kw: typing.Any) -> TypedField[str]:
+ def TextField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
  """
  Pydal type is text, Python type is str.
  """
@@ -54,7 +44,7 @@ def TextField(**kw: typing.Any) -> TypedField[str]:
  Text = TextField


- def BlobField(**kw: typing.Any) -> TypedField[bytes]:
+ def BlobField(**kw: Unpack[FieldSettings]) -> TypedField[bytes]:
  """
  Pydal type is blob, Python type is bytes.
  """
@@ -65,7 +55,7 @@ def BlobField(**kw: typing.Any) -> TypedField[bytes]:
  Blob = BlobField


- def BooleanField(**kw: typing.Any) -> TypedField[bool]:
+ def BooleanField(**kw: Unpack[FieldSettings]) -> TypedField[bool]:
  """
  Pydal type is boolean, Python type is bool.
  """
@@ -76,7 +66,7 @@ def BooleanField(**kw: typing.Any) -> TypedField[bool]:
  Boolean = BooleanField


- def IntegerField(**kw: typing.Any) -> TypedField[int]:
+ def IntegerField(**kw: Unpack[FieldSettings]) -> TypedField[int]:
  """
  Pydal type is integer, Python type is int.
  """
@@ -87,7 +77,7 @@ def IntegerField(**kw: typing.Any) -> TypedField[int]:
  Integer = IntegerField


- def DoubleField(**kw: typing.Any) -> TypedField[float]:
+ def DoubleField(**kw: Unpack[FieldSettings]) -> TypedField[float]:
  """
  Pydal type is double, Python type is float.
  """
@@ -98,7 +88,7 @@ def DoubleField(**kw: typing.Any) -> TypedField[float]:
  Double = DoubleField


- def DecimalField(n: int, m: int, **kw: typing.Any) -> TypedField[decimal.Decimal]:
+ def DecimalField(n: int, m: int, **kw: Unpack[FieldSettings]) -> TypedField[decimal.Decimal]:
  """
  Pydal type is decimal, Python type is Decimal.
  """
@@ -109,7 +99,7 @@ def DecimalField(n: int, m: int, **kw: typing.Any) -> TypedField[decimal.Decimal
  Decimal = DecimalField


- def DateField(**kw: typing.Any) -> TypedField[dt.date]:
+ def DateField(**kw: Unpack[FieldSettings]) -> TypedField[dt.date]:
  """
  Pydal type is date, Python type is datetime.date.
  """
@@ -120,7 +110,7 @@ def DateField(**kw: typing.Any) -> TypedField[dt.date]:
  Date = DateField


- def TimeField(**kw: typing.Any) -> TypedField[dt.time]:
+ def TimeField(**kw: Unpack[FieldSettings]) -> TypedField[dt.time]:
  """
  Pydal type is time, Python type is datetime.time.
  """
@@ -131,7 +121,7 @@ def TimeField(**kw: typing.Any) -> TypedField[dt.time]:
  Time = TimeField


- def DatetimeField(**kw: typing.Any) -> TypedField[dt.datetime]:
+ def DatetimeField(**kw: Unpack[FieldSettings]) -> TypedField[dt.datetime]:
  """
  Pydal type is datetime, Python type is datetime.datetime.
  """
@@ -142,7 +132,7 @@ def DatetimeField(**kw: typing.Any) -> TypedField[dt.datetime]:
  Datetime = DatetimeField


- def PasswordField(**kw: typing.Any) -> TypedField[str]:
+ def PasswordField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
  """
  Pydal type is password, Python type is str.
  """
@@ -153,7 +143,7 @@ def PasswordField(**kw: typing.Any) -> TypedField[str]:
  Password = PasswordField


- def UploadField(**kw: typing.Any) -> TypedField[str]:
+ def UploadField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
  """
  Pydal type is upload, Python type is str.
  """
@@ -167,7 +157,7 @@ T_subclass = typing.TypeVar("T_subclass", TypedTable, Table)


  def ReferenceField(
- other_table: str | typing.Type[TypedTable] | TypedTable | Table | T_subclass, **kw: typing.Any
+ other_table: str | typing.Type[TypedTable] | TypedTable | Table | T_subclass, **kw: Unpack[FieldSettings]
  ) -> TypedField[int]:
  """
  Pydal type is reference, Python type is int (id).
@@ -190,7 +180,7 @@ def ReferenceField(
  Reference = ReferenceField


- def ListStringField(**kw: typing.Any) -> TypedField[list[str]]:
+ def ListStringField(**kw: Unpack[FieldSettings]) -> TypedField[list[str]]:
  """
  Pydal type is list:string, Python type is list of str.
  """
@@ -201,7 +191,7 @@ def ListStringField(**kw: typing.Any) -> TypedField[list[str]]:
  ListString = ListStringField


- def ListIntegerField(**kw: typing.Any) -> TypedField[list[int]]:
+ def ListIntegerField(**kw: Unpack[FieldSettings]) -> TypedField[list[int]]:
  """
  Pydal type is list:integer, Python type is list of int.
  """
@@ -212,7 +202,7 @@ def ListIntegerField(**kw: typing.Any) -> TypedField[list[int]]:
  ListInteger = ListIntegerField


- def ListReferenceField(other_table: str, **kw: typing.Any) -> TypedField[list[int]]:
+ def ListReferenceField(other_table: str, **kw: Unpack[FieldSettings]) -> TypedField[list[int]]:
  """
  Pydal type is list:reference, Python type is list of int (id).
  """
@@ -223,7 +213,7 @@ def ListReferenceField(other_table: str, **kw: typing.Any) -> TypedField[list[in
  ListReference = ListReferenceField


- def JSONField(**kw: typing.Any) -> TypedField[object]:
+ def JSONField(**kw: Unpack[FieldSettings]) -> TypedField[object]:
  """
  Pydal type is json, Python type is object (can be anything JSON-encodable).
  """
@@ -231,7 +221,7 @@ def JSONField(**kw: typing.Any) -> TypedField[object]:
  return TypedField(object, **kw)


- def BigintField(**kw: typing.Any) -> TypedField[int]:
+ def BigintField(**kw: Unpack[FieldSettings]) -> TypedField[int]:
  """
  Pydal type is bigint, Python type is int.
  """
@@ -240,3 +230,59 @@ def BigintField(**kw: typing.Any) -> TypedField[int]:


  Bigint = BigintField
+
+ ## Custom:
+
+ NativeTimestampField = SQLCustomType(
+ type="datetime",
+ native="timestamp",
+ encoder=lambda x: f"'{x}'", # extra quotes
+ # decoder=lambda x: x, # already parsed into datetime
+ )
+
+
+ def TimestampField(**kw: Unpack[FieldSettings]) -> TypedField[dt.datetime]:
+ """
+ Database type is timestamp, Python type is datetime.
+
+ Advantage over the regular datetime type is that
+ a timestamp has millisecond precision (2024-10-11 20:18:24.505194)
+ whereas a regular datetime only has precision up to the second (2024-10-11 20:18:24)
+ """
+ kw["type"] = NativeTimestampField
+ return TypedField(
+ dt.datetime,
+ **kw,
+ )
+
+
+ NativePointField = SQLCustomType(
+ type="string",
+ native="point",
+ encoder=str,
+ decoder=ast.literal_eval,
+ )
+
+
+ def PointField(**kw: Unpack[FieldSettings]) -> TypedField[tuple[float, float]]:
+ """
+ Database type is point, Python type is tuple[float, float].
+ """
+ kw["type"] = NativePointField
+ return TypedField(tuple[float, float], **kw)
+
+
+ NativeUUIDField = SQLCustomType(
+ type="string",
+ native="uuid",
+ encoder=str,
+ decoder=uuid.UUID,
+ )
+
+
+ def UUIDField(**kw: Unpack[FieldSettings]) -> TypedField[uuid.UUID]:
+ """
+ Database type is uuid, Python type is UUID.
+ """
+ kw["type"] = NativeUUIDField
+ return TypedField(uuid.UUID, **kw)
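
The `SQLCustomType` definitions above pair a native column type with plain-Python encoder/decoder callables. Outside of pydal, the point and uuid round trips reduce to the following (illustration only, no database involved):

```python
import ast
import uuid

# PointField stores the tuple via str() and parses it back with ast.literal_eval:
point = (1.5, 2.0)
encoded_point = str(point)
assert ast.literal_eval(encoded_point) == point

# UUIDField stores the UUID via str() and parses it back with uuid.UUID():
gid = uuid.uuid4()
assert uuid.UUID(str(gid)) == gid
```
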
{typedal-3.7.0 → typedal-3.8.0}/src/typedal/types.py

@@ -4,11 +4,12 @@ Stuff to make mypy happy.

  import typing
  from datetime import datetime
- from typing import Any, Optional, TypedDict
+ from typing import Any, Callable, Optional, TypedDict

  from pydal.adapters.base import BaseAdapter
  from pydal.helpers.classes import OpRow as _OpRow
  from pydal.helpers.classes import Reference as _Reference
+ from pydal.helpers.classes import SQLCustomType
  from pydal.objects import Expression as _Expression
  from pydal.objects import Field as _Field
  from pydal.objects import Query as _Query
@@ -218,7 +219,7 @@ CacheModel = typing.Callable[[str, CacheFn, int], Rows]
  CacheTuple = tuple[CacheModel, int]


- class SelectKwargs(typing.TypedDict, total=False):
+ class SelectKwargs(TypedDict, total=False):
  """
  Possible keyword arguments for .select().
  """
@@ -250,3 +251,65 @@ class Metadata(TypedDict):
  relationships: NotRequired[set[str]]

  sql: NotRequired[str]
+
+
+ class FileSystemLike(typing.Protocol): # pragma: no cover
+ """
+ Protocol for any class that has an 'open' function.
+
+ An example of this is OSFS from PyFilesystem2.
+ """
+
+ def open(self, file: str, mode: str = "r") -> typing.IO[typing.Any]:
+ """
+ Opens a file for reading, writing or other modes.
+ """
+ ...
+
+
+ AnyCallable: typing.TypeAlias = Callable[..., Any]
+
+
+ class FieldSettings(TypedDict, total=False):
+ """
+ The supported keyword arguments for `pydal.Field()`.
+
+ Other arguments can be passed.
+ """
+
+ type: str | type | SQLCustomType
+ length: int
+ default: Any
+ required: bool
+ requires: list[AnyCallable | Any]
+ ondelete: str
+ onupdate: str
+ notnull: bool
+ unique: bool
+ uploadfield: bool | str
+ widget: AnyCallable
+ label: str
+ comment: str
+ writable: bool
+ readable: bool
+ searchable: bool
+ listable: bool
+ regex: str
+ options: list[Any] | AnyCallable
+ update: Any
+ authorize: AnyCallable
+ autodelete: bool
+ represent: AnyCallable
+ uploadfolder: str
+ uploadseparate: bool
+ uploadfs: FileSystemLike
+ compute: AnyCallable
+ custom_store: AnyCallable
+ custom_retrieve: AnyCallable
+ custom_retrieve_file_properties: AnyCallable
+ custom_delete: AnyCallable
+ filter_in: AnyCallable
+ filter_out: AnyCallable
+ custom_qualifier: Any
+ map_none: Any
+ rname: str
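
`FieldSettings` is consumed through `typing_extensions.Unpack` in `fields.py` above, which lets type checkers validate keyword arguments by name. A reduced, self-contained sketch of the same pattern (a trimmed stand-in, not the real TypeDAL definitions):

```python
from typing import Any, TypedDict

from typing_extensions import Unpack


class DemoSettings(TypedDict, total=False):
    # trimmed-down stand-in for FieldSettings
    notnull: bool
    default: Any
    label: str


def demo_field(**kw: Unpack[DemoSettings]) -> dict[str, Any]:
    # mypy/pyright now know which keyword arguments are allowed and their types
    return dict(kw)


print(demo_field(notnull=True, label="Name"))  # accepted
# demo_field(bogus=1)  # would be flagged by a type checker
```
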
typedal-3.8.0/tests/test_config.py

@@ -0,0 +1,315 @@
+ import os
+ import shutil
+ import tempfile
+ from pathlib import Path
+ import datetime as dt
+ import uuid
+
+ from pydal2sql import generate_sql
+ import pytest
+ # from contextlib import chdir
+ from contextlib_chdir import chdir
+ from testcontainers.postgres import PostgresContainer
+
+ from src.typedal import TypeDAL, TypedTable, TypedField
+ from src.typedal.config import (
+ _load_dotenv,
+ _load_toml,
+ expand_env_vars_into_toml_values,
+ load_config,
+ )
+ from src.typedal.fields import TimestampField, PointField, UUIDField
+
+ postgres = PostgresContainer(
+ dbname="postgres",
+ username="someuser",
+ password="somepass",
+ )
+
+
+ @pytest.fixture(scope="module", autouse=True)
+ def psql(request):
+ postgres.ports = {
+ 5432: 9631, # as set in valid.env
+ }
+
+ request.addfinalizer(postgres.stop)
+ postgres.start()
+
+
+ @pytest.fixture
+ def at_temp_dir():
+ with tempfile.TemporaryDirectory() as d:
+ with chdir(d):
+ yield d
+
+
+ def _load_db_after_setup(dialect: str):
+ config = load_config()
+ db = TypeDAL(attempts=1)
+ assert db._uri == config.database
+
+ assert f"'dialect': '{dialect}'" in repr(config)
+
+ return True
+
+
+ def test_load_toml(at_temp_dir):
+ base = Path("pyproject.toml")
+ base.write_text("# empty")
+
+ assert _load_toml(False) == ("", {})
+ assert _load_toml(None) == (str(base.resolve().absolute()), {})
+ assert _load_toml(str(base)) == ("pyproject.toml", {})
+ assert _load_toml(".") == (str(base.resolve().absolute()), {})
+
+
+ def test_load_dotenv(at_temp_dir):
+ base = Path(".env")
+ base.write_text("# empty")
+
+ assert _load_dotenv(False)[0] == ""
+ assert _load_dotenv(None)[0] == str(base.resolve().absolute())
+ assert _load_dotenv(str(base))[0] == ".env"
+ assert _load_dotenv(".")[0] == ".env"
+
+
+ def test_load_empty_config(at_temp_dir):
+ assert _load_db_after_setup("sqlite")
+
+
+ def test_load_toml_config(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("sqlite")
+
+
+ def test_load_env_config(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+
+ assert _load_db_after_setup("postgres")
+
+
+ def test_load_simple_config(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+ shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("postgres")
+
+
+ def test_load_both_config(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+ shutil.copy(examples / "valid.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("postgres")
+
+
+ def test_converting(at_temp_dir):
+ from edwh_migrate import Config as MigrateConfig
+ from pydal2sql.typer_support import Config as P2SConfig
+
+ config = load_config()
+
+ assert isinstance(config.to_migrate(), MigrateConfig)
+ assert isinstance(config.to_pydal2sql(), P2SConfig)
+
+
+ def test_environ(at_temp_dir):
+ os.environ["DB_URI"] = "sqlite:///tmp/db.sqlite"
+ config = load_config(False, True)
+
+ assert config.database == "sqlite:///tmp/db.sqlite"
+
+
+ def test_expand_env_vars():
+ # str
+ input_str = "${MYVALUE:default}"
+ data = {"myvar": input_str}
+ expand_env_vars_into_toml_values(data, {})
+ assert data["myvar"] == input_str
+
+ expand_env_vars_into_toml_values(data, {"unrelated": "data"})
+ assert data["myvar"] == "default"
+
+ data = {"myvar": input_str}
+ expand_env_vars_into_toml_values(data, {"myvalue": "123"})
+
+ assert data["myvar"] == "123"
+
+ # list
+ data = {"myvar": [input_str, input_str]}
+ expand_env_vars_into_toml_values(data, {"myvalue": "456"})
+
+ assert data["myvar"] == ["456", "456"]
+
+ # dict
+ data = {"myvar": {"value": input_str}}
+ expand_env_vars_into_toml_values(data, {"myvalue": "789"})
+ assert data["myvar"]["value"] == "789"
+
+ # other - non-str
+ data = {"myvar": None, "mynumber": 123}
+ expand_env_vars_into_toml_values(data, {"myvalue": "789"})
+ assert data["myvar"] is None
+ assert data["mynumber"] == 123
+
+
+ # note: these are not really 'config' specific but we already have access to postgres here so good enough:
+
+ def test_timestamp_fields_sqlite(at_temp_dir):
+ db = TypeDAL("sqlite:memory")
+
+ class Timestamp(TypedTable):
+ ts = TimestampField(default=dt.datetime.now)
+ dt = TypedField(dt.datetime, default=dt.datetime.now)
+
+ db.define(Timestamp)
+
+ row = Timestamp.insert()
+
+ # old:
+ assert isinstance(row.dt, dt.datetime), "not a datetime"
+ assert "." not in str(row.dt) # no ms precision
+
+ # new:
+ assert isinstance(row.ts, dt.datetime), "not a datetime"
+ assert "." in str(row.ts) # ms precision
+
+ assert '"ts" timestamp NOT NULL' in Timestamp._sql()
+
+
+ def test_timestamp_fields_psql(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+ shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("postgres")
+ db = TypeDAL(attempts=1)
+
+ class Timestamp(TypedTable):
+ ts = TimestampField(default=dt.datetime.now)
+ dt = TypedField(dt.datetime, default=dt.datetime.now)
+
+ db.define(Timestamp)
+
+ row = Timestamp.insert()
+
+ # old:
+ assert isinstance(row.dt, dt.datetime), "not a datetime"
+ assert "." not in str(row.dt) # no ms precision
+
+ # new:
+ assert isinstance(row.ts, dt.datetime), "not a datetime"
+ assert "." in str(row.ts) # ms precision
+
+ assert '"ts" timestamp NOT NULL' in Timestamp._sql()
+
+
+ def test_point_fields_sqlite(at_temp_dir):
+ db = TypeDAL("sqlite:memory")
+
+ class Point(TypedTable):
+ pt = PointField()
+
+ db.define(Point)
+
+ row1 = Point.insert(pt=(1, 0))
+ row2 = Point.insert(pt="(1, 0)")
+
+ assert row1.pt == row2.pt
+
+ x, y = row1.pt
+ assert x == 1
+ assert y == 0
+
+ with pytest.raises(Exception):
+ Point.insert(pt=123)
+
+ with pytest.raises(Exception):
+ Point.insert(pt="123")
+
+ # note: psql will check this whereas sqlite won't:
+ Point.insert(pt=())
+
+ assert '"pt" point NOT NULL' in Point._sql()
+
+
+ def test_point_fields_psql(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+ shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("postgres")
+ db = TypeDAL(attempts=1)
+
+ class Point(TypedTable):
+ pt = PointField()
+
+ db.define(Point)
+
+ row1 = Point.insert(pt=(1, 0))
+ row2 = Point.insert(pt="(1, 0)")
+
+ assert row1.pt == row2.pt
+
+ x, y = row1.pt
+ assert x == 1
+ assert y == 0
+
+ with pytest.raises(Exception):
+ Point.insert(pt=123)
+
+ with pytest.raises(Exception):
+ Point.insert(pt="123")
+
+ # note: psql will check this whereas sqlite won't:
+
+ with pytest.raises(Exception):
+ Point.insert(pt=())
+
+ assert '"pt" point NOT NULL' in Point._sql()
+
+
+ def test_uuid_fields_psql(at_temp_dir):
+ examples = Path(__file__).parent / "configs"
+ shutil.copy(examples / "valid.env", "./.env")
+ shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+ assert _load_db_after_setup("postgres")
+ db = TypeDAL(attempts=1)
+
+ class UUIDTable(TypedTable):
+ gid = UUIDField(default=uuid.uuid4)
+
+ db.define(UUIDTable)
+
+ row = UUIDTable.insert()
+
+ assert isinstance(row.gid, uuid.UUID)
+
+ with pytest.raises(Exception):
+ UUIDTable.insert(gid="not-a-uuid")
+
+ assert '"gid" uuid NOT NULL' in UUIDTable._sql()
+
+
+ def test_uuid_fields_sqlite(at_temp_dir):
+ db = TypeDAL("sqlite:memory")
+
+ class UUIDTable(TypedTable):
+ gid = UUIDField(default=uuid.uuid4)
+
+ db.define(UUIDTable)
+
+ row = UUIDTable.insert()
+
+ assert isinstance(row.gid, uuid.UUID)
+
+ with pytest.raises(Exception):
+ UUIDTable.insert(gid="not-a-uuid")
+
+ assert '"gid" uuid NOT NULL' in UUIDTable._sql()
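
The timestamp tests above hinge on sub-second precision surviving the round trip. In plain Python (no database involved) the distinction they assert on looks like this:

```python
import datetime as dt

precise = dt.datetime(2024, 10, 11, 20, 18, 24, 505194)
assert "." in str(precise)        # timestamp-style value keeps microseconds
truncated = precise.replace(microsecond=0)
assert "." not in str(truncated)  # what a plain datetime column effectively stores
```
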
typedal-3.7.0/tests/test_config.py

@@ -1,155 +0,0 @@
- import os
- import shutil
- import tempfile
- from pathlib import Path
-
- import pytest
-
- # from contextlib import chdir
- from contextlib_chdir import chdir
- from testcontainers.postgres import PostgresContainer
-
- from src.typedal import TypeDAL
- from src.typedal.config import (
- _load_dotenv,
- _load_toml,
- expand_env_vars_into_toml_values,
- load_config,
- )
-
- postgres = PostgresContainer(
- dbname="postgres",
- username="someuser",
- password="somepass",
- )
-
-
- @pytest.fixture(scope="module", autouse=True)
- def psql(request):
- postgres.ports = {
- 5432: 9631, # as set in valid.env
- }
-
- request.addfinalizer(postgres.stop)
- postgres.start()
-
-
- @pytest.fixture
- def at_temp_dir():
- with tempfile.TemporaryDirectory() as d:
- with chdir(d):
- yield d
-
-
- def _load_db_after_setup(dialect: str):
- config = load_config()
- db = TypeDAL(attempts=1)
- assert db._uri == config.database
-
- assert f"'dialect': '{dialect}'" in repr(config)
-
- return True
-
-
- def test_load_toml(at_temp_dir):
- base = Path("pyproject.toml")
- base.write_text("# empty")
-
- assert _load_toml(False) == ("", {})
- assert _load_toml(None) == (str(base.resolve().absolute()), {})
- assert _load_toml(str(base)) == ("pyproject.toml", {})
- assert _load_toml(".") == (str(base.resolve().absolute()), {})
-
-
- def test_load_dotenv(at_temp_dir):
- base = Path(".env")
- base.write_text("# empty")
-
- assert _load_dotenv(False)[0] == ""
- assert _load_dotenv(None)[0] == str(base.resolve().absolute())
- assert _load_dotenv(str(base))[0] == ".env"
- assert _load_dotenv(".")[0] == ".env"
-
-
- def test_load_empty_config(at_temp_dir):
- assert _load_db_after_setup("sqlite")
-
-
- def test_load_toml_config(at_temp_dir):
- examples = Path(__file__).parent / "configs"
- shutil.copy(examples / "valid.toml", "./pyproject.toml")
-
- assert _load_db_after_setup("sqlite")
-
-
- def test_load_env_config(at_temp_dir):
- examples = Path(__file__).parent / "configs"
- shutil.copy(examples / "valid.env", "./.env")
-
- assert _load_db_after_setup("postgres")
-
-
- def test_load_simple_config(at_temp_dir):
- examples = Path(__file__).parent / "configs"
- shutil.copy(examples / "valid.env", "./.env")
- shutil.copy(examples / "simple.toml", "./pyproject.toml")
-
- assert _load_db_after_setup("postgres")
-
-
- def test_load_both_config(at_temp_dir):
- examples = Path(__file__).parent / "configs"
- shutil.copy(examples / "valid.env", "./.env")
- shutil.copy(examples / "valid.toml", "./pyproject.toml")
-
- assert _load_db_after_setup("postgres")
-
-
- def test_converting(at_temp_dir):
- from edwh_migrate import Config as MigrateConfig
- from pydal2sql.typer_support import Config as P2SConfig
-
- config = load_config()
-
- assert isinstance(config.to_migrate(), MigrateConfig)
- assert isinstance(config.to_pydal2sql(), P2SConfig)
-
-
- def test_environ(at_temp_dir):
- os.environ["DB_URI"] = "sqlite:///tmp/db.sqlite"
- config = load_config(False, True)
-
- assert config.database == "sqlite:///tmp/db.sqlite"
-
-
- def test_expand_env_vars():
- # str
- input_str = "${MYVALUE:default}"
- data = {"myvar": input_str}
- expand_env_vars_into_toml_values(data, {})
- assert data["myvar"] == input_str
-
- expand_env_vars_into_toml_values(data, {"unrelated": "data"})
- assert data["myvar"] == "default"
-
- data = {"myvar": input_str}
- expand_env_vars_into_toml_values(data, {"myvalue": "123"})
-
- assert data["myvar"] == "123"
-
- # list
- data = {"myvar": [input_str, input_str]}
- expand_env_vars_into_toml_values(data, {"myvalue": "456"})
-
- assert data["myvar"] == ["456", "456"]
-
- # dict
- data = {"myvar": {"value": input_str}}
- expand_env_vars_into_toml_values(data, {"myvalue": "789"})
- assert data["myvar"]["value"] == "789"
-
- # other - non-str
- data = {"myvar": None, "mynumber": 123}
- expand_env_vars_into_toml_values(data, {"myvalue": "789"})
- assert data["myvar"] is None
- assert data["mynumber"] == 123