TypeDAL 3.7.1.tar.gz → 3.8.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of TypeDAL might be problematic.
- {typedal-3.7.1 → typedal-3.8.1}/.github/workflows/su6.yml +1 -1
- {typedal-3.7.1 → typedal-3.8.1}/CHANGELOG.md +15 -0
- {typedal-3.7.1 → typedal-3.8.1}/PKG-INFO +1 -1
- {typedal-3.7.1 → typedal-3.8.1}/example_new.py +5 -2
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/__about__.py +1 -1
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/core.py +34 -7
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/fields.py +79 -33
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/types.py +65 -2
- typedal-3.8.1/tests/test_config.py +315 -0
- typedal-3.7.1/tests/test_config.py +0 -155
- {typedal-3.7.1 → typedal-3.8.1}/.gitignore +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/.readthedocs.yml +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/README.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/coverage.svg +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/1_getting_started.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/2_defining_tables.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/3_building_queries.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/4_relationships.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/5_py4web.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/6_migrations.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/7_mixins.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/css/code_blocks.css +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/index.md +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/docs/requirements.txt +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/example_old.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/mkdocs.yml +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/pyproject.toml +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/__init__.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/caching.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/cli.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/config.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/for_py4web.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/for_web2py.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/helpers.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/mixins.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/py.typed +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/serializers/as_json.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/src/typedal/web2py_py4web_shared.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/__init__.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/configs/simple.toml +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/configs/valid.env +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/configs/valid.toml +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_cli.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_docs_examples.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_helpers.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_json.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_main.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_mixins.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_mypy.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_orm.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_py4web.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_query_builder.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_relationships.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_row.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_stats.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_table.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_web2py.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/test_xx_others.py +0 -0
- {typedal-3.7.1 → typedal-3.8.1}/tests/timings.py +0 -0
CHANGELOG.md:
@@ -2,6 +2,21 @@
 
 <!--next-version-placeholder-->
 
+## v3.8.1 (2024-10-22)
+
+### Fix
+
+* Make 'requires=' also accept list[Validator] or a single Validator/Callable ([`a4a7c00`](https://github.com/trialandsuccess/TypeDAL/commit/a4a7c002186f8824971987f96d573fe455dcd01d))
+
+## v3.8.0 (2024-10-11)
+
+### Feature
+
+* Add `_sql()` function to TypedTable to generate SQL Schema code. (only if 'migration' extra/pydal2sql is installed) ([`31f86de`](https://github.com/trialandsuccess/TypeDAL/commit/31f86de30cc53cf320f6231c27dd545103b50d10))
+* Add FieldSettings typed dict for better hinting for options when creating a TypedField() or any of the fields using it ([`97a7c7a`](https://github.com/trialandsuccess/TypeDAL/commit/97a7c7ad6112a6098088c44bbc6ae438bbfc0040))
+* Add custom TypedFields for timestamp, point and uuid (valid types in postgres and sqlite is okay with anything) ([`a7bc9d1`](https://github.com/trialandsuccess/TypeDAL/commit/a7bc9d1b7ab0c88d4937956a68305b4d61a0851f))
+* Started on custom types (timestamp) ([`981da83`](https://github.com/trialandsuccess/TypeDAL/commit/981da83cc8f4fec442b2cf74e0b555ce0633f96a))
+
 ## v3.7.1 (2024-10-09)
 
 ### Fix
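A minimal sketch (not part of the package) of what the v3.8.1 `requires=` fix accepts; the table and field names below are made up, the validators are stock pydal ones:

    from pydal.validators import IS_NOT_EMPTY, IS_INT_IN_RANGE
    from typedal import TypeDAL, TypedTable, TypedField

    db = TypeDAL("sqlite:memory")

    class Article(TypedTable):
        # a single Validator instance
        title = TypedField(str, requires=IS_NOT_EMPTY())
        # a list of Validators
        rating = TypedField(int, requires=[IS_NOT_EMPTY(), IS_INT_IN_RANGE(1, 6)])
        # a plain callable following pydal's (value, error) convention
        slug = TypedField(str, requires=lambda value: (value, None) if value else (value, "required"))

    db.define(Article)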
example_new.py:
@@ -5,7 +5,8 @@ from decimal import Decimal
 import datetime as dt
 
 from src.typedal.fields import TextField
-from typedal.helpers import utcnow
+from src.typedal.helpers import utcnow
+from pydal.validators import IS_NOT_EMPTY
 
 db = TypeDAL("sqlite:memory")
 
@@ -16,9 +17,11 @@ db = TypeDAL("sqlite:memory")
 class Person(TypedTable):
     name: TypedField[str]
 
-    age = TypedField(int, default=18)
+    age = TypedField(int, default=18, requires=IS_NOT_EMPTY())
     nicknames: list[str]
 
+    ts = TypedField(dt.datetime, type="timestamp")
+
 
 assert db.person._format == "%(name)s"
 
src/typedal/core.py:
@@ -48,6 +48,7 @@ from .types import (
     CacheMetadata,
     Expression,
     Field,
+    FieldSettings,
     Metadata,
     OpRow,
     PaginateDict,
src/typedal/core.py:
@@ -349,8 +350,8 @@ def evaluate_forward_reference(fw_ref: typing.ForwardRef) -> type:
         globalns=globals(),
         recursive_guard=frozenset(),
     )
-    if sys.version_info >= (3, 13):
-        #
+    if sys.version_info >= (3, 13):  # pragma: no cover
+        # suggested since 3.13 (warning) and not supported before. Mandatory after 1.15!
        kwargs["type_params"] = ()
 
    return fw_ref._evaluate(**kwargs)  # type: ignore
src/typedal/core.py:
@@ -537,7 +538,9 @@ class TypeDAL(pydal.DAL):  # type: ignore
         # }
 
         # keys of implicit references (also relationships):
-        reference_field_keys = [
+        reference_field_keys = [
+            k for k, v in fields.items() if str(v.type).split(" ")[0] in ("list:reference", "reference")
+        ]
 
         # add implicit relationships:
         # User; list[User]; TypedField[User]; TypedField[list[User]]
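For context, pydal stores column types as strings such as "reference person" or "list:reference tag" (the table names here are made up), so the comprehension above keys off the first word only; a tiny illustration:

    # illustration of the string matching used for reference_field_keys
    assert "reference person".split(" ")[0] == "reference"
    assert "list:reference tag".split(" ")[0] == "list:reference"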
src/typedal/core.py:
@@ -681,7 +684,9 @@ class TypeDAL(pydal.DAL):  # type: ignore
 
     @classmethod
     def _build_field(cls, name: str, _type: str, **kw: Any) -> Field:
-        return Field(name, _type, **{**cls.default_kwargs, **kw})
+        # return Field(name, _type, **{**cls.default_kwargs, **kw})
+        kw_combined = cls.default_kwargs | kw
+        return Field(name, _type, **kw_combined)
 
     @classmethod
     def _annotation_to_pydal_fieldtype(
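The rewrite of `_build_field` swaps the double-splat merge for the dict union operator; the precedence stays the same, with the caller's kwargs overriding the class defaults. A minimal sketch with made-up values:

    default_kwargs = {"notnull": False, "readable": True}
    kw = {"notnull": True}
    assert (default_kwargs | kw) == {"notnull": True, "readable": True}  # right-hand side wins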
src/typedal/core.py:
@@ -758,6 +763,7 @@ class TypeDAL(pydal.DAL):  # type: ignore
         """
         fname = cls.to_snake(fname)
 
+        # note: 'kw' is updated in `_annotation_to_pydal_fieldtype` by the kwargs provided to the TypedField(...)
         if converted_type := cls._annotation_to_pydal_fieldtype(ftype, kw):
             return cls._build_field(fname, converted_type, **kw)
         else:
src/typedal/core.py:
@@ -1229,9 +1235,16 @@ class TypedField(Expression, typing.Generic[T_Value]):  # pragma: no cover
 
     requires: Validator | typing.Iterable[Validator]
 
-
+    # NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
+
+    def __init__(
+        self, _type: Type[T_Value] | types.UnionType = str, /, **settings: Unpack[FieldSettings]  # type: ignore
+    ) -> None:
         """
-
+        Typed version of pydal.Field, which will be converted to a normal Field in the background.
+
+        Provide the Python type for this field as the first positional argument
+        and any other settings to Field() as keyword parameters.
         """
         self._type = _type
         self.kwargs = settings
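Because the settings are now typed as `Unpack[FieldSettings]`, a checker such as mypy or pyright can flag unsupported keyword arguments at the call site. A small usage sketch (the class is made up):

    from typedal import TypedTable, TypedField

    class Person(TypedTable):
        # keys from FieldSettings ('default', 'notnull', 'requires', ...) are hinted and checked
        age = TypedField(int, default=18, notnull=True)
        # a misspelled key such as 'defualt=18' would now be reported by the type checker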
src/typedal/core.py:
@@ -1306,9 +1319,11 @@ class TypedField(Expression, typing.Generic[T_Value]):  # pragma: no cover
     def _to_field(self, extra_kwargs: typing.MutableMapping[str, Any]) -> Optional[str]:
         """
         Convert a Typed Field instance to a pydal.Field.
+
+        Actual logic in TypeDAL._to_field but this function creates the pydal type name and updates the kwarg settings.
         """
         other_kwargs = self.kwargs.copy()
-        extra_kwargs.update(other_kwargs)
+        extra_kwargs.update(other_kwargs)  # <- modifies and overwrites the default kwargs with user-specified ones
         return extra_kwargs.pop("type", False) or TypeDAL._annotation_to_pydal_fieldtype(self._type, extra_kwargs)
 
     def bind(self, field: pydal.objects.Field, table: pydal.objects.Table) -> None:
src/typedal/core.py:
@@ -1755,6 +1770,18 @@ class TypedTable(_TypedTable, metaclass=TableMeta):
             state["_row"] = Row(json.loads(state["_row"]))
         self.__dict__ |= state
 
+    @classmethod
+    def _sql(cls) -> str:
+        """
+        Generate SQL Schema for this table via pydal2sql (if 'migrations' extra is installed).
+        """
+        try:
+            import pydal2sql
+        except ImportError as e:  # pragma: no cover
+            raise RuntimeError("Can not generate SQL without the 'migration' extra or `pydal2sql` installed!") from e
+
+        return pydal2sql.generate_sql(cls)
+
 
 # backwards compat:
 TypedRow = TypedTable
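A hedged usage sketch for the new `_sql()` helper; it assumes the 'migration' extra (pydal2sql) is installed, and the table name is made up:

    from typedal import TypeDAL, TypedTable, TypedField

    db = TypeDAL("sqlite:memory")

    class Ticket(TypedTable):
        title = TypedField(str)

    db.define(Ticket)

    print(Ticket._sql())  # CREATE TABLE statement generated via pydal2sql.generate_sql
    # without pydal2sql installed, _sql() raises the RuntimeError shown in the hunk above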
src/typedal/fields.py:
@@ -2,13 +2,18 @@
 This file contains available Field types.
 """
 
+import ast
 import datetime as dt
 import decimal
 import typing
+import uuid
 
+from pydal.helpers.classes import SQLCustomType
 from pydal.objects import Table
+from typing_extensions import Unpack
 
 from .core import TypeDAL, TypedField, TypedTable
+from .types import FieldSettings
 
 T = typing.TypeVar("T", bound=typing.Any)
 
src/typedal/fields.py:
@@ -16,23 +21,8 @@ T = typing.TypeVar("T", bound=typing.Any)
 ## general
 
 
-# def TypedField(
-#     _type: typing.Type[T] | types.UnionType,
-#     **kwargs: typing.Any,
-# ) -> T:
-#     """
-#     sneaky: its a function and not a class, because there's a return type.
-#
-#     and the return type (T) is the input type in _type
-#
-#     Example:
-#         age: TypedField(int, default=18)
-#     """
-#     return typing.cast(T, TypedFieldType(_type, **kwargs))
-
-
 ## specific
-def StringField(**kw: typing.Any) -> TypedField[str]:
+def StringField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
     """
     Pydal type is string, Python type is str.
     """
src/typedal/fields.py:
@@ -43,7 +33,7 @@ def StringField(**kw: typing.Any) -> TypedField[str]:
 String = StringField
 
 
-def TextField(**kw: typing.Any) -> TypedField[str]:
+def TextField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
     """
     Pydal type is text, Python type is str.
     """
@@ -54,7 +44,7 @@ def TextField(**kw: typing.Any) -> TypedField[str]:
 Text = TextField
 
 
-def BlobField(**kw: typing.Any) -> TypedField[bytes]:
+def BlobField(**kw: Unpack[FieldSettings]) -> TypedField[bytes]:
     """
     Pydal type is blob, Python type is bytes.
     """
@@ -65,7 +55,7 @@ def BlobField(**kw: typing.Any) -> TypedField[bytes]:
 Blob = BlobField
 
 
-def BooleanField(**kw: typing.Any) -> TypedField[bool]:
+def BooleanField(**kw: Unpack[FieldSettings]) -> TypedField[bool]:
     """
     Pydal type is boolean, Python type is bool.
     """
@@ -76,7 +66,7 @@ def BooleanField(**kw: typing.Any) -> TypedField[bool]:
 Boolean = BooleanField
 
 
-def IntegerField(**kw: typing.Any) -> TypedField[int]:
+def IntegerField(**kw: Unpack[FieldSettings]) -> TypedField[int]:
     """
     Pydal type is integer, Python type is int.
     """
@@ -87,7 +77,7 @@ def IntegerField(**kw: typing.Any) -> TypedField[int]:
 Integer = IntegerField
 
 
-def DoubleField(**kw: typing.Any) -> TypedField[float]:
+def DoubleField(**kw: Unpack[FieldSettings]) -> TypedField[float]:
     """
     Pydal type is double, Python type is float.
     """
@@ -98,7 +88,7 @@ def DoubleField(**kw: typing.Any) -> TypedField[float]:
 Double = DoubleField
 
 
-def DecimalField(n: int, m: int, **kw: typing.Any) -> TypedField[decimal.Decimal]:
+def DecimalField(n: int, m: int, **kw: Unpack[FieldSettings]) -> TypedField[decimal.Decimal]:
     """
     Pydal type is decimal, Python type is Decimal.
     """
@@ -109,7 +99,7 @@ def DecimalField(n: int, m: int, **kw: typing.Any) -> TypedField[decimal.Decimal]:
 Decimal = DecimalField
 
 
-def DateField(**kw: typing.Any) -> TypedField[dt.date]:
+def DateField(**kw: Unpack[FieldSettings]) -> TypedField[dt.date]:
     """
     Pydal type is date, Python type is datetime.date.
     """
@@ -120,7 +110,7 @@ def DateField(**kw: typing.Any) -> TypedField[dt.date]:
 Date = DateField
 
 
-def TimeField(**kw: typing.Any) -> TypedField[dt.time]:
+def TimeField(**kw: Unpack[FieldSettings]) -> TypedField[dt.time]:
     """
     Pydal type is time, Python type is datetime.time.
     """
@@ -131,7 +121,7 @@ def TimeField(**kw: typing.Any) -> TypedField[dt.time]:
 Time = TimeField
 
 
-def DatetimeField(**kw: typing.Any) -> TypedField[dt.datetime]:
+def DatetimeField(**kw: Unpack[FieldSettings]) -> TypedField[dt.datetime]:
     """
     Pydal type is datetime, Python type is datetime.datetime.
     """
@@ -142,7 +132,7 @@ def DatetimeField(**kw: typing.Any) -> TypedField[dt.datetime]:
 Datetime = DatetimeField
 
 
-def PasswordField(**kw: typing.Any) -> TypedField[str]:
+def PasswordField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
     """
     Pydal type is password, Python type is str.
     """
@@ -153,7 +143,7 @@ def PasswordField(**kw: typing.Any) -> TypedField[str]:
 Password = PasswordField
 
 
-def UploadField(**kw: typing.Any) -> TypedField[str]:
+def UploadField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
     """
     Pydal type is upload, Python type is str.
     """
@@ -167,7 +157,7 @@ T_subclass = typing.TypeVar("T_subclass", TypedTable, Table)
 
 
 def ReferenceField(
-    other_table: str | typing.Type[TypedTable] | TypedTable | Table | T_subclass, **kw: typing.Any
+    other_table: str | typing.Type[TypedTable] | TypedTable | Table | T_subclass, **kw: Unpack[FieldSettings]
 ) -> TypedField[int]:
     """
     Pydal type is reference, Python type is int (id).
@@ -190,7 +180,7 @@ def ReferenceField(
 Reference = ReferenceField
 
 
-def ListStringField(**kw: typing.Any) -> TypedField[list[str]]:
+def ListStringField(**kw: Unpack[FieldSettings]) -> TypedField[list[str]]:
     """
     Pydal type is list:string, Python type is list of str.
     """
@@ -201,7 +191,7 @@ def ListStringField(**kw: typing.Any) -> TypedField[list[str]]:
 ListString = ListStringField
 
 
-def ListIntegerField(**kw: typing.Any) -> TypedField[list[int]]:
+def ListIntegerField(**kw: Unpack[FieldSettings]) -> TypedField[list[int]]:
     """
     Pydal type is list:integer, Python type is list of int.
     """
@@ -212,7 +202,7 @@ def ListIntegerField(**kw: typing.Any) -> TypedField[list[int]]:
 ListInteger = ListIntegerField
 
 
-def ListReferenceField(other_table: str, **kw: typing.Any) -> TypedField[list[int]]:
+def ListReferenceField(other_table: str, **kw: Unpack[FieldSettings]) -> TypedField[list[int]]:
     """
     Pydal type is list:reference, Python type is list of int (id).
     """
@@ -223,7 +213,7 @@ def ListReferenceField(other_table: str, **kw: typing.Any) -> TypedField[list[int]]:
 ListReference = ListReferenceField
 
 
-def JSONField(**kw: typing.Any) -> TypedField[object]:
+def JSONField(**kw: Unpack[FieldSettings]) -> TypedField[object]:
     """
     Pydal type is json, Python type is object (can be anything JSON-encodable).
     """
@@ -231,7 +221,7 @@ def JSONField(**kw: typing.Any) -> TypedField[object]:
     return TypedField(object, **kw)
 
 
-def BigintField(**kw: typing.Any) -> TypedField[int]:
+def BigintField(**kw: Unpack[FieldSettings]) -> TypedField[int]:
     """
     Pydal type is bigint, Python type is int.
     """
src/typedal/fields.py:
@@ -240,3 +230,59 @@
 
 
 Bigint = BigintField
+
+## Custom:
+
+NativeTimestampField = SQLCustomType(
+    type="datetime",
+    native="timestamp",
+    encoder=lambda x: f"'{x}'",  # extra quotes
+    # decoder=lambda x: x,  # already parsed into datetime
+)
+
+
+def TimestampField(**kw: Unpack[FieldSettings]) -> TypedField[dt.datetime]:
+    """
+    Database type is timestamp, Python type is datetime.
+
+    Advantage over the regular datetime type is that
+    a timestamp has millisecond precision (2024-10-11 20:18:24.505194)
+    whereas a regular datetime only has precision up to the second (2024-10-11 20:18:24)
+    """
+    kw["type"] = NativeTimestampField
+    return TypedField(
+        dt.datetime,
+        **kw,
+    )
+
+
+NativePointField = SQLCustomType(
+    type="string",
+    native="point",
+    encoder=str,
+    decoder=ast.literal_eval,
+)
+
+
+def PointField(**kw: Unpack[FieldSettings]) -> TypedField[tuple[float, float]]:
+    """
+    Database type is point, Python type is tuple[float, float].
+    """
+    kw["type"] = NativePointField
+    return TypedField(tuple[float, float], **kw)
+
+
+NativeUUIDField = SQLCustomType(
+    type="string",
+    native="uuid",
+    encoder=str,
+    decoder=uuid.UUID,
+)
+
+
+def UUIDField(**kw: Unpack[FieldSettings]) -> TypedField[uuid.UUID]:
+    """
+    Database type is uuid, Python type is UUID.
+    """
+    kw["type"] = NativeUUIDField
+    return TypedField(uuid.UUID, **kw)
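The new custom fields are exercised by the added tests further below; a condensed usage sketch (table name made up — per the changelog, uuid/point/timestamp are native column types on postgres while sqlite accepts them loosely):

    import datetime as dt
    import uuid

    from typedal import TypeDAL, TypedTable
    from typedal.fields import TimestampField, PointField, UUIDField

    db = TypeDAL("sqlite:memory")

    class Event(TypedTable):
        gid = UUIDField(default=uuid.uuid4)                     # native 'uuid' column on postgres
        happened_at = TimestampField(default=dt.datetime.now)  # keeps sub-second precision
        location = PointField()                                 # round-trips as tuple[float, float]

    db.define(Event)
    row = Event.insert(location=(1.0, 2.0))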
src/typedal/types.py:
@@ -4,11 +4,12 @@ Stuff to make mypy happy.
 
 import typing
 from datetime import datetime
-from typing import Any, Optional, TypedDict
+from typing import Any, Callable, Optional, TypedDict
 
 from pydal.adapters.base import BaseAdapter
 from pydal.helpers.classes import OpRow as _OpRow
 from pydal.helpers.classes import Reference as _Reference
+from pydal.helpers.classes import SQLCustomType
 from pydal.objects import Expression as _Expression
 from pydal.objects import Field as _Field
 from pydal.objects import Query as _Query
src/typedal/types.py:
@@ -218,7 +219,7 @@ CacheModel = typing.Callable[[str, CacheFn, int], Rows]
 CacheTuple = tuple[CacheModel, int]
 
 
-class SelectKwargs(
+class SelectKwargs(TypedDict, total=False):
     """
     Possible keyword arguments for .select().
     """
src/typedal/types.py:
@@ -250,3 +251,65 @@ class Metadata(TypedDict):
     relationships: NotRequired[set[str]]
 
     sql: NotRequired[str]
+
+
+class FileSystemLike(typing.Protocol):  # pragma: no cover
+    """
+    Protocol for any class that has an 'open' function.
+
+    An example of this is OSFS from PyFilesystem2.
+    """
+
+    def open(self, file: str, mode: str = "r") -> typing.IO[typing.Any]:
+        """
+        Opens a file for reading, writing or other modes.
+        """
+        ...
+
+
+AnyCallable: typing.TypeAlias = Callable[..., Any]
+
+
+class FieldSettings(TypedDict, total=False):
+    """
+    The supported keyword arguments for `pydal.Field()`.
+
+    Other arguments can be passed.
+    """
+
+    type: str | type | SQLCustomType
+    length: int
+    default: Any
+    required: bool
+    requires: list[AnyCallable | Any | Validator] | Validator | AnyCallable
+    ondelete: str
+    onupdate: str
+    notnull: bool
+    unique: bool
+    uploadfield: bool | str
+    widget: AnyCallable
+    label: str
+    comment: str
+    writable: bool
+    readable: bool
+    searchable: bool
+    listable: bool
+    regex: str
+    options: list[Any] | AnyCallable
+    update: Any
+    authorize: AnyCallable
+    autodelete: bool
+    represent: AnyCallable
+    uploadfolder: str
+    uploadseparate: bool
+    uploadfs: FileSystemLike
+    compute: AnyCallable
+    custom_store: AnyCallable
+    custom_retrieve: AnyCallable
+    custom_retrieve_file_properties: AnyCallable
+    custom_delete: AnyCallable
+    filter_in: AnyCallable
+    filter_out: AnyCallable
+    custom_qualifier: Any
+    map_none: Any
+    rname: str
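Since `FieldSettings` lives in `typedal.types`, downstream code can reuse it for its own field helpers, mirroring how the built-in helpers in fields.py forward `**kw: Unpack[FieldSettings]`. A sketch with a made-up helper (the default length is arbitrary):

    from typing_extensions import Unpack

    from typedal import TypedField
    from typedal.types import FieldSettings

    def EmailField(**kw: Unpack[FieldSettings]) -> TypedField[str]:
        kw.setdefault("length", 320)  # hypothetical default; not part of the package
        return TypedField(str, **kw)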
tests/test_config.py (new file):
@@ -0,0 +1,315 @@
+import os
+import shutil
+import tempfile
+from pathlib import Path
+import datetime as dt
+import uuid
+
+from pydal2sql import generate_sql
+import pytest
+# from contextlib import chdir
+from contextlib_chdir import chdir
+from testcontainers.postgres import PostgresContainer
+
+from src.typedal import TypeDAL, TypedTable, TypedField
+from src.typedal.config import (
+    _load_dotenv,
+    _load_toml,
+    expand_env_vars_into_toml_values,
+    load_config,
+)
+from src.typedal.fields import TimestampField, PointField, UUIDField
+
+postgres = PostgresContainer(
+    dbname="postgres",
+    username="someuser",
+    password="somepass",
+)
+
+
+@pytest.fixture(scope="module", autouse=True)
+def psql(request):
+    postgres.ports = {
+        5432: 9631,  # as set in valid.env
+    }
+
+    request.addfinalizer(postgres.stop)
+    postgres.start()
+
+
+@pytest.fixture
+def at_temp_dir():
+    with tempfile.TemporaryDirectory() as d:
+        with chdir(d):
+            yield d
+
+
+def _load_db_after_setup(dialect: str):
+    config = load_config()
+    db = TypeDAL(attempts=1)
+    assert db._uri == config.database
+
+    assert f"'dialect': '{dialect}'" in repr(config)
+
+    return True
+
+
+def test_load_toml(at_temp_dir):
+    base = Path("pyproject.toml")
+    base.write_text("# empty")
+
+    assert _load_toml(False) == ("", {})
+    assert _load_toml(None) == (str(base.resolve().absolute()), {})
+    assert _load_toml(str(base)) == ("pyproject.toml", {})
+    assert _load_toml(".") == (str(base.resolve().absolute()), {})
+
+
+def test_load_dotenv(at_temp_dir):
+    base = Path(".env")
+    base.write_text("# empty")
+
+    assert _load_dotenv(False)[0] == ""
+    assert _load_dotenv(None)[0] == str(base.resolve().absolute())
+    assert _load_dotenv(str(base))[0] == ".env"
+    assert _load_dotenv(".")[0] == ".env"
+
+
+def test_load_empty_config(at_temp_dir):
+    assert _load_db_after_setup("sqlite")
+
+
+def test_load_toml_config(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("sqlite")
+
+
+def test_load_env_config(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+
+    assert _load_db_after_setup("postgres")
+
+
+def test_load_simple_config(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+    shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("postgres")
+
+
+def test_load_both_config(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+    shutil.copy(examples / "valid.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("postgres")
+
+
+def test_converting(at_temp_dir):
+    from edwh_migrate import Config as MigrateConfig
+    from pydal2sql.typer_support import Config as P2SConfig
+
+    config = load_config()
+
+    assert isinstance(config.to_migrate(), MigrateConfig)
+    assert isinstance(config.to_pydal2sql(), P2SConfig)
+
+
+def test_environ(at_temp_dir):
+    os.environ["DB_URI"] = "sqlite:///tmp/db.sqlite"
+    config = load_config(False, True)
+
+    assert config.database == "sqlite:///tmp/db.sqlite"
+
+
+def test_expand_env_vars():
+    # str
+    input_str = "${MYVALUE:default}"
+    data = {"myvar": input_str}
+    expand_env_vars_into_toml_values(data, {})
+    assert data["myvar"] == input_str
+
+    expand_env_vars_into_toml_values(data, {"unrelated": "data"})
+    assert data["myvar"] == "default"
+
+    data = {"myvar": input_str}
+    expand_env_vars_into_toml_values(data, {"myvalue": "123"})
+
+    assert data["myvar"] == "123"
+
+    # list
+    data = {"myvar": [input_str, input_str]}
+    expand_env_vars_into_toml_values(data, {"myvalue": "456"})
+
+    assert data["myvar"] == ["456", "456"]
+
+    # dict
+    data = {"myvar": {"value": input_str}}
+    expand_env_vars_into_toml_values(data, {"myvalue": "789"})
+    assert data["myvar"]["value"] == "789"
+
+    # other - non-str
+    data = {"myvar": None, "mynumber": 123}
+    expand_env_vars_into_toml_values(data, {"myvalue": "789"})
+    assert data["myvar"] is None
+    assert data["mynumber"] == 123
+
+
+# note: these are not really 'config' specific but we already have access to postgres here so good enough:
+
+def test_timestamp_fields_sqlite(at_temp_dir):
+    db = TypeDAL("sqlite:memory")
+
+    class Timestamp(TypedTable):
+        ts = TimestampField(default=dt.datetime.now)
+        dt = TypedField(dt.datetime, default=dt.datetime.now)
+
+    db.define(Timestamp)
+
+    row = Timestamp.insert()
+
+    # old:
+    assert isinstance(row.dt, dt.datetime), "not a datetime"
+    assert "." not in str(row.dt)  # no ms precision
+
+    # new:
+    assert isinstance(row.ts, dt.datetime), "not a datetime"
+    assert "." in str(row.ts)  # ms precision
+
+    assert '"ts" timestamp NOT NULL' in Timestamp._sql()
+
+
+def test_timestamp_fields_psql(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+    shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("postgres")
+    db = TypeDAL(attempts=1)
+
+    class Timestamp(TypedTable):
+        ts = TimestampField(default=dt.datetime.now)
+        dt = TypedField(dt.datetime, default=dt.datetime.now)
+
+    db.define(Timestamp)
+
+    row = Timestamp.insert()
+
+    # old:
+    assert isinstance(row.dt, dt.datetime), "not a datetime"
+    assert "." not in str(row.dt)  # no ms precision
+
+    # new:
+    assert isinstance(row.ts, dt.datetime), "not a datetime"
+    assert "." in str(row.ts)  # ms precision
+
+    assert '"ts" timestamp NOT NULL' in Timestamp._sql()
+
+
+def test_point_fields_sqlite(at_temp_dir):
+    db = TypeDAL("sqlite:memory")
+
+    class Point(TypedTable):
+        pt = PointField()
+
+    db.define(Point)
+
+    row1 = Point.insert(pt=(1, 0))
+    row2 = Point.insert(pt="(1, 0)")
+
+    assert row1.pt == row2.pt
+
+    x, y = row1.pt
+    assert x == 1
+    assert y == 0
+
+    with pytest.raises(Exception):
+        Point.insert(pt=123)
+
+    with pytest.raises(Exception):
+        Point.insert(pt="123")
+
+    # note: psql will check this whereas sqlite won't:
+    Point.insert(pt=())
+
+    assert '"pt" point NOT NULL' in Point._sql()
+
+
+def test_point_fields_psql(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+    shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("postgres")
+    db = TypeDAL(attempts=1)
+
+    class Point(TypedTable):
+        pt = PointField()
+
+    db.define(Point)
+
+    row1 = Point.insert(pt=(1, 0))
+    row2 = Point.insert(pt="(1, 0)")
+
+    assert row1.pt == row2.pt
+
+    x, y = row1.pt
+    assert x == 1
+    assert y == 0
+
+    with pytest.raises(Exception):
+        Point.insert(pt=123)
+
+    with pytest.raises(Exception):
+        Point.insert(pt="123")
+
+    # note: psql will check this whereas sqlite won't:
+
+    with pytest.raises(Exception):
+        Point.insert(pt=())
+
+    assert '"pt" point NOT NULL' in Point._sql()
+
+
+def test_uuid_fields_psql(at_temp_dir):
+    examples = Path(__file__).parent / "configs"
+    shutil.copy(examples / "valid.env", "./.env")
+    shutil.copy(examples / "simple.toml", "./pyproject.toml")
+
+    assert _load_db_after_setup("postgres")
+    db = TypeDAL(attempts=1)
+
+    class UUIDTable(TypedTable):
+        gid = UUIDField(default=uuid.uuid4)
+
+    db.define(UUIDTable)
+
+    row = UUIDTable.insert()
+
+    assert isinstance(row.gid, uuid.UUID)
+
+    with pytest.raises(Exception):
+        UUIDTable.insert(gid="not-a-uuid")
+
+    assert '"gid" uuid NOT NULL' in UUIDTable._sql()
+
+
+def test_uuid_fields_sqlite(at_temp_dir):
+    db = TypeDAL("sqlite:memory")
+
+    class UUIDTable(TypedTable):
+        gid = UUIDField(default=uuid.uuid4)
+
+    db.define(UUIDTable)
+
+    row = UUIDTable.insert()
+
+    assert isinstance(row.gid, uuid.UUID)
+
+    with pytest.raises(Exception):
+        UUIDTable.insert(gid="not-a-uuid")
+
+    assert '"gid" uuid NOT NULL' in UUIDTable._sql()
tests/test_config.py (previous version, removed):
@@ -1,155 +0,0 @@
-import os
-import shutil
-import tempfile
-from pathlib import Path
-
-import pytest
-
-# from contextlib import chdir
-from contextlib_chdir import chdir
-from testcontainers.postgres import PostgresContainer
-
-from src.typedal import TypeDAL
-from src.typedal.config import (
-    _load_dotenv,
-    _load_toml,
-    expand_env_vars_into_toml_values,
-    load_config,
-)
(The remainder of the removed file, lines 19-155 — the `postgres = PostgresContainer(...)` setup, the `psql` and `at_temp_dir` fixtures, `_load_db_after_setup`, and the tests `test_load_toml` through `test_expand_env_vars` — is identical, line for line, to lines 22-158 of the new tests/test_config.py above; only the import block changed and the timestamp/point/uuid tests were appended in 3.8.1.)