TypeDAL 3.10.3__tar.gz → 3.10.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {typedal-3.10.3 → typedal-3.10.5}/CHANGELOG.md +12 -0
- {typedal-3.10.3 → typedal-3.10.5}/PKG-INFO +2 -2
- {typedal-3.10.3 → typedal-3.10.5}/example_new.py +17 -17
- typedal-3.10.5/example_old.py +93 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/__about__.py +1 -1
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/cli.py +1 -2
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/core.py +21 -9
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_main.py +34 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_mixins.py +1 -1
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_mypy.py +1 -1
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_py4web.py +5 -10
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_relationships.py +3 -3
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_row.py +4 -0
- typedal-3.10.3/example_old.py +0 -97
- {typedal-3.10.3 → typedal-3.10.5}/.github/workflows/su6.yml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/.gitignore +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/.readthedocs.yml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/README.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/coverage.svg +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/1_getting_started.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/2_defining_tables.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/3_building_queries.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/4_relationships.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/5_py4web.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/6_migrations.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/7_mixins.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/css/code_blocks.css +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/index.md +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/docs/requirements.txt +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/mkdocs.yml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/pyproject.toml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/__init__.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/caching.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/config.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/fields.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/for_py4web.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/for_web2py.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/helpers.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/mixins.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/py.typed +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/serializers/as_json.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/types.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/src/typedal/web2py_py4web_shared.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/__init__.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/configs/simple.toml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/configs/valid.env +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/configs/valid.toml +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_cli.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_config.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_docs_examples.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_helpers.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_json.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_orm.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_query_builder.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_stats.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_table.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_web2py.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/test_xx_others.py +0 -0
- {typedal-3.10.3 → typedal-3.10.5}/tests/timings.py +0 -0
{typedal-3.10.3 → typedal-3.10.5}/CHANGELOG.md

```diff
@@ -2,6 +2,18 @@
 
 <!--next-version-placeholder-->
 
+## v3.10.5 (2025-04-22)
+
+### Fix
+
+* `repr(rows)` crashed when `rows` contained no data ([`cc3b2d0`](https://github.com/trialandsuccess/TypeDAL/commit/cc3b2d0bd332faee579e6fdfb7068bd511f22093))
+
+## v3.10.4 (2025-04-17)
+
+### Fix
+
+* Prevent duplicate callback hooks (before/after insert/update/delete) ([`e8e2271`](https://github.com/trialandsuccess/TypeDAL/commit/e8e2271da87d1993afc586060052dd5d057c28e1))
+
 ## v3.10.3 (2025-04-03)
 
 ### Fix
```
{typedal-3.10.3 → typedal-3.10.5}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: TypeDAL
-Version: 3.10.3
+Version: 3.10.5
 Summary: Typing support for PyDAL
 Project-URL: Documentation, https://typedal.readthedocs.io/
 Project-URL: Issues, https://github.com/trialandsuccess/TypeDAL/issues
```
{typedal-3.10.3 → typedal-3.10.5}/example_new.py

```diff
@@ -1,13 +1,12 @@
-
-
+import datetime as dt
 import typing
 from decimal import Decimal
-import datetime as dt
 
-from src.typedal.fields import TextField
-from src.typedal.helpers import utcnow
 from pydal.validators import IS_NOT_EMPTY
 
+from src.typedal import TypeDAL, TypedField, TypedRows, TypedTable, fields
+from src.typedal.fields import TextField
+from src.typedal.helpers import utcnow
 from typedal.fields import TimestampField
 
 db = TypeDAL("sqlite:memory")
@@ -15,6 +14,7 @@ db = TypeDAL("sqlite:memory")
 
 ### basic examples
 
+
 @db.define(format="%(name)s")
 class Person(TypedTable):
     name: TypedField[str]
@@ -81,9 +81,9 @@ assert not max.owners
 
 ### example with all possible field types;
 
+
 @db.define
-class OtherTable(TypedTable):
-    ...
+class OtherTable(TypedTable): ...
 
 
 @db.define
@@ -161,7 +161,7 @@ class AllFieldsExplicit(TypedTable):
     reference_two = fields.ReferenceField("other_table", notnull=False)
     list_string = fields.ListStringField()
     list_integer = fields.ListIntegerField()
-    list_reference = fields.ListReferenceField(
+    list_reference = fields.ListReferenceField("other_table")
     json = fields.JSONField()
     bigint = fields.BigintField()
 
@@ -173,7 +173,7 @@ db.other_table.insert()
 other1 = db.other_table(id=1)
 other2 = db.other_table(id=2)
 
-with open(
+with open("example_new.py", "rb") as stream:
     db.all_fields_basic.insert(
         string="hi!",
         text="hi but longer",
@@ -192,11 +192,11 @@ with open('example_new.py', 'rb') as stream:
         list_string=["hi", "there"],
         list_integer=[1, 2],
         list_reference=[other1, other2],
-        json={
+        json={"hi": "there"},
        bigint=42,
     )
 
-with open(
+with open("example_new.py", "rb") as stream:
     (
         AllFieldsAdvanced.insert(
             string="hi!",
@@ -216,12 +216,12 @@ with open('example_new.py', 'rb') as stream:
             list_string=["hi", "there"],
             list_integer=[1, 2],
             list_reference=[other1, other2],
-            json={
+            json={"hi": "there"},
             bigint=42,
         )
     )
 
-with open(
+with open("example_new.py", "rb") as stream:
     AllFieldsExplicit.insert(
         string="hi!",
         text="hi but longer",
@@ -240,23 +240,23 @@ with open('example_new.py', 'rb') as stream:
         list_string=["hi", "there"],
         list_integer=[1, 2],
         list_reference=[other1, other2],
-        json={
+        json={"hi": "there"},
         bigint=42,
     )
 
 rowa = db.all_fields_advanced(string="hi!")
-print(
+print("advanced")
 # for field in rowa:
 #     print(field, type(rowa[field]))
 print(rowa)
 
-print(
+print("basic")
 rowb = db.all_fields_basic(string="hi!")
 # for field in rowa:
 #     print(field, type(rowa[field]))
 print(rowb)
 
-print(
+print("explicit")
 rowb = db.all_fields_explicit(string="hi!")
 # for field in rowa:
 #     print(field, type(rowa[field]))
```
typedal-3.10.5/example_old.py (new file)

```diff
@@ -0,0 +1,93 @@
+import datetime
+
+from pydal import DAL, Field
+
+from typedal.helpers import utcnow
+
+db = DAL("sqlite:memory")
+
+### basic examples
+
+db.define_table(
+    "person", Field("name", "string"), Field("age", "integer", default=18), Field("nicknames", "list:string")
+)
+
+db.define_table("pet", Field("name", "string"), Field("owners", "list:reference person"))
+
+db.person.insert(name="Henk", age=44, nicknames=["Henkie", "Gekke Henk"])
+db.person.insert(name="Ingrid", age=47, nicknames=[])
+
+henk = db(db.person.name == "Henk").select().first()
+ingrid = db(db.person.name == "Ingrid").select().first()
+print(henk, ingrid)
+
+db.pet.insert(name="Max", owners=[henk, ingrid])
+
+max = db.pet(name="Max")
+print(max)
+
+people = db(db.person.id > 0).select()
+
+for person in people:
+    print(person.name)
+
+### example with all possible field types;
+db.define_table("other_table")
+
+db.define_table(
+    "all_fields",
+    Field("string", "string"),
+    Field("text", "text"),
+    Field("blob", "blob"),
+    Field("boolean", "boolean"),
+    Field("integer", "integer"),
+    Field("double", "double"),
+    Field("decimal", "decimal(2,3)"),
+    Field("date", "date"),
+    Field("time", "time"),
+    Field("datetime", "datetime"),
+    Field("password", "password"),
+    Field("upload", "upload", uploadfield="upload_data"),
+    Field("upload_data", "blob"),
+    Field("reference", "reference other_table"),
+    Field("list_string", "list:string"),
+    Field("list_integer", "list:integer"),
+    Field("list_reference", "list:reference other_table"),
+    Field("json", "json"),
+    Field("bigint", "bigint"),
+    # The big-id and, big-reference are only supported by some of the database engines and are experimental.
+)
+
+now = utcnow()
+
+db.other_table.insert()
+db.other_table.insert()
+other1 = db.other_table(id=1)
+other2 = db.other_table(id=2)
+
+with open("example_old.py", "rb") as stream:
+    db.all_fields.insert(
+        string="hi",
+        text="hi but longer",
+        blob=b"\x23",
+        boolean=True,
+        integer=1,
+        double=1.11111111111111111111111111111,
+        decimal=1.11111111111111111111111111111,
+        date=now.date(),
+        time=now.time(),
+        datetime=now,
+        password="secret",
+        upload=stream,
+        upload_data=stream.read(),
+        reference=other1,
+        list_string=["hi", "there"],
+        list_integer=[1, 2],
+        list_reference=[other1, other2],
+        json={"hi": "there"},
+        bigint=42,
+    )
+
+row = db.all_fields(string="hi")
+for field in row:
+    print(field, type(row[field]))
```
{typedal-3.10.3 → typedal-3.10.5}/src/typedal/cli.py

```diff
@@ -392,8 +392,7 @@ def fake_migrations(
 
     previously_migrated = (
         db(
-            db.ewh_implemented_features.name.belongs(to_fake)
-            & (db.ewh_implemented_features.installed == True)  # noqa E712
+            db.ewh_implemented_features.name.belongs(to_fake) & (db.ewh_implemented_features.installed == True)  # noqa E712
         )
         .select(db.ewh_implemented_features.name)
         .column("name")
```
{typedal-3.10.3 → typedal-3.10.5}/src/typedal/core.py

```diff
@@ -1186,7 +1186,8 @@ class TableMeta(type):
         """
         Add a before insert hook.
         """
-        cls._before_insert
+        if fn not in cls._before_insert:
+            cls._before_insert.append(fn)
         return cls
 
     def after_insert(
@@ -1199,7 +1200,8 @@ class TableMeta(type):
         """
         Add an after insert hook.
         """
-        cls._after_insert
+        if fn not in cls._after_insert:
+            cls._after_insert.append(fn)
         return cls
 
     def before_update(
@@ -1209,7 +1211,8 @@ class TableMeta(type):
         """
         Add a before update hook.
         """
-        cls._before_update
+        if fn not in cls._before_update:
+            cls._before_update.append(fn)
         return cls
 
     def after_update(
@@ -1219,21 +1222,24 @@ class TableMeta(type):
         """
         Add an after update hook.
         """
-        cls._after_update
+        if fn not in cls._after_update:
+            cls._after_update.append(fn)
         return cls
 
     def before_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
         """
         Add a before delete hook.
         """
-        cls._before_delete
+        if fn not in cls._before_delete:
+            cls._before_delete.append(fn)
         return cls
 
     def after_delete(cls: Type[T_MetaInstance], fn: typing.Callable[[Set], Optional[bool]]) -> Type[T_MetaInstance]:
        """
         Add an after delete hook.
         """
-        cls._after_delete
+        if fn not in cls._after_delete:
+            cls._after_delete.append(fn)
         return cls
 
 
```
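These hunks make hook registration idempotent per function object: a callback is appended only when it is not already in the list, which is the v3.10.4 "prevent duplicate callback hooks" fix. A minimal sketch of the resulting behaviour, assuming an installed `typedal` package (the `Order` table and `log_insert` hook below are illustrative, not part of this release):

```python
from typedal import TypeDAL, TypedTable

db = TypeDAL("sqlite:memory")


@db.define()
class Order(TypedTable):
    name: str


calls = []


def log_insert(fields, new_id):
    # pydal-style after-insert callback: receives the inserted fields and the new row id
    calls.append(new_id)


# Registering the same function object twice is now a no-op the second time,
# so the hook fires once per insert instead of twice.
Order.after_insert(log_insert)
Order.after_insert(log_insert)

Order.insert(name="first")
assert len(calls) == 1
```

A different function (with a different hash) is still registered as a second hook, which is exactly what the new `test_hooks_duplicates` test further below exercises.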
{typedal-3.10.3 → typedal-3.10.5}/src/typedal/core.py (continued)

```diff
@@ -1257,7 +1263,10 @@ class TypedField(Expression, typing.Generic[T_Value]):  # pragma: no cover
     # NOTE: for the logic of converting a TypedField into a pydal Field, see TypeDAL._to_field
 
     def __init__(
-        self,
+        self,
+        _type: Type[T_Value] | types.UnionType = str,
+        /,
+        **settings: Unpack[FieldSettings],  # type: ignore
     ) -> None:
         """
         Typed version of pydal.Field, which will be converted to a normal Field in the background.
```
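In the reshaped signature the field type is a positional-only parameter defaulting to `str`, with every other option passed as a keyword setting. A minimal usage sketch under that reading, assuming an installed `typedal` package (the `Snippet` table is illustrative):

```python
from typedal import TypeDAL, TypedField, TypedTable

db = TypeDAL("sqlite:memory")


@db.define()
class Snippet(TypedTable):
    # the type is passed positionally, all remaining options as keyword settings
    title = TypedField(str, notnull=True)
    views = TypedField(int, default=0)
```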
{typedal-3.10.3 → typedal-3.10.5}/src/typedal/core.py (continued)

```diff
@@ -1968,7 +1977,11 @@ class TypedRows(typing.Collection[T_MetaInstance], Rows):
         Print a table on repr().
         """
         data = self.as_dict()
-
+        try:
+            headers = list(next(iter(data.values())).keys())
+        except StopIteration:
+            headers = []
+
         return mktable(data, headers)
 
     def group_by_value(
```
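The added `try`/`except` covers the case where `as_dict()` returns an empty mapping, so `next(iter(...))` no longer escapes as `StopIteration`; this is the v3.10.5 fix for `repr(rows)` on empty results. A small sketch of the user-visible effect, assuming an installed `typedal` package (the `Visitor` table is illustrative):

```python
from typedal import TypeDAL, TypedTable

db = TypeDAL("sqlite:memory")


@db.define()
class Visitor(TypedTable):
    name: str


# a query that matches nothing
empty_rows = Visitor.where(Visitor.id < 0).collect()
assert len(empty_rows) == 0

# previously this raised StopIteration; now it renders an (empty) table
print(repr(empty_rows))
```

The new assertions in `tests/test_row.py` further below cover the same scenario.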
{typedal-3.10.3 → typedal-3.10.5}/src/typedal/core.py (continued)

```diff
@@ -2623,7 +2636,6 @@ class QueryBuilder(typing.Generic[T_MetaInstance]):
             join.append(other.on(condition))
 
         if limitby := select_kwargs.pop("limitby", ()):
-
             # if limitby + relationships:
             # 1. get IDs of main table entries that match 'query'
             # 2. change query to .belongs(id)
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_main.py

```diff
@@ -1,4 +1,5 @@
 import re
+from copy import copy
 from sqlite3 import IntegrityError
 
 import pydal
@@ -480,6 +481,39 @@ def test_hooks_v2(capsys):
     assert "after delete" in captured.out
 
 
+def test_hooks_duplicates():
+    counter = 0
+
+    @db.define()
+    class HookedTableV3(TypedTable):
+        name: str
+
+    def increase_counter(_, __):
+        nonlocal counter
+        counter += 1
+
+    HookedTableV3.after_insert(increase_counter)
+    HookedTableV3.after_insert(increase_counter)
+    HookedTableV3.after_insert(copy(increase_counter))  # other id, same hash
+
+    assert counter == 0
+
+    HookedTableV3.insert(name="Should increase counter once")
+
+    assert counter == 1
+
+    # other function hash -> allow 'duplicate'
+    def increase_counter_v2(_, __):
+        nonlocal counter
+        counter += 1
+
+    HookedTableV3.after_insert(increase_counter_v2)  # other hash
+
+    HookedTableV3.insert(name="Should increase counter twice")
+
+    assert counter == 3
+
+
 def test_try():
     class SomeTableToRetry(TypedTable):
         key: int
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_mixins.py

```diff
@@ -86,7 +86,7 @@ def test_slug(db):
     row, error = TableWithMixins.validate_and_insert(name="Two Words")
     assert row is None
     assert error == {
-
+        "name": "This slug is not unique: two-words.",
     }
 
     # with random suffix: duplicates are fine
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_mypy.py

```diff
@@ -52,7 +52,7 @@ def mypy_test_typedal_define() -> None:
     reveal_type(MyTable().fancy.lower())  # R: builtins.str
 
     aliased_cls = MyTable.with_alias("---")
-    reveal_type(aliased_cls),  # R: type[tests.test_mypy.MyTable]
+    (reveal_type(aliased_cls),)  # R: type[tests.test_mypy.MyTable]
     aliased_instance = aliased_cls()
     reveal_type(aliased_instance)  # R: tests.test_mypy.MyTable
 
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_py4web.py

```diff
@@ -1,7 +1,7 @@
 import json
 import tempfile
-from contextlib_chdir import chdir
 
+from contextlib_chdir import chdir
 from pydal.validators import IS_EMAIL, IS_NOT_IN_DB
 
 from src.typedal import TypedTable
@@ -60,27 +60,22 @@ def test_py4web_dal_singleton():
     db_2a = DAL("sqlite://test_py4web_dal_singleton", folder=d, enable_typedal_caching=False)
     db_2b = DAL("sqlite://test_py4web_dal_singleton", folder=d, enable_typedal_caching=False)
 
-    conf = {
-        "database": "sqlite:memory",
-        "dialect": "sqlite",
-        "pyproject": "",
-        "flag_location": f"{d}/flags"
-    }
+    conf = {"database": "sqlite:memory", "dialect": "sqlite", "pyproject": "", "flag_location": f"{d}/flags"}
     db_3a = DAL(config=TypeDALConfig.load(conf))
     db_3b = DAL(config=TypeDALConfig.load(conf))
 
     assert db_1a is db_1b
     assert db_1a._uri == db_1b._uri
     assert db_1a._db_uid == db_1b._db_uid
-
+
     assert db_2a is db_2b
     assert db_2a._uri == db_2b._uri
     assert db_2a._db_uid == db_2b._db_uid
-
+
     assert db_1a is not db_2a
     assert db_1a._uri != db_2a._uri
     assert db_1a._db_uid != db_2a._db_uid
-
+
     assert db_1b is not db_2b
     assert db_1b._uri != db_2b._uri
     assert db_1b._db_uid != db_2b._db_uid
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_relationships.py

```diff
@@ -328,9 +328,9 @@ def test_reprs():
 
     assert "AND" not in repr(relation)
 
-    relation = Article.join("author",
-
-
+    relation = Article.join("author", condition_and=lambda article, author: author.name != "Hank").relationships[
+        "author"
+    ]
 
     assert "AND" in repr(relation) and "Hank" in repr(relation)
 
```
{typedal-3.10.3 → typedal-3.10.5}/tests/test_row.py

```diff
@@ -214,3 +214,7 @@ def test_rows():
     assert len(empty) == 0
     assert len(empty.exclude(lambda x: x)) == 0
     assert len(empty.find(lambda x: x)) == 0
+
+    empty_rows = NewStyleClass.where(NewStyleClass.id < 0).collect()
+    assert str(empty_rows)
+    assert repr(empty_rows)
```
typedal-3.10.3/example_old.py (DELETED)

```diff
@@ -1,97 +0,0 @@
-import datetime
-
-from pydal import DAL, Field
-
-from typedal.helpers import utcnow
-
-db = DAL("sqlite:memory")
-
-### basic examples
-
-db.define_table("person",
-                Field("name", "string"),
-                Field("age", "integer", default=18),
-                Field("nicknames", "list:string")
-                )
-
-db.define_table("pet",
-                Field("name", "string"),
-                Field("owners", "list:reference person")
-                )
-
-db.person.insert(name="Henk", age=44, nicknames=["Henkie", "Gekke Henk"])
-db.person.insert(name="Ingrid", age=47, nicknames=[])
-
-henk = db(db.person.name == "Henk").select().first()
-ingrid = db(db.person.name == "Ingrid").select().first()
-print(henk, ingrid)
-
-db.pet.insert(name="Max", owners=[henk, ingrid])
-
-max = db.pet(name="Max")
-print(max)
-
-people = db(db.person.id > 0).select()
-
-for person in people:
-    print(person.name)
-
-### example with all possible field types;
-db.define_table("other_table")
-
-db.define_table("all_fields",
-                Field("string", "string"),
-                Field("text", "text"),
-                Field("blob", "blob"),
-                Field("boolean", "boolean"),
-                Field("integer", "integer"),
-                Field("double", "double"),
-                Field("decimal", "decimal(2,3)"),
-                Field("date", "date"),
-                Field("time", "time"),
-                Field("datetime", "datetime"),
-                Field("password", "password"),
-                Field("upload", "upload", uploadfield="upload_data"),
-                Field("upload_data", "blob"),
-                Field("reference", "reference other_table"),
-                Field("list_string", "list:string"),
-                Field("list_integer", "list:integer"),
-                Field("list_reference", "list:reference other_table"),
-                Field("json", "json"),
-                Field("bigint", "bigint"),
-                # The big-id and, big-reference are only supported by some of the database engines and are experimental.
-                )
-
-now = utcnow()
-
-db.other_table.insert()
-db.other_table.insert()
-other1 = db.other_table(id=1)
-other2 = db.other_table(id=2)
-
-with open('example_old.py', 'rb') as stream:
-    db.all_fields.insert(
-        string="hi",
-        text="hi but longer",
-        blob=b"\x23",
-        boolean=True,
-        integer=1,
-        double=1.11111111111111111111111111111,
-        decimal=1.11111111111111111111111111111,
-        date=now.date(),
-        time=now.time(),
-        datetime=now,
-        password="secret",
-        upload=stream,
-        upload_data=stream.read(),
-        reference=other1,
-        list_string=["hi", "there"],
-        list_integer=[1, 2],
-        list_reference=[other1, other2],
-        json={'hi': 'there'},
-        bigint=42,
-    )
-
-row = db.all_fields(string="hi")
-for field in row:
-    print(field, type(row[field]))
```