sera-2 1.19.3.tar.gz → 1.20.12.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sera_2-1.19.3 → sera_2-1.20.12}/PKG-INFO +4 -4
- {sera_2-1.19.3 → sera_2-1.20.12}/pyproject.toml +4 -4
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/api_helper.py +50 -8
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/base_service.py +41 -15
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/middlewares/uscp.py +4 -9
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/make_python_api.py +12 -1
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/make_python_model.py +4 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/make_typescript_model.py +162 -66
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/misc/__init__.py +2 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/misc/_utils.py +55 -3
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_collection.py +17 -4
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_datatype.py +86 -12
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_parse.py +9 -2
- {sera_2-1.19.3 → sera_2-1.20.12}/README.md +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/constants.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/exports/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/exports/schema.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/exports/test.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/api_test_helper.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/base_orm.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_dcg.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_edge.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_flow.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_fn_signature.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_node.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_runtime.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/directed_computing_graph/_type_conversion.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/middlewares/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/libs/middlewares/auth.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/__main__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/make_app.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/make/make_python_services.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/misc/_formatter.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/__init__.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_class.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_constraints.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_default.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_enum.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_module.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_multi_lingual_string.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_property.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/models/_schema.py +0 -0
- {sera_2-1.19.3 → sera_2-1.20.12}/sera/typing.py +0 -0
```diff
--- sera_2-1.19.3/PKG-INFO
+++ sera_2-1.20.12/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: sera-2
-Version: 1.19.3
+Version: 1.20.12
 Summary: 
 Author: Binh Vu
 Author-email: bvu687@gmail.com
@@ -9,8 +9,8 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Requires-Dist: black (==25.1.0)
-Requires-Dist: codegen-2 (>=2.12.
-Requires-Dist: graph-wrapper (>=1.7.
+Requires-Dist: codegen-2 (>=2.12.2,<3.0.0)
+Requires-Dist: graph-wrapper (>=1.7.3,<2.0.0)
 Requires-Dist: isort (==6.0.1)
 Requires-Dist: litestar (>=2.15.1,<3.0.0)
 Requires-Dist: loguru (>=0.7.0,<0.8.0)
@@ -18,7 +18,7 @@ Requires-Dist: msgspec (>=0.19.0,<0.20.0)
 Requires-Dist: serde2 (>=1.9.2,<2.0.0)
 Requires-Dist: sqlalchemy[asyncio] (>=2.0.41,<3.0.0)
 Requires-Dist: tqdm (>=4.67.1,<5.0.0)
-Requires-Dist: typer (>=0.
+Requires-Dist: typer (>=0.16.0,<0.17.0)
 Project-URL: Repository, https://github.com/binh-vu/sera
 Description-Content-Type: text/markdown
 
```
```diff
--- sera_2-1.19.3/pyproject.toml
+++ sera_2-1.20.12/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "sera-2"
-version = "1.19.3"
+version = "1.20.12"
 description = ""
 authors = ["Binh Vu <bvu687@gmail.com>"]
 readme = "README.md"
@@ -9,17 +9,17 @@ repository = "https://github.com/binh-vu/sera"
 
 [tool.poetry.dependencies]
 python = "^3.12"
-codegen-2 = "^2.12.
+codegen-2 = "^2.12.2"
 msgspec = "^0.19.0"
 litestar = "^2.15.1"
 loguru = "^0.7.0"
-typer = "^0.
+typer = "^0.16.0"
 black = "==25.1.0"
 sqlalchemy = { extras = ["asyncio"], version = "^2.0.41" }
 isort = "==6.0.1"
 serde2 = "^1.9.2"
 tqdm = "^4.67.1"
-graph-wrapper = "^1.7.
+graph-wrapper = "^1.7.3"
 
 [build-system]
 requires = ["poetry-core"]
```
```diff
--- sera_2-1.19.3/sera/libs/api_helper.py
+++ sera_2-1.20.12/sera/libs/api_helper.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import re
-from typing import Collection, Generic, cast
+from typing import Callable, Collection, Generic, TypeVar, cast
 
 from litestar import Request, status_codes
 from litestar.connection import ASGIConnection
@@ -15,16 +15,34 @@ from litestar.typing import FieldDefinition
 from msgspec import Struct
 
 from sera.libs.base_service import Query, QueryOp
-from sera.libs.middlewares.uscp import 
+from sera.libs.middlewares.uscp import SKIP_UPDATE_SYSTEM_CONTROLLED_PROPS_KEY
 from sera.typing import T
 
 # for parsing field names and operations from query string
-FIELD_REG = re.compile(r"(?P<name>[a-zA-Z_0-9]+)(?:\[(?P<op>[a-zA-
+FIELD_REG = re.compile(r"(?P<name>[a-zA-Z_0-9]+)(?:\[(?P<op>[a-zA-Z_0-9]+)\])?")
 QUERY_OPS = {op.value for op in QueryOp}
 KEYWORDS = {"field", "limit", "offset", "unique", "sorted_by", "group_by"}
 
 
-def parse_query(request: Request, fields: set[str], debug: bool) -> Query:
+class TypeConversion:
+
+    to_int = int
+    to_float = float
+
+    @staticmethod
+    def to_bool(value: str) -> bool:
+        if value == "1":
+            return True
+        elif value == "0":
+            return False
+        raise ValueError(f"Invalid boolean value: {value}")
+
+
+def parse_query(
+    request: Request,
+    fields: dict[str, Callable[[str], str | int | bool | float]],
+    debug: bool,
+) -> Query:
     """Parse query for retrieving records that match a query.
 
     If a field name collides with a keyword, you can add `_` to the field name.
@@ -56,6 +74,7 @@ def parse_query(request: Request, fields: set[str], debug: bool) -> Query:
             continue
 
             # Process based on operation or default to equality check
+            # TODO: validate if the operation is allowed for the field
            if not operation:
                operation = QueryOp.eq
            else:
@@ -65,6 +84,21 @@ def parse_query(request: Request, fields: set[str], debug: bool) -> Query:
                        detail=f"Invalid operation: {operation}",
                    )
                operation = QueryOp(operation)
+
+            try:
+                norm_func = fields[field_name]
+                if isinstance(v, list):
+                    v = [norm_func(x) for x in v]
+                else:
+                    v = norm_func(v)
+            except ValueError:
+                if debug:
+                    raise HTTPException(
+                        status_code=status_codes.HTTP_400_BAD_REQUEST,
+                        detail=f"Invalid value for field {field_name}: {v}",
+                    )
+                continue
+
             query[field_name] = {operation: v}
         else:
             # Invalid field name format
@@ -78,7 +112,10 @@ def parse_query(request: Request, fields: set[str], debug: bool) -> Query:
     return query
 
 
-class SingleAutoUSCP(MsgspecDTO[T], Generic[T]):
+S = TypeVar("S", bound=Struct)
+
+
+class SingleAutoUSCP(MsgspecDTO[S], Generic[S]):
     """Auto Update System Controlled Property DTO"""
 
     @classmethod
@@ -99,9 +136,14 @@ class SingleAutoUSCP(MsgspecDTO[T], Generic[T]):
             "data_backend"
         ] # pyright: ignore
         obj = backend.populate_data_from_raw(value, self.asgi_connection)
-
-
-
+        if self.asgi_connection.scope["state"][SKIP_UPDATE_SYSTEM_CONTROLLED_PROPS_KEY]:
+            # Skip updating system-controlled properties
+            # TODO: dirty fix as this assumes every struct has _is_scp_updated property. find a
+            # better solution and fix me!
+            obj._is_scp_updated = True
+            return obj
+
+        obj.update_system_controlled_props(self.asgi_connection)
         return obj
 
 
```
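The reworked `parse_query` now takes a mapping from field name to a normalization callable (instead of a plain `set[str]` of field names), so values arriving as query-string text are coerced before they reach the query layer, and invalid values either raise a 400 in debug mode or are dropped. A minimal standalone sketch of that normalization step; the field names and raw values are invented for illustration:

```python
# Mirrors sera.libs.api_helper.TypeConversion from the diff above.
class TypeConversion:
    to_int = int
    to_float = float

    @staticmethod
    def to_bool(value: str) -> bool:
        # Query strings carry booleans as "1"/"0".
        if value == "1":
            return True
        elif value == "0":
            return False
        raise ValueError(f"Invalid boolean value: {value}")


# Shape of the `fields` argument the generated APIs now pass in
# (hypothetical field names):
fields = {
    "name": str,  # identity-like conversion for strings
    "age": TypeConversion.to_int,
    "is_active": TypeConversion.to_bool,
}

# Raw values as they would arrive from e.g. ?age[gte]=18&is_active=1
raw = {"age": "18", "is_active": "1"}
print({k: fields[k](v) for k, v in raw.items()})  # {'age': 18, 'is_active': True}
```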
```diff
--- sera_2-1.19.3/sera/libs/base_service.py
+++ sera_2-1.20.12/sera/libs/base_service.py
@@ -17,15 +17,15 @@ from sera.typing import FieldName, T, doc
 
 
 class QueryOp(str, Enum):
-    lt = "
-    lte = "
-    gt = "
-    gte = "
-    eq = "
-    ne = "
+    lt = "lt"
+    lte = "lte"
+    gt = "gt"
+    gte = "gte"
+    eq = "eq"
+    ne = "ne"
     # select records where values are in the given list
     in_ = "in"
-    not_in = "
+    not_in = "not_in"
     # for full text search
     fuzzy = "fuzzy"
 
@@ -94,15 +94,41 @@ class BaseAsyncService(Generic[ID, R]):
         )
         if unique:
             q = q.distinct()
-
-
-
-
+        for field in sorted_by:
+            if field.startswith("-"):
+                q = q.order_by(getattr(self.orm_cls, field[1:]).desc())
+            else:
+                q = q.order_by(getattr(self.orm_cls, field))
+        for field in group_by:
+            q = q.group_by(getattr(self.orm_cls, field))
+
+        for field, conditions in query.items():
+            for op, value in conditions.items():
+                # TODO: check if the operation is valid for the field.
+                if op == QueryOp.eq:
+                    q = q.where(getattr(self.orm_cls, field) == value)
+                elif op == QueryOp.ne:
+                    q = q.where(getattr(self.orm_cls, field) != value)
+                elif op == QueryOp.lt:
+                    q = q.where(getattr(self.orm_cls, field) < value)
+                elif op == QueryOp.lte:
+                    q = q.where(getattr(self.orm_cls, field) <= value)
+                elif op == QueryOp.gt:
+                    q = q.where(getattr(self.orm_cls, field) > value)
+                elif op == QueryOp.gte:
+                    q = q.where(getattr(self.orm_cls, field) >= value)
+                elif op == QueryOp.in_:
+                    q = q.where(getattr(self.orm_cls, field).in_(value))
+                elif op == QueryOp.not_in:
+                    q = q.where(~getattr(self.orm_cls, field).in_(value))
                 else:
-
-
-
-
+                    assert op == QueryOp.fuzzy
+                    # Assuming fuzzy search is implemented as a full-text search
+                    q = q.where(
+                        func.to_tsvector(getattr(self.orm_cls, field)).match(value)
+                    )
+
+        print(">>>", q)
 
         cq = select(func.count()).select_from(q.subquery())
         rq = q.limit(limit).offset(offset)
```
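The new filtering loop in `BaseAsyncService` translates the parsed query dict into SQLAlchemy clauses one operation at a time, with `fuzzy` falling through to a PostgreSQL full-text match. A self-contained sketch of the same translation against a toy model (the model and query are invented; only a few operators are spelled out, the rest follow the same pattern):

```python
from enum import Enum

from sqlalchemy import select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class QueryOp(str, Enum):
    # Same string values as sera.libs.base_service.QueryOp after this release.
    eq = "eq"
    gte = "gte"
    in_ = "in"


class Base(DeclarativeBase):
    pass


class User(Base):  # stands in for self.orm_cls
    __tablename__ = "user"
    id: Mapped[int] = mapped_column(primary_key=True)
    age: Mapped[int]
    name: Mapped[str]


query = {"age": {QueryOp.gte: 18}, "name": {QueryOp.in_: ["alice", "bob"]}}

q = select(User)
for field, conditions in query.items():
    for op, value in conditions.items():
        col = getattr(User, field)
        if op == QueryOp.eq:
            q = q.where(col == value)
        elif op == QueryOp.gte:
            q = q.where(col >= value)
        elif op == QueryOp.in_:
            q = q.where(col.in_(value))

print(q)  # SELECT ... WHERE "user".age >= :age_1 AND "user".name IN (...)
```

Note that the hunk also leaves a stray debug `print(">>>", q)` in the released code.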
```diff
--- sera_2-1.19.3/sera/libs/middlewares/uscp.py
+++ sera_2-1.20.12/sera/libs/middlewares/uscp.py
@@ -6,7 +6,7 @@ from litestar.connection.base import UserT
 from litestar.middleware import AbstractMiddleware
 from litestar.types import ASGIApp, Message, Receive, Scope, Scopes, Send
 
-
+SKIP_UPDATE_SYSTEM_CONTROLLED_PROPS_KEY = "skip_uscp_1157"
 
 
 class USCPMiddleware(AbstractMiddleware):
@@ -21,7 +21,6 @@ class USCPMiddleware(AbstractMiddleware):
     def __init__(
         self,
         app: ASGIApp,
-        get_system_controlled_props: Callable[[UserT], dict],
         skip_update_system_controlled_props: Callable[[UserT], bool],
         exclude: str | list[str] | None = None,
         exclude_opt_key: str | None = None,
@@ -39,17 +38,13 @@ class USCPMiddleware(AbstractMiddleware):
         either or both 'ScopeType.HTTP' and 'ScopeType.WEBSOCKET'.
         """
         super().__init__(app, exclude, exclude_opt_key, scopes)
-        self.get_system_controlled_props = get_system_controlled_props
         self.skip_update_system_controlled_props = skip_update_system_controlled_props
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
         user = scope["user"]
-
-
-
-        scope["state"][STATE_SYSTEM_CONTROLLED_PROP_KEY] = (
-            self.get_system_controlled_props(user)
-        )
+        scope["state"][SKIP_UPDATE_SYSTEM_CONTROLLED_PROPS_KEY] = (
+            self.skip_update_system_controlled_props(user)
+        )
         await self.app(scope, receive, send)
 
 
```
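With `get_system_controlled_props` gone, the middleware's only job is to record a per-request boolean under `SKIP_UPDATE_SYSTEM_CONTROLLED_PROPS_KEY`, which `SingleAutoUSCP` later reads back from `scope["state"]`. A hedged wiring sketch: the admin-bypass policy is invented, `DefineMiddleware` is Litestar's standard wrapper for middleware with constructor arguments, and an auth layer must already have populated `scope["user"]`:

```python
from litestar import Litestar
from litestar.middleware import DefineMiddleware

from sera.libs.middlewares.uscp import USCPMiddleware

# Hypothetical policy: only trusted admin users may skip the automatic
# update of system-controlled properties.
app = Litestar(
    route_handlers=[],
    middleware=[
        DefineMiddleware(
            USCPMiddleware,
            skip_update_system_controlled_props=lambda user: getattr(
                user, "is_admin", False
            ),
        )
    ],
)
```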
```diff
--- sera_2-1.19.3/sera/make/make_python_api.py
+++ sera_2-1.20.12/sera/make/make_python_api.py
@@ -4,6 +4,7 @@ from typing import Sequence
 
 from codegen.models import DeferredVar, ImportHelper, PredefinedFn, Program, expr, stmt
 from loguru import logger
+from msgspec import convert
 
 from sera.misc import assert_not_null, to_snake_case
 from sera.models import App, DataCollection, Module, Package
@@ -134,11 +135,21 @@ def make_python_get_api(
 
     func_name = "get_"
 
+    queryable_fields = []
+    for propname, (
+        convert_func,
+        convert_func_import,
+    ) in collection.get_queryable_fields():
+        program.import_(convert_func_import, True)
+        queryable_fields.append(
+            (expr.ExprConstant(propname), expr.ExprIdent(convert_func))
+        )
+
     program.root(
         stmt.LineBreak(),
         lambda ast00: ast00.assign(
             DeferredVar.simple("QUERYABLE_FIELDS"),
-
+            PredefinedFn.dict(queryable_fields),
         ),
         stmt.PythonDecoratorStatement(
             expr.ExprFuncCall(
```
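On the generator side, `QUERYABLE_FIELDS` becomes a dict literal mapping each queryable property to its converter, with each converter's module imported via `program.import_`. The emitted module would plausibly look like this (the collection and field names are invented):

```python
# Hypothetical output of make_python_get_api for a "Product" collection.
from sera.libs.api_helper import TypeConversion, parse_query
from sera.misc import identity

QUERYABLE_FIELDS = {
    "name": identity,
    "price": TypeConversion.to_float,
    "stock": TypeConversion.to_int,
}

# The generated GET handler then passes the dict straight to parse_query:
#   query = parse_query(request, QUERYABLE_FIELDS, debug=DEBUG)
```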
```diff
--- sera_2-1.19.3/sera/make/make_python_model.py
+++ sera_2-1.20.12/sera/make/make_python_model.py
@@ -1296,6 +1296,10 @@ def make_python_relational_object_property(
     idprop = prop.target.get_id_property()
     assert idprop is not None
     idprop_pytype = idprop.datatype.get_sqlalchemy_type()
+
+    if prop.is_optional:
+        idprop_pytype = idprop_pytype.as_optional_type()
+
     for dep in idprop_pytype.deps:
         program.import_(dep, True)
```
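The effect of this fix is that an optional relation now yields an `Optional[...]`-typed foreign-key column in the generated SQLAlchemy model. Roughly, under invented `Item`/`owner` names:

```python
from typing import Optional

from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Item(Base):  # hypothetical generated model fragment
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)
    # Before the fix an optional relation still produced Mapped[int];
    # with as_optional_type() the id type is wrapped in Optional[...].
    owner_id: Mapped[Optional[int]] = mapped_column(ForeignKey("user.id"))
```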
```diff
--- sera_2-1.19.3/sera/make/make_typescript_model.py
+++ sera_2-1.20.12/sera/make/make_typescript_model.py
@@ -29,6 +29,8 @@ from sera.typing import is_set
 TS_GLOBAL_IDENTS = {
     "normalizers.normalizeNumber": "sera-db.normalizers",
     "normalizers.normalizeOptionalNumber": "sera-db.normalizers",
+    "normalizers.normalizeDate": "sera-db.normalizers",
+    "normalizers.normalizeOptionalDate": "sera-db.normalizers",
 }
 
 
@@ -50,6 +52,74 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         # the original object, then it's okay.
         return value
 
+    def get_normal_deser_args(
+        prop: DataProperty | ObjectProperty,
+    ) -> expr.Expr:
+        """Extract the value from the data record from the server response to set to the class property in the client."""
+        handle_optional = lambda value: expr.ExprTernary(
+            expr.ExprNotEqual(value, expr.ExprConstant(None)),
+            value,
+            expr.ExprConstant("undefined"),
+        )
+
+        if isinstance(prop, DataProperty):
+            value = PredefinedFn.attr_getter(
+                expr.ExprIdent("data"), expr.ExprIdent(prop.name)
+            )
+            if prop.is_optional:
+                value = handle_optional(value)
+                value.true_expr = (
+                    prop.datatype.get_typescript_type().get_json_deser_func(
+                        value.true_expr
+                    )
+                )
+            else:
+                value = prop.datatype.get_typescript_type().get_json_deser_func(value)
+
+            return value
+
+        assert isinstance(prop, ObjectProperty)
+        if prop.target.db is not None:
+            value = PredefinedFn.attr_getter(
+                expr.ExprIdent("data"), expr.ExprIdent(prop.name + "_id")
+            )
+            if prop.is_optional:
+                value = handle_optional(value)
+            return value
+        else:
+            if prop.cardinality.is_star_to_many():
+                # optional type for a list is simply an empty list, we don't need to check for None
+                value = PredefinedFn.map_list(
+                    PredefinedFn.attr_getter(
+                        expr.ExprIdent("data"),
+                        expr.ExprIdent(prop.name),
+                    ),
+                    lambda item: expr.ExprMethodCall(
+                        expr.ExprIdent(
+                            assert_isinstance(prop, ObjectProperty).target.name
+                        ),
+                        "deser",
+                        [item],
+                    ),
+                )
+                return value
+            else:
+                value = expr.ExprFuncCall(
+                    PredefinedFn.attr_getter(
+                        expr.ExprIdent(prop.target.name),
+                        expr.ExprIdent("deser"),
+                    ),
+                    [
+                        PredefinedFn.attr_getter(
+                            expr.ExprIdent("data"),
+                            expr.ExprIdent(prop.name),
+                        )
+                    ],
+                )
+                if prop.is_optional:
+                    value = handle_optional(value)
+                return value
+
     def make_normal(cls: Class, pkg: Package):
         """Make a data model for the normal Python data model"""
         if not cls.is_public:
@@ -80,7 +150,9 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
 
             if idprop is not None and prop.name == idprop.name:
                 # use id type alias
-                tstype = TsTypeWithDep(
+                tstype = TsTypeWithDep(
+                    type=f"{cls.name}Id", spectype=tstype.spectype
+                )
 
             if prop.is_optional:
                 # convert type to optional
@@ -89,9 +161,7 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
             deser_args.append(
                 (
                     expr.ExprIdent(propname),
-
-                        expr.ExprIdent("data"), expr.ExprIdent(prop.name)
-                    ),
+                    get_normal_deser_args(prop),
                 )
             )
         else:
@@ -100,10 +170,18 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 # this class is stored in the database, we store the id instead
                 propname = propname + "Id"
                 tstype = TsTypeWithDep(
-                    f"{prop.target.name}Id",
-
-
-
+                    type=f"{prop.target.name}Id",
+                    spectype=assert_not_null(prop.target.get_id_property())
+                    .get_data_model_datatype()
+                    .get_typescript_type()
+                    .spectype,
+                    deps=(
+                        [
+                            f"@.models.{prop.target.get_tsmodule_name()}.{prop.target.name}.{prop.target.name}Id"
+                        ]
+                        if prop.target.name != cls.name
+                        else []
+                    ),
                 )
                 if prop.cardinality.is_star_to_many():
                     tstype = tstype.as_list_type()
@@ -113,17 +191,15 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 deser_args.append(
                     (
                         expr.ExprIdent(propname),
-
-                            expr.ExprIdent("data"),
-                            expr.ExprIdent(prop.name + "_id"),
-                        ),
+                        get_normal_deser_args(prop),
                     )
                 )
             else:
                 # we are going to store the whole object
                 tstype = TsTypeWithDep(
-                    prop.target.name,
-
+                    type=prop.target.name,
+                    spectype=prop.target.name,
+                    deps=[
                         f"@.models.{prop.target.get_tsmodule_name()}.{prop.target.name}.{prop.target.name}"
                     ],
                 )
@@ -132,21 +208,7 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 deser_args.append(
                     (
                         expr.ExprIdent(propname),
-
-                        PredefinedFn.attr_getter(
-                            expr.ExprIdent("data"),
-                            expr.ExprIdent(prop.name),
-                        ),
-                        lambda item: expr.ExprMethodCall(
-                            expr.ExprIdent(
-                                assert_isinstance(
-                                    prop, ObjectProperty
-                                ).target.name
-                            ),
-                            "deser",
-                            [item],
-                        ),
-                        ),
+                        get_normal_deser_args(prop),
                     )
                 )
             else:
@@ -156,18 +218,7 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 deser_args.append(
                     (
                         expr.ExprIdent(propname),
-
-                        PredefinedFn.attr_getter(
-                            expr.ExprIdent(prop.target.name),
-                            expr.ExprIdent("deser"),
-                        ),
-                        [
-                            PredefinedFn.attr_getter(
-                                expr.ExprIdent("data"),
-                                expr.ExprIdent(prop.name),
-                            )
-                        ],
-                        ),
+                        get_normal_deser_args(prop),
                     )
                 )
 
@@ -215,6 +266,17 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
             ],
         )(*prop_constructor_assigns),
         stmt.LineBreak(),
+        lambda ast12: ast12.func(
+            "className",
+            [],
+            expr.ExprIdent("string"),
+            is_static=True,
+            modifiers=["get"],
+            comment="Name of the class in the Schema",
+        )(
+            stmt.ReturnStatement(expr.ExprConstant(cls.name)),
+        ),
+        stmt.LineBreak(),
         lambda ast12: ast12.func(
             "deser",
             [
@@ -348,10 +410,10 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         if idprop is not None and prop.name == idprop.name:
             # use id type alias
             tstype = TsTypeWithDep(
-                f"{cls.name}Id",
+                type=f"{cls.name}Id",
+                spectype=tstype.spectype,
                 deps=[f"@.models.{pkg.dir.name}.{cls.name}.{cls.name}Id"],
             )
-            original_tstype = tstype
         elif tstype.type not in schema.enums:
             # for none id & none enum properties, we need to include a type for "invalid" value
             tstype = _inject_type_for_invalid_value(tstype)
@@ -378,8 +440,13 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         if tstype.type in idprop_aliases:
             create_propvalue = idprop_aliases[tstype.type].get_default()
         elif tstype.type in schema.enums:
-
-
+            enum_value = next(
+                iter(schema.enums[tstype.type].values.values())
+            ).value
+            # TODO: handle enum value integer
+            assert isinstance(enum_value, str)
+            create_propvalue = expr.ExprIdent(
+                tstype.type + "." + enum_value
             )
         else:
             create_propvalue = tstype.get_default()
@@ -416,7 +483,10 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
             expr.ExprIdent("record"), expr.ExprIdent(propname)
         )
 
-        if 
+        if (
+            original_tstype.type != tstype.type
+            and tstype.type != f"{cls.name}Id"
+        ):
             norm_func = get_norm_func(original_tstype, import_helper)
         else:
             norm_func = identity
@@ -441,17 +511,22 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                     ),
                     expr.ExprIdent("isValid"),
                 ),
-
-
-
+                original_tstype.get_json_ser_func(
+                    norm_func(
+                        PredefinedFn.attr_getter(
+                            expr.ExprIdent("this"),
+                            expr.ExprIdent(propname),
+                        )
                     )
                 ),
                 expr.ExprIdent("undefined"),
             )
             if prop.is_optional
-            else
-
-
+            else original_tstype.get_json_ser_func(
+                norm_func(
+                    PredefinedFn.attr_getter(
+                        expr.ExprIdent("this"), expr.ExprIdent(propname)
+                    )
                 )
             )
         ),
@@ -516,8 +591,12 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         if prop.target.db is not None:
             # this class is stored in the database, we store the id instead
             tstype = TsTypeWithDep(
-                f"{prop.target.name}Id",
-
+                type=f"{prop.target.name}Id",
+                spectype=assert_not_null(prop.target.get_id_property())
+                .get_data_model_datatype()
+                .get_typescript_type()
+                .spectype,
+                deps=[
                     f"@.models.{prop.target.get_tsmodule_name()}.{prop.target.name}.{prop.target.name}Id"
                 ],
             )
@@ -568,8 +647,9 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         else:
             # we are going to store the whole object
             tstype = TsTypeWithDep(
-                f"Draft{prop.target.name}",
-
+                type=f"Draft{prop.target.name}",
+                spectype=f"Draft{prop.target.name}",
+                deps=[
                     f"@.models.{prop.target.get_tsmodule_name()}.Draft{prop.target.name}.Draft{prop.target.name}"
                 ],
             )
@@ -1131,7 +1211,13 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         # if prop.data.is_private:
         #     # skip private fields as this is for APIs exchange
         #     continue
-
+        tspropname = to_camel_case(prop.name)
+        pypropname = prop.name
+        if isinstance(prop, ObjectProperty) and prop.target.db is not None:
+            # this is a database object, we append id to the property name
+            tspropname = tspropname + "Id"
+            pypropname = prop.name + "_id"
+
         tsprop = {}
 
         if isinstance(prop, DataProperty):
@@ -1146,7 +1232,7 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 (
                     expr.ExprIdent("datatype"),
                     (
-                        expr.ExprConstant(tstype.
+                        expr.ExprConstant(tstype.spectype)
                         if tstype.type not in schema.enums
                         else expr.ExprConstant("enum")
                     ),
@@ -1169,7 +1255,7 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
             norm_func = get_normalizer(tstype, import_helper)
             if norm_func is not None:
                 # we have a normalizer for this type
-                prop_normalizers.append((expr.ExprIdent(
+                prop_normalizers.append((expr.ExprIdent(tspropname), norm_func))
         else:
             assert isinstance(prop, ObjectProperty)
             if prop.target.db is not None:
@@ -1182,8 +1268,9 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
             else:
                 # we are going to store the whole object
                 tstype = TsTypeWithDep(
-                    prop.target.name,
-
+                    type=prop.target.name,
+                    spectype=prop.target.name,
+                    deps=[
                         f"@.models.{prop.target.get_tsmodule_name()}.{prop.target.name}.{prop.target.name}"
                     ],
                 )
@@ -1204,7 +1291,9 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                 (
                     expr.ExprIdent("datatype"),
                     expr.ExprConstant(
-                        tstype.
+                        tstype.spectype
+                        if prop.target.db is not None
+                        else "undefined"
                     ),
                 ),
                 (
@@ -1224,11 +1313,11 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
         prop_defs.append(
             (
                 prop,
-                expr.ExprIdent(
+                expr.ExprIdent(tspropname),
                 PredefinedFn.dict(
                     [
-                        (expr.ExprIdent("name"), expr.ExprConstant(
-                        (expr.ExprIdent("tsName"), expr.ExprConstant(
+                        (expr.ExprIdent("name"), expr.ExprConstant(pypropname)),
+                        (expr.ExprIdent("tsName"), expr.ExprConstant(tspropname)),
                         (
                             expr.ExprIdent("updateFuncName"),
                             expr.ExprConstant(f"update{to_pascal_case(prop.name)}"),
@@ -1298,7 +1387,10 @@ def make_typescript_data_model(schema: Schema, target_pkg: Package):
                     " | ".join(
                         [
                             expr.ExprConstant(
-                                to_camel_case(prop.name)
+                                to_camel_case(prop.name) + "Id"
+                                if isinstance(prop, ObjectProperty)
+                                and prop.target.db is not None
+                                else to_camel_case(prop.name)
                             ).to_typescript()
                             for prop in cls.properties.values()
                             if not prop.data.is_private
@@ -1557,6 +1649,10 @@ def get_normalizer(
         return import_helper.use("normalizers.normalizeNumber")
     if tstype.type == "number | undefined":
         return import_helper.use("normalizers.normalizeOptionalNumber")
+    if tstype.type == "Date":
+        return import_helper.use("normalizers.normalizeDate")
+    if tstype.type == "Date | undefined":
+        return import_helper.use("normalizers.normalizeOptionalDate")
 
     assert "number" not in tstype.type, tstype.type
     return None
```
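The net effect of these hunks is that JSON (de)serialization for the generated TypeScript classes is decided in one place: `get_normal_deser_args` delegates to the new `TsTypeWithDep.get_json_deser_func`/`get_json_ser_func`, so `Date` values round-trip through ISO strings while primitives pass through untouched. A small Python mirror of that decision, with the expression objects simplified to plain strings and a `createdAt` property invented for illustration:

```python
def json_deser_expr(ts_type: str, value: str) -> str:
    """Mirror of TsTypeWithDep.get_json_deser_func, on plain strings."""
    if ts_type in {"string", "number", "boolean", "string[]"}:
        return value
    if ts_type == "Date":
        return f"new Date({value})"
    raise ValueError(f"Unknown type: {ts_type}")


def json_ser_expr(ts_type: str, value: str) -> str:
    """Mirror of TsTypeWithDep.get_json_ser_func, on plain strings."""
    if ts_type in {"string", "number", "boolean", "string[]"}:
        return value
    if ts_type == "Date":
        return f"{value}.toISOString()"
    raise ValueError(f"Unknown type: {ts_type}")


print(json_deser_expr("Date", "data.created_at"))  # new Date(data.created_at)
print(json_ser_expr("Date", "this.createdAt"))     # this.createdAt.toISOString()
```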
```diff
--- sera_2-1.19.3/sera/misc/__init__.py
+++ sera_2-1.20.12/sera/misc/__init__.py
@@ -1,6 +1,7 @@
 from sera.misc._formatter import File, Formatter
 from sera.misc._utils import (
     LoadTableDataArgs,
+    RelTableIndex,
     assert_isinstance,
     assert_not_null,
     filter_duplication,
@@ -26,4 +27,5 @@ __all__ = [
     "identity",
     "get_classpath",
     "LoadTableDataArgs",
+    "RelTableIndex",
 ]
```
```diff
--- sera_2-1.19.3/sera/misc/_utils.py
+++ sera_2-1.20.12/sera/misc/_utils.py
@@ -1,6 +1,8 @@
 from __future__ import annotations
 
+import inspect
 import re
+from collections import defaultdict
 from dataclasses import dataclass
 from pathlib import Path
 from typing import Any, Callable, Iterable, Optional, Sequence, Type, TypedDict, TypeVar
@@ -120,8 +122,38 @@ class LoadTableDataArgs(TypedDict, total=False):
     tables: Sequence[type]
     file: Path
     files: Sequence[Path]
-    file_deser: Callable[[Path], list[
-    record_deser: 
+    file_deser: Callable[[Path], list[Any]]
+    record_deser: (
+        Callable[[dict], Any | list[Any]]
+        | Callable[[dict, RelTableIndex], Any | list[Any]]
+    )
+    table_unique_index: dict[type, list[str]]
+
+
+class RelTableIndex:
+    """An index of relational tables to find a record by its unique property."""
+
+    def __init__(self, cls2index: Optional[dict[str, list[str]]] = None):
+        self.table2rows: dict[str, dict[str, Any]] = defaultdict(dict)
+        self.table2uniqindex2id: dict[str, dict[str, int]] = defaultdict(dict)
+        self.cls2index = cls2index or {}
+
+    def set_index(self, clsname: str, props: list[str]):
+        """Set the unique index for a class."""
+        self.cls2index[clsname] = props
+
+    def add(self, record: Any):
+        clsname = record.__class__.__name__
+        self.table2rows[clsname][record.id] = record
+        if clsname in self.cls2index:
+            for prop in self.cls2index[clsname]:
+                self.table2uniqindex2id[clsname][getattr(record, prop)] = record.id
+
+    def get_record(self, clsname: str, uniq_prop: str) -> Optional[Any]:
+        tbl = self.table2uniqindex2id[clsname]
+        if uniq_prop not in tbl:
+            return None
+        return self.table2rows[clsname][tbl[uniq_prop]]
 
 
 def load_data(
@@ -143,6 +175,8 @@ def load_data(
     with Session(engine) as session:
         create_db_and_tables()
 
+        reltable_index = RelTableIndex()
+
         for args in tqdm(table_data, disable=not verbose, desc="Loading data"):
             if "table" in args:
                 tbls = [args["table"]]
@@ -162,6 +196,12 @@ def load_data(
             else:
                 raise ValueError("Either 'file' or 'files' must be provided in args.")
 
+            if "table_unique_index" in args:
+                for tbl in tbls:
+                    reltable_index.set_index(
+                        tbl.__name__, args["table_unique_index"].get(tbl, [])
+                    )
+
             raw_records = []
             if "file_deser" not in args:
                 for file in files:
@@ -175,8 +215,17 @@ def load_data(
                 for file in files:
                     raw_records.extend(args["file_deser"](file))
 
+            assert "record_deser" in args
             deser = args["record_deser"]
-
+
+            sig = inspect.signature(deser)
+            param_count = len(sig.parameters)
+            if param_count == 1:
+                records = [deser(row) for row in raw_records]
+            else:
+                assert param_count == 2
+                records = [deser(row, reltable_index) for row in raw_records]
+
             for r in tqdm(
                 records,
                 desc=f"load {', '.join(tbl.__name__ for tbl in tbls)}",
@@ -185,8 +234,11 @@ def load_data(
                 if isinstance(r, Sequence):
                     for x in r:
                         session.merge(x)
+                        reltable_index.add(x)
                 else:
                     session.merge(r)
+                    reltable_index.add(r)
+
                 session.flush()
 
             # Reset the sequence for each table
```
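`load_data` now threads a shared `RelTableIndex` through deserialization: a `record_deser` callback declared with two parameters (detected via `inspect.signature`) receives the index and can look up rows loaded earlier by a unique property. A usage sketch with invented `User`/`Post` record shapes and an `email` index:

```python
from dataclasses import dataclass

from sera.misc import RelTableIndex


@dataclass
class User:  # real usage would pass ORM instances; any object with .id works
    id: int
    email: str


@dataclass
class Post:
    id: int
    author_id: int


index = RelTableIndex()
index.set_index("User", ["email"])
index.add(User(id=1, email="a@example.com"))


def deser_post(row: dict, rel: RelTableIndex) -> Post:
    # Two-parameter deserializer: load_data passes the shared index.
    author = rel.get_record("User", row["author_email"])
    assert author is not None
    return Post(id=row["id"], author_id=author.id)


post = deser_post({"id": 10, "author_email": "a@example.com"}, index)
print(post.author_id)  # 1
```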
```diff
--- sera_2-1.19.3/sera/models/_collection.py
+++ sera_2-1.20.12/sera/models/_collection.py
@@ -2,6 +2,8 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 
+from codegen.models import ImportHelper
+
 from sera.models._class import Class
 from sera.models._property import DataProperty, ObjectProperty
 
@@ -21,9 +23,9 @@ class DataCollection:
         """Get the python module name of this collection as if there is a python module created to store this collection only."""
         return self.cls.get_pymodule_name()
 
-    def get_queryable_fields(self) -> 
+    def get_queryable_fields(self) -> list[tuple[str, tuple[str, str]]]:
         """Get the fields of this collection that can be used in a queries."""
-
+        output = []
         for prop in self.cls.properties.values():
             if prop.db is None or prop.data.is_private:
                 # This property is not stored in the database or it's private, so we skip it
@@ -45,8 +47,19 @@ class DataCollection:
             else:
                 # This property is a data property or an object property not stored in the database, so we use its name
                 propname = prop.name
-
-
+
+            if isinstance(prop, DataProperty):
+                convert_func = prop.datatype.pytype.get_string_conversion_func()
+            else:
+                assert isinstance(prop, ObjectProperty) and prop.target.db is not None
+                target_idprop = prop.target.get_id_property()
+                assert target_idprop is not None
+                convert_func = (
+                    target_idprop.datatype.pytype.get_string_conversion_func()
+                )
+
+            output.append((propname, convert_func))
+        return output
 
     def get_service_name(self):
         return f"{self.name}Service"
```
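Each entry returned by `get_queryable_fields` is now a `(propname, (convert_func, convert_func_import))` pair, giving the API generator both the identifier to emit and the import it needs. Illustratively, for a hypothetical class with a string `name`, an int `age`, and a stored relation whose query field plausibly becomes `owner_id`:

```python
# Hypothetical return value of DataCollection.get_queryable_fields():
queryable_fields = [
    ("name", ("identity", "sera.misc.identity")),
    ("age", ("TypeConversion.to_int", "sera.libs.api_helper.TypeConversion")),
    ("owner_id", ("TypeConversion.to_int", "sera.libs.api_helper.TypeConversion")),
]

for propname, (func_name, func_import) in queryable_fields:
    # make_python_get_api imports func_import, then emits propname -> func_name
    print(f"{propname!r}: {func_name}  (import {func_import})")
```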
```diff
--- sera_2-1.19.3/sera/models/_datatype.py
+++ sera_2-1.20.12/sera/models/_datatype.py
@@ -2,10 +2,12 @@ from __future__ import annotations
 
 import datetime
 from dataclasses import dataclass, field
-from typing import Literal
+from typing import Callable, Literal
 
 from codegen.models import expr
 
+from sera.misc import identity
+
 PyDataType = Literal["str", "int", "datetime", "float", "bool", "bytes", "dict"]
 TypescriptDataType = Literal["string", "number", "boolean"]
 SQLAlchemyDataType = Literal[
@@ -67,10 +69,25 @@ class PyTypeWithDep:
         """Clone the type with the same dependencies."""
         return PyTypeWithDep(type=self.type, deps=list(self.deps))
 
+    def get_string_conversion_func(self) -> tuple[str, str]:
+        if self.type == "str":
+            return ("identity", "sera.misc.identity")
+        if self.type == "int":
+            return ("TypeConversion.to_int", "sera.libs.api_helper.TypeConversion")
+        if self.type == "float":
+            return ("TypeConversion.to_float", "sera.libs.api_helper.TypeConversion")
+        if self.type == "bool":
+            return ("TypeConversion.to_bool", "sera.libs.api_helper.TypeConversion")
+        else:
+            raise NotImplementedError()
+
 
 @dataclass
 class TsTypeWithDep:
     type: str
+    # the specific type of the value, to provide more details for the type because typescript use
+    # number for both int and float, date for both date and datetime.
+    spectype: str
     deps: list[str] = field(default_factory=list)
 
     def get_default(self) -> expr.ExprConstant:
@@ -82,6 +99,8 @@ class TsTypeWithDep:
             return expr.ExprConstant(0)
         if self.type == "boolean":
             return expr.ExprConstant(False)
+        if self.type == "Date":
+            return expr.ExprConstant("new Date()")
         if self.type.endswith("| undefined"):
             return expr.ExprConstant("undefined")
         if self.type.endswith("| string)") or self.type.endswith("| string"):
@@ -96,16 +115,54 @@ class TsTypeWithDep:
         if not all(c.isalnum() or c == "_" for c in self.type.strip()):
             # Type contains special chars like | or spaces, wrap in parentheses
             list_type = f"({self.type})[]"
+            list_spectype = f"({self.spectype})[]"
         else:
             list_type = f"{self.type}[]"
-
+            list_spectype = f"{self.spectype}[]"
+        return TsTypeWithDep(type=list_type, spectype=list_spectype, deps=self.deps)
 
     def as_optional_type(self) -> TsTypeWithDep:
         if "undefined" in self.type:
             raise NotImplementedError(
                 f"Have not handle nested optional yet: {self.type}"
             )
-        return TsTypeWithDep(
+        return TsTypeWithDep(
+            type=f"{self.type} | undefined",
+            # not changing the spectype because we convert to optional when the value is missing
+            # spectype is used to tell the main type of the value when it is present.
+            spectype=self.spectype,
+            deps=self.deps,
+        )
+
+    def get_json_deser_func(self, value: expr.Expr) -> expr.Expr:
+        """Get the typescript expression to convert the value from json format to the correct type."""
+        if self.type in {"string", "number", "boolean", "string[]"}:
+            return value
+        if self.type == "Date":
+            return expr.ExprRawTypescript(f"new Date({value.to_typescript()})")
+        if any(x.startswith("@.models.enum") for x in self.deps):
+            # enum type, we don't need to do anything as we use strings for enums
+            return value
+        raise ValueError(f"Unknown type: {self.type}")
+
+    def get_json_ser_func(self, value: expr.Expr) -> expr.Expr:
+        """Get the typescript expression to convert the value to json format."""
+        if self.type in {
+            "string",
+            "number",
+            "boolean",
+            "string[]",
+            "number | undefined",
+            "boolean | undefined",
+            "string | undefined",
+        }:
+            return value
+        if self.type == "Date":
+            return expr.ExprRawTypescript(f"{value.to_typescript()}.toISOString()")
+        if any(x.startswith("@.models.enum") for x in self.deps):
+            # enum type, we don't need to do anything as we use strings for enums
+            return value
+        raise ValueError(f"Unknown type: {self.type}")
 
 
 @dataclass
@@ -122,6 +179,23 @@ class SQLTypeWithDep:
             mapped_pytype=f"list[{self.mapped_pytype}]",
         )
 
+    def as_optional_type(self) -> SQLTypeWithDep:
+        """Convert the type to an optional type."""
+        if "typing.Optional" not in self.deps:
+            deps = self.deps + ["typing.Optional"]
+        else:
+            deps = self.deps
+
+        if "Optional[" in self.mapped_pytype:
+            raise NotImplementedError(
+                f"Have not handle nested optional yet: {self.mapped_pytype}"
+            )
+        return SQLTypeWithDep(
+            type=self.type,
+            mapped_pytype=f"Optional[{self.mapped_pytype}]",
+            deps=deps,
+        )
+
 
 @dataclass
 class DataType:
@@ -156,7 +230,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="String", mapped_pytype="str", deps=["sqlalchemy.String"]
         ),
-        tstype=TsTypeWithDep(type="string"),
+        tstype=TsTypeWithDep(type="string", spectype="string"),
         is_list=False,
     ),
     "integer": DataType(
@@ -164,7 +238,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="Integer", mapped_pytype="int", deps=["sqlalchemy.Integer"]
         ),
-        tstype=TsTypeWithDep(type="number"),
+        tstype=TsTypeWithDep(type="number", spectype="integer"),
         is_list=False,
     ),
     "date": DataType(
@@ -174,7 +248,7 @@ predefined_datatypes = {
             mapped_pytype="date",
             deps=["sqlalchemy.Date", "datetime.date"],
         ),
-        tstype=TsTypeWithDep(type="
+        tstype=TsTypeWithDep(type="Date", spectype="date"),
         is_list=False,
     ),
     "datetime": DataType(
@@ -184,7 +258,7 @@ predefined_datatypes = {
             mapped_pytype="datetime",
             deps=["sqlalchemy.DateTime", "datetime.datetime"],
         ),
-        tstype=TsTypeWithDep(type="
+        tstype=TsTypeWithDep(type="Date", spectype="datetime"),
         is_list=False,
     ),
     "float": DataType(
@@ -192,7 +266,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="Float", mapped_pytype="float", deps=["sqlalchemy.Float"]
         ),
-        tstype=TsTypeWithDep(type="number"),
+        tstype=TsTypeWithDep(type="number", spectype="float"),
         is_list=False,
     ),
     "boolean": DataType(
@@ -200,7 +274,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="Boolean", mapped_pytype="bool", deps=["sqlalchemy.Boolean"]
        ),
-        tstype=TsTypeWithDep(type="boolean"),
+        tstype=TsTypeWithDep(type="boolean", spectype="boolean"),
         is_list=False,
     ),
     "bytes": DataType(
@@ -208,7 +282,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="LargeBinary", mapped_pytype="bytes", deps=["sqlalchemy.LargeBinary"]
         ),
-        tstype=TsTypeWithDep(type="string"),
+        tstype=TsTypeWithDep(type="string", spectype="bytes"),
         is_list=False,
     ),
     "dict": DataType(
@@ -216,7 +290,7 @@ predefined_datatypes = {
         sqltype=SQLTypeWithDep(
             type="JSON", mapped_pytype="dict", deps=["sqlalchemy.JSON"]
         ),
-        tstype=TsTypeWithDep(type="string"),
+        tstype=TsTypeWithDep(type="string", spectype="dict"),
         is_list=False,
     ),
 }
@@ -228,5 +302,5 @@ predefined_sql_datatypes = {
     ),
 }
 predefined_ts_datatypes = {
-    "string": TsTypeWithDep(type="string"),
+    "string": TsTypeWithDep(type="string", spectype="string"),
 }
```
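The new `spectype` field records what a TypeScript `number` or `Date` actually holds (`integer` vs `float`, `date` vs `datetime`), and the schema metadata emitted by the generator now carries it instead of the raw TS type. A quick check against the predefined datatypes (assuming the package is installed):

```python
from sera.models._datatype import predefined_datatypes

for name in ("integer", "float", "date", "datetime"):
    ts = predefined_datatypes[name].tstype
    print(f"{name:8s} -> type={ts.type!r} spectype={ts.spectype!r}")

# integer  -> type='number' spectype='integer'
# float    -> type='number' spectype='float'
# date     -> type='Date'   spectype='date'
# datetime -> type='Date'   spectype='datetime'
```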
```diff
--- sera_2-1.19.3/sera/models/_parse.py
+++ sera_2-1.20.12/sera/models/_parse.py
@@ -238,10 +238,17 @@ def _parse_datatype(schema: Schema, datatype: dict | str) -> DataType:
             ],
         ),
         sqltype=SQLTypeWithDep(
-            type="
+            type=f"Enum({enum.name})",
+            mapped_pytype=enum.name,
+            deps=[
+                "sqlalchemy.Enum",
+                f"{schema.name}.models.enums.{enum.get_pymodule_name()}.{enum.name}",
+            ],
         ),
         tstype=TsTypeWithDep(
-            type=enum.name,
+            type=enum.name,
+            spectype=enum.name,
+            deps=[f"@.models.enums.{enum.name}"],
         ),
         is_list=is_list,
     )
```
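For enum datatypes the parser now emits a concrete `Enum(<name>)` SQLAlchemy type plus the import of the generated Python enum class. The resulting generated column would plausibly read like this (the `Status`/`Ticket` names and module layout are invented):

```python
import enum

import sqlalchemy
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Status(str, enum.Enum):  # stands in for the generated enum class
    active = "active"
    archived = "archived"


class Base(DeclarativeBase):
    pass


class Ticket(Base):
    __tablename__ = "ticket"
    id: Mapped[int] = mapped_column(primary_key=True)
    # sqltype from _parse_datatype: Enum(Status), mapped_pytype Status
    status: Mapped[Status] = mapped_column(sqlalchemy.Enum(Status))
```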