sera-2 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sera/__init__.py +0 -0
- sera/libs/__init__.py +0 -0
- sera/libs/api_helper.py +66 -0
- sera/libs/base_orm.py +109 -0
- sera/libs/base_service.py +78 -0
- sera/make/__init__.py +0 -0
- sera/make/__main__.py +38 -0
- sera/make/make_app.py +142 -0
- sera/make/make_python_api.py +242 -0
- sera/make/make_python_model.py +282 -0
- sera/make/make_python_services.py +64 -0
- sera/make/make_typescript_model.py +1 -0
- sera/misc/__init__.py +16 -0
- sera/misc/_rdf.py +60 -0
- sera/misc/_utils.py +46 -0
- sera/models/__init__.py +24 -0
- sera/models/_class.py +56 -0
- sera/models/_collection.py +34 -0
- sera/models/_datatype.py +54 -0
- sera/models/_module.py +140 -0
- sera/models/_multi_lingual_string.py +38 -0
- sera/models/_parse.py +153 -0
- sera/models/_property.py +124 -0
- sera/models/_schema.py +32 -0
- sera/namespace.py +5 -0
- sera/typing.py +11 -0
- sera_2-1.1.0.dist-info/METADATA +20 -0
- sera_2-1.1.0.dist-info/RECORD +29 -0
- sera_2-1.1.0.dist-info/WHEEL +4 -0
@@ -0,0 +1,282 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from typing import Sequence
|
4
|
+
|
5
|
+
from codegen.models import DeferredVar, PredefinedFn, Program, expr, stmt
|
6
|
+
from sera.misc import assert_isinstance, filter_duplication
|
7
|
+
from sera.models import DataProperty, ObjectProperty, Package, Schema
|
8
|
+
|
9
|
+
|
10
|
+
def make_python_data_model(schema: Schema, target_pkg: Package):
    """Generate public classes for the API from the schema.

    One module is written per class in topological order; each module
    contains a msgspec.Struct mirroring the class's non-private properties.
    """
    for cls in schema.topological_sort():
        program = Program()
        program.import_("__future__.annotations", True)
        program.import_("msgspec", False)

        struct_ast = program.root.class_(cls.name, [expr.ExprIdent("msgspec.Struct")])
        for prop in cls.properties.values():
            # private fields are internal-only and must not leak through the API
            if prop.is_private:
                continue

            if isinstance(prop, DataProperty):
                py_type = prop.datatype.get_python_type()
                if py_type.dep is not None:
                    program.import_(py_type.dep, True)
                struct_ast(stmt.DefClassVarStatement(prop.name, py_type.type))
            elif isinstance(prop, ObjectProperty):
                # reference the generated struct of the target class
                target_module = target_pkg.module(prop.target.get_pymodule_name())
                program.import_(
                    f"{target_module.path}.{prop.target.name}",
                    is_import_attr=True,
                )
                struct_ast(stmt.DefClassVarStatement(prop.name, prop.target.name))

        target_pkg.module(cls.get_pymodule_name()).write(program)
|
39
|
+
|
40
|
+
|
41
|
+
def make_python_relational_model(
    schema: Schema, target_pkg: Package, target_data_pkg: Package
):
    """Make python classes for relational database using SQLAlchemy.

    The generated classes are compatible with SQLAlchemy 2. One module is
    written per database-backed class; a `base` module holding the
    declarative Base (with the type map for custom embedded types) is
    generated last.
    """
    app = target_pkg.app

    def make_base(custom_types: Sequence[ObjectProperty]):
        """Make a base class for our database."""
        program = Program()
        program.import_("__future__.annotations", True)
        program.import_("sera.libs.base_orm.BaseORM", True)
        program.import_("sera.libs.base_orm.create_engine", True)
        program.import_("contextlib.contextmanager", True)
        program.import_("sqlalchemy.orm.Session", True)

        # assume configuration for the app at the top level
        program.import_(f"{app.config.path}.DB_CONNECTION", True)
        program.import_(f"{app.config.path}.DB_DEBUG", True)

        program.root.linebreak()

        type_map = []
        for custom_type in custom_types:
            program.import_(
                f"{target_data_pkg.module(custom_type.target.get_pymodule_name()).path}.{custom_type.target.name}",
                is_import_attr=True,
            )

            # Note: the helper types live in sera.libs.base_orm (previously
            # imported from the non-existent module "sera.libs.baseorm").
            if custom_type.cardinality.is_star_to_many():
                if custom_type.is_map:
                    program.import_("typing.Mapping", True)
                    program.import_("sera.libs.base_orm.DictDataClassType", True)
                    anno_type = f"Mapping[str, {custom_type.target.name}]"
                    orm_type = f"DictDataClassType({custom_type.target.name})"
                else:
                    program.import_("typing.Sequence", True)
                    program.import_("sera.libs.base_orm.ListDataClassType", True)
                    # Sequence takes a single type parameter (unlike Mapping)
                    anno_type = f"Sequence[{custom_type.target.name}]"
                    orm_type = f"ListDataClassType({custom_type.target.name})"
            else:
                program.import_("sera.libs.base_orm.DataClassType", True)
                anno_type = custom_type.target.name
                orm_type = f"DataClassType({custom_type.target.name})"

            if custom_type.is_optional:
                program.import_("typing.Optional", True)
                anno_type = f"Optional[{anno_type}]"

            type_map.append((expr.ExprIdent(anno_type), expr.ExprIdent(orm_type)))

        cls_ast = program.root.class_("Base", [expr.ExprIdent("BaseORM")])(
            stmt.DefClassVarStatement(
                "type_annotation_map", "dict", PredefinedFn.dict(type_map)
            ),
            return_self=True,
        )

        program.root.linebreak()
        program.root.assign(
            DeferredVar("engine", force_name="engine"),
            expr.ExprFuncCall(
                expr.ExprIdent("create_engine"),
                [
                    expr.ExprIdent("DB_CONNECTION"),
                    PredefinedFn.keyword_assignment(
                        "debug", expr.ExprIdent("DB_DEBUG")
                    ),
                ],
            ),
        )

        program.root.linebreak()
        program.root.func("create_db_and_tables", [])(
            stmt.PythonStatement("Base.metadata.create_all(engine)"),
        )

        program.root.linebreak()
        program.root.python_stmt("@contextmanager")
        program.root.func("get_session", [])(
            lambda ast00: ast00.python_stmt("with Session(engine) as session:")(
                lambda ast01: ast01.python_stmt("yield session")
            )
        )

        target_pkg.module("base").write(program)

    # object properties whose target is embedded as a custom column type;
    # collected while generating the classes, then used to build the Base
    custom_types: list[ObjectProperty] = []

    for cls in schema.topological_sort():
        if cls.db is None:
            # skip classes that are not stored in the database
            continue

        program = Program()
        program.import_("__future__.annotations", True)
        program.import_("sqlalchemy.orm.MappedAsDataclass", True)
        program.import_("sqlalchemy.orm.mapped_column", True)
        program.import_("sqlalchemy.orm.Mapped", True)
        program.import_("typing.ClassVar", True)
        program.import_(f"{target_pkg.path}.base.Base", True)

        cls_ast = program.root.class_(
            cls.name, [expr.ExprIdent("MappedAsDataclass"), expr.ExprIdent("Base")]
        )
        cls_ast(
            stmt.DefClassVarStatement(
                "__tablename__",
                type="ClassVar[str]",
                value=expr.ExprConstant(cls.db.table_name),
            ),
            stmt.LineBreak(),
        )

        for prop in cls.properties.values():
            if prop.db is None:
                # skip properties that are not stored in the database
                continue

            if isinstance(prop, DataProperty):
                pytype = prop.datatype.get_sqlalchemy_type()
                if pytype.dep is not None:
                    program.import_(pytype.dep, True)

                propname = prop.name
                proptype = f"Mapped[{pytype.type}]"

                propvalargs = []
                if prop.db.is_primary_key:
                    propvalargs.append(
                        PredefinedFn.keyword_assignment(
                            "primary_key", expr.ExprConstant(True)
                        )
                    )
                if prop.db.is_auto_increment:
                    propvalargs.append(
                        PredefinedFn.keyword_assignment(
                            "autoincrement", expr.ExprConstant("auto")
                        )
                    )
                if prop.db.is_unique:
                    propvalargs.append(
                        PredefinedFn.keyword_assignment(
                            "unique", expr.ExprConstant(True)
                        )
                    )
                propval = expr.ExprFuncCall(
                    expr.ExprIdent("mapped_column"), propvalargs
                )
            else:
                assert isinstance(prop, ObjectProperty)
                if prop.target.db is not None:
                    # if the target class is in the database, we generate a foreign key for it.
                    program.import_("sqlalchemy.ForeignKey", True)

                    # we store this class in the database
                    propname = f"{prop.name}_id"
                    idprop = prop.target.get_id_property()
                    assert idprop is not None
                    idprop_pytype = idprop.datatype.get_sqlalchemy_type()
                    if idprop_pytype.dep is not None:
                        program.import_(idprop_pytype.dep, True)

                    proptype = f"Mapped[{idprop_pytype.type}]"

                    propvalargs: list[expr.Expr] = [
                        expr.ExprConstant(f"{prop.target.db.table_name}.{idprop.name}")
                    ]
                    propvalargs.append(
                        PredefinedFn.keyword_assignment(
                            "ondelete",
                            expr.ExprConstant(prop.db.on_delete.to_sqlalchemy()),
                        )
                    )
                    propvalargs.append(
                        PredefinedFn.keyword_assignment(
                            "onupdate",
                            expr.ExprConstant(prop.db.on_update.to_sqlalchemy()),
                        )
                    )

                    propval = expr.ExprFuncCall(
                        expr.ExprIdent("mapped_column"),
                        [
                            expr.ExprFuncCall(
                                expr.ExprIdent("ForeignKey"),
                                propvalargs,
                            ),
                        ],
                    )
                else:
                    # if the target class is not in the database,
                    program.import_(
                        f"{target_pkg.module(prop.target.get_pymodule_name()).path}.{prop.target.name}",
                        is_import_attr=True,
                    )
                    propname = prop.name
                    proptype = f"Mapped[{prop.target.name}]"

                    # we have two choices, one is to create a composite class, one is to create a custom field
                    if prop.db.is_embedded == "composite":
                        # for a class to be composite, it must have only data properties
                        program.import_("sqlalchemy.orm.composite", True)
                        propvalargs = [expr.ExprIdent(prop.target.name)]
                        for p in prop.target.properties.values():
                            propvalargs.append(
                                expr.ExprFuncCall(
                                    expr.ExprIdent("mapped_column"),
                                    [
                                        expr.ExprIdent(f"{prop.name}_{p.name}"),
                                        expr.ExprIdent(
                                            assert_isinstance(p, DataProperty)
                                            .datatype.get_sqlalchemy_type()
                                            .type
                                        ),
                                        # use the imported PredefinedFn directly
                                        # (was expr.PredefinedFn, inconsistent with
                                        # every other call site in this module)
                                        PredefinedFn.keyword_assignment(
                                            "nullable",
                                            expr.ExprConstant(prop.is_optional),
                                        ),
                                    ],
                                )
                            )
                        propval = expr.ExprFuncCall(
                            expr.ExprIdent("composite"),
                            propvalargs,
                        )
                    else:
                        # we create a custom field, the custom field mapping need to be defined in the base
                        propval = expr.ExprFuncCall(expr.ExprIdent("mapped_column"), [])
                        custom_types.append(prop)

            cls_ast(stmt.DefClassVarStatement(propname, proptype, propval))

        target_pkg.module(cls.get_pymodule_name()).write(program)

    # make a base class that implements the mapping for custom types
    custom_types = filter_duplication(
        custom_types, lambda p: (p.target.name, p.cardinality, p.is_optional, p.is_map)
    )
    make_base(custom_types)
|
@@ -0,0 +1,64 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from typing import Sequence
|
4
|
+
|
5
|
+
from codegen.models import DeferredVar, Program, expr, stmt
|
6
|
+
from loguru import logger
|
7
|
+
from sera.misc import assert_not_null
|
8
|
+
from sera.models import App, DataCollection, Package
|
9
|
+
|
10
|
+
|
11
|
+
def make_python_service_structure(app: App, collections: Sequence[DataCollection]):
    """Make the basic structure for the service.

    Ensures the services package exists on disk, then generates one
    service module per data collection.
    """
    app.services.ensure_exists()
    for data_collection in collections:
        make_python_service(data_collection, app.services)
|
17
|
+
|
18
|
+
|
19
|
+
def make_python_service(collection: DataCollection, target_pkg: Package):
    """Generate the service class for a single data collection.

    Skips generation when the target module already exists so that
    hand-edited service files are never overwritten.
    """
    app = target_pkg.app

    outmod = target_pkg.module(collection.get_pymodule_name())
    if outmod.exists():
        logger.info("`{}` already exists. Skip generation.", outmod.path)
        return

    # assuming the collection has only one class
    cls = collection.cls
    id_type = assert_not_null(cls.get_id_property()).datatype.get_python_type().type

    program = Program()
    program.import_("__future__.annotations", True)
    # f-strings used consistently for generated import paths (was a mix of
    # concatenation and a pointless placeholder-free f-string)
    program.import_(
        f"{app.models.db.path}.{collection.get_pymodule_name()}.{collection.name}",
        True,
    )
    program.import_(f"{app.config.path}.schema", True)
    program.import_("sera.libs.base_service.BaseService", True)

    program.root(
        stmt.LineBreak(),
        lambda ast00: ast00.class_(
            collection.get_service_name(),
            [expr.ExprIdent(f"BaseService[{id_type}, {cls.name}]")],
        )(
            lambda ast01: ast01.func(
                "__init__",
                [
                    DeferredVar.simple("self"),
                ],
            )(
                lambda ast02: ast02.expr(
                    expr.ExprFuncCall(
                        expr.ExprIdent("super().__init__"),
                        [
                            expr.ExprRawPython(f"schema.classes['{cls.name}']"),
                            expr.ExprIdent(cls.name),
                        ],
                    )
                ),
            ),
        ),
    )
    outmod.write(program)
|
@@ -0,0 +1 @@
|
|
1
|
+
from __future__ import annotations
|
sera/misc/__init__.py
ADDED
@@ -0,0 +1,16 @@
|
|
1
|
+
from sera.misc._rdf import SingleNS, Term
from sera.misc._utils import (
    assert_isinstance,
    assert_not_null,
    filter_duplication,
    to_snake_case,
)

# Explicit public API of the sera.misc package.
__all__ = [
    "SingleNS",
    "Term",
    "to_snake_case",
    "assert_isinstance",
    "filter_duplication",
    "assert_not_null",
]
|
sera/misc/_rdf.py
ADDED
@@ -0,0 +1,60 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from dataclasses import dataclass, field
|
4
|
+
from functools import cached_property
|
5
|
+
from typing import Iterable
|
6
|
+
|
7
|
+
from rdflib import OWL, RDF, RDFS, SKOS, XSD, Graph, URIRef
|
8
|
+
from rdflib.namespace import NamespaceManager
|
9
|
+
from sm.typing import IRI, InternalID, RelIRI
|
10
|
+
|
11
|
+
|
12
|
+
@dataclass
class SingleNS:
    """A single RDF namespace identified by a short alias.

    The namespace must end with ``/`` or ``#`` so that local ids can be
    concatenated directly onto it.
    """

    alias: str
    namespace: str

    def __post_init__(self):
        assert self.namespace.endswith(
            ("/", "#")
        ), f"Namespace {self.namespace} should end with / or #"

    def term(self, name: str) -> Term:
        """Wrap *name* into a :class:`Term` bound to this namespace."""
        return Term(self, name)

    def id(self, uri: IRI | URIRef) -> str:
        """Strip this namespace prefix from an absolute URI, leaving the local id."""
        assert uri.startswith(self.namespace), (uri, self.namespace)
        return uri[len(self.namespace) :]

    def uri(self, name: InternalID) -> URIRef:
        """Build an absolute ``URIRef`` for the local id *name*."""
        return URIRef(self.namespace + name)

    def uristr(self, name: InternalID) -> IRI:
        """Build an absolute URI string for the local id *name*."""
        return self.namespace + name

    def __getattr__(self, name: InternalID):
        # attribute access falls back to the relative form: ns.X == "alias:X"
        return f"{self.alias}:{name}"

    def __getitem__(self, name: InternalID):
        # subscript access mirrors attribute access: ns["X"] == "alias:X"
        return f"{self.alias}:{name}"

    def __contains__(self, uri: IRI | URIRef) -> bool:
        # a URI belongs to this namespace iff it starts with the namespace prefix
        return uri.startswith(self.namespace)

    def rel2abs(self, reluri: RelIRI) -> URIRef:
        """Convert a relative URI ("alias:name") to an absolute ``URIRef``."""
        return URIRef(self.namespace + reluri.split(":")[1])

    def abs2rel(self, uri: IRI | URIRef) -> RelIRI:
        """Convert an absolute URI to its relative "alias:name" form."""
        return f"{self.alias}:{self.id(uri)}"
|
49
|
+
|
50
|
+
|
51
|
+
@dataclass
class Term:
    """A term (named node) belonging to a single namespace.

    The relative and absolute URI forms are derived from the namespace and
    the local name, so they are computed once in ``__post_init__`` rather
    than supplied by callers.
    """

    ns: SingleNS
    name: str
    reluri: str = field(init=False)
    uri: URIRef = field(init=False)

    def __post_init__(self):
        # "alias:name" form, via SingleNS.__getitem__
        self.reluri = self.ns[self.name]
        # absolute URIRef form, via SingleNS.uri
        self.uri = self.ns.uri(self.name)
|
sera/misc/_utils.py
ADDED
@@ -0,0 +1,46 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import re
|
4
|
+
from typing import Any, Callable, Iterable, Optional, TypeVar
|
5
|
+
|
6
|
+
T = TypeVar("T")
|
7
|
+
|
8
|
+
|
9
|
+
def to_snake_case(camelcase: str) -> str:
    """Convert a camelCase (or PascalCase) identifier to snake_case.

    Two passes: first separate an acronym from a following capitalized word
    (e.g. "HTTPServer" -> "HTTP_Server"), then separate a lowercase/digit
    character from a following capital (e.g. "myVar" -> "my_Var"), and
    finally lowercase everything.
    """
    acronym_split = re.sub(r"([A-Z]+)([A-Z][a-z])", r"\1_\2", camelcase)
    word_split = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", acronym_split)
    return word_split.lower()
|
14
|
+
|
15
|
+
|
16
|
+
def assert_isinstance(x: Any, cls: type[T]) -> T:
    """Assert that *x* is an instance of *cls* and return it unchanged.

    Useful for narrowing a value's type inline within an expression.

    Raises:
        TypeError: if *x* is not an instance of *cls*.
    """
    if not isinstance(x, cls):
        # Report the expected class itself — the old message used
        # `type(cls)`, which is always `<class 'type'>` and thus useless.
        # TypeError (a subclass of Exception) replaces the overly broad
        # Exception, so existing `except Exception` callers still work.
        raise TypeError(f"{type(x)} doesn't match with {cls}")
    return x
|
20
|
+
|
21
|
+
|
22
|
+
def assert_not_null(x: Optional[T]) -> T:
    """Assert that *x* is not None and return it, narrowing the type.

    Raises:
        AssertionError: if *x* is None.
    """
    # Raise explicitly instead of using a bare `assert`, which is stripped
    # when Python runs with the -O flag; the exception type is kept as
    # AssertionError for backward compatibility with callers.
    if x is None:
        raise AssertionError("Expected a non-None value")
    return x
|
25
|
+
|
26
|
+
|
27
|
+
def filter_duplication(
    lst: Iterable[T], key_fn: Optional[Callable[[T], Any]] = None
) -> list[T]:
    """Return the items of *lst* with duplicates removed, keeping the first occurrence.

    When *key_fn* is given, two items are duplicates if their keys are
    equal; otherwise the items themselves are compared (and must therefore
    be hashable).
    """
    seen = set()
    unique = []
    for item in lst:
        key = item if key_fn is None else key_fn(item)
        if key not in seen:
            seen.add(key)
            unique.append(item)
    return unique
|
sera/models/__init__.py
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
from sera.models._class import Class
from sera.models._collection import DataCollection
from sera.models._datatype import DataType
from sera.models._module import App, Module, Package
from sera.models._multi_lingual_string import MultiLingualString
from sera.models._parse import parse_schema
from sera.models._property import Cardinality, DataProperty, ObjectProperty, Property
from sera.models._schema import Schema

# Explicit public API of the sera.models package.
__all__ = [
    "parse_schema",
    "Schema",
    "Property",
    "DataProperty",
    "ObjectProperty",
    "Class",
    "Cardinality",
    "DataType",
    "MultiLingualString",
    "Package",
    "DataCollection",
    "Module",
    "App",
]
|
sera/models/_class.py
ADDED
@@ -0,0 +1,56 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from dataclasses import dataclass, field
|
4
|
+
from typing import Optional
|
5
|
+
|
6
|
+
from sera.misc import to_snake_case
|
7
|
+
from sera.models._multi_lingual_string import MultiLingualString
|
8
|
+
from sera.models._property import DataProperty, ObjectProperty
|
9
|
+
|
10
|
+
|
11
|
+
@dataclass(kw_only=True)
class ClassDBMapInfo:
    """Represent database information for a class.

    Attached to ``Class.db`` when the class is persisted; classes that are
    not stored in the database carry ``db=None`` instead.
    """

    # name of a corresponding table in the database for this class
    table_name: str
|
17
|
+
|
18
|
+
|
19
|
+
@dataclass(kw_only=True)
|
20
|
+
class Class:
|
21
|
+
"""Represent a class in the schema."""
|
22
|
+
|
23
|
+
# name of the class in the application layer
|
24
|
+
name: str = field(
|
25
|
+
metadata={
|
26
|
+
"description": "Name of the property in the application layer, so it must be a valid Python identifier"
|
27
|
+
}
|
28
|
+
)
|
29
|
+
# human-readable name of the class
|
30
|
+
label: MultiLingualString
|
31
|
+
# human-readable description of the class
|
32
|
+
description: MultiLingualString
|
33
|
+
# properties of the class
|
34
|
+
properties: dict[str, DataProperty | ObjectProperty]
|
35
|
+
|
36
|
+
# whether to store this class in a table in the database
|
37
|
+
db: Optional[ClassDBMapInfo]
|
38
|
+
|
39
|
+
def get_id_property(self) -> Optional[DataProperty]:
|
40
|
+
"""
|
41
|
+
Get the ID property of this class.
|
42
|
+
The ID property is the one tagged with is_primary_key
|
43
|
+
"""
|
44
|
+
assert self.db is not None, "This class is not stored in the database"
|
45
|
+
for prop in self.properties.values():
|
46
|
+
if (
|
47
|
+
isinstance(prop, DataProperty)
|
48
|
+
and prop.db is not None
|
49
|
+
and prop.db.is_primary_key
|
50
|
+
):
|
51
|
+
return prop
|
52
|
+
return None
|
53
|
+
|
54
|
+
def get_pymodule_name(self) -> str:
|
55
|
+
"""Get the python module name of this class as if there is a python module created to store this class only."""
|
56
|
+
return to_snake_case(self.name)
|
@@ -0,0 +1,34 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from dataclasses import dataclass
|
4
|
+
|
5
|
+
from sera.models._class import Class
|
6
|
+
|
7
|
+
|
8
|
+
@dataclass
class DataCollection:
    """Represent a data collection, which can be a class or a data product created via some transformation."""

    cls: Class

    @property
    def name(self) -> str:
        """Name of the collection (same as the underlying class's name)."""
        return self.cls.name

    def get_pymodule_name(self) -> str:
        """Python module name that would hold this collection alone."""
        return self.cls.get_pymodule_name()

    def get_queryable_fields(self) -> set[str]:
        """Names of the fields of this collection usable in queries.

        Only properties that are persisted to the database are queryable.
        """
        return {
            prop.name
            for prop in self.cls.properties.values()
            if prop.db is not None
        }

    def get_service_name(self):
        """Conventional name of the generated service class."""
        return f"{self.name}Service"
|
sera/models/_datatype.py
ADDED
@@ -0,0 +1,54 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import datetime
|
4
|
+
from dataclasses import dataclass
|
5
|
+
from enum import Enum
|
6
|
+
from typing import Literal
|
7
|
+
|
8
|
+
|
9
|
+
@dataclass
class TypeWithDep:
    """A type name together with the module (if any) that must be imported to use it."""

    type: str
    dep: str | None = None

    # mapping from type names to the Python classes they denote
    _PYTHON_TYPES = {
        "str": str,
        "int": int,
        "float": float,
        "bool": bool,
        "bytes": bytes,
        "dict": dict,
        "datetime": datetime.datetime,
    }

    def get_python_type(self) -> type:
        """Get the Python type from the type string for typing annotation in Python."""
        py_type = self._PYTHON_TYPES.get(self.type)
        if py_type is None:
            raise ValueError(f"Unknown type: {self.type}")
        return py_type


@dataclass
class DataType:
    """A primitive datatype of a property in the schema."""

    type: Literal["str", "int", "datetime", "float", "bool", "bytes", "dict"]
    is_list: bool = False
    parent: DataType | None = None

    def get_python_type(self) -> TypeWithDep:
        """Type (plus import dependency) for this datatype in plain Python code."""
        if self.type == "datetime":
            return TypeWithDep(type="datetime", dep="datetime.datetime")
        if self.type in ("str", "int", "float", "bool", "bytes", "dict"):
            return TypeWithDep(type=self.type)
        raise NotImplementedError(self.type)

    def get_sqlalchemy_type(self) -> TypeWithDep:
        """Type (plus import dependency) to use in generated SQLAlchemy models."""
        if self.type == "datetime":
            return TypeWithDep(type="datetime", dep="datetime.datetime")
        # dicts are stored as a JSON column
        if self.type == "dict":
            return TypeWithDep(type="JSON")
        if self.type in ("str", "int", "float", "bool", "bytes"):
            return TypeWithDep(type=self.type)
        raise NotImplementedError(self.type)
|