sera-2 1.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sera_2-1.1.0/PKG-INFO +20 -0
- sera_2-1.1.0/README.md +3 -0
- sera_2-1.1.0/pyproject.toml +18 -0
- sera_2-1.1.0/sera/__init__.py +0 -0
- sera_2-1.1.0/sera/libs/__init__.py +0 -0
- sera_2-1.1.0/sera/libs/api_helper.py +66 -0
- sera_2-1.1.0/sera/libs/base_orm.py +109 -0
- sera_2-1.1.0/sera/libs/base_service.py +78 -0
- sera_2-1.1.0/sera/make/__init__.py +0 -0
- sera_2-1.1.0/sera/make/__main__.py +38 -0
- sera_2-1.1.0/sera/make/make_app.py +142 -0
- sera_2-1.1.0/sera/make/make_python_api.py +242 -0
- sera_2-1.1.0/sera/make/make_python_model.py +282 -0
- sera_2-1.1.0/sera/make/make_python_services.py +64 -0
- sera_2-1.1.0/sera/make/make_typescript_model.py +1 -0
- sera_2-1.1.0/sera/misc/__init__.py +16 -0
- sera_2-1.1.0/sera/misc/_rdf.py +60 -0
- sera_2-1.1.0/sera/misc/_utils.py +46 -0
- sera_2-1.1.0/sera/models/__init__.py +24 -0
- sera_2-1.1.0/sera/models/_class.py +56 -0
- sera_2-1.1.0/sera/models/_collection.py +34 -0
- sera_2-1.1.0/sera/models/_datatype.py +54 -0
- sera_2-1.1.0/sera/models/_module.py +140 -0
- sera_2-1.1.0/sera/models/_multi_lingual_string.py +38 -0
- sera_2-1.1.0/sera/models/_parse.py +153 -0
- sera_2-1.1.0/sera/models/_property.py +124 -0
- sera_2-1.1.0/sera/models/_schema.py +32 -0
- sera_2-1.1.0/sera/namespace.py +5 -0
- sera_2-1.1.0/sera/typing.py +11 -0
sera_2-1.1.0/PKG-INFO
ADDED
@@ -0,0 +1,20 @@
|
|
1
|
+
Metadata-Version: 2.1
|
2
|
+
Name: sera-2
|
3
|
+
Version: 1.1.0
|
4
|
+
Summary:
|
5
|
+
Author: Binh Vu
|
6
|
+
Author-email: bvu687@gmail.com
|
7
|
+
Requires-Python: >=3.12,<4.0
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
9
|
+
Classifier: Programming Language :: Python :: 3.12
|
10
|
+
Classifier: Programming Language :: Python :: 3.13
|
11
|
+
Requires-Dist: black (>=25.0.1,<26.0.0)
|
12
|
+
Requires-Dist: codegen-2 (>=2.1.4,<3.0.0)
|
13
|
+
Requires-Dist: litestar (>=2.15.1,<3.0.0)
|
14
|
+
Requires-Dist: msgspec (>=0.19.0,<0.20.0)
|
15
|
+
Description-Content-Type: text/markdown
|
16
|
+
|
17
|
+
# Overview
|
18
|
+
|
19
|
+
This library enables rapid application development by leveraging a graph-based architecture.
|
20
|
+
|
sera_2-1.1.0/pyproject.toml
ADDED
@@ -0,0 +1,18 @@
|
|
1
|
+
[tool.poetry]
|
2
|
+
name = "sera-2"
|
3
|
+
version = "1.1.0"
|
4
|
+
description = ""
|
5
|
+
authors = ["Binh Vu <bvu687@gmail.com>"]
|
6
|
+
readme = "README.md"
|
7
|
+
packages = [{ include = "sera" }]
|
8
|
+
|
9
|
+
[tool.poetry.dependencies]
|
10
|
+
python = "^3.12"
|
11
|
+
codegen-2 = "^2.1.4"
|
12
|
+
black = "^25.0.1"
|
13
|
+
msgspec = "^0.19.0"
|
14
|
+
litestar = "^2.15.1"
|
15
|
+
|
16
|
+
[build-system]
|
17
|
+
requires = ["poetry-core"]
|
18
|
+
build-backend = "poetry.core.masonry.api"
|
File without changes
|
File without changes
|
@@ -0,0 +1,66 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
import re
|
4
|
+
|
5
|
+
from litestar import Request, status_codes
|
6
|
+
from litestar.exceptions import HTTPException
|
7
|
+
from sera.libs.base_service import Query, QueryOp
|
8
|
+
|
9
|
+
# for parsing field names and operations from query string
|
10
|
+
FIELD_REG = re.compile(r"(?P<name>[a-zA-Z_0-9]+)(?:\[(?P<op>[a-zA-Z0-9]+)\])?")
|
11
|
+
QUERY_OPS = {op.value for op in QueryOp}
|
12
|
+
KEYWORDS = {"field", "limit", "offset", "unique", "sorted_by", "group_by"}
|
13
|
+
|
14
|
+
|
15
|
+
def parse_query(request: Request, fields: set[str], debug: bool) -> Query:
    """Parse query for retrieving records that match a query.

    If a field name collides with a keyword, you can add `_` to the field name.

    To filter records, you can apply a condition on a column using <field>=<value> (equal condition). Or you can
    be explicit by using <field>[op]=<value>, where op is one of the operators defined in QueryOp.

    Args:
        request: incoming request whose query parameters are parsed.
        fields: set of field names that are allowed to be queried.
        debug: if True, raise HTTP 400 on invalid field names instead of
            silently skipping them.

    Returns:
        A mapping of field name -> {QueryOp: value}.

    Raises:
        HTTPException: on an invalid operation, or (in debug mode) on an
            invalid field name.
    """
    query: Query = {}

    for k, v in request.query_params.items():
        if k in KEYWORDS:
            continue
        # fullmatch (not match) so keys with trailing garbage such as
        # `name[gt]junk` are rejected instead of being silently truncated
        m = FIELD_REG.fullmatch(k)
        if not m:
            # Invalid field name format
            if debug:
                raise HTTPException(
                    status_code=status_codes.HTTP_400_BAD_REQUEST,
                    detail=f"Invalid field name: {k}",
                )
            continue

        field_name = m.group("name")
        operation = m.group("op")  # This will be None if no operation is specified

        # If field name ends with '_' and it's to avoid keyword conflict, remove it
        if field_name.endswith("_") and field_name[:-1] in KEYWORDS:
            field_name = field_name[:-1]

        if field_name not in fields:
            # Invalid field name, skip
            if debug:
                raise HTTPException(
                    status_code=status_codes.HTTP_400_BAD_REQUEST,
                    detail=f"Invalid field name: {field_name}",
                )
            continue

        # Process based on operation or default to equality check
        if not operation:
            operation = QueryOp.eq
        else:
            # NOTE(review): QUERY_OPS is built from QueryOp *values* ("<", "in",
            # "fuzzy", ...) while FIELD_REG only admits alphanumeric tokens, so
            # symbolic operators such as "<" can never appear here -- confirm
            # which token form clients are expected to send.
            if operation not in QUERY_OPS:
                raise HTTPException(
                    status_code=status_codes.HTTP_400_BAD_REQUEST,
                    detail=f"Invalid operation: {operation}",
                )
            operation = QueryOp(operation)

        # merge multiple conditions on the same field (e.g. ?age[gt]=1&age[lt]=9)
        # instead of letting the last one overwrite the others
        query.setdefault(field_name, {})[operation] = v

    return query
@@ -0,0 +1,109 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from typing import Optional
|
4
|
+
|
5
|
+
import orjson
|
6
|
+
from sqlalchemy import LargeBinary, TypeDecorator
|
7
|
+
from sqlalchemy import create_engine as sqlalchemy_create_engine
|
8
|
+
from sqlalchemy import update
|
9
|
+
from sqlalchemy.orm import DeclarativeBase, Session
|
10
|
+
|
11
|
+
|
12
|
+
class BaseORM(DeclarativeBase):
    """Declarative base for ORM models, providing generic update helpers."""

    def get_update_query(self):
        """Build an UPDATE statement for this instance.

        Primary-key columns become WHERE conditions; every column (including
        the primary keys) is written into the VALUES clause.
        """
        statement = update(self.__class__)
        values: dict = {}
        for column in self.__table__.columns:
            current = getattr(self, column.name)
            if column.primary_key:
                statement = statement.where(
                    getattr(self.__class__, column.name) == current
                )
            values[column.name] = current

        return statement.values(**values)

    def get_update_args(self):
        """Return a mapping of column name -> current value for every column."""
        return {
            column.name: getattr(self, column.name)
            for column in self.__table__.columns
        }

    @classmethod
    def from_dict(cls, data: dict):
        """Deserialize a record from a plain dict; concrete models must override."""
        raise NotImplementedError()
class DataclassType(TypeDecorator):
    """SqlAlchemy Type decorator to serialize dataclasses"""

    impl = LargeBinary
    cache_ok = True

    def __init__(self, cls):
        super().__init__()
        # dataclass type used to revive stored values
        self.cls = cls

    def process_bind_param(self, value, dialect):
        # serialize the dataclass to JSON bytes on the way into the database
        return None if value is None else orjson.dumps(value.to_dict())

    def process_result_value(self, value, dialect):
        # inverse of process_bind_param: JSON bytes -> dataclass instance
        if value is None:
            return None
        return self.cls.from_dict(orjson.loads(value))
class ListDataclassType(TypeDecorator):
    """SqlAlchemy Type decorator to serialize list of dataclasses"""

    impl = LargeBinary
    cache_ok = True

    def __init__(self, cls):
        super().__init__()
        # dataclass type of the list elements
        self.cls = cls

    def process_bind_param(self, value, dialect):
        # serialize every element into a single JSON array blob
        if value is None:
            return None
        serialized = [item.to_dict() for item in value]
        return orjson.dumps(serialized)

    def process_result_value(self, value, dialect):
        # inverse of process_bind_param: JSON array -> list of dataclasses
        if value is None:
            return None
        raw = orjson.loads(value)
        return [self.cls.from_dict(item) for item in raw]
class DictDataClassType(TypeDecorator):
    """SqlAlchemy Type decorator to serialize mapping of dataclasses"""

    impl = LargeBinary
    cache_ok = True

    def __init__(self, cls):
        super().__init__()
        # dataclass type of the mapping's values (keys are stored as-is)
        self.cls = cls

    def process_bind_param(self, value, dialect):
        # serialize each value, keeping keys untouched, into one JSON object blob
        if value is None:
            return None
        serialized = {key: item.to_dict() for key, item in value.items()}
        return orjson.dumps(serialized)

    def process_result_value(self, value, dialect):
        # inverse of process_bind_param: JSON object -> dict of dataclasses
        if value is None:
            return None
        raw = orjson.loads(value)
        return {key: self.cls.from_dict(item) for key, item in raw.items()}
def create_engine(
    dbconn: str,
    connect_args: Optional[dict] = None,
    debug: bool = False,
):
    """Create a SQLAlchemy engine for the given connection string.

    Args:
        dbconn: database connection URL (e.g. ``sqlite:///...``).
        connect_args: extra arguments forwarded to the DBAPI ``connect()``
            call. Previously this parameter was unconditionally overwritten
            (i.e. silently ignored); it is now honored and only defaulted
            when the caller passes ``None``.
        debug: if True, echo the generated SQL statements.

    Returns:
        A SQLAlchemy ``Engine``.
    """
    if connect_args is None:
        # sqlite connections may be shared across threads by the web server,
        # so disable sqlite3's same-thread check by default
        connect_args = (
            {"check_same_thread": False} if dbconn.startswith("sqlite") else {}
        )
    return sqlalchemy_create_engine(dbconn, connect_args=connect_args, echo=debug)
@@ -0,0 +1,78 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from enum import Enum
|
4
|
+
from typing import Annotated, Any, Generic, Optional, Sequence, TypeVar
|
5
|
+
|
6
|
+
from sera.libs.base_orm import BaseORM
|
7
|
+
from sera.misc import assert_not_null
|
8
|
+
from sera.models import Class
|
9
|
+
from sera.typing import FieldName, T, doc
|
10
|
+
from sqlalchemy import exists, select
|
11
|
+
from sqlalchemy.orm import Session
|
12
|
+
|
13
|
+
|
14
|
+
class QueryOp(str, Enum):
    """Comparison operators usable in a record query (``<field>[op]=<value>``).

    NOTE(review): the member *values* are SQL-ish symbols ("<", "=", ...) while
    the query-string parser only admits alphanumeric op tokens -- confirm which
    form (member name vs. value) clients are expected to send.
    """

    lt = "<"
    lte = "<="
    gt = ">"
    gte = ">="
    eq = "="
    ne = "!="
    # select records where values are in the given list
    in_ = "in"
    not_in = "not in"
    # for full text search
    fuzzy = "fuzzy"
Query = Annotated[
|
29
|
+
dict[FieldName, dict[QueryOp, Annotated[Any, doc("query value")]]],
|
30
|
+
doc("query operations"),
|
31
|
+
]
|
32
|
+
C = TypeVar("C", bound=BaseORM)
|
33
|
+
ID = Annotated[TypeVar("ID"), doc("ID of a class")]
|
34
|
+
|
35
|
+
|
36
|
+
class BaseService(Generic[ID, C]):
    """Generic read-only service over an ORM class.

    Type parameters:
        ID: type of the class's identifier.
        C: the ORM class handled by this service.
    """

    def __init__(self, cls: Class, orm_cls: type[C]):
        self.cls = cls
        self.orm_cls = orm_cls
        self.id_prop = assert_not_null(cls.get_id_property())

        # cached ORM column attribute of the identifier, used to build queries
        self._cls_id_prop = getattr(self.orm_cls, self.id_prop.name)

    def get(
        self,
        query: Query,
        limit: int,
        offset: int,
        unique: bool,
        sorted_by: list[str],
        group_by: list[str],
        fields: list[str],
    ) -> Sequence[C]:
        """Retrieving records matched a query.

        Args:
            query: The query to filter the records
            limit: The maximum number of records to return
            offset: The number of records to skip before returning results
            unique: Whether to return unique results only
            sorted_by: list of field names to sort by, prefix a field with '-' to sort that field in descending order
            group_by: list of field names to group by
            fields: list of field names to include in the results
        """
        # TODO: not implemented yet -- always returns an empty result set
        return []

    def get_by_id(self, id: ID, session: Session) -> Optional[C]:
        """Retrieving a record by ID."""
        q = select(self.orm_cls).where(self._cls_id_prop == id)
        return session.execute(q).scalar_one_or_none()

    def has_id(self, id: ID, session: Session) -> bool:
        """Check whether we have a record with the given ID."""
        # use the 2.0-style session.execute(select(...)) for consistency with
        # get_by_id, instead of the legacy session.query(...) API
        q = select(exists().where(self._cls_id_prop == id))
        return bool(session.execute(q).scalar())
File without changes
|
@@ -0,0 +1,38 @@
|
|
1
|
+
from __future__ import annotations
|
2
|
+
|
3
|
+
from pathlib import Path
|
4
|
+
from typing import Annotated
|
5
|
+
|
6
|
+
import typer
|
7
|
+
from sera.make.make_app import make_app
|
8
|
+
|
9
|
+
app = typer.Typer(pretty_exceptions_short=True, pretty_exceptions_enable=False)
|
10
|
+
|
11
|
+
|
12
|
+
@app.command()
def cli(
    app_dir: Annotated[
        Path,
        typer.Option("--app", help="Directory of the generated application"),
    ],
    schema_files: Annotated[
        list[Path],
        typer.Option(
            "-s", help="YAML schema files. Multiple files are merged automatically"
        ),
    ],
    api_collections: Annotated[
        list[str],
        typer.Option(
            "-c",
            "--collection",
            help="API collections to generate.",
        ),
    ],
):
    """Generate Python model classes from a schema file."""
    typer.echo(f"Generating application in {app_dir}")
    make_app(app_dir, schema_files, api_collections)


# Guard the CLI entry point: a bare module-level `app()` would run the CLI
# (and may call sys.exit) on a mere import of this module. Under
# `python -m sera.make` __name__ is "__main__", so behavior is unchanged.
if __name__ == "__main__":
    app()
@@ -0,0 +1,142 @@
|
|
1
|
+
from __future__ import annotations

import os
from pathlib import Path
from pydoc import doc
from typing import Annotated

from codegen.models import DeferredVar, PredefinedFn, Program, expr, stmt
from loguru import logger

from sera.make.make_python_api import make_python_api
from sera.make.make_python_model import (
    make_python_data_model,
    make_python_relational_model,
)
from sera.make.make_python_services import make_python_service_structure
from sera.models import App, DataCollection, parse_schema
# deliberately shadows pydoc.doc above: the Annotated metadata in this module
# wants the project's doc() marker (as used in sera.libs.base_service)
from sera.typing import doc
|
19
|
+
def make_config(app: App):
    """Make the configuration for the application.

    Emits a `config` module into the generated app that, at the app's runtime:
    resolves CFG_FILE (from the env var, falling back to ``<pkg>/../config.yaml``),
    deserializes it with serde.yaml, and exposes DB_CONNECTION, DB_DEBUG,
    ROUTER_DEBUG, PKG_DIR and the parsed `schema`. Skipped if the config module
    already exists, so user edits are preserved.
    """
    if app.config.exists():
        logger.info("`{}` already exists. Skip generation.", app.config.path)
        return

    program = Program()
    program.import_("__future__.annotations", True)
    program.import_("os", False)
    program.import_("serde.yaml", False)
    program.import_("pathlib.Path", True)
    program.import_("sera.models.parse_schema", True)

    # NOTE(review): the arguments below are emitted as statements in order, so
    # the if/else pair and the assignments must stay in this exact sequence.
    program.root(
        # if "CFG_FILE" not in os.environ: CFG_FILE = <pkg>/../config.yaml
        lambda ast: ast.if_(
            PredefinedFn.not_has_item(
                expr.ExprIdent("os.environ"), expr.ExprConstant("CFG_FILE")
            )
        )(
            lambda ast01: ast01.assign(
                DeferredVar.simple("CFG_FILE"),
                expr.ExprRawPython("Path(__file__).parent.parent / 'config.yaml'"),
            ),
        ),
        # else: CFG_FILE = Path(os.environ["CFG_FILE"])
        lambda ast: ast.else_()(
            lambda ast01: ast01.assign(
                DeferredVar.simple("CFG_FILE"),
                expr.ExprRawPython('Path(os.environ["CFG_FILE"])'),
            ),
        ),
        # cfg = serde.yaml.deser(CFG_FILE)
        lambda ast: ast.assign(
            DeferredVar.simple("cfg"),
            expr.ExprFuncCall(
                expr.ExprIdent("serde.yaml.deser"), [expr.ExprIdent("CFG_FILE")]
            ),
        ),
        stmt.LineBreak(),
        lambda ast: ast.assign(
            DeferredVar.simple("DB_CONNECTION"),
            expr.ExprIdent("cfg['db']['connection']"),
        ),
        lambda ast: ast.assign(
            DeferredVar.simple("DB_DEBUG"),
            expr.ExprIdent('os.environ.get("DB_DEBUG", "0") == "1"'),
        ),
        lambda ast: ast.assign(
            DeferredVar.simple("ROUTER_DEBUG"),
            expr.ExprIdent('os.environ.get("ROUTER_DEBUG", "0") == "1"'),
        ),
        stmt.LineBreak(),
        lambda ast: ast.assign(
            DeferredVar.simple("PKG_DIR"),
            expr.ExprRawPython("Path(__file__).parent"),
        ),
        # schema = parse_schema([PKG_DIR / <relpath-to-each-schema-file>, ...])
        # the schema paths are stored relative to the app root so the generated
        # app stays relocatable
        lambda ast: ast.assign(
            DeferredVar.simple("schema"),
            expr.ExprFuncCall(
                expr.ExprIdent("parse_schema"),
                [
                    PredefinedFn.list(
                        [
                            expr.ExprDivision(
                                expr.ExprIdent("PKG_DIR"),
                                expr.ExprConstant(
                                    os.path.relpath(path.absolute(), app.root.dir)
                                ),
                            )
                            for path in app.schema_files
                        ]
                    )
                ],
            ),
        ),
    )

    app.config.write(program)
def make_app(
    app_dir: Annotated[
        Path,
        doc("Directory of the generated application"),
    ],
    schema_files: Annotated[
        list[Path],
        doc("YAML schema files. Multiple files are merged automatically"),
    ],
    api_collections: Annotated[
        list[str],
        doc("API collections to generate."),
    ],
):
    """Generate a full application (config, models, API, services) from schema files.

    Returns the constructed App object describing the generated project.

    NOTE(review): `doc` is imported from `pydoc` at the top of this file, which
    does not look like the intended doc() marker (cf. `sera.typing.doc` used in
    sera.libs.base_service) -- confirm. Harmless at runtime only because
    `from __future__ import annotations` keeps annotations unevaluated.
    """
    schema = parse_schema(schema_files)

    # the application is named after its target directory
    app = App(app_dir.name, app_dir, schema_files)

    # generate application configuration
    make_config(app)

    # generate models from schema
    make_python_data_model(schema, app.models.pkg("data"))
    make_python_relational_model(schema, app.models.pkg("db"), app.models.pkg("data"))

    # each requested collection must name a class defined in the schema
    collections = [DataCollection(schema.classes[cname]) for cname in api_collections]

    # generate API
    make_python_api(app, collections)

    # generate services
    make_python_service_structure(app, collections)

    return app
if __name__ == "__main__":
    # NOTE(review): ad-hoc developer harness with hard-coded absolute paths;
    # it only works on the original author's machine. Consider removing it or
    # reading the paths from CLI arguments / environment variables.
    make_app(
        Path("/Volumes/research/workspace/libs/sera/tests/resources/myapp"),
        [
            Path(
                "/Volumes/research/workspace/libs/sera/tests/resources/schema/product.yml"
            )
        ],
        ["Product", "Category"],
    )