python-general-be-lib 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- general/__init__.py +0 -0
- general/exception/__init__.py +0 -0
- general/exception/access_exceptions.py +11 -0
- general/exception/crud_exceptions.py +37 -0
- general/exception/exception_interface.py +10 -0
- general/interface/base/__init__.py +2 -0
- general/interface/base/base_model.py +27 -0
- general/interface/base/declarative_base.py +57 -0
- general/interface/metadata/__init__.py +0 -0
- general/interface/metadata/crud_metadata.py +179 -0
- general/interface/metadata/geom_metadata.py +88 -0
- general/interface/repository/__init__.py +4 -0
- general/interface/repository/crud_repository.py +272 -0
- general/interface/repository/geometry_repository.py +103 -0
- general/interface/repository/handler/__init__.py +2 -0
- general/interface/repository/handler/base_handler.py +40 -0
- general/interface/repository/handler/ilike_handler.py +38 -0
- general/interface/repository/handler/interval_handler.py +34 -0
- general/interface/repository/many_to_many_repository.py +101 -0
- general/interface/repository/view_repository.py +57 -0
- general/log_config.ini +21 -0
- general/logger.py +54 -0
- general/paginator_dto.py +23 -0
- python_general_be_lib-0.1.0.dist-info/METADATA +33 -0
- python_general_be_lib-0.1.0.dist-info/RECORD +28 -0
- python_general_be_lib-0.1.0.dist-info/WHEEL +5 -0
- python_general_be_lib-0.1.0.dist-info/licenses/LICENSE +21 -0
- python_general_be_lib-0.1.0.dist-info/top_level.txt +1 -0
general/__init__.py
ADDED
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from .exception_interface import ExceptionInterface


class ForbiddenException(ExceptionInterface):
    """HTTP 403: the authenticated user may not access this resource."""
    # ExceptionInterface builds the HTTPException detail from these two fields.
    status_code = 403
    default_message = "Risorsa non accessibile da questo utente"


class UnauthorizedException(ExceptionInterface):
    """HTTP 401: the request lacks valid authentication credentials."""
    status_code = 401
    default_message = "Non autorizzato"
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from .exception_interface import ExceptionInterface


class BadRequestException(ExceptionInterface):
    """HTTP 400: the request is malformed or semantically invalid."""
    status_code = 400
    default_message = "Richiesta non corretta"


class EntityNotFoundException(ExceptionInterface):
    """HTTP 404: the requested entity does not exist."""
    status_code = 404
    default_message = "Dato non trovato"


class ServiceUnavailableException(ExceptionInterface):
    """HTTP 503: the backing service is temporarily unreachable."""
    status_code = 503
    default_message = "Servizio momentaneamente non disponibile, riprovare più tardi"


class UnprocessableEntityException(ExceptionInterface):
    """HTTP 422: the entity is well-formed but cannot be processed."""
    status_code = 422
    default_message = "Entità non processabile"


class HasNoAttributeException(ExceptionInterface):
    """HTTP 400 raised when a requested attribute/column is missing on an entity."""
    status_code = 400
    default_message = "Richiesta non corretta"

    def __init__(self, attr: str = ""):
        # Name the missing attribute when the caller supplies one; otherwise
        # fall back to a generic "some attributes are missing" message.
        if attr:
            super().__init__(custom_message=f"Attributo {attr} non presente", )
        else:
            super().__init__(custom_message="Alcuni attributi non sono presenti per questa entità", )


class InternalServerException(ExceptionInterface):
    """HTTP 500: unexpected server-side failure."""
    status_code = 500
    default_message = "Impossibile gestire la richiesta al momento"
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from fastapi import HTTPException


class ExceptionInterface(HTTPException):
    """Base class for this package's HTTP exceptions.

    Subclasses define ``status_code`` and ``default_message``; an optional
    ``custom_message`` is appended to the default in the response detail.
    """
    status_code: int
    default_message: str

    def __init__(self, custom_message: str | None = None):
        # Detail is "<default> - <custom>" when a custom message is given,
        # the bare default otherwise.
        detail = f"{self.default_message} - {custom_message}" if custom_message else self.default_message
        super().__init__(status_code=self.status_code, detail=detail)
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
__all__ = ["ExtBaseModel", "CamelExtBaseModel"]
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, ConfigDict
|
|
4
|
+
from pydantic.alias_generators import to_camel
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ConfigModel(BaseModel):
    """Pydantic base carrying the shared model configuration for this package."""
    # from_attributes: allow validation from ORM objects;
    # populate_by_name: accept both field names and aliases;
    # validate_default: run validation on default values too.
    model_config = ConfigDict(from_attributes=True, populate_by_name=True, arbitrary_types_allowed=True, validate_default=True)
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class CamelConfigModel(ConfigModel):
    """ConfigModel variant whose field aliases are generated in camelCase."""
    model_config = ConfigDict(alias_generator=to_camel)

    @classmethod
    def get_attr_by_alias(cls, alias: str) -> str:
        """Map a (camelCase) alias back to its snake_case attribute name.

        Returns *alias* unchanged when no declared field carries that alias.
        """
        matches = (name for name, field in cls.model_fields.items() if field.alias == alias)
        return next(matches, alias)
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ExtBaseModel(ConfigModel):
    """Public base model (snake_case field access) for package consumers."""
    pass
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class CamelExtBaseModel(CamelConfigModel):
    """Public base model with camelCase aliases for package consumers."""
    pass
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
__all__ = ["Base", "retrieve_mapper_entity", "retrieve_mapper_from_table"]
|
|
2
|
+
|
|
3
|
+
from typing import Optional, Any
|
|
4
|
+
|
|
5
|
+
from geoalchemy2 import Geometry, WKBElement
|
|
6
|
+
from pydantic import create_model
|
|
7
|
+
from sqlalchemy.inspection import inspect
|
|
8
|
+
from sqlalchemy.orm import Mapper, as_declarative
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@as_declarative()
class Base:
    """Declarative base shared by all ORM entities of this package."""

    @classmethod
    def to_pydantic(cls, model_name: Optional[str] = None, full_optional: bool = False, default_values: Optional[dict[str, Any]] = None, **create_model_kwargs):
        """Build a pydantic model mirroring this entity's mapped columns.

        model_name: name of the generated model (defaults to "<Entity>Model").
        full_optional: make every field Optional and default it to None.
        default_values: per-column default overrides for generated fields.
        create_model_kwargs: forwarded to pydantic.create_model (e.g. __base__).
        """
        if not default_values:
            default_values = dict()
        mapper: Mapper = inspect(cls)
        model_name = f"{cls.__name__}Model" if not model_name else model_name
        fields = dict()
        # Default for each field: None when full_optional, otherwise required (...).
        field_values = {col.name: None if full_optional else ... for col in mapper.columns}
        field_values.update(default_values)
        for col in mapper.columns:
            field_name = col.name
            if isinstance(col.type, Geometry):
                # Geometry columns are exposed as raw WKB elements.
                field_type = Optional[WKBElement] if col.nullable or full_optional else WKBElement
            else:
                field_type = Optional[col.type.python_type] if col.nullable or full_optional else col.type.python_type
            fields[field_name] = (field_type, field_values[field_name])
        return create_model(model_name, **create_model_kwargs, **fields)

    @classmethod
    def columns(cls):
        """Return the underlying table's column collection."""
        return cls.__table__.c

    @classmethod
    def columns_names(cls):
        """Return the names of the underlying table's columns."""
        return [col.name for col in cls.__table__.c]

    @classmethod
    def primary_key(cls):
        """Return the primary-key columns of the underlying table."""
        return cls.__table__.primary_key.columns.values()

    def to_dict(self):
        """Return a {column_name: value} dict of this instance's column values."""
        return {key: getattr(self, key) for key in self.__class__.columns_names()}
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def retrieve_mapper_entity(entity_name: str):
    """Return the mapped class whose class name equals *entity_name*.

    Walks the full subclass tree of ``Base`` (the original checked only
    direct subclasses, missing entities declared through intermediate or
    abstract bases). Returns None when no mapped class matches.
    """
    pending = list(Base.__subclasses__())
    while pending:
        mapper = pending.pop()
        if mapper.__name__ == entity_name:
            return mapper
        pending.extend(mapper.__subclasses__())
    return None
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def retrieve_mapper_from_table(table_name: str):
    """Return the mapped class whose ``__tablename__`` equals *table_name*.

    Walks the full subclass tree of ``Base`` (the original checked only
    direct subclasses); classes without a ``__tablename__`` (e.g. abstract
    intermediate bases) are skipped instead of raising AttributeError.
    Returns None when nothing matches.
    """
    pending = list(Base.__subclasses__())
    while pending:
        mapper = pending.pop()
        if getattr(mapper, "__tablename__", None) == table_name:
            return mapper
        pending.extend(mapper.__subclasses__())
    return None
|
File without changes
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
__all__ = ["CrudMetadata"]
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from typing import Optional, Any, Type, Iterable, Sequence
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
from sqlalchemy import MetaData, Engine, Column, Table, inspect, Connection, select, RowMapping, CursorResult, not_
|
|
8
|
+
|
|
9
|
+
from ...exception.crud_exceptions import HasNoAttributeException
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class CrudMetadata:
    """Reflection-based CRUD helper over SQLAlchemy Core tables.

    Operates on table *names* (resolved through a schema-aware MetaData)
    instead of ORM-mapped classes. Connections are self-managed unless the
    caller passes one in, in which case it is left open for the caller.
    """
    __slots__ = "schema", "metadata", "engine", "engine_inspection"

    @property
    def open_connection(self):
        """Open and return a new Connection from the engine (caller-owned)."""
        return self.engine.connect()

    def __init__(self, engine: Engine, schema_name: str = None, reflect: bool = True):
        """Bind to *engine*; with reflect=True pre-load every table definition."""
        self.schema = schema_name
        self.metadata = MetaData(schema_name) if schema_name else MetaData()
        self.engine = engine
        self.engine_inspection = inspect(self.engine)
        if reflect:
            self.metadata.reflect(bind=self.engine, schema=schema_name)

    # ---------------------------

    def _handle_exception(self, connection: Connection, e: Exception, to_raise: bool = True):
        """Close *connection*, log *e*, and re-raise unless to_raise=False."""
        connection.close()
        logging.error(e)
        if to_raise:
            raise e

    def commit(self, connection: Connection):
        """Commit the connection, routing failures through _handle_exception."""
        try:
            connection.commit()
        except Exception as e:
            self._handle_exception(connection, e)

    def has_columns(self, *columns: Iterable[str], table: Table):
        """Return True when every name in *columns* is a column of *table*."""
        # Build the name set once instead of once per requested column.
        table_columns = {c.key for c in table.c}
        return all(column in table_columns for column in columns)

    def _select(self, table: Table, columns: list[str] = None):
        """Build a SELECT on *table*, optionally restricted to *columns*.

        Raises HasNoAttributeException when a requested column is missing
        (narrowed from the original bare ``except:``).
        """
        try:
            stmt = table.select() if not columns else select(*[table.c[column] for column in columns])
        except KeyError:
            raise HasNoAttributeException()
        return stmt

    def _return(self, connection: Connection, result: CursorResult | RowMapping | Sequence[RowMapping] | Sequence[dict] = None, keep_open: bool = False, commit: bool = True,
                parsing_model: Type[BaseModel] | BaseModel = None):
        """Finalize an operation.

        With keep_open the caller keeps the connection and gets the raw
        result; otherwise optionally commit, optionally parse each row with
        *parsing_model*, and close the connection.
        """
        if keep_open:
            return result
        if commit:
            self.commit(connection=connection)
        result = result if parsing_model is None else [parsing_model(**r) for r in result]
        connection.close()
        return result

    def create_table(self, name: str, columns: Iterable[Column], **kwargs):
        """Create table *name* with *columns*, registering it on the metadata."""
        table = Table(name=name, metadata=self.metadata, *columns, **kwargs)
        table.create(self.engine)

    def drop_table(self, name: str):
        """Drop table *name* from the database and remove it from the metadata."""
        table = self.get_table(name=name)
        table.drop(self.engine)
        self.metadata.remove(table=table)

    def find(self, from_table: str, connection: Connection = None, where: dict[str, Any] = None, columns: list[str] = None, limit: int = 0, parsing_model: Type[BaseModel] = None, **kwhere):
        """SELECT rows of *from_table* matching equality filters.

        where / **kwhere: column -> value filters (kwhere only applies when
        where is None); limit: max rows (0 = unlimited).
        """
        keep_open = connection is not None
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        result = self._find(table=table, connection=connection, where=where, columns=columns, limit=limit, **kwhere)
        return self._return(connection=connection, result=result, keep_open=keep_open, commit=False, parsing_model=parsing_model)

    def _find(self, table: Table, connection: Connection, where: dict[str, Any] = None, columns: list[str] = None, limit: int = 0, **kwhere):
        if where is None:
            where = kwhere if kwhere else {}
        stmt = self._select(table=table, columns=columns)
        stmt = stmt.where(*[self._eq_where(table.c[column], condition) for column, condition in where.items()])
        stmt = self._add_limit_offset_condition(stmt, limit)
        return self.execute(connection=connection, stmt=stmt, mapping=True)

    def insert(self, from_table: str, connection: Connection = None, models: list[Type[BaseModel] | BaseModel] = None, values: list[dict[str, Any]] = None, returning: bool = True,
               parsing_model: Type[BaseModel] | BaseModel = None):
        """INSERT rows built from raw *values* dicts and/or pydantic *models*
        (models are dumped with exclude_none so database defaults apply)."""
        keep_open = connection is not None
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        # Copy the caller's list: the original extended *values* in place,
        # mutating the argument the caller passed in.
        values_condition = list(values) if values else []
        if models:
            values_condition.extend(model.model_dump(exclude_none=True) for model in models)
        result = self._insert(table=table, connection=connection, values=values_condition, returning=returning)
        return self._return(connection=connection, result=result, keep_open=keep_open, commit=True, parsing_model=parsing_model)

    def _insert(self, table: Table, connection: Connection, values: list[dict[str, Any]] = None, returning: bool = True):
        if values:
            stmt = table.insert().values(values)
            # returning(table) yields the full inserted rows; the original's
            # bare returning() is invalid without column arguments.
            stmt = stmt if not returning else stmt.returning(table)
            return self.execute(connection=connection, stmt=stmt, mapping=True)
        return []

    def update(self, from_table: str, connection: Connection = None, where: dict[str, Any] = None, returning: bool = True, parsing_model: Type[BaseModel] | BaseModel = None, **kwargs):
        """UPDATE rows matching *where*, setting **kwargs column values."""
        keep_open = connection is not None
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        result = self._update(table=table, connection=connection, where=where, returning=returning, **kwargs)
        return self._return(connection=connection, result=result, keep_open=keep_open, commit=True, parsing_model=parsing_model)

    def _update(self, table: Table, connection: Connection, where: dict[str, Any] = None, returning: bool = True, **kwargs):
        if where is None:
            where = {}
        stmt = table.update()
        stmt = stmt.where(*[self._eq_where(table.c[column], condition) for column, condition in where.items()])
        stmt = stmt.values(**kwargs)
        # Same fix as _insert: RETURNING needs the columns to return.
        stmt = stmt if not returning else stmt.returning(table)
        return self.execute(connection=connection, stmt=stmt, mapping=True)

    def delete(self, from_table: str, connection: Connection = None, where: dict[str, Any] = None, **kwhere):
        """DELETE rows matching the filters; returns the affected row count."""
        keep_open = connection is not None
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        num_rows = self._delete(table=table, connection=connection, where=where, **kwhere)
        if not keep_open:
            # Commit only when something was deleted, but always close our
            # own connection (the original leaked it when no rows matched).
            if num_rows:
                self.commit(connection)
            connection.close()
        return num_rows

    def _delete(self, table: Table, connection: Connection, where: dict[str, Any] = None, **kwhere):
        if where is None:
            where = kwhere if kwhere else {}
        stmt = table.delete()
        stmt = stmt.where(*[self._eq_where(table.c[column], condition) for column, condition in where.items()])
        return self.execute(connection=connection, stmt=stmt).rowcount

    # --------------

    def execute(self, connection: Connection, stmt, mapping: bool = False):
        """Execute *stmt*; with mapping=True return a list of RowMapping,
        otherwise the raw CursorResult."""
        try:
            result = connection.execute(stmt)
        except Exception as e:
            self._handle_exception(connection, e)
        else:
            if mapping:
                result = result.mappings().all()
            return result

    def _get_table(self, name: str):
        """Look *name* up in the reflected metadata (schema-qualified)."""
        if self.schema is not None:
            table_name = name if name.startswith(f"{self.schema}.") else f"{self.schema}.{name}"
        else:
            table_name = name
        return self.metadata.tables.get(table_name)

    def get_table(self, name: str) -> Optional[Table]:
        """Return the Table for *name*, autoloading it from the database when
        it is not yet present in the metadata."""
        table = self._get_table(name=name)
        if table is None:
            table = Table(name, self.metadata, schema=self.schema, autoload_with=self.engine)
        return table

    def has_table(self, name: str):
        """Return True when the database has table *name* in the schema."""
        return self.engine_inspection.has_table(name, self.schema)

    def _add_limit_offset_condition(self, stmt, limit: int, page: int = None):
        """Apply LIMIT (when limit > 0) and a 1-based *page* OFFSET."""
        if limit and limit > 0:
            stmt = stmt.limit(limit)
            if page and page > 0:
                offset = (page - 1) * limit
                stmt = stmt.offset(offset)
        return stmt

    def _eq_where(self, col: Column, condition, neq: bool = False):
        """Equality predicate: IN for list conditions, ANY for ARRAY columns,
        plain == otherwise; negated when *neq* is True."""
        eq_where = col.in_(condition) if isinstance(condition, list) else col == condition if str(col.type) != "ARRAY" else condition == col.any_()
        if neq:
            # Bug fix: the original called not_() and discarded its result,
            # so neq=True silently produced the non-negated predicate.
            eq_where = not_(eq_where)
        return eq_where
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
__all__ = ["GeomMetadata"]
|
|
2
|
+
|
|
3
|
+
from typing import Any, Type
|
|
4
|
+
|
|
5
|
+
from geoalchemy2 import WKBElement
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
from sqlalchemy import Connection, Table, Column
|
|
8
|
+
|
|
9
|
+
from .crud_metadata import CrudMetadata
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class GeomMetadata(CrudMetadata):
    """CrudMetadata extension adding PostGIS spatial queries (intersects/contains)."""

    def intersects(self, from_table: str, geom: WKBElement, polygon_column_name: str, polygon_column_srid: int, connection: Connection = None, tolerance: float = 0.0, columns: list[str] = None,
                   where: dict[str, Any] = None, not_where: dict[str, Any] = None, limit: int = 0, geom_srid: int = 4326, parsing_model: Type[BaseModel] | BaseModel = None, intersection: bool = False,
                   intersection_area: bool = False):
        """SELECT rows of *from_table* whose *polygon_column_name* geometry
        intersects *geom*.

        tolerance: ST_Buffer radius applied to *geom* before testing.
        geom_srid / polygon_column_srid: *geom* is ST_Transform-ed when they differ.
        intersection / intersection_area: add the intersection geometry and/or
        its area (projected to EPSG:3857) as extra result columns.
        where / not_where: extra equality / negated-equality filters.
        """

        keep_open = False if connection is None else True
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        polygon_column = table.c[polygon_column_name]
        result = self._intersects(table=table, connection=connection, polygon_column=polygon_column, polygon_column_srid=polygon_column_srid, geom=geom, tolerance=tolerance, where=where, not_where=not_where,
                                  columns=columns, limit=limit, geom_srid=geom_srid, intersection=intersection, intersection_area=intersection_area)
        return self._return(connection=connection, result=result, keep_open=keep_open, commit=False, parsing_model=parsing_model)

    def _intersects(self, table: Table, geom: WKBElement, polygon_column: Column, polygon_column_srid: int, connection: Connection = None, tolerance: float = 0.0, columns: list[str] = None, where: dict[str, Any] = None,
                    not_where: dict[str, Any] = None, limit: int = 0, geom_srid: int = 4326, intersection: bool = False, intersection_area: bool = False):
        if where is None:
            where = {}
        if not_where is None:
            not_where = {}

        stmt = self._select(table, columns)

        # Geometry transformed/buffered to match the polygon column's SRID.
        intersecting_geom = self._geom_condition(geom=geom, geom_srid=geom_srid, polygon_column_srid=polygon_column_srid, buffer=tolerance)

        if intersection:
            stmt = stmt.add_columns(polygon_column.ST_Intersection(intersecting_geom).label("intersection"))
        if intersection_area:
            # Area computed after projecting to EPSG:3857.
            stmt = stmt.add_columns(polygon_column.ST_Intersection(intersecting_geom).ST_Transform(3857).ST_Area().label("intersection_area"))

        eq_where = [self._eq_where(table.c[column], condition) for column, condition in where.items()]
        eq_where.extend([self._eq_where(table.c[column], condition, True) for column, condition in not_where.items()])

        geom_condition = polygon_column.ST_Intersects(intersecting_geom)

        eq_where.append(geom_condition)

        stmt = stmt.where(*eq_where)

        stmt = self._add_limit_offset_condition(stmt, limit)
        return self.execute(connection=connection, stmt=stmt, mapping=True)

    def contains(self, from_table: str, geom: WKBElement, polygon_column_name: str, polygon_column_srid: int, connection: Connection = None, tolerance: float = 0.0, columns: list[str] = None,
                 where: dict[str, Any] = None, limit: int = 0, contained: bool = False, geom_srid: int = 4326, parsing_model: Type[BaseModel] | BaseModel = None):
        """SELECT rows whose *polygon_column_name* contains *geom* (or, with
        contained=True, rows whose geometry is contained by *geom*)."""

        keep_open = False if connection is None else True
        connection = self.open_connection if connection is None else connection
        table = self.get_table(name=from_table)
        polygon_column = table.c[polygon_column_name]
        result = self._contains(table=table, connection=connection, polygon_column=polygon_column, polygon_column_srid=polygon_column_srid, geom=geom, tolerance=tolerance, where=where, contained=contained,
                                columns=columns, limit=limit, geom_srid=geom_srid)
        return self._return(connection=connection, result=result, keep_open=keep_open, commit=False, parsing_model=parsing_model)

    def _contains(self, table: Table, geom: WKBElement, polygon_column: Column, polygon_column_srid: int, connection: Connection = None, tolerance: float = 0.0, columns: list[str] = None, where: dict[str, Any] = None,
                  limit: int = 0, contained: bool = False, geom_srid: int = 4326):
        if columns is None:
            columns = []
        if where is None:
            where = {}

        stmt = self._select(table, columns)
        eq_where = [self._eq_where(table.c[column], condition) for column, condition in where.items()]

        geom_condition = self._geom_condition(geom=geom, geom_srid=geom_srid, polygon_column_srid=polygon_column_srid, buffer=tolerance)
        # Direction of the containment test depends on *contained*.
        geom_condition = polygon_column.ST_Contains(geom_condition) if not contained else geom_condition.ST_Contains(polygon_column)

        eq_where.append(geom_condition)

        stmt = stmt.where(*eq_where)
        stmt = self._add_limit_offset_condition(stmt, limit)
        return self.execute(connection=connection, stmt=stmt, mapping=True)

    def _geom_condition(self, geom: WKBElement, polygon_column_srid: int = 4326, geom_srid: int = 4326, buffer: float = 0):
        # Re-project only when SRIDs differ; buffer only when non-zero.
        geom_condition = geom.ST_Transform(polygon_column_srid) if geom_srid != polygon_column_srid else geom
        geom_condition = geom_condition.ST_Buffer(buffer) if buffer else geom_condition

        return geom_condition
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
__all__ = ["CrudRepository"]
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
import math
|
|
5
|
+
from typing import Any, Type, Sequence, Iterable, Optional, Mapping
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel
|
|
8
|
+
from sqlalchemy import select, delete, update, insert, Row, RowMapping, ColumnElement, Select, Insert, Update, Delete
|
|
9
|
+
from sqlalchemy.engine.base import Engine
|
|
10
|
+
from sqlalchemy.exc import DataError, IntegrityError, NoResultFound, OperationalError
|
|
11
|
+
from sqlalchemy.inspection import inspect
|
|
12
|
+
from sqlalchemy.orm import Session, load_only, defer
|
|
13
|
+
from sqlalchemy.sql.functions import count
|
|
14
|
+
|
|
15
|
+
from .handler.base_handler import BaseHandler
|
|
16
|
+
from ...exception.crud_exceptions import HasNoAttributeException, EntityNotFoundException, ServiceUnavailableException, BadRequestException
|
|
17
|
+
from ...interface.base.declarative_base import Base
|
|
18
|
+
from ...paginator_dto import PaginatorResponseModel
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class CrudRepository[Entity: Base | Type[Base]]:
|
|
22
|
+
__slots__ = "model", "entity", "engine", "columns", "handler", "relationship_attributes"
|
|
23
|
+
|
|
24
|
+
model: Type[BaseModel] | BaseModel
|
|
25
|
+
entity: Entity
|
|
26
|
+
engine: Type[Engine] | Engine
|
|
27
|
+
handler: Type[BaseHandler] | BaseHandler
|
|
28
|
+
|
|
29
|
+
    @property
    def open_session(self):
        # New ORM Session bound to the repository's engine; the caller owns
        # (and must eventually close) it.
        return Session(self.engine)
|
32
|
+
|
|
33
|
+
    def __init__(self, entity: Entity, model: Type[BaseModel] | BaseModel, engine: Type[Engine] | Engine, handler_class: Type[BaseHandler] | BaseHandler = None) -> None:
        """Bind the repository to a mapped *entity*, its pydantic *model* and an *engine*.

        handler_class: optional BaseHandler subclass used to build WHERE and
        limit/offset clauses; defaults to BaseHandler over the entity's columns.
        """
        self.entity = entity
        self.model = model
        self.engine = engine
        self.columns = self.entity.columns()

        self.handler = handler_class(self.columns) if handler_class else BaseHandler(self.columns)
|
|
40
|
+
|
|
41
|
+
# -------------------------
|
|
42
|
+
|
|
43
|
+
    def _handle_exception(self, session: Session, e: Exception):
        """Close the session, log *e*, and re-raise it translated to the
        package's HTTP exceptions where a mapping exists."""
        session.close()
        logging.error(e)
        match e:
            case OperationalError():
                # Database unreachable / connection-level failure.
                raise ServiceUnavailableException()
            case NoResultFound():
                raise EntityNotFoundException()
            case DataError() | IntegrityError():
                # Bad values or constraint violation in the client's data.
                raise BadRequestException()
            case _:
                raise e
|
55
|
+
|
|
56
|
+
    def commit(self, session: Session):
        """Commit the session, routing failures through _handle_exception."""
        try:
            session.commit()
        except Exception as e:
            self._handle_exception(session, e)
|
|
61
|
+
|
|
62
|
+
def _entity_has_columns(self, *columns: Iterable[str], entity: Type[Base] | Base = None):
|
|
63
|
+
return all([column in entity.columns_names() for column in columns])
|
|
64
|
+
|
|
65
|
+
    def _select(self, entity: Type[Base] | Base, columns: list[str] = None, exclude_columns: list[str] = None):
        """Build a SELECT over *entity*, optionally loading only *columns* or
        deferring *exclude_columns*.

        Raises HasNoAttributeException when a requested column does not exist
        on the entity (surfaces as the AttributeError from getattr).
        """
        try:
            stmt = select(entity)
            if columns:
                # Relationship names cannot be passed to load_only; keep only
                # plain column attributes.
                relationship_attributes = set(entity.__mapper__.relationships.keys())
                entity_columns = {column for column in columns if column not in relationship_attributes}
                stmt = stmt.options(load_only(*[getattr(self.entity, column) for column in entity_columns]))
            if exclude_columns:
                stmt = stmt.options(defer(*[getattr(self.entity, column) for column in exclude_columns]))
        except AttributeError:
            raise HasNoAttributeException()
        return stmt
|
|
77
|
+
|
|
78
|
+
    def _return(self, session: Session, entities: Sequence[Row] | Sequence[Row | RowMapping | Type[Base]], keep_open: bool = False,
                commit: bool = True, parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity] | list[BaseModel]:
        """Finalize a repository call.

        With keep_open the raw entities are returned and the caller keeps
        ownership of the session; otherwise optionally commit, validate the
        entities into pydantic models (parsing_model, or the repository's
        default model), and close the session.
        """
        if keep_open:
            return entities
        else:
            if commit:
                self.commit(session=session)
            return_model = parsing_model or self.model
            models = [return_model.model_validate(entity) for entity in entities]
            session.close()
            return models
|
|
89
|
+
|
|
90
|
+
def create(self):
|
|
91
|
+
if not inspect(self.engine).has_table(table_name=self.entity.__table__.name, schema=self.entity.__table__.schema):
|
|
92
|
+
try:
|
|
93
|
+
self.entity.__table__.create(self.engine)
|
|
94
|
+
except Exception as e:
|
|
95
|
+
logging.error(f"{self.entity.__table__.name} creation error - {repr(e)}")
|
|
96
|
+
else:
|
|
97
|
+
pass
|
|
98
|
+
|
|
99
|
+
    def find(self, session: Session = None, where: dict[str, Any] = None, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 0, parsing_model: Type[BaseModel] = None, **kwhere) -> list[Entity]:
        """SELECT entities matching equality filters.

        where / **kwhere: column -> value filters (kwhere applies only when
        where is None). columns / exclude_columns: restrict loaded columns.
        limit: max rows (0 = unlimited). When *session* is supplied it stays
        open and raw entities are returned; otherwise results are parsed and
        the session is closed.
        """
        keep_open = False if session is None else True
        session = self.open_session if session is None else session
        entities = self._find(session=session, where=where, columns=columns, exclude_columns=exclude_columns, limit=limit, **kwhere)
        return self._return(session=session, entities=entities, keep_open=keep_open, commit=False, parsing_model=parsing_model)

    def _find(self, session: Session, where: dict[str, Any] = None, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 0, **kwhere):
        # kwhere is only honored when no explicit where dict was given.
        if where is None:
            where = kwhere if kwhere else {}
        stmt = self._select(entity=self.entity, columns=columns, exclude_columns=exclude_columns)
        stmt = self.handler.prepare_statement(stmt=stmt, **where)
        stmt = self.handler.limit_offset_condition(stmt=stmt, limit=limit)
        return self.execute(session, stmt)
|
|
112
|
+
|
|
113
|
+
    def insert(self, session: Session = None, models: list[Type[BaseModel] | BaseModel] = None, returning: bool = True, parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity]:
        """INSERT rows built from pydantic *models* (None-valued fields are
        excluded so database defaults apply)."""
        keep_open = False if session is None else True
        session = self.open_session if session is None else session
        entities = self._insert(session=session, models=models, returning=returning)
        return self._return(session=session, entities=entities, keep_open=keep_open, commit=True, parsing_model=parsing_model)

    def _insert(self, session: Session, models: list[Type[BaseModel] | BaseModel] = None, returning: bool = True):
        if models is None:
            models = []
        values = [model.model_dump(exclude_none=True) for model in models]
        if values:
            stmt = insert(self.entity)
            # RETURNING the full entity so _return can validate the rows.
            stmt = stmt if not returning else stmt.returning(self.entity)
            return self.execute(session=session, stmt=stmt, values=values)
        else:
            return []
|
|
129
|
+
|
|
130
|
+
    def add(self, session: Session = None, entities: list[Type[Base]] = None, parsing_model: Type[BaseModel] | BaseModel = None):
        """Add already-constructed ORM *entities* via the unit of work
        (ORM-level counterpart of insert(), which uses a Core statement).

        NOTE(review): entities=None would fail inside add_all — callers are
        expected to pass a list; confirm intended contract.
        """
        keep_open = False if session is None else True
        session = self.open_session if session is None else session
        try:
            session.add_all(entities)
        except Exception as e:
            self._handle_exception(session, e)
        return self._return(session=session, entities=entities, keep_open=keep_open, commit=True, parsing_model=parsing_model)
|
|
138
|
+
|
|
139
|
+
    def update(self, session: Session = None, where: dict[str, Any] = None, returning: bool = True, parsing_model: Type[BaseModel] | BaseModel = None, **kwargs) -> list[Entity]:
        """UPDATE entities matching *where*, setting **kwargs column values."""
        keep_open = False if session is None else True
        session = self.open_session if session is None else session
        entities = self._update(session=session, where=where, returning=returning, **kwargs)
        return self._return(session=session, entities=entities, keep_open=keep_open, commit=True, parsing_model=parsing_model)

    def _update(self, session: Session, where: dict[str, Any] = None, returning: bool = True, **kwargs):
        # Empty where -> statement updates every row of the table.
        if where is None:
            where = {}
        stmt = update(self.entity)
        stmt = self.handler.prepare_statement(stmt=stmt, **where)

        stmt = stmt.values(**kwargs)
        stmt = stmt if not returning else stmt.returning(self.entity)
        return self.execute(session, stmt)
|
|
154
|
+
|
|
155
|
+
    def delete(self, session: Session = None, where: dict[str, Any] = None, **kwhere) -> int:
        """DELETE entities matching the filters; returns the affected row count.

        NOTE(review): when no rows match and the session is repository-owned,
        the session is returned without being closed — confirm this is intended.
        """
        keep_open = False if session is None else True
        session = self.open_session if session is None else session
        num_rows = self._delete(session=session, where=where, **kwhere)
        if not num_rows or keep_open:
            return num_rows
        else:
            self.commit(session)
            session.close()
            return num_rows

    def _delete(self, session: Session, where: dict[str, Any] = None, **kwhere):
        if where is None:
            where = kwhere if kwhere else {}
        stmt = delete(self.entity)
        stmt = self.handler.prepare_statement(stmt=stmt, **where)
        try:
            result = session.execute(stmt)
        except Exception as e:
            self._handle_exception(session, e)
        else:
            return result.rowcount
|
|
177
|
+
|
|
178
|
+
def upsert(self, session: Session = None, models: list[Type[BaseModel] | BaseModel] = None, parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity]:
    """Insert new models and update the ones that already exist, in one call.

    Session ownership follows the usual convention: a session passed by the
    caller stays open, an internally opened one is committed and closed by
    ``_return``.
    """
    keep_open = session is not None
    if session is None:
        session = self.open_session
    entities = self._upsert(session=session, models=models)
    return self._return(session=session, entities=entities, keep_open=keep_open, commit=True, parsing_model=parsing_model)
|
|
183
|
+
|
|
184
|
+
def _upsert(self, session: Session, models: list[Type[BaseModel] | BaseModel] = None):
    """Split *models* into inserts and in-place updates and apply both.

    A model whose primary-key attributes are all set is looked up with
    ``session.get``; if the row exists, its non-None model fields are copied
    onto the managed entity.  Models with missing primary keys — or whose
    keys do not match any existing row — are inserted instead.

    Bug fixes vs. the original: a model with complete PKs but no matching
    row was neither inserted nor updated, and the resulting ``None`` from
    ``session.get`` was extended into the returned entity list.
    """
    if models is None:
        models = []
    updates: dict[int, Any] = {}
    for i, model in enumerate(models):
        pks = {pk.name: getattr(model, pk.name) for pk in self.entity.primary_key()}
        if all(pks.values()):
            existing = session.get(self.entity, pks)
            if existing is not None:
                updates[i] = existing

    # everything not matched to an existing row gets inserted
    inserts = [model for i, model in enumerate(models) if i not in updates]
    result = self._insert(session=session, models=inserts)

    for i, entity in updates.items():
        for attr, value in models[i].model_dump(exclude_none=True).items():
            setattr(entity, attr, value)

    result.extend(updates.values())
    return result
|
|
205
|
+
|
|
206
|
+
def count(self, session: Session = None, where: dict[str, Any] = None, **kwhere):
    """Count rows matching the filters, closing the session if owned here."""
    keep_open = session is not None
    if session is None:
        session = self.open_session
    total = self._count(session=session, where=where, **kwhere)
    if not keep_open:
        session.close()
    return total
|
|
215
|
+
|
|
216
|
+
def _count(self, session: Session, where: dict[str, Any] = None, **kwhere):
    """SELECT count(pk) with the given filters; return the scalar count."""
    pk = self.entity.primary_key()[0]
    filters = where if where is not None else (kwhere if kwhere else {})
    stmt = self.handler.prepare_statement(stmt=select(count(pk)), **filters)
    first_row = session.execute(stmt).all()[0]
    return first_row[0]
|
|
224
|
+
|
|
225
|
+
def _preconf_count(self, session: Session, where: Optional[ColumnElement[Any]]):
    """Count rows using an already-built SQLAlchemy where-clause expression."""
    pk = self.entity.primary_key()[0]
    stmt = select(count(pk)).where(where)
    first_row = session.execute(stmt).all()[0]
    return first_row[0]
|
|
232
|
+
|
|
233
|
+
def paging_find(self, session: Session = None, where: dict[str, Any] = None, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 0, page: int = 1, order_by: str = None, asc: bool = True,
                parsing_model: Type[BaseModel] = None, **kwhere) -> PaginatorResponseModel[Entity]:
    """Paginated find: returns a PaginatorResponseModel with data + page info.

    When the session is owned by this call, the entities are validated into
    pydantic models (so they stay usable after the session closes) and the
    session is closed; with a caller-provided session, raw entities are
    returned and the session is left open.
    """
    keep_open = session is not None
    if session is None:
        session = self.open_session
    paginated = self._paging_find(session=session, where=where, columns=columns, exclude_columns=exclude_columns, limit=limit, page=page, order_by=order_by, asc=asc, **kwhere)
    if not keep_open:
        model_cls = parsing_model if parsing_model else self.model
        paginated.data = [model_cls.model_validate(entity) for entity in paginated.data]
        session.close()
    return paginated
|
|
242
|
+
|
|
243
|
+
def _paging_find(self, session: Session, where: dict[str, Any] = None, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 0, page: int = 1, order_by: str = None, asc: bool = True, **kwhere):
    """Run the filtered SELECT with limit/offset and wrap it in a paginator.

    The total row count is taken from the statement's where-clause BEFORE
    limit/offset are applied, so the page count reflects the whole filtered
    set rather than a single page.
    """
    filters = where if where is not None else (kwhere if kwhere else {})
    stmt = self._select(entity=self.entity, columns=columns, exclude_columns=exclude_columns)
    stmt = self.handler.prepare_statement(stmt=stmt, **filters)

    total = self._preconf_count(session=session, where=stmt.whereclause)

    stmt = self.handler.limit_offset_condition(stmt=stmt, limit=limit, page=page)
    if order_by:
        stmt = self.handler.order_by_condition(stmt=stmt, order_by=order_by, asc=asc)

    rows = self.execute(session, stmt)
    return self.parse_pagination(limit=limit, page=page, order_by=order_by, count=total, asc=asc, data=rows)
|
|
257
|
+
|
|
258
|
+
def parse_pagination(self, limit: int, page: int, order_by: str, asc: bool, count: int, data: Sequence[Row] | Sequence[Row | RowMapping] | Sequence[RowMapping]) -> PaginatorResponseModel:
    """Wrap a result page in a PaginatorResponseModel.

    A falsy *limit* defaults to the page size actually returned; a falsy
    *order_by* defaults to the entity's first primary-key column name.
    """
    effective_limit = limit if limit else len(data)
    effective_order = order_by if order_by else self.entity.primary_key()[0].name
    # no count or a non-positive limit means everything fits on one page
    pages = math.ceil(count / effective_limit) if count and effective_limit > 0 else 1
    return PaginatorResponseModel(limit=effective_limit, page=page, order_by=effective_order, asc=asc, tot_pages=pages, data=data)
|
|
264
|
+
|
|
265
|
+
def execute(self, session: Session, stmt: Select | Insert | Update | Delete, values: Sequence[Mapping[str, Any]] | Mapping[str, Any] = None):
    """Execute *stmt* (optionally with bound *values*) and return the scalars.

    Execution failures are delegated to ``_handle_exception``; on success the
    result's scalar rows are materialized and returned.
    """
    try:
        result = session.execute(statement=stmt, params=values)
    except Exception as e:
        self._handle_exception(session, e)
    else:
        return result.scalars().all()
|
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
__all__ = ["GeometryRepository"]
|
|
2
|
+
|
|
3
|
+
from typing import Any, Type
|
|
4
|
+
|
|
5
|
+
from geoalchemy2 import WKBElement, Geometry
|
|
6
|
+
from pydantic import BaseModel
|
|
7
|
+
from sqlalchemy import literal_column, Column
|
|
8
|
+
from sqlalchemy.orm import Session
|
|
9
|
+
|
|
10
|
+
from .crud_repository import CrudRepository
|
|
11
|
+
from ..base import Base
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class GeometryRepository[Entity: Base | Type[Base]](CrudRepository[Entity]):
|
|
15
|
+
__slots__ = "geom_column"
|
|
16
|
+
geom_column: Column[Geometry]
|
|
17
|
+
|
|
18
|
+
@property
|
|
19
|
+
def geom_srid(self):
|
|
20
|
+
return self.geom_column.type.srid
|
|
21
|
+
|
|
22
|
+
def intersects(self, geom: WKBElement, session: Session = None, tolerance: float = 0.0, columns: list[str] = None, exclude_columns: list[str] = None, where: dict[str, Any] = None, not_where: dict[str, Any] = None,
|
|
23
|
+
limit: int = 0, geom_srid: int = 4326, parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity]:
|
|
24
|
+
|
|
25
|
+
keep_open = False if session is None else True
|
|
26
|
+
session = self.open_session if session is None else session
|
|
27
|
+
entities = self._intersects(session=session, geom=geom, tolerance=tolerance, where=where, not_where=not_where, columns=columns, exclude_columns=exclude_columns, limit=limit, geom_srid=geom_srid)
|
|
28
|
+
return self._return(session=session, entities=entities, keep_open=keep_open, commit=False, parsing_model=parsing_model)
|
|
29
|
+
|
|
30
|
+
def _intersects(self, session: Session, geom: WKBElement, tolerance: float = 0.0, columns: list[str] = None, exclude_columns: list[str] = None, where: dict[str, Any] = None, not_where: dict[str, Any] = None,
|
|
31
|
+
limit: int = 0, geom_srid: int = 4326):
|
|
32
|
+
if where is None:
|
|
33
|
+
where = {}
|
|
34
|
+
if not_where is None:
|
|
35
|
+
not_where = {}
|
|
36
|
+
|
|
37
|
+
stmt = self._select(entity=self.entity, columns=columns, exclude_columns=exclude_columns)
|
|
38
|
+
|
|
39
|
+
intersecting_geom = self._geom_condition(geom=geom, geom_srid=geom_srid, buffer=tolerance)
|
|
40
|
+
|
|
41
|
+
stmt = self.handler.prepare_statement(stmt=stmt, **where)
|
|
42
|
+
stmt = self.handler.prepare_statement(stmt=stmt, neq=True, **not_where)
|
|
43
|
+
|
|
44
|
+
stmt = stmt.where(self.geom_column.ST_Intersects(intersecting_geom))
|
|
45
|
+
stmt = self.handler.limit_offset_condition(stmt=stmt, limit=limit)
|
|
46
|
+
|
|
47
|
+
return self.execute(session, stmt)
|
|
48
|
+
|
|
49
|
+
def contains(self, geom: WKBElement, session: Session = None, tolerance: float = 0.0, columns: list[str] = None, exclude_columns: list[str] = None, where: dict[str, Any] = None, limit: int = 0,
|
|
50
|
+
contained: bool = False, geom_srid: int = 4326, parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity]:
|
|
51
|
+
|
|
52
|
+
keep_open = False if session is None else True
|
|
53
|
+
session = self.open_session if session is None else session
|
|
54
|
+
entities = self._contains(session=session, geom=geom, tolerance=tolerance, where=where, contained=contained, columns=columns, exclude_columns=exclude_columns, limit=limit, geom_srid=geom_srid)
|
|
55
|
+
return self._return(session=session, entities=entities, keep_open=keep_open, commit=False, parsing_model=parsing_model)
|
|
56
|
+
|
|
57
|
+
def _contains(self, session: Session, geom: WKBElement, tolerance: float = 0.0, columns: list[str] = None, exclude_columns: list[str] = None, where: dict[str, Any] = None, limit: int = 0, contained: bool = False,
|
|
58
|
+
geom_srid: int = 4326):
|
|
59
|
+
if where is None:
|
|
60
|
+
where = {}
|
|
61
|
+
|
|
62
|
+
stmt = self._select(entity=self.entity, columns=columns, exclude_columns=exclude_columns)
|
|
63
|
+
stmt = self.handler.prepare_statement(stmt=stmt, **where)
|
|
64
|
+
|
|
65
|
+
geom_condition = self._geom_condition(geom=geom, geom_srid=geom_srid, buffer=tolerance)
|
|
66
|
+
geom_condition = self.geom_column.ST_Contains(geom_condition) if not contained else geom_condition.ST_Contains(self.geom_column)
|
|
67
|
+
|
|
68
|
+
stmt = stmt.where(geom_condition)
|
|
69
|
+
|
|
70
|
+
stmt = self.handler.limit_offset_condition(stmt=stmt, limit=limit)
|
|
71
|
+
return self.execute(session, stmt)
|
|
72
|
+
|
|
73
|
+
def nearest(self, geom: WKBElement, session: Session = None, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 1, where: dict[str, Any] = None, geom_srid: int = 4326,
|
|
74
|
+
parsing_model: Type[BaseModel] | BaseModel = None) -> list[Entity]:
|
|
75
|
+
keep_open = False if session is None else True
|
|
76
|
+
session = self.open_session if session is None else session
|
|
77
|
+
entities = self._nearest(session=session, geom=geom, where=where, columns=columns, exclude_columns=exclude_columns, limit=limit, geom_srid=geom_srid)
|
|
78
|
+
return self._return(session=session, entities=entities, keep_open=keep_open, commit=False, parsing_model=parsing_model)
|
|
79
|
+
|
|
80
|
+
def _nearest(self, session: Session, geom: WKBElement, columns: list[str] = None, exclude_columns: list[str] = None, limit: int = 1, where: dict[str, Any] = None, geom_srid: int = 4326, distance: bool = True):
|
|
81
|
+
if where is None:
|
|
82
|
+
where = {}
|
|
83
|
+
stmt = self._select(entity=self.entity, columns=columns, exclude_columns=exclude_columns)
|
|
84
|
+
|
|
85
|
+
geom_condition = self._geom_condition(geom=geom, geom_srid=geom_srid)
|
|
86
|
+
|
|
87
|
+
distance_col = self.geom_column.ST_Distance(geom_condition).label("distance")
|
|
88
|
+
|
|
89
|
+
if distance:
|
|
90
|
+
stmt = stmt.add_columns(distance_col)
|
|
91
|
+
|
|
92
|
+
stmt = self.handler.prepare_statement(stmt=stmt, **where)
|
|
93
|
+
stmt = stmt.order_by(literal_column("distance"))
|
|
94
|
+
stmt = self.handler.limit_offset_condition(stmt=stmt, limit=limit)
|
|
95
|
+
|
|
96
|
+
rows = self.execute(session, stmt)
|
|
97
|
+
return rows
|
|
98
|
+
|
|
99
|
+
def _geom_condition(self, geom: WKBElement, geom_srid: int = 4326, buffer: float = 0):
|
|
100
|
+
geom_condition = geom.ST_Transform(self.geom_srid) if geom_srid != self.geom_srid else geom
|
|
101
|
+
geom_condition = geom_condition.ST_Buffer(buffer) if buffer else geom_condition
|
|
102
|
+
|
|
103
|
+
return geom_condition
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
__all__ = ["BaseHandler"]
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from sqlalchemy import Select, Update, Delete, Column, not_
|
|
6
|
+
from sqlalchemy.sql.base import ReadOnlyColumnCollection
|
|
7
|
+
|
|
8
|
+
from ....exception.crud_exceptions import HasNoAttributeException
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class BaseHandler:
    """Turns keyword filters into SQLAlchemy where/order/limit clauses."""

    __slots__ = "columns"

    def __init__(self, columns: ReadOnlyColumnCollection[str, Column[Any]]):
        self.columns = columns

    def prepare_statement(self, stmt: Select | Update | Delete, neq: bool = False, **where):
        """Append one equality (or inequality, when *neq*) condition per filter key."""
        conditions = [self.handle(column=self.columns[name], condition=value, neq=neq) for name, value in where.items()]
        return stmt.where(*conditions)

    def order_by_condition(self, stmt: Select, order_by: str, asc: bool):
        """Order by the named column; raises HasNoAttributeException for unknown names."""
        try:
            column = self.columns[order_by]
        except KeyError:
            raise HasNoAttributeException(order_by)
        return stmt.order_by(column.asc() if asc else column.desc())

    def limit_offset_condition(self, stmt: Select, limit: int, page: int = None):
        """Apply LIMIT and, when a positive *page* is given, the matching OFFSET."""
        if limit and limit > 0:
            stmt = stmt.limit(limit)
            if page and page > 0:
                stmt = stmt.offset((page - 1) * limit)
        return stmt

    def handle(self, column: Column, condition: Any, neq: bool = False):
        """Build ``col == value``, ``col IN (...)`` for lists, or ANY() for ARRAY columns."""
        if isinstance(condition, list):
            clause = column.in_(condition)
        elif str(column.type) != "ARRAY":
            clause = column == condition
        else:
            clause = condition == column.any_()
        return not_(clause) if neq else clause
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
__all__ = ["ILikeHandler"]
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from sqlalchemy import Column, Select, Update, Delete, or_
|
|
6
|
+
from sqlalchemy.sql.base import ReadOnlyColumnCollection
|
|
7
|
+
|
|
8
|
+
from .base_handler import BaseHandler
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class ILikeHandler(BaseHandler):
    """BaseHandler that additionally turns selected filter keys into ILIKE searches.

    Filters whose key appears in ``ilike_aliases`` are removed from the
    equality set and matched instead with a case-insensitive ``%value%``
    search OR-ed over every column named in ``ilike_attributes``.
    """

    __slots__ = "ilike_aliases", "ilike_attributes"
    ilike_aliases: list[str]
    ilike_attributes: list[str]

    def __init__(self, columns: ReadOnlyColumnCollection[str, Column[Any]], ilike_aliases: list[str] = None, ilike_attributes: list[str] = None):
        super().__init__(columns)
        # BUG FIX: the original conditions were inverted ("x if not x else []"),
        # which discarded any provided list and stored None when nothing was passed.
        self.ilike_aliases = ilike_aliases if ilike_aliases else []
        self.ilike_attributes = ilike_attributes if ilike_attributes else []

    def prepare_statement(self, stmt: Select | Update | Delete, **where):
        """Pop the ILIKE aliases out of *where*, then apply both filter kinds."""
        ilike_values = self.retrieve_ilike_values(where)
        ilike_conditions = [self._ilike_condition(ilike_value) for _, ilike_value in ilike_values]

        stmt = super().prepare_statement(stmt=stmt, **where)
        stmt = stmt.where(*ilike_conditions)
        return stmt

    def _ilike_condition(self, condition):
        # one OR clause spanning every configured searchable column
        return or_(*[self.columns[col].ilike(f'%{condition}%') for col in self.ilike_attributes])

    def retrieve_ilike_values(self, where: dict[str, Any]):
        """Remove alias keys from *where* (in place) and return (alias, value) pairs."""
        ilike_values = []
        for alias in self.ilike_aliases:
            condition = where.pop(alias, None)
            if condition:
                ilike_values.append((alias, condition))
        return ilike_values
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import Column, Select, Update, Delete, or_
|
|
4
|
+
from sqlalchemy.sql.base import ReadOnlyColumnCollection
|
|
5
|
+
|
|
6
|
+
from .base_handler import BaseHandler
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class IntervalHandler(BaseHandler):
    """BaseHandler for [from_, to_] range filters over two columns.

    ``from_`` / ``to_`` name both the filter keys and the columns bounding the
    interval.  When ``inf_is_none`` is set, a NULL upper-bound column is
    treated as +infinity and still satisfies the upper-bound filter.
    """

    __slots__ = "from_", "to_", "inf_is_none"
    from_: Any
    to_: Any
    inf_is_none: bool

    def __init__(self, columns: ReadOnlyColumnCollection[str, Column[Any]], from_: Any = None, to_: Any = None, inf_is_none: bool = None):
        super().__init__(columns)
        self.from_ = from_
        self.to_ = to_
        self.inf_is_none = inf_is_none

    def prepare_statement(self, stmt: Select | Update | Delete, **where):
        from_condition = where.pop(self.from_, None)
        # BUG FIX: the original popped self.from_ a second time (already
        # consumed), so the upper-bound filter could never be applied.
        to_condition = where.pop(self.to_, None)

        stmt = super().prepare_statement(stmt=stmt, **where)
        if from_condition:
            stmt = stmt.where(self.columns[self.from_] >= from_condition)
        if to_condition:
            condition = self.columns[self.to_] <= to_condition
            if self.inf_is_none:
                # open-ended rows (NULL upper bound) also satisfy the filter
                condition = or_(condition, self.columns[self.to_].is_(None))
            stmt = stmt.where(condition)
        return stmt
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
__all__ = ["ManyToManyRepository"]
|
|
2
|
+
|
|
3
|
+
from typing import Type, Any
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel
|
|
6
|
+
from sqlalchemy.orm import Session
|
|
7
|
+
|
|
8
|
+
from .crud_repository import CrudRepository
|
|
9
|
+
from ...interface.base import Base
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ManyToManyRepository[Entity: Base | Type[Base]](CrudRepository[Entity]):
    """CrudRepository helpers that synchronise a parent's children collection
    with an incoming list of pydantic models (insert/update/delete the diff).

    The ``diff_update*`` methods match entities and models POSITIONALLY
    (index i of the relationship is updated from index i of the models list).
    """

    def diff_upsert_from_parent(self, session: Session, parent: Type[Base], children: list[Type[BaseModel]], relationship_attribute: str, child_fks: dict[str, Any], child_pk: str = "id") -> list[Entity]:
        """Delete children no longer referenced by *children*, stamp *child_fks*
        onto the incoming models, then upsert them.
        """
        # primary keys of the incoming children that already exist in the DB
        ids = {getattr(child, child_pk) for child in children if getattr(child, child_pk)}
        for child in getattr(parent, relationship_attribute):
            if getattr(child, child_pk) not in ids:
                session.delete(child)

        # propagate the parent's foreign keys onto every incoming model
        for model in children:
            for key, value in child_fks.items():
                setattr(model, key, value)

        entities = self.upsert(session=session, models=children)

        return entities

    def diff_update_from_parent(self, session: Session, parent: Type[Base], children: list[Type[BaseModel]], relationship_attribute: str) -> list[Entity]:
        """Positionally sync the parent's relationship with *children*: update
        overlapping slots, then insert the surplus models or delete the
        surplus entities.
        """
        relationship_children = getattr(parent, relationship_attribute)
        current_size = len(relationship_children)
        size_updating = len(children)
        diff = size_updating - current_size

        if diff > 0:
            # more models than entities: update all existing, insert the tail
            self._apply_updates(entities=relationship_children, models=children, count=current_size)
            entities = self.insert(session=session, models=children[current_size:])
            entities.extend(relationship_children)
        elif diff < 0:
            # fewer models: update the prefix, delete the leftover entities
            self._apply_updates(entities=relationship_children, models=children, count=size_updating)
            for cp in relationship_children[size_updating:]:
                session.delete(cp)
            entities = relationship_children[:size_updating]
        else:
            self._apply_updates(entities=relationship_children, models=children, count=current_size)
            entities = relationship_children
        return entities

    def diff_update_left_right(self, session: Session, parent: Type[Base], children: list[Type[BaseModel]], relationship_attribute: str, left_fks: dict[str, Any], right_fks: list[str]) -> list[Entity]:
        """Positionally sync an association (left/right) relationship.

        :param session: active SQLAlchemy session
        :param parent: owning entity whose relationship is synchronised
        :param children: incoming association models
        :param relationship_attribute: name of the parent's relationship attribute
        :param left_fks: attributes to set on the children being inserted
        :param right_fks: attributes to copy onto the children already present
        :return: the synchronised association entities
        """
        size_new = len(children)

        relationship_children = getattr(parent, relationship_attribute)

        size = len(relationship_children)
        entities = []
        if size_new < size:
            # shrink: drop the surplus association rows
            for r_child in relationship_children[size_new:]:
                session.delete(r_child)
        elif size < size_new:
            # grow: stamp the left-side FKs onto the new models and insert them
            for i in range(size, size_new):
                for key, value in left_fks.items():
                    setattr(children[i], key, value)
            entities = self.insert(session=session, models=children[size:size_new])
            size_new = size
        # copy the right-side attributes onto the rows kept from before
        for i in range(size_new):
            for key in right_fks:
                setattr(relationship_children[i], key, getattr(children[i], key))
        entities.extend(relationship_children[:size_new])
        return entities

    def diff_update(self, session: Session, entities: list[Type[Base]], models: list[Type[BaseModel]]) -> list[Entity]:
        """Positionally sync an arbitrary entity list against *models*."""
        current_size = len(entities)
        new_size = len(models)

        diff = new_size - current_size

        if diff > 0:
            # more models than entities: update the overlap, insert the tail
            self._apply_updates(entities=entities, models=models, count=current_size)
            entities.extend(self.insert(session=session, models=models[current_size:]))
        elif diff < 0:
            # fewer models: update the overlap, delete the surplus entities
            self._apply_updates(entities=entities, models=models, count=new_size)
            for entity in entities[new_size:]:
                session.delete(entity)
            entities = entities[:new_size]
        else:
            self._apply_updates(entities=entities, models=models, count=current_size)
        return entities

    def _apply_updates(self, entities: list[Type[Base]], models: list[Type[BaseModel]], count: int):
        """Copy the first *count* models' non-None fields onto the entities pairwise."""
        for i in range(count):
            for attr, value in models[i].model_dump(exclude_none=True).items():
                setattr(entities[i], attr, value)
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
__all__ = ["ViewRepository"]
|
|
2
|
+
|
|
3
|
+
import logging
|
|
4
|
+
from time import time
|
|
5
|
+
from typing import Type
|
|
6
|
+
|
|
7
|
+
from geoalchemy2 import Geometry
|
|
8
|
+
from sqlalchemy import text
|
|
9
|
+
|
|
10
|
+
from .crud_repository import CrudRepository
|
|
11
|
+
from ..base import Base
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class ViewRepository[Entity: Base | Type[Base]](CrudRepository[Entity]):
    """CrudRepository over a database view (optionally materialized).

    Subclasses set ``select_stmt`` (the view's defining SELECT) and
    ``materialized``; reads go through the inherited CRUD methods.
    """

    select_stmt: str  # SELECT statement used as the view definition
    materialized: bool = False  # True -> "materialized view" DDL is emitted

    @property
    def view_name(self):
        # fully qualified "schema.name" taken from the mapped entity
        return f"{self.entity.__table_args__['schema']}.{self.entity.__tablename__}"

    def create(self, drop_before: bool = False, visualizer_db_role: str = None):
        """Create (or replace) the view.

        For materialized views a unique index on the primary key is added so
        ``refresh ... concurrently`` works; geometry-bearing views are granted
        SELECT to *visualizer_db_role* when given.

        NOTE(review): failures are only logged, never re-raised — callers
        cannot tell that creation failed. Presumably deliberate best-effort
        behaviour at startup; confirm before relying on it.
        """
        logging.info(f"Creating view {self.view_name}")
        start = time()
        stmts = []

        try:
            if drop_before:
                drop_stmt = f"drop materialized view if exists {self.view_name} cascade;" if self.materialized else f"drop view if exists {self.view_name} cascade;"
                stmts.append(drop_stmt)
            create_stmt = f"create materialized view if not exists {self.view_name} as {self.select_stmt};" if self.materialized else f"create or replace view {self.view_name} as {self.select_stmt};"
            stmts.append(create_stmt)
            if self.materialized:
                # unique index is required by "refresh materialized view concurrently"
                unique_id_index = f"create unique index on {self.view_name} ({self.entity.primary_key()[0].name});"
                stmts.append(unique_id_index)
            if visualizer_db_role and any([isinstance(col.type, Geometry) for col in self.entity.columns()]):
                grant_visualized = f"grant select on table {self.view_name} to {visualizer_db_role}"
                stmts.append(grant_visualized)

            with self.open_session as session:
                for stmt in stmts:
                    session.execute(text(stmt))
                    session.flush()
                session.commit()
            logging.info(f"Created {self.view_name} in {time() - start}")
        except Exception as e:
            logging.error(f"{self.entity.__table__.name} creation error - {repr(e)}")

    def refresh_materialized_view(self, concurrently: bool = True):
        """Refresh the materialized view (concurrently by default, which
        requires the unique index created by :meth:`create`)."""
        logging.info(f"Refreshing {self.view_name}")
        start = time()
        stmt_str = f"refresh materialized view concurrently {self.view_name}" if concurrently else f"refresh materialized view {self.view_name}"
        stmt = text(stmt_str)
        with self.open_session as session:
            session.execute(stmt)
            session.commit()
        logging.info(f"Refreshed {self.view_name} in {time() - start}")
|
general/log_config.ini
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
[loggers]
|
|
2
|
+
keys = root
|
|
3
|
+
|
|
4
|
+
[handlers]
|
|
5
|
+
keys = logconsole
|
|
6
|
+
|
|
7
|
+
[formatters]
|
|
8
|
+
keys = logformatter
|
|
9
|
+
|
|
10
|
+
[logger_root]
|
|
11
|
+
level = INFO
|
|
12
|
+
handlers = logconsole
|
|
13
|
+
|
|
14
|
+
[formatter_logformatter]
|
|
15
|
+
format = [%(asctime)s] %(levelname)s - %(message)s
|
|
16
|
+
|
|
17
|
+
[handler_logconsole]
|
|
18
|
+
class = StreamHandler
|
|
19
|
+
level = INFO
|
|
20
|
+
args = ()
|
|
21
|
+
formatter = logformatter
|
general/logger.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import logging.config
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from time import time
|
|
5
|
+
|
|
6
|
+
from fastapi import HTTPException
|
|
7
|
+
|
|
8
|
+
from .exception.crud_exceptions import InternalServerException
|
|
9
|
+
|
|
10
|
+
__all__ = ["init_logger", "log"]
|
|
11
|
+
|
|
12
|
+
# ANSI escape sequences used to colour console log output
COLOR = {
    "HEADER": "\033[95m",
    "BLUE": "\033[94m",
    "GREEN": "\033[92m",
    "RED": "\033[91m",
    "ENDC": "\033[0m",
}

color = COLOR["BLUE"]      # colour for normal call/return messages
end_color = COLOR["ENDC"]  # reset sequence terminating a coloured span
exc_color = COLOR["RED"]   # colour for exception messages
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class EndpointFilter(logging.Filter):
    """Logging filter that drops any record whose message mentions *endpoint*."""

    def __init__(self, endpoint: str = '', *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.endpoint = endpoint

    def filter(self, record: logging.LogRecord) -> bool:
        # keep the record only when the endpoint does not occur in the message
        return self.endpoint not in record.getMessage()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def init_logger(*filtering_endpoints: str):
    """Configure logging from the packaged ini file and silence the given
    uvicorn access-log endpoints (e.g. health-check paths).

    The config path is resolved relative to this module file so startup does
    not depend on the process working directory (the original used the
    CWD-relative "./general/log_config.ini").
    """
    from pathlib import Path  # local import: keeps the module import surface unchanged

    config_path = Path(__file__).resolve().parent / "log_config.ini"
    logging.config.fileConfig(fname=str(config_path), disable_existing_loggers=True)
    logging.info("Logger set up")
    access_logger = logging.getLogger("uvicorn.access")
    for filtering_endpoint in filtering_endpoints:
        access_logger.addFilter(EndpointFilter(filtering_endpoint))
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def log(func):
    """Decorator: log call, return (with timing) and exceptions of *func*.

    HTTPExceptions raised by the wrapped function are re-raised as-is (with
    their traceback stripped); any other exception is logged and converted
    into an InternalServerException carrying the original repr.
    """
    from functools import wraps  # local import: avoids touching the module import block

    @wraps(func)  # preserve the wrapped function's name/docstring for introspection
    def wrapper(*args, **kwargs):
        func_name = f"{func.__name__}"
        start = time()
        logging.info(f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] {color} {func_name} {end_color}| args: {args} | kwargs: {kwargs}")
        try:
            result = func(*args, **kwargs)
        except Exception as e:
            logging.exception(f"[{datetime.now().strftime('%Y-%m-%d %H:%M')}] Exception {exc_color} {func_name} {end_color}: {repr(e)}")
            if isinstance(e, HTTPException):
                # API errors pass through untouched
                raise e.with_traceback(None)
            # chain the cause so the original error is not lost in logs/debuggers
            raise InternalServerException(repr(e)) from e
        else:
            logging.info(f"{color} {func_name} {end_color} completed in {time() - start} seconds")
            return result

    return wrapper
|
general/paginator_dto.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
__all__ = ["PaginatorRequestModel", "PaginatorResponseModel"]
|
|
2
|
+
|
|
3
|
+
from typing import Optional
|
|
4
|
+
|
|
5
|
+
from pydantic import Field
|
|
6
|
+
|
|
7
|
+
from .interface.base.base_model import ExtBaseModel
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class PaginatorBaseModel(ExtBaseModel):
    """Shared pagination fields; aliases are the external (Italian) API names."""

    page: int = Field(alias="numeroPagina")  # 1-based page number
    limit: Optional[int] = Field(default=None, alias="numeroElementi")  # page size; None = unspecified
    order_by: Optional[str] = Field(default=None, alias="campoOrdinamento")  # sort column name
    asc: bool = Field(alias="crescente")  # True = ascending sort order
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class PaginatorRequestModel(PaginatorBaseModel):
    """Pagination parameters as received in a request (no extra fields)."""
    pass
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class PaginatorResponseModel[ResponseType](PaginatorBaseModel):
    """Pagination response: echoes the request fields plus total pages and data."""

    tot_pages: int = Field(alias="pagineTotali")  # total number of pages available
    data: list[ResponseType]  # payload of the current page
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: python-general-be-lib
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: General purpose backend library — SQLAlchemy CRUD/Geometry repositories, FastAPI exceptions, Pydantic base models, logger utilities.
|
|
5
|
+
Author-email: Andrea Di Placido <a.diplacido@arpes.it>, "Arpes S.r.l." <it.admin@arpes.it>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/Arpes-IS/python-general-be-lib
|
|
8
|
+
Project-URL: Repository, https://github.com/Arpes-IS/python-general-be-lib
|
|
9
|
+
Keywords: backend,sqlalchemy,fastapi,pydantic,repository,crud,geoalchemy2
|
|
10
|
+
Classifier: Development Status :: 3 - Alpha
|
|
11
|
+
Classifier: Intended Audience :: Developers
|
|
12
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
13
|
+
Classifier: Programming Language :: Python :: 3
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
16
|
+
Classifier: Topic :: Database
|
|
17
|
+
Classifier: Topic :: Internet :: WWW/HTTP
|
|
18
|
+
Requires-Python: >=3.12
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
License-File: LICENSE
|
|
21
|
+
Requires-Dist: fastapi>=0.100.0
|
|
22
|
+
Requires-Dist: pydantic>=2.0.0
|
|
23
|
+
Requires-Dist: sqlalchemy>=2.0.0
|
|
24
|
+
Requires-Dist: geoalchemy2>=0.14.0
|
|
25
|
+
Provides-Extra: dev
|
|
26
|
+
Requires-Dist: pytest>=7; extra == "dev"
|
|
27
|
+
Requires-Dist: pytest-cov; extra == "dev"
|
|
28
|
+
Requires-Dist: build; extra == "dev"
|
|
29
|
+
Requires-Dist: twine; extra == "dev"
|
|
30
|
+
Dynamic: license-file
|
|
31
|
+
Dynamic: requires-python
|
|
32
|
+
|
|
33
|
+
# python-general-be-lib
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
general/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
general/log_config.ini,sha256=-uFliGt3D0PYauup7XGJ-B87JF1AvkiS8IakwubNYs8,321
|
|
3
|
+
general/logger.py,sha256=WAghhs-qU3mkN7GFQxHWXv8_iTWltX8z645pp0vYEwU,1776
|
|
4
|
+
general/paginator_dto.py,sha256=5goyGpg5SPAkDaw8yVXmpWB-9M5vlXBD-Tt_GIuKJCc,650
|
|
5
|
+
general/exception/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
|
+
general/exception/access_exceptions.py,sha256=8AV4_gybcdA7mTpA2XiZdl9rRGx08QCjHpBGv60EL4I,300
|
|
7
|
+
general/exception/crud_exceptions.py,sha256=c-VlatTPjEp7FWzA7rDByqfMqsEOworJqqfUJ3s0Z2w,1101
|
|
8
|
+
general/exception/exception_interface.py,sha256=T4bq8jwZ1YFpGlP60TxbvhkcyIrfsjcw5I6Xlxvzjig,352
|
|
9
|
+
general/interface/base/__init__.py,sha256=SDlTi28M8rzJ9yiKefo5l7V3K109LnnBHW-SuZ-5JtM,58
|
|
10
|
+
general/interface/base/base_model.py,sha256=kRJPFxmX55DMjrd7CD6GbW7j9-V1vS6sCjw_RVn72cY,700
|
|
11
|
+
general/interface/base/declarative_base.py,sha256=n6XqkL7UHUwYfI7lJ9MhOJ85E9yn1Uc6GiKIKziju9c,2014
|
|
12
|
+
general/interface/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
13
|
+
general/interface/metadata/crud_metadata.py,sha256=z1fTh77K7fkhDgNus5Xrv4EYerbdlPVl4F1NLbSxqcM,8478
|
|
14
|
+
general/interface/metadata/geom_metadata.py,sha256=CZPzlqos5XuDWn_FeKLxxYqOuGx1Bg9C0xH5-ZR0vXw,5326
|
|
15
|
+
general/interface/repository/__init__.py,sha256=EcRukwf61Ua-9-JfM_ingVv1wXNRV8HBrrWwjj8m23w,136
|
|
16
|
+
general/interface/repository/crud_repository.py,sha256=OR0mlVnoUvRDuxTwvNTEHvmUfgW8aHVuASxVe-RSm70,13577
|
|
17
|
+
general/interface/repository/geometry_repository.py,sha256=Rfmmzlt_U5iai0syS28jMxFpgfsJLO2im0AAiphXwXg,5756
|
|
18
|
+
general/interface/repository/many_to_many_repository.py,sha256=66EOFWdt7iKYmDI-f9MqQRKV1kuiL7NwJts0extL81g,4410
|
|
19
|
+
general/interface/repository/view_repository.py,sha256=amohSpucY20R6IE5ZSi1OpgzkLostZKxfI56eJOP4NY,2482
|
|
20
|
+
general/interface/repository/handler/__init__.py,sha256=sJK6s-siQisHdz8Z3Ymeid4mSyuXt8ysQC6b0cE44OI,57
|
|
21
|
+
general/interface/repository/handler/base_handler.py,sha256=cmi2TOfuMX-CRmBaLzNnC9k-Pua8wd3zLlnByUIGX10,1555
|
|
22
|
+
general/interface/repository/handler/ilike_handler.py,sha256=dDXSsQR7cROh5azT3lt6zOTvNiD_r17_19NE1EQwOrE,1471
|
|
23
|
+
general/interface/repository/handler/interval_handler.py,sha256=c_Ovh_q_M_A7etE4RuW-uNZFXw774_v4pTNsF1joe4A,1173
|
|
24
|
+
python_general_be_lib-0.1.0.dist-info/licenses/LICENSE,sha256=iUaO1XZyB9P3Tmog0OILuTisP6vXGe3QKz-4yRTxOFk,1069
|
|
25
|
+
python_general_be_lib-0.1.0.dist-info/METADATA,sha256=kDs3MuObw93NClCJM22Ot3B3mgff-wzopuC0JkfSyHY,1384
|
|
26
|
+
python_general_be_lib-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
27
|
+
python_general_be_lib-0.1.0.dist-info/top_level.txt,sha256=tTZePW8_CNUqSgKFd2SEH72ZbnhS0OYjRsgcv0ikSFY,8
|
|
28
|
+
python_general_be_lib-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Arpes S.r.l.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
general
|