ecodev-core 0.0.67__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ecodev_core/__init__.py +129 -0
- ecodev_core/app_activity.py +126 -0
- ecodev_core/app_rights.py +24 -0
- ecodev_core/app_user.py +92 -0
- ecodev_core/auth_configuration.py +24 -0
- ecodev_core/authentication.py +316 -0
- ecodev_core/backup.py +105 -0
- ecodev_core/check_dependencies.py +179 -0
- ecodev_core/custom_equal.py +27 -0
- ecodev_core/db_connection.py +94 -0
- ecodev_core/db_filters.py +142 -0
- ecodev_core/db_i18n.py +211 -0
- ecodev_core/db_insertion.py +128 -0
- ecodev_core/db_retrieval.py +193 -0
- ecodev_core/db_upsertion.py +382 -0
- ecodev_core/deployment.py +16 -0
- ecodev_core/email_sender.py +60 -0
- ecodev_core/encryption.py +46 -0
- ecodev_core/enum_utils.py +21 -0
- ecodev_core/es_connection.py +79 -0
- ecodev_core/list_utils.py +134 -0
- ecodev_core/logger.py +122 -0
- ecodev_core/pandas_utils.py +69 -0
- ecodev_core/permissions.py +21 -0
- ecodev_core/pydantic_utils.py +33 -0
- ecodev_core/read_write.py +52 -0
- ecodev_core/rest_api_client.py +211 -0
- ecodev_core/rest_api_configuration.py +25 -0
- ecodev_core/safe_utils.py +241 -0
- ecodev_core/settings.py +51 -0
- ecodev_core/sqlmodel_utils.py +16 -0
- ecodev_core/token_banlist.py +18 -0
- ecodev_core/version.py +144 -0
- ecodev_core-0.0.67.dist-info/LICENSE.md +11 -0
- ecodev_core-0.0.67.dist-info/METADATA +87 -0
- ecodev_core-0.0.67.dist-info/RECORD +37 -0
- ecodev_core-0.0.67.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,94 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module implementing postgresql connection
|
|
3
|
+
"""
|
|
4
|
+
from typing import Callable
|
|
5
|
+
from typing import List
|
|
6
|
+
from typing import Optional
|
|
7
|
+
from urllib.parse import quote
|
|
8
|
+
|
|
9
|
+
from pydantic_settings import BaseSettings
|
|
10
|
+
from pydantic_settings import SettingsConfigDict
|
|
11
|
+
from sqlalchemy import delete
|
|
12
|
+
from sqlalchemy import text
|
|
13
|
+
from sqlmodel import create_engine
|
|
14
|
+
from sqlmodel import Session
|
|
15
|
+
from sqlmodel import SQLModel
|
|
16
|
+
|
|
17
|
+
from ecodev_core.logger import logger_get
|
|
18
|
+
from ecodev_core.settings import SETTINGS
|
|
19
|
+
|
|
20
|
+
log = logger_get(__name__)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class DbSettings(BaseSettings):
    """
    Settings class used to connect to the postgresql database.

    Values are read from the environment (or from the local .env file, see
    model_config below); every field defaults to the empty string so that the
    object can always be constructed.
    """
    db_host: str = ''  # postgresql server host
    db_port: str = ''  # postgresql server port
    db_name: str = ''  # name of the application database
    db_username: str = ''  # connection user
    db_password: str = ''  # connection password (url-quoted before use)
    db_test_name: str = ''  # name of the database used by tests
    model_config = SettingsConfigDict(env_file='.env')
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# Two configuration sources: the central SETTINGS object wins when a value is
# set there, the env-file driven DbSettings is the fallback.
DB, SETTINGS_DB = DbSettings(), SETTINGS.database  # type: ignore[attr-defined]
# Password is url-encoded so special characters do not break the DSN.
_PASSWORD = quote(SETTINGS_DB.db_password or DB.db_password, safe='')
_USER, _HOST = SETTINGS_DB.db_username or DB.db_username, SETTINGS_DB.db_host or DB.db_host
_PORT, _NAME = SETTINGS_DB.db_port or DB.db_port, SETTINGS_DB.db_name or DB.db_name
DB_URL = f'postgresql://{_USER}:{_PASSWORD}@{_HOST}:{_PORT}/{_NAME}'
TEST_DB = SETTINGS_DB.db_test_name or DB.db_test_name
TEST_DB_URL = f'postgresql://{_USER}:{_PASSWORD}@{_HOST}:{_PORT}/{TEST_DB}'
# Maintenance DSN pointing at the default 'postgres' database, used by
# exec_admin_queries for statements such as CREATE/DROP DATABASE.
_ADMIN_DB_URL = f'postgresql://{_USER}:{_PASSWORD}@{_HOST}:{_PORT}/postgres'
# pool_pre_ping checks connections before handing them out, avoiding errors on
# stale/recycled connections.
engine = create_engine(DB_URL, pool_pre_ping=True)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def exec_admin_queries(queries: list[str]) -> None:
    """
    Execute sequentially the passed queries against the admin (postgres) db.

    Args:
        queries: raw SQL statements, executed in order. The connection runs in
            AUTOCOMMIT mode, as required by statements like CREATE DATABASE
            that cannot run inside a transaction.
    """
    admin_engine = create_engine(_ADMIN_DB_URL, isolation_level='AUTOCOMMIT')
    try:
        with admin_engine.connect() as conn:
            for query in queries:
                conn.execute(text(query))
    finally:
        # Always release the connection pool, even when a query raises
        # (the original code leaked the engine on failure).
        admin_engine.dispose()
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def create_db_and_tables(model: Callable, excluded_tables: Optional[List[str]] = None) -> None:
    """
    Create all tables based on the declared schemas in core/models thanks to sqlmodel.

    Does not create the tables which are passed in the optional list of excluded tables
    (must be the table names).

    Args:
        model: any sqlmodel model, only referenced so that its module (and thus
            all table declarations) is imported before create_all runs.
        excluded_tables: table names to skip. Defaults to None (create all).
    """
    log.info(f'Inserting on the fly {model} and all other domain tables')
    excluded = set(excluded_tables or [])
    # Use the public metadata.tables mapping and the supported `tables`
    # parameter of create_all instead of reading metadata.__dict__ and
    # destructively reassigning metadata.tables (which permanently dropped the
    # excluded tables from the metadata for any later caller).
    to_create = [table for name, table in SQLModel.metadata.tables.items()
                 if name not in excluded]
    SQLModel.metadata.create_all(engine, tables=to_create)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def delete_table(model: Callable) -> None:
    """
    Wipe all rows of the table mapped by the passed model.

    Logs the number of rows that were removed.
    """
    wipe_statement = delete(model)
    with Session(engine) as session:
        outcome = session.execute(wipe_statement)
        session.commit()
        log.info(f'Deleted {outcome.rowcount} rows')
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def get_session():
    """
    Retrieves the session, used in Depends() attributes of fastapi routes
    """
    # Generator-style dependency: the session is yielded to the route and the
    # context manager closes it once the request handling is over.
    with Session(engine) as session:
        yield session
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def info_message(model: Callable):
    """
    Log the passed model.

    Sole purpose is to reference a model so that its module gets imported
    (hack used in alembic env.py files to register all tables).
    """
    log.info(f'hack to get all models imported in an alembic env.py. {model}')
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Low level db filtering methods
|
|
3
|
+
"""
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from enum import unique
|
|
7
|
+
from typing import Callable
|
|
8
|
+
from typing import Dict
|
|
9
|
+
|
|
10
|
+
from sqlalchemy import func
|
|
11
|
+
from sqlalchemy.orm.attributes import InstrumentedAttribute
|
|
12
|
+
from sqlmodel import col
|
|
13
|
+
from sqlmodel.sql.expression import Select
|
|
14
|
+
from sqlmodel.sql.expression import SelectOfScalar
|
|
15
|
+
|
|
16
|
+
# Enable sqlalchemy statement caching on sqlmodel select constructs.
SelectOfScalar.inherit_cache = True  # type: ignore
Select.inherit_cache = True  # type: ignore
# Comparison operators recognized by the filtering helpers.
# NOTE(review): the ordering ('>=' before '>') looks deliberate for prefix
# matching in callers — confirm before reordering.
OPERATORS = ['>=', '<=', '!=', '=', '<', '>', 'contains ']
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@unique
class ServerSideFilter(str, Enum):
    """
    All possible server side filtering mechanisms
    """
    STARTSTR = 'start_str'    # case-insensitive 'starts with'
    ILIKESTR = 'ilike_str'    # case-insensitive 'contains'
    STRICTSTR = 'strict_str'  # exact equality
    LIKESTR = 'like_str'      # case-sensitive 'contains'
    BOOL = 'bool'             # boolean equality
    NUM = 'num'               # numeric/datetime comparison using an operator
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _filter_start_str_field(field: InstrumentedAttribute,
                            query: SelectOfScalar,
                            operator: str,
                            value: str
                            ) -> SelectOfScalar:
    """
    Restrict the passed query to rows whose field starts with value.

    Case-insensitive. The operator argument is unused (kept for signature
    uniformity across filter helpers). An empty value leaves the query as is.
    """
    if not value:
        return query
    lowered_column = func.lower(col(field))
    return query.where(lowered_column.startswith(value.lower()))
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def _filter_str_ilike_field(field: InstrumentedAttribute,
                            query: SelectOfScalar,
                            operator: str,
                            value: str
                            ) -> SelectOfScalar:
    """
    Restrict the passed query to rows whose field contains value.

    Case-insensitive (postgres ILIKE). The operator argument is unused (kept
    for signature uniformity). An empty value leaves the query as is.
    """
    if not value:
        return query
    pattern = f'%{value}%'
    return query.where(col(field).ilike(pattern))
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _filter_str_like_field(field: InstrumentedAttribute,
                           query: SelectOfScalar,
                           operator: str,
                           value: str
                           ) -> SelectOfScalar:
    """
    Restrict the passed query to rows whose field contains value (case-sensitive).

    The operator argument is unused (kept for signature uniformity).
    An empty value leaves the query as is.
    """
    if not value:
        return query
    return query.where(col(field).contains(value))
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _filter_strict_str_field(field: InstrumentedAttribute,
                             query: SelectOfScalar,
                             operator: str,
                             value: str
                             ) -> SelectOfScalar:
    """
    Restrict the passed query to rows whose field is strictly equal to value.

    The operator argument is unused (kept for signature uniformity).
    An empty value leaves the query as is.
    """
    if not value:
        return query
    return query.where(col(field) == value)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def _filter_bool_like_field(field: InstrumentedAttribute,
                            query: SelectOfScalar,
                            operator: str,
                            value: str
                            ) -> SelectOfScalar:
    """
    Restrict the passed query to rows whose bool field equals the passed value.

    The operator argument is unused (kept for signature uniformity).
    An empty value leaves the query as is.
    """
    if not value:
        return query
    return query.where(col(field) == value)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _filter_num_like_field(field: InstrumentedAttribute,
                           query: SelectOfScalar,
                           operator: str,
                           value: str,
                           is_date: bool = False
                           ) -> SelectOfScalar:
    """
    Restrict the passed query on a num like field (datetime when is_date is True).

    The raw str value is converted to a datetime (via _date) or a float and
    compared to db values with the passed operator. Empty operator/value, or an
    operator outside the handled set, leaves the query untouched.
    """
    # Dispatch table replacing the if/elif ladder; one entry per handled operator.
    comparators: Dict[str, Callable] = {
        '>=': lambda column, target: column >= target,
        '<=': lambda column, target: column <= target,
        '!=': lambda column, target: column != target,
        '=': lambda column, target: column == target,
        '>': lambda column, target: column > target,
        '<': lambda column, target: column < target,
    }
    if not operator or not value or (comparator := comparators.get(operator)) is None:
        return query

    # Conversion happens only once an operator is matched, exactly as in the
    # branch-per-operator form.
    target = _date(value) if is_date else float(value)
    return query.where(comparator(col(field), target))
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
# Dispatch table mapping each ServerSideFilter kind to its filtering helper.
# All helpers share the (field, query, operator, value) signature.
SERVER_SIDE_FILTERS: Dict[ServerSideFilter, Callable] = {
    ServerSideFilter.STARTSTR: _filter_start_str_field,
    ServerSideFilter.STRICTSTR: _filter_strict_str_field,
    ServerSideFilter.LIKESTR: _filter_str_like_field,
    ServerSideFilter.ILIKESTR: _filter_str_ilike_field,
    ServerSideFilter.BOOL: _filter_bool_like_field,
    ServerSideFilter.NUM: _filter_num_like_field
}
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def _date(year: str) -> datetime:
|
|
139
|
+
"""
|
|
140
|
+
Convert the passed str year to a datetime to allow filtering on datetime years.
|
|
141
|
+
"""
|
|
142
|
+
return datetime(year=int(year), month=1, day=1)
|
ecodev_core/db_i18n.py
ADDED
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module implementing internationalization (i18n) for sqlmodel
|
|
3
|
+
"""
|
|
4
|
+
import contextvars
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
from sqlalchemy import Label
|
|
9
|
+
from sqlalchemy import label
|
|
10
|
+
from sqlmodel import func
|
|
11
|
+
from sqlmodel.main import SQLModelMetaclass
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Lang(str, Enum):
    """
    Enum of the languages available for localization.
    """
    EN = 'en'  # English
    FR = 'fr'  # French
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# Name of the context variable holding the active database language.
DB_Lang = 'db_Lang'
CONTEXT_DB_Lang = contextvars.ContextVar(DB_Lang, default=Lang.EN)
"""Context variables for storing the active database language, defaults to Lang.EN"""
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def set_lang(lang: Lang) -> None:
    """
    Sets the `CONTEXT_DB_Lang` context var.

    Args:
        lang (Lang): The language to assign to the `CONTEXT_DB_Lang` context var.
    """
    # Context-local assignment: only affects the current execution context.
    CONTEXT_DB_Lang.set(lang)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def get_lang() -> Lang:
    """
    Fetches the value of `CONTEXT_DB_Lang` context var.

    Returns:
        lang (Lang): The value of `CONTEXT_DB_Lang` context var
    """
    # Re-wrap so that a plain str stored upstream still comes back as a Lang.
    return Lang(CONTEXT_DB_Lang.get())
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class I18nMixin:
    """
    I18n (localization) mixin for string attributes of pydantic BaseModel classes.

    Maps arbitrary string attributes of the class to their localized values. Localized fields
    should be defined following the rules below :
    - The field name must be defined as a key of the private attribute `__localized_fields__`
    - Each field defined in `__localized_fields__` must be present as an attribute for \
    each of its localized versions in the following format <field>_<lang>.
    For example :
    ```
    __localized_fields__ = {
        'name':[Lang.EN, Lang.FR]
    }
    ```
    assumes that the attributes `name_en` and `name_fr` are attributes of the class.
    These attributes must have a type `str`.
    - All localized fields must have a localized version using `__fallback_lang__`

    Args:
        __localized_fields__ (dict[str, list[Lang]]): Mapping between localized fields and a \
            list of their available localized versions. Defaults to {}.
        __fallback_lang__ (Lang): Fallback locale if the requested localized version of a \
            field is None. Defaults to Lang.EN.
    """
    # Dunder names with trailing underscores are NOT name-mangled, so
    # subclasses can simply override these class attributes.
    __localized_fields__: dict[str, list[Lang]] = {}
    __fallback_lang__: Lang = Lang.EN

    @classmethod
    def _get_lang_chain(cls, field: str, lang: Optional[Lang] = None) -> list[Lang]:
        """
        Returns the chain of localized versions of the requested field with the priority given
        to the `lang` argument, followed by the lang returned by
        [get_lang][ecodev_core.db_i18n.get_lang] and finally the lang defined in
        `cls.__fallback_lang__`.

        Args:
            field (str): Name of the attribute/field to localize
            lang (Optional[Lang]): The requested locale language. If none, then uses that
                returned by [get_lang][ecodev_core.db_i18n.get_lang]. Defaults to None.

        Returns:
            list[Lang]: List of Lang enums to use for generating the name of the localized \
                fields.

        Raises:
            AttributeError: if the field is not internationalized, if the fallback
                language is not available for it, or if it has no version for the
                requested language.
        """
        if field not in cls.__localized_fields__:
            raise AttributeError(f'Field {field!r} is not internationalized.')

        available_langs = cls.__localized_fields__[field]

        if cls.__fallback_lang__ not in available_langs:
            raise AttributeError(
                f'Fallback language {cls.__fallback_lang__!r} not available for field {field!r}. '
                f'Available: {available_langs}'
            )

        lang = lang or get_lang()
        if lang not in cls.__localized_fields__[field]:
            raise AttributeError(f'Field {field!r} is not localized to {lang!r}')

        # The fallback is appended only when it differs from the requested lang.
        return [lang] if cls.__fallback_lang__ == lang else [lang, cls.__fallback_lang__]

    @classmethod
    def _get_localized_field_name(cls, field: str, lang: Lang) -> str:
        """
        Returns the name of the localized version of `field` for the requested `lang` in the
        following format <field>_<lang>.

        Args:
            field (str): Name of the attribute/field to localize
            lang (Lang): The requested locale language.
        Returns:
            str: the name of the localized version of `field` for the requested `lang`
        """
        return f'{field}_{lang.value}'

    @classmethod
    def get_localized_field_chain(cls, field: str, lang: Optional[Lang] = None) -> list[str]:
        """
        Returns a chain of the localized versions of the requested field with the priority given
        to the `lang` argument, followed by the lang returned by
        [get_lang][ecodev_core.db_i18n.get_lang] and finally
        the lang defined in `cls.__fallback_lang__`

        Args:
            field (str): Name of the attribute/field to localize
            lang (Optional[Lang]): The requested locale language. If none, then uses that
                returned by [get_lang][ecodev_core.db_i18n.get_lang]. Defaults to None.

        Returns:
            list[str]: chain of the localized versions of the requested field.
        """
        # NB: the comprehension variable deliberately shadows the `lang` parameter.
        return [cls._get_localized_field_name(field, lang)
                for lang in cls._get_lang_chain(field, lang)]

    def _get_localized(self, field: str, lang: Optional[Lang] = None) -> Optional[str]:
        """
        Returns the localized version of a field.

        The localized version is returned following the rules defined below :
        - If the requested localized version is not available then an attempt \
            will be made to localize the field using `__fallback_lang__`
        - The specified language can be passed to `_get_localized`. If it is not passed, \
            the value returned by [get_lang][ecodev_core.db_i18n.get_lang]
            is used instead (Defaults to Lang.EN)
        - If None is found using the format <field>_<lang> for the languages defined in \
            `__localized_fields__` & `__fallback_lang__` then returns None

        Args:
            field (str): Name of the attribute/field to localize
            lang (Optional[Lang]): Requested locale. If None, then fetched from \
                [get_lang][ecodev_core.db_i18n.get_lang]. Defaults to None.

        Return:
            localized_field (Optional[str]): localized version of the field
        """
        lang_chain = self._get_lang_chain(field=field, lang=lang)

        for lang in lang_chain:
            attr = self._get_localized_field_name(field=field, lang=lang)
            # getattr with a default so that a missing localized attribute falls
            # through to the next lang instead of recursing into __getattr__.
            value = getattr(self, attr, None)
            # Truthiness check: empty strings are treated like None and skipped.
            if value:
                return value

        return None

    def __getattr__(self, item: str) -> Optional[str]:
        """
        Overrides __getattr__ to get the localized value of an item if it figures in
        `__localized_fields__`.

        NB: __getattr__ is only invoked when normal attribute lookup fails, so
        real attributes (including the <field>_<lang> columns) are unaffected.
        """
        if item in self.__localized_fields__:
            return self._get_localized(item)
        raise AttributeError(f'{self.__class__.__name__!r} object has no attribute {item!r}')
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def localized_col(
        field: str,
        db_schema: SQLModelMetaclass,
        lang: Optional[Lang] = None,
) -> Label:
    """
    Returns the localized version of `field` for the requested `lang` of a
    given SqlModel class, as a labelled column usable in select statements.

    If `lang` is not specified, the active locale is fetched from
    [get_lang][ecodev_core.db_i18n.get_lang]. The localized column falls back
    (via SQL coalesce) on the class `__fallback_lang__` version when the
    requested version is NULL.

    Args:
        field (str): Name of the field to localize
        db_schema (SQLModelMetaclass): class from which the localized columns are fetched
        lang (Optional[Lang]): Requested locale language. Defaults to None.

    Returns:
        Label: coalesce of the localized columns, labelled with the name of `field`.

    Raises:
        TypeError: if db_schema does not inherit from I18nMixin.
    """
    if not issubclass(db_schema, I18nMixin):
        raise TypeError(f"{db_schema.__name__} does not inherit from I18nMixin")

    # Columns ordered by priority: requested lang first, fallback lang second.
    prioritized_columns = [getattr(db_schema, column_name)
                           for column_name in db_schema.get_localized_field_chain(field, lang)]

    return label(field, func.coalesce(*prioritized_columns))
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Module implementing functions to insert data within the db
|
|
3
|
+
"""
|
|
4
|
+
from io import BytesIO
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
from typing import Callable
|
|
8
|
+
from typing import Dict
|
|
9
|
+
from typing import List
|
|
10
|
+
from typing import Union
|
|
11
|
+
|
|
12
|
+
import pandas as pd
|
|
13
|
+
import progressbar
|
|
14
|
+
from fastapi import BackgroundTasks
|
|
15
|
+
from fastapi import UploadFile
|
|
16
|
+
from pandas import ExcelFile
|
|
17
|
+
from sqlmodel import Session
|
|
18
|
+
from sqlmodel import SQLModel
|
|
19
|
+
from sqlmodel.main import SQLModelMetaclass
|
|
20
|
+
from sqlmodel.sql.expression import SelectOfScalar
|
|
21
|
+
|
|
22
|
+
from ecodev_core.db_upsertion import BATCH_SIZE
|
|
23
|
+
from ecodev_core.logger import log_critical
|
|
24
|
+
from ecodev_core.logger import logger_get
|
|
25
|
+
from ecodev_core.pydantic_utils import CustomFrozen
|
|
26
|
+
from ecodev_core.safe_utils import SimpleReturn
|
|
27
|
+
|
|
28
|
+
log = logger_get(__name__)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class Insertor(CustomFrozen):
    """
    Configuration class to insert data into the postgresql db.

    Attributes are:

    - reductor: how to create or update a row in db
    - db_schema: the default constructor of the sqlmodel based class defining the db table
    - selector: the criteria on which to decide whether to create or update (example: only add
      a user if a user with the same name is not already present in the db)
    - convertor: how to convert the raw csv/excel passed by the user to json like db rows
    - read_excel_file: whether to insert data based on an xlsx (if true) or a csv (if false)
    """
    reductor: Callable[[Any, Any], Any]
    db_schema: Callable
    selector: Callable[[Any], SelectOfScalar]
    convertor: Callable[[Union[pd.DataFrame, ExcelFile]], List[Dict]]
    read_excel_file: bool = True
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def generic_insertion(df_or_xl: Union[pd.DataFrame, ExcelFile, Path],
                      session: Session,
                      insertor: Callable[[Union[pd.DataFrame, pd.ExcelFile], Session], None],
                      background_tasks: Union[BackgroundTasks, None] = None):
    """
    Run the passed insertor on the passed data, either synchronously or as a
    fastapi background task when background_tasks is provided.

    Returns a SimpleReturn success when the insertion ran (or was scheduled),
    a SimpleReturn failure carrying the error message otherwise.
    """
    try:
        if background_tasks:
            # Deferred: fastapi runs the insertor after the response is sent,
            # so errors raised there are NOT caught by this try block.
            background_tasks.add_task(insertor, df_or_xl, session)
        else:
            insertor(df_or_xl, session)
        return SimpleReturn.route_success()
    except Exception as error:
        log_critical(f'Something wrong happened: {error}', log)
        return SimpleReturn.route_failure(str(error))
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
async def insert_file(file: UploadFile, insertor: Insertor, session: Session) -> None:
    """
    Inserts an uploaded file into a database.

    The upload is first parsed to pandas format (excel or csv depending on
    insertor.read_excel_file), then each converted row is created or updated in db.
    """
    df_raw = await get_raw_df(file, insertor.read_excel_file)
    insert_data(df_raw, insertor, session)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session: Session) -> None:
    """
    Inserts a csv/df into a database.

    Each converted row is either created or merged with its existing db
    counterpart; a single commit is issued at the end.
    """
    converted_rows = insertor.convertor(df)
    for raw_row in converted_rows:
        db_row = create_or_update(session, raw_row, insertor)
        session.add(db_row)
    session.commit()
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def insert_batch_data(data: list[dict | SQLModelMetaclass],
                      session: Session,
                      raw_db_schema: SQLModelMetaclass | None = None) -> None:
    """
    Insert the passed list of dicts (corresponding to db_schema) into db_schema db.

    Warning: this only inserts data, without checking for pre-existence.
    Ensure deleting the data before using it to avoid duplicates.

    Args:
        data: rows to insert, either dicts (passed to the schema constructor)
            or already-built model instances.
        session: active db session.
        raw_db_schema: schema to instantiate dict rows with; when None, the
            class of the first element is used. Defaults to None.
    """
    if not data:
        # Nothing to insert; also guards the data[0] access below, which
        # raised IndexError on an empty list when raw_db_schema was None.
        return
    db_schema = raw_db_schema or data[0].__class__
    batches = [data[i:i + BATCH_SIZE] for i in range(0, len(data), BATCH_SIZE)]

    # One commit per batch to keep transactions bounded on large inserts.
    for batch in progressbar.progressbar(batches, redirect_stdout=False):
        for row in batch:
            new_object = db_schema(**row) if isinstance(row, dict) else row
            session.add(new_object)
        session.commit()
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def create_or_update(session: Session, row: Dict, insertor: Insertor) -> SQLModel:
    """
    Return a db row ready to be added to the session.

    A brand new row is returned when the insertor selector finds no match in
    db; otherwise the existing row is merged with the new values through the
    insertor reductor.
    """
    candidate = insertor.db_schema(**row)
    existing = session.exec(insertor.selector(candidate)).first()
    if not existing:
        return candidate
    return insertor.reductor(existing, candidate)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
async def get_raw_df(file: UploadFile,
                     read_excel_file: bool,
                     sep: str = ',') -> Union[pd.DataFrame, ExcelFile]:
    """
    Retrieves the raw data from the uploaded file at pandas format.

    Args:
        file: the uploaded file (fully read into memory).
        read_excel_file: when True, parse as an xlsx workbook; otherwise as csv.
        sep: csv column separator (ignored for excel). Defaults to ','.

    Returns:
        an ExcelFile for excel uploads, a DataFrame for csv ones.
    """
    contents = await file.read()
    if read_excel_file:
        # Wrap the raw bytes in a file-like buffer, the canonical input type
        # for ExcelFile across pandas versions.
        return pd.ExcelFile(BytesIO(contents))

    # Context manager guarantees the buffer is released even if parsing fails
    # (the previous manual close() was skipped when read_csv raised).
    with BytesIO(contents) as buffer:
        return pd.read_csv(buffer, sep=sep)
|