ecodev-core 0.0.49__tar.gz → 0.0.51__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ecodev-core might be problematic.
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/PKG-INFO +1 -1
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/__init__.py +3 -1
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/auth_configuration.py +6 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/authentication.py +6 -4
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/backup.py +6 -3
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/db_connection.py +6 -3
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/db_insertion.py +22 -2
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/db_retrieval.py +1 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/db_upsertion.py +31 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/email_sender.py +12 -5
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/es_connection.py +16 -13
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/settings.py +20 -7
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/pyproject.toml +1 -1
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/LICENSE.md +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/README.md +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/app_activity.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/app_rights.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/app_user.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/check_dependencies.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/custom_equal.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/db_filters.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/deployment.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/enum_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/list_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/logger.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/pandas_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/permissions.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/pydantic_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/read_write.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/safe_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/sqlmodel_utils.py +0 -0
- {ecodev_core-0.0.49 → ecodev_core-0.0.51}/ecodev_core/version.py +0 -0
ecodev_core/__init__.py

@@ -46,6 +46,8 @@ from ecodev_core.db_upsertion import upsert_data
 from ecodev_core.db_upsertion import upsert_deletor
 from ecodev_core.db_upsertion import upsert_df_data
 from ecodev_core.db_upsertion import upsert_selector
+from ecodev_core.db_upsertion import get_sfield_columns
+from ecodev_core.db_upsertion import filter_to_sfield_dict
 from ecodev_core.deployment import Deployment
 from ecodev_core.email_sender import send_email
 from ecodev_core.enum_utils import enum_converter

@@ -103,4 +105,4 @@ __all__ = [
     'datify', 'safe_drop_columns', 'get_value', 'is_null', 'send_email', 'first_func_or_default',
     'sort_by_keys', 'sort_by_values', 'Settings', 'load_yaml_file', 'Deployment', 'Version',
     'sfield', 'field', 'upsert_df_data', 'upsert_deletor', 'get_row_versions', 'get_versions',
-    'db_to_value', 'upsert_data', 'upsert_selector']
+    'db_to_value', 'upsert_data', 'upsert_selector', 'get_sfield_columns', 'filter_to_sfield_dict']
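For reference, the two helpers added to db_upsertion and exported above become importable straight from the package root; nothing below goes beyond the names visible in this diff:

    from ecodev_core import filter_to_sfield_dict
    from ecodev_core import get_sfield_columns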
ecodev_core/auth_configuration.py

@@ -4,6 +4,8 @@ Module implementing authentication configuration.
 from pydantic_settings import BaseSettings
 from pydantic_settings import SettingsConfigDict
 
+from ecodev_core.settings import SETTINGS
+
 
 class AuthenticationConfiguration(BaseSettings):
     """

@@ -16,3 +18,7 @@ class AuthenticationConfiguration(BaseSettings):
 
 
 AUTH = AuthenticationConfiguration()
+SETTINGS_AUTH = SETTINGS.authentication  # type: ignore[attr-defined]
+SECRET_KEY = SETTINGS_AUTH.secret_key or AUTH.secret_key
+ALGO = SETTINGS_AUTH.algorithm or AUTH.algorithm
+EXPIRATION_LENGTH = SETTINGS_AUTH.access_token_expire_minutes or AUTH.access_token_expire_minutes
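The new module-level constants encode a simple precedence rule: a value found under SETTINGS.authentication (the YAML-backed settings object introduced in settings.py below) wins, and the .env-backed AuthenticationConfiguration is only a fallback. The pattern is a plain Python `or`, so a falsy YAML value (empty string, 0) also falls through to the .env value; a standalone sketch with purely illustrative values:

    yaml_secret = ''             # e.g. no secret_key provided in the YAML config
    env_secret = 'from-dotenv'   # e.g. secret key read from .env
    SECRET_KEY = yaml_secret or env_secret   # -> 'from-dotenv'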
ecodev_core/authentication.py

@@ -26,7 +26,9 @@ from starlette.requests import Request
 from starlette.responses import RedirectResponse
 
 from ecodev_core.app_user import AppUser
-from ecodev_core.auth_configuration import
+from ecodev_core.auth_configuration import ALGO
+from ecodev_core.auth_configuration import EXPIRATION_LENGTH
+from ecodev_core.auth_configuration import SECRET_KEY
 from ecodev_core.db_connection import engine
 from ecodev_core.logger import logger_get
 from ecodev_core.permissions import Permission

@@ -235,11 +237,11 @@ def _create_access_token(data: Dict, tfa_value: Optional[str] = None) -> str:
     Create an access token out of the passed data. Only called if credentials are valid
     """
     to_encode = data.copy()
-    expire = datetime.now(timezone.utc) + timedelta(minutes=
+    expire = datetime.now(timezone.utc) + timedelta(minutes=EXPIRATION_LENGTH)
     to_encode['exp'] = expire
     if tfa_value:
         to_encode['tfa'] = _hash_password(tfa_value)
-    return jwt.encode(to_encode,
+    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGO)
 
 
 def _verify_access_token(token: str,

@@ -249,7 +251,7 @@ def _verify_access_token(token: str,
     Retrieves the token data associated to the passed token if it contains valid credential info.
     """
     try:
-        payload = jwt.decode(token,
+        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGO])
         if tfa_check and (not tfa_value or not _check_password(tfa_value, payload.get('tfa'))):
             raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=INVALID_TFA,
                                 headers={'WWW-Authenticate': 'Bearer'})
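Functionally the token round trip is unchanged; only the origin of the key, algorithm and lifetime moves to the constants resolved in auth_configuration.py. A minimal, hedged sketch of the same encode/decode shape, assuming a PyJWT-compatible jwt module and illustrative claim values (none of this is taken from the package itself):

    from datetime import datetime, timedelta, timezone

    import jwt  # assumption: PyJWT-compatible API

    SECRET_KEY, ALGO, EXPIRATION_LENGTH = 'change-me', 'HS256', 30  # illustrative values

    claims = {'sub': 'alice',
              'exp': datetime.now(timezone.utc) + timedelta(minutes=EXPIRATION_LENGTH)}
    token = jwt.encode(claims, SECRET_KEY, algorithm=ALGO)      # same call shape as _create_access_token
    decoded = jwt.decode(token, SECRET_KEY, algorithms=[ALGO])  # same call shape as _verify_access_token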
ecodev_core/backup.py

@@ -15,7 +15,7 @@ from pydantic_settings import SettingsConfigDict
 
 from ecodev_core.db_connection import DB_URL
 from ecodev_core.logger import logger_get
-
+from ecodev_core.settings import SETTINGS
 
 log = logger_get(__name__)
 

@@ -30,8 +30,11 @@ class BackUpSettings(BaseSettings):
     model_config = SettingsConfigDict(env_file='.env')
 
 
-BCK = BackUpSettings()
-
+BCK, SETTINGS_BCK = BackUpSettings(), SETTINGS.backup  # type: ignore[attr-defined]
+_USER = SETTINGS_BCK.backup_username or BCK.backup_username
+_PASSWD = SETTINGS_BCK.backup_password or BCK.backup_password
+_URL = SETTINGS_BCK.backup_url or BCK.backup_url
+BACKUP_URL = f'ftp://{_USER}:{_PASSWD}@{_URL}'
 
 
 def backup(backed_folder: Path, nb_saves: int = 5, additional_id: str = 'default') -> None:
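Only the signature of backup() is visible in this diff; a hypothetical call, for orientation (folder path and identifier are made up):

    from pathlib import Path

    from ecodev_core.backup import backup

    backup(Path('/app/data'), nb_saves=5, additional_id='nightly')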
ecodev_core/db_connection.py

@@ -14,6 +14,7 @@ from sqlmodel import Session
 from sqlmodel import SQLModel
 
 from ecodev_core.logger import logger_get
+from ecodev_core.settings import SETTINGS
 
 log = logger_get(__name__)
 

@@ -30,9 +31,11 @@ class DbSettings(BaseSettings):
     model_config = SettingsConfigDict(env_file='.env')
 
 
-DB = DbSettings()
-_PASSWORD = quote(DB.db_password, safe='')
-
+DB, SETTINGS_DB = DbSettings(), SETTINGS.database  # type: ignore[attr-defined]
+_PASSWORD = quote(SETTINGS_DB.db_password or DB.db_password, safe='')
+_USER, _HOST = SETTINGS_DB.db_username or DB.db_username, SETTINGS_DB.db_host or DB.db_host
+_PORT, _NAME = SETTINGS_DB.db_port or DB.db_port, SETTINGS_DB.db_name or DB.db_name
+DB_URL = f'postgresql://{_USER}:{_PASSWORD}@{_HOST}:{_PORT}/{_NAME}'
 engine = create_engine(DB_URL)
 
 
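The engine itself is still built from DB_URL; only the resolution of the URL's components changes. For orientation, a standard SQLModel session over this engine (ordinary SQLModel usage, not taken from the diff; AppUser is one of the package's existing models):

    from sqlmodel import Session, select

    from ecodev_core.app_user import AppUser
    from ecodev_core.db_connection import engine

    with Session(engine) as session:
        users = session.exec(select(AppUser)).all()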
ecodev_core/db_insertion.py

@@ -10,19 +10,21 @@ from typing import List
 from typing import Union
 
 import pandas as pd
+import progressbar
 from fastapi import BackgroundTasks
 from fastapi import UploadFile
 from pandas import ExcelFile
 from sqlmodel import Session
 from sqlmodel import SQLModel
+from sqlmodel.main import SQLModelMetaclass
 from sqlmodel.sql.expression import SelectOfScalar
 
+from ecodev_core.db_upsertion import BATCH_SIZE
 from ecodev_core.logger import log_critical
 from ecodev_core.logger import logger_get
 from ecodev_core.pydantic_utils import CustomFrozen
 from ecodev_core.safe_utils import SimpleReturn
 
-
 log = logger_get(__name__)
 
 

@@ -72,7 +74,7 @@ async def insert_file(file: UploadFile, insertor: Insertor, session: Session) ->
     insert_data(df_raw, insertor, session)
 
 
-def insert_data(df:
+def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session: Session) -> None:
     """
     Inserts a csv/df into a database
     """

@@ -81,6 +83,24 @@ def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session
     session.commit()
 
 
+def insert_batch_data(data: list[dict | SQLModelMetaclass],
+                      session: Session,
+                      raw_db_schema: SQLModelMetaclass | None = None) -> None:
+    """
+    Insert the passed list of dicts (corresponding to db_schema) into db_schema db.
+    Warning: this only inserts data, without checking for pre-existence.
+    Ensure deleting the data before using it to avoid duplicates.
+    """
+    db_schema = raw_db_schema or data[0].__class__
+    batches = [data[i:i + BATCH_SIZE] for i in range(0, len(data), BATCH_SIZE)]
+
+    for batch in progressbar.progressbar(batches, redirect_stdout=False):
+        for row in batch:
+            new_object = db_schema(**row) if isinstance(row, dict) else row
+            session.add(new_object)
+        session.commit()
+
+
 def create_or_update(session: Session, row: Dict, insertor: Insertor) -> SQLModel:
     """
     Create a new row in db if the selector insertor does not find existing row in db. Update the row
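A hedged usage sketch for the new insert_batch_data: the Product model below is purely illustrative, only the function and its batching behaviour come from the diff.

    from sqlmodel import Field, Session, SQLModel

    from ecodev_core.db_connection import engine
    from ecodev_core.db_insertion import insert_batch_data

    class Product(SQLModel, table=True):  # illustrative model, not part of ecodev_core
        id: int | None = Field(default=None, primary_key=True)
        name: str

    rows = [{'name': f'product_{i}'} for i in range(10_000)]
    with Session(engine) as session:
        # rows are added and committed in BATCH_SIZE chunks, with a progress bar
        insert_batch_data(rows, session, raw_db_schema=Product)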
ecodev_core/db_retrieval.py

@@ -22,6 +22,7 @@ from sqlmodel.sql.expression import SelectOfScalar
 from ecodev_core.db_connection import engine
 from ecodev_core.db_filters import SERVER_SIDE_FILTERS
 from ecodev_core.db_filters import ServerSideFilter
+from ecodev_core.db_upsertion import FILTER_ON
 from ecodev_core.list_utils import first_or_default
 from ecodev_core.pydantic_utils import Frozen
 
ecodev_core/db_upsertion.py

@@ -132,3 +132,34 @@ def upsert_data(data: list[dict | SQLModelMetaclass],
         else:
             session.add(new_object)
     session.commit()
+
+
+def get_sfield_columns(db_model: SQLModelMetaclass) -> list[str]:
+    """
+    get all the columsn flagged as sfields from schema
+    Args:
+        db_model (SQLModelMetaclass): db_model
+    Returns:
+        list of str with the names of the columns
+    """
+    return [
+        x.name
+        for x in inspect(db_model).c
+        if x.info.get(FILTER_ON) is True
+    ]
+
+
+def filter_to_sfield_dict(row: dict | SQLModelMetaclass,
+                          db_schema: SQLModelMetaclass | None = None) \
+        -> dict[str, dict | SQLModelMetaclass]:
+    """
+    Returns a dict with only sfields from object
+    Args:
+        row: any object with ecodev_core field and sfield
+        db_schema (SQLModelMetaclass): db_schema. Use the schema of row if not specified
+    Returns:
+        dict
+    """
+    return {pk: getattr(row, pk)
+            for pk in get_sfield_columns(db_schema or row.__class__)}
+
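A hedged note on how these two helpers are typically used together (AppUser is only an example argument; which of its columns carry the sfield flag is not visible in this diff):

    from ecodev_core.app_user import AppUser
    from ecodev_core.db_upsertion import filter_to_sfield_dict, get_sfield_columns

    selector_columns = get_sfield_columns(AppUser)  # names of the columns flagged via sfield
    # For any row of that model, filter_to_sfield_dict(row) is equivalent to
    # {col: getattr(row, col) for col in selector_columns}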
ecodev_core/email_sender.py

@@ -11,6 +11,8 @@ from ssl import create_default_context
 from pydantic_settings import BaseSettings
 from pydantic_settings import SettingsConfigDict
 
+from ecodev_core.settings import SETTINGS
+
 
 class EmailAuth(BaseSettings):
     """

@@ -19,10 +21,15 @@ class EmailAuth(BaseSettings):
     email_smtp: str = ''
     email_sender: str = ''
     email_password: str = ''
+    email_port: int = 587
     model_config = SettingsConfigDict(env_file='.env')
 
 
-EMAIL_AUTH = EmailAuth()
+EMAIL_AUTH, EMAIL_SETTINGS = EmailAuth(), SETTINGS.smtp  # type: ignore[attr-defined]
+_SENDER = EMAIL_SETTINGS.email_sender or EMAIL_AUTH.email_sender
+_SMTP = EMAIL_SETTINGS.email_smtp or EMAIL_AUTH.email_smtp
+_PASSWD = EMAIL_SETTINGS.email_password or EMAIL_AUTH.email_password
+_PORT = EMAIL_SETTINGS.email_port or EMAIL_AUTH.email_port
 
 
 def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None = None) -> None:

@@ -36,7 +43,7 @@ def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None
     - images: if any, the Dict of image tags:image paths to incorporate in the email
     """
     em = MIMEMultipart('related')
-    em['From'] =
+    em['From'] = _SENDER
     em['To'] = email
     em['Subject'] = topic
     em.attach(MIMEText(body, 'html'))

@@ -46,8 +53,8 @@ def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None
         img.add_header('Content-ID', f'<{tag}>')
         em.attach(img)
 
-    with SMTP(
+    with SMTP(_SMTP, _PORT) as server:
         server.ehlo()
         server.starttls(context=create_default_context())
-        server.login(
-        server.sendmail(
+        server.login(_SENDER, _PASSWD)
+        server.sendmail(_SENDER, email, em.as_string())
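Usage is unchanged; the SMTP host, port, sender and password are now resolved once at import time from SETTINGS.smtp with a .env fallback. A sketch of a call (addresses and paths are made up; the cid: tag matches the Content-ID header set in the loop above):

    from pathlib import Path

    from ecodev_core import send_email

    send_email('jane.doe@example.com',
               '<p>Weekly numbers: <img src="cid:chart"></p>',
               'Weekly report',
               images={'chart': Path('/tmp/chart.png')})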
ecodev_core/es_connection.py

@@ -11,6 +11,7 @@ from pydantic_settings import BaseSettings
 from pydantic_settings import SettingsConfigDict
 
 from ecodev_core.logger import logger_get
+from ecodev_core.settings import SETTINGS
 
 ES_CLIENT: Union[Elasticsearch, None] = None
 log = logger_get(__name__)

@@ -21,15 +22,18 @@ class ESAuth(BaseSettings):
     """
     Simple ES authentication configuration class
     """
-    host: str
-    user: str
-    password: str
-    port: int
-    index: str
+    host: str = ''
+    user: str = ''
+    password: str = ''
+    port: int = 9200
+    index: str = ''
     model_config = SettingsConfigDict(env_file='.env', env_prefix='ES_')
 
 
-ES_AUTH = ESAuth()  # type: ignore
+ES_AUTH, ES_SETTINGS = ESAuth(), SETTINGS.elastic_search  # type: ignore[attr-defined]
+_HOST, _PORT = ES_SETTINGS.host or ES_AUTH.host, ES_SETTINGS.port or ES_AUTH.port
+_USER, _PASSWD = ES_SETTINGS.user or ES_AUTH.user, ES_SETTINGS.password or ES_AUTH.password
+_INDEX = ES_SETTINGS.index or ES_AUTH.index
 
 
 def get_es_client():

@@ -39,8 +43,7 @@ def get_es_client():
     global ES_CLIENT
 
     if ES_CLIENT is None:
-        ES_CLIENT = Elasticsearch(f'http://{
-                                  basic_auth=[ES_AUTH.user, ES_AUTH.password])
+        ES_CLIENT = Elasticsearch(f'http://{_HOST}:{_PORT}/', basic_auth=[_USER, _PASSWD])
 
     return ES_CLIENT
 

@@ -51,11 +54,11 @@ def create_es_index(body: dict) -> None:
     """
     client = get_es_client()
     try:
-        client.indices.delete(index=
+        client.indices.delete(index=_INDEX)
     except Exception:
         pass
-    client.indices.create(index=
-    log.info(f'index {
+    client.indices.create(index=_INDEX, body=body)
+    log.info(f'index {_INDEX} created')
 
 
 def insert_es_fields(operations: list[dict], batch_size: int = ES_BATCH_SIZE) -> None:

@@ -66,11 +69,11 @@ def insert_es_fields(operations: list[dict], batch_size: int = ES_BATCH_SIZE) ->
     batches = [list(operations)[i:i + batch_size] for i in range(0, len(operations), batch_size)]
     log.info('indexing fields')
     for batch in progressbar.progressbar(batches, redirect_stdout=False):
-        helpers.bulk(client, batch, index=
+        helpers.bulk(client, batch, index=_INDEX)
 
 
 def retrieve_es_fields(body: dict[str, Any]) -> list[dict]:
     """
     Core call to the elasticsearch index
     """
-    return get_es_client().search(index=
+    return get_es_client().search(index=_INDEX, body=body)
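A hedged end-to-end sketch of the helpers above (the mapping and query bodies are illustrative; the index name and credentials now come from SETTINGS.elastic_search, falling back to the ES_-prefixed .env variables):

    from ecodev_core.es_connection import create_es_index, insert_es_fields, retrieve_es_fields

    create_es_index({'mappings': {'properties': {'name': {'type': 'text'}}}})
    insert_es_fields([{'name': 'widget'}, {'name': 'gadget'}])
    hits = retrieve_es_fields({'query': {'match': {'name': 'widget'}}})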
ecodev_core/settings.py

@@ -1,6 +1,7 @@
 """
 Module defining a dynamic setting class
 """
+from contextlib import suppress
 from pathlib import Path
 
 from pydantic.v1.utils import deep_update

@@ -9,18 +10,24 @@ from pydantic_settings import SettingsConfigDict
 
 from ecodev_core.deployment import Deployment
 from ecodev_core.list_utils import dict_to_class
+from ecodev_core.logger import logger_get
 from ecodev_core.read_write import load_yaml_file
 
+log = logger_get(__name__)
+
 
 class DeploymentSetting(BaseSettings):
     """
     Settings class used to load the deployment type from environment variables.
     """
     environment: str = 'local'
+    base_path: str = '/app'
     model_config = SettingsConfigDict(env_file='.env')
 
 
-
+DEPLOYMENT_SETTINGS = DeploymentSetting()
+DEPLOYMENT = Deployment(DEPLOYMENT_SETTINGS.environment.lower())
+BASE_PATH = Path(DEPLOYMENT_SETTINGS.base_path)
 
 
 class Settings:

@@ -29,15 +36,21 @@ class Settings:
     this configuration with additional information coming from a secret file.
     """
 
-    def __init__(self, base_path: Path =
+    def __init__(self, base_path: Path = BASE_PATH, deployment: Deployment = DEPLOYMENT):
         """
         Dynamically setting Settings attributes, doing so recursively. Attributes are loaded
         from config file, possibly overwriting some of this configuration with additional
         information coming from a secret file.
         """
         self.deployment = deployment
-
-
-
-
-
+
+        with suppress(FileNotFoundError):
+            log.info((base_path / 'config' / f'{deployment.value}.yaml').exists())
+            data = load_yaml_file(base_path / 'config' / f'{deployment.value}.yaml')
+            if (secrets_file := base_path / 'secrets' / f'{deployment.value}.yaml').exists():
+                data = deep_update(data, load_yaml_file(secrets_file))
+            for k, v in dict_to_class(data).items():
+                setattr(self, k, v)
+
+
+SETTINGS = Settings()
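Taken together, the release wires every credential block (authentication, database, SMTP, backup, Elasticsearch) to this YAML-driven SETTINGS object, keeping the existing .env variables as fallback. With the defaults above, the file read is /app/config/local.yaml, optionally deep-merged with /app/secrets/local.yaml. A hedged sketch of the dict such a file would need to yield (the section and key names are exactly the ones read in this diff; the nesting itself is an assumption about the YAML layout):

    config = {
        'authentication': {'secret_key': 'change-me', 'algorithm': 'HS256',
                           'access_token_expire_minutes': 30},
        'database': {'db_username': 'postgres', 'db_password': 'postgres',
                     'db_host': 'localhost', 'db_port': 5432, 'db_name': 'ecodev'},
        'smtp': {'email_sender': 'noreply@example.com', 'email_smtp': 'smtp.example.com',
                 'email_password': 'change-me', 'email_port': 587},
        'backup': {'backup_username': 'backup', 'backup_password': 'change-me',
                   'backup_url': 'ftp.example.com'},
        'elastic_search': {'host': 'localhost', 'port': 9200, 'user': 'elastic',
                           'password': 'change-me', 'index': 'ecodev'},
    }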