ecodev-core 0.0.49__py3-none-any.whl → 0.0.51__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


ecodev_core/__init__.py CHANGED
@@ -46,6 +46,8 @@ from ecodev_core.db_upsertion import upsert_data
  from ecodev_core.db_upsertion import upsert_deletor
  from ecodev_core.db_upsertion import upsert_df_data
  from ecodev_core.db_upsertion import upsert_selector
+ from ecodev_core.db_upsertion import get_sfield_columns
+ from ecodev_core.db_upsertion import filter_to_sfield_dict
  from ecodev_core.deployment import Deployment
  from ecodev_core.email_sender import send_email
  from ecodev_core.enum_utils import enum_converter
@@ -103,4 +105,4 @@ __all__ = [
  'datify', 'safe_drop_columns', 'get_value', 'is_null', 'send_email', 'first_func_or_default',
  'sort_by_keys', 'sort_by_values', 'Settings', 'load_yaml_file', 'Deployment', 'Version',
  'sfield', 'field', 'upsert_df_data', 'upsert_deletor', 'get_row_versions', 'get_versions',
- 'db_to_value', 'upsert_data', 'upsert_selector']
+ 'db_to_value', 'upsert_data', 'upsert_selector', 'get_sfield_columns', 'filter_to_sfield_dict']
ecodev_core/auth_configuration.py CHANGED
@@ -4,6 +4,8 @@ Module implementing authentication configuration.
  from pydantic_settings import BaseSettings
  from pydantic_settings import SettingsConfigDict

+ from ecodev_core.settings import SETTINGS
+

  class AuthenticationConfiguration(BaseSettings):
  """
@@ -16,3 +18,7 @@ class AuthenticationConfiguration(BaseSettings):


  AUTH = AuthenticationConfiguration()
+ SETTINGS_AUTH = SETTINGS.authentication # type: ignore[attr-defined]
+ SECRET_KEY = SETTINGS_AUTH.secret_key or AUTH.secret_key
+ ALGO = SETTINGS_AUTH.algorithm or AUTH.algorithm
+ EXPIRATION_LENGTH = SETTINGS_AUTH.access_token_expire_minutes or AUTH.access_token_expire_minutes
ecodev_core/authentication.py CHANGED
@@ -26,7 +26,9 @@ from starlette.requests import Request
  from starlette.responses import RedirectResponse

  from ecodev_core.app_user import AppUser
- from ecodev_core.auth_configuration import AUTH
+ from ecodev_core.auth_configuration import ALGO
+ from ecodev_core.auth_configuration import EXPIRATION_LENGTH
+ from ecodev_core.auth_configuration import SECRET_KEY
  from ecodev_core.db_connection import engine
  from ecodev_core.logger import logger_get
  from ecodev_core.permissions import Permission
@@ -235,11 +237,11 @@ def _create_access_token(data: Dict, tfa_value: Optional[str] = None) -> str:
  Create an access token out of the passed data. Only called if credentials are valid
  """
  to_encode = data.copy()
- expire = datetime.now(timezone.utc) + timedelta(minutes=AUTH.access_token_expire_minutes)
+ expire = datetime.now(timezone.utc) + timedelta(minutes=EXPIRATION_LENGTH)
  to_encode['exp'] = expire
  if tfa_value:
  to_encode['tfa'] = _hash_password(tfa_value)
- return jwt.encode(to_encode, AUTH.secret_key, algorithm=AUTH.algorithm)
+ return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGO)


  def _verify_access_token(token: str,
@@ -249,7 +251,7 @@ def _verify_access_token(token: str,
  Retrieves the token data associated to the passed token if it contains valid credential info.
  """
  try:
- payload = jwt.decode(token, AUTH.secret_key, algorithms=[AUTH.algorithm])
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGO])
  if tfa_check and (not tfa_value or not _check_password(tfa_value, payload.get('tfa'))):
  raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=INVALID_TFA,
  headers={'WWW-Authenticate': 'Bearer'})
ecodev_core/backup.py CHANGED
@@ -15,7 +15,7 @@ from pydantic_settings import SettingsConfigDict

  from ecodev_core.db_connection import DB_URL
  from ecodev_core.logger import logger_get
-
+ from ecodev_core.settings import SETTINGS

  log = logger_get(__name__)

@@ -30,8 +30,11 @@ class BackUpSettings(BaseSettings):
  model_config = SettingsConfigDict(env_file='.env')


- BCK = BackUpSettings()
- BACKUP_URL = f'ftp://{BCK.backup_username}:{BCK.backup_password}@{BCK.backup_url}'
+ BCK, SETTINGS_BCK = BackUpSettings(), SETTINGS.backup # type: ignore[attr-defined]
+ _USER = SETTINGS_BCK.backup_username or BCK.backup_username
+ _PASSWD = SETTINGS_BCK.backup_password or BCK.backup_password
+ _URL = SETTINGS_BCK.backup_url or BCK.backup_url
+ BACKUP_URL = f'ftp://{_USER}:{_PASSWD}@{_URL}'


  def backup(backed_folder: Path, nb_saves: int = 5, additional_id: str = 'default') -> None:
ecodev_core/db_connection.py CHANGED
@@ -14,6 +14,7 @@ from sqlmodel import Session
  from sqlmodel import SQLModel

  from ecodev_core.logger import logger_get
+ from ecodev_core.settings import SETTINGS

  log = logger_get(__name__)

@@ -30,9 +31,11 @@ class DbSettings(BaseSettings):
  model_config = SettingsConfigDict(env_file='.env')


- DB = DbSettings()
- _PASSWORD = quote(DB.db_password, safe='')
- DB_URL = f'postgresql://{DB.db_username}:{_PASSWORD}@{DB.db_host}:{DB.db_port}/{DB.db_name}'
+ DB, SETTINGS_DB = DbSettings(), SETTINGS.database # type: ignore[attr-defined]
+ _PASSWORD = quote(SETTINGS_DB.db_password or DB.db_password, safe='')
+ _USER, _HOST = SETTINGS_DB.db_username or DB.db_username, SETTINGS_DB.db_host or DB.db_host
+ _PORT, _NAME = SETTINGS_DB.db_port or DB.db_port, SETTINGS_DB.db_name or DB.db_name
+ DB_URL = f'postgresql://{_USER}:{_PASSWORD}@{_HOST}:{_PORT}/{_NAME}'
  engine = create_engine(DB_URL)


ecodev_core/db_insertion.py CHANGED
@@ -10,19 +10,21 @@ from typing import List
  from typing import Union

  import pandas as pd
+ import progressbar
  from fastapi import BackgroundTasks
  from fastapi import UploadFile
  from pandas import ExcelFile
  from sqlmodel import Session
  from sqlmodel import SQLModel
+ from sqlmodel.main import SQLModelMetaclass
  from sqlmodel.sql.expression import SelectOfScalar

+ from ecodev_core.db_upsertion import BATCH_SIZE
  from ecodev_core.logger import log_critical
  from ecodev_core.logger import logger_get
  from ecodev_core.pydantic_utils import CustomFrozen
  from ecodev_core.safe_utils import SimpleReturn

-
  log = logger_get(__name__)


@@ -72,7 +74,7 @@ async def insert_file(file: UploadFile, insertor: Insertor, session: Session) ->
  insert_data(df_raw, insertor, session)


- def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session: Session) -> None:
+ def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session: Session) -> None:
  """
  Inserts a csv/df into a database
  """
@@ -81,6 +83,24 @@ def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session
  session.commit()


+ def insert_batch_data(data: list[dict | SQLModelMetaclass],
+ session: Session,
+ raw_db_schema: SQLModelMetaclass | None = None) -> None:
+ """
+ Insert the passed list of dicts (corresponding to db_schema) into db_schema db.
+ Warning: this only inserts data, without checking for pre-existence.
+ Ensure deleting the data before using it to avoid duplicates.
+ """
+ db_schema = raw_db_schema or data[0].__class__
+ batches = [data[i:i + BATCH_SIZE] for i in range(0, len(data), BATCH_SIZE)]
+
+ for batch in progressbar.progressbar(batches, redirect_stdout=False):
+ for row in batch:
+ new_object = db_schema(**row) if isinstance(row, dict) else row
+ session.add(new_object)
+ session.commit()
+
+
  def create_or_update(session: Session, row: Dict, insertor: Insertor) -> SQLModel:
  """
  Create a new row in db if the selector insertor does not find existing row in db. Update the row
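
For orientation, here is a minimal usage sketch of the insert_batch_data helper added above; the Dummy table, the sample rows, and the reuse of ecodev_core.db_connection.engine are illustrative assumptions, not code shipped in the package.

from sqlmodel import Field, Session, SQLModel

from ecodev_core.db_connection import engine
from ecodev_core.db_insertion import insert_batch_data


class Dummy(SQLModel, table=True):
    """Illustrative table, not part of ecodev_core."""
    id: int | None = Field(default=None, primary_key=True)
    name: str = ''


# Rows may be plain dicts (instantiated via the schema) or SQLModel objects.
rows = [{'name': 'a'}, {'name': 'b'}]
with Session(engine) as session:
    # Batched plain inserts: no pre-existence check, so clear matching rows first
    # to avoid duplicates (see the docstring warning above).
    insert_batch_data(rows, session, raw_db_schema=Dummy)
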
ecodev_core/db_retrieval.py CHANGED
@@ -22,6 +22,7 @@ from sqlmodel.sql.expression import SelectOfScalar
  from ecodev_core.db_connection import engine
  from ecodev_core.db_filters import SERVER_SIDE_FILTERS
  from ecodev_core.db_filters import ServerSideFilter
+ from ecodev_core.db_upsertion import FILTER_ON
  from ecodev_core.list_utils import first_or_default
  from ecodev_core.pydantic_utils import Frozen

ecodev_core/db_upsertion.py CHANGED
@@ -132,3 +132,34 @@ def upsert_data(data: list[dict | SQLModelMetaclass],
  else:
  session.add(new_object)
  session.commit()
+
+
+ def get_sfield_columns(db_model: SQLModelMetaclass) -> list[str]:
+ """
+ Get all the columns flagged as sfields from schema
+ Args:
+ db_model (SQLModelMetaclass): db_model
+ Returns:
+ list of str with the names of the columns
+ """
+ return [
+ x.name
+ for x in inspect(db_model).c
+ if x.info.get(FILTER_ON) is True
+ ]
+
+
+ def filter_to_sfield_dict(row: dict | SQLModelMetaclass,
+ db_schema: SQLModelMetaclass | None = None) \
+ -> dict[str, dict | SQLModelMetaclass]:
+ """
+ Returns a dict with only sfields from object
+ Args:
+ row: any object with ecodev_core field and sfield
+ db_schema (SQLModelMetaclass): db_schema. Use the schema of row if not specified
+ Returns:
+ dict
+ """
+ return {pk: getattr(row, pk)
+ for pk in get_sfield_columns(db_schema or row.__class__)}
+
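
A short sketch of how the two new helpers could be used together; it assumes that ecodev_core's field/sfield wrappers accept the usual SQLModel Field keyword arguments and that sfield is what flags a column with FILTER_ON, so treat the model below as illustrative only.

from sqlmodel import SQLModel

from ecodev_core import field, filter_to_sfield_dict, get_sfield_columns, sfield


class Product(SQLModel, table=True):
    """Illustrative table, not part of ecodev_core."""
    id: int | None = field(default=None, primary_key=True)
    code: str = sfield(default='')   # assumed to set the FILTER_ON flag in the column info
    label: str = field(default='')


row = Product(id=1, code='A-42', label='widget')
print(get_sfield_columns(Product))   # expected: ['code']
print(filter_to_sfield_dict(row))    # expected: {'code': 'A-42'}
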
ecodev_core/email_sender.py CHANGED
@@ -11,6 +11,8 @@ from ssl import create_default_context
  from pydantic_settings import BaseSettings
  from pydantic_settings import SettingsConfigDict

+ from ecodev_core.settings import SETTINGS
+

  class EmailAuth(BaseSettings):
  """
@@ -19,10 +21,15 @@ class EmailAuth(BaseSettings):
  email_smtp: str = ''
  email_sender: str = ''
  email_password: str = ''
+ email_port: int = 587
  model_config = SettingsConfigDict(env_file='.env')


- EMAIL_AUTH = EmailAuth()
+ EMAIL_AUTH, EMAIL_SETTINGS = EmailAuth(), SETTINGS.smtp # type: ignore[attr-defined]
+ _SENDER = EMAIL_SETTINGS.email_sender or EMAIL_AUTH.email_sender
+ _SMTP = EMAIL_SETTINGS.email_smtp or EMAIL_AUTH.email_smtp
+ _PASSWD = EMAIL_SETTINGS.email_password or EMAIL_AUTH.email_password
+ _PORT = EMAIL_SETTINGS.email_port or EMAIL_AUTH.email_port


  def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None = None) -> None:
@@ -36,7 +43,7 @@ def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None
  - images: if any, the Dict of image tags:image paths to incorporate in the email
  """
  em = MIMEMultipart('related')
- em['From'] = EMAIL_AUTH.email_sender
+ em['From'] = _SENDER
  em['To'] = email
  em['Subject'] = topic
  em.attach(MIMEText(body, 'html'))
@@ -46,8 +53,8 @@ def send_email(email: str, body: str, topic: str, images: dict[str, Path] | None
  img.add_header('Content-ID', f'<{tag}>')
  em.attach(img)

- with SMTP(EMAIL_AUTH.email_smtp, 587) as server:
+ with SMTP(_SMTP, _PORT) as server:
  server.ehlo()
  server.starttls(context=create_default_context())
- server.login(EMAIL_AUTH.email_sender, EMAIL_AUTH.email_password)
- server.sendmail(EMAIL_AUTH.email_sender, email, em.as_string())
+ server.login(_SENDER, _PASSWD)
+ server.sendmail(_SENDER, email, em.as_string())
ecodev_core/es_connection.py CHANGED
@@ -11,6 +11,7 @@ from pydantic_settings import BaseSettings
  from pydantic_settings import SettingsConfigDict

  from ecodev_core.logger import logger_get
+ from ecodev_core.settings import SETTINGS

  ES_CLIENT: Union[Elasticsearch, None] = None
  log = logger_get(__name__)
@@ -21,15 +22,18 @@ class ESAuth(BaseSettings):
  """
  Simple ES authentication configuration class
  """
- host: str
- user: str
- password: str
- port: int
- index: str
+ host: str = ''
+ user: str = ''
+ password: str = ''
+ port: int = 9200
+ index: str = ''
  model_config = SettingsConfigDict(env_file='.env', env_prefix='ES_')


- ES_AUTH = ESAuth() # type: ignore
+ ES_AUTH, ES_SETTINGS = ESAuth(), SETTINGS.elastic_search # type: ignore[attr-defined]
+ _HOST, _PORT = ES_SETTINGS.host or ES_AUTH.host, ES_SETTINGS.port or ES_AUTH.port
+ _USER, _PASSWD = ES_SETTINGS.user or ES_AUTH.user, ES_SETTINGS.password or ES_AUTH.password
+ _INDEX = ES_SETTINGS.index or ES_AUTH.index


  def get_es_client():
@@ -39,8 +43,7 @@ def get_es_client():
  global ES_CLIENT

  if ES_CLIENT is None:
- ES_CLIENT = Elasticsearch(f'http://{ES_AUTH.host}:{ES_AUTH.port}/',
- basic_auth=[ES_AUTH.user, ES_AUTH.password])
+ ES_CLIENT = Elasticsearch(f'http://{_HOST}:{_PORT}/', basic_auth=[_USER, _PASSWD])

  return ES_CLIENT

@@ -51,11 +54,11 @@ def create_es_index(body: dict) -> None:
  """
  client = get_es_client()
  try:
- client.indices.delete(index=ES_AUTH.index)
+ client.indices.delete(index=_INDEX)
  except Exception:
  pass
- client.indices.create(index=ES_AUTH.index, body=body)
- log.info(f'index {ES_AUTH.index} created')
+ client.indices.create(index=_INDEX, body=body)
+ log.info(f'index {_INDEX} created')


  def insert_es_fields(operations: list[dict], batch_size: int = ES_BATCH_SIZE) -> None:
@@ -66,11 +69,11 @@ def insert_es_fields(operations: list[dict], batch_size: int = ES_BATCH_SIZE) ->
  batches = [list(operations)[i:i + batch_size] for i in range(0, len(operations), batch_size)]
  log.info('indexing fields')
  for batch in progressbar.progressbar(batches, redirect_stdout=False):
- helpers.bulk(client, batch, index=ES_AUTH.index)
+ helpers.bulk(client, batch, index=_INDEX)


  def retrieve_es_fields(body: dict[str, Any]) -> list[dict]:
  """
  Core call to the elasticsearch index
  """
- return get_es_client().search(index=ES_AUTH.index, body=body)
+ return get_es_client().search(index=_INDEX, body=body)
ecodev_core/settings.py CHANGED
@@ -1,6 +1,7 @@
  """
  Module defining a dynamic setting class
  """
+ from contextlib import suppress
  from pathlib import Path

  from pydantic.v1.utils import deep_update
@@ -9,18 +10,24 @@ from pydantic_settings import SettingsConfigDict

  from ecodev_core.deployment import Deployment
  from ecodev_core.list_utils import dict_to_class
+ from ecodev_core.logger import logger_get
  from ecodev_core.read_write import load_yaml_file

+ log = logger_get(__name__)
+

  class DeploymentSetting(BaseSettings):
  """
  Settings class used to load the deployment type from environment variables.
  """
  environment: str = 'local'
+ base_path: str = '/app'
  model_config = SettingsConfigDict(env_file='.env')


- DEPLOYMENT = Deployment(DeploymentSetting().environment.lower())
+ DEPLOYMENT_SETTINGS = DeploymentSetting()
+ DEPLOYMENT = Deployment(DEPLOYMENT_SETTINGS.environment.lower())
+ BASE_PATH = Path(DEPLOYMENT_SETTINGS.base_path)


  class Settings:
@@ -29,15 +36,21 @@ class Settings:
  this configuration with additional information coming from a secret file.
  """

- def __init__(self, base_path: Path = Path('/app'), deployment: Deployment = DEPLOYMENT):
+ def __init__(self, base_path: Path = BASE_PATH, deployment: Deployment = DEPLOYMENT):
  """
  Dynamically setting Settings attributes, doing so recursively. Attributes are loaded
  from config file, possibly overwriting some of this configuration with additional
  information coming from a secret file.
  """
  self.deployment = deployment
- data = load_yaml_file(base_path / 'config' / f'{deployment.value}.yaml')
- if (secrets_file := base_path / 'secrets' / f'{deployment.value}.yaml').exists():
- data = deep_update(data, load_yaml_file(secrets_file))
- for k, v in dict_to_class(data).items():
- setattr(self, k, v)
+
+ with suppress(FileNotFoundError):
+ log.info((base_path / 'config' / f'{deployment.value}.yaml').exists())
+ data = load_yaml_file(base_path / 'config' / f'{deployment.value}.yaml')
+ if (secrets_file := base_path / 'secrets' / f'{deployment.value}.yaml').exists():
+ data = deep_update(data, load_yaml_file(secrets_file))
+ for k, v in dict_to_class(data).items():
+ setattr(self, k, v)
+
+
+ SETTINGS = Settings()
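
To illustrate the configuration fallback pattern used throughout this release, here is a hedged sketch; the yaml block and key names are assumptions inferred from the attribute accesses above (SETTINGS.database, SETTINGS.backup, SETTINGS.smtp, SETTINGS.elastic_search, SETTINGS.authentication), not a documented schema.

# Hypothetical /app/config/local.yaml (keys assumed, not documented):
#
# database:
#   db_host: db
#   db_port: 5432
# smtp:
#   email_smtp: smtp.example.com

from ecodev_core.settings import SETTINGS

# dict_to_class exposes each yaml mapping as attribute-accessible objects, so the
# consuming modules prefer the yaml value and fall back to the .env-backed
# pydantic settings through `or` chains such as:
db_host = SETTINGS.database.db_host or 'value from DbSettings (.env)'  # type: ignore[attr-defined]
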
ecodev_core-0.0.49.dist-info/METADATA → ecodev_core-0.0.51.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ecodev-core
- Version: 0.0.49
+ Version: 0.0.51
  Summary: Low level sqlmodel/fastapi/pydantic building blocks
  License: MIT
  Author: Thomas Epelbaum
ecodev_core-0.0.49.dist-info/RECORD → ecodev_core-0.0.51.dist-info/RECORD CHANGED
@@ -1,21 +1,21 @@
- ecodev_core/__init__.py,sha256=cIICQkHHfjmPIpLnpGlJ6GzGDvFT51bDeI8c1Kg3MH0,5781
+ ecodev_core/__init__.py,sha256=YsvkU4bKXgYW_0jWVx0Z7_31UUQCxfADcVRxR-CcaxI,5943
  ecodev_core/app_activity.py,sha256=KBtI-35LBLPDppFB7xjxWthXQrY3Z_aGDnC-HrW8Ea0,4641
  ecodev_core/app_rights.py,sha256=RZPdDtydFqc_nFj96huKAc56BS0qS6ScKv4Kghqd6lc,726
  ecodev_core/app_user.py,sha256=r1bqA4H08x53XmxmjwyGKl_PFjYQazzBbVErdkztqeE,2947
- ecodev_core/auth_configuration.py,sha256=R8XH674J1HQSxMr-abvrG0zRqANRf07eMLiG4yB04gM,447
- ecodev_core/authentication.py,sha256=aLMk2_fn1Fodrby2ywZraB3JTSsSrPsBiQq0ag0ySiY,10023
- ecodev_core/backup.py,sha256=tay3-m54gfLWpEG8as83wlj8EH31be730k1PjZaZLas,3653
+ ecodev_core/auth_configuration.py,sha256=qZ1Dkk7n1AH7w0tVKQ8AYswukOeMZH6mmbixPEAQnJ8,764
+ ecodev_core/authentication.py,sha256=HYi4C7cHFwGUoskiMK2q-X7QeGofONPYWB55kxzJ6vI,10093
+ ecodev_core/backup.py,sha256=N5AtoqtHJRp92Bj0Nr7WW5WDcpjTIET8haxZoYDOtyI,3890
  ecodev_core/check_dependencies.py,sha256=aFn8GI4eBbuJT8RxsfhSSnlpNYYj_LPOH-tZF0EqfKQ,6917
  ecodev_core/custom_equal.py,sha256=2gRn0qpyJ8-Kw9GQSueu0nLngLrRrwyMPlP6zqPac0U,899
- ecodev_core/db_connection.py,sha256=bc5MujZ57f204wTsuNVdn1JdP-zBzkDJxHmdxBDTiNs,2286
+ ecodev_core/db_connection.py,sha256=hhqeyTrl0DlQA7RkUs6pIOpZeE3yS_Q5mqj5uGPfG_Y,2569
  ecodev_core/db_filters.py,sha256=T_5JVF27UEu7sC6NOm7-W3_Y0GLfbWQO_EeTXcD2cv8,5041
- ecodev_core/db_insertion.py,sha256=RSCyAlUObbBlWJuMRX-YFY4VgtWqYLdwRqMWw--x95Y,3646
- ecodev_core/db_retrieval.py,sha256=IxyF3ZtKgACLiNFggK7boKggvMRKYDRD2IimxU4dap4,7345
- ecodev_core/db_upsertion.py,sha256=xL70gr3_NTpMDSGnakzE3mm_i3xm-dfgbD9zYCYjc64,4924
+ ecodev_core/db_insertion.py,sha256=k-r798MMrW1sRb-gb8lQTxyJrb4QP5iZT8GDzCYYwlo,4544
+ ecodev_core/db_retrieval.py,sha256=sCP7TDGIcTOK5gT3Inga91bE4S31HbQPw4yI22WJbss,7392
+ ecodev_core/db_upsertion.py,sha256=_OdCILP1NEw1hkbvTn9ZOF5YkU9U02Fj_U3E7uY5AoI,5861
  ecodev_core/deployment.py,sha256=z8ACI00EtKknXOB8xyPwYIXTvPjIDOH9z9cBGEU0YrA,281
- ecodev_core/email_sender.py,sha256=XD7jAVXhGzvbiHqMhK9_aTEIS70Lw_CmPeAxRZGji-Y,1610
+ ecodev_core/email_sender.py,sha256=V3UGweuq6Iy09Z9to8HzM6JOVDVGHZXHGjUSkW94Tac,1912
  ecodev_core/enum_utils.py,sha256=BkQ4YQ97tXBYmMcQiSIi0mbioD5CgVU79myg1BBAXuA,556
- ecodev_core/es_connection.py,sha256=3z8KWF9yUzCW4xC9nhLlIcoW3Gw6m19MvH4Z6nxy7R4,1967
+ ecodev_core/es_connection.py,sha256=WC2_BIWBoxgihF1tyHhHsBlcFhM6nZD7eDrPuoJJOqI,2208
  ecodev_core/list_utils.py,sha256=QFchUnD9CvMiBFBUaxkZTyDyEqpIJBwCNPubNUSl8TA,4877
  ecodev_core/logger.py,sha256=_v63vq7jh72RRkzajHB9QkRfEPcwCMNcLh7maLeKwRg,3596
  ecodev_core/pandas_utils.py,sha256=Juc6gvPnoBiSVF2SR6_vfMi5W-QEkY3fnpo5ROB1L9s,2191
@@ -23,10 +23,10 @@ ecodev_core/permissions.py,sha256=WAx-ilMu8LlQp2sjJVdkhNQieytEaEm8577ZF1HWeTY,50
  ecodev_core/pydantic_utils.py,sha256=e3GH50JmcpTmd2UgrB94QSwWOlOCW3WIlVdyX9C4T-U,741
  ecodev_core/read_write.py,sha256=YIGRERvFHU7vy-JIaCWAza4CPMysLRUHKJxN-ZgFmu0,1208
  ecodev_core/safe_utils.py,sha256=Q8N15El1tSxZJJsy1i_1CCycuBN1_98QQoHmYJRcLIY,6904
- ecodev_core/settings.py,sha256=ARAPkXxggVUsYqSQIAgCK8C2DKSMPia1CekULn428bA,1562
+ ecodev_core/settings.py,sha256=ucN370enphetG3LIOC7uHV8qYo4rlx4neYuHnpxSKNI,1949
  ecodev_core/sqlmodel_utils.py,sha256=t57H3QPtKRy4ujic1clMK_2L4p0yjGJLZbDjHPZ8M94,453
  ecodev_core/version.py,sha256=eyIf8KkW_t-hMuYFIoy0cUlNaMewLe6i45m2HKZKh0Q,4403
- ecodev_core-0.0.49.dist-info/LICENSE.md,sha256=8dqVJEbwXjPWjjRKjdLMym5k9Gi8hwtrHh84sti6KIs,1068
- ecodev_core-0.0.49.dist-info/METADATA,sha256=e9xDBogoX5SU9v4tCFbcW_Y0zgHAft8YIisV3u_wX5Y,3509
- ecodev_core-0.0.49.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- ecodev_core-0.0.49.dist-info/RECORD,,
+ ecodev_core-0.0.51.dist-info/LICENSE.md,sha256=8dqVJEbwXjPWjjRKjdLMym5k9Gi8hwtrHh84sti6KIs,1068
+ ecodev_core-0.0.51.dist-info/METADATA,sha256=DGtUqB01Nbk_oAkGNYY20AbtrWBC3xfrpdH_SYVxL1c,3509
+ ecodev_core-0.0.51.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ ecodev_core-0.0.51.dist-info/RECORD,,