ecodev-core 0.0.48__tar.gz → 0.0.50__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ecodev-core might be problematic.
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/PKG-INFO +1 -1
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/__init__.py +3 -1
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/db_insertion.py +22 -2
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/db_retrieval.py +1 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/db_upsertion.py +31 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/version.py +5 -5
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/pyproject.toml +1 -1
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/LICENSE.md +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/README.md +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/app_activity.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/app_rights.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/app_user.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/auth_configuration.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/authentication.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/backup.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/check_dependencies.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/custom_equal.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/db_connection.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/db_filters.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/deployment.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/email_sender.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/enum_utils.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/es_connection.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/list_utils.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/logger.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/pandas_utils.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/permissions.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/pydantic_utils.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/read_write.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/safe_utils.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/settings.py +0 -0
- {ecodev_core-0.0.48 → ecodev_core-0.0.50}/ecodev_core/sqlmodel_utils.py +0 -0
ecodev_core/__init__.py

```diff
@@ -46,6 +46,8 @@ from ecodev_core.db_upsertion import upsert_data
 from ecodev_core.db_upsertion import upsert_deletor
 from ecodev_core.db_upsertion import upsert_df_data
 from ecodev_core.db_upsertion import upsert_selector
+from ecodev_core.db_upsertion import get_sfield_columns
+from ecodev_core.db_upsertion import filter_to_sfield_dict
 from ecodev_core.deployment import Deployment
 from ecodev_core.email_sender import send_email
 from ecodev_core.enum_utils import enum_converter
@@ -103,4 +105,4 @@ __all__ = [
     'datify', 'safe_drop_columns', 'get_value', 'is_null', 'send_email', 'first_func_or_default',
     'sort_by_keys', 'sort_by_values', 'Settings', 'load_yaml_file', 'Deployment', 'Version',
     'sfield', 'field', 'upsert_df_data', 'upsert_deletor', 'get_row_versions', 'get_versions',
-    'db_to_value', 'upsert_data', 'upsert_selector']
+    'db_to_value', 'upsert_data', 'upsert_selector', 'get_sfield_columns', 'filter_to_sfield_dict']
```
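With this release, `get_sfield_columns` and `filter_to_sfield_dict` become part of the package's public API. A one-line import sketch (a fuller usage sketch follows the db_upsertion.py diff below):

```python
# The two helpers added to __all__ above are now importable from the package root.
from ecodev_core import filter_to_sfield_dict, get_sfield_columns
```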
ecodev_core/db_insertion.py

```diff
@@ -10,19 +10,21 @@ from typing import List
 from typing import Union
 
 import pandas as pd
+import progressbar
 from fastapi import BackgroundTasks
 from fastapi import UploadFile
 from pandas import ExcelFile
 from sqlmodel import Session
 from sqlmodel import SQLModel
+from sqlmodel.main import SQLModelMetaclass
 from sqlmodel.sql.expression import SelectOfScalar
 
+from ecodev_core.db_upsertion import BATCH_SIZE
 from ecodev_core.logger import log_critical
 from ecodev_core.logger import logger_get
 from ecodev_core.pydantic_utils import CustomFrozen
 from ecodev_core.safe_utils import SimpleReturn
 
-
 log = logger_get(__name__)
 
 
@@ -72,7 +74,7 @@ async def insert_file(file: UploadFile, insertor: Insertor, session: Session) ->
     insert_data(df_raw, insertor, session)
 
 
-def insert_data(df:
+def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session: Session) -> None:
     """
     Inserts a csv/df into a database
     """
@@ -81,6 +83,24 @@ def insert_data(df: Union[pd.DataFrame, ExcelFile], insertor: Insertor, session
     session.commit()
 
 
+def insert_batch_data(data: list[dict | SQLModelMetaclass],
+                      session: Session,
+                      raw_db_schema: SQLModelMetaclass | None = None) -> None:
+    """
+    Insert the passed list of dicts (corresponding to db_schema) into db_schema db.
+    Warning: this only inserts data, without checking for pre-existence.
+    Ensure deleting the data before using it to avoid duplicates.
+    """
+    db_schema = raw_db_schema or data[0].__class__
+    batches = [data[i:i + BATCH_SIZE] for i in range(0, len(data), BATCH_SIZE)]
+
+    for batch in progressbar.progressbar(batches, redirect_stdout=False):
+        for row in batch:
+            new_object = db_schema(**row) if isinstance(row, dict) else row
+            session.add(new_object)
+        session.commit()
+
+
 def create_or_update(session: Session, row: Dict, insertor: Insertor) -> SQLModel:
     """
     Create a new row in db if the selector insertor does not find existing row in db. Update the row
```
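A minimal usage sketch for the new `insert_batch_data` helper shown above. The `Item` table, the in-memory SQLite engine and the sample rows are hypothetical illustrations, not part of ecodev-core:

```python
# Hypothetical example: bulk-insert plain dicts with the new insert_batch_data helper.
from sqlmodel import Field, Session, SQLModel, create_engine

from ecodev_core.db_insertion import insert_batch_data


class Item(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    name: str


engine = create_engine('sqlite://')  # in-memory db, for illustration only
SQLModel.metadata.create_all(engine)

rows = [{'name': f'item-{i}'} for i in range(10_000)]  # plain dicts matching Item columns

with Session(engine) as session:
    # insert only: no pre-existence check is made, so clear duplicates beforehand
    insert_batch_data(rows, session, raw_db_schema=Item)
```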
ecodev_core/db_retrieval.py

```diff
@@ -22,6 +22,7 @@ from sqlmodel.sql.expression import SelectOfScalar
 from ecodev_core.db_connection import engine
 from ecodev_core.db_filters import SERVER_SIDE_FILTERS
 from ecodev_core.db_filters import ServerSideFilter
+from ecodev_core.db_upsertion import FILTER_ON
 from ecodev_core.list_utils import first_or_default
 from ecodev_core.pydantic_utils import Frozen
 
```
ecodev_core/db_upsertion.py

```diff
@@ -132,3 +132,34 @@ def upsert_data(data: list[dict | SQLModelMetaclass],
         else:
             session.add(new_object)
         session.commit()
+
+
+def get_sfield_columns(db_model: SQLModelMetaclass) -> list[str]:
+    """
+    get all the columsn flagged as sfields from schema
+    Args:
+        db_model (SQLModelMetaclass): db_model
+    Returns:
+        list of str with the names of the columns
+    """
+    return [
+        x.name
+        for x in inspect(db_model).c
+        if x.info.get(FILTER_ON) is True
+    ]
+
+
+def filter_to_sfield_dict(row: dict | SQLModelMetaclass,
+                          db_schema: SQLModelMetaclass | None = None) \
+        -> dict[str, dict | SQLModelMetaclass]:
+    """
+    Returns a dict with only sfields from object
+    Args:
+        row: any object with ecodev_core field and sfield
+        db_schema (SQLModelMetaclass): db_schema. Use the schema of row if not specified
+    Returns:
+        dict
+    """
+    return {pk: getattr(row, pk)
+            for pk in get_sfield_columns(db_schema or row.__class__)}
+
```
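A hedged sketch of the two new helpers in use. `MyTable` is hypothetical, and the keyword forms of ecodev-core's `field`/`sfield` wrappers used below are assumptions, not taken from this diff:

```python
# Hypothetical model: code/year flagged as sfields, value as a plain field.
# The field(...)/sfield(...) call forms below are assumed, not shown in this diff.
from sqlmodel import SQLModel

from ecodev_core import field, filter_to_sfield_dict, get_sfield_columns, sfield


class MyTable(SQLModel, table=True):
    id: int | None = field(default=None, primary_key=True)
    code: str = sfield(default='')
    year: int = sfield(default=2024)
    value: float = field(default=0.0)


print(get_sfield_columns(MyTable))   # hypothetically: ['code', 'year']

row = MyTable(code='FR', year=2023, value=1.5)
print(filter_to_sfield_dict(row))    # hypothetically: {'code': 'FR', 'year': 2023}
```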
ecodev_core/version.py

```diff
@@ -130,15 +130,15 @@ def _value_to_db(value: COL_TYPES, col_type: ColType) -> str | None:
 
 def db_to_value(db_value: str | None, col_type: type | EnumType) -> COL_TYPES:
     """
-    Convert back a str version value stored to a real value (types
+    Convert back a str version value stored to a real value (types handled listed in ColType)
     NB: assumption that if the type is not known, this is an enum type.
     """
     if db_value is None:
         return None
-    if
+    if col_type in [int, str, float]:
         return col_type(db_value)
-    if
+    if col_type == bool:
         return db_value == 'True'
-    if
+    if col_type == datetime:
         return datetime.strptime(db_value, '%Y-%m-%d %H:%M:%S.%f')
-    return col_type[db_value]
+    return col_type[db_value]  # type: ignore[index]
```
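A small round-trip illustration of the conversion branches spelled out above; the sample values are chosen here, not taken from the package:

```python
from datetime import datetime

from ecodev_core import db_to_value

# each branch of db_to_value shown in the hunk above, exercised with sample values
assert db_to_value('42', int) == 42
assert db_to_value('True', bool) is True
assert db_to_value(None, float) is None
assert db_to_value('2024-01-31 12:00:00.000000', datetime) == datetime(2024, 1, 31, 12)
```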
The remaining files listed above with +0 -0 are unchanged between 0.0.48 and 0.0.50.