database-wrapper 0.1.28__tar.gz
This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- database_wrapper-0.1.28/PKG-INFO +69 -0
- database_wrapper-0.1.28/README.md +13 -0
- database_wrapper-0.1.28/database_wrapper/__init__.py +29 -0
- database_wrapper-0.1.28/database_wrapper/config.py +9 -0
- database_wrapper-0.1.28/database_wrapper/db_backend.py +161 -0
- database_wrapper-0.1.28/database_wrapper/db_data_model.py +323 -0
- database_wrapper-0.1.28/database_wrapper/db_wrapper.py +817 -0
- database_wrapper-0.1.28/database_wrapper/py.typed +0 -0
- database_wrapper-0.1.28/database_wrapper/utils/__init__.py +7 -0
- database_wrapper-0.1.28/database_wrapper/utils/dataclass_addons.py +23 -0
- database_wrapper-0.1.28/database_wrapper/utils/timer.py +297 -0
- database_wrapper-0.1.28/database_wrapper.egg-info/PKG-INFO +69 -0
- database_wrapper-0.1.28/database_wrapper.egg-info/SOURCES.txt +16 -0
- database_wrapper-0.1.28/database_wrapper.egg-info/dependency_links.txt +1 -0
- database_wrapper-0.1.28/database_wrapper.egg-info/requires.txt +27 -0
- database_wrapper-0.1.28/database_wrapper.egg-info/top_level.txt +1 -0
- database_wrapper-0.1.28/pyproject.toml +73 -0
- database_wrapper-0.1.28/setup.cfg +4 -0
database_wrapper-0.1.28/PKG-INFO
@@ -0,0 +1,69 @@
+Metadata-Version: 2.1
+Name: database_wrapper
+Version: 0.1.28
+Summary: A Different Approach to Database Wrappers in Python
+Author-email: Gints Murans <gm@gm.lv>
+License: GNU General Public License v3.0 (GPL-3.0)
+Project-URL: Homepage, https://github.com/gintsmurans/py_database_wrapper
+Project-URL: Documentation, https://github.com/gintsmurans/py_database_wrapper
+Project-URL: Changes, https://github.com/gintsmurans/py_database_wrapper
+Project-URL: Code, https://github.com/gintsmurans/py_database_wrapper
+Project-URL: Issue Tracker, https://github.com/gintsmurans/py_database_wrapper/issues
+Project-URL: Download, https://pypi.org/project/database_wrapper/
+Keywords: database,wrapper,python,pgsql,mysql,mssql,sqlite
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: POSIX
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Database
+Classifier: Topic :: Database :: Front-Ends
+Classifier: Topic :: Software Development
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+Provides-Extra: pgsql
+Requires-Dist: database_wrapper_pgsql==0.1.28; extra == "pgsql"
+Provides-Extra: mysql
+Requires-Dist: database_wrapper_mysql==0.1.28; extra == "mysql"
+Provides-Extra: mssql
+Requires-Dist: database_wrapper_mssql==0.1.28; extra == "mssql"
+Provides-Extra: sqlite
+Requires-Dist: database_wrapper_sqlite==0.1.28; extra == "sqlite"
+Provides-Extra: all
+Requires-Dist: database_wrapper[mssql,mysql,pgsql,sqlite]; extra == "all"
+Provides-Extra: dev
+Requires-Dist: ast-comments>=1.1.2; extra == "dev"
+Requires-Dist: codespell>=2.2; extra == "dev"
+Requires-Dist: build>=1.2.1; extra == "dev"
+Requires-Dist: black>=24.1.0; extra == "dev"
+Requires-Dist: types-setuptools>=61.0.0; extra == "dev"
+Requires-Dist: types-pymssql>=2.1.0; extra == "dev"
+Requires-Dist: psycopg[binary]>=3.2.0; extra == "dev"
+Requires-Dist: psycopg[pool]>=3.2.0; extra == "dev"
+Requires-Dist: mysqlclient>=2.2.2; extra == "dev"
+Requires-Dist: pymssql>=2.2.10; extra == "dev"
+
+# database_wrapper
+
+_Part of the `database_wrapper` package._
+
+This package is a base package for database wrappers. It is not intended to be used directly, but rather to be used via one of the database specific packages.
+
+
+See the README.md files in the database specific packages for more information.
+
+* [database_wrapper_pgsql](https://pypi.org/project/database_wrapper_pgsql/)
+* [database_wrapper_mysql](https://pypi.org/project/database_wrapper_mysql/)
+* [database_wrapper_mssql](https://pypi.org/project/database_wrapper_mssql/)
+* [database_wrapper_sqlite](https://pypi.org/project/database_wrapper_sqlite/)
database_wrapper-0.1.28/README.md
@@ -0,0 +1,13 @@
+# database_wrapper
+
+_Part of the `database_wrapper` package._
+
+This package is a base package for database wrappers. It is not intended to be used directly, but rather to be used via one of the database specific packages.
+
+
+See the README.md files in the database specific packages for more information.
+
+* [database_wrapper_pgsql](https://pypi.org/project/database_wrapper_pgsql/)
+* [database_wrapper_mysql](https://pypi.org/project/database_wrapper_mysql/)
+* [database_wrapper_mssql](https://pypi.org/project/database_wrapper_mssql/)
+* [database_wrapper_sqlite](https://pypi.org/project/database_wrapper_sqlite/)
database_wrapper-0.1.28/database_wrapper/__init__.py
@@ -0,0 +1,29 @@
+"""
+database_wrapper package - Base for database wrappers
+"""
+
+# Copyright 2024 Gints Murans
+
+import logging
+
+from . import utils
+from .db_backend import DatabaseBackend
+from .db_data_model import DBDataModel, DBDefaultsDataModel
+from .db_wrapper import DBWrapper, T, OrderByItem
+
+# Set the logger to a quiet default, can be enabled if needed
+logger = logging.getLogger("database_wrapper")
+if logger.level == logging.NOTSET:
+    logger.setLevel(logging.WARNING)
+
+
+# Expose the classes
+__all__ = [
+    "DatabaseBackend",
+    "DBDataModel",
+    "DBDefaultsDataModel",
+    "DBWrapper",
+    "T",
+    "OrderByItem",
+    "utils",
+]
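The `__init__.py` above re-exports the public classes and keeps the `database_wrapper` logger at a quiet WARNING default unless it was already configured. A minimal sketch of importing the exposed names and turning on debug output, assuming the package is installed from PyPI (the handler setup below is illustrative, not part of the package):

```python
import logging

# Names re-exported by database_wrapper/__init__.py above
from database_wrapper import DatabaseBackend, DBDataModel, DBDefaultsDataModel, DBWrapper

# The package keeps its logger quiet (WARNING) by default;
# raise the level and attach a handler to see its debug messages.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("database_wrapper").setLevel(logging.DEBUG)
```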
database_wrapper-0.1.28/database_wrapper/config.py
@@ -0,0 +1,9 @@
+from typing import Any
+
+CONFIG: dict[str, Any] = {
+    # These are supposed to be set automatically by a git pre-compile script
+    # They are one git commit hash behind, if used automatically
+    "git_commit_hash": "0a78ab759900ae378034f586a361aa24f43aad15",
+    "git_commit_date": "08.11.2024 15:05",
+    "app_version": "0.1.28",
+}
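`config.py` only carries build metadata stamped by a pre-compile script. A trivial illustration of reading it (not part of any documented API):

```python
# Read the build metadata baked in by the pre-compile script
from database_wrapper.config import CONFIG

print(CONFIG["app_version"])      # "0.1.28"
print(CONFIG["git_commit_hash"])  # one commit behind HEAD, per the comment above
```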
database_wrapper-0.1.28/database_wrapper/db_backend.py
@@ -0,0 +1,161 @@
+import logging
+import socket
+
+from typing import Any
+from threading import Event
+from contextvars import ContextVar
+
+from .utils.timer import Timer
+
+
+class DatabaseBackend:
+    connection: Any
+    cursor: Any
+    contextConnection: ContextVar[Any | None]
+    contextAsyncConnection: ContextVar[Any | None]
+
+    config: Any
+
+    connectionTimeout: int
+    slowDownTimeout: int = 5
+
+    name: str
+    logger: logging.Logger
+    timer: ContextVar[Timer | None]
+
+    shutdownRequested: Event
+
+    def __init__(
+        self,
+        dbConfig: Any,
+        connectionTimeout: int = 5,
+        instanceName: str = "database_backend",
+    ) -> None:
+        """
+        Main concept here is that in init we do not connect to database,
+        so that class instances can be safely made regardless of connection statuss.
+
+        Remember to call open() before using this class.
+        Close will be called automatically when class is destroyed.
+        But sometimes in async environment you should call close() proactively.
+        """
+
+        self.config = dbConfig
+        self.connectionTimeout = connectionTimeout
+        self.name = instanceName
+
+        loggerName = f"{__name__}.{self.__class__.__name__}.{self.name}"
+        self.logger = logging.getLogger(loggerName)
+        self.timer = ContextVar(f"db_timer", default=None)
+
+        self.connection = None
+        self.cursor = None
+        self.shutdownRequested = Event()
+        self.contextConnection = ContextVar(f"pg_connection_{self.name}", default=None)
+        self.contextAsyncConnection = ContextVar(
+            f"pg_async_connection_{self.name}", default=None
+        )
+
+    def __del__(self) -> None:
+        """What to do when class is destroyed"""
+        self.logger.debug("Dealloc")
+        self.close()
+
+    # Context
+    def __enter__(self) -> tuple[Any, Any]:
+        """Context manager"""
+        raise Exception("Not implemented")
+
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+        """Context manager"""
+        raise Exception("Not implemented")
+
+    async def __aenter__(self) -> tuple[Any, Any]:
+        """Context manager"""
+        raise Exception("Not implemented")
+
+    async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+        """Context manager"""
+        raise Exception("Not implemented")
+
+    # Connection
+    def open(self) -> None:
+        """Connect to database"""
+        raise Exception("Not implemented")
+
+    async def openAsync(self) -> None:
+        """Connect to database"""
+        raise Exception("Not implemented")
+
+    def close(self) -> None:
+        """Close connections"""
+        if self.cursor:
+            self.logger.debug("Closing cursor")
+            self.cursor.close()
+            self.cursor = None
+
+        if self.connection:
+            self.logger.debug("Closing connection")
+            self.connection.close()
+            self.connection = None
+
+    def fixSocketTimeouts(self, fd: Any):
+        # Lets do some socket magic
+        s = socket.fromfd(fd, socket.AF_INET, socket.SOCK_STREAM)
+        # Enable sending of keep-alive messages
+        s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+        # Time the connection needs to remain idle before start sending
+        # keepalive probes
+        s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, self.connectionTimeout)
+        # Time between individual keepalive probes
+        s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 1)
+        # The maximum number of keepalive probes should send before dropping
+        # the connection
+        s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 3)
+        # To set timeout for an RTO you must set TCP_USER_TIMEOUT timeout
+        # (in milliseconds) for socket.
+        s.setsockopt(
+            socket.IPPROTO_TCP, socket.TCP_USER_TIMEOUT, self.connectionTimeout * 1000
+        )
+
+    async def newConnection(
+        self,
+    ) -> tuple[Any, Any] | None:
+        """
+        Create new connection
+
+        Used for async context manager and async connection creation
+
+        Returns:
+            tuple[Any, Any] | None: Connection and cursor
+        """
+        raise Exception("Not implemented")
+
+    async def returnConnection(self, connection: Any) -> None:
+        """
+        Return connection to pool
+
+        Used for async context manager and async connections return.
+        For example to return connection to a pool.
+
+        Args:
+            connection (Any): Connection to return to pool
+        """
+        raise Exception("Not implemented")
+
+    # Data
+    def lastInsertId(self) -> int:
+        """Get last inserted row id generated by auto increment"""
+        raise Exception("Not implemented")
+
+    def affectedRows(self) -> int:
+        """Get affected rows count"""
+        raise Exception("Not implemented")
+
+    def commit(self) -> None:
+        """Commit DB queries"""
+        raise Exception("Not implemented")
+
+    def rollback(self) -> None:
+        """Rollback DB queries"""
+        raise Exception("Not implemented")
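`DatabaseBackend` deliberately leaves connection handling to subclasses: `open()`, `commit()`, `rollback()` and the other hooks raise "Not implemented", while `__init__` only stores the config and `close()` tears down whatever `connection`/`cursor` the subclass created. A minimal sketch of what a synchronous subclass could look like, using the stdlib `sqlite3` module purely for illustration (the real `database_wrapper_*` backends are separate packages and more involved; `SqliteBackendSketch` and the `"database"` config key are made up here):

```python
import sqlite3

from database_wrapper import DatabaseBackend


class SqliteBackendSketch(DatabaseBackend):
    def open(self) -> None:
        """Connect to database (fills in the abstract hook above)"""
        self.connection = sqlite3.connect(self.config["database"])
        self.cursor = self.connection.cursor()

    def commit(self) -> None:
        self.connection.commit()

    def rollback(self) -> None:
        self.connection.rollback()

    def lastInsertId(self) -> int:
        return int(self.cursor.lastrowid or 0)

    def affectedRows(self) -> int:
        return int(self.cursor.rowcount)


db = SqliteBackendSketch({"database": ":memory:"}, instanceName="example")
db.open()
db.cursor.execute("CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT)")
db.cursor.execute("INSERT INTO t (name) VALUES (?)", ("demo",))
db.commit()
print(db.lastInsertId())  # 1
db.close()  # closes cursor and connection via the base class
```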
database_wrapper-0.1.28/database_wrapper/db_data_model.py
@@ -0,0 +1,323 @@
+import re
+import json
+import datetime
+import dataclasses
+
+from enum import Enum
+from dataclasses import dataclass, field, asdict
+
+from decimal import Decimal
+from typing import Any
+
+from psycopg import sql
+
+
+@dataclass
+class DBDataModel(object):
+    """
+    Base class for all database models.
+
+    Attributes:
+    - schemaName (str): The name of the schema in the database.
+    - tableName (str): The name of the table in the database.
+    - tableAlias (str): The alias of the table in the database.
+    - idKey (str): The name of the primary key column in the database.
+    - idValue (Any): The value of the primary key for the current instance.
+    - id (int): The primary key value for the current instance.
+
+    Methods:
+    - __post_init__(): Initializes the instance after it has been created.
+    - __repr__(): Returns a string representation of the instance.
+    - __str__(): Returns a JSON string representation of the instance.
+    - toDict(): Returns a dictionary representation of the instance.
+    - toFormattedDict(): Returns a formatted dictionary representation of the instance.
+    - toJsonSchema(): Returns a JSON schema for the instance.
+    - jsonEncoder(obj: Any): Encodes the given object as JSON.
+    - toJsonString(pretty: bool = False): Returns a JSON string representation of the instance.
+    - strToDatetime(value: Any): Converts a string to a datetime object.
+    - strToBool(value: Any): Converts a string to a boolean value.
+    - strToInt(value: Any): Converts a string to an integer value.
+    - validate(): Validates the instance.
+    """
+
+    ######################
+    ### Default fields ###
+    ######################
+
+    @property
+    def schemaName(self) -> str | None:
+        return None
+
+    @property
+    def tableName(self) -> str:
+        raise NotImplementedError("`tableName` property is not implemented")
+
+    @property
+    def tableAlias(self) -> str | None:
+        return None
+
+    @property
+    def idKey(self) -> str:
+        return "id"
+
+    @property
+    def idValue(self) -> Any:
+        return getattr(self, self.idKey, None)
+
+    # Id should be readonly by default and should be always present if record exists
+    id: int = field(
+        default=0,
+        metadata={
+            "db_field": ("id", "bigint"),
+            "store": False,
+            "update": False,
+        },
+    )
+    """id is readonly by default"""
+
+    # Raw data
+    raw_data: dict[str, Any] = field(
+        default_factory=dict,
+        metadata={
+            "db_field": ("raw_data", "jsonb"),
+            "store": False,
+            "update": False,
+        },
+    )
+    """This is for storing temporary raw data"""
+
+    ##########################
+    ### Conversion methods ###
+    ##########################
+
+    def fillDataFromDict(self, kwargs: dict[str, Any]):
+        fieldNames = set([f.name for f in dataclasses.fields(self)])
+        for key in kwargs:
+            if key in fieldNames:
+                setattr(self, key, kwargs[key])
+
+        self.__post_init__()
+
+    # Init data
+    def __post_init__(self):
+        for field_name, field_obj in self.__dataclass_fields__.items():
+            metadata = field_obj.metadata
+            encode = metadata.get("encode", None)
+            if encode is not None:
+                setattr(self, field_name, encode(getattr(self, field_name)))
+
+    # String - representation
+    def __repr__(self) -> str:
+        return "<%s %s>" % (self.__class__.__name__, self.__dict__)
+
+    def __str__(self) -> str:
+        return self.toJsonString()
+
+    # Dict
+    def toDict(self) -> dict[str, Any]:
+        return asdict(self)
+
+    def toFormattedDict(self) -> dict[str, Any]:
+        return self.toDict()
+
+    # JSON
+    def toJsonSchema(self) -> dict[str, Any]:
+        schema: dict[str, Any] = {
+            "type": "object",
+            "properties": {
+                "id": {"type": "number"},
+            },
+        }
+        for field_name, field_obj in self.__dataclass_fields__.items():
+            metadata = field_obj.metadata
+            assert (
+                "db_field" in metadata
+                and isinstance(metadata["db_field"], tuple)
+                and len(metadata["db_field"]) == 2
+            ), f"db_field metadata is not set for {field_name}"
+            fieldType: str = metadata["db_field"][1]
+            schema["properties"][field_name] = {"type": fieldType}
+
+        return schema
+
+    def jsonEncoder(self, obj: Any) -> Any:
+        if isinstance(obj, Decimal):
+            return float(obj)
+
+        if isinstance(obj, datetime.date) or isinstance(obj, datetime.datetime):
+            return obj.strftime("%Y-%m-%dT%H:%M:%S")
+
+        if isinstance(obj, Enum):
+            return obj.value
+
+        if isinstance(obj, int) or isinstance(obj, float) or isinstance(obj, str):
+            return obj
+
+        return str(obj)
+
+    def toJsonString(self, pretty: bool = False) -> str:
+        if pretty:
+            return json.dumps(
+                self.toDict(),
+                ensure_ascii=False,
+                sort_keys=True,
+                indent=4,
+                separators=(",", ": "),
+                default=self.jsonEncoder,
+            )
+
+        return json.dumps(self.toDict(), default=self.jsonEncoder)
+
+    #######################
+    ### Helper methods ####
+    #######################
+
+    @staticmethod
+    def strToDatetime(value: Any) -> datetime.datetime:
+        if isinstance(value, datetime.datetime):
+            return value
+
+        if value and isinstance(value, str):
+            pattern = r"^\d+(\.\d+)?$"
+            if re.match(pattern, value):
+                return datetime.datetime.fromtimestamp(float(value))
+
+            return datetime.datetime.fromisoformat(value)
+
+        return datetime.datetime.now(datetime.UTC)
+
+    @staticmethod
+    def strToBool(value: Any) -> bool:
+        if isinstance(value, bool):
+            return value
+
+        if value:
+            if isinstance(value, str):
+                return value.lower() in ("true", "1")
+
+            if isinstance(value, int):
+                return value == 1
+
+        return False
+
+    @staticmethod
+    def strToInt(value: Any) -> int:
+        if isinstance(value, int):
+            return value
+
+        if value and isinstance(value, str):
+            return int(value)
+
+        return 0
+
+    def validate(self) -> bool:
+        raise NotImplementedError("`validate` is not implemented")
+
+    ########################
+    ### Database methods ###
+    ########################
+
+    def queryBase(self) -> sql.SQL | sql.Composed | str | None:
+        """
+        Base query for all queries
+        """
+        return None
+
+    def storeData(self) -> dict[str, Any] | None:
+        """
+        Store data to database
+        """
+        storeData: dict[str, Any] = {}
+        for field_name, field_obj in self.__dataclass_fields__.items():
+            metadata = field_obj.metadata
+            if "store" in metadata and metadata["store"] == True:
+                storeData[field_name] = getattr(self, field_name)
+
+                if "decode" in metadata and metadata["decode"] is not None:
+                    storeData[field_name] = metadata["decode"](storeData[field_name])
+
+        return storeData
+
+    def updateData(self) -> dict[str, Any] | None:
+        """
+        Update data to database
+        """
+
+        updateData: dict[str, Any] = {}
+        for field_name, field_obj in self.__dataclass_fields__.items():
+            metadata = field_obj.metadata
+            if "update" in metadata and metadata["update"] == True:
+                updateData[field_name] = getattr(self, field_name)
+
+                if "decode" in metadata and metadata["decode"] is not None:
+                    updateData[field_name] = metadata["decode"](updateData[field_name])
+
+        return updateData
+
+
+@dataclass
+class DBDefaultsDataModel(DBDataModel):
+    """
+    This class includes default fields for all database models.
+
+    Attributes:
+    - created_at (datetime.datetime): The timestamp of when the instance was created.
+    - updated_at (datetime.datetime): The timestamp of when the instance was last updated.
+    - enabled (bool): Whether the instance is enabled or not.
+    - deleted (bool): Whether the instance is deleted or not.
+    """
+
+    ######################
+    ### Default fields ###
+    ######################
+
+    created_at: datetime.datetime = field(
+        default_factory=datetime.datetime.now,
+        metadata={
+            "db_field": ("created_at", "timestamptz"),
+            "store": True,
+            "update": False,
+            "encode": lambda value: DBDataModel.strToDatetime(value),  # type: ignore
+            "decode": lambda x: x.isoformat(),  # type: ignore
+        },
+    )
+    """created_at is readonly by default and should be present in all tables"""
+
+    updated_at: datetime.datetime = field(
+        default_factory=datetime.datetime.now,
+        metadata={
+            "db_field": ("updated_at", "timestamptz"),
+            "store": True,
+            "update": True,
+            "encode": lambda value: DBDataModel.strToDatetime(value),  # type: ignore
+            "decode": lambda x: x.isoformat(),  # type: ignore
+        },
+    )
+    """updated_at is readonly by default and should be present in all tables"""
+
+    enabled: bool = field(
+        default=True,
+        metadata={
+            "db_field": ("enabled", "boolean"),
+            "store": False,
+            "update": False,
+        },
+    )
+    deleted: bool = field(
+        default=False,
+        metadata={
+            "db_field": ("deleted", "boolean"),
+            "store": False,
+            "update": False,
+        },
+    )
+
+    def updateData(self) -> dict[str, Any] | None:
+        """
+        Update data to database
+        """
+
+        # Update updated_at
+        self.updated_at = datetime.datetime.now(datetime.UTC)
+
+        return super().updateData()
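`DBDataModel` drives everything from per-field `metadata`: `db_field` names the column and its type, `store`/`update` select which fields `storeData()`/`updateData()` emit, and `encode`/`decode` convert values on init and on output. A minimal sketch of a concrete model built on `DBDefaultsDataModel`; the `users` table and `name` column are made up for illustration, and running it requires `psycopg` since `db_data_model.py` imports `psycopg.sql` (note also that `datetime.datetime.now(datetime.UTC)` in `updateData` needs Python 3.11+, despite the `>=3.8` requirement in the metadata):

```python
from dataclasses import dataclass, field

from database_wrapper import DBDefaultsDataModel


@dataclass
class User(DBDefaultsDataModel):
    """Hypothetical model mapping to a `users` table."""

    @property
    def tableName(self) -> str:
        return "users"

    # Extra column; "store"/"update" opt it into storeData()/updateData()
    name: str = field(
        default="",
        metadata={
            "db_field": ("name", "text"),
            "store": True,
            "update": True,
        },
    )


user = User(name="Alice")
print(user.storeData())   # created_at/updated_at as ISO strings via "decode", plus name
print(user.updateData())  # refreshes updated_at first, then returns update-able fields
print(user.toJsonString(pretty=True))
```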