database-wrapper-pgsql 0.1.28__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- database_wrapper_pgsql-0.1.28/PKG-INFO +93 -0
- database_wrapper_pgsql-0.1.28/README.md +55 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql/__init__.py +25 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql/connector.py +323 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql/db_wrapper_pgsql.py +556 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql/py.typed +0 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql.egg-info/PKG-INFO +93 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql.egg-info/SOURCES.txt +11 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql.egg-info/dependency_links.txt +1 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql.egg-info/requires.txt +3 -0
- database_wrapper_pgsql-0.1.28/database_wrapper_pgsql.egg-info/top_level.txt +1 -0
- database_wrapper_pgsql-0.1.28/pyproject.toml +52 -0
- database_wrapper_pgsql-0.1.28/setup.cfg +4 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: database_wrapper_pgsql
|
|
3
|
+
Version: 0.1.28
|
|
4
|
+
Summary: database_wrapper for PostgreSQL database
|
|
5
|
+
Author-email: Gints Murans <gm@gm.lv>
|
|
6
|
+
License: GNU General Public License v3.0 (GPL-3.0)
|
|
7
|
+
Project-URL: Homepage, https://github.com/gintsmurans/py_database_wrapper
|
|
8
|
+
Project-URL: Documentation, https://github.com/gintsmurans/py_database_wrapper
|
|
9
|
+
Project-URL: Changes, https://github.com/gintsmurans/py_database_wrapper
|
|
10
|
+
Project-URL: Code, https://github.com/gintsmurans/py_database_wrapper
|
|
11
|
+
Project-URL: Issue Tracker, https://github.com/gintsmurans/py_database_wrapper/issues
|
|
12
|
+
Project-URL: Download, https://pypi.org/project/database_wrapper/
|
|
13
|
+
Keywords: database,wrapper,python,postgresql,pgsql
|
|
14
|
+
Classifier: Development Status :: 4 - Beta
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
|
|
17
|
+
Classifier: Operating System :: MacOS :: MacOS X
|
|
18
|
+
Classifier: Operating System :: Microsoft :: Windows
|
|
19
|
+
Classifier: Operating System :: POSIX
|
|
20
|
+
Classifier: Programming Language :: Python :: 3
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
24
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
25
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
26
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
27
|
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
28
|
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
29
|
+
Classifier: Topic :: Database
|
|
30
|
+
Classifier: Topic :: Database :: Front-Ends
|
|
31
|
+
Classifier: Topic :: Software Development
|
|
32
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
33
|
+
Requires-Python: >=3.8
|
|
34
|
+
Description-Content-Type: text/markdown
|
|
35
|
+
Requires-Dist: database_wrapper==0.1.28
|
|
36
|
+
Requires-Dist: psycopg[binary]>=3.2.0
|
|
37
|
+
Requires-Dist: psycopg[pool]>=3.2.0
|
|
38
|
+
|
|
39
|
+
# database_wrapper
|
|
40
|
+
|
|
41
|
+
_Part of the `database_wrapper` package._
|
|
42
|
+
|
|
43
|
+
This python package is a database wrapper for [PostgreSQL](https://www.postgresql.org/) (also called pgsql) databases.
|
|
44
|
+
|
|
45
|
+
## Installation
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
pip install database_wrapper[pgsql]
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Usage
|
|
52
|
+
|
|
53
|
+
```python
|
|
54
|
+
from database_wrapper_pgsql import AsyncPgSQLWithPooling, DBWrapperPgSQL
|
|
55
|
+
|
|
56
|
+
db = AsyncPgSQLWithPooling({
|
|
57
|
+
"hostname": "localhost",
|
|
58
|
+
"port": 3306,
|
|
59
|
+
"username": "root",
|
|
60
|
+
"password": "your_password",
|
|
61
|
+
"database": "my_database"
|
|
62
|
+
})
|
|
63
|
+
db.open()
|
|
64
|
+
dbWrapper = DBWrapperPgSQL(db=db)
|
|
65
|
+
|
|
66
|
+
# Simple query
|
|
67
|
+
aModel = MyModel()
|
|
68
|
+
res = await dbWrapper.getByKey(
|
|
69
|
+
aModel,
|
|
70
|
+
"id",
|
|
71
|
+
3005,
|
|
72
|
+
)
|
|
73
|
+
if res:
|
|
74
|
+
print(f"getByKey: {res.toDict()}")
|
|
75
|
+
else:
|
|
76
|
+
print("No results")
|
|
77
|
+
|
|
78
|
+
# Raw query
|
|
79
|
+
res = await dbWrapper.getAll(
|
|
80
|
+
aModel,
|
|
81
|
+
"""
|
|
82
|
+
SELECT t1.*, t2.name AS other_name
|
|
83
|
+
FROM my_table AS t1
|
|
84
|
+
LEFT JOIN other_table AS t2 ON t1.other_id = t2.id
|
|
85
|
+
"""
|
|
86
|
+
)
|
|
87
|
+
async for record in res:
|
|
88
|
+
print(f"getAll: {record.toDict()}")
|
|
89
|
+
else:
|
|
90
|
+
print("No results")
|
|
91
|
+
|
|
92
|
+
db.close()
|
|
93
|
+
```
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# database_wrapper
|
|
2
|
+
|
|
3
|
+
_Part of the `database_wrapper` package._
|
|
4
|
+
|
|
5
|
+
This python package is a database wrapper for [PostgreSQL](https://www.postgresql.org/) (also called pgsql) databases.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
pip install database_wrapper[pgsql]
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
```python
|
|
16
|
+
from database_wrapper_pgsql import AsyncPgSQLWithPooling, DBWrapperPgSQL
|
|
17
|
+
|
|
18
|
+
db = AsyncPgSQLWithPooling({
|
|
19
|
+
"hostname": "localhost",
|
|
20
|
+
"port": 3306,
|
|
21
|
+
"username": "root",
|
|
22
|
+
"password": "your_password",
|
|
23
|
+
"database": "my_database"
|
|
24
|
+
})
|
|
25
|
+
db.open()
|
|
26
|
+
dbWrapper = DBWrapperPgSQL(db=db)
|
|
27
|
+
|
|
28
|
+
# Simple query
|
|
29
|
+
aModel = MyModel()
|
|
30
|
+
res = await dbWrapper.getByKey(
|
|
31
|
+
aModel,
|
|
32
|
+
"id",
|
|
33
|
+
3005,
|
|
34
|
+
)
|
|
35
|
+
if res:
|
|
36
|
+
print(f"getByKey: {res.toDict()}")
|
|
37
|
+
else:
|
|
38
|
+
print("No results")
|
|
39
|
+
|
|
40
|
+
# Raw query
|
|
41
|
+
res = await dbWrapper.getAll(
|
|
42
|
+
aModel,
|
|
43
|
+
"""
|
|
44
|
+
SELECT t1.*, t2.name AS other_name
|
|
45
|
+
FROM my_table AS t1
|
|
46
|
+
LEFT JOIN other_table AS t2 ON t1.other_id = t2.id
|
|
47
|
+
"""
|
|
48
|
+
)
|
|
49
|
+
async for record in res:
|
|
50
|
+
print(f"getAll: {record.toDict()}")
|
|
51
|
+
else:
|
|
52
|
+
print("No results")
|
|
53
|
+
|
|
54
|
+
db.close()
|
|
55
|
+
```
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"""
|
|
2
|
+
database_wrapper_pgsql package - PostgreSQL database wrapper
|
|
3
|
+
|
|
4
|
+
Part of the database_wrapper package
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
# Copyright 2024 Gints Murans
|
|
8
|
+
|
|
9
|
+
import logging
|
|
10
|
+
|
|
11
|
+
from .db_wrapper_pgsql import DBWrapperPgSQL
|
|
12
|
+
from .connector import PgConfig, AsyncPgSQLWithPooling, PgSQL
|
|
13
|
+
|
|
14
|
+
# Set the logger to a quiet default, can be enabled if needed
|
|
15
|
+
logger = logging.getLogger("database_wrapper_pgsql")
|
|
16
|
+
if logger.level == logging.NOTSET:
|
|
17
|
+
logger.setLevel(logging.WARNING)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
__all__ = [
|
|
21
|
+
"DBWrapperPgSQL",
|
|
22
|
+
"PgConfig",
|
|
23
|
+
"AsyncPgSQLWithPooling",
|
|
24
|
+
"PgSQL",
|
|
25
|
+
]
|
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from contextvars import ContextVar
|
|
3
|
+
from typing import Any, NotRequired, TypedDict, cast
|
|
4
|
+
|
|
5
|
+
from psycopg import (
|
|
6
|
+
# Async
|
|
7
|
+
AsyncConnection as PgAsyncConnection,
|
|
8
|
+
AsyncCursor as PgAsyncCursor,
|
|
9
|
+
# Sync
|
|
10
|
+
Connection as PgConnection,
|
|
11
|
+
Cursor as PgCursor,
|
|
12
|
+
connect as PgConnect,
|
|
13
|
+
)
|
|
14
|
+
from psycopg.rows import (
|
|
15
|
+
DictRow as PgDictRow,
|
|
16
|
+
dict_row as PgDictRowFactory,
|
|
17
|
+
)
|
|
18
|
+
from psycopg_pool import AsyncConnectionPool
|
|
19
|
+
|
|
20
|
+
from database_wrapper import DatabaseBackend
|
|
21
|
+
from database_wrapper.utils.timer import Timer
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# All connections/cursors in this package use psycopg's dict_row factory,
# so rows are plain dicts (PgDictRow).

# Synchronous psycopg connection/cursor aliases.
PgConnectionType = PgConnection[PgDictRow]
PgCursorType = PgCursor[PgDictRow]

# Asynchronous psycopg connection/cursor aliases.
PgAsyncConnectionType = PgAsyncConnection[PgDictRow]
PgAsyncCursorType = PgAsyncCursor[PgDictRow]
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class PgConfig(TypedDict):
    """Connection configuration consumed by PgSQL and AsyncPgSQLWithPooling.

    NotRequired keys fall back to defaults applied by the backend classes
    (port=5432, ssl="prefer", kwargs={}, maxconnections=5, pool_kwargs={}).
    """

    hostname: str
    port: NotRequired[int]  # defaults to 5432 when missing or falsy
    username: str
    password: str
    database: str
    ssl: NotRequired[str]  # libpq sslmode value; defaults to "prefer"
    kwargs: NotRequired[dict[str, Any]]  # extra keyword args for psycopg connect

    # Connection Pooling
    # NOTE(review): declared required but the async backend fills it in when
    # missing/falsy — confirm whether NotRequired was intended.
    maxconnections: int
    pool_kwargs: NotRequired[dict[str, Any]]  # extra args for AsyncConnectionPool
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class PgSQL(DatabaseBackend):
    """
    PostgreSQL database implementation (synchronous).

    :param config: Configuration for PostgreSQL
    :type config: PgConfig

    Defaults:
        port = 5432
        ssl = prefer
    """

    config: PgConfig

    # Set by open(); None/absent until a connection is established.
    connection: PgConnectionType | None
    cursor: PgCursorType | None

    def open(self):
        """Open a new connection, closing any previously open one first.

        Missing or falsy config keys are filled with defaults (port=5432,
        ssl="prefer", kwargs={}) before connecting. A dict-row cursor is
        created and kept on the instance, and the underlying socket
        timeouts are adjusted via fixSocketTimeouts().
        """
        # Free resources
        if hasattr(self, "connection") and self.connection:
            self.close()

        # Set defaults
        if "port" not in self.config or not self.config["port"]:
            self.config["port"] = 5432

        if "ssl" not in self.config or not self.config["ssl"]:
            self.config["ssl"] = "prefer"

        if "kwargs" not in self.config or not self.config["kwargs"]:
            self.config["kwargs"] = {}

        self.logger.debug("Connecting to DB")
        self.connection = cast(
            PgConnectionType,
            PgConnect(
                host=self.config["hostname"],
                port=self.config["port"],
                sslmode=self.config["ssl"],
                user=self.config["username"],
                password=self.config["password"],
                dbname=self.config["database"],
                connect_timeout=self.connectionTimeout,
                row_factory=PgDictRowFactory,  # type: ignore
                **self.config["kwargs"],
            ),
        )
        self.cursor = self.connection.cursor(row_factory=PgDictRowFactory)

        # Lets do some socket magic
        self.fixSocketTimeouts(self.connection.fileno())

    def affectedRows(self) -> int:
        """Return the row count of the last statement executed on the cursor."""
        assert self.cursor, "Cursor is not initialized"

        return self.cursor.rowcount

    def commit(self) -> None:
        """Commit DB queries"""
        assert self.connection, "Connection is not initialized"

        # F541 fix: no placeholders, so plain strings (not f-strings) are used.
        self.logger.debug("Commit DB queries")
        self.connection.commit()

    def rollback(self) -> None:
        """Rollback DB queries"""
        assert self.connection, "Connection is not initialized"

        self.logger.debug("Rollback DB queries")
        self.connection.rollback()
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class AsyncPgSQLWithPooling(DatabaseBackend):
    """
    PostgreSQL database implementation with async and connection pooling.

    :param config: Configuration for PostgreSQL
    :type config: PgConfig

    Defaults:
        port = 5432
        ssl = prefer
        maxconnections = 5
    """

    config: PgConfig

    asyncPool: AsyncConnectionPool[PgAsyncConnectionType]
    # (connection, cursor) pair tracked per asyncio task by __aenter__/__aexit__.
    contextAsyncConnection: ContextVar[
        tuple[PgAsyncConnectionType, PgAsyncCursorType] | None
    ]

    def __init__(
        self,
        dbConfig: PgConfig,
        connectionTimeout: int = 5,
        instanceName: str = "async_postgresql",
    ) -> None:
        """
        Main concept here is that in init we do not connect to database,
        so that class instances can be safely made regardless of connection status.

        Remember to call openAsync() after creating an instance to actually open
        the pool to the database, and close() to close the pool.
        """

        super().__init__(dbConfig, connectionTimeout, instanceName)

        # Set defaults (E713 fix: `x not in y` instead of `not x in y`)
        if "port" not in self.config or not self.config["port"]:
            self.config["port"] = 5432

        if "ssl" not in self.config or not self.config["ssl"]:
            self.config["ssl"] = "prefer"

        if "kwargs" not in self.config or not self.config["kwargs"]:
            self.config["kwargs"] = {}

        # NOTE(review): this key is written but never read in this class — the
        # pool below hardcodes autocommit=True. Confirm whether the base class
        # consumes it before removing.
        if "auto_commit" not in self.config["kwargs"]:
            self.config["kwargs"]["auto_commit"] = True

        # Connection pooling defaults
        if "maxconnections" not in self.config or not self.config["maxconnections"]:
            self.config["maxconnections"] = 5

        if "pool_kwargs" not in self.config or not self.config["pool_kwargs"]:
            self.config["pool_kwargs"] = {}

        # Build a libpq URI; the pool is created closed and only opened
        # by openAsync().
        connStr = (
            f"postgresql://{self.config['username']}:{self.config['password']}@{self.config['hostname']}:{self.config['port']}"
            f"/{self.config['database']}?connect_timeout={self.connectionTimeout}&application_name={self.name}"
            f"&sslmode={self.config['ssl']}"
        )
        self.asyncPool = AsyncConnectionPool(
            connStr,
            open=False,
            min_size=2,
            max_size=self.config["maxconnections"],
            max_lifetime=20 * 60,
            max_idle=400,
            timeout=self.connectionTimeout,
            reconnect_timeout=0,
            num_workers=4,
            connection_class=PgAsyncConnectionType,
            kwargs={
                "autocommit": True,
            },
            **self.config["pool_kwargs"],
        )

    async def openAsync(self) -> None:
        """Open the pool and wait until the initial connections are ready."""
        await self.asyncPool.open(wait=True, timeout=self.connectionTimeout)

    async def newConnection(
        self,
    ) -> tuple[PgAsyncConnectionType, PgAsyncCursorType] | None:
        """Check a connection out of the pool and verify it with a ping.

        Retries up to 3 times (unless shutdown is requested). Returns the
        (connection, cursor) pair, or None when no healthy connection could
        be obtained.
        """
        timer = self.timer.get()
        assert self.asyncPool, "Async pool is not initialized"

        # Create dummy timer
        if timer is None:
            timer = Timer("db")
            self.timer.set(timer)

        # Log
        self.logger.debug("Getting connection from the pool")

        # Get connection from the pool
        tries = 0
        while not self.shutdownRequested.is_set():
            connection = None
            try:
                connection = await self.asyncPool.getconn(
                    timeout=self.connectionTimeout
                )
                cursor = connection.cursor(row_factory=PgDictRowFactory)

                # Lets do some socket magic
                self.fixSocketTimeouts(connection.fileno())

                # Ping the server so a stale pooled connection is caught here
                # rather than failing on the caller's first real query.
                async with timer.aenter("AsyncPgSQLWithPooling.__aenter__.ping"):
                    async with connection.transaction():
                        await cursor.execute("SELECT 1")
                        await cursor.fetchone()

                return (connection, cursor)

            except Exception as e:
                # Close before putconn so the pool discards the broken
                # connection instead of recycling it.
                if connection:
                    await connection.close()
                    await self.asyncPool.putconn(connection)

                # Lazy %-args: the message is only rendered if emitted.
                self.logger.error(
                    "Error while getting connection from the pool: %s", e
                )
                # NOTE(review): this looks like a blocking (threading-style)
                # Event.wait() inside a coroutine, which would stall the event
                # loop for slowDownTimeout — confirm whether asyncio.sleep()
                # or an asyncio.Event was intended.
                self.shutdownRequested.wait(self.slowDownTimeout)
                tries += 1
                if tries >= 3:
                    break
                continue

        return None

    async def returnConnection(self, connection: PgAsyncConnectionType) -> None:
        """Return connection to the pool"""
        timer = self.timer.get()
        assert self.asyncPool, "Async pool is not initialized"

        # Create dummy timer
        if timer is None:
            timer = Timer("db")

        # Log
        self.logger.debug("Putting connection back to the pool")

        # Put connection back to the pool
        await self.asyncPool.putconn(connection)

        # Debug
        self.logger.debug(self.asyncPool.get_stats())
        timer.printTimerStats()
        timer.resetTimers()

    async def __aenter__(
        self,
    ) -> tuple[PgAsyncConnectionType | None, PgAsyncCursorType | None]:
        """Context manager: acquire a pooled connection for the current task."""

        # Init timer
        timer = Timer("db")
        self.timer.set(timer)

        # Lets set the context var so that it is set even if we fail to get connection
        self.contextAsyncConnection.set(None)

        res = await self.newConnection()
        if res:
            self.contextAsyncConnection.set(res)
            return res

        # Acquisition failed: callers must handle the (None, None) pair.
        return (
            None,
            None,
        )

    async def __aexit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        """Context manager: return the task's connection and reset context."""

        testData = self.contextAsyncConnection.get()
        if testData:
            await self.returnConnection(testData[0])

        # Reset context
        self.contextAsyncConnection.set(None)
        self.timer.set(None)

    def close(self) -> None:
        """Close connections"""

        if self.shutdownRequested.is_set():
            return

        self.logger.debug("Closing connection pool")

        # Shutdown
        self.shutdownRequested.set()

        # Close async pool. If a loop is running we can only schedule the
        # close; otherwise drive it to completion synchronously.
        if hasattr(self, "asyncPool") and self.asyncPool.closed is False:
            try:
                loop = asyncio.get_event_loop()
                if loop.is_running():
                    loop.create_task(self.asyncPool.close())
                else:
                    loop.run_until_complete(self.asyncPool.close())

            except RuntimeError:
                ...  # Ignore, as it is expected (no usable event loop)
            except Exception as e:
                self.logger.debug(f"Error while closing async pool: {e}", exc_info=True)
|
|
@@ -0,0 +1,556 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from typing import Any, AsyncGenerator, overload
|
|
3
|
+
|
|
4
|
+
from psycopg import Cursor, AsyncCursor, sql
|
|
5
|
+
from psycopg.rows import class_row
|
|
6
|
+
|
|
7
|
+
from database_wrapper import T, OrderByItem, DBWrapper, DBDataModel
|
|
8
|
+
|
|
9
|
+
from .connector import (
|
|
10
|
+
# Sync
|
|
11
|
+
PgConnectionType,
|
|
12
|
+
PgCursorType,
|
|
13
|
+
PgSQL,
|
|
14
|
+
# Async
|
|
15
|
+
PgAsyncConnectionType,
|
|
16
|
+
PgAsyncCursorType,
|
|
17
|
+
AsyncPgSQLWithPooling,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class DBWrapperPgSQL(DBWrapper):
|
|
22
|
+
"""
|
|
23
|
+
Database wrapper for postgres
|
|
24
|
+
|
|
25
|
+
This is meant to be used in async environments. Also remember to call close() when done.
|
|
26
|
+
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
# Override db instance
|
|
30
|
+
db: PgSQL | AsyncPgSQLWithPooling
|
|
31
|
+
dbConn: PgConnectionType | PgAsyncConnectionType | None = None
|
|
32
|
+
|
|
33
|
+
#######################
|
|
34
|
+
### Class lifecycle ###
|
|
35
|
+
#######################
|
|
36
|
+
|
|
37
|
+
# Meta methods
|
|
38
|
+
    def __init__(
        self,
        db: PgSQL | AsyncPgSQLWithPooling,
        dbConn: PgConnectionType | PgAsyncConnectionType | None = None,
        logger: logging.Logger | None = None,
    ):
        """
        Initializes a new instance of the DBWrapperPgSQL class.

        Args:
            db (PgSQL | AsyncPgSQLWithPooling): The PostgreSQL backend object.
            dbConn (PgConnectionType | PgAsyncConnectionType | None, optional):
                An existing connection to use instead of acquiring one later.
                Defaults to None.
            logger (logging.Logger, optional): The logger object. Defaults to None.
        """
        super().__init__(db, dbConn, logger)
|
|
52
|
+
|
|
53
|
+
    async def close(self) -> None:
        # Release the held connection. Pooled connections are returned to the
        # pool; plain PgSQL connections are owned by the backend and are not
        # closed here.
        if hasattr(self, "dbConn") and self.dbConn and hasattr(self, "db") and self.db:
            if isinstance(self.db, AsyncPgSQLWithPooling):
                await self.db.returnConnection(self.dbConn)  # type: ignore
            # Forget the connection reference either way.
            self.dbConn = None
|
|
58
|
+
|
|
59
|
+
######################
|
|
60
|
+
### Helper methods ###
|
|
61
|
+
######################
|
|
62
|
+
|
|
63
|
+
def makeIdentifier(self, schema: str | None, name: str) -> sql.Identifier | str:
|
|
64
|
+
"""
|
|
65
|
+
Creates a SQL identifier object from the given name.
|
|
66
|
+
|
|
67
|
+
Args:
|
|
68
|
+
name (str): The name to create the identifier from.
|
|
69
|
+
|
|
70
|
+
Returns:
|
|
71
|
+
sql.Identifier: The created SQL identifier object.
|
|
72
|
+
"""
|
|
73
|
+
if schema:
|
|
74
|
+
return sql.Identifier(schema, name)
|
|
75
|
+
|
|
76
|
+
return sql.Identifier(name)
|
|
77
|
+
|
|
78
|
+
    @overload
    async def createCursor(self) -> PgCursorType | PgAsyncCursorType: ...

    @overload
    async def createCursor(
        self,
        emptyDataClass: T,
    ) -> Cursor[T] | AsyncCursor[T]: ...

    async def createCursor(
        self,
        emptyDataClass: T | None = None,
    ) -> Cursor[T] | PgCursorType | AsyncCursor[T] | PgAsyncCursorType:
        """
        Creates a new cursor object, acquiring a connection first if needed.

        Args:
            emptyDataClass (DBDataModel | None, optional): The data model to use for the cursor.
                Defaults to None.

        Returns:
            PgAsyncCursorType | AsyncCursor[DBDataModel]: The created cursor object.
        """
        assert self.db is not None, "Database connection is not set"

        # First we need connection
        if self.dbConn is None:
            # Sync backend: reuse the backend's already-open connection.
            if isinstance(self.db, PgSQL):
                self.dbConn = self.db.connection

            # Async pooled backend: check a fresh connection out of the pool.
            # NOTE(review): the cursor that newConnection() created is
            # discarded here and only the connection is kept — confirm that
            # is intended (the discarded cursor is never closed explicitly).
            if isinstance(self.db, AsyncPgSQLWithPooling):
                status = await self.db.newConnection()
                if not status:
                    raise Exception("Failed to create new connection")

                (pgConn, _pgCur) = status
                self.dbConn = pgConn

        # Lets make sure we have a connection
        if self.dbConn is None:
            raise Exception("Failed to get connection")

        # Without a model, return a plain cursor.
        if emptyDataClass is None:
            return self.dbConn.cursor()

        # With a model, rows are materialized as instances of its class.
        return self.dbConn.cursor(row_factory=class_row(emptyDataClass.__class__))
|
|
124
|
+
|
|
125
|
+
def logQuery(
|
|
126
|
+
self,
|
|
127
|
+
cursor: AsyncCursor[Any] | Cursor[Any],
|
|
128
|
+
query: sql.SQL | sql.Composed,
|
|
129
|
+
params: tuple[Any, ...],
|
|
130
|
+
) -> None:
|
|
131
|
+
"""
|
|
132
|
+
Logs the given query and parameters.
|
|
133
|
+
|
|
134
|
+
Args:
|
|
135
|
+
cursor (Any): The database cursor.
|
|
136
|
+
query (Any): The query to log.
|
|
137
|
+
params (tuple[Any, ...]): The parameters to log.
|
|
138
|
+
"""
|
|
139
|
+
queryString = query.as_string(self.dbConn)
|
|
140
|
+
self.logger.debug(f"Query: {queryString}")
|
|
141
|
+
|
|
142
|
+
#####################
|
|
143
|
+
### Query methods ###
|
|
144
|
+
#####################
|
|
145
|
+
|
|
146
|
+
def filterQuery(
|
|
147
|
+
self,
|
|
148
|
+
schemaName: str | None,
|
|
149
|
+
tableName: str,
|
|
150
|
+
) -> sql.SQL | sql.Composed | str:
|
|
151
|
+
"""
|
|
152
|
+
Creates a SQL query to filter data from the given table.
|
|
153
|
+
|
|
154
|
+
Args:
|
|
155
|
+
schemaName (str): The name of the schema to filter data from.
|
|
156
|
+
tableName (str): The name of the table to filter data from.
|
|
157
|
+
|
|
158
|
+
Returns:
|
|
159
|
+
sql.SQL | sql.Composed: The created SQL query object.
|
|
160
|
+
"""
|
|
161
|
+
return sql.SQL("SELECT * FROM {table}").format(
|
|
162
|
+
table=self.makeIdentifier(schemaName, tableName)
|
|
163
|
+
)
|
|
164
|
+
|
|
165
|
+
def limitQuery(self, offset: int = 0, limit: int = 100) -> sql.Composed | sql.SQL:
|
|
166
|
+
return sql.SQL("LIMIT {} OFFSET {}").format(limit, offset)
|
|
167
|
+
|
|
168
|
+
# Action methods
|
|
169
|
+
async def getOne(
|
|
170
|
+
self,
|
|
171
|
+
emptyDataClass: T,
|
|
172
|
+
customQuery: sql.SQL | sql.Composed | str | None = None,
|
|
173
|
+
) -> T | None:
|
|
174
|
+
"""
|
|
175
|
+
Retrieves a single record from the database.
|
|
176
|
+
|
|
177
|
+
Args:
|
|
178
|
+
emptyDataClass (T): The data model to use for the query.
|
|
179
|
+
customQuery (sql.SQL | sql.Composed | str | None, optional): The custom query to use.
|
|
180
|
+
Defaults to None.
|
|
181
|
+
|
|
182
|
+
Returns:
|
|
183
|
+
T | None: The result of the query.
|
|
184
|
+
"""
|
|
185
|
+
# Query
|
|
186
|
+
_query = (
|
|
187
|
+
customQuery
|
|
188
|
+
or emptyDataClass.queryBase()
|
|
189
|
+
or self.filterQuery(emptyDataClass.schemaName, emptyDataClass.tableName)
|
|
190
|
+
)
|
|
191
|
+
idKey = emptyDataClass.idKey
|
|
192
|
+
idValue = emptyDataClass.id
|
|
193
|
+
if not idKey:
|
|
194
|
+
raise ValueError("Id key is not set")
|
|
195
|
+
if not idValue:
|
|
196
|
+
raise ValueError("Id value is not set")
|
|
197
|
+
|
|
198
|
+
# Create a SQL object for the query and format it
|
|
199
|
+
querySql = sql.SQL("{query} WHERE {idkey} = %s").format(
|
|
200
|
+
query=_query, idkey=self.makeIdentifier(emptyDataClass.tableAlias, idKey)
|
|
201
|
+
)
|
|
202
|
+
|
|
203
|
+
# Create a new cursor
|
|
204
|
+
newCursor = await self.createCursor(emptyDataClass)
|
|
205
|
+
|
|
206
|
+
# Log
|
|
207
|
+
self.logQuery(newCursor, querySql, (idValue,))
|
|
208
|
+
|
|
209
|
+
# Load data
|
|
210
|
+
try:
|
|
211
|
+
if isinstance(newCursor, AsyncCursor):
|
|
212
|
+
await newCursor.execute(querySql, (idValue,))
|
|
213
|
+
dbData = await newCursor.fetchone()
|
|
214
|
+
else:
|
|
215
|
+
newCursor.execute(querySql, (idValue,))
|
|
216
|
+
dbData = newCursor.fetchone()
|
|
217
|
+
|
|
218
|
+
return dbData
|
|
219
|
+
|
|
220
|
+
finally:
|
|
221
|
+
# Close the cursor
|
|
222
|
+
if isinstance(newCursor, AsyncCursor):
|
|
223
|
+
await newCursor.close()
|
|
224
|
+
else:
|
|
225
|
+
newCursor.close()
|
|
226
|
+
|
|
227
|
+
async def getByKey(
|
|
228
|
+
self,
|
|
229
|
+
emptyDataClass: T,
|
|
230
|
+
idKey: str,
|
|
231
|
+
idValue: Any,
|
|
232
|
+
customQuery: sql.SQL | sql.Composed | str | None = None,
|
|
233
|
+
) -> T | None:
|
|
234
|
+
"""
|
|
235
|
+
Retrieves a single record from the database using the given key.
|
|
236
|
+
|
|
237
|
+
Args:
|
|
238
|
+
emptyDataClass (T): The data model to use for the query.
|
|
239
|
+
idKey (str): The name of the key to use for the query.
|
|
240
|
+
idValue (Any): The value of the key to use for the query.
|
|
241
|
+
customQuery (sql.SQL | sql.Composed | str | None, optional): The custom query to use.
|
|
242
|
+
Defaults to None.
|
|
243
|
+
|
|
244
|
+
Returns:
|
|
245
|
+
T | None: The result of the query.
|
|
246
|
+
"""
|
|
247
|
+
# Query
|
|
248
|
+
_query = (
|
|
249
|
+
customQuery
|
|
250
|
+
or emptyDataClass.queryBase()
|
|
251
|
+
or self.filterQuery(emptyDataClass.schemaName, emptyDataClass.tableName)
|
|
252
|
+
)
|
|
253
|
+
|
|
254
|
+
# Create a SQL object for the query and format it
|
|
255
|
+
querySql = sql.SQL("{} WHERE {} = %s").format(
|
|
256
|
+
_query, self.makeIdentifier(emptyDataClass.tableAlias, idKey)
|
|
257
|
+
)
|
|
258
|
+
|
|
259
|
+
# Create a new cursor
|
|
260
|
+
newCursor = await self.createCursor(emptyDataClass)
|
|
261
|
+
|
|
262
|
+
# Log
|
|
263
|
+
self.logQuery(newCursor, querySql, (idValue,))
|
|
264
|
+
|
|
265
|
+
# Load data
|
|
266
|
+
try:
|
|
267
|
+
if isinstance(newCursor, AsyncCursor):
|
|
268
|
+
await newCursor.execute(querySql, (idValue,))
|
|
269
|
+
dbData = await newCursor.fetchone()
|
|
270
|
+
else:
|
|
271
|
+
newCursor.execute(querySql, (idValue,))
|
|
272
|
+
dbData = newCursor.fetchone()
|
|
273
|
+
|
|
274
|
+
return dbData
|
|
275
|
+
|
|
276
|
+
finally:
|
|
277
|
+
# Ensure the cursor is closed after the generator is exhausted or an error occurs
|
|
278
|
+
if isinstance(newCursor, AsyncCursor):
|
|
279
|
+
await newCursor.close()
|
|
280
|
+
else:
|
|
281
|
+
newCursor.close()
|
|
282
|
+
|
|
283
|
+
async def getAll(
|
|
284
|
+
self,
|
|
285
|
+
emptyDataClass: T,
|
|
286
|
+
idKey: str | None = None,
|
|
287
|
+
idValue: Any | None = None,
|
|
288
|
+
orderBy: OrderByItem | None = None,
|
|
289
|
+
offset: int = 0,
|
|
290
|
+
limit: int = 100,
|
|
291
|
+
customQuery: sql.SQL | sql.Composed | str | None = None,
|
|
292
|
+
) -> AsyncGenerator[T, None]:
|
|
293
|
+
"""
|
|
294
|
+
Retrieves all records from the database.
|
|
295
|
+
|
|
296
|
+
Args:
|
|
297
|
+
emptyDataClass (T): The data model to use for the query.
|
|
298
|
+
idKey (str | None, optional): The name of the key to use for filtering. Defaults to None.
|
|
299
|
+
idValue (Any | None, optional): The value of the key to use for filtering. Defaults to None.
|
|
300
|
+
orderBy (OrderByItem | None, optional): The order by item to use for sorting. Defaults to None.
|
|
301
|
+
offset (int, optional): The number of results to skip. Defaults to 0.
|
|
302
|
+
limit (int, optional): The maximum number of results to return. Defaults to 100.
|
|
303
|
+
customQuery (sql.SQL | sql.Composed | str | None, optional): The custom query to use. Defaults to None.
|
|
304
|
+
|
|
305
|
+
Returns:
|
|
306
|
+
AsyncGenerator[T, None]: The result of the query.
|
|
307
|
+
"""
|
|
308
|
+
# Query
|
|
309
|
+
_query = (
|
|
310
|
+
customQuery
|
|
311
|
+
or emptyDataClass.queryBase()
|
|
312
|
+
or self.filterQuery(emptyDataClass.schemaName, emptyDataClass.tableName)
|
|
313
|
+
)
|
|
314
|
+
_params: tuple[Any, ...] = ()
|
|
315
|
+
|
|
316
|
+
# Filter
|
|
317
|
+
if idKey and idValue:
|
|
318
|
+
_query = sql.SQL("{} WHERE {} = %s").format(
|
|
319
|
+
_query, self.makeIdentifier(emptyDataClass.tableAlias, idKey)
|
|
320
|
+
)
|
|
321
|
+
_params = (idValue,)
|
|
322
|
+
|
|
323
|
+
# Limits
|
|
324
|
+
_order: sql.Composable = sql.SQL("")
|
|
325
|
+
_limit: sql.Composable = sql.SQL("")
|
|
326
|
+
|
|
327
|
+
if orderBy:
|
|
328
|
+
orderList = [
|
|
329
|
+
f"{item[0]} {item[1] if len(item) > 1 and item[1] != None else 'ASC'}"
|
|
330
|
+
for item in orderBy
|
|
331
|
+
]
|
|
332
|
+
_order = sql.SQL("ORDER BY %s" % ", ".join(orderList)) # type: ignore
|
|
333
|
+
if offset or limit:
|
|
334
|
+
_limit = sql.SQL("{}").format(self.limitQuery(offset, limit))
|
|
335
|
+
|
|
336
|
+
# Create a SQL object for the query and format it
|
|
337
|
+
querySql = sql.SQL("{query} {order} {limit}").format(
|
|
338
|
+
query=_query, order=_order, limit=_limit
|
|
339
|
+
)
|
|
340
|
+
|
|
341
|
+
# Create a new cursor
|
|
342
|
+
newCursor = await self.createCursor(emptyDataClass)
|
|
343
|
+
|
|
344
|
+
# Log
|
|
345
|
+
self.logQuery(newCursor, querySql, _params)
|
|
346
|
+
|
|
347
|
+
# Load data
|
|
348
|
+
try:
|
|
349
|
+
if isinstance(newCursor, AsyncCursor):
|
|
350
|
+
# Execute the query
|
|
351
|
+
await newCursor.execute(querySql, _params)
|
|
352
|
+
|
|
353
|
+
# Instead of fetchall(), we'll use a generator to yield results one by one
|
|
354
|
+
while True:
|
|
355
|
+
row = await newCursor.fetchone()
|
|
356
|
+
if row is None:
|
|
357
|
+
break
|
|
358
|
+
yield row
|
|
359
|
+
else:
|
|
360
|
+
newCursor.execute(querySql, _params)
|
|
361
|
+
while True:
|
|
362
|
+
row = newCursor.fetchone()
|
|
363
|
+
if row is None:
|
|
364
|
+
break
|
|
365
|
+
yield row
|
|
366
|
+
finally:
|
|
367
|
+
# Ensure the cursor is closed after the generator is exhausted or an error occurs
|
|
368
|
+
if isinstance(newCursor, AsyncCursor):
|
|
369
|
+
await newCursor.close()
|
|
370
|
+
else:
|
|
371
|
+
newCursor.close()
|
|
372
|
+
|
|
373
|
+
async def getFiltered(
    self,
    emptyDataClass: T,
    filter: dict[str, Any],
    orderBy: OrderByItem | None = None,
    offset: int = 0,
    limit: int = 100,
    customQuery: sql.SQL | sql.Composed | str | None = None,
) -> AsyncGenerator[T, None]:
    """
    Yield rows matching a column filter, one model instance at a time.

    Args:
        emptyDataClass: Empty model instance describing the target table.
        filter: Column -> value mapping converted to a WHERE clause by createFilter().
        orderBy: Optional list of (column, direction) items; a missing or None
            direction defaults to ASC.
        offset: Row offset passed to limitQuery().
        limit: Maximum number of rows (0 together with offset 0 disables the clause).
        customQuery: Optional base query overriding the model / table defaults.

    Yields:
        One row per match. The cursor is always closed when the generator is
        exhausted, closed early, or an error occurs.
    """
    # Base query precedence: explicit custom query, then the model's own
    # base query, then a plain SELECT over schema.table.
    _query = (
        customQuery
        or emptyDataClass.queryBase()
        or self.filterQuery(emptyDataClass.schemaName, emptyDataClass.tableName)
    )
    (_filter, _params) = self.createFilter(filter)
    _filter = sql.SQL(_filter)  # type: ignore

    # Ordering / limit default to empty SQL fragments
    _order: sql.Composable = sql.SQL("")
    _limit: sql.Composable = sql.SQL("")

    if orderBy:
        # `is not None` instead of `!= None`; direction defaults to ASC
        orderList = [
            f"{item[0]} {item[1] if len(item) > 1 and item[1] is not None else 'ASC'}"
            for item in orderBy
        ]
        # Concatenate instead of %-formatting so a literal '%' in an order
        # item cannot break the format expression
        _order = sql.SQL("ORDER BY " + ", ".join(orderList))  # type: ignore
    if offset or limit:
        _limit = sql.SQL("{}").format(self.limitQuery(offset, limit))

    # Create a SQL object for the query and format it
    querySql = sql.SQL("{query} {filter} {order} {limit}").format(
        query=_query, filter=_filter, order=_order, limit=_limit
    )

    # Create a new cursor
    newCursor = await self.createCursor(emptyDataClass)

    # Log
    self.logQuery(newCursor, querySql, _params)

    # Load data
    try:
        if isinstance(newCursor, AsyncCursor):
            # Execute the query
            await newCursor.execute(querySql, _params)

            # Yield results one by one instead of materializing fetchall()
            while True:
                row = await newCursor.fetchone()
                if row is None:
                    break
                yield row
        else:
            newCursor.execute(querySql, _params)
            while True:
                row = newCursor.fetchone()
                if row is None:
                    break
                yield row
    finally:
        # Ensure the cursor is closed after the generator is exhausted or an error occurs
        if isinstance(newCursor, AsyncCursor):
            await newCursor.close()
        else:
            newCursor.close()
async def _store(
    self,
    emptyDataClass: DBDataModel,
    schemaName: str | None,
    tableName: str,
    storeData: dict[str, Any],
    idKey: str,
) -> tuple[int, int]:
    """
    Insert `storeData` as one row and return (inserted id, affected row count).

    The inserted id is taken from the RETURNING clause on `idKey`; 0 is
    returned when the result row has no `id` attribute.
    """
    columnNames = list(storeData.keys())
    params = tuple(storeData.values())

    insertQuery = sql.SQL(
        "INSERT INTO {table} ({columns}) VALUES ({values}) RETURNING {id_key}"
    ).format(
        table=self.makeIdentifier(schemaName, tableName),
        columns=sql.SQL(", ").join(map(sql.Identifier, columnNames)),
        values=sql.SQL(", ").join(sql.Placeholder() * len(params)),
        id_key=self.makeIdentifier(emptyDataClass.tableAlias, idKey),
    )

    # Fresh cursor for this statement
    cursor = await self.createCursor(emptyDataClass)

    # Log the query before executing
    self.logQuery(cursor, insertQuery, params)

    # Execute on the matching (async or sync) cursor type
    if isinstance(cursor, AsyncCursor):
        await cursor.execute(insertQuery, params)
        affectedRows = cursor.rowcount
        insertedRow = await cursor.fetchone()
    else:
        cursor.execute(insertQuery, params)
        affectedRows = cursor.rowcount
        insertedRow = cursor.fetchone()

    newId = insertedRow.id if insertedRow and hasattr(insertedRow, "id") else 0
    return (newId, affectedRows)
async def _update(
    self,
    emptyDataClass: DBDataModel,
    schemaName: str | None,
    tableName: str,
    updateData: dict[str, Any],
    updateId: tuple[str, Any],
) -> int:
    """
    Update the row identified by `updateId` (key, value) with `updateData`.

    Returns:
        Number of affected rows as reported by the cursor.
    """
    (idKey, idValue) = updateId

    # SET assignments use placeholders; the id value is appended last to
    # match the trailing WHERE placeholder.
    params = list(updateData.values()) + [idValue]
    assignments = sql.SQL(", ").join(
        sql.Identifier(column) + sql.SQL(" = %s") for column in updateData.keys()
    )

    updateQuery = sql.SQL(
        "UPDATE {table} SET {set_clause} WHERE {id_key} = %s"
    ).format(
        table=self.makeIdentifier(schemaName, tableName),
        set_clause=assignments,
        id_key=self.makeIdentifier(emptyDataClass.tableAlias, idKey),
    )

    # Fresh cursor for this statement
    cursor = await self.createCursor(emptyDataClass)

    # Log the query before executing
    self.logQuery(cursor, updateQuery, tuple(params))

    # Execute on the matching (async or sync) cursor type
    if isinstance(cursor, AsyncCursor):
        await cursor.execute(updateQuery, tuple(params))
    else:
        cursor.execute(updateQuery, tuple(params))

    return cursor.rowcount
async def _delete(
    self,
    emptyDataClass: DBDataModel,
    schemaName: str | None,
    tableName: str,
    deleteId: tuple[str, Any],
) -> int:
    """
    Delete the row identified by `deleteId` (key, value).

    Returns:
        Number of affected rows as reported by the cursor.
    """
    (idKey, idValue) = deleteId

    delete_query = sql.SQL("DELETE FROM {table} WHERE {id_key} = %s").format(
        table=self.makeIdentifier(schemaName, tableName),
        id_key=self.makeIdentifier(emptyDataClass.tableAlias, idKey),
    )

    # Fresh cursor for this statement
    cursor = await self.createCursor(emptyDataClass)

    # Log the query before executing
    self.logQuery(cursor, delete_query, (idValue,))

    # Execute on the matching (async or sync) cursor type
    if isinstance(cursor, AsyncCursor):
        await cursor.execute(delete_query, (idValue,))
    else:
        cursor.execute(delete_query, (idValue,))

    return cursor.rowcount
|
|
File without changes
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: database_wrapper_pgsql
|
|
3
|
+
Version: 0.1.28
|
|
4
|
+
Summary: database_wrapper for PostgreSQL database
|
|
5
|
+
Author-email: Gints Murans <gm@gm.lv>
|
|
6
|
+
License: GNU General Public License v3.0 (GPL-3.0)
|
|
7
|
+
Project-URL: Homepage, https://github.com/gintsmurans/py_database_wrapper
|
|
8
|
+
Project-URL: Documentation, https://github.com/gintsmurans/py_database_wrapper
|
|
9
|
+
Project-URL: Changes, https://github.com/gintsmurans/py_database_wrapper
|
|
10
|
+
Project-URL: Code, https://github.com/gintsmurans/py_database_wrapper
|
|
11
|
+
Project-URL: Issue Tracker, https://github.com/gintsmurans/py_database_wrapper/issues
|
|
12
|
+
Project-URL: Download, https://pypi.org/project/database_wrapper/
|
|
13
|
+
Keywords: database,wrapper,python,postgresql,pgsql
|
|
14
|
+
Classifier: Development Status :: 4 - Beta
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
|
|
17
|
+
Classifier: Operating System :: MacOS :: MacOS X
|
|
18
|
+
Classifier: Operating System :: Microsoft :: Windows
|
|
19
|
+
Classifier: Operating System :: POSIX
|
|
20
|
+
Classifier: Programming Language :: Python :: 3
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
24
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
25
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
26
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
27
|
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
28
|
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
29
|
+
Classifier: Topic :: Database
|
|
30
|
+
Classifier: Topic :: Database :: Front-Ends
|
|
31
|
+
Classifier: Topic :: Software Development
|
|
32
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
33
|
+
Requires-Python: >=3.8
|
|
34
|
+
Description-Content-Type: text/markdown
|
|
35
|
+
Requires-Dist: database_wrapper==0.1.28
|
|
36
|
+
Requires-Dist: psycopg[binary]>=3.2.0
|
|
37
|
+
Requires-Dist: psycopg[pool]>=3.2.0
|
|
38
|
+
|
|
39
|
+
# database_wrapper
|
|
40
|
+
|
|
41
|
+
_Part of the `database_wrapper` package._
|
|
42
|
+
|
|
43
|
+
This python package is a database wrapper for [PostgreSQL](https://www.postgresql.org/) (also called pgsql) databases.
|
|
44
|
+
|
|
45
|
+
## Installation
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
pip install database_wrapper[pgsql]
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Usage
|
|
52
|
+
|
|
53
|
+
```python
|
|
54
|
+
from database_wrapper_pgsql import AsyncPgSQLWithPooling, DBWrapperPgSQL
|
|
55
|
+
|
|
56
|
+
db = AsyncPgSQLWithPooling({
|
|
57
|
+
"hostname": "localhost",
|
|
58
|
+
"port": 3306,
|
|
59
|
+
"username": "root",
|
|
60
|
+
"password": "your_password",
|
|
61
|
+
"database": "my_database"
|
|
62
|
+
})
|
|
63
|
+
db.open()
|
|
64
|
+
dbWrapper = DBWrapperPgSQL(db=db)
|
|
65
|
+
|
|
66
|
+
# Simple query
|
|
67
|
+
aModel = MyModel()
|
|
68
|
+
res = await dbWrapper.getByKey(
|
|
69
|
+
aModel,
|
|
70
|
+
"id",
|
|
71
|
+
3005,
|
|
72
|
+
)
|
|
73
|
+
if res:
|
|
74
|
+
print(f"getByKey: {res.toDict()}")
|
|
75
|
+
else:
|
|
76
|
+
print("No results")
|
|
77
|
+
|
|
78
|
+
# Raw query
|
|
79
|
+
res = await dbWrapper.getAll(
|
|
80
|
+
aModel,
|
|
81
|
+
"""
|
|
82
|
+
SELECT t1.*, t2.name AS other_name
|
|
83
|
+
FROM my_table AS t1
|
|
84
|
+
LEFT JOIN other_table AS t2 ON t1.other_id = t2.id
|
|
85
|
+
"""
|
|
86
|
+
)
|
|
87
|
+
async for record in res:
|
|
88
|
+
print(f"getAll: {record.toDict()}")
|
|
89
|
+
else:
|
|
90
|
+
print("No results")
|
|
91
|
+
|
|
92
|
+
db.close()
|
|
93
|
+
```
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
README.md
|
|
2
|
+
pyproject.toml
|
|
3
|
+
database_wrapper_pgsql/__init__.py
|
|
4
|
+
database_wrapper_pgsql/connector.py
|
|
5
|
+
database_wrapper_pgsql/db_wrapper_pgsql.py
|
|
6
|
+
database_wrapper_pgsql/py.typed
|
|
7
|
+
database_wrapper_pgsql.egg-info/PKG-INFO
|
|
8
|
+
database_wrapper_pgsql.egg-info/SOURCES.txt
|
|
9
|
+
database_wrapper_pgsql.egg-info/dependency_links.txt
|
|
10
|
+
database_wrapper_pgsql.egg-info/requires.txt
|
|
11
|
+
database_wrapper_pgsql.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
database_wrapper_pgsql
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools >= 61.0.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "database_wrapper_pgsql"
|
|
7
|
+
version = "0.1.28"
|
|
8
|
+
description = "database_wrapper for PostgreSQL database"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
requires-python = ">=3.8"
|
|
11
|
+
license = {text = "GNU General Public License v3.0 (GPL-3.0)"}
|
|
12
|
+
authors = [
|
|
13
|
+
{name = "Gints Murans", email = "gm@gm.lv"}
|
|
14
|
+
]
|
|
15
|
+
classifiers = [
|
|
16
|
+
"Development Status :: 4 - Beta",
|
|
17
|
+
"Intended Audience :: Developers",
|
|
18
|
+
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
|
|
19
|
+
"Operating System :: MacOS :: MacOS X",
|
|
20
|
+
"Operating System :: Microsoft :: Windows",
|
|
21
|
+
"Operating System :: POSIX",
|
|
22
|
+
"Programming Language :: Python :: 3",
|
|
23
|
+
"Programming Language :: Python :: 3.8",
|
|
24
|
+
"Programming Language :: Python :: 3.9",
|
|
25
|
+
"Programming Language :: Python :: 3.10",
|
|
26
|
+
"Programming Language :: Python :: 3.11",
|
|
27
|
+
"Programming Language :: Python :: 3.12",
|
|
28
|
+
"Programming Language :: Python :: 3.13",
|
|
29
|
+
"Programming Language :: Python :: Implementation :: CPython",
|
|
30
|
+
"Programming Language :: Python :: Implementation :: PyPy",
|
|
31
|
+
"Topic :: Database",
|
|
32
|
+
"Topic :: Database :: Front-Ends",
|
|
33
|
+
"Topic :: Software Development",
|
|
34
|
+
"Topic :: Software Development :: Libraries :: Python Modules"
|
|
35
|
+
]
|
|
36
|
+
keywords = ["database", "wrapper", "python", "postgresql", "pgsql"]
|
|
37
|
+
dependencies = [
|
|
38
|
+
"database_wrapper == 0.1.28",
|
|
39
|
+
"psycopg[binary] >= 3.2.0",
|
|
40
|
+
"psycopg[pool] >= 3.2.0",
|
|
41
|
+
]
|
|
42
|
+
|
|
43
|
+
[project.urls]
|
|
44
|
+
Homepage = "https://github.com/gintsmurans/py_database_wrapper"
|
|
45
|
+
Documentation = "https://github.com/gintsmurans/py_database_wrapper"
|
|
46
|
+
Changes = "https://github.com/gintsmurans/py_database_wrapper"
|
|
47
|
+
Code = "https://github.com/gintsmurans/py_database_wrapper"
|
|
48
|
+
"Issue Tracker" = "https://github.com/gintsmurans/py_database_wrapper/issues"
|
|
49
|
+
Download = "https://pypi.org/project/database_wrapper/"
|
|
50
|
+
|
|
51
|
+
[tool.setuptools.package-data]
|
|
52
|
+
database_wrapper_pgsql = ["py.typed"]
|