psqlpy-0.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- psqlpy/__init__.py +45 -0
- psqlpy/_internal/__init__.pyi +1874 -0
- psqlpy/_internal/exceptions.pyi +152 -0
- psqlpy/_internal/extra_types.pyi +628 -0
- psqlpy/_internal/row_factories.pyi +67 -0
- psqlpy/_internal.cpython-310-x86_64-linux-gnu.so +0 -0
- psqlpy/exceptions.py +79 -0
- psqlpy/extra_types.py +103 -0
- psqlpy/py.typed +0 -0
- psqlpy/row_factories.py +6 -0
- psqlpy-0.11.10.dist-info/METADATA +31 -0
- psqlpy-0.11.10.dist-info/RECORD +15 -0
- psqlpy-0.11.10.dist-info/WHEEL +5 -0
- psqlpy-0.11.10.dist-info/entry_points.txt +2 -0
- psqlpy-0.11.10.dist-info/licenses/LICENSE +21 -0
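
The largest entry is the type stub for the Rust-backed `_internal` module, reproduced below. For orientation only, here is a minimal usage sketch of the pooled API that stub describes; the DSN, table name, and pool size are placeholders, and the top-level `ConnectionPool` import follows the psqlpy documentation rather than anything shown in this diff:

```python
import asyncio

from psqlpy import ConnectionPool  # documented top-level import (assumption, not part of this diff)


async def main() -> None:
    # Per the stub below, the pool does not open connections up front;
    # connections are acquired on demand.
    db_pool = ConnectionPool(
        dsn="postgres://postgres:postgres@localhost:5432/postgres",  # placeholder DSN
        max_db_pool_size=4,
    )
    async with db_pool.acquire() as connection:
        query_result = await connection.execute(
            "SELECT username FROM users WHERE id = $1",  # placeholder table
            [100],
        )
        print(query_result.result())  # list[dict[str, Any]] by default
    db_pool.close()


asyncio.run(main())
```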
|
@@ -0,0 +1,1874 @@
|
|
|
1
|
+
import types
|
|
2
|
+
import typing
|
|
3
|
+
from collections.abc import Awaitable, Callable, Mapping, Sequence
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from io import BytesIO
|
|
6
|
+
from ipaddress import IPv4Address, IPv6Address
|
|
7
|
+
from typing import Any, TypeAlias, TypeVar
|
|
8
|
+
|
|
9
|
+
from typing_extensions import Buffer, Self
|
|
10
|
+
|
|
11
|
+
_CustomClass = TypeVar(
|
|
12
|
+
"_CustomClass",
|
|
13
|
+
)
|
|
14
|
+
_RowFactoryRV = TypeVar(
|
|
15
|
+
"_RowFactoryRV",
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
ParamsT: TypeAlias = Sequence[Any] | Mapping[str, Any] | None
|
|
19
|
+
|
|
20
|
+
class QueryResult:
|
|
21
|
+
"""Result."""
|
|
22
|
+
|
|
23
|
+
@typing.overload
|
|
24
|
+
def result(
|
|
25
|
+
self: Self,
|
|
26
|
+
as_tuple: None = None,
|
|
27
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
28
|
+
) -> list[dict[str, Any]]: ...
|
|
29
|
+
@typing.overload
|
|
30
|
+
def result(
|
|
31
|
+
self: Self,
|
|
32
|
+
as_tuple: typing.Literal[False],
|
|
33
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
34
|
+
) -> list[dict[str, Any]]: ...
|
|
35
|
+
@typing.overload
|
|
36
|
+
def result(
|
|
37
|
+
self: Self,
|
|
38
|
+
as_tuple: typing.Literal[True],
|
|
39
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
40
|
+
) -> list[tuple[typing.Any, ...]]: ...
|
|
41
|
+
@typing.overload
|
|
42
|
+
def result(
|
|
43
|
+
self: Self,
|
|
44
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
45
|
+
as_tuple: bool | None = None,
|
|
46
|
+
) -> list[dict[str, Any]]:
|
|
47
|
+
"""Return result from database.
|
|
48
|
+
|
|
49
|
+
By default it returns result as a list of dicts.
|
|
50
|
+
|
|
51
|
+
`custom_decoders` must be used when you use
|
|
52
|
+
PostgreSQL Type which isn't supported, read more in our docs.
|
|
53
|
+
"""
|
|
54
|
+
|
|
55
|
+
def as_class(
|
|
56
|
+
self: Self,
|
|
57
|
+
as_class: Callable[..., _CustomClass],
|
|
58
|
+
) -> list[_CustomClass]:
|
|
59
|
+
"""Convert results to passed class.
|
|
60
|
+
|
|
61
|
+
The main goal of this method is pydantic,
|
|
62
|
+
msgspec and dataclasses support.
|
|
63
|
+
|
|
64
|
+
### Parameters:
|
|
65
|
+
- `as_class`: Any callable python class for the results.
|
|
66
|
+
|
|
67
|
+
### Example:
|
|
68
|
+
```python
|
|
69
|
+
import asyncio
|
|
70
|
+
|
|
71
|
+
from psqlpy import PSQLPool, QueryResult
|
|
72
|
+
|
|
73
|
+
class ExampleOfAsClass:
|
|
74
|
+
def __init__(self, username: str) -> None:
|
|
75
|
+
self.username = username
|
|
76
|
+
|
|
77
|
+
async def main() -> None:
|
|
78
|
+
db_pool = PSQLPool()
|
|
79
|
+
query_result: QueryResult = await db_pool.execute(
|
|
80
|
+
"SELECT username FROM users WHERE id = $1",
|
|
81
|
+
[100],
|
|
82
|
+
)
|
|
83
|
+
class_results: List[ExampleOfAsClass] = query_result.as_class(
|
|
84
|
+
as_class=ExampleOfAsClass,
|
|
85
|
+
)
|
|
86
|
+
```
|
|
87
|
+
"""
|
|
88
|
+
|
|
89
|
+
def row_factory(
|
|
90
|
+
self,
|
|
91
|
+
row_factory: Callable[[dict[str, Any]], _RowFactoryRV],
|
|
92
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
93
|
+
) -> list[_RowFactoryRV]:
|
|
94
|
+
"""Use custom function to convert results from database.
|
|
95
|
+
|
|
96
|
+
`custom_decoders` must be used when you use
|
|
97
|
+
PostgreSQL Type isn't supported, read more in the docs.
|
|
98
|
+
|
|
99
|
+
Argument order: firstly we apply `custom_decoders` (if specified),
|
|
100
|
+
then we apply `row_factory`.
|
|
101
|
+
|
|
102
|
+
### Parameters:
|
|
103
|
+
- `row_factory`: function which takes `dict[str, Any]` as an argument.
|
|
104
|
+
- `custom_decoders`: functions for custom decoding.
|
|
105
|
+
|
|
106
|
+
### Returns:
|
|
107
|
+
List of type that return passed `row_factory`.
|
|
108
|
+
"""
|
|
109
|
+
|
|
110
|
+
class SingleQueryResult:
|
|
111
|
+
"""Single result."""
|
|
112
|
+
|
|
113
|
+
@typing.overload
|
|
114
|
+
def result(
|
|
115
|
+
self: Self,
|
|
116
|
+
as_tuple: None = None,
|
|
117
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
118
|
+
) -> dict[str, Any]: ...
|
|
119
|
+
@typing.overload
|
|
120
|
+
def result(
|
|
121
|
+
self: Self,
|
|
122
|
+
as_tuple: typing.Literal[False],
|
|
123
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
124
|
+
) -> dict[str, Any]: ...
|
|
125
|
+
@typing.overload
|
|
126
|
+
def result(
|
|
127
|
+
self: Self,
|
|
128
|
+
as_tuple: typing.Literal[True],
|
|
129
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
130
|
+
) -> tuple[typing.Any, ...]: ...
|
|
131
|
+
@typing.overload
|
|
132
|
+
def result(
|
|
133
|
+
self: Self,
|
|
134
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
135
|
+
as_tuple: bool | None = None,
|
|
136
|
+
) -> dict[Any, Any]:
|
|
137
|
+
"""Return result from database.
|
|
138
|
+
|
|
139
|
+
By default it returns result as a dict.
|
|
140
|
+
|
|
141
|
+
`custom_decoders` must be used when you use
|
|
142
|
+
PostgreSQL Type which isn't supported, read more in our docs.
|
|
143
|
+
"""
|
|
144
|
+
|
|
145
|
+
def as_class(
|
|
146
|
+
self: Self,
|
|
147
|
+
as_class: Callable[..., _CustomClass],
|
|
148
|
+
) -> list[_CustomClass]:
|
|
149
|
+
"""Convert results to passed class.
|
|
150
|
+
|
|
151
|
+
The main goal of this method is pydantic,
|
|
152
|
+
msgspec and dataclasses support.
|
|
153
|
+
|
|
154
|
+
### Parameters:
|
|
155
|
+
- `as_class`: Any callable python class for the results.
|
|
156
|
+
|
|
157
|
+
### Example:
|
|
158
|
+
```python
|
|
159
|
+
import asyncio
|
|
160
|
+
|
|
161
|
+
from psqlpy import PSQLPool, QueryResult
|
|
162
|
+
|
|
163
|
+
class ExampleOfAsClass:
|
|
164
|
+
def __init__(self, username: str) -> None:
|
|
165
|
+
self.username = username
|
|
166
|
+
|
|
167
|
+
async def main() -> None:
|
|
168
|
+
db_pool = PSQLPool()
|
|
169
|
+
connection = await db_pool.connection()
|
|
170
|
+
async with connection.transaction() as trans:
|
|
171
|
+
query_result: SingleQueryResult = await trans.fetch_row(
|
|
172
|
+
"SELECT username FROM users WHERE id = $1",
|
|
173
|
+
[100],
|
|
174
|
+
)
|
|
175
|
+
|
|
176
|
+
class_result: ExampleOfAsClass = query_result.as_class(
|
|
177
|
+
as_class=ExampleOfAsClass,
|
|
178
|
+
)
|
|
179
|
+
```
|
|
180
|
+
"""
|
|
181
|
+
|
|
182
|
+
def row_factory(
|
|
183
|
+
self,
|
|
184
|
+
row_factory: Callable[[dict[str, Any]], _RowFactoryRV],
|
|
185
|
+
custom_decoders: dict[str, Callable[[bytes], Any]] | None = None,
|
|
186
|
+
) -> _RowFactoryRV:
|
|
187
|
+
"""Use custom function to convert results from database.
|
|
188
|
+
|
|
189
|
+
`custom_decoders` must be used when you use
|
|
190
|
+
PostgreSQL Type isn't supported, read more in our docs.
|
|
191
|
+
|
|
192
|
+
Argument order: firstly we apply `custom_decoders` (if specified),
|
|
193
|
+
then we apply `row_factory`.
|
|
194
|
+
|
|
195
|
+
### Parameters:
|
|
196
|
+
- `row_factory`: function which takes `list[dict[str, Any]]` as an argument.
|
|
197
|
+
- `custom_decoders`: functions for custom decoding.
|
|
198
|
+
|
|
199
|
+
### Returns:
|
|
200
|
+
Type that return passed function.
|
|
201
|
+
"""
|
|
202
|
+
|
|
203
|
+
class IsolationLevel(Enum):
|
|
204
|
+
"""Isolation Level for transactions."""
|
|
205
|
+
|
|
206
|
+
ReadUncommitted = 1
|
|
207
|
+
ReadCommitted = 2
|
|
208
|
+
RepeatableRead = 3
|
|
209
|
+
Serializable = 4
|
|
210
|
+
|
|
211
|
+
class LoadBalanceHosts(Enum):
|
|
212
|
+
"""Load balancing configuration."""
|
|
213
|
+
|
|
214
|
+
# Make connection attempts to hosts in the order provided.
|
|
215
|
+
Disable = 1
|
|
216
|
+
# Make connection attempts to hosts in a random order.
|
|
217
|
+
Random = 2
|
|
218
|
+
|
|
219
|
+
class TargetSessionAttrs(Enum):
|
|
220
|
+
"""Properties required of a session."""
|
|
221
|
+
|
|
222
|
+
# No special properties are required.
|
|
223
|
+
Any = 1
|
|
224
|
+
# The session must allow writes.
|
|
225
|
+
ReadWrite = 2
|
|
226
|
+
# The session allow only reads.
|
|
227
|
+
ReadOnly = 3
|
|
228
|
+
|
|
229
|
+
class ReadVariant(Enum):
|
|
230
|
+
"""Class for Read Variant for transaction."""
|
|
231
|
+
|
|
232
|
+
ReadOnly = 1
|
|
233
|
+
ReadWrite = 2
|
|
234
|
+
|
|
235
|
+
class ConnRecyclingMethod(Enum):
|
|
236
|
+
"""Possible methods of how a connection is recycled.
|
|
237
|
+
|
|
238
|
+
The default is [`Fast`] which does not check the connection health or
|
|
239
|
+
perform any clean-up queries.
|
|
240
|
+
|
|
241
|
+
# Description:
|
|
242
|
+
## Fast:
|
|
243
|
+
Only run [`is_closed()`] when recycling existing connections.
|
|
244
|
+
|
|
245
|
+
Unless you have special needs this is a safe choice.
|
|
246
|
+
|
|
247
|
+
## Verified:
|
|
248
|
+
Run [`is_closed()`] and execute a test query.
|
|
249
|
+
|
|
250
|
+
This is slower, but guarantees that the database connection is ready to
|
|
251
|
+
be used. Normally, [`is_closed()`] should be enough to filter
|
|
252
|
+
out bad connections, but under some circumstances (i.e. hard-closed
|
|
253
|
+
network connections) it's possible that [`is_closed()`]
|
|
254
|
+
returns `false` while the connection is dead. You will receive an error
|
|
255
|
+
on your first query then.
|
|
256
|
+
|
|
257
|
+
## Clean:
|
|
258
|
+
Like [`Verified`] query method, but instead use the following sequence
|
|
259
|
+
of statements which guarantees a pristine connection:
|
|
260
|
+
```sql
|
|
261
|
+
CLOSE ALL;
|
|
262
|
+
SET SESSION AUTHORIZATION DEFAULT;
|
|
263
|
+
RESET ALL;
|
|
264
|
+
UNLISTEN *;
|
|
265
|
+
SELECT pg_advisory_unlock_all();
|
|
266
|
+
DISCARD TEMP;
|
|
267
|
+
DISCARD SEQUENCES;
|
|
268
|
+
```
|
|
269
|
+
This is similar to calling `DISCARD ALL`. but doesn't call
|
|
270
|
+
`DEALLOCATE ALL` and `DISCARD PLAN`, so that the statement cache is not
|
|
271
|
+
rendered ineffective.
|
|
272
|
+
"""
|
|
273
|
+
|
|
274
|
+
Fast = 1
|
|
275
|
+
Verified = 2
|
|
276
|
+
Clean = 3
|
|
277
|
+
|
|
278
|
+
class SslMode(Enum):
|
|
279
|
+
"""TLS configuration."""
|
|
280
|
+
|
|
281
|
+
# Do not use TLS.
|
|
282
|
+
Disable = 1
|
|
283
|
+
# Pay the overhead of encryption if the server insists on it.
|
|
284
|
+
Allow = 2
|
|
285
|
+
# Attempt to connect with TLS but allow sessions without.
|
|
286
|
+
Prefer = 3
|
|
287
|
+
# Require the use of TLS.
|
|
288
|
+
Require = 4
|
|
289
|
+
# I want my data encrypted,
|
|
290
|
+
# and I accept the overhead.
|
|
291
|
+
# I want to be sure that I connect to a server that I trust.
|
|
292
|
+
VerifyCa = 5
|
|
293
|
+
# I want my data encrypted,
|
|
294
|
+
# and I accept the overhead.
|
|
295
|
+
# I want to be sure that I connect to a server I trust,
|
|
296
|
+
# and that it's the one I specify.
|
|
297
|
+
VerifyFull = 6
|
|
298
|
+
|
|
299
|
+
class KeepaliveConfig:
|
|
300
|
+
"""Config for configuring keepalive."""
|
|
301
|
+
|
|
302
|
+
def __init__(self: Self, idle: int, interval: int, retries: int) -> None:
|
|
303
|
+
"""Initialize new config."""
|
|
304
|
+
|
|
305
|
+
class Cursor:
|
|
306
|
+
"""Represent binary cursor in a transaction.
|
|
307
|
+
|
|
308
|
+
It can be used as an asynchronous iterator.
|
|
309
|
+
"""
|
|
310
|
+
|
|
311
|
+
array_size: int
|
|
312
|
+
querystring: str
|
|
313
|
+
parameters: ParamsT = None
|
|
314
|
+
|
|
315
|
+
def __aiter__(self: Self) -> Self: ...
|
|
316
|
+
async def __anext__(self: Self) -> QueryResult: ...
|
|
317
|
+
async def __aenter__(self: Self) -> Self: ...
|
|
318
|
+
async def __aexit__(
|
|
319
|
+
self: Self,
|
|
320
|
+
exception_type: type[BaseException] | None,
|
|
321
|
+
exception: BaseException | None,
|
|
322
|
+
traceback: types.TracebackType | None,
|
|
323
|
+
) -> None: ...
|
|
324
|
+
async def start(self: Self) -> None:
|
|
325
|
+
"""Start the cursor.
|
|
326
|
+
|
|
327
|
+
Execute DECLARE command for the cursor.
|
|
328
|
+
"""
|
|
329
|
+
|
|
330
|
+
def close(self: Self) -> None:
|
|
331
|
+
"""Close the cursor.
|
|
332
|
+
|
|
333
|
+
Execute CLOSE command for the cursor.
|
|
334
|
+
"""
|
|
335
|
+
|
|
336
|
+
async def execute(
|
|
337
|
+
self: Self,
|
|
338
|
+
querystring: str,
|
|
339
|
+
parameters: ParamsT = None,
|
|
340
|
+
) -> QueryResult:
|
|
341
|
+
"""Start cursor with querystring and parameters.
|
|
342
|
+
|
|
343
|
+
Method should be used instead of context manager
|
|
344
|
+
and `start` method.
|
|
345
|
+
"""
|
|
346
|
+
|
|
347
|
+
async def fetchone(self: Self) -> QueryResult:
|
|
348
|
+
"""Return next one row from the cursor."""
|
|
349
|
+
|
|
350
|
+
async def fetchmany(self: Self, size: int | None = None) -> QueryResult:
|
|
351
|
+
"""Return <size> rows from the cursor."""
|
|
352
|
+
|
|
353
|
+
async def fetchall(self: Self, size: int | None = None) -> QueryResult:
|
|
354
|
+
"""Return all remaining rows from the cursor."""
|
|
355
|
+
|
|
356
|
+
class Transaction:
|
|
357
|
+
"""Single connection for executing queries.
|
|
358
|
+
|
|
359
|
+
It represents transaction in database.
|
|
360
|
+
|
|
361
|
+
You can create it only from `PSQLPool` with method
|
|
362
|
+
`.transaction()`.
|
|
363
|
+
"""
|
|
364
|
+
|
|
365
|
+
conn_dbname: str | None
|
|
366
|
+
user: str | None
|
|
367
|
+
host_addrs: list[str]
|
|
368
|
+
hosts: list[str]
|
|
369
|
+
ports: list[int]
|
|
370
|
+
|
|
371
|
+
async def __aenter__(self: Self) -> Self: ...
|
|
372
|
+
async def __aexit__(
|
|
373
|
+
self: Self,
|
|
374
|
+
exception_type: type[BaseException] | None,
|
|
375
|
+
exception: BaseException | None,
|
|
376
|
+
traceback: types.TracebackType | None,
|
|
377
|
+
) -> None: ...
|
|
378
|
+
async def begin(self: Self) -> None:
|
|
379
|
+
"""Start the transaction.
|
|
380
|
+
|
|
381
|
+
Execute `BEGIN`.
|
|
382
|
+
|
|
383
|
+
`begin()` can be called only once per transaction.
|
|
384
|
+
"""
|
|
385
|
+
|
|
386
|
+
async def commit(self: Self) -> None:
|
|
387
|
+
"""Commit the transaction.
|
|
388
|
+
|
|
389
|
+
Execute `COMMIT`.
|
|
390
|
+
|
|
391
|
+
`commit()` can be called only once per transaction.
|
|
392
|
+
"""
|
|
393
|
+
|
|
394
|
+
async def rollback(self: Self) -> None:
|
|
395
|
+
"""Rollback all queries in the transaction.
|
|
396
|
+
|
|
397
|
+
It can be done only one, after execution transaction marked
|
|
398
|
+
as `done`.
|
|
399
|
+
|
|
400
|
+
### Example:
|
|
401
|
+
```python
|
|
402
|
+
import asyncio
|
|
403
|
+
|
|
404
|
+
from psqlpy import PSQLPool, QueryResult
|
|
405
|
+
|
|
406
|
+
async def main() -> None:
|
|
407
|
+
db_pool = PSQLPool()
|
|
408
|
+
connection = await db_pool.connection()
|
|
409
|
+
transaction = connection.transaction()
|
|
410
|
+
await transaction.execute(...)
|
|
411
|
+
await transaction.rollback()
|
|
412
|
+
```
|
|
413
|
+
"""
|
|
414
|
+
|
|
415
|
+
async def execute(
|
|
416
|
+
self: Self,
|
|
417
|
+
querystring: str,
|
|
418
|
+
parameters: ParamsT = None,
|
|
419
|
+
prepared: bool = True,
|
|
420
|
+
) -> QueryResult:
|
|
421
|
+
"""Execute the query.
|
|
422
|
+
|
|
423
|
+
Querystring can contain `$<number>` parameters
|
|
424
|
+
for converting them in the driver side.
|
|
425
|
+
|
|
426
|
+
### Parameters:
|
|
427
|
+
- `querystring`: querystring to execute.
|
|
428
|
+
- `parameters`: list of parameters to pass in the query.
|
|
429
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
430
|
+
By default any querystring will be prepared.
|
|
431
|
+
|
|
432
|
+
### Example:
|
|
433
|
+
```python
|
|
434
|
+
import asyncio
|
|
435
|
+
|
|
436
|
+
from psqlpy import PSQLPool, QueryResult
|
|
437
|
+
|
|
438
|
+
async def main() -> None:
|
|
439
|
+
db_pool = PSQLPool()
|
|
440
|
+
connection = await db_pool.connection()
|
|
441
|
+
transaction = connection.transaction()
|
|
442
|
+
await transaction.begin()
|
|
443
|
+
query_result: QueryResult = await transaction.execute(
|
|
444
|
+
"SELECT username FROM users WHERE id = $1",
|
|
445
|
+
[100],
|
|
446
|
+
)
|
|
447
|
+
dict_result: List[Dict[Any, Any]] = query_result.result()
|
|
448
|
+
# You must call commit manually
|
|
449
|
+
await transaction.commit()
|
|
450
|
+
```
|
|
451
|
+
"""
|
|
452
|
+
|
|
453
|
+
async def execute_batch(
|
|
454
|
+
self: Self,
|
|
455
|
+
querystring: str,
|
|
456
|
+
) -> None:
|
|
457
|
+
"""
|
|
458
|
+
Execute a sequence of SQL statements using the simple query protocol.
|
|
459
|
+
|
|
460
|
+
Statements should be separated by semicolons.
|
|
461
|
+
If an error occurs, execution of the sequence will stop at that point.
|
|
462
|
+
This is intended for use when, for example,
|
|
463
|
+
initializing a database schema.
|
|
464
|
+
|
|
465
|
+
### Parameters:
|
|
466
|
+
- `querystring`: querystrings separated by semicolons.
|
|
467
|
+
"""
|
|
468
|
+
|
|
469
|
+
async def execute_many(
|
|
470
|
+
self: Self,
|
|
471
|
+
querystring: str,
|
|
472
|
+
parameters: Sequence[Sequence[Any]] | None = None,
|
|
473
|
+
prepared: bool = True,
|
|
474
|
+
) -> None: ...
|
|
475
|
+
"""Execute query multiple times with different parameters.
|
|
476
|
+
|
|
477
|
+
Querystring can contain `$<number>` parameters
|
|
478
|
+
for converting them in the driver side.
|
|
479
|
+
|
|
480
|
+
### Parameters:
|
|
481
|
+
- `querystring`: querystring to execute.
|
|
482
|
+
- `parameters`: list of list of parameters to pass in the query.
|
|
483
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
484
|
+
By default any querystring will be prepared.
|
|
485
|
+
|
|
486
|
+
### Example:
|
|
487
|
+
```python
|
|
488
|
+
import asyncio
|
|
489
|
+
|
|
490
|
+
from psqlpy import PSQLPool, QueryResult
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
async def main() -> None:
|
|
494
|
+
db_pool = PSQLPool()
|
|
495
|
+
connection = await db_pool.connection()
|
|
496
|
+
transaction = connection.transaction()
|
|
497
|
+
await transaction.begin()
|
|
498
|
+
query_result: QueryResult = await transaction.execute_many(
|
|
499
|
+
"INSERT INTO users (name, age) VALUES ($1, $2)",
|
|
500
|
+
[["boba", 10], ["boba", 20]],
|
|
501
|
+
)
|
|
502
|
+
dict_result: List[Dict[Any, Any]] = query_result.result()
|
|
503
|
+
# You must call commit manually
|
|
504
|
+
await transaction.commit()
|
|
505
|
+
```
|
|
506
|
+
"""
|
|
507
|
+
async def fetch(
|
|
508
|
+
self: Self,
|
|
509
|
+
querystring: str,
|
|
510
|
+
parameters: ParamsT = None,
|
|
511
|
+
prepared: bool = True,
|
|
512
|
+
) -> QueryResult:
|
|
513
|
+
"""Fetch the result from database.
|
|
514
|
+
|
|
515
|
+
It's the same as `execute` method, we made it because people are used
|
|
516
|
+
to `fetch` method name.
|
|
517
|
+
|
|
518
|
+
Querystring can contain `$<number>` parameters
|
|
519
|
+
for converting them in the driver side.
|
|
520
|
+
|
|
521
|
+
### Parameters:
|
|
522
|
+
- `querystring`: querystring to execute.
|
|
523
|
+
- `parameters`: list of parameters to pass in the query.
|
|
524
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
525
|
+
By default any querystring will be prepared.
|
|
526
|
+
"""
|
|
527
|
+
|
|
528
|
+
async def fetch_row(
|
|
529
|
+
self: Self,
|
|
530
|
+
querystring: str,
|
|
531
|
+
parameters: ParamsT = None,
|
|
532
|
+
prepared: bool = True,
|
|
533
|
+
) -> SingleQueryResult:
|
|
534
|
+
"""Fetch exaclty single row from query.
|
|
535
|
+
|
|
536
|
+
Query must return exactly one row, otherwise error will be raised.
|
|
537
|
+
Querystring can contain `$<number>` parameters
|
|
538
|
+
for converting them in the driver side.
|
|
539
|
+
|
|
540
|
+
|
|
541
|
+
### Parameters:
|
|
542
|
+
- `querystring`: querystring to execute.
|
|
543
|
+
- `parameters`: list of parameters to pass in the query.
|
|
544
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
545
|
+
By default any querystring will be prepared.
|
|
546
|
+
|
|
547
|
+
### Example:
|
|
548
|
+
```python
|
|
549
|
+
import asyncio
|
|
550
|
+
|
|
551
|
+
from psqlpy import PSQLPool, QueryResult
|
|
552
|
+
|
|
553
|
+
async def main() -> None:
|
|
554
|
+
db_pool = PSQLPool()
|
|
555
|
+
connection = await db_pool.connection()
|
|
556
|
+
transaction = connection.transaction()
|
|
557
|
+
await transaction.begin()
|
|
558
|
+
query_result: SingleQueryResult = await transaction.fetch_row(
|
|
559
|
+
"SELECT username FROM users WHERE id = $1",
|
|
560
|
+
[100],
|
|
561
|
+
)
|
|
562
|
+
dict_result: Dict[Any, Any] = query_result.result()
|
|
563
|
+
# You must call commit manually
|
|
564
|
+
await transaction.commit()
|
|
565
|
+
```
|
|
566
|
+
"""
|
|
567
|
+
|
|
568
|
+
async def fetch_val(
|
|
569
|
+
self: Self,
|
|
570
|
+
querystring: str,
|
|
571
|
+
parameters: ParamsT = None,
|
|
572
|
+
prepared: bool = True,
|
|
573
|
+
) -> Any | None:
|
|
574
|
+
"""Execute the query and return first value of the first row.
|
|
575
|
+
|
|
576
|
+
Returns an error if the query does not return exactly one row.
|
|
577
|
+
|
|
578
|
+
Querystring can contain `$<number>` parameters
|
|
579
|
+
for converting them in the driver side.
|
|
580
|
+
|
|
581
|
+
### Parameters:
|
|
582
|
+
- `querystring`: querystring to execute.
|
|
583
|
+
- `parameters`: list of parameters to pass in the query.
|
|
584
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
585
|
+
By default any querystring will be prepared.
|
|
586
|
+
|
|
587
|
+
### Raises
|
|
588
|
+
- `RustPSQLDriverPyBaseError`: if the query does not
|
|
589
|
+
return exactly one row
|
|
590
|
+
|
|
591
|
+
### Example:
|
|
592
|
+
```python
|
|
593
|
+
import asyncio
|
|
594
|
+
|
|
595
|
+
from psqlpy import PSQLPool, QueryResult
|
|
596
|
+
|
|
597
|
+
async def main() -> None:
|
|
598
|
+
db_pool = PSQLPool()
|
|
599
|
+
connection = await db_pool.connection()
|
|
600
|
+
transaction = connection.transaction()
|
|
601
|
+
await transaction.begin()
|
|
602
|
+
value: Any = await transaction.fetch_val(
|
|
603
|
+
"SELECT username FROM users WHERE id = $1",
|
|
604
|
+
[100],
|
|
605
|
+
)
|
|
606
|
+
```
|
|
607
|
+
"""
|
|
608
|
+
|
|
609
|
+
async def pipeline(
|
|
610
|
+
self,
|
|
611
|
+
queries: list[tuple[str, list[Any] | None]],
|
|
612
|
+
prepared: bool = True,
|
|
613
|
+
) -> list[QueryResult]:
|
|
614
|
+
"""Execute queries in pipeline.
|
|
615
|
+
|
|
616
|
+
Pipelining can improve performance in use cases in which multiple,
|
|
617
|
+
independent queries need to be executed.
|
|
618
|
+
In a traditional workflow,
|
|
619
|
+
each query is sent to the server after the previous query completes.
|
|
620
|
+
In contrast, pipelining allows the client to send all of the
|
|
621
|
+
queries to the server up front, minimizing time spent
|
|
622
|
+
by one side waiting for the other to finish sending data:
|
|
623
|
+
```
|
|
624
|
+
Sequential Pipelined
|
|
625
|
+
| Client | Server | | Client | Server |
|
|
626
|
+
|----------------|-----------------| |----------------|-----------------|
|
|
627
|
+
| send query 1 | | | send query 1 | |
|
|
628
|
+
| | process query 1 | | send query 2 | process query 1 |
|
|
629
|
+
| receive rows 1 | | | send query 3 | process query 2 |
|
|
630
|
+
| send query 2 | | | receive rows 1 | process query 3 |
|
|
631
|
+
| | process query 2 | | receive rows 2 | |
|
|
632
|
+
| receive rows 2 | | | receive rows 3 | |
|
|
633
|
+
| send query 3 | |
|
|
634
|
+
| | process query 3 |
|
|
635
|
+
| receive rows 3 | |
|
|
636
|
+
```
|
|
637
|
+
Read more: https://docs.rs/tokio-postgres/latest/tokio_postgres/#pipelining
|
|
638
|
+
|
|
639
|
+
### Parameters:
|
|
640
|
+
- `queries`: queries with parameters to execute.
|
|
641
|
+
- `prepared`: should the querystring/querystrings be prepared before the request.
|
|
642
|
+
By default any querystrings will be prepared.
|
|
643
|
+
|
|
644
|
+
### Example:
|
|
645
|
+
```python
|
|
646
|
+
import asyncio
|
|
647
|
+
|
|
648
|
+
from psqlpy import PSQLPool, QueryResult
|
|
649
|
+
|
|
650
|
+
async def main() -> None:
|
|
651
|
+
db_pool = PSQLPool()
|
|
652
|
+
connection = await db_pool.connection()
|
|
653
|
+
transaction = connection.transaction()
|
|
654
|
+
|
|
655
|
+
results: list[QueryResult] = await transaction.pipeline(
|
|
656
|
+
queries=[
|
|
657
|
+
(
|
|
658
|
+
"SELECT username FROM users WHERE id = $1",
|
|
659
|
+
[100],
|
|
660
|
+
),
|
|
661
|
+
(
|
|
662
|
+
"SELECT some_data FROM profiles",
|
|
663
|
+
None,
|
|
664
|
+
),
|
|
665
|
+
(
|
|
666
|
+
"INSERT INTO users (username, id) VALUES ($1, $2)",
|
|
667
|
+
["PSQLPy", 1],
|
|
668
|
+
),
|
|
669
|
+
]
|
|
670
|
+
)
|
|
671
|
+
```
|
|
672
|
+
"""
|
|
673
|
+
|
|
674
|
+
async def create_savepoint(self: Self, savepoint_name: str) -> None:
|
|
675
|
+
"""Create new savepoint.
|
|
676
|
+
|
|
677
|
+
One `savepoint_name` can be used once.
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
If you specify the same savepoint name more than once
|
|
681
|
+
exception will be raised.
|
|
682
|
+
|
|
683
|
+
### Parameters:
|
|
684
|
+
- `savepoint_name`: name of the savepoint.
|
|
685
|
+
|
|
686
|
+
### Example:
|
|
687
|
+
```python
|
|
688
|
+
import asyncio
|
|
689
|
+
|
|
690
|
+
from psqlpy import PSQLPool, QueryResult
|
|
691
|
+
|
|
692
|
+
async def main() -> None:
|
|
693
|
+
db_pool = PSQLPool()
|
|
694
|
+
connection = await db_pool.connection()
|
|
695
|
+
transaction = connection.transaction()
|
|
696
|
+
|
|
697
|
+
await transaction.create_savepoint("my_savepoint")
|
|
698
|
+
await transaction.execute(...)
|
|
699
|
+
await transaction.rollback_savepoint("my_savepoint")
|
|
700
|
+
```
|
|
701
|
+
"""
|
|
702
|
+
|
|
703
|
+
async def rollback_savepoint(self: Self, savepoint_name: str) -> None:
|
|
704
|
+
"""ROLLBACK to the specified `savepoint_name`.
|
|
705
|
+
|
|
706
|
+
If you specified wrong savepoint name
|
|
707
|
+
then exception will be raised.
|
|
708
|
+
|
|
709
|
+
### Parameters:
|
|
710
|
+
- `savepoint_name`: name of the SAVEPOINT.
|
|
711
|
+
|
|
712
|
+
### Example:
|
|
713
|
+
```python
|
|
714
|
+
import asyncio
|
|
715
|
+
|
|
716
|
+
from psqlpy import PSQLPool, QueryResult
|
|
717
|
+
|
|
718
|
+
async def main() -> None:
|
|
719
|
+
db_pool = PSQLPool()
|
|
720
|
+
connection = await db_pool.connection()
|
|
721
|
+
transaction = connection.transaction()
|
|
722
|
+
|
|
723
|
+
await transaction.savepoint("my_savepoint")
|
|
724
|
+
await transaction.execute(...)
|
|
725
|
+
await transaction.rollback_savepoint("my_savepoint")
|
|
726
|
+
```
|
|
727
|
+
"""
|
|
728
|
+
|
|
729
|
+
async def release_savepoint(self: Self, savepoint_name: str) -> None:
|
|
730
|
+
"""Execute ROLLBACK TO SAVEPOINT.
|
|
731
|
+
|
|
732
|
+
If you specified wrong savepoint name
|
|
733
|
+
then exception will be raised.
|
|
734
|
+
|
|
735
|
+
### Parameters:
|
|
736
|
+
- `savepoint_name`: name of the SAVEPOINT.
|
|
737
|
+
|
|
738
|
+
### Example:
|
|
739
|
+
```python
|
|
740
|
+
import asyncio
|
|
741
|
+
|
|
742
|
+
from psqlpy import PSQLPool, QueryResult
|
|
743
|
+
|
|
744
|
+
async def main() -> None:
|
|
745
|
+
db_pool = PSQLPool()
|
|
746
|
+
connection = await db_pool.connection()
|
|
747
|
+
transaction = connection.transaction()
|
|
748
|
+
|
|
749
|
+
await transaction.savepoint("my_savepoint")
|
|
750
|
+
await transaction.release_savepoint
|
|
751
|
+
```
|
|
752
|
+
"""
|
|
753
|
+
|
|
754
|
+
def cursor(
|
|
755
|
+
self: Self,
|
|
756
|
+
querystring: str,
|
|
757
|
+
parameters: ParamsT = None,
|
|
758
|
+
fetch_number: int | None = None,
|
|
759
|
+
) -> Cursor:
|
|
760
|
+
"""Create new cursor object.
|
|
761
|
+
|
|
762
|
+
Cursor can be used as an asynchronous iterator.
|
|
763
|
+
|
|
764
|
+
### Parameters:
|
|
765
|
+
- `querystring`: querystring to execute.
|
|
766
|
+
- `parameters`: list of parameters to pass in the query.
|
|
767
|
+
- `fetch_number`: how many rows need to fetch.
|
|
768
|
+
|
|
769
|
+
### Returns:
|
|
770
|
+
new initialized cursor.
|
|
771
|
+
|
|
772
|
+
### Example:
|
|
773
|
+
```python
|
|
774
|
+
import asyncio
|
|
775
|
+
|
|
776
|
+
from psqlpy import PSQLPool, QueryResult
|
|
777
|
+
|
|
778
|
+
async def main() -> None:
|
|
779
|
+
db_pool = PSQLPool()
|
|
780
|
+
connection = await db_pool.connection()
|
|
781
|
+
transaction = await connection.transaction()
|
|
782
|
+
|
|
783
|
+
cursor = transaction.cursor(
|
|
784
|
+
querystring="SELECT * FROM users WHERE username = $1",
|
|
785
|
+
parameters=["Some_Username"],
|
|
786
|
+
fetch_number=5,
|
|
787
|
+
)
|
|
788
|
+
await cursor.start()
|
|
789
|
+
|
|
790
|
+
async for fetched_result in cursor:
|
|
791
|
+
dict_result: List[Dict[Any, Any]] = fetched_result.result()
|
|
792
|
+
... # do something with this result.
|
|
793
|
+
|
|
794
|
+
await cursor.close()
|
|
795
|
+
```
|
|
796
|
+
"""
|
|
797
|
+
|
|
798
|
+
async def binary_copy_to_table(
|
|
799
|
+
self: Self,
|
|
800
|
+
source: bytes | bytearray | Buffer | BytesIO,
|
|
801
|
+
table_name: str,
|
|
802
|
+
columns: Sequence[str] | None = None,
|
|
803
|
+
schema_name: str | None = None,
|
|
804
|
+
) -> int:
|
|
805
|
+
"""Perform binary copy to PostgreSQL.
|
|
806
|
+
|
|
807
|
+
Execute `COPY table_name (<columns>) FROM STDIN (FORMAT binary)`
|
|
808
|
+
and start sending bytes to PostgreSQL.
|
|
809
|
+
|
|
810
|
+
IMPORTANT! User is responsible for the bytes passed to the database.
|
|
811
|
+
If bytes are incorrect user will get error from the database.
|
|
812
|
+
|
|
813
|
+
### Parameters:
|
|
814
|
+
- `source`: source of bytes.
|
|
815
|
+
- `table_name`: name of the table.
|
|
816
|
+
- `columns`: sequence of str columns.
|
|
817
|
+
- `schema_name`: name of the schema.
|
|
818
|
+
|
|
819
|
+
### Returns:
|
|
820
|
+
number of inserted rows;
|
|
821
|
+
"""
|
|
822
|
+
|
|
823
|
+
async def connect(
|
|
824
|
+
dsn: str | None = None,
|
|
825
|
+
username: str | None = None,
|
|
826
|
+
password: str | None = None,
|
|
827
|
+
host: str | None = None,
|
|
828
|
+
hosts: list[str] | None = None,
|
|
829
|
+
port: int | None = None,
|
|
830
|
+
ports: list[int] | None = None,
|
|
831
|
+
db_name: str | None = None,
|
|
832
|
+
target_session_attrs: TargetSessionAttrs | None = None,
|
|
833
|
+
options: str | None = None,
|
|
834
|
+
application_name: str | None = None,
|
|
835
|
+
connect_timeout_sec: int | None = None,
|
|
836
|
+
connect_timeout_nanosec: int | None = None,
|
|
837
|
+
tcp_user_timeout_sec: int | None = None,
|
|
838
|
+
tcp_user_timeout_nanosec: int | None = None,
|
|
839
|
+
keepalives: bool | None = None,
|
|
840
|
+
keepalives_idle_sec: int | None = None,
|
|
841
|
+
keepalives_idle_nanosec: int | None = None,
|
|
842
|
+
keepalives_interval_sec: int | None = None,
|
|
843
|
+
keepalives_interval_nanosec: int | None = None,
|
|
844
|
+
keepalives_retries: int | None = None,
|
|
845
|
+
load_balance_hosts: LoadBalanceHosts | None = None,
|
|
846
|
+
ssl_mode: SslMode | None = None,
|
|
847
|
+
ca_file: str | None = None,
|
|
848
|
+
) -> Connection:
|
|
849
|
+
"""Create new standalone connection."""
|
|
850
|
+
|
|
851
|
+
class Connection:
|
|
852
|
+
"""Connection from Database Connection Pool.
|
|
853
|
+
|
|
854
|
+
It can be created only from connection pool.
|
|
855
|
+
"""
|
|
856
|
+
|
|
857
|
+
conn_dbname: str | None
|
|
858
|
+
user: str | None
|
|
859
|
+
host_addrs: list[str]
|
|
860
|
+
hosts: list[str]
|
|
861
|
+
ports: list[int]
|
|
862
|
+
|
|
863
|
+
async def __aenter__(self: Self) -> Self: ...
|
|
864
|
+
async def __aexit__(
|
|
865
|
+
self: Self,
|
|
866
|
+
exception_type: type[BaseException] | None,
|
|
867
|
+
exception: BaseException | None,
|
|
868
|
+
traceback: types.TracebackType | None,
|
|
869
|
+
) -> None: ...
|
|
870
|
+
async def prepare(
|
|
871
|
+
self,
|
|
872
|
+
querystring: str,
|
|
873
|
+
parameters: ParamsT = None,
|
|
874
|
+
) -> PreparedStatement:
|
|
875
|
+
"""Prepare statement.
|
|
876
|
+
|
|
877
|
+
Return representation of prepared statement.
|
|
878
|
+
"""
|
|
879
|
+
|
|
880
|
+
async def execute(
|
|
881
|
+
self: Self,
|
|
882
|
+
querystring: str,
|
|
883
|
+
parameters: ParamsT = None,
|
|
884
|
+
prepared: bool = True,
|
|
885
|
+
) -> QueryResult:
|
|
886
|
+
"""Execute the query.
|
|
887
|
+
|
|
888
|
+
Querystring can contain `$<number>` parameters
|
|
889
|
+
for converting them in the driver side.
|
|
890
|
+
|
|
891
|
+
### Parameters:
|
|
892
|
+
- `querystring`: querystring to execute.
|
|
893
|
+
- `parameters`: list of parameters to pass in the query.
|
|
894
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
895
|
+
By default any querystring will be prepared.
|
|
896
|
+
|
|
897
|
+
### Returns:
|
|
898
|
+
query result as `QueryResult`
|
|
899
|
+
|
|
900
|
+
### Example:
|
|
901
|
+
```python
|
|
902
|
+
import asyncio
|
|
903
|
+
|
|
904
|
+
from psqlpy import PSQLPool, QueryResult
|
|
905
|
+
|
|
906
|
+
async def main() -> None:
|
|
907
|
+
db_pool = PSQLPool()
|
|
908
|
+
connection = await db_pool.connection()
|
|
909
|
+
query_result: QueryResult = await connection.execute(
|
|
910
|
+
"SELECT username FROM users WHERE id = $1",
|
|
911
|
+
[100],
|
|
912
|
+
)
|
|
913
|
+
dict_result: List[Dict[Any, Any]] = query_result.result()
|
|
914
|
+
```
|
|
915
|
+
"""
|
|
916
|
+
|
|
917
|
+
async def execute_batch(
|
|
918
|
+
self: Self,
|
|
919
|
+
querystring: str,
|
|
920
|
+
) -> None:
|
|
921
|
+
"""
|
|
922
|
+
Execute a sequence of SQL statements using the simple query protocol.
|
|
923
|
+
|
|
924
|
+
Statements should be separated by semicolons.
|
|
925
|
+
If an error occurs, execution of the sequence will stop at that point.
|
|
926
|
+
This is intended for use when, for example,
|
|
927
|
+
initializing a database schema.
|
|
928
|
+
|
|
929
|
+
### Parameters:
|
|
930
|
+
- `querystring`: querystrings separated by semicolons.
|
|
931
|
+
"""
|
|
932
|
+
|
|
933
|
+
async def execute_many(
|
|
934
|
+
self: Self,
|
|
935
|
+
querystring: str,
|
|
936
|
+
parameters: list[list[Any]] | None = None,
|
|
937
|
+
prepared: bool = True,
|
|
938
|
+
) -> None: ...
|
|
939
|
+
"""Execute query multiple times with different parameters.
|
|
940
|
+
|
|
941
|
+
Querystring can contain `$<number>` parameters
|
|
942
|
+
for converting them in the driver side.
|
|
943
|
+
|
|
944
|
+
### Parameters:
|
|
945
|
+
- `querystring`: querystring to execute.
|
|
946
|
+
- `parameters`: list of list of parameters to pass in the query.
|
|
947
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
948
|
+
By default any querystring will be prepared.
|
|
949
|
+
|
|
950
|
+
### Example:
|
|
951
|
+
```python
|
|
952
|
+
import asyncio
|
|
953
|
+
|
|
954
|
+
from psqlpy import PSQLPool, QueryResult
|
|
955
|
+
|
|
956
|
+
|
|
957
|
+
async def main() -> None:
|
|
958
|
+
db_pool = PSQLPool()
|
|
959
|
+
connection = await db_pool.connection()
|
|
960
|
+
await connection.execute_many(
|
|
961
|
+
"INSERT INTO users (name, age) VALUES ($1, $2)",
|
|
962
|
+
[["boba", 10], ["boba", 20]],
|
|
963
|
+
)
|
|
964
|
+
```
|
|
965
|
+
"""
|
|
966
|
+
async def fetch(
|
|
967
|
+
self: Self,
|
|
968
|
+
querystring: str,
|
|
969
|
+
parameters: ParamsT = None,
|
|
970
|
+
prepared: bool = True,
|
|
971
|
+
) -> QueryResult:
|
|
972
|
+
"""Fetch the result from database.
|
|
973
|
+
|
|
974
|
+
It's the same as `execute` method, we made it because people are used
|
|
975
|
+
to `fetch` method name.
|
|
976
|
+
|
|
977
|
+
Querystring can contain `$<number>` parameters
|
|
978
|
+
for converting them in the driver side.
|
|
979
|
+
|
|
980
|
+
### Parameters:
|
|
981
|
+
- `querystring`: querystring to execute.
|
|
982
|
+
- `parameters`: list of parameters to pass in the query.
|
|
983
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
984
|
+
By default any querystring will be prepared.
|
|
985
|
+
"""
|
|
986
|
+
|
|
987
|
+
async def fetch_row(
|
|
988
|
+
self: Self,
|
|
989
|
+
querystring: str,
|
|
990
|
+
parameters: ParamsT = None,
|
|
991
|
+
prepared: bool = True,
|
|
992
|
+
) -> SingleQueryResult:
|
|
993
|
+
"""Fetch exaclty single row from query.
|
|
994
|
+
|
|
995
|
+
Query must return exactly one row, otherwise error will be raised.
|
|
996
|
+
Querystring can contain `$<number>` parameters
|
|
997
|
+
for converting them in the driver side.
|
|
998
|
+
|
|
999
|
+
|
|
1000
|
+
### Parameters:
|
|
1001
|
+
- `querystring`: querystring to execute.
|
|
1002
|
+
- `parameters`: list of parameters to pass in the query.
|
|
1003
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
1004
|
+
By default any querystring will be prepared.
|
|
1005
|
+
|
|
1006
|
+
### Example:
|
|
1007
|
+
```python
|
|
1008
|
+
import asyncio
|
|
1009
|
+
|
|
1010
|
+
from psqlpy import PSQLPool, QueryResult
|
|
1011
|
+
|
|
1012
|
+
async def main() -> None:
|
|
1013
|
+
db_pool = PSQLPool()
|
|
1014
|
+
|
|
1015
|
+
connection = await db_pool.connection()
|
|
1016
|
+
query_result: SingleQueryResult = await transaction.fetch_row(
|
|
1017
|
+
"SELECT username FROM users WHERE id = $1",
|
|
1018
|
+
[100],
|
|
1019
|
+
)
|
|
1020
|
+
dict_result: Dict[Any, Any] = query_result.result()
|
|
1021
|
+
```
|
|
1022
|
+
"""
|
|
1023
|
+
|
|
1024
|
+
async def fetch_val(
|
|
1025
|
+
self: Self,
|
|
1026
|
+
querystring: str,
|
|
1027
|
+
parameters: ParamsT = None,
|
|
1028
|
+
prepared: bool = True,
|
|
1029
|
+
) -> Any:
|
|
1030
|
+
"""Execute the query and return first value of the first row.
|
|
1031
|
+
|
|
1032
|
+
Returns an error if the query does not return exactly one row.
|
|
1033
|
+
|
|
1034
|
+
Querystring can contain `$<number>` parameters
|
|
1035
|
+
for converting them in the driver side.
|
|
1036
|
+
|
|
1037
|
+
### Parameters:
|
|
1038
|
+
- `querystring`: querystring to execute.
|
|
1039
|
+
- `parameters`: list of parameters to pass in the query.
|
|
1040
|
+
- `prepared`: should the querystring be prepared before the request.
|
|
1041
|
+
By default any querystring will be prepared.
|
|
1042
|
+
|
|
1043
|
+
### Raises
|
|
1044
|
+
- `RustPSQLDriverPyBaseError`: if the query does not
|
|
1045
|
+
return exactly one row
|
|
1046
|
+
|
|
1047
|
+
### Example:
|
|
1048
|
+
```python
|
|
1049
|
+
import asyncio
|
|
1050
|
+
|
|
1051
|
+
from psqlpy import PSQLPool, QueryResult
|
|
1052
|
+
|
|
1053
|
+
async def main() -> None:
|
|
1054
|
+
db_pool = PSQLPool()
|
|
1055
|
+
connection = await db_pool.connection()
|
|
1056
|
+
# this will be an int value
|
|
1057
|
+
query_result_value = await connection.fetch_row(
|
|
1058
|
+
"SELECT COUNT(*) FROM users WHERE id > $1",
|
|
1059
|
+
[100],
|
|
1060
|
+
)
|
|
1061
|
+
```
|
|
1062
|
+
"""
|
|
1063
|
+
|
|
1064
|
+
def transaction(
|
|
1065
|
+
self,
|
|
1066
|
+
isolation_level: IsolationLevel | None = None,
|
|
1067
|
+
read_variant: ReadVariant | None = None,
|
|
1068
|
+
deferrable: bool | None = None,
|
|
1069
|
+
) -> Transaction:
|
|
1070
|
+
"""Create new transaction.
|
|
1071
|
+
|
|
1072
|
+
### Parameters:
|
|
1073
|
+
- `isolation_level`: configure isolation level of the transaction.
|
|
1074
|
+
- `read_variant`: configure read variant of the transaction.
|
|
1075
|
+
- `deferrable`: configure deferrable of the transaction.
|
|
1076
|
+
"""
|
|
1077
|
+
|
|
1078
|
+
def cursor(
|
|
1079
|
+
self: Self,
|
|
1080
|
+
querystring: str,
|
|
1081
|
+
parameters: ParamsT = None,
|
|
1082
|
+
fetch_number: int | None = None,
|
|
1083
|
+
) -> Cursor:
|
|
1084
|
+
"""Create new cursor object.
|
|
1085
|
+
|
|
1086
|
+
Cursor can be used as an asynchronous iterator.
|
|
1087
|
+
|
|
1088
|
+
### Parameters:
|
|
1089
|
+
- `querystring`: querystring to execute.
|
|
1090
|
+
- `parameters`: list of parameters to pass in the query.
|
|
1091
|
+
- `fetch_number`: how many rows need to fetch.
|
|
1092
|
+
|
|
1093
|
+
### Returns:
|
|
1094
|
+
new initialized cursor.
|
|
1095
|
+
|
|
1096
|
+
### Example:
|
|
1097
|
+
```python
|
|
1098
|
+
import asyncio
|
|
1099
|
+
|
|
1100
|
+
from psqlpy import PSQLPool, QueryResult
|
|
1101
|
+
|
|
1102
|
+
async def main() -> None:
|
|
1103
|
+
db_pool = PSQLPool()
|
|
1104
|
+
connection = await db_pool.connection()
|
|
1105
|
+
async with connection.transaction():
|
|
1106
|
+
async with connection.cursor(
|
|
1107
|
+
querystring="SELECT * FROM users WHERE username = $1",
|
|
1108
|
+
parameters=["Some_Username"],
|
|
1109
|
+
fetch_number=5,
|
|
1110
|
+
) as cursor:
|
|
1111
|
+
async for fetched_result in cursor:
|
|
1112
|
+
dict_result: List[Dict[Any, Any]] = fetched_result.result()
|
|
1113
|
+
... # do something with this result.
|
|
1114
|
+
```
|
|
1115
|
+
"""
|
|
1116
|
+
|
|
1117
|
+
def close(self: Self) -> None:
|
|
1118
|
+
"""Return connection back to the pool.
|
|
1119
|
+
|
|
1120
|
+
It necessary to commit all transactions and close all cursor
|
|
1121
|
+
made by this connection. Otherwise, it won't have any practical usage.
|
|
1122
|
+
"""
|
|
1123
|
+
|
|
1124
|
+
async def binary_copy_to_table(
|
|
1125
|
+
self: Self,
|
|
1126
|
+
source: bytes | bytearray | Buffer | BytesIO,
|
|
1127
|
+
table_name: str,
|
|
1128
|
+
columns: Sequence[str] | None = None,
|
|
1129
|
+
schema_name: str | None = None,
|
|
1130
|
+
) -> int:
|
|
1131
|
+
"""Perform binary copy to PostgreSQL.
|
|
1132
|
+
|
|
1133
|
+
Execute `COPY table_name (<columns>) FROM STDIN (FORMAT binary)`
|
|
1134
|
+
and start sending bytes to PostgreSQL.
|
|
1135
|
+
|
|
1136
|
+
IMPORTANT! User is responsible for the bytes passed to the database.
|
|
1137
|
+
If bytes are incorrect user will get error from the database.
|
|
1138
|
+
|
|
1139
|
+
### Parameters:
|
|
1140
|
+
- `source`: source of bytes.
|
|
1141
|
+
- `table_name`: name of the table.
|
|
1142
|
+
- `columns`: sequence of str columns.
|
|
1143
|
+
- `schema_name`: name of the schema.
|
|
1144
|
+
|
|
1145
|
+
### Returns:
|
|
1146
|
+
number of inserted rows;
|
|
1147
|
+
"""
|
|
1148
|
+
|
|
1149
|
+
class ConnectionPoolStatus:
|
|
1150
|
+
max_size: int
|
|
1151
|
+
size: int
|
|
1152
|
+
available: int
|
|
1153
|
+
waiting: int
|
|
1154
|
+
|
|
1155
|
+
class ConnectionPool:
|
|
1156
|
+
"""Connection pool for executing queries.
|
|
1157
|
+
|
|
1158
|
+
This is the main entrypoint in the library.
|
|
1159
|
+
"""
|
|
1160
|
+
|
|
1161
|
+
def __init__(
|
|
1162
|
+
self: Self,
|
|
1163
|
+
dsn: str | None = None,
|
|
1164
|
+
username: str | None = None,
|
|
1165
|
+
password: str | None = None,
|
|
1166
|
+
host: str | None = None,
|
|
1167
|
+
hosts: list[str] | None = None,
|
|
1168
|
+
port: int | None = None,
|
|
1169
|
+
ports: list[int] | None = None,
|
|
1170
|
+
db_name: str | None = None,
|
|
1171
|
+
target_session_attrs: TargetSessionAttrs | None = None,
|
|
1172
|
+
options: str | None = None,
|
|
1173
|
+
application_name: str | None = None,
|
|
1174
|
+
connect_timeout_sec: int | None = None,
|
|
1175
|
+
connect_timeout_nanosec: int | None = None,
|
|
1176
|
+
tcp_user_timeout_sec: int | None = None,
|
|
1177
|
+
tcp_user_timeout_nanosec: int | None = None,
|
|
1178
|
+
keepalives: bool | None = None,
|
|
1179
|
+
keepalives_idle_sec: int | None = None,
|
|
1180
|
+
keepalives_idle_nanosec: int | None = None,
|
|
1181
|
+
keepalives_interval_sec: int | None = None,
|
|
1182
|
+
keepalives_interval_nanosec: int | None = None,
|
|
1183
|
+
keepalives_retries: int | None = None,
|
|
1184
|
+
load_balance_hosts: LoadBalanceHosts | None = None,
|
|
1185
|
+
max_db_pool_size: int = 2,
|
|
1186
|
+
conn_recycling_method: ConnRecyclingMethod | None = None,
|
|
1187
|
+
ssl_mode: SslMode | None = None,
|
|
1188
|
+
ca_file: str | None = None,
|
|
1189
|
+
) -> None:
|
|
1190
|
+
"""Create new PostgreSQL connection pool.
|
|
1191
|
+
|
|
1192
|
+
It connects to the database and create pool.
|
|
1193
|
+
|
|
1194
|
+
You cannot set the minimum size for the connection
|
|
1195
|
+
pool, by it is 0.
|
|
1196
|
+
`ConnectionPool` doesn't create connections on startup.
|
|
1197
|
+
It makes new connection on demand.
|
|
1198
|
+
|
|
1199
|
+
If you specify `dsn` parameter then `username`, `password`,
|
|
1200
|
+
`host`, `hosts`, `port`, `ports`, `db_name` and `target_session_attrs`
|
|
1201
|
+
parameters will be ignored.
|
|
1202
|
+
|
|
1203
|
+
### Parameters:
|
|
1204
|
+
- `dsn`: Full dsn connection string.
|
|
1205
|
+
`postgres://postgres:postgres@localhost:5432/postgres?target_session_attrs=read-write`
|
|
1206
|
+
- `username`: Username of the user in the PostgreSQL
|
|
1207
|
+
- `password`: Password of the user in the PostgreSQL
|
|
1208
|
+
- `host`: Host of the PostgreSQL
|
|
1209
|
+
- `hosts`: Hosts of the PostgreSQL
|
|
1210
|
+
- `port`: Port of the PostgreSQL
|
|
1211
|
+
- `ports`: Ports of the PostgreSQL
|
|
1212
|
+
- `db_name`: Name of the database in PostgreSQL
|
|
1213
|
+
- `target_session_attrs`: Specifies requirements of the session.
|
|
1214
|
+
- `options`: Command line options used to configure the server
|
|
1215
|
+
- `application_name`: Sets the application_name parameter on the server.
|
|
1216
|
+
- `connect_timeout_sec`: The time limit in seconds applied to each socket-level
|
|
1217
|
+
connection attempt.
|
|
1218
|
+
Note that hostnames can resolve to multiple IP addresses,
|
|
1219
|
+
and this limit is applied to each address. Defaults to no timeout.
|
|
1220
|
+
- `connect_timeout_nanosec`: nanosec for connection timeout,
|
|
1221
|
+
can be used only with connect_timeout_sec.
|
|
1222
|
+
- `tcp_user_timeout_sec`: The time limit that
|
|
1223
|
+
transmitted data may remain unacknowledged
|
|
1224
|
+
before a connection is forcibly closed.
|
|
1225
|
+
This is ignored for Unix domain socket connections.
|
|
1226
|
+
It is only supported on systems where TCP_USER_TIMEOUT
|
|
1227
|
+
is available and will default to the system default if omitted
|
|
1228
|
+
or set to 0; on other systems, it has no effect.
|
|
1229
|
+
- `tcp_user_timeout_nanosec`: nanosec for cp_user_timeout,
|
|
1230
|
+
can be used only with tcp_user_timeout_sec.
|
|
1231
|
+
- `keepalives`: Controls the use of TCP keepalive.
|
|
1232
|
+
This option is ignored when connecting with Unix sockets.
|
|
1233
|
+
Defaults to on.
|
|
1234
|
+
- `keepalives_idle_sec`: The number of seconds of inactivity after
|
|
1235
|
+
which a keepalive message is sent to the server.
|
|
1236
|
+
This option is ignored when connecting with Unix sockets.
|
|
1237
|
+
Defaults to 2 hours.
|
|
1238
|
+
- `keepalives_idle_nanosec`: Nanosec for keepalives_idle_sec.
|
|
1239
|
+
- `keepalives_interval_sec`: The time interval between TCP keepalive probes.
|
|
1240
|
+
This option is ignored when connecting with Unix sockets.
|
|
1241
|
+
- `keepalives_interval_nanosec`: Nanosec for keepalives_interval_sec.
|
|
1242
|
+
- `keepalives_retries`: The maximum number of TCP keepalive probes
|
|
1243
|
+
that will be sent before dropping a connection.
|
|
1244
|
+
This option is ignored when connecting with Unix sockets.
|
|
1245
|
+
- `load_balance_hosts`: Controls the order in which the client tries to connect
|
|
1246
|
+
to the available hosts and addresses.
|
|
1247
|
+
Once a connection attempt is successful no other
|
|
1248
|
+
hosts and addresses will be tried.
|
|
1249
|
+
This parameter is typically used in combination with multiple host names
|
|
1250
|
+
or a DNS record that returns multiple IPs.
|
|
1251
|
+
If set to disable, hosts and addresses will be tried in the order provided.
|
|
1252
|
+
If set to random, hosts will be tried in a random order, and the IP addresses
|
|
1253
|
+
resolved from a hostname will also be tried in a random order.
|
|
1254
|
+
Defaults to disable.
|
|
1255
|
+
- `max_db_pool_size`: maximum size of the connection pool.
|
|
1256
|
+
- `conn_recycling_method`: how a connection is recycled.
|
|
1257
|
+
- `ssl_mode`: mode for ssl.
|
|
1258
|
+
- `ca_file`: Loads trusted root certificates from a file.
|
|
1259
|
+
The file should contain a sequence of PEM-formatted CA certificates.
|
|
1260
|
+
"""
|
|
1261
|
+
|
|
1262
|
+
def __iter__(self: Self) -> Self: ...
|
|
1263
|
+
def __enter__(self: Self) -> Self: ...
|
|
1264
|
+
def __exit__(
|
|
1265
|
+
self: Self,
|
|
1266
|
+
exception_type: type[BaseException] | None,
|
|
1267
|
+
exception: BaseException | None,
|
|
1268
|
+
traceback: types.TracebackType | None,
|
|
1269
|
+
) -> None: ...
|
|
1270
|
+
def status(self: Self) -> ConnectionPoolStatus:
|
|
1271
|
+
"""Return information about connection pool.
|
|
1272
|
+
|
|
1273
|
+
### Returns
|
|
1274
|
+
`ConnectionPoolStatus`
|
|
1275
|
+
"""
|
|
1276
|
+
|
|
1277
|
+
def resize(self: Self, new_max_size: int) -> None:
|
|
1278
|
+
"""Resize the connection pool.
|
|
1279
|
+
|
|
1280
|
+
This change the max_size of the pool dropping
|
|
1281
|
+
excess objects and/or making space for new ones.
|
|
1282
|
+
|
|
1283
|
+
### Parameters:
|
|
1284
|
+
- `new_max_size`: new size for the connection pool.
|
|
1285
|
+
"""
|
|
1286
|
+
|
|
1287
|
+
async def connection(self: Self) -> Connection:
|
|
1288
|
+
"""Create new connection.
|
|
1289
|
+
|
|
1290
|
+
It acquires new connection from the database pool.
|
|
1291
|
+
"""
|
|
1292
|
+
|
|
1293
|
+
def acquire(self: Self) -> Connection:
|
|
1294
|
+
"""Create new connection for async context manager.
|
|
1295
|
+
|
|
1296
|
+
Must be used only in async context manager.
|
|
1297
|
+
|
|
1298
|
+
### Example:
|
|
1299
|
+
```python
|
|
1300
|
+
import asyncio
|
|
1301
|
+
|
|
1302
|
+
from psqlpy import PSQLPool, QueryResult
|
|
1303
|
+
|
|
1304
|
+
async def main() -> None:
|
|
1305
|
+
db_pool = PSQLPool()
|
|
1306
|
+
async with db_pool.acquire() as connection:
|
|
1307
|
+
res = await connection.execute(...)
|
|
1308
|
+
```
|
|
1309
|
+
"""
|
|
1310
|
+
|
|
1311
|
+
def listener(self: Self) -> Listener:
|
|
1312
|
+
"""Create new listener."""
|
|
1313
|
+
|
|
1314
|
+
def close(self: Self) -> None:
|
|
1315
|
+
"""Close the connection pool."""
|
|
1316
|
+
|
|
1317
|
+
def connect_pool(
|
|
1318
|
+
dsn: str | None = None,
|
|
1319
|
+
username: str | None = None,
|
|
1320
|
+
password: str | None = None,
|
|
1321
|
+
host: str | None = None,
|
|
1322
|
+
hosts: list[str] | None = None,
|
|
1323
|
+
port: int | None = None,
|
|
1324
|
+
ports: list[int] | None = None,
|
|
1325
|
+
db_name: str | None = None,
|
|
1326
|
+
target_session_attrs: TargetSessionAttrs | None = None,
|
|
1327
|
+
options: str | None = None,
|
|
1328
|
+
application_name: str | None = None,
|
|
1329
|
+
connect_timeout_sec: int | None = None,
|
|
1330
|
+
connect_timeout_nanosec: int | None = None,
|
|
1331
|
+
tcp_user_timeout_sec: int | None = None,
|
|
1332
|
+
tcp_user_timeout_nanosec: int | None = None,
|
|
1333
|
+
keepalives: bool | None = None,
|
|
1334
|
+
keepalives_idle_sec: int | None = None,
|
|
1335
|
+
keepalives_idle_nanosec: int | None = None,
|
|
1336
|
+
keepalives_interval_sec: int | None = None,
|
|
1337
|
+
keepalives_interval_nanosec: int | None = None,
|
|
1338
|
+
keepalives_retries: int | None = None,
|
|
1339
|
+
load_balance_hosts: LoadBalanceHosts | None = None,
|
|
1340
|
+
max_db_pool_size: int = 2,
|
|
1341
|
+
conn_recycling_method: ConnRecyclingMethod | None = None,
|
|
1342
|
+
ssl_mode: SslMode | None = None,
|
|
1343
|
+
ca_file: str | None = None,
|
|
1344
|
+
) -> ConnectionPool:
|
|
1345
|
+
"""Create new PostgreSQL connection pool.
|
|
1346
|
+
|
|
1347
|
+
It connects to the database and create pool.
|
|
1348
|
+
|
|
1349
|
+
You cannot set the minimum size for the connection
|
|
1350
|
+
pool, by it is 0.
|
|
1351
|
+
`ConnectionPool` doesn't create connections on startup.
|
|
1352
|
+
It makes new connection on demand.
|
|
1353
|
+
|
|
1354
|
+
If you specify `dsn` parameter then `username`, `password`,
|
|
1355
|
+
`host`, `hosts`, `port`, `ports`, `db_name` and `target_session_attrs`
|
|
1356
|
+
parameters will be ignored.
|
|
1357
|
+
|
|
1358
|
+
### Parameters:
|
|
1359
|
+
- `dsn`: Full dsn connection string.
|
|
1360
|
+
`postgres://postgres:postgres@localhost:5432/postgres?target_session_attrs=read-write`
|
|
1361
|
+
- `username`: Username of the user in the PostgreSQL
|
|
1362
|
+
- `password`: Password of the user in the PostgreSQL
|
|
1363
|
+
- `host`: Host of the PostgreSQL
|
|
1364
|
+
- `hosts`: Hosts of the PostgreSQL
|
|
1365
|
+
- `port`: Port of the PostgreSQL
|
|
1366
|
+
- `ports`: Ports of the PostgreSQL
|
|
1367
|
+
- `db_name`: Name of the database in PostgreSQL
|
|
1368
|
+
- `target_session_attrs`: Specifies requirements of the session.
|
|
1369
|
+
- `options`: Command line options used to configure the server
|
|
1370
|
+
- `application_name`: Sets the application_name parameter on the server.
|
|
1371
|
+
- `connect_timeout_sec`: The time limit in seconds applied to each socket-level
|
|
1372
|
+
connection attempt.
|
|
1373
|
+
Note that hostnames can resolve to multiple IP addresses,
|
|
1374
|
+
and this limit is applied to each address. Defaults to no timeout.
|
|
1375
|
+
- `connect_timeout_nanosec`: nanosec for connection timeout,
|
|
1376
|
+
can be used only with connect_timeout_sec.
|
|
1377
|
+
- `tcp_user_timeout_sec`: The time limit that
|
|
1378
|
+
transmitted data may remain unacknowledged
|
|
1379
|
+
before a connection is forcibly closed.
|
|
1380
|
+
This is ignored for Unix domain socket connections.
|
|
1381
|
+
It is only supported on systems where TCP_USER_TIMEOUT
|
|
1382
|
+
is available and will default to the system default if omitted
|
|
1383
|
+
or set to 0; on other systems, it has no effect.
|
|
1384
|
+
- `tcp_user_timeout_nanosec`: nanosec for cp_user_timeout,
|
|
1385
|
+
can be used only with tcp_user_timeout_sec.
|
|
1386
|
+
- `keepalives`: Controls the use of TCP keepalive.
|
|
1387
|
+
This option is ignored when connecting with Unix sockets.
|
|
1388
|
+
Defaults to on.
|
|
1389
|
+
- `keepalives_idle_sec`: The number of seconds of inactivity after
|
|
1390
|
+
which a keepalive message is sent to the server.
|
|
1391
|
+
This option is ignored when connecting with Unix sockets.
|
|
1392
|
+
Defaults to 2 hours.
|
|
1393
|
+
- `keepalives_idle_nanosec`: Nanosec for keepalives_idle_sec.
|
|
1394
|
+
- `keepalives_interval_sec`: The time interval between TCP keepalive probes.
|
|
1395
|
+
This option is ignored when connecting with Unix sockets.
|
|
1396
|
+
- `keepalives_interval_nanosec`: Nanosec for keepalives_interval_sec.
|
|
1397
|
+
- `keepalives_retries`: The maximum number of TCP keepalive probes
|
|
1398
|
+
that will be sent before dropping a connection.
|
|
1399
|
+
This option is ignored when connecting with Unix sockets.
|
|
1400
|
+
- `load_balance_hosts`: Controls the order in which the client tries to connect
|
|
1401
|
+
to the available hosts and addresses.
|
|
1402
|
+
Once a connection attempt is successful no other
|
|
1403
|
+
hosts and addresses will be tried.
|
|
1404
|
+
This parameter is typically used in combination with multiple host names
|
|
1405
|
+
or a DNS record that returns multiple IPs.
|
|
1406
|
+
If set to disable, hosts and addresses will be tried in the order provided.
|
|
1407
|
+
If set to random, hosts will be tried in a random order, and the IP addresses
|
|
1408
|
+
resolved from a hostname will also be tried in a random order.
|
|
1409
|
+
Defaults to disable.
|
|
1410
|
+
- `max_db_pool_size`: maximum size of the connection pool.
|
|
1411
|
+
- `conn_recycling_method`: how a connection is recycled.
|
|
1412
|
+
- `ssl_mode`: mode for ssl.
|
|
1413
|
+
- `ca_file`: Loads trusted root certificates from a file.
|
|
1414
|
+
The file should contain a sequence of PEM-formatted CA certificates.
|
|
1415
|
+
"""

class ConnectionPoolBuilder:
    """Builder for `ConnectionPool`."""

    def __init__(self: Self) -> None:
        """Initialize new instance of `ConnectionPoolBuilder`."""

    def build(self: Self) -> ConnectionPool:
        """
        Build `ConnectionPool`.

        ### Returns:
        `ConnectionPool`
        """

    def max_pool_size(self: Self, pool_size: int) -> Self:
        """
        Set maximum connection pool size.

        ### Parameters:
        - `pool_size`: size of the pool, must be more than 1.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def conn_recycling_method(
        self: Self,
        conn_recycling_method: ConnRecyclingMethod,
    ) -> Self:
        """
        Set connection recycling method.

        Connection recycling method is how a connection is recycled.

        ### Parameters:
        - `conn_recycling_method`: ConnRecyclingMethod enum.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def user(self: Self, user: str) -> Self:
        """
        Set username for `PostgreSQL`.

        ### Parameters:
        - `user`: username of the PostgreSQL user.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def password(self: Self, password: str) -> Self:
        """
        Set password for `PostgreSQL`.

        ### Parameters:
        - `password`: password for the `PostgreSQL` user.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def dbname(self: Self, dbname: str) -> Self:
        """
        Set database name for `PostgreSQL`.

        ### Parameters:
        - `dbname`: database for `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def options(self: Self, options: str) -> Self:
        """
        Set command line options used to configure the server.

        ### Parameters:
        - `options`: command line options.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def application_name(self: Self, application_name: str) -> Self:
        """
        Set the value of the `application_name` runtime parameter.

        ### Parameters:
        - `application_name`: `application_name` runtime parameter.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def ssl_mode(self: Self, ssl_mode: SslMode) -> Self:
        """
        Set the SSL configuration.

        ### Parameters:
        - `ssl_mode`: mode for TLS.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def ca_file(self: Self, ca_file: str) -> Self:
        """
        Set ca_file for SSL.

        ### Parameters:
        - `ca_file`: certificate file for the connection to PostgreSQL.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def host(self: Self, host: str) -> Self:
        """
        Add a host to the configuration.

        Multiple hosts can be specified by calling this method multiple times,
        and each will be tried in order.
        On Unix systems, a host starting with a `/` is interpreted
        as a path to a directory containing Unix domain sockets.
        There must be either no hosts,
        or the same number of hosts as hostaddrs.

        ### Parameters:
        - `host`: host of `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def hostaddr(self: Self, hostaddr: IPv4Address | IPv6Address) -> Self:
        """
        Add a hostaddr to the configuration.

        Multiple hostaddrs can be specified by calling
        this method multiple times, and each will be tried in order.
        There must be either no hostaddrs,
        or the same number of hostaddrs as hosts.

        ### Parameters:
        - `hostaddr`: hostaddr of `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def port(self: Self, port: int) -> Self:
        """
        Add a port to the configuration.

        Multiple ports can be specified by calling this method multiple times.
        There must either be no ports,
        in which case the default of 5432 is used,
        a single port, in which case it is used for all hosts,
        or the same number of ports as hosts.

        ### Parameters:
        - `port`: port for hosts of `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def connect_timeout(self: Self, connect_timeout: int) -> Self:
        """
        Set the timeout applied to socket-level connection attempts.

        Note that hostnames can resolve to multiple IP addresses,
        and this timeout will apply to each address of each
        host separately. Defaults to no limit.

        ### Parameters:
        - `connect_timeout`: connection timeout to `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def tcp_user_timeout(self: Self, tcp_user_timeout: int) -> Self:
        """
        Set the TCP user timeout.

        This is ignored for Unix domain socket connections.
        It is only supported on systems where TCP_USER_TIMEOUT is available
        and will default to the system default if omitted or set to 0;
        on other systems, it has no effect.

        ### Parameters:
        - `tcp_user_timeout`: TCP user timeout for `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def target_session_attrs(
        self: Self,
        target_session_attrs: TargetSessionAttrs,
    ) -> Self:
        """
        Set the requirements of the session.

        This can be used to connect to the primary server in a
        clustered database rather than one of the read-only
        secondary servers. Defaults to `Any`.

        ### Parameters:
        - `target_session_attrs`: target_session_attrs for `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def load_balance_hosts(
        self: Self,
        load_balance_hosts: LoadBalanceHosts,
    ) -> Self:
        """
        Set the host load balancing behavior.

        Defaults to `disable`.

        ### Parameters:
        - `load_balance_hosts`: load_balance_hosts for `PostgreSQL`.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def keepalives(
        self: Self,
        keepalives: bool,
    ) -> Self:
        """
        Control the use of TCP keepalive.

        This is ignored for Unix domain socket connections.

        Defaults to `true`.

        ### Parameters:
        - `keepalives`: boolean value for keepalives.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def keepalives_idle(
        self: Self,
        keepalives_idle: int,
    ) -> Self:
        """
        Set the amount of idle time before a keepalive packet is sent on the connection.

        This is ignored for Unix domain sockets,
        or if the `keepalives` option is disabled.

        Defaults to 2 hours.

        ### Parameters:
        - `keepalives_idle`: idle time in seconds.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def keepalives_interval(
        self: Self,
        keepalives_interval: int,
    ) -> Self:
        """
        Set the time interval between TCP keepalive probes.

        On Windows, this sets the value of the
        tcp_keepalive struct keepalive interval field.

        This is ignored for Unix domain sockets,
        or if the `keepalives` option is disabled.

        ### Parameters:
        - `keepalives_interval`: interval in seconds.

        ### Returns:
        `ConnectionPoolBuilder`
        """

    def keepalives_retries(
        self: Self,
        keepalives_retries: int,
    ) -> Self:
        """Keepalives Retries.

        Set the maximum number of TCP keepalive probes
        that will be sent before dropping a connection.

        This is ignored for Unix domain sockets,
        or if the `keepalives` option is disabled.

        ### Parameters:
        - `keepalives_retries`: number of retries.

        ### Returns:
        `ConnectionPoolBuilder`
        """

class Listener:
    """Listener for the LISTEN command.

    Can be used in two ways:
    1) As a background task
    2) As an asynchronous iterator

    ## Examples

    ### Background task:

    ```python
    async def callback(
        connection: Connection,
        payload: str,
        channel: str,
        process_id: int,
    ) -> None: ...

    async def main():
        pool = ConnectionPool()

        listener = pool.listener()
        await listener.add_callback(
            channel="test_channel",
            callback=callback,
        )
        await listener.startup()

        listener.listen()
    ```

    ### Async iterator
    ```python
    from psqlpy import ConnectionPool, ListenerNotificationMsg

    async def msg_processor(
        msg: ListenerNotificationMsg,
    ) -> None:
        ...

    async def main():
        pool = ConnectionPool()

        listener = pool.listener()
        await listener.add_callback(
            channel="test_channel",
            callback=callback,
        )
        await listener.startup()

        async for msg in listener:
            await msg_processor(msg)
    ```
    """

    connection: Connection
    is_started: bool

    def __aiter__(self: Self) -> Self: ...
    async def __anext__(self: Self) -> ListenerNotificationMsg: ...
    async def __aenter__(self: Self) -> Self: ...
    async def __aexit__(
        self: Self,
        exception_type: type[BaseException] | None,
        exception: BaseException | None,
        traceback: types.TracebackType | None,
    ) -> None: ...
    async def startup(self: Self) -> None:
        """Start up the listener.

        Each listener MUST be started up.
        """

    async def shutdown(self: Self) -> None:
        """Shut down the listener.

        Abort listen and release the underlying connection.
        """

    async def add_callback(
        self: Self,
        channel: str,
        callback: Callable[
            [Connection, str, str, int],
            Awaitable[None],
        ],
    ) -> None:
        """Add a callback to the channel.

        The callback must be an async function with a signature like this:
        ```python
        async def callback(
            connection: Connection,
            payload: str,
            channel: str,
            process_id: int,
        ) -> None: ...
        ```

        Callback parameters are passed as args on the Rust side.
        """

    async def clear_channel_callbacks(self, channel: str) -> None:
        """Remove all callbacks for the channel.

        ### Parameters:
        - `channel`: name of the channel.
        """

    async def clear_all_channels(self) -> None:
        """Clear callbacks for all channels."""

    def listen(self: Self) -> None:
        """Start listening.

        Start the actual listening.
        In the background it creates a task in the Rust event loop.
        """

    def abort_listen(self: Self) -> None:
        """Abort listen.

        If the `listen()` method was called, stop listening;
        otherwise do nothing.
        """

class ListenerNotificationMsg:
    """Listener message in async iterator."""

    process_id: int
    channel: str
    payload: str
    connection: Connection

class Column:
    name: str
    table_oid: int | None

class PreparedStatement:
    async def execute(self: Self) -> QueryResult:
        """Execute prepared statement."""

    def cursor(self: Self) -> Cursor:
        """Create new server-side cursor based on prepared statement."""

    def columns(self: Self) -> list[Column]:
        """Return information about statement columns."""
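
# ---------------------------------------------------------------------------
# Illustrative sketch (added in this review, not part of the package):
# how `PreparedStatement` above might be used once obtained. How a statement
# is created is not shown in this part of the stub; the `connection.prepare()`
# call below is an assumption and may differ from the real API.
# ---------------------------------------------------------------------------
async def _example_prepared_statement(connection: Connection) -> None:
    statement = await connection.prepare("SELECT id, name FROM users")  # assumed API
    result: QueryResult = await statement.execute()   # declared above
    columns: list[Column] = statement.columns()       # declared above
    cursor: Cursor = statement.cursor()               # declared above
    _ = (result, columns, cursor)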