asyncpg-typed 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- asyncpg_typed/__init__.py +227 -373
- {asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/METADATA +39 -13
- asyncpg_typed-0.1.2.dist-info/RECORD +8 -0
- asyncpg_typed-0.1.0.dist-info/RECORD +0 -8
- {asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/WHEEL +0 -0
- {asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/licenses/LICENSE +0 -0
- {asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/top_level.txt +0 -0
- {asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/zip-safe +0 -0
asyncpg_typed/__init__.py
CHANGED
@@ -4,43 +4,63 @@ Type-safe queries for asyncpg.
 :see: https://github.com/hunyadi/asyncpg_typed
 """

-__version__ = "0.1.0"
+__version__ = "0.1.2"
 __author__ = "Levente Hunyadi"
 __copyright__ = "Copyright 2025, Levente Hunyadi"
 __license__ = "MIT"
 __maintainer__ = "Levente Hunyadi"
 __status__ = "Production"

+import enum
 import sys
 import typing
 from abc import abstractmethod
-from collections.abc import Iterable, Sequence
-from datetime import date, datetime, time
+from collections.abc import Callable, Iterable, Sequence
+from datetime import date, datetime, time, timedelta
 from decimal import Decimal
 from functools import reduce
 from io import StringIO
 from types import UnionType
-from typing import Any,
+from typing import Any, Protocol, TypeAlias, TypeVar, Union, get_args, get_origin, overload
 from uuid import UUID

 import asyncpg
 from asyncpg.prepared_stmt import PreparedStatement

 if sys.version_info < (3, 11):
-    from typing_extensions import TypeVarTuple, Unpack
+    from typing_extensions import LiteralString, TypeVarTuple, Unpack
 else:
-    from typing import TypeVarTuple, Unpack
+    from typing import LiteralString, TypeVarTuple, Unpack

-
-DATA_TYPES: list[type[Any]] = [bool, int, float, Decimal, date, time, datetime, str, bytes, UUID]
+JsonType = None | bool | int | float | str | dict[str, "JsonType"] | list["JsonType"]

-
-
+RequiredJsonType = bool | int | float | str | dict[str, "JsonType"] | list["JsonType"]
+
+TargetType: TypeAlias = type[Any] | UnionType
+
+if sys.version_info >= (3, 11):
+
+    def is_enum_type(typ: object) -> bool:
+        """
+        `True` if the specified type is an enumeration type.
+        """
+
+        return isinstance(typ, enum.EnumType)
+
+else:
+
+    def is_enum_type(typ: object) -> bool:
+        """
+        `True` if the specified type is an enumeration type.
+        """
+
+        # use an explicit isinstance(..., type) check to filter out special forms like generics
+        return isinstance(typ, type) and issubclass(typ, enum.Enum)


 def is_union_type(tp: Any) -> bool:
     """
-
+    `True` if `tp` is a union type such as `A | B` or `Union[A, B]`.
     """

     origin = get_origin(tp)
@@ -49,7 +69,7 @@ def is_union_type(tp: Any) -> bool:

 def is_optional_type(tp: Any) -> bool:
     """
-
+    `True` if `tp` is an optional type such as `T | None`, `Optional[T]` or `Union[T, None]`.
     """

     return is_union_type(tp) and any(a is type(None) for a in get_args(tp))
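As a quick illustration of what these helper predicates report, here is a standalone sketch (not part of the diff); it assumes asyncpg-typed 0.1.2 is installed and that these module-level helpers remain importable, which the package does not document as public API:

```python
from typing import Optional, Union

from asyncpg_typed import is_optional_type, is_union_type  # internal helpers, imported for illustration only

# both spellings of a union are recognized
assert is_union_type(int | str)
assert is_union_type(Union[int, str])
assert not is_union_type(int)

# an optional type is a union that includes None
assert is_optional_type(str | None)
assert is_optional_type(Optional[str])
assert not is_optional_type(int | str)
```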
@@ -57,12 +77,20 @@ def is_optional_type(tp: Any) -> bool:

 def is_standard_type(tp: Any) -> bool:
     """
-
+    `True` if the type represents a built-in or a well-known standard type.
     """

     return tp.__module__ == "builtins" or tp.__module__ == UnionType.__module__


+def is_json_type(tp: Any) -> bool:
+    """
+    `True` if the type represents an object de-serialized from a JSON string.
+    """
+
+    return tp in [JsonType, RequiredJsonType]
+
+
 def make_union_type(tpl: list[Any]) -> UnionType:
     """
     Creates a `UnionType` (a.k.a. `A | B | C`) dynamically at run time.
@@ -91,52 +119,90 @@ def get_required_type(tp: Any) -> Any:
     return type(None)


-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-"
-
+_json_converter: Callable[[str], JsonType]
+if typing.TYPE_CHECKING:
+    import json
+
+    _json_decoder = json.JSONDecoder()
+    _json_converter = _json_decoder.decode
+else:
+    try:
+        import orjson
+
+        _json_converter = orjson.loads
+    except ModuleNotFoundError:
+        import json
+
+        _json_decoder = json.JSONDecoder()
+        _json_converter = _json_decoder.decode
+
+
+def get_converter_for(tp: Any) -> Callable[[Any], Any]:
+    """
+    Returns a callable that takes a wire type and returns a target type.
+
+    A wire type is one of the types returned by asyncpg.
+    A target type is one of the types supported by the library.
+    """
+
+    if is_json_type(tp):
+        # asyncpg returns fields of type `json` and `jsonb` as `str`, which must be de-serialized
+        return _json_converter
+    else:
+        # target data types that require conversion must have a single-argument `__init__` that takes an object of the source type
+        return tp
+
+
+# maps PostgreSQL internal type names to compatible Python types
+_name_to_type: dict[str, tuple[Any, ...]] = {
+    "bool": (bool,),
+    "int2": (int,),
+    "int4": (int,),
+    "int8": (int,),
+    "float4": (float,),
+    "float8": (float,),
+    "numeric": (Decimal,),
+    "date": (date,),
+    "time": (time,),
+    "timetz": (time,),
+    "timestamp": (datetime,),
+    "timestamptz": (datetime,),
+    "interval": (timedelta,),
+    "bpchar": (str,),
+    "varchar": (str,),
+    "text": (str,),
+    "bytea": (bytes,),
+    "json": (str, RequiredJsonType),
+    "jsonb": (str, RequiredJsonType),
+    "uuid": (UUID,),
+    "xml": (str,),
 }


-def check_data_type(name: str, data_type:
+def check_data_type(schema: str, name: str, data_type: TargetType) -> bool:
     """
     Verifies if the Python target type can represent the PostgreSQL source type.
     """

-
-
+    if schema == "pg_catalog":
+        if is_enum_type(data_type):
+            return name in ["bpchar", "varchar", "text"]

-
-        return
-
-
+        expected_types = _name_to_type.get(name)
+        return expected_types is not None and data_type in expected_types
+    else:
+        if is_standard_type(data_type):
+            return False

-
-
+        # user-defined type registered with `conn.set_type_codec()`
+        return True


 class _SQLPlaceholder:
     ordinal: int
-    data_type:
+    data_type: TargetType

-    def __init__(self, ordinal: int, data_type:
+    def __init__(self, ordinal: int, data_type: TargetType) -> None:
         self.ordinal = ordinal
         self.data_type = data_type

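The converter setup in the hunk above prefers `orjson` when it is installed and falls back to the standard library `json` module otherwise. A minimal standalone sketch of the same optional-dependency pattern (independent of this package):

```python
import json
from collections.abc import Callable
from typing import Any

loads: Callable[[str], Any]
try:
    import orjson  # optional third-party parser with a faster native implementation

    loads = orjson.loads
except ModuleNotFoundError:
    loads = json.JSONDecoder().decode  # standard-library fallback

# either implementation turns a JSON document into plain Python objects
assert loads('{"key": [1, 2.5, null, true]}') == {"key": [1, 2.5, None, True]}
```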
@@ -150,37 +216,33 @@ class _SQLObject:
     """

     parameter_data_types: tuple[_SQLPlaceholder, ...]
-    resultset_data_types: tuple[
+    resultset_data_types: tuple[TargetType, ...]
     required: int
+    cast: int
+    converters: tuple[Callable[[Any], Any], ...]

     def __init__(
         self,
-
-
-        resultset: type[Any] | None = None,
+        input_data_types: tuple[TargetType, ...],
+        output_data_types: tuple[TargetType, ...],
     ) -> None:
-
-
-                self.parameter_data_types = tuple(_SQLPlaceholder(ordinal, arg) for ordinal, arg in enumerate(get_args(args), start=1))
-            else:
-                self.parameter_data_types = (_SQLPlaceholder(1, args),)
-        else:
-            self.parameter_data_types = ()
-
-        if resultset is not None:
-            if get_origin(resultset) is tuple:
-                self.resultset_data_types = get_args(resultset)
-            else:
-                self.resultset_data_types = (resultset,)
-        else:
-            self.resultset_data_types = ()
+        self.parameter_data_types = tuple(_SQLPlaceholder(ordinal, get_required_type(arg)) for ordinal, arg in enumerate(input_data_types, start=1))
+        self.resultset_data_types = tuple(get_required_type(data_type) for data_type in output_data_types)

         # create a bit-field of required types (1: required; 0: optional)
         required = 0
-        for index, data_type in enumerate(
+        for index, data_type in enumerate(output_data_types):
             required |= (not is_optional_type(data_type)) << index
         self.required = required

+        # create a bit-field of types that require cast or serialization (1: apply conversion; 0: forward value as-is)
+        cast = 0
+        for index, data_type in enumerate(self.resultset_data_types):
+            cast |= (is_enum_type(data_type) or is_json_type(data_type)) << index
+        self.cast = cast
+
+        self.converters = tuple(get_converter_for(data_type) for data_type in self.resultset_data_types)
+
     def _raise_required_is_none(self, row: tuple[Any, ...], row_index: int | None = None) -> None:
         """
         Raises an error with the index of the first column value that is of a required type but has been assigned a value of `None`.
@@ -319,7 +381,7 @@ class _SQLObject:
 if sys.version_info >= (3, 14):
     from string.templatelib import Interpolation, Template  # type: ignore[import-not-found]

-    SQLExpression: TypeAlias = Template |
+    SQLExpression: TypeAlias = Template | LiteralString

     class _SQLTemplate(_SQLObject):
         """
@@ -333,10 +395,10 @@ if sys.version_info >= (3, 14):
             self,
             template: Template,
             *,
-            args:
-            resultset:
+            args: tuple[TargetType, ...],
+            resultset: tuple[TargetType, ...],
         ) -> None:
-            super().__init__(args
+            super().__init__(args, resultset)

             for ip in template.interpolations:
                 if ip.conversion is not None:
@@ -348,7 +410,7 @@ if sys.version_info >= (3, 14):

             self.strings = template.strings

-            if
+            if len(self.parameter_data_types) > 0:

                 def _to_placeholder(ip: Interpolation) -> _SQLPlaceholder:
                     ordinal = int(ip.value)
@@ -369,7 +431,7 @@ if sys.version_info >= (3, 14):
             return buf.getvalue()

 else:
-    SQLExpression =
+    SQLExpression = LiteralString


 class _SQLString(_SQLObject):
@@ -383,17 +445,17 @@ class _SQLString(_SQLObject):
         self,
         sql: str,
         *,
-        args:
-        resultset:
+        args: tuple[TargetType, ...],
+        resultset: tuple[TargetType, ...],
     ) -> None:
-        super().__init__(args
+        super().__init__(args, resultset)
         self.sql = sql

     def query(self) -> str:
         return self.sql


-class _SQL:
+class _SQL(Protocol):
     """
     Represents a SQL statement with associated type information.
     """
@@ -422,7 +484,7 @@ class _SQLImpl(_SQL):
         stmt = await connection.prepare(self.sql.query())

         for attr, data_type in zip(stmt.get_attributes(), self.sql.resultset_data_types, strict=True):
-            if not check_data_type(attr.type.name, data_type):
+            if not check_data_type(attr.type.schema, attr.type.name, data_type):
                 raise TypeError(f"expected: {data_type} in column `{attr.name}`; got: `{attr.type.kind}` of `{attr.type.name}`")

         return stmt
@@ -434,60 +496,66 @@ class _SQLImpl(_SQL):
         stmt = await self._prepare(connection)
         await stmt.executemany(args)

+    def _cast_fetch(self, rows: list[asyncpg.Record]) -> list[tuple[Any, ...]]:
+        cast = self.sql.cast
+        if cast:
+            converters = self.sql.converters
+            resultset = [tuple((converters[i](value) if (value := row[i]) is not None and cast >> i & 1 else value) for i in range(len(row))) for row in rows]
+        else:
+            resultset = [tuple(value for value in row) for row in rows]
+        self.sql.check_rows(resultset)
+        return resultset
+
     async def fetch(self, connection: asyncpg.Connection, *args: Any) -> list[tuple[Any, ...]]:
         stmt = await self._prepare(connection)
         rows = await stmt.fetch(*args)
-
-        self.sql.check_rows(resultset)
-        return resultset
+        return self._cast_fetch(rows)

     async def fetchmany(self, connection: asyncpg.Connection, args: Iterable[Sequence[Any]]) -> list[tuple[Any, ...]]:
         stmt = await self._prepare(connection)
-        rows = await stmt.fetchmany(args)
-
-        resultset = [tuple(value for value in row) for row in rows]
-        self.sql.check_rows(resultset)
-        return resultset
+        rows = await stmt.fetchmany(args)
+        return self._cast_fetch(rows)

     async def fetchrow(self, connection: asyncpg.Connection, *args: Any) -> tuple[Any, ...] | None:
         stmt = await self._prepare(connection)
         row = await stmt.fetchrow(*args)
         if row is None:
             return None
-
+        cast = self.sql.cast
+        if cast:
+            converters = self.sql.converters
+            resultset = tuple((converters[i](value) if (value := row[i]) is not None and cast >> i & 1 else value) for i in range(len(row)))
+        else:
+            resultset = tuple(value for value in row)
         self.sql.check_row(resultset)
         return resultset

     async def fetchval(self, connection: asyncpg.Connection, *args: Any) -> Any:
         stmt = await self._prepare(connection)
         value = await stmt.fetchval(*args)
-        self.sql.
-
+        result = self.sql.converters[0](value) if value is not None and self.sql.cast else value
+        self.sql.check_value(result)
+        return result


-### START OF AUTO-GENERATED BLOCK ###
-
-PS = TypeVar("PS", bool, bool | None, int, int | None, float, float | None, Decimal, Decimal | None, date, date | None, time, time | None, datetime, datetime | None, str, str | None, bytes, bytes | None, UUID, UUID | None)
 P1 = TypeVar("P1")
-P2 = TypeVar("P2")
-P3 = TypeVar("P3")
-P4 = TypeVar("P4")
-P5 = TypeVar("P5")
-P6 = TypeVar("P6")
-P7 = TypeVar("P7")
-P8 = TypeVar("P8")
-RS = TypeVar("RS", bool, bool | None, int, int | None, float, float | None, Decimal, Decimal | None, date, date | None, time, time | None, datetime, datetime | None, str, str | None, bytes, bytes | None, UUID, UUID | None)
+PX = TypeVarTuple("PX")
+
+RT = TypeVar("RT")
 R1 = TypeVar("R1")
 R2 = TypeVar("R2")
 RX = TypeVarTuple("RX")


-
+### START OF AUTO-GENERATED BLOCK ###
+
+
+class SQL_P0(Protocol):
     @abstractmethod
     async def execute(self, connection: Connection) -> None: ...


-class SQL_P0_RS(Generic[R1], SQL_P0):
+class SQL_R1_P0(SQL_P0, Protocol[R1]):
     @abstractmethod
     async def fetch(self, connection: Connection) -> list[tuple[R1]]: ...
     @abstractmethod
@@ -496,305 +564,64 @@ class SQL_P0_RS(Generic[R1], SQL_P0):
     async def fetchval(self, connection: Connection) -> R1: ...


-class
-    @abstractmethod
-    async def fetch(self, connection: Connection) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P1(Generic[P1], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1]]) -> None: ...
-
-
-class SQL_P1_RS(Generic[P1, R1], SQL_P1[P1]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1) -> R1: ...
-
-
-class SQL_P1_RX(Generic[P1, R1, R2, Unpack[RX]], SQL_P1[P1]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P2(Generic[P1, P2], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1, arg2: P2) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1, P2]]) -> None: ...
-
-
-class SQL_P2_RS(Generic[P1, P2, R1], SQL_P2[P1, P2]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2) -> R1: ...
-
-
-class SQL_P2_RX(Generic[P1, P2, R1, R2, Unpack[RX]], SQL_P2[P1, P2]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2) -> list[tuple[R1, R2, Unpack[RX]]]: ...
+class SQL_RX_P0(SQL_P0, Protocol[RT]):
     @abstractmethod
-    async def
+    async def fetch(self, connection: Connection) -> list[RT]: ...
     @abstractmethod
-    async def fetchrow(self, connection: Connection
+    async def fetchrow(self, connection: Connection) -> RT | None: ...


-class
+class SQL_PX(Protocol[Unpack[PX]]):
     @abstractmethod
-    async def execute(self, connection: Connection,
+    async def execute(self, connection: Connection, *args: Unpack[PX]) -> None: ...
     @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[
+    async def executemany(self, connection: Connection, args: Iterable[tuple[Unpack[PX]]]) -> None: ...


-class
+class SQL_R1_PX(SQL_PX[Unpack[PX]], Protocol[R1, Unpack[PX]]):
     @abstractmethod
-    async def fetch(self, connection: Connection,
+    async def fetch(self, connection: Connection, *args: Unpack[PX]) -> list[tuple[R1]]: ...
     @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[
+    async def fetchmany(self, connection: Connection, args: Iterable[tuple[Unpack[PX]]]) -> list[tuple[R1]]: ...
     @abstractmethod
-    async def fetchrow(self, connection: Connection,
+    async def fetchrow(self, connection: Connection, *args: Unpack[PX]) -> tuple[R1] | None: ...
     @abstractmethod
-    async def fetchval(self, connection: Connection,
+    async def fetchval(self, connection: Connection, *args: Unpack[PX]) -> R1: ...


-class
+class SQL_RX_PX(SQL_PX[Unpack[PX]], Protocol[RT, Unpack[PX]]):
     @abstractmethod
-    async def fetch(self, connection: Connection,
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P4(Generic[P1, P2, P3, P4], _SQL):
+    async def fetch(self, connection: Connection, *args: Unpack[PX]) -> list[RT]: ...
     @abstractmethod
-    async def
+    async def fetchmany(self, connection: Connection, args: Iterable[tuple[Unpack[PX]]]) -> list[RT]: ...
     @abstractmethod
-    async def
-
-
-class SQL_P4_RS(Generic[P1, P2, P3, P4, R1], SQL_P4[P1, P2, P3, P4]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4) -> R1: ...
-
-
-class SQL_P4_RX(Generic[P1, P2, P3, P4, R1, R2, Unpack[RX]], SQL_P4[P1, P2, P3, P4]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P5(Generic[P1, P2, P3, P4, P5], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5]]) -> None: ...
-
-
-class SQL_P5_RS(Generic[P1, P2, P3, P4, P5, R1], SQL_P5[P1, P2, P3, P4, P5]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> R1: ...
-
-
-class SQL_P5_RX(Generic[P1, P2, P3, P4, P5, R1, R2, Unpack[RX]], SQL_P5[P1, P2, P3, P4, P5]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P6(Generic[P1, P2, P3, P4, P5, P6], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6]]) -> None: ...
-
-
-class SQL_P6_RS(Generic[P1, P2, P3, P4, P5, P6, R1], SQL_P6[P1, P2, P3, P4, P5, P6]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> R1: ...
-
-
-class SQL_P6_RX(Generic[P1, P2, P3, P4, P5, P6, R1, R2, Unpack[RX]], SQL_P6[P1, P2, P3, P4, P5, P6]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P7(Generic[P1, P2, P3, P4, P5, P6, P7], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7]]) -> None: ...
-
-
-class SQL_P7_RS(Generic[P1, P2, P3, P4, P5, P6, P7, R1], SQL_P7[P1, P2, P3, P4, P5, P6, P7]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> R1: ...
-
-
-class SQL_P7_RX(Generic[P1, P2, P3, P4, P5, P6, P7, R1, R2, Unpack[RX]], SQL_P7[P1, P2, P3, P4, P5, P6, P7]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7) -> tuple[R1, R2, Unpack[RX]] | None: ...
-
-
-class SQL_P8(Generic[P1, P2, P3, P4, P5, P6, P7, P8], _SQL):
-    @abstractmethod
-    async def execute(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> None: ...
-    @abstractmethod
-    async def executemany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7, P8]]) -> None: ...
-
-
-class SQL_P8_RS(Generic[P1, P2, P3, P4, P5, P6, P7, P8, R1], SQL_P8[P1, P2, P3, P4, P5, P6, P7, P8]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7, P8]]) -> list[tuple[R1]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> tuple[R1] | None: ...
-    @abstractmethod
-    async def fetchval(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> R1: ...
-
-
-class SQL_P8_RX(Generic[P1, P2, P3, P4, P5, P6, P7, P8, R1, R2, Unpack[RX]], SQL_P8[P1, P2, P3, P4, P5, P6, P7, P8]):
-    @abstractmethod
-    async def fetch(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchmany(self, connection: Connection, args: Iterable[tuple[P1, P2, P3, P4, P5, P6, P7, P8]]) -> list[tuple[R1, R2, Unpack[RX]]]: ...
-    @abstractmethod
-    async def fetchrow(self, connection: Connection, arg1: P1, arg2: P2, arg3: P3, arg4: P4, arg5: P5, arg6: P6, arg7: P7, arg8: P8) -> tuple[R1, R2, Unpack[RX]] | None: ...
+    async def fetchrow(self, connection: Connection, *args: Unpack[PX]) -> RT | None: ...


 @overload
 def sql(stmt: SQLExpression) -> SQL_P0: ...
 @overload
-def sql(stmt: SQLExpression, *,
-@overload
-def sql(stmt: SQLExpression, *, resultset: type[tuple[R1]]) -> SQL_P0_RS[R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P0_RX[R1, R2, Unpack[RX]]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[PS]) -> SQL_P1[PS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1]]) -> SQL_P1[P1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[PS], resultset: type[RS]) -> SQL_P1_RS[PS, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1]], resultset: type[tuple[R1]]) -> SQL_P1_RS[P1, R1]: ...
+def sql(stmt: SQLExpression, *, result: type[R1]) -> SQL_R1_P0[R1]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, resultset: type[tuple[R1]]) -> SQL_R1_P0[R1]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_RX_P0[tuple[R1, R2, Unpack[RX]]]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, arg: type[P1]) -> SQL_PX[P1]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, arg: type[P1], result: type[R1]) -> SQL_R1_PX[R1, P1]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, arg: type[P1], resultset: type[tuple[R1]]) -> SQL_R1_PX[R1, P1]: ...
 @overload
-def sql(stmt: SQLExpression, *,
+def sql(stmt: SQLExpression, *, arg: type[P1], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_RX_PX[tuple[R1, R2, Unpack[RX]], P1]: ...
 @overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1,
+def sql(stmt: SQLExpression, *, args: type[tuple[P1, Unpack[PX]]]) -> SQL_PX[P1, Unpack[PX]]: ...
 @overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1,
+def sql(stmt: SQLExpression, *, args: type[tuple[P1, Unpack[PX]]], result: type[R1]) -> SQL_R1_PX[R1, P1, Unpack[PX]]: ...
 @overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1,
+def sql(stmt: SQLExpression, *, args: type[tuple[P1, Unpack[PX]]], resultset: type[tuple[R1]]) -> SQL_R1_PX[R1, P1, Unpack[PX]]: ...
 @overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1,
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4]]) -> SQL_P4[P1, P2, P3, P4]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4]], resultset: type[RS]) -> SQL_P4_RS[P1, P2, P3, P4, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4]], resultset: type[tuple[R1]]) -> SQL_P4_RS[P1, P2, P3, P4, R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P4_RX[P1, P2, P3, P4, R1, R2, Unpack[RX]]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5]]) -> SQL_P5[P1, P2, P3, P4, P5]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5]], resultset: type[RS]) -> SQL_P5_RS[P1, P2, P3, P4, P5, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5]], resultset: type[tuple[R1]]) -> SQL_P5_RS[P1, P2, P3, P4, P5, R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P5_RX[P1, P2, P3, P4, P5, R1, R2, Unpack[RX]]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6]]) -> SQL_P6[P1, P2, P3, P4, P5, P6]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6]], resultset: type[RS]) -> SQL_P6_RS[P1, P2, P3, P4, P5, P6, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6]], resultset: type[tuple[R1]]) -> SQL_P6_RS[P1, P2, P3, P4, P5, P6, R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P6_RX[P1, P2, P3, P4, P5, P6, R1, R2, Unpack[RX]]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7]]) -> SQL_P7[P1, P2, P3, P4, P5, P6, P7]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7]], resultset: type[RS]) -> SQL_P7_RS[P1, P2, P3, P4, P5, P6, P7, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7]], resultset: type[tuple[R1]]) -> SQL_P7_RS[P1, P2, P3, P4, P5, P6, P7, R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P7_RX[P1, P2, P3, P4, P5, P6, P7, R1, R2, Unpack[RX]]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7, P8]]) -> SQL_P8[P1, P2, P3, P4, P5, P6, P7, P8]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7, P8]], resultset: type[RS]) -> SQL_P8_RS[P1, P2, P3, P4, P5, P6, P7, P8, RS]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7, P8]], resultset: type[tuple[R1]]) -> SQL_P8_RS[P1, P2, P3, P4, P5, P6, P7, P8, R1]: ...
-@overload
-def sql(stmt: SQLExpression, *, args: type[tuple[P1, P2, P3, P4, P5, P6, P7, P8]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_P8_RX[P1, P2, P3, P4, P5, P6, P7, P8, R1, R2, Unpack[RX]]: ...
+def sql(stmt: SQLExpression, *, args: type[tuple[P1, Unpack[PX]]], resultset: type[tuple[R1, R2, Unpack[RX]]]) -> SQL_RX_PX[tuple[R1, R2, Unpack[RX]], P1, Unpack[PX]]: ...


 ### END OF AUTO-GENERATED BLOCK ###
@@ -805,23 +632,50 @@ def sql(
     *,
     args: type[Any] | None = None,
     resultset: type[Any] | None = None,
+    arg: type[Any] | None = None,
+    result: type[Any] | None = None,
 ) -> _SQL:
     """
     Creates a SQL statement with associated type information.

-    :param stmt: SQL statement as a string or template.
-    :param args: Type signature for input parameters
-    :param resultset: Type signature for
+    :param stmt: SQL statement as a literal string or template.
+    :param args: Type signature for multiple input parameters (e.g. `tuple[bool, int, str]`).
+    :param resultset: Type signature for multiple resultset columns (e.g. `tuple[datetime, Decimal, str]`).
+    :param arg: Type signature for a single input parameter (e.g. `int`).
+    :param result: Type signature for a single result column (e.g. `UUID`).
     """

+    if args is not None and arg is not None:
+        raise TypeError("expected: either `args` or `arg`; got: both")
+    if resultset is not None and result is not None:
+        raise TypeError("expected: either `resultset` or `result`; got: both")
+
+    if args is not None:
+        if get_origin(args) is not tuple:
+            raise TypeError(f"expected: `type[tuple[T, ...]]` for `args`; got: {type(args)}")
+        input_data_types = get_args(args)
+    elif arg is not None:
+        input_data_types = (arg,)
+    else:
+        input_data_types = ()
+
+    if resultset is not None:
+        if get_origin(resultset) is not tuple:
+            raise TypeError(f"expected: `type[tuple[T, ...]]` for `resultset`; got: {type(resultset)}")
+        output_data_types = get_args(resultset)
+    elif result is not None:
+        output_data_types = (result,)
+    else:
+        output_data_types = ()
+
     if sys.version_info >= (3, 14):
         obj: _SQLObject
         match stmt:
             case Template():
-                obj = _SQLTemplate(stmt, args=
+                obj = _SQLTemplate(stmt, args=input_data_types, resultset=output_data_types)
             case str():
-                obj = _SQLString(stmt, args=
+                obj = _SQLString(stmt, args=input_data_types, resultset=output_data_types)
     else:
-        obj = _SQLString(stmt, args=
+        obj = _SQLString(stmt, args=input_data_types, resultset=output_data_types)

     return _SQLImpl(obj)
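To show how the new `arg` and `result` keyword parameters of `sql()` surface in application code, here is a hedged usage sketch; the connection settings, table and column names are hypothetical, and the example assumes the column types match the declared Python types:

```python
import asyncio
from uuid import UUID

import asyncpg

from asyncpg_typed import sql

# one input parameter and one result column, declared with the new `arg`/`result` keywords
USER_ID_BY_NAME = sql(
    "SELECT id FROM users WHERE name = $1;",  # hypothetical table with a `uuid` primary key
    arg=str,
    result=UUID,
)

async def main() -> None:
    conn = await asyncpg.connect()  # connection parameters omitted
    try:
        user_id = await USER_ID_BY_NAME.fetchval(conn, "alice")  # statically typed as UUID
        print(user_id)
    finally:
        await conn.close()

asyncio.run(main())
```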
{asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/METADATA
CHANGED

@@ -1,13 +1,13 @@
 Metadata-Version: 2.4
 Name: asyncpg_typed
-Version: 0.1.0
+Version: 0.1.2
 Summary: Type-safe queries for asyncpg
 Author-email: Levente Hunyadi <hunyadi@gmail.com>
 Maintainer-email: Levente Hunyadi <hunyadi@gmail.com>
 License-Expression: MIT
 Project-URL: Homepage, https://github.com/hunyadi/asyncpg_typed
 Project-URL: Source, https://github.com/hunyadi/asyncpg_typed
-Keywords: asyncpg,typed,database-client
+Keywords: asyncpg,typed,database-client,postgres
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Operating System :: OS Independent
@@ -74,6 +74,25 @@ try:

 finally:
     await conn.close()
+
+# create a list of data-class instances from a list of typed tuples
+@dataclass
+class DataObject:
+    boolean_value: bool
+    integer_value: int
+    string_value: str | None
+
+# ✅ Valid initializer call
+items = [DataObject(*row) for row in rows]
+
+@dataclass
+class MismatchedObject:
+    boolean_value: bool
+    integer_value: int
+    string_value: str
+
+# ⚠️ Argument of type "int | None" cannot be assigned to parameter "integer_value" of type "int" in function "__init__"; "None" is not assignable to "int"
+items = [MismatchedObject(*row) for row in rows]
 ```

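The README example above works because each row is a precisely typed tuple, so unpacking it into a data class is checked field by field. For completeness, a small hypothetical helper that applies the same positional-unpacking pattern at run time (routing rows through an untyped helper like this gives up the per-field static check the example demonstrates):

```python
from collections.abc import Iterable, Sequence
from dataclasses import dataclass
from typing import Any, TypeVar

T = TypeVar("T")

def rows_to_objects(factory: type[T], rows: Iterable[Sequence[Any]]) -> list[T]:
    """Instantiate one object per row by positional unpacking."""
    return [factory(*row) for row in rows]

@dataclass
class DataObject:
    boolean_value: bool
    integer_value: int
    string_value: str | None

# literal rows shown for brevity; in practice they would come from a typed fetch
rows = [(True, 1, "a"), (False, 2, None)]
items = rows_to_objects(DataObject, rows)
assert items[1].string_value is None
```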
@@ -85,19 +104,21 @@ Instantiate a SQL object with the `sql` function:

 ```python
 def sql(
-    stmt:
+    stmt: LiteralString | string.templatelib.Template,
     *,
-    args: None | type[
-    resultset: None | type[
+    args: None | type[tuple[P1, P2]] | type[tuple[P1, P2, P3]] | ... = None,
+    resultset: None | type[tuple[R1, R2]] | type[tuple[R1, R2, R3]] | ... = None,
+    arg: None | type[P] = None,
+    result: None | type[R] = None,
 ) -> _SQL: ...
 ```

-The parameter `stmt` represents a SQL expression, either as a
+The parameter `stmt` represents a SQL expression, either as a literal string or a template (i.e. a *t-string*).

 If the expression is a string, it can have PostgreSQL parameter placeholders such as `$1`, `$2` or `$3`:

 ```python
-
+"INSERT INTO table_name (col_1, col_2, col_3) VALUES ($1, $2, $3);"
 ```

 If the expression is a *t-string*, it can have replacement fields that evaluate to integers:
@@ -106,11 +127,10 @@ If the expression is a *t-string*, it can have replacement fields that evaluate
 t"INSERT INTO table_name (col_1, col_2, col_3) VALUES ({1}, {2}, {3});"
 ```

-The parameters `args` and `resultset` take a
+The parameters `args` and `resultset` take a `tuple` of several types `Px` or `Rx`, each of which may be any of the following:

 * (required) simple type
 * optional simple type (`T | None`)
-* `tuple` of several (required or optional) simple types.

 Simple types include:

@@ -124,6 +144,7 @@ Simple types include:
 * `str`
 * `bytes`
 * `uuid.UUID`
+* a user-defined class that derives from `StrEnum`

 Types are grouped together with `tuple`:

@@ -131,11 +152,11 @@ Types are grouped together with `tuple`:
 tuple[bool, int, str | None]
 ```

-Passing a simple type
+The parameters `arg` and `result` take a single type `P` or `R`. Passing a simple type (e.g. `type[T]`) directly via `arg` and `result` is for convenience, and is equivalent to passing a one-element tuple of the same simple type (i.e. `type[tuple[T]]`) via `args` and `resultset`.

 The number of types in `args` must correspond to the number of query parameters. (This is validated on calling `sql(...)` for the *t-string* syntax.) The number of types in `resultset` must correspond to the number of columns returned by the query.

-Both `args` and `resultset` types must be compatible with their corresponding PostgreSQL query parameter types and resultset column types, respectively. The following table shows the mapping between PostgreSQL and Python types.
+Both `args` and `resultset` types must be compatible with their corresponding PostgreSQL query parameter types and resultset column types, respectively. The following table shows the mapping between PostgreSQL and Python types. When there are multiple options separated by a slash, either of the types can be specified as a source or target type.

 | PostgreSQL type    | Python type        |
 | ------------------ | ------------------ |
@@ -152,13 +173,18 @@ Both `args` and `resultset` types must be compatible with their corresponding Po
 | `timetz`           | `time` (tz)        |
 | `timestamp`        | `datetime` (naive) |
 | `timestamptz`      | `datetime` (tz)    |
+| `interval`         | `timedelta`        |
 | `char(N)`          | `str`              |
 | `varchar(N)`       | `str`              |
 | `text`             | `str`              |
 | `bytea`            | `bytes`            |
-| `json`             | `str`
-| `jsonb`            | `str`
+| `json`             | `str`/`JsonType`   |
+| `jsonb`            | `str`/`JsonType`   |
+| `xml`              | `str`              |
 | `uuid`             | `UUID`             |
+| enumeration        | `E: StrEnum`       |
+
+PostgreSQL types `json` and `jsonb` are [returned by asyncpg](https://magicstack.github.io/asyncpg/current/usage.html#type-conversion) as Python type `str`. However, if we specify the union type `JsonType` in `args` or `resultset`, the JSON string is parsed as if by calling `json.loads()`. (`JsonType` is defined in the module `asyncpg_typed`.) If the library `orjson` is present, its faster routines are invoked instead of the slower standard library implementation in the module `json`.

 ### Using a SQL object

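To make the `JsonType` behaviour concrete, here is a hedged sketch of reading a `jsonb` column as parsed JSON rather than as a string (the table and column names are hypothetical):

```python
from asyncpg_typed import JsonType, sql

# `attributes` is assumed to be a `jsonb` column; declaring it as `JsonType`
# makes the fetched value a parsed Python object instead of a JSON string
PRODUCT_ATTRIBUTES = sql(
    "SELECT id, attributes FROM products WHERE id = $1;",
    arg=int,
    resultset=tuple[int, JsonType],
)

# later, inside an async function with an open asyncpg connection `conn`:
#     row = await PRODUCT_ATTRIBUTES.fetchrow(conn, 42)
#     if row is not None:
#         product_id, attributes = row  # `attributes` is dict/list/str/..., not a raw JSON string
```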
asyncpg_typed-0.1.2.dist-info/RECORD
ADDED

@@ -0,0 +1,8 @@
+asyncpg_typed/__init__.py,sha256=Z9UqmIr2QcSpGe7qC-ddMDDkwnJSGg5mm1dqiWPKYQM,24915
+asyncpg_typed/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+asyncpg_typed-0.1.2.dist-info/licenses/LICENSE,sha256=rx4jD36wX8TyLZaR2HEOJ6TphFPjKUqoCSSYWzwWNRk,1093
+asyncpg_typed-0.1.2.dist-info/METADATA,sha256=9wNzfDUQWAOhedM3g3cx_TYYlaaDjlqTrNq1qEqcK0k,9932
+asyncpg_typed-0.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+asyncpg_typed-0.1.2.dist-info/top_level.txt,sha256=T0X1nWnXRTi5a5oTErGy572ORDbM9UV9wfhRXWLsaoY,14
+asyncpg_typed-0.1.2.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
+asyncpg_typed-0.1.2.dist-info/RECORD,,
asyncpg_typed-0.1.0.dist-info/RECORD
REMOVED

@@ -1,8 +0,0 @@
-asyncpg_typed/__init__.py,sha256=6F8tV2H1ayXFptYmsXLEi3puqKT-U904qamONeCJXUA,36489
-asyncpg_typed/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-asyncpg_typed-0.1.0.dist-info/licenses/LICENSE,sha256=rx4jD36wX8TyLZaR2HEOJ6TphFPjKUqoCSSYWzwWNRk,1093
-asyncpg_typed-0.1.0.dist-info/METADATA,sha256=ti6ld6HyUOodNUCmbNru0xiUu5mNHM_Z2TiDvQm4CNA,8429
-asyncpg_typed-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-asyncpg_typed-0.1.0.dist-info/top_level.txt,sha256=T0X1nWnXRTi5a5oTErGy572ORDbM9UV9wfhRXWLsaoY,14
-asyncpg_typed-0.1.0.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2
-asyncpg_typed-0.1.0.dist-info/RECORD,,
{asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/WHEEL
File without changes

{asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/licenses/LICENSE
File without changes

{asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/top_level.txt
File without changes

{asyncpg_typed-0.1.0.dist-info → asyncpg_typed-0.1.2.dist-info}/zip-safe
File without changes