sqlspec 0.8.0__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/_typing.py +39 -6
- sqlspec/adapters/adbc/__init__.py +2 -2
- sqlspec/adapters/adbc/config.py +34 -11
- sqlspec/adapters/adbc/driver.py +167 -108
- sqlspec/adapters/aiosqlite/__init__.py +2 -2
- sqlspec/adapters/aiosqlite/config.py +2 -2
- sqlspec/adapters/aiosqlite/driver.py +28 -39
- sqlspec/adapters/asyncmy/__init__.py +3 -3
- sqlspec/adapters/asyncmy/config.py +11 -12
- sqlspec/adapters/asyncmy/driver.py +25 -34
- sqlspec/adapters/asyncpg/__init__.py +5 -5
- sqlspec/adapters/asyncpg/config.py +17 -19
- sqlspec/adapters/asyncpg/driver.py +249 -93
- sqlspec/adapters/duckdb/__init__.py +2 -2
- sqlspec/adapters/duckdb/config.py +2 -2
- sqlspec/adapters/duckdb/driver.py +49 -49
- sqlspec/adapters/oracledb/__init__.py +8 -8
- sqlspec/adapters/oracledb/config/__init__.py +6 -6
- sqlspec/adapters/oracledb/config/_asyncio.py +9 -10
- sqlspec/adapters/oracledb/config/_sync.py +8 -9
- sqlspec/adapters/oracledb/driver.py +114 -41
- sqlspec/adapters/psqlpy/__init__.py +0 -0
- sqlspec/adapters/psqlpy/config.py +258 -0
- sqlspec/adapters/psqlpy/driver.py +335 -0
- sqlspec/adapters/psycopg/__init__.py +10 -5
- sqlspec/adapters/psycopg/config/__init__.py +6 -6
- sqlspec/adapters/psycopg/config/_async.py +12 -12
- sqlspec/adapters/psycopg/config/_sync.py +13 -13
- sqlspec/adapters/psycopg/driver.py +180 -218
- sqlspec/adapters/sqlite/__init__.py +2 -2
- sqlspec/adapters/sqlite/config.py +2 -2
- sqlspec/adapters/sqlite/driver.py +43 -41
- sqlspec/base.py +275 -153
- sqlspec/exceptions.py +30 -0
- sqlspec/extensions/litestar/config.py +6 -0
- sqlspec/extensions/litestar/handlers.py +25 -0
- sqlspec/extensions/litestar/plugin.py +6 -1
- sqlspec/statement.py +373 -0
- sqlspec/typing.py +10 -1
- {sqlspec-0.8.0.dist-info → sqlspec-0.9.0.dist-info}/METADATA +4 -1
- sqlspec-0.9.0.dist-info/RECORD +61 -0
- sqlspec-0.8.0.dist-info/RECORD +0 -57
- {sqlspec-0.8.0.dist-info → sqlspec-0.9.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.8.0.dist-info → sqlspec-0.9.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.8.0.dist-info → sqlspec-0.9.0.dist-info}/licenses/NOTICE +0 -0
@@ -0,0 +1,258 @@ sqlspec/adapters/psqlpy/config.py (new file)
+"""Configuration for the psqlpy PostgreSQL adapter."""
+
+from contextlib import asynccontextmanager
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any, Optional, Union
+
+from psqlpy import Connection, ConnectionPool
+
+from sqlspec.adapters.psqlpy.driver import PsqlpyDriver
+from sqlspec.base import AsyncDatabaseConfig, GenericPoolConfig
+from sqlspec.exceptions import ImproperConfigurationError
+from sqlspec.typing import Empty, EmptyType, dataclass_to_dict
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncGenerator, Awaitable
+
+
+__all__ = (
+    "PsqlpyConfig",
+    "PsqlpyPoolConfig",
+)
+
+
+@dataclass
+class PsqlpyPoolConfig(GenericPoolConfig):
+    """Configuration for the psqlpy connection pool.
+
+    Ref: https://psqlpy-python.github.io/components/connection_pool.html#all-available-connectionpool-parameters
+    """
+
+    dsn: Optional[Union[str, EmptyType]] = Empty
+    """DSN of the PostgreSQL server."""
+    # Required connection parameters
+    username: Optional[Union[str, EmptyType]] = Empty
+    """Username of the PostgreSQL user."""
+    password: Optional[Union[str, EmptyType]] = Empty
+    """Password of the PostgreSQL user."""
+    db_name: Optional[Union[str, EmptyType]] = Empty
+    """Name of the database in PostgreSQL."""
+
+    # Single-host or multi-host parameters (mutually exclusive)
+    host: Optional[Union[str, EmptyType]] = Empty
+    """Host of the PostgreSQL server (use for a single host)."""
+    port: Optional[Union[int, EmptyType]] = Empty
+    """Port of the PostgreSQL server (use for a single host)."""
+    hosts: Optional[Union[list[str], EmptyType]] = Empty
+    """List of PostgreSQL hosts (use for multiple hosts)."""
+    ports: Optional[Union[list[int], EmptyType]] = Empty
+    """List of PostgreSQL ports (use for multiple hosts)."""
+
+    # Pool size
+    max_db_pool_size: int = 10
+    """Maximum size of the connection pool. Defaults to 10."""
+
+    # Optional timeouts
+    connect_timeout_sec: Optional[Union[int, EmptyType]] = Empty
+    """The time limit in seconds applied to each socket-level connection attempt."""
+    connect_timeout_nanosec: Optional[Union[int, EmptyType]] = Empty
+    """Nanosecond component of the connection timeout; can only be used together with `connect_timeout_sec`."""
+    tcp_user_timeout_sec: Optional[Union[int, EmptyType]] = Empty
+    """The time limit that transmitted data may remain unacknowledged before a connection is forcibly closed."""
+    tcp_user_timeout_nanosec: Optional[Union[int, EmptyType]] = Empty
+    """Nanosecond component of `tcp_user_timeout_sec`; can only be used together with it."""
+
+    # Optional keepalives
+    keepalives: bool = True
+    """Controls the use of TCP keepalive. Defaults to True (on)."""
+    keepalives_idle_sec: Optional[Union[int, EmptyType]] = Empty
+    """The number of seconds of inactivity after which a keepalive message is sent to the server."""
+    keepalives_idle_nanosec: Optional[Union[int, EmptyType]] = Empty
+    """Nanosecond component of `keepalives_idle_sec`."""
+    keepalives_interval_sec: Optional[Union[int, EmptyType]] = Empty
+    """The time interval between TCP keepalive probes."""
+    keepalives_interval_nanosec: Optional[Union[int, EmptyType]] = Empty
+    """Nanosecond component of `keepalives_interval_sec`."""
+    keepalives_retries: Optional[Union[int, EmptyType]] = Empty
+    """The maximum number of TCP keepalive probes that will be sent before dropping a connection."""
+
+    # Other optional parameters
+    load_balance_hosts: Optional[Union[str, EmptyType]] = Empty
+    """Controls the order in which the client tries the available hosts and addresses ('disable' or 'random')."""
+    conn_recycling_method: Optional[Union[str, EmptyType]] = Empty
+    """How a connection is recycled."""
+    ssl_mode: Optional[Union[str, EmptyType]] = Empty
+    """SSL mode."""
+    ca_file: Optional[Union[str, EmptyType]] = Empty
+    """Path to the CA file for SSL."""
+    target_session_attrs: Optional[Union[str, EmptyType]] = Empty
+    """Specifies requirements of the session (e.g., 'read-write')."""
+    options: Optional[Union[str, EmptyType]] = Empty
+    """Command-line options used to configure the server."""
+    application_name: Optional[Union[str, EmptyType]] = Empty
+    """Sets the application_name parameter on the server."""
+
+
+@dataclass
+class PsqlpyConfig(AsyncDatabaseConfig[Connection, ConnectionPool, PsqlpyDriver]):
+    """Configuration for psqlpy database connections, managing a connection pool.
+
+    This configuration class wraps `PsqlpyPoolConfig` and manages the lifecycle
+    of a `psqlpy.ConnectionPool`.
+    """
+
+    pool_config: Optional[PsqlpyPoolConfig] = field(default=None)
+    """Psqlpy pool configuration."""
+    driver_type: type[PsqlpyDriver] = field(default=PsqlpyDriver, init=False, hash=False)
+    """Type of the driver object."""
+    connection_type: type[Connection] = field(default=Connection, init=False, hash=False)
+    """Type of the connection object."""
+    pool_instance: Optional[ConnectionPool] = field(default=None, hash=False)
+    """The connection pool instance. If set, this will be used instead of creating a new pool."""
+
+    @property
+    def connection_config_dict(self) -> "dict[str, Any]":
+        """Return the minimal connection configuration as a dict for standalone use.
+
+        Returns:
+            A string-keyed dict of config kwargs for a psqlpy.Connection.
+
+        Raises:
+            ImproperConfigurationError: If essential connection parameters are missing.
+        """
+        if self.pool_config:
+            # Exclude pool-specific keys and internal metadata
+            pool_specific_keys = {
+                "max_db_pool_size",
+                "load_balance_hosts",
+                "conn_recycling_method",
+                "pool_instance",
+                "connection_type",
+                "driver_type",
+            }
+            return dataclass_to_dict(
+                self.pool_config,
+                exclude_empty=True,
+                convert_nested=False,
+                exclude_none=True,
+                exclude=pool_specific_keys,
+            )
+        msg = "You must provide a 'pool_config' for this adapter."
+        raise ImproperConfigurationError(msg)
+
+    @property
+    def pool_config_dict(self) -> "dict[str, Any]":
+        """Return the pool configuration as a dict.
+
+        Raises:
+            ImproperConfigurationError: If no 'pool_config' is provided and only a 'pool_instance' is set.
+
+        Returns:
+            A string-keyed dict of config kwargs for creating a psqlpy pool.
+        """
+        if self.pool_config:
+            # Extract the config from the pool_config
+            return dataclass_to_dict(
+                self.pool_config,
+                exclude_empty=True,
+                convert_nested=False,
+                exclude_none=True,
+                exclude={"pool_instance", "connection_type", "driver_type"},
+            )
+
+        msg = "'pool_config' methods can not be used when a 'pool_instance' is provided."
+        raise ImproperConfigurationError(msg)
+
+    async def create_pool(self) -> "ConnectionPool":
+        """Return a pool. If none exists yet, create one.
+
+        Ensures that the pool is initialized and returns the instance.
+
+        Returns:
+            The pool instance used by the plugin.
+
+        Raises:
+            ImproperConfigurationError: If the pool could not be configured.
+        """
+        if self.pool_instance is not None:
+            return self.pool_instance
+
+        if self.pool_config is None:
+            msg = "One of 'pool_config' or 'pool_instance' must be provided."
+            raise ImproperConfigurationError(msg)
+
+        # pool_config is guaranteed to be set by the check above
+        try:
+            # psqlpy's ConnectionPool has no explicit async connect/startup method;
+            # it creates connections on demand.
+            self.pool_instance = ConnectionPool(**self.pool_config_dict)
+        except Exception as e:
+            msg = f"Could not configure the 'pool_instance'. Error: {e!s}. Please check your configuration."
+            raise ImproperConfigurationError(msg) from e
+
+        return self.pool_instance
+
+    def provide_pool(self, *args: "Any", **kwargs: "Any") -> "Awaitable[ConnectionPool]":
+        """Create or return the pool instance.
+
+        Returns:
+            An awaitable resolving to the pool instance.
+        """
+
+        async def _create() -> "ConnectionPool":
+            return await self.create_pool()
+
+        return _create()
+
+    def create_connection(self) -> "Awaitable[Connection]":
+        """Create and return a new, standalone psqlpy connection using the configured parameters.
+
+        Note: This method is not supported by the psqlpy adapter, as connection
+        creation is handled via the ConnectionPool.
+        Use `provide_connection` or `provide_session` for pooled connections.
+
+        Returns:
+            An awaitable that resolves to a new Connection instance.
+
+        Raises:
+            NotImplementedError: This method is not implemented for psqlpy.
+        """
+
+        async def _create() -> "Connection":
+            # psqlpy does not seem to offer a public API for creating
+            # standalone async connections easily outside the pool context.
+            msg = (
+                "Creating standalone connections is not directly supported by the psqlpy adapter. "
+                "Please use the pool via `provide_connection` or `provide_session`."
+            )
+            raise NotImplementedError(msg)
+
+        return _create()
+
+    @asynccontextmanager
+    async def provide_connection(self, *args: "Any", **kwargs: "Any") -> "AsyncGenerator[Connection, None]":
+        """Acquire a connection from the pool.
+
+        Yields:
+            A connection instance managed by the pool.
+        """
+        db_pool = await self.provide_pool(*args, **kwargs)
+        async with db_pool.acquire() as conn:
+            yield conn
+
+    def close_pool(self) -> None:
+        """Close the connection pool."""
+        if self.pool_instance is not None:
+            # psqlpy pool close is synchronous
+            self.pool_instance.close()
+            self.pool_instance = None
+
+    @asynccontextmanager
+    async def provide_session(self, *args: Any, **kwargs: Any) -> "AsyncGenerator[PsqlpyDriver, None]":
+        """Create and provide a database session using a pooled connection.
+
+        Yields:
+            A psqlpy driver instance wrapping a pooled connection.
+        """
+        async with self.provide_connection(*args, **kwargs) as connection:
+            yield self.driver_type(connection)
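The new file above is easiest to read alongside a usage sketch. The following is illustrative only: it assumes nothing beyond the names defined in this hunk (`PsqlpyPoolConfig`, `PsqlpyConfig`, `provide_session`, `close_pool`) and the driver's `select` method from the next hunk; the DSN, pool size, and query are made-up placeholder values.

import asyncio

from sqlspec.adapters.psqlpy.config import PsqlpyConfig, PsqlpyPoolConfig

# Hypothetical DSN and pool size, for illustration only.
config = PsqlpyConfig(
    pool_config=PsqlpyPoolConfig(dsn="postgres://app:secret@localhost:5432/app", max_db_pool_size=5)
)


async def main() -> None:
    # provide_session acquires a pooled connection and wraps it in a PsqlpyDriver.
    async with config.provide_session() as driver:
        rows = await driver.select("SELECT 1 AS one")
        print(rows)
    # The psqlpy pool close is synchronous, so there is no await here.
    config.close_pool()


asyncio.run(main())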
@@ -0,0 +1,335 @@ sqlspec/adapters/psqlpy/driver.py (new file)
+# ruff: noqa: PLR0915, PLR0914, PLR0912, C901
+"""Psqlpy driver implementation."""
+
+import logging
+import re
+from typing import TYPE_CHECKING, Any, Optional, Union, cast
+
+from psqlpy.exceptions import RustPSQLDriverPyBaseError
+
+from sqlspec.base import AsyncDriverAdapterProtocol, T
+from sqlspec.exceptions import SQLParsingError
+from sqlspec.statement import PARAM_REGEX, SQLStatement
+
+if TYPE_CHECKING:
+    from psqlpy import Connection, QueryResult
+
+    from sqlspec.typing import ModelDTOT, StatementParameterType
+
+__all__ = ("PsqlpyDriver",)
+
+
+# Regex to find '?' placeholders, skipping those inside quotes or SQL comments
+QMARK_REGEX = re.compile(
+    r"""(?P<dquote>"[^"]*") |           # Double-quoted strings
+    (?P<squote>\'[^\']*\') |            # Single-quoted strings
+    (?P<comment>--[^\n]*|/\*.*?\*/) |   # SQL comments (single/multi-line)
+    (?P<qmark>\?)                       # The question mark placeholder
+    """,
+    re.VERBOSE | re.DOTALL,
+)
+logger = logging.getLogger("sqlspec")
+
+
+class PsqlpyDriver(AsyncDriverAdapterProtocol["Connection"]):
+    """Psqlpy Postgres driver adapter."""
+
+    connection: "Connection"
+    dialect: str = "postgres"
+
+    def __init__(self, connection: "Connection") -> None:
+        self.connection = connection
+
+    def _process_sql_params(
+        self,
+        sql: str,
+        parameters: "Optional[StatementParameterType]" = None,
+        /,
+        **kwargs: Any,
+    ) -> "tuple[str, Optional[Union[tuple[Any, ...], list[Any], dict[str, Any]]]]":
+        """Process SQL and parameters for psqlpy.
+
+        psqlpy natively uses $1, $2 style parameters.
+        This method converts '?' (tuple/list) and ':name' (dict) styles to $n.
+        It relies on SQLStatement for initial parameter validation and merging.
+        """
+        stmt = SQLStatement(sql=sql, parameters=parameters, dialect=self.dialect, kwargs=kwargs or None)
+        sql, parameters = stmt.process()
+
+        # Case 1: Parameters are a dictionary
+        if isinstance(parameters, dict):
+            processed_sql_parts: list[str] = []
+            ordered_params = []
+            last_end = 0
+            param_index = 1
+            found_params_regex: list[str] = []
+
+            for match in PARAM_REGEX.finditer(sql):
+                if match.group("dquote") or match.group("squote") or match.group("comment"):
+                    continue
+
+                if match.group("var_name"):  # Finds :var_name
+                    var_name = match.group("var_name")
+                    found_params_regex.append(var_name)
+                    start = match.start("var_name") - 1
+                    end = match.end("var_name")
+
+                    if var_name not in parameters:
+                        msg = f"Named parameter ':{var_name}' missing from parameters. SQL: {sql}"
+                        raise SQLParsingError(msg)
+
+                    processed_sql_parts.extend((sql[last_end:start], f"${param_index}"))
+                    ordered_params.append(parameters[var_name])
+                    last_end = end
+                    param_index += 1
+
+            processed_sql_parts.append(sql[last_end:])
+            final_sql = "".join(processed_sql_parts)
+
+            if not found_params_regex and parameters:
+                logger.warning(
+                    "Dict params provided (%s), but no :name placeholders found. SQL: %s",
+                    list(parameters.keys()),
+                    sql,
+                )
+                return sql, ()
+
+            provided_keys = set(parameters.keys())
+            found_keys = set(found_params_regex)
+            unused_keys = provided_keys - found_keys
+            if unused_keys:
+                logger.warning("Unused parameters provided: %s. SQL: %s", unused_keys, sql)
+
+            return final_sql, tuple(ordered_params)
+
+        # Case 2: Parameters are a sequence/scalar
+        if isinstance(parameters, (list, tuple)):
+            sequence_processed_parts: list[str] = []
+            param_index = 1
+            last_end = 0
+            qmark_found = False
+
+            for match in QMARK_REGEX.finditer(sql):
+                if match.group("dquote") or match.group("squote") or match.group("comment"):
+                    continue
+
+                if match.group("qmark"):
+                    qmark_found = True
+                    start = match.start("qmark")
+                    end = match.end("qmark")
+                    sequence_processed_parts.extend((sql[last_end:start], f"${param_index}"))
+                    last_end = end
+                    param_index += 1
+
+            sequence_processed_parts.append(sql[last_end:])
+            final_sql = "".join(sequence_processed_parts)
+
+            if parameters and not qmark_found:
+                logger.warning("Sequence parameters provided, but no '?' placeholders found. SQL: %s", sql)
+                return sql, parameters
+
+            expected_params = param_index - 1
+            actual_params = len(parameters)
+            if expected_params != actual_params:
+                msg = f"Parameter count mismatch: Expected {expected_params}, got {actual_params}. SQL: {final_sql}"
+                raise SQLParsingError(msg)
+
+            return final_sql, parameters
+
+        # Case 3: Parameters are None
+        if PARAM_REGEX.search(sql) or QMARK_REGEX.search(sql):
+            # No parameters were supplied; raise if the SQL still contains real placeholders
+            for match in PARAM_REGEX.finditer(sql):
+                if not (match.group("dquote") or match.group("squote") or match.group("comment")) and match.group(
+                    "var_name"
+                ):
+                    msg = f"SQL contains named parameters (:name) but no parameters provided. SQL: {sql}"
+                    raise SQLParsingError(msg)
+            for match in QMARK_REGEX.finditer(sql):
+                if not (match.group("dquote") or match.group("squote") or match.group("comment")) and match.group(
+                    "qmark"
+                ):
+                    msg = f"SQL contains positional parameters (?) but no parameters provided. SQL: {sql}"
+                    raise SQLParsingError(msg)
+
+        return sql, ()
+
+    async def select(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        schema_type: "Optional[type[ModelDTOT]]" = None,
+        **kwargs: Any,
+    ) -> "list[Union[ModelDTOT, dict[str, Any]]]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []  # psqlpy expects a list/tuple
+
+        results: QueryResult = await connection.fetch(sql, parameters=parameters)
+
+        if schema_type is None:
+            return cast("list[dict[str, Any]]", results.result())  # type: ignore[return-value]
+        return results.as_class(as_class=schema_type)
+
+    async def select_one(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        schema_type: "Optional[type[ModelDTOT]]" = None,
+        **kwargs: Any,
+    ) -> "Union[ModelDTOT, dict[str, Any]]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        result = await connection.fetch(sql, parameters=parameters)
+
+        if schema_type is None:
+            result = cast("list[dict[str, Any]]", result.result())  # type: ignore[assignment]
+            return cast("dict[str, Any]", result[0])  # type: ignore[index]
+        return result.as_class(as_class=schema_type)[0]
+
+    async def select_one_or_none(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        schema_type: "Optional[type[ModelDTOT]]" = None,
+        **kwargs: Any,
+    ) -> "Optional[Union[ModelDTOT, dict[str, Any]]]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        result = await connection.fetch(sql, parameters=parameters)
+        if schema_type is None:
+            result = cast("list[dict[str, Any]]", result.result())  # type: ignore[assignment]
+            if len(result) == 0:  # type: ignore[arg-type]
+                return None
+            return cast("dict[str, Any]", result[0])  # type: ignore[index]
+        result = cast("list[ModelDTOT]", result.as_class(as_class=schema_type))  # type: ignore[assignment]
+        if len(result) == 0:  # type: ignore[arg-type]
+            return None
+        return cast("ModelDTOT", result[0])  # type: ignore[index]
+
+    async def select_value(
+        self,
+        sql: str,
+        parameters: "Optional[StatementParameterType]" = None,
+        /,
+        *,
+        connection: "Optional[Connection]" = None,
+        schema_type: "Optional[type[T]]" = None,
+        **kwargs: Any,
+    ) -> "Union[T, Any]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        value = await connection.fetch_val(sql, parameters=parameters)
+
+        if schema_type is None:
+            return value
+        return schema_type(value)  # type: ignore[call-arg]
+
+    async def select_value_or_none(
+        self,
+        sql: str,
+        parameters: "Optional[StatementParameterType]" = None,
+        /,
+        *,
+        connection: "Optional[Connection]" = None,
+        schema_type: "Optional[type[T]]" = None,
+        **kwargs: Any,
+    ) -> "Optional[Union[T, Any]]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+        try:
+            value = await connection.fetch_val(sql, parameters=parameters)
+        except RustPSQLDriverPyBaseError:
+            return None
+
+        if value is None:
+            return None
+        if schema_type is None:
+            return value
+        return schema_type(value)  # type: ignore[call-arg]
+
+    async def insert_update_delete(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        **kwargs: Any,
+    ) -> int:
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        await connection.execute(sql, parameters=parameters)
+        # For INSERT/UPDATE/DELETE, psqlpy returns an empty list, but the operation
+        # succeeded if no error was raised.
+        return 1
+
+    async def insert_update_delete_returning(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        schema_type: "Optional[type[ModelDTOT]]" = None,
+        **kwargs: Any,
+    ) -> "Optional[Union[dict[str, Any], ModelDTOT]]":
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        result = await connection.execute(sql, parameters=parameters)
+        if schema_type is None:
+            result = result.result()  # type: ignore[assignment]
+            if len(result) == 0:  # type: ignore[arg-type]
+                return None
+            return cast("dict[str, Any]", result[0])  # type: ignore[index]
+        result = result.as_class(as_class=schema_type)  # type: ignore[assignment]
+        if len(result) == 0:  # type: ignore[arg-type]
+            return None
+        return cast("ModelDTOT", result[0])  # type: ignore[index]
+
+    async def execute_script(
+        self,
+        sql: str,
+        parameters: Optional["StatementParameterType"] = None,
+        /,
+        *,
+        connection: Optional["Connection"] = None,
+        **kwargs: Any,
+    ) -> str:
+        connection = self._connection(connection)
+        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
+        parameters = parameters or []
+
+        await connection.execute(sql, parameters=parameters)
+        return sql
+
+    def _connection(self, connection: Optional["Connection"] = None) -> "Connection":
+        """Get the connection to use.
+
+        Args:
+            connection: Optional connection to use. If not provided, the driver's default connection is used.
+
+        Returns:
+            The connection to use.
+        """
+        return connection or self.connection
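The core of this driver is the placeholder rewriting in `_process_sql_params`, so a short, hedged sketch of the expected call patterns follows. It uses only names from the hunk above; the connection object, table, and values are assumptions, and the calls must run inside an async function.

# Sketch only: `conn` is assumed to be a psqlpy Connection acquired from a ConnectionPool.
driver = PsqlpyDriver(conn)

# '?' placeholders with a sequence are rewritten to $1, $2, ... before psqlpy sees them.
user = await driver.select_one("SELECT id, name FROM users WHERE id = ?", (1,))

# ':name' placeholders with a dict are rewritten to $n and the values are passed positionally.
users = await driver.select("SELECT id, name FROM users WHERE name = :name", {"name": "alice"})

# Scalar helpers go through fetch_val; select_value_or_none additionally returns None on driver errors.
total = await driver.select_value("SELECT count(*) FROM users")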
@@ -1,11 +1,16 @@ sqlspec/adapters/psycopg/__init__.py
-from sqlspec.adapters.psycopg.config import
+from sqlspec.adapters.psycopg.config import (
+    PsycopgAsyncConfig,
+    PsycopgAsyncPoolConfig,
+    PsycopgSyncConfig,
+    PsycopgSyncPoolConfig,
+)
 from sqlspec.adapters.psycopg.driver import PsycopgAsyncDriver, PsycopgSyncDriver
 
 __all__ = (
-    "
+    "PsycopgAsyncConfig",
     "PsycopgAsyncDriver",
-    "
-    "
+    "PsycopgAsyncPoolConfig",
+    "PsycopgSyncConfig",
     "PsycopgSyncDriver",
-    "
+    "PsycopgSyncPoolConfig",
 )
@@ -1,9 +1,9 @@ sqlspec/adapters/psycopg/config/__init__.py
-from sqlspec.adapters.psycopg.config._async import
-from sqlspec.adapters.psycopg.config._sync import
+from sqlspec.adapters.psycopg.config._async import PsycopgAsyncConfig, PsycopgAsyncPoolConfig
+from sqlspec.adapters.psycopg.config._sync import PsycopgSyncConfig, PsycopgSyncPoolConfig
 
 __all__ = (
-    "
-    "
-    "
-    "
+    "PsycopgAsyncConfig",
+    "PsycopgAsyncPoolConfig",
+    "PsycopgSyncConfig",
+    "PsycopgSyncPoolConfig",
 )
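Taken together, the two psycopg hunks mean the four config classes are now re-exported explicitly. A minimal sketch of the resulting import surface, assuming only the `__all__` entries shown above:

from sqlspec.adapters.psycopg import (
    PsycopgAsyncConfig,
    PsycopgAsyncPoolConfig,
    PsycopgSyncConfig,
    PsycopgSyncPoolConfig,
)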