sqlspec 0.16.0__cp311-cp311-macosx_13_0_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- 51ff5a9eadfdefd49f98__mypyc.cpython-311-darwin.so +0 -0
- sqlspec/__init__.py +92 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +77 -0
- sqlspec/_sql.py +1347 -0
- sqlspec/_typing.py +680 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_types.py +12 -0
- sqlspec/adapters/adbc/config.py +361 -0
- sqlspec/adapters/adbc/driver.py +512 -0
- sqlspec/adapters/aiosqlite/__init__.py +19 -0
- sqlspec/adapters/aiosqlite/_types.py +13 -0
- sqlspec/adapters/aiosqlite/config.py +253 -0
- sqlspec/adapters/aiosqlite/driver.py +248 -0
- sqlspec/adapters/asyncmy/__init__.py +19 -0
- sqlspec/adapters/asyncmy/_types.py +12 -0
- sqlspec/adapters/asyncmy/config.py +180 -0
- sqlspec/adapters/asyncmy/driver.py +274 -0
- sqlspec/adapters/asyncpg/__init__.py +21 -0
- sqlspec/adapters/asyncpg/_types.py +17 -0
- sqlspec/adapters/asyncpg/config.py +229 -0
- sqlspec/adapters/asyncpg/driver.py +344 -0
- sqlspec/adapters/bigquery/__init__.py +18 -0
- sqlspec/adapters/bigquery/_types.py +12 -0
- sqlspec/adapters/bigquery/config.py +298 -0
- sqlspec/adapters/bigquery/driver.py +558 -0
- sqlspec/adapters/duckdb/__init__.py +22 -0
- sqlspec/adapters/duckdb/_types.py +12 -0
- sqlspec/adapters/duckdb/config.py +504 -0
- sqlspec/adapters/duckdb/driver.py +368 -0
- sqlspec/adapters/oracledb/__init__.py +32 -0
- sqlspec/adapters/oracledb/_types.py +14 -0
- sqlspec/adapters/oracledb/config.py +317 -0
- sqlspec/adapters/oracledb/driver.py +538 -0
- sqlspec/adapters/psqlpy/__init__.py +16 -0
- sqlspec/adapters/psqlpy/_types.py +11 -0
- sqlspec/adapters/psqlpy/config.py +214 -0
- sqlspec/adapters/psqlpy/driver.py +530 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_types.py +17 -0
- sqlspec/adapters/psycopg/config.py +426 -0
- sqlspec/adapters/psycopg/driver.py +796 -0
- sqlspec/adapters/sqlite/__init__.py +15 -0
- sqlspec/adapters/sqlite/_types.py +11 -0
- sqlspec/adapters/sqlite/config.py +240 -0
- sqlspec/adapters/sqlite/driver.py +294 -0
- sqlspec/base.py +571 -0
- sqlspec/builder/__init__.py +62 -0
- sqlspec/builder/_base.py +440 -0
- sqlspec/builder/_column.py +324 -0
- sqlspec/builder/_ddl.py +1383 -0
- sqlspec/builder/_ddl_utils.py +104 -0
- sqlspec/builder/_delete.py +77 -0
- sqlspec/builder/_insert.py +241 -0
- sqlspec/builder/_merge.py +56 -0
- sqlspec/builder/_parsing_utils.py +140 -0
- sqlspec/builder/_select.py +174 -0
- sqlspec/builder/_update.py +186 -0
- sqlspec/builder/mixins/__init__.py +55 -0
- sqlspec/builder/mixins/_cte_and_set_ops.py +195 -0
- sqlspec/builder/mixins/_delete_operations.py +36 -0
- sqlspec/builder/mixins/_insert_operations.py +152 -0
- sqlspec/builder/mixins/_join_operations.py +115 -0
- sqlspec/builder/mixins/_merge_operations.py +416 -0
- sqlspec/builder/mixins/_order_limit_operations.py +123 -0
- sqlspec/builder/mixins/_pivot_operations.py +144 -0
- sqlspec/builder/mixins/_select_operations.py +599 -0
- sqlspec/builder/mixins/_update_operations.py +164 -0
- sqlspec/builder/mixins/_where_clause.py +609 -0
- sqlspec/cli.py +247 -0
- sqlspec/config.py +395 -0
- sqlspec/core/__init__.py +63 -0
- sqlspec/core/cache.cpython-311-darwin.so +0 -0
- sqlspec/core/cache.py +873 -0
- sqlspec/core/compiler.cpython-311-darwin.so +0 -0
- sqlspec/core/compiler.py +396 -0
- sqlspec/core/filters.cpython-311-darwin.so +0 -0
- sqlspec/core/filters.py +830 -0
- sqlspec/core/hashing.cpython-311-darwin.so +0 -0
- sqlspec/core/hashing.py +310 -0
- sqlspec/core/parameters.cpython-311-darwin.so +0 -0
- sqlspec/core/parameters.py +1209 -0
- sqlspec/core/result.cpython-311-darwin.so +0 -0
- sqlspec/core/result.py +664 -0
- sqlspec/core/splitter.cpython-311-darwin.so +0 -0
- sqlspec/core/splitter.py +819 -0
- sqlspec/core/statement.cpython-311-darwin.so +0 -0
- sqlspec/core/statement.py +666 -0
- sqlspec/driver/__init__.py +19 -0
- sqlspec/driver/_async.py +472 -0
- sqlspec/driver/_common.py +612 -0
- sqlspec/driver/_sync.py +473 -0
- sqlspec/driver/mixins/__init__.py +6 -0
- sqlspec/driver/mixins/_result_tools.py +164 -0
- sqlspec/driver/mixins/_sql_translator.py +36 -0
- sqlspec/exceptions.py +193 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/aiosql/__init__.py +10 -0
- sqlspec/extensions/aiosql/adapter.py +461 -0
- sqlspec/extensions/litestar/__init__.py +6 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/cli.py +48 -0
- sqlspec/extensions/litestar/config.py +92 -0
- sqlspec/extensions/litestar/handlers.py +260 -0
- sqlspec/extensions/litestar/plugin.py +145 -0
- sqlspec/extensions/litestar/providers.py +454 -0
- sqlspec/loader.cpython-311-darwin.so +0 -0
- sqlspec/loader.py +760 -0
- sqlspec/migrations/__init__.py +35 -0
- sqlspec/migrations/base.py +414 -0
- sqlspec/migrations/commands.py +443 -0
- sqlspec/migrations/loaders.py +402 -0
- sqlspec/migrations/runner.py +213 -0
- sqlspec/migrations/tracker.py +140 -0
- sqlspec/migrations/utils.py +129 -0
- sqlspec/protocols.py +400 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +23 -0
- sqlspec/storage/backends/__init__.py +0 -0
- sqlspec/storage/backends/base.py +163 -0
- sqlspec/storage/backends/fsspec.py +386 -0
- sqlspec/storage/backends/obstore.py +459 -0
- sqlspec/storage/capabilities.py +102 -0
- sqlspec/storage/registry.py +239 -0
- sqlspec/typing.py +299 -0
- sqlspec/utils/__init__.py +3 -0
- sqlspec/utils/correlation.py +150 -0
- sqlspec/utils/deprecation.py +106 -0
- sqlspec/utils/fixtures.cpython-311-darwin.so +0 -0
- sqlspec/utils/fixtures.py +58 -0
- sqlspec/utils/logging.py +127 -0
- sqlspec/utils/module_loader.py +89 -0
- sqlspec/utils/serializers.py +4 -0
- sqlspec/utils/singleton.py +32 -0
- sqlspec/utils/sync_tools.cpython-311-darwin.so +0 -0
- sqlspec/utils/sync_tools.py +237 -0
- sqlspec/utils/text.cpython-311-darwin.so +0 -0
- sqlspec/utils/text.py +96 -0
- sqlspec/utils/type_guards.cpython-311-darwin.so +0 -0
- sqlspec/utils/type_guards.py +1135 -0
- sqlspec-0.16.0.dist-info/METADATA +365 -0
- sqlspec-0.16.0.dist-info/RECORD +148 -0
- sqlspec-0.16.0.dist-info/WHEEL +4 -0
- sqlspec-0.16.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.16.0.dist-info/licenses/LICENSE +21 -0
- sqlspec-0.16.0.dist-info/licenses/NOTICE +29 -0
sqlspec/driver/_sync.py
ADDED
|
@@ -0,0 +1,473 @@
|
|
|
1
|
+
"""Synchronous driver protocol implementation.
|
|
2
|
+
|
|
3
|
+
This module provides the sync driver infrastructure for database adapters,
|
|
4
|
+
including connection management, transaction support, and result processing.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from abc import abstractmethod
|
|
8
|
+
from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload
|
|
9
|
+
|
|
10
|
+
from sqlspec.core import SQL
|
|
11
|
+
from sqlspec.driver._common import CommonDriverAttributesMixin, ExecutionResult
|
|
12
|
+
from sqlspec.driver.mixins import SQLTranslatorMixin, ToSchemaMixin
|
|
13
|
+
from sqlspec.exceptions import NotFoundError
|
|
14
|
+
from sqlspec.utils.logging import get_logger
|
|
15
|
+
from sqlspec.utils.type_guards import is_dict_row, is_indexable_row
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from collections.abc import Sequence
|
|
19
|
+
from contextlib import AbstractContextManager
|
|
20
|
+
|
|
21
|
+
from sqlspec.builder import QueryBuilder
|
|
22
|
+
from sqlspec.core import SQLResult, Statement, StatementConfig, StatementFilter
|
|
23
|
+
from sqlspec.typing import ModelDTOT, ModelT, RowT, StatementParameters
|
|
24
|
+
|
|
25
|
+
logger = get_logger("sqlspec")

__all__ = ("SyncDriverAdapterBase",)


# Shared, module-level empty filter list.
# NOTE(review): not referenced elsewhere in this module — presumably a reusable
# default for filter arguments so callers avoid allocating a fresh list; confirm
# against call sites before relying on it (it is mutable and shared).
EMPTY_FILTERS: "list[StatementFilter]" = []
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class SyncDriverAdapterBase(CommonDriverAttributesMixin, SQLTranslatorMixin, ToSchemaMixin):
    """Base class for synchronous database drivers.

    Provides the foundation for sync database adapters, including connection management,
    transaction support, and SQL execution methods. All database operations are performed
    synchronously and support context manager patterns for proper resource cleanup.
    """

    __slots__ = ()

    def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> "SQLResult":
        """Central execution dispatcher using the Template Method Pattern.

        Orchestrates the common execution flow, delegating database-specific steps
        to abstract methods that concrete adapters must implement.
        All database operations are wrapped in exception handling.

        Args:
            statement: The SQL statement to execute
            connection: The database connection to use

        Returns:
            The result of the SQL execution
        """
        with self.handle_database_exceptions(), self.with_cursor(connection) as cursor:
            # Driver-specific fast paths (e.g. COPY) short-circuit the standard flow.
            special_result = self._try_special_handling(cursor, statement)
            if special_result is not None:
                return special_result

            # Dispatch on statement mode: script > many > single.
            if statement.is_script:
                execution_result = self._execute_script(cursor, statement)
            elif statement.is_many:
                execution_result = self._execute_many(cursor, statement)
            else:
                execution_result = self._execute_statement(cursor, statement)

            return self.build_statement_result(statement, execution_result)

    @abstractmethod
    def with_cursor(self, connection: Any) -> Any:
        """Create and return a context manager for cursor acquisition and cleanup.

        Returns a context manager that yields a cursor for database operations.
        Concrete implementations handle database-specific cursor creation and cleanup.
        """

    @abstractmethod
    def handle_database_exceptions(self) -> "AbstractContextManager[None]":
        """Handle database-specific exceptions and wrap them appropriately.

        Returns:
            ContextManager that can be used in with statements
        """

    @abstractmethod
    def begin(self) -> None:
        """Begin a database transaction on the current connection."""

    @abstractmethod
    def rollback(self) -> None:
        """Rollback the current transaction on the current connection."""

    @abstractmethod
    def commit(self) -> None:
        """Commit the current transaction on the current connection."""

    @abstractmethod
    def _try_special_handling(self, cursor: Any, statement: "SQL") -> "Optional[SQLResult]":
        """Hook for database-specific special operations (e.g., PostgreSQL COPY, bulk operations).

        This method is called first in dispatch_statement_execution() to allow drivers to handle
        special operations that don't follow the standard SQL execution pattern.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement to analyze

        Returns:
            SQLResult if the special operation was handled and completed,
            None if standard execution should proceed
        """

    def _execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
        """Execute a SQL script containing multiple statements.

        Default implementation splits the script and executes statements individually.
        Drivers can override for database-specific script execution methods.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement object with all necessary data and configuration

        Returns:
            ExecutionResult with script execution data including statement counts
        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
        statements = self.split_script_statements(sql, self.statement_config, strip_trailing_semicolon=True)

        # Each split statement is executed with the script's prepared parameters.
        for stmt in statements:
            single_stmt = statement.copy(statement=stmt, parameters=prepared_parameters)
            self._execute_statement(cursor, single_stmt)

        return self.create_execution_result(
            cursor, statement_count=len(statements), successful_statements=len(statements), is_script_result=True
        )

    @abstractmethod
    def _execute_many(self, cursor: Any, statement: "SQL") -> ExecutionResult:
        """Execute SQL with multiple parameter sets (executemany).

        Must be implemented by each driver for database-specific executemany logic.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement object with all necessary data and configuration

        Returns:
            ExecutionResult with execution data for the many operation
        """

    @abstractmethod
    def _execute_statement(self, cursor: Any, statement: "SQL") -> ExecutionResult:
        """Execute a single SQL statement.

        Must be implemented by each driver for database-specific execution logic.

        Args:
            cursor: Database cursor/connection object
            statement: SQL statement object with all necessary data and configuration

        Returns:
            ExecutionResult with execution data
        """

    def execute(
        self,
        statement: "Union[SQL, Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "SQLResult":
        """Execute a statement with parameter handling."""
        sql_statement = self.prepare_statement(
            statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
        )
        return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)

    def execute_many(
        self,
        statement: "Union[SQL, Statement, QueryBuilder]",
        /,
        parameters: "Sequence[StatementParameters]",
        *filters: "Union[StatementParameters, StatementFilter]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "SQLResult":
        """Execute statement multiple times with different parameters.

        Parameters passed will be used as the batch execution sequence.
        """
        config = statement_config or self.statement_config

        # NOTE(review): both branches rebuild the statement from the private
        # `_raw_sql` attribute to attach the batch parameters with is_many=True.
        if isinstance(statement, SQL):
            sql_statement = SQL(statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs)
        else:
            base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs)
            sql_statement = SQL(base_statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs)

        return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)

    def execute_script(
        self,
        statement: "Union[str, SQL]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "SQLResult":
        """Execute a multi-statement script.

        By default, validates each statement and logs warnings for dangerous
        operations. Use suppress_warnings=True for migrations and admin scripts.
        """
        script_config = statement_config or self.statement_config
        sql_statement = self.prepare_statement(statement, parameters, statement_config=script_config, kwargs=kwargs)

        # as_script() marks the statement so dispatch takes the script path.
        return self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection)

    @overload
    def select_one(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "type[ModelDTOT]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "ModelDTOT": ...

    @overload
    def select_one(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: None = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Union[ModelT, RowT, dict[str, Any]]": ...  # pyright: ignore[reportInvalidTypeVarUse]

    def select_one(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "Optional[type[ModelDTOT]]" = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Union[ModelT, RowT, ModelDTOT]":  # pyright: ignore[reportInvalidTypeVarUse]
        """Execute a select statement and return exactly one row.

        Raises an exception if no rows or more than one row is returned.

        Raises:
            NotFoundError: If the query returns no rows.
            ValueError: If the query returns more than one row.
        """
        result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        data = result.get_data()
        if not data:
            msg = "No rows found"
            raise NotFoundError(msg)
        if len(data) > 1:
            msg = f"Expected exactly one row, found {len(data)}"
            raise ValueError(msg)
        return cast(
            "Union[ModelT, RowT, ModelDTOT]",
            self.to_schema(data[0], schema_type=schema_type) if schema_type else data[0],
        )

    @overload
    def select_one_or_none(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "type[ModelDTOT]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Optional[ModelDTOT]": ...

    @overload
    def select_one_or_none(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: None = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Optional[ModelT]": ...  # pyright: ignore[reportInvalidTypeVarUse]

    def select_one_or_none(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "Optional[type[ModelDTOT]]" = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Optional[Union[ModelT, ModelDTOT]]":  # pyright: ignore[reportInvalidTypeVarUse]
        """Execute a select statement and return at most one row.

        Returns None if no rows are found.
        Raises an exception if more than one row is returned.

        Raises:
            ValueError: If the query returns more than one row.
        """
        result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        data = result.get_data()
        if not data:
            return None
        if len(data) > 1:
            msg = f"Expected at most one row, found {len(data)}"
            raise ValueError(msg)
        return cast("Optional[Union[ModelT, ModelDTOT]]", self.to_schema(data[0], schema_type=schema_type))

    @overload
    def select(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "type[ModelDTOT]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "list[ModelDTOT]": ...

    @overload
    def select(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: None = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "list[ModelT]": ...  # pyright: ignore[reportInvalidTypeVarUse]

    def select(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "Optional[type[ModelDTOT]]" = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "Union[list[ModelT], list[ModelDTOT]]":  # pyright: ignore[reportInvalidTypeVarUse]
        """Execute a select statement and return all rows."""
        result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        return cast(
            "Union[list[ModelT], list[ModelDTOT]]",
            self.to_schema(cast("list[ModelT]", result.get_data()), schema_type=schema_type),
        )

    def select_value(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> Any:
        """Execute a select statement and return a single scalar value.

        Expects exactly one row with one column.
        Raises an exception if no rows or more than one row/column is returned.

        Raises:
            NotFoundError: If the query returns no rows.
            ValueError: If the row has no columns or an unexpected row type.
        """
        result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        try:
            row = result.one()
        except ValueError as e:
            # NOTE(review): any ValueError from result.one() (including a
            # multiple-rows error, if one() raises that) is reported as
            # "No rows found" — confirm this mapping is intended.
            msg = "No rows found"
            raise NotFoundError(msg) from e
        if not row:
            msg = "No rows found"
            raise NotFoundError(msg)
        # Extract the first column regardless of the driver's row shape.
        if is_dict_row(row):
            if not row:
                msg = "Row has no columns"
                raise ValueError(msg)
            return next(iter(row.values()))
        if is_indexable_row(row):
            if not row:
                msg = "Row has no columns"
                raise ValueError(msg)
            return row[0]
        msg = f"Unexpected row type: {type(row)}"
        raise ValueError(msg)

    def select_value_or_none(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> Any:
        """Execute a select statement and return a single scalar value or None.

        Returns None if no rows are found.
        Expects at most one row with one column.
        Raises an exception if more than one row is returned.

        Raises:
            ValueError: If the query returns more than one row.
            TypeError: If the row type does not support value extraction.
        """
        result = self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        data = result.get_data()
        if not data:
            return None
        if len(data) > 1:
            msg = f"Expected at most one row, found {len(data)}"
            raise ValueError(msg)
        row = data[0]
        # Unlike select_value, an empty dict row yields None rather than raising.
        if isinstance(row, dict):
            if not row:
                return None
            return next(iter(row.values()))
        if isinstance(row, (tuple, list)):
            return row[0]
        msg = f"Cannot extract value from row type {type(row).__name__}"
        raise TypeError(msg)

    @overload
    def select_with_total(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "type[ModelDTOT]",
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "tuple[list[ModelDTOT], int]": ...

    @overload
    def select_with_total(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: None = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "tuple[list[dict[str, Any]], int]": ...

    def select_with_total(
        self,
        statement: "Union[Statement, QueryBuilder]",
        /,
        *parameters: "Union[StatementParameters, StatementFilter]",
        schema_type: "Optional[type[ModelDTOT]]" = None,
        statement_config: "Optional[StatementConfig]" = None,
        **kwargs: Any,
    ) -> "tuple[Union[list[dict[str, Any]], list[ModelDTOT]], int]":
        """Execute a select statement and return both the data and total count.

        This method is designed for pagination scenarios where you need both
        the current page of data and the total number of rows that match the query.

        Args:
            statement: The SQL statement, QueryBuilder, or raw SQL string
            *parameters: Parameters for the SQL statement
            schema_type: Optional schema type for data transformation
            statement_config: Optional SQL configuration
            **kwargs: Additional keyword arguments

        Returns:
            A tuple containing:
            - List of data rows (transformed by schema_type if provided)
            - Total count of rows matching the query (ignoring LIMIT/OFFSET)
        """
        sql_statement = self.prepare_statement(
            statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
        )
        # Two round-trips: a derived COUNT query first, then the page itself.
        count_result = self.dispatch_statement_execution(self._create_count_query(sql_statement), self.connection)
        select_result = self.execute(sql_statement)

        return (self.to_schema(select_result.get_data(), schema_type=schema_type), count_result.scalar())
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
# pyright: reportCallIssue=false, reportAttributeAccessIssue=false, reportArgumentType=false
|
|
2
|
+
import datetime
|
|
3
|
+
import logging
|
|
4
|
+
from collections.abc import Sequence
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from functools import partial
|
|
7
|
+
from pathlib import Path, PurePath
|
|
8
|
+
from typing import Any, Callable, Optional, overload
|
|
9
|
+
from uuid import UUID
|
|
10
|
+
|
|
11
|
+
from mypy_extensions import trait
|
|
12
|
+
|
|
13
|
+
from sqlspec.exceptions import SQLSpecError, wrap_exceptions
|
|
14
|
+
from sqlspec.typing import (
|
|
15
|
+
CATTRS_INSTALLED,
|
|
16
|
+
ModelDTOT,
|
|
17
|
+
ModelT,
|
|
18
|
+
attrs_asdict,
|
|
19
|
+
cattrs_structure,
|
|
20
|
+
cattrs_unstructure,
|
|
21
|
+
convert,
|
|
22
|
+
get_type_adapter,
|
|
23
|
+
)
|
|
24
|
+
from sqlspec.utils.type_guards import is_attrs_schema, is_dataclass, is_msgspec_struct, is_pydantic_model
|
|
25
|
+
|
|
26
|
+
__all__ = ("_DEFAULT_TYPE_DECODERS", "_default_msgspec_deserializer")


logger = logging.getLogger(__name__)

# Default (predicate, decoder) pairs consulted by _default_msgspec_deserializer.
# Each predicate matches the target type by identity; the decoder constructs an
# instance of the target type from the value (UUID from its hex string,
# date/time types from their ISO-8601 form, Enum from the member's payload).
_DEFAULT_TYPE_DECODERS: list[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]] = [
    (lambda x: x is UUID, lambda t, v: t(v.hex)),
    (lambda x: x is datetime.datetime, lambda t, v: t(v.isoformat())),
    (lambda x: x is datetime.date, lambda t, v: t(v.isoformat())),
    (lambda x: x is datetime.time, lambda t, v: t(v.isoformat())),
    (lambda x: x is Enum, lambda t, v: t(v.value)),
]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _default_msgspec_deserializer(
|
|
40
|
+
target_type: Any, value: Any, type_decoders: "Optional[Sequence[tuple[Any, Any]]]" = None
|
|
41
|
+
) -> Any:
|
|
42
|
+
"""Default msgspec deserializer with type conversion support.
|
|
43
|
+
|
|
44
|
+
Converts values to appropriate types for msgspec deserialization, including
|
|
45
|
+
UUID, datetime, date, time, Enum, Path, and PurePath types.
|
|
46
|
+
"""
|
|
47
|
+
if type_decoders:
|
|
48
|
+
for predicate, decoder in type_decoders:
|
|
49
|
+
if predicate(target_type):
|
|
50
|
+
return decoder(target_type, value)
|
|
51
|
+
if target_type is UUID and isinstance(value, UUID):
|
|
52
|
+
return value.hex
|
|
53
|
+
if target_type in {datetime.datetime, datetime.date, datetime.time}:
|
|
54
|
+
with wrap_exceptions(suppress=AttributeError):
|
|
55
|
+
return value.isoformat()
|
|
56
|
+
if isinstance(target_type, type) and issubclass(target_type, Enum) and isinstance(value, Enum):
|
|
57
|
+
return value.value
|
|
58
|
+
if isinstance(value, target_type):
|
|
59
|
+
return value
|
|
60
|
+
if issubclass(target_type, (Path, PurePath, UUID)):
|
|
61
|
+
return target_type(value)
|
|
62
|
+
return value
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@trait
class ToSchemaMixin:
    """Mixin that converts raw result rows into typed schema objects.

    Dispatches on ``schema_type``: dataclasses, msgspec structs, Pydantic
    models, and attrs classes are supported, for both single rows and
    sequences of rows.
    """

    __slots__ = ()

    # Overload ladder: gives type checkers the schema_type -> return-type
    # mapping for the common call shapes. Common cases come first.
    @overload
    @staticmethod
    def to_schema(data: "list[dict[str, Any]]") -> "list[dict[str, Any]]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[dict[str, Any]]", *, schema_type: "type[ModelDTOT]") -> "list[ModelDTOT]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[dict[str, Any]]", *, schema_type: None = None) -> "list[dict[str, Any]]": ...
    @overload
    @staticmethod
    def to_schema(data: "dict[str, Any]") -> "dict[str, Any]": ...
    @overload
    @staticmethod
    def to_schema(data: "dict[str, Any]", *, schema_type: "type[ModelDTOT]") -> "ModelDTOT": ...
    @overload
    @staticmethod
    def to_schema(data: "dict[str, Any]", *, schema_type: None = None) -> "dict[str, Any]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[ModelT]") -> "list[ModelT]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[ModelT]", *, schema_type: "type[ModelDTOT]") -> "list[ModelDTOT]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[ModelT]", *, schema_type: None = None) -> "list[ModelT]": ...
    @overload
    @staticmethod
    def to_schema(data: "ModelT") -> "ModelT": ...
    @overload
    @staticmethod
    def to_schema(data: Any, *, schema_type: None = None) -> Any: ...

    @staticmethod
    def to_schema(data: Any, *, schema_type: "Optional[type[ModelDTOT]]" = None) -> Any:
        """Convert data to a specified schema type.

        Supports conversion to dataclasses, msgspec structs, Pydantic models, and attrs classes.
        Handles both single objects and sequences.

        Raises:
            SQLSpecError if `schema_type` is not a valid type.

        Returns:
            Converted data in the specified schema type.
        """
        if schema_type is None:
            return data

        if is_dataclass(schema_type):
            if isinstance(data, list):
                return [schema_type(**(dict(row) if hasattr(row, "keys") else row)) for row in data]  # type: ignore[operator]
            if hasattr(data, "keys"):
                return schema_type(**dict(data))  # type: ignore[operator]
            if isinstance(data, dict):
                return schema_type(**data)  # type: ignore[operator]
            # No mapping interface available: pass the value through unchanged.
            return data

        if is_msgspec_struct(schema_type):
            # A sequence converts to list[schema_type]; anything else to a single struct.
            target = list[schema_type] if isinstance(data, Sequence) else schema_type  # type: ignore[valid-type] # pyright: ignore
            return convert(
                obj=data,
                type=target,
                from_attributes=True,
                dec_hook=partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS),
            )

        if is_pydantic_model(schema_type):
            if isinstance(data, Sequence):
                return get_type_adapter(list[schema_type]).validate_python(data, from_attributes=True)  # type: ignore[valid-type] # pyright: ignore
            return get_type_adapter(schema_type).validate_python(data, from_attributes=True)  # pyright: ignore

        if is_attrs_schema(schema_type):
            if CATTRS_INSTALLED:
                if isinstance(data, Sequence):
                    return cattrs_structure(data, list[schema_type])  # type: ignore[valid-type] # pyright: ignore
                if hasattr(data, "__attrs_attrs__"):
                    # Already an attrs instance: unstructure before re-structuring.
                    data = cattrs_unstructure(data)
                return cattrs_structure(data, schema_type)  # pyright: ignore
            if isinstance(data, list):
                return [schema_type(**(dict(row) if hasattr(row, "keys") else attrs_asdict(row))) for row in data]
            if hasattr(data, "keys"):
                return schema_type(**dict(data))
            if isinstance(data, dict):
                return schema_type(**data)
            # No mapping interface available: pass the value through unchanged.
            return data

        error_message = "`schema_type` should be a valid Dataclass, Pydantic model, Msgspec struct, or Attrs class"
        raise SQLSpecError(error_message)
@@ -0,0 +1,36 @@
|
|
|
1
|
+
from mypy_extensions import trait
|
|
2
|
+
from sqlglot import exp, parse_one
|
|
3
|
+
from sqlglot.dialects.dialect import DialectType
|
|
4
|
+
|
|
5
|
+
from sqlspec.core.statement import SQL, Statement
|
|
6
|
+
from sqlspec.exceptions import SQLConversionError
|
|
7
|
+
|
|
8
|
+
__all__ = ("SQLTranslatorMixin",)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@trait
class SQLTranslatorMixin:
    """Mixin for drivers supporting SQL translation."""

    __slots__ = ()

    def convert_to_dialect(self, statement: "Statement", to_dialect: DialectType = None, pretty: bool = True) -> str:
        """Render a statement as SQL text in another dialect.

        Args:
            statement: A ``SQL`` object, a sqlglot expression, or a raw SQL
                string (parsed with the driver's own dialect).
            to_dialect: Target dialect; falls back to the driver's dialect
                when not provided.
            pretty: Whether to pretty-print the generated SQL.

        Returns:
            The SQL text rendered for the target dialect.

        Raises:
            SQLConversionError: If the statement cannot be parsed, or the
                parsed expression cannot be rendered for the target dialect.
        """
        # isinstance already returns False for None, so no separate None check
        # is needed here (None would fall through to parse_one either way).
        if isinstance(statement, SQL):
            if statement.expression is None:
                msg = "Statement could not be parsed"
                raise SQLConversionError(msg)
            parsed_expression = statement.expression
        elif isinstance(statement, exp.Expression):
            parsed_expression = statement
        else:
            try:
                parsed_expression = parse_one(statement, dialect=self.dialect)  # type: ignore[attr-defined]
            except Exception as e:
                error_msg = f"Failed to parse SQL statement: {e!s}"
                raise SQLConversionError(error_msg) from e
        target_dialect = to_dialect or self.dialect  # type: ignore[attr-defined]
        try:
            return parsed_expression.sql(dialect=target_dialect, pretty=pretty)
        except Exception as e:
            error_msg = f"Failed to convert SQL expression to {target_dialect}: {e!s}"
            raise SQLConversionError(error_msg) from e