sqlspec 0.12.1__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sqlspec has been flagged as potentially problematic; see the registry's advisory details for more information.
- sqlspec/_sql.py +21 -180
- sqlspec/adapters/adbc/config.py +10 -12
- sqlspec/adapters/adbc/driver.py +120 -118
- sqlspec/adapters/aiosqlite/config.py +3 -3
- sqlspec/adapters/aiosqlite/driver.py +116 -141
- sqlspec/adapters/asyncmy/config.py +3 -4
- sqlspec/adapters/asyncmy/driver.py +123 -135
- sqlspec/adapters/asyncpg/config.py +3 -7
- sqlspec/adapters/asyncpg/driver.py +98 -140
- sqlspec/adapters/bigquery/config.py +4 -5
- sqlspec/adapters/bigquery/driver.py +231 -181
- sqlspec/adapters/duckdb/config.py +3 -6
- sqlspec/adapters/duckdb/driver.py +132 -124
- sqlspec/adapters/oracledb/config.py +6 -5
- sqlspec/adapters/oracledb/driver.py +242 -259
- sqlspec/adapters/psqlpy/config.py +3 -7
- sqlspec/adapters/psqlpy/driver.py +118 -93
- sqlspec/adapters/psycopg/config.py +34 -30
- sqlspec/adapters/psycopg/driver.py +342 -214
- sqlspec/adapters/sqlite/config.py +3 -3
- sqlspec/adapters/sqlite/driver.py +150 -104
- sqlspec/config.py +0 -4
- sqlspec/driver/_async.py +89 -98
- sqlspec/driver/_common.py +52 -17
- sqlspec/driver/_sync.py +81 -105
- sqlspec/driver/connection.py +207 -0
- sqlspec/driver/mixins/_csv_writer.py +91 -0
- sqlspec/driver/mixins/_pipeline.py +38 -49
- sqlspec/driver/mixins/_result_utils.py +27 -9
- sqlspec/driver/mixins/_storage.py +149 -216
- sqlspec/driver/mixins/_type_coercion.py +3 -4
- sqlspec/driver/parameters.py +138 -0
- sqlspec/exceptions.py +10 -2
- sqlspec/extensions/aiosql/adapter.py +0 -10
- sqlspec/extensions/litestar/handlers.py +0 -1
- sqlspec/extensions/litestar/plugin.py +0 -3
- sqlspec/extensions/litestar/providers.py +0 -14
- sqlspec/loader.py +31 -118
- sqlspec/protocols.py +542 -0
- sqlspec/service/__init__.py +3 -2
- sqlspec/service/_util.py +147 -0
- sqlspec/service/base.py +1116 -9
- sqlspec/statement/builder/__init__.py +42 -32
- sqlspec/statement/builder/_ddl_utils.py +0 -10
- sqlspec/statement/builder/_parsing_utils.py +10 -4
- sqlspec/statement/builder/base.py +70 -23
- sqlspec/statement/builder/column.py +283 -0
- sqlspec/statement/builder/ddl.py +102 -65
- sqlspec/statement/builder/delete.py +23 -7
- sqlspec/statement/builder/insert.py +29 -15
- sqlspec/statement/builder/merge.py +4 -4
- sqlspec/statement/builder/mixins/_aggregate_functions.py +113 -14
- sqlspec/statement/builder/mixins/_common_table_expr.py +0 -1
- sqlspec/statement/builder/mixins/_delete_from.py +1 -1
- sqlspec/statement/builder/mixins/_from.py +10 -8
- sqlspec/statement/builder/mixins/_group_by.py +0 -1
- sqlspec/statement/builder/mixins/_insert_from_select.py +0 -1
- sqlspec/statement/builder/mixins/_insert_values.py +0 -2
- sqlspec/statement/builder/mixins/_join.py +20 -13
- sqlspec/statement/builder/mixins/_limit_offset.py +3 -3
- sqlspec/statement/builder/mixins/_merge_clauses.py +3 -4
- sqlspec/statement/builder/mixins/_order_by.py +2 -2
- sqlspec/statement/builder/mixins/_pivot.py +4 -7
- sqlspec/statement/builder/mixins/_select_columns.py +6 -5
- sqlspec/statement/builder/mixins/_unpivot.py +6 -9
- sqlspec/statement/builder/mixins/_update_from.py +2 -1
- sqlspec/statement/builder/mixins/_update_set.py +11 -8
- sqlspec/statement/builder/mixins/_where.py +61 -34
- sqlspec/statement/builder/select.py +32 -17
- sqlspec/statement/builder/update.py +25 -11
- sqlspec/statement/filters.py +39 -14
- sqlspec/statement/parameter_manager.py +220 -0
- sqlspec/statement/parameters.py +210 -79
- sqlspec/statement/pipelines/__init__.py +166 -23
- sqlspec/statement/pipelines/analyzers/_analyzer.py +22 -25
- sqlspec/statement/pipelines/context.py +35 -39
- sqlspec/statement/pipelines/transformers/__init__.py +2 -3
- sqlspec/statement/pipelines/transformers/_expression_simplifier.py +19 -187
- sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +667 -43
- sqlspec/statement/pipelines/transformers/_remove_comments_and_hints.py +76 -0
- sqlspec/statement/pipelines/validators/_dml_safety.py +33 -18
- sqlspec/statement/pipelines/validators/_parameter_style.py +87 -14
- sqlspec/statement/pipelines/validators/_performance.py +38 -23
- sqlspec/statement/pipelines/validators/_security.py +39 -62
- sqlspec/statement/result.py +37 -129
- sqlspec/statement/splitter.py +0 -12
- sqlspec/statement/sql.py +885 -379
- sqlspec/statement/sql_compiler.py +140 -0
- sqlspec/storage/__init__.py +10 -2
- sqlspec/storage/backends/fsspec.py +82 -35
- sqlspec/storage/backends/obstore.py +66 -49
- sqlspec/storage/capabilities.py +101 -0
- sqlspec/storage/registry.py +56 -83
- sqlspec/typing.py +6 -434
- sqlspec/utils/cached_property.py +25 -0
- sqlspec/utils/correlation.py +0 -2
- sqlspec/utils/logging.py +0 -6
- sqlspec/utils/sync_tools.py +0 -4
- sqlspec/utils/text.py +0 -5
- sqlspec/utils/type_guards.py +892 -0
- {sqlspec-0.12.1.dist-info → sqlspec-0.13.0.dist-info}/METADATA +1 -1
- sqlspec-0.13.0.dist-info/RECORD +150 -0
- sqlspec/statement/builder/protocols.py +0 -20
- sqlspec/statement/pipelines/base.py +0 -315
- sqlspec/statement/pipelines/result_types.py +0 -41
- sqlspec/statement/pipelines/transformers/_remove_comments.py +0 -66
- sqlspec/statement/pipelines/transformers/_remove_hints.py +0 -81
- sqlspec/statement/pipelines/validators/base.py +0 -67
- sqlspec/storage/protocol.py +0 -170
- sqlspec-0.12.1.dist-info/RECORD +0 -145
- {sqlspec-0.12.1.dist-info → sqlspec-0.13.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.12.1.dist-info → sqlspec-0.13.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.12.1.dist-info → sqlspec-0.13.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/protocols.py
ADDED
|
@@ -0,0 +1,542 @@
|
|
|
1
|
+
"""Runtime-checkable protocols for SQLSpec to replace duck typing.
|
|
2
|
+
|
|
3
|
+
This module provides protocols that can be used for static type checking
|
|
4
|
+
and runtime isinstance() checks, replacing defensive hasattr() patterns.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from abc import abstractmethod
|
|
8
|
+
from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, Union, runtime_checkable
|
|
9
|
+
|
|
10
|
+
from typing_extensions import Self
|
|
11
|
+
|
|
12
|
+
if TYPE_CHECKING:
|
|
13
|
+
from collections.abc import AsyncIterator, Iterator
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
|
|
16
|
+
from sqlglot import exp
|
|
17
|
+
|
|
18
|
+
from sqlspec.statement.pipelines.context import SQLProcessingContext
|
|
19
|
+
from sqlspec.storage.capabilities import StorageCapabilities
|
|
20
|
+
from sqlspec.typing import ArrowRecordBatch, ArrowTable
|
|
21
|
+
|
|
22
|
+
__all__ = (
|
|
23
|
+
# Database Connection Protocols
|
|
24
|
+
"AsyncCloseableConnectionProtocol",
|
|
25
|
+
"AsyncCopyCapableConnectionProtocol",
|
|
26
|
+
"AsyncPipelineCapableDriverProtocol",
|
|
27
|
+
"AsyncTransactionCapableConnectionProtocol",
|
|
28
|
+
"AsyncTransactionStateConnectionProtocol",
|
|
29
|
+
"BytesConvertibleProtocol",
|
|
30
|
+
"DictProtocol",
|
|
31
|
+
"FilterAppenderProtocol",
|
|
32
|
+
"FilterParameterProtocol",
|
|
33
|
+
"HasExpressionsProtocol",
|
|
34
|
+
"HasLimitProtocol",
|
|
35
|
+
"HasOffsetProtocol",
|
|
36
|
+
"HasOrderByProtocol",
|
|
37
|
+
"HasRiskLevelProtocol",
|
|
38
|
+
"HasSQLMethodProtocol",
|
|
39
|
+
"HasWhereProtocol",
|
|
40
|
+
"IndexableRow",
|
|
41
|
+
"IterableParameters",
|
|
42
|
+
"ObjectStoreItemProtocol",
|
|
43
|
+
"ObjectStoreProtocol",
|
|
44
|
+
"ParameterValueProtocol",
|
|
45
|
+
"ProcessorProtocol",
|
|
46
|
+
"SQLBuilderProtocol",
|
|
47
|
+
"SelectBuilderProtocol",
|
|
48
|
+
"SyncCloseableConnectionProtocol",
|
|
49
|
+
"SyncCopyCapableConnectionProtocol",
|
|
50
|
+
"SyncPipelineCapableDriverProtocol",
|
|
51
|
+
"SyncTransactionCapableConnectionProtocol",
|
|
52
|
+
"SyncTransactionStateConnectionProtocol",
|
|
53
|
+
"WithMethodProtocol",
|
|
54
|
+
)
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@runtime_checkable
class IndexableRow(Protocol):
    """Structural type for database rows addressable by integer position."""

    def __getitem__(self, index: int) -> Any:
        """Return the column value stored at *index*."""
        ...

    def __len__(self) -> int:
        """Return the number of columns in the row."""
        ...


@runtime_checkable
class IterableParameters(Protocol):
    """Structural type for sized, iterable parameter collections."""

    def __iter__(self) -> Any:
        """Yield each bound parameter in order."""
        ...

    def __len__(self) -> int:
        """Return how many parameters are bound."""
        ...
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@runtime_checkable
class WithMethodProtocol(Protocol):
    """Structural type for objects exposing a ``with_`` method (SQLGlot expressions)."""

    def with_(self, *args: Any, **kwargs: Any) -> Any:
        """Attach a WITH clause and return the resulting expression."""
        ...


@runtime_checkable
class HasWhereProtocol(Protocol):
    """Structural type for SQL expressions that accept a WHERE clause."""

    def where(self, *args: Any, **kwargs: Any) -> Any:
        """Attach a WHERE clause and return the resulting expression."""
        ...


@runtime_checkable
class HasLimitProtocol(Protocol):
    """Structural type for SQL expressions that accept a LIMIT clause."""

    def limit(self, *args: Any, **kwargs: Any) -> Any:
        """Attach a LIMIT clause and return the resulting expression."""
        ...


@runtime_checkable
class HasOffsetProtocol(Protocol):
    """Structural type for SQL expressions that accept an OFFSET clause."""

    def offset(self, *args: Any, **kwargs: Any) -> Any:
        """Attach an OFFSET clause and return the resulting expression."""
        ...


@runtime_checkable
class HasOrderByProtocol(Protocol):
    """Structural type for SQL expressions that accept an ORDER BY clause."""

    def order_by(self, *args: Any, **kwargs: Any) -> Any:
        """Attach an ORDER BY clause and return the resulting expression."""
        ...
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
@runtime_checkable
class HasExpressionsProtocol(Protocol):
    """Structural type for SQL expressions carrying an ``expressions`` attribute."""

    expressions: Any


@runtime_checkable
class HasSQLMethodProtocol(Protocol):
    """Structural type for objects that render themselves via ``sql()``."""

    def sql(self, *args: Any, **kwargs: Any) -> str:
        """Render this object as a SQL string."""
        ...


@runtime_checkable
class FilterParameterProtocol(Protocol):
    """Structural type for filters that expose their bound parameters."""

    def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]:
        """Return the positional and keyword parameters carried by the filter."""
        ...


@runtime_checkable
class FilterAppenderProtocol(Protocol):
    """Structural type for filters that can attach themselves to a statement."""

    def append_to_statement(self, sql: Any) -> Any:
        """Apply this filter to *sql* and return the modified statement."""
        ...


@runtime_checkable
class ParameterValueProtocol(Protocol):
    """Structural type for parameter wrappers exposing ``value`` and ``type_hint``."""

    value: Any
    type_hint: str


@runtime_checkable
class HasRiskLevelProtocol(Protocol):
    """Structural type for objects exposing a ``risk_level``."""

    @property
    def risk_level(self) -> Any:
        """The risk level reported by this object."""
        ...


@runtime_checkable
class DictProtocol(Protocol):
    """Structural type for objects backed by a ``__dict__``."""

    __dict__: dict[str, Any]
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
class ProcessorProtocol(Protocol):
    """Interface for one step (validator, analyzer, or transformer) in the SQL pipeline.

    NOTE: deliberately not ``@runtime_checkable`` — implementations are wired
    in statically rather than discovered via isinstance().
    """

    @abstractmethod
    def process(
        self, expression: "Optional[exp.Expression]", context: "SQLProcessingContext"
    ) -> "Optional[exp.Expression]":
        """Run this step against *expression*.

        Args:
            expression: The SQL expression under processing.
            context: Mutable pipeline state and configuration.

        Returns:
            The (possibly modified) expression for transformers, or None for
            validators/analyzers.
        """
        ...
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
@runtime_checkable
class BytesConvertibleProtocol(Protocol):
    """Structural type for objects convertible to ``bytes``."""

    def __bytes__(self) -> bytes:
        """Return the byte representation of this object."""
        ...


@runtime_checkable
class ObjectStoreItemProtocol(Protocol):
    """Structural type for object-store listing entries with path/key attributes."""

    path: str
    key: "Optional[str]"
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
@runtime_checkable
class SyncTransactionCapableConnectionProtocol(Protocol):
    """Structural type for blocking connections with transaction support."""

    def commit(self) -> None:
        """Persist the current transaction."""
        ...

    def rollback(self) -> None:
        """Discard the current transaction."""
        ...


@runtime_checkable
class AsyncTransactionCapableConnectionProtocol(Protocol):
    """Structural type for async connections with transaction support."""

    async def commit(self) -> None:
        """Persist the current transaction."""
        ...

    async def rollback(self) -> None:
        """Discard the current transaction."""
        ...


@runtime_checkable
class SyncTransactionStateConnectionProtocol(SyncTransactionCapableConnectionProtocol, Protocol):
    """Blocking transactional connection that also exposes transaction state."""

    def in_transaction(self) -> bool:
        """Report whether a transaction is currently open."""
        ...

    def begin(self) -> None:
        """Open a new transaction."""
        ...


@runtime_checkable
class AsyncTransactionStateConnectionProtocol(AsyncTransactionCapableConnectionProtocol, Protocol):
    """Async transactional connection that also exposes transaction state."""

    def in_transaction(self) -> bool:
        """Report whether a transaction is currently open."""
        ...

    async def begin(self) -> None:
        """Open a new transaction."""
        ...
|
|
273
|
+
|
|
274
|
+
|
|
275
|
+
@runtime_checkable
class SyncCloseableConnectionProtocol(Protocol):
    """Structural type for blocking connections that can be closed."""

    def close(self) -> None:
        """Release the connection and its resources."""
        ...


@runtime_checkable
class AsyncCloseableConnectionProtocol(Protocol):
    """Structural type for async connections that can be closed."""

    async def close(self) -> None:
        """Release the connection and its resources."""
        ...
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
@runtime_checkable
class SyncCopyCapableConnectionProtocol(Protocol):
    """Structural type for blocking connections supporting COPY operations."""

    def copy_from(self, table: str, file: Any, **kwargs: Any) -> None:
        """Load data from *file* into *table*."""
        ...

    def copy_to(self, table: str, file: Any, **kwargs: Any) -> None:
        """Dump data from *table* into *file*."""
        ...


@runtime_checkable
class AsyncCopyCapableConnectionProtocol(Protocol):
    """Structural type for async connections supporting COPY operations."""

    async def copy_from(self, table: str, file: Any, **kwargs: Any) -> None:
        """Load data from *file* into *table*."""
        ...

    async def copy_to(self, table: str, file: Any, **kwargs: Any) -> None:
        """Dump data from *table* into *file*."""
        ...
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
@runtime_checkable
class SyncPipelineCapableDriverProtocol(Protocol):
    """Structural type for blocking drivers with native pipeline execution."""

    def _execute_pipeline_native(self, operations: list[Any], **options: Any) -> list[Any]:
        """Execute the queued pipeline *operations* using the driver's native batching."""
        ...


@runtime_checkable
class AsyncPipelineCapableDriverProtocol(Protocol):
    """Structural type for async drivers with native pipeline execution."""

    async def _execute_pipeline_native(self, operations: list[Any], **options: Any) -> list[Any]:
        """Execute the queued pipeline *operations* using the driver's native batching."""
        ...
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
@runtime_checkable
class ObjectStoreProtocol(Protocol):
    """Unified interface for object-storage backends.

    Backends implement the synchronous operations and, where possible, async
    twins suffixed ``_async``. All methods use 'path' terminology for
    consistency with object store patterns. Default bodies are inert stubs —
    empty values for core operations, ``NotImplementedError`` for optional
    Arrow/async features — which concrete backends override.
    """

    # Class-level capability descriptor advertised by each backend.
    capabilities: ClassVar["StorageCapabilities"]

    def __init__(self, uri: str, **kwargs: Any) -> None:
        return

    # ---- core sync operations ------------------------------------------
    def read_bytes(self, path: "Union[str, Path]", **kwargs: Any) -> bytes:
        """Read raw bytes stored at *path*."""
        return b""

    def write_bytes(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None:
        """Store raw bytes at *path*."""
        return

    def read_text(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str:
        """Read text stored at *path* decoded with *encoding*."""
        return ""

    def write_text(self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any) -> None:
        """Store text at *path* encoded with *encoding*."""
        return

    # ---- object operations ---------------------------------------------
    def exists(self, path: "Union[str, Path]", **kwargs: Any) -> bool:
        """Return True when an object exists at *path*."""
        return False

    def delete(self, path: "Union[str, Path]", **kwargs: Any) -> None:
        """Remove the object at *path*."""
        return

    def copy(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None:
        """Duplicate *source* to *destination*."""
        return

    def move(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None:
        """Relocate *source* to *destination*."""
        return

    # ---- listing operations --------------------------------------------
    def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]:
        """Enumerate object paths under *prefix*."""
        return []

    def glob(self, pattern: str, **kwargs: Any) -> list[str]:
        """Enumerate object paths matching the glob *pattern*."""
        return []

    # ---- path operations -----------------------------------------------
    def is_object(self, path: "Union[str, Path]") -> bool:
        """Return True when *path* names a single object."""
        return False

    def is_path(self, path: "Union[str, Path]") -> bool:
        """Return True when *path* names a prefix (directory-like)."""
        return False

    def get_metadata(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]:
        """Return metadata for the object at *path*."""
        return {}

    # ---- Arrow operations ----------------------------------------------
    def read_arrow(self, path: "Union[str, Path]", **kwargs: Any) -> "ArrowTable":
        """Read an Arrow table from storage (obstore backends use native Arrow ops when available)."""
        raise NotImplementedError("Arrow reading not implemented")

    def write_arrow(self, path: "Union[str, Path]", table: "ArrowTable", **kwargs: Any) -> None:
        """Write an Arrow table to storage (obstore backends use native Arrow ops when available)."""
        raise NotImplementedError("Arrow writing not implemented")

    def stream_arrow(self, pattern: str, **kwargs: Any) -> "Iterator[ArrowRecordBatch]":
        """Stream Arrow record batches from objects matching *pattern*."""
        raise NotImplementedError("Arrow streaming not implemented")

    # ---- async twins ----------------------------------------------------
    async def read_bytes_async(self, path: "Union[str, Path]", **kwargs: Any) -> bytes:
        """Async variant of :meth:`read_bytes`."""
        raise NotImplementedError("Async operations not implemented")

    async def write_bytes_async(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None:
        """Async variant of :meth:`write_bytes`."""
        raise NotImplementedError("Async operations not implemented")

    async def read_text_async(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str:
        """Async variant of :meth:`read_text`."""
        raise NotImplementedError("Async operations not implemented")

    async def write_text_async(
        self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any
    ) -> None:
        """Async variant of :meth:`write_text`."""
        raise NotImplementedError("Async operations not implemented")

    async def exists_async(self, path: "Union[str, Path]", **kwargs: Any) -> bool:
        """Async variant of :meth:`exists`."""
        raise NotImplementedError("Async operations not implemented")

    async def delete_async(self, path: "Union[str, Path]", **kwargs: Any) -> None:
        """Async variant of :meth:`delete`."""
        raise NotImplementedError("Async operations not implemented")

    async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]:
        """Async variant of :meth:`list_objects`."""
        raise NotImplementedError("Async operations not implemented")

    async def copy_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None:
        """Async variant of :meth:`copy`."""
        raise NotImplementedError("Async operations not implemented")

    async def move_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None:
        """Async variant of :meth:`move`."""
        raise NotImplementedError("Async operations not implemented")

    async def get_metadata_async(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]:
        """Async variant of :meth:`get_metadata`."""
        raise NotImplementedError("Async operations not implemented")

    async def read_arrow_async(self, path: "Union[str, Path]", **kwargs: Any) -> "ArrowTable":
        """Async variant of :meth:`read_arrow`."""
        raise NotImplementedError("Async arrow reading not implemented")

    async def write_arrow_async(self, path: "Union[str, Path]", table: "ArrowTable", **kwargs: Any) -> None:
        """Async variant of :meth:`write_arrow`."""
        raise NotImplementedError("Async arrow writing not implemented")

    async def stream_arrow_async(self, pattern: str, **kwargs: Any) -> "AsyncIterator[ArrowRecordBatch]":
        """Async variant of :meth:`stream_arrow`."""
        raise NotImplementedError("Async arrow streaming not implemented")
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
# =============================================================================
|
|
505
|
+
# SQL Builder Protocols
|
|
506
|
+
# =============================================================================
|
|
507
|
+
|
|
508
|
+
|
|
509
|
+
@runtime_checkable
class SQLBuilderProtocol(Protocol):
    """Structural type for SQL query builders."""

    # Internal builder state shared by all query builders.
    _expression: "Optional[exp.Expression]"
    _parameters: dict[str, Any]
    _parameter_counter: int
    dialect: Any
    dialect_name: "Optional[str]"

    @property
    def parameters(self) -> dict[str, Any]:
        """Read-only view of the query parameters collected so far."""
        ...

    def add_parameter(self, value: Any, name: "Optional[str]" = None) -> tuple[Any, str]:
        """Bind *value* as a parameter, returning the value and its generated name."""
        ...

    def _parameterize_expression(self, expression: "exp.Expression") -> "exp.Expression":
        """Replace literal values in *expression* with bound parameters."""
        ...

    def build(self) -> "Union[exp.Expression, Any]":
        """Produce the final SQL expression."""
        ...


class SelectBuilderProtocol(SQLBuilderProtocol, Protocol):
    """Structural type for SELECT query builders."""

    def select(self, *columns: "Union[str, exp.Expression]") -> "Self":
        """Append columns to the SELECT clause and return the builder."""
        ...
|
sqlspec/service/__init__.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
1
|
-
from sqlspec.service.base import
|
|
1
|
+
from sqlspec.service.base import SQLSpecAsyncService, SQLSpecSyncService
|
|
2
|
+
from sqlspec.service.pagination import OffsetPagination
|
|
2
3
|
|
|
3
|
-
__all__ = ("
|
|
4
|
+
__all__ = ("OffsetPagination", "SQLSpecAsyncService", "SQLSpecSyncService")
|
sqlspec/service/_util.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
from collections.abc import Sequence
|
|
2
|
+
from functools import partial
|
|
3
|
+
from typing import Any, Optional, TypeVar, Union, cast, overload
|
|
4
|
+
|
|
5
|
+
from sqlspec.driver.mixins._result_utils import _DEFAULT_TYPE_DECODERS, _default_msgspec_deserializer
|
|
6
|
+
from sqlspec.exceptions import SQLSpecError
|
|
7
|
+
from sqlspec.service.pagination import OffsetPagination
|
|
8
|
+
from sqlspec.statement.filters import FilterTypeT, LimitOffsetFilter, StatementFilter
|
|
9
|
+
from sqlspec.typing import BaseModel, DataclassProtocol, ModelDTOT, ModelT, Struct, convert, get_type_adapter
|
|
10
|
+
from sqlspec.utils.type_guards import is_dataclass, is_msgspec_struct, is_pydantic_model
|
|
11
|
+
|
|
12
|
+
__all__ = ("ResultConverter", "find_filter")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
T = TypeVar("T")
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def find_filter(
    filter_type: "type[FilterTypeT]", filters: "Optional[Sequence[StatementFilter]]" = None
) -> "Optional[FilterTypeT]":
    """Return the first filter in *filters* that is an instance of *filter_type*.

    Args:
        filter_type: The filter class to look for.
        filters: Candidate filters to search, in order.

    Returns:
        The first matching filter instance, or ``None`` when no filter matches
        (or *filters* is ``None``).
    """
    if filters is None:
        return None
    for candidate in filters:
        if isinstance(candidate, filter_type):
            return cast("Optional[FilterTypeT]", candidate)
    return None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# TODO: add overloads for each type of pagination in the future
class ResultConverter:
    """Mixin that converts result rows into a caller-supplied schema type.

    A single object is converted to ``schema_type`` (or returned unchanged
    when no ``schema_type`` is given); a sequence of objects is wrapped in an
    ``OffsetPagination`` whose items are converted the same way, with
    limit/offset taken from a ``LimitOffsetFilter`` found in *filters*.

    Args:
        data: A database model instance or row mapping, or a sequence of them.
            Type: :class:`~sqlspec.typing.ModelDictT`
        total: Optional total row count; defaults to ``len(data)`` for sequences.
        filters: Statement filters searched for a ``LimitOffsetFilter``.
        schema_type: Target schema (dataclass, msgspec Struct, or Pydantic model).

    Returns:
        The converted schema object, or an ``OffsetPagination`` of them.

    Raises:
        SQLSpecError: If ``schema_type`` is not a recognized model kind.
    """

    @overload
    def to_schema(
        self,
        data: "ModelT",
        total: "int | None" = None,
        filters: "Sequence[StatementFilter] | None" = None,
        *,
        schema_type: None = None,
    ) -> "ModelT": ...
    @overload
    def to_schema(
        self,
        data: "dict[str, Any] | Struct | BaseModel | DataclassProtocol",
        total: "int | None" = None,
        filters: "Sequence[StatementFilter] | None" = None,
        *,
        schema_type: "type[ModelDTOT]",
    ) -> "ModelDTOT": ...
    @overload
    def to_schema(
        self,
        data: "Sequence[ModelT]",
        total: "int | None" = None,
        filters: "Sequence[StatementFilter] | None" = None,
        *,
        schema_type: None = None,
    ) -> "OffsetPagination[ModelT]": ...
    @overload
    def to_schema(
        self,
        data: "Sequence[dict[str, Any] | Struct | BaseModel | DataclassProtocol]",
        total: "int | None" = None,
        filters: "Sequence[StatementFilter] | None" = None,
        *,
        schema_type: "type[ModelDTOT]",
    ) -> "OffsetPagination[ModelDTOT]": ...
    def to_schema(
        self,
        data: "ModelT | Sequence[ModelT] | dict[str, Any] | Struct | BaseModel | DataclassProtocol | Sequence[dict[str, Any] | Struct | BaseModel | DataclassProtocol]",
        total: "int | None" = None,
        filters: "Sequence[StatementFilter] | None" = None,
        *,
        schema_type: "type[ModelDTOT] | None" = None,
    ) -> "Union[ModelT, ModelDTOT , OffsetPagination[ModelT] , OffsetPagination[ModelDTOT]]":
        # Single-object path: convert (or pass through) and return immediately.
        if not isinstance(data, Sequence):
            if schema_type is None:
                return cast("ModelT", data)
            if is_dataclass(schema_type):
                return cast("ModelDTOT", schema_type(**data))  # type: ignore[operator]
            if is_msgspec_struct(schema_type):
                return cast(
                    "ModelDTOT",
                    convert(
                        obj=data,
                        type=schema_type,
                        from_attributes=True,
                        dec_hook=partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS),
                    ),
                )
            if is_pydantic_model(schema_type):  # pyright: ignore
                return cast(
                    "ModelDTOT",
                    get_type_adapter(schema_type).validate_python(data, from_attributes=True),  # pyright: ignore
                )
            # BUGFIX: an unrecognized schema_type previously fell through to an
            # `assert isinstance(data, Sequence)`, raising AssertionError (or
            # passing silently under `python -O`). Raise the documented error.
            msg = "`schema_type` should be a valid Dataclass, Pydantic model or Msgspec struct"
            raise SQLSpecError(msg)
        # Sequence path: every single-object branch above returned or raised,
        # so `data` is guaranteed to be a Sequence here (no assert needed).
        limit_offset = find_filter(LimitOffsetFilter, filters=filters)
        if schema_type is None:
            return OffsetPagination[ModelT](
                items=cast("list[ModelT]", data),
                limit=limit_offset.limit if limit_offset else len(data),
                offset=limit_offset.offset if limit_offset else 0,
                total=total if total is not None else len(data),
            )
        converted_items: Sequence[ModelDTOT]
        if is_dataclass(schema_type):
            converted_items = [schema_type(**item) for item in data]  # type: ignore[operator]
        elif is_msgspec_struct(schema_type):
            converted_items = convert(
                obj=data,
                type=list[schema_type],  # type: ignore[valid-type]
                from_attributes=True,
                dec_hook=partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS),
            )
        elif is_pydantic_model(schema_type):  # pyright: ignore
            converted_items = get_type_adapter(list[schema_type]).validate_python(data, from_attributes=True)  # type: ignore[valid-type] # pyright: ignore[reportUnknownArgumentType]
        else:
            msg = "`schema_type` should be a valid Dataclass, Pydantic model or Msgspec struct"
            raise SQLSpecError(msg)

        return OffsetPagination[ModelDTOT](
            items=cast("list[ModelDTOT]", converted_items),
            limit=limit_offset.limit if limit_offset else len(data),
            offset=limit_offset.offset if limit_offset else 0,
            total=total if total is not None else len(data),
        )
|