sqlspec 0.11.1__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlspec might be problematic.

Files changed (155)
  1. sqlspec/__init__.py +16 -3
  2. sqlspec/_serialization.py +3 -10
  3. sqlspec/_sql.py +1147 -0
  4. sqlspec/_typing.py +343 -41
  5. sqlspec/adapters/adbc/__init__.py +2 -6
  6. sqlspec/adapters/adbc/config.py +474 -149
  7. sqlspec/adapters/adbc/driver.py +330 -621
  8. sqlspec/adapters/aiosqlite/__init__.py +2 -6
  9. sqlspec/adapters/aiosqlite/config.py +143 -57
  10. sqlspec/adapters/aiosqlite/driver.py +269 -431
  11. sqlspec/adapters/asyncmy/__init__.py +3 -8
  12. sqlspec/adapters/asyncmy/config.py +247 -202
  13. sqlspec/adapters/asyncmy/driver.py +218 -436
  14. sqlspec/adapters/asyncpg/__init__.py +4 -7
  15. sqlspec/adapters/asyncpg/config.py +329 -176
  16. sqlspec/adapters/asyncpg/driver.py +417 -487
  17. sqlspec/adapters/bigquery/__init__.py +2 -2
  18. sqlspec/adapters/bigquery/config.py +407 -0
  19. sqlspec/adapters/bigquery/driver.py +600 -553
  20. sqlspec/adapters/duckdb/__init__.py +4 -1
  21. sqlspec/adapters/duckdb/config.py +432 -321
  22. sqlspec/adapters/duckdb/driver.py +392 -406
  23. sqlspec/adapters/oracledb/__init__.py +3 -8
  24. sqlspec/adapters/oracledb/config.py +625 -0
  25. sqlspec/adapters/oracledb/driver.py +548 -921
  26. sqlspec/adapters/psqlpy/__init__.py +4 -7
  27. sqlspec/adapters/psqlpy/config.py +372 -203
  28. sqlspec/adapters/psqlpy/driver.py +197 -533
  29. sqlspec/adapters/psycopg/__init__.py +3 -8
  30. sqlspec/adapters/psycopg/config.py +741 -0
  31. sqlspec/adapters/psycopg/driver.py +734 -694
  32. sqlspec/adapters/sqlite/__init__.py +2 -6
  33. sqlspec/adapters/sqlite/config.py +146 -81
  34. sqlspec/adapters/sqlite/driver.py +242 -405
  35. sqlspec/base.py +220 -784
  36. sqlspec/config.py +354 -0
  37. sqlspec/driver/__init__.py +22 -0
  38. sqlspec/driver/_async.py +252 -0
  39. sqlspec/driver/_common.py +338 -0
  40. sqlspec/driver/_sync.py +261 -0
  41. sqlspec/driver/mixins/__init__.py +17 -0
  42. sqlspec/driver/mixins/_pipeline.py +523 -0
  43. sqlspec/driver/mixins/_result_utils.py +122 -0
  44. sqlspec/driver/mixins/_sql_translator.py +35 -0
  45. sqlspec/driver/mixins/_storage.py +993 -0
  46. sqlspec/driver/mixins/_type_coercion.py +131 -0
  47. sqlspec/exceptions.py +299 -7
  48. sqlspec/extensions/aiosql/__init__.py +10 -0
  49. sqlspec/extensions/aiosql/adapter.py +474 -0
  50. sqlspec/extensions/litestar/__init__.py +1 -6
  51. sqlspec/extensions/litestar/_utils.py +1 -5
  52. sqlspec/extensions/litestar/config.py +5 -6
  53. sqlspec/extensions/litestar/handlers.py +13 -12
  54. sqlspec/extensions/litestar/plugin.py +22 -24
  55. sqlspec/extensions/litestar/providers.py +37 -55
  56. sqlspec/loader.py +528 -0
  57. sqlspec/service/__init__.py +3 -0
  58. sqlspec/service/base.py +24 -0
  59. sqlspec/service/pagination.py +26 -0
  60. sqlspec/statement/__init__.py +21 -0
  61. sqlspec/statement/builder/__init__.py +54 -0
  62. sqlspec/statement/builder/_ddl_utils.py +119 -0
  63. sqlspec/statement/builder/_parsing_utils.py +135 -0
  64. sqlspec/statement/builder/base.py +328 -0
  65. sqlspec/statement/builder/ddl.py +1379 -0
  66. sqlspec/statement/builder/delete.py +80 -0
  67. sqlspec/statement/builder/insert.py +274 -0
  68. sqlspec/statement/builder/merge.py +95 -0
  69. sqlspec/statement/builder/mixins/__init__.py +65 -0
  70. sqlspec/statement/builder/mixins/_aggregate_functions.py +151 -0
  71. sqlspec/statement/builder/mixins/_case_builder.py +91 -0
  72. sqlspec/statement/builder/mixins/_common_table_expr.py +91 -0
  73. sqlspec/statement/builder/mixins/_delete_from.py +34 -0
  74. sqlspec/statement/builder/mixins/_from.py +61 -0
  75. sqlspec/statement/builder/mixins/_group_by.py +119 -0
  76. sqlspec/statement/builder/mixins/_having.py +35 -0
  77. sqlspec/statement/builder/mixins/_insert_from_select.py +48 -0
  78. sqlspec/statement/builder/mixins/_insert_into.py +36 -0
  79. sqlspec/statement/builder/mixins/_insert_values.py +69 -0
  80. sqlspec/statement/builder/mixins/_join.py +110 -0
  81. sqlspec/statement/builder/mixins/_limit_offset.py +53 -0
  82. sqlspec/statement/builder/mixins/_merge_clauses.py +405 -0
  83. sqlspec/statement/builder/mixins/_order_by.py +46 -0
  84. sqlspec/statement/builder/mixins/_pivot.py +82 -0
  85. sqlspec/statement/builder/mixins/_returning.py +37 -0
  86. sqlspec/statement/builder/mixins/_select_columns.py +60 -0
  87. sqlspec/statement/builder/mixins/_set_ops.py +122 -0
  88. sqlspec/statement/builder/mixins/_unpivot.py +80 -0
  89. sqlspec/statement/builder/mixins/_update_from.py +54 -0
  90. sqlspec/statement/builder/mixins/_update_set.py +91 -0
  91. sqlspec/statement/builder/mixins/_update_table.py +29 -0
  92. sqlspec/statement/builder/mixins/_where.py +374 -0
  93. sqlspec/statement/builder/mixins/_window_functions.py +86 -0
  94. sqlspec/statement/builder/protocols.py +20 -0
  95. sqlspec/statement/builder/select.py +206 -0
  96. sqlspec/statement/builder/update.py +178 -0
  97. sqlspec/statement/filters.py +571 -0
  98. sqlspec/statement/parameters.py +736 -0
  99. sqlspec/statement/pipelines/__init__.py +67 -0
  100. sqlspec/statement/pipelines/analyzers/__init__.py +9 -0
  101. sqlspec/statement/pipelines/analyzers/_analyzer.py +649 -0
  102. sqlspec/statement/pipelines/base.py +315 -0
  103. sqlspec/statement/pipelines/context.py +119 -0
  104. sqlspec/statement/pipelines/result_types.py +41 -0
  105. sqlspec/statement/pipelines/transformers/__init__.py +8 -0
  106. sqlspec/statement/pipelines/transformers/_expression_simplifier.py +256 -0
  107. sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +623 -0
  108. sqlspec/statement/pipelines/transformers/_remove_comments.py +66 -0
  109. sqlspec/statement/pipelines/transformers/_remove_hints.py +81 -0
  110. sqlspec/statement/pipelines/validators/__init__.py +23 -0
  111. sqlspec/statement/pipelines/validators/_dml_safety.py +275 -0
  112. sqlspec/statement/pipelines/validators/_parameter_style.py +297 -0
  113. sqlspec/statement/pipelines/validators/_performance.py +703 -0
  114. sqlspec/statement/pipelines/validators/_security.py +990 -0
  115. sqlspec/statement/pipelines/validators/base.py +67 -0
  116. sqlspec/statement/result.py +527 -0
  117. sqlspec/statement/splitter.py +701 -0
  118. sqlspec/statement/sql.py +1198 -0
  119. sqlspec/storage/__init__.py +15 -0
  120. sqlspec/storage/backends/__init__.py +0 -0
  121. sqlspec/storage/backends/base.py +166 -0
  122. sqlspec/storage/backends/fsspec.py +315 -0
  123. sqlspec/storage/backends/obstore.py +464 -0
  124. sqlspec/storage/protocol.py +170 -0
  125. sqlspec/storage/registry.py +315 -0
  126. sqlspec/typing.py +157 -36
  127. sqlspec/utils/correlation.py +155 -0
  128. sqlspec/utils/deprecation.py +3 -6
  129. sqlspec/utils/fixtures.py +6 -11
  130. sqlspec/utils/logging.py +135 -0
  131. sqlspec/utils/module_loader.py +45 -43
  132. sqlspec/utils/serializers.py +4 -0
  133. sqlspec/utils/singleton.py +6 -8
  134. sqlspec/utils/sync_tools.py +15 -27
  135. sqlspec/utils/text.py +58 -26
  136. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/METADATA +97 -26
  137. sqlspec-0.12.0.dist-info/RECORD +145 -0
  138. sqlspec/adapters/bigquery/config/__init__.py +0 -3
  139. sqlspec/adapters/bigquery/config/_common.py +0 -40
  140. sqlspec/adapters/bigquery/config/_sync.py +0 -87
  141. sqlspec/adapters/oracledb/config/__init__.py +0 -9
  142. sqlspec/adapters/oracledb/config/_asyncio.py +0 -186
  143. sqlspec/adapters/oracledb/config/_common.py +0 -131
  144. sqlspec/adapters/oracledb/config/_sync.py +0 -186
  145. sqlspec/adapters/psycopg/config/__init__.py +0 -19
  146. sqlspec/adapters/psycopg/config/_async.py +0 -169
  147. sqlspec/adapters/psycopg/config/_common.py +0 -56
  148. sqlspec/adapters/psycopg/config/_sync.py +0 -168
  149. sqlspec/filters.py +0 -331
  150. sqlspec/mixins.py +0 -305
  151. sqlspec/statement.py +0 -378
  152. sqlspec-0.11.1.dist-info/RECORD +0 -69
  153. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/WHEEL +0 -0
  154. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/licenses/LICENSE +0 -0
  155. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/driver/mixins/_pipeline.py (new file)
@@ -0,0 +1,523 @@
+ """Pipeline execution mixin for batch database operations.
+
+ This module provides mixins that enable pipelined execution of SQL statements,
+ allowing multiple operations to be sent to the database in a single network
+ round-trip for improved performance.
+
+ The implementation leverages native driver support where available (psycopg, asyncpg, oracledb)
+ and provides high-quality simulated behavior for others.
+ """
+
+ from dataclasses import dataclass
+ from typing import TYPE_CHECKING, Any, Optional, Union, cast
+
+ from sqlspec.exceptions import PipelineExecutionError
+ from sqlspec.statement.filters import StatementFilter
+ from sqlspec.statement.result import SQLResult
+ from sqlspec.statement.sql import SQL
+ from sqlspec.utils.logging import get_logger
+
+ if TYPE_CHECKING:
+     from typing import Literal
+
+     from sqlspec.config import DriverT
+     from sqlspec.driver import AsyncDriverAdapterProtocol, SyncDriverAdapterProtocol
+     from sqlspec.typing import StatementParameters
+
+ __all__ = (
+     "AsyncPipeline",
+     "AsyncPipelinedExecutionMixin",
+     "Pipeline",
+     "PipelineOperation",
+     "SyncPipelinedExecutionMixin",
+ )
+
+ logger = get_logger(__name__)
+
+
+ @dataclass
+ class PipelineOperation:
+     """Container for a queued pipeline operation."""
+
+     sql: SQL
+     operation_type: "Literal['execute', 'execute_many', 'execute_script', 'select']"
+     filters: "Optional[list[StatementFilter]]" = None
+     original_params: "Optional[Any]" = None
+
+
+ class SyncPipelinedExecutionMixin:
+     """Mixin providing pipeline execution for sync drivers."""
+
+     __slots__ = ()
+
+     def pipeline(
+         self,
+         *,
+         isolation_level: "Optional[str]" = None,
+         continue_on_error: bool = False,
+         max_operations: int = 1000,
+         **options: Any,
+     ) -> "Pipeline":
+         """Create a new pipeline for batch operations.
+
+         Args:
+             isolation_level: Transaction isolation level
+             continue_on_error: Continue processing after errors
+             max_operations: Maximum operations before auto-flush
+             **options: Driver-specific pipeline options
+
+         Returns:
+             A new Pipeline instance for queuing operations
+         """
+         return Pipeline(
+             driver=cast("SyncDriverAdapterProtocol[Any, Any]", self),
+             isolation_level=isolation_level,
+             continue_on_error=continue_on_error,
+             max_operations=max_operations,
+             options=options,
+         )
+
+
+ class AsyncPipelinedExecutionMixin:
+     """Async version of pipeline execution mixin."""
+
+     __slots__ = ()
+
+     def pipeline(
+         self,
+         *,
+         isolation_level: "Optional[str]" = None,
+         continue_on_error: bool = False,
+         max_operations: int = 1000,
+         **options: Any,
+     ) -> "AsyncPipeline":
+         """Create a new async pipeline for batch operations."""
+         return AsyncPipeline(
+             driver=cast("AsyncDriverAdapterProtocol[Any, Any]", self),
+             isolation_level=isolation_level,
+             continue_on_error=continue_on_error,
+             max_operations=max_operations,
+             options=options,
+         )
+
+
+ class Pipeline:
+     """Synchronous pipeline with enhanced parameter handling."""
+
+     def __init__(
+         self,
+         driver: "DriverT",  # pyright: ignore
+         isolation_level: "Optional[str]" = None,
+         continue_on_error: bool = False,
+         max_operations: int = 1000,
+         options: "Optional[dict[str, Any]]" = None,
+     ) -> None:
+         self.driver = driver
+         self.isolation_level = isolation_level
+         self.continue_on_error = continue_on_error
+         self.max_operations = max_operations
+         self.options = options or {}
+         self._operations: list[PipelineOperation] = []
+         self._results: Optional[list[SQLResult[Any]]] = None
+         self._simulation_logged = False
+
+     def add_execute(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "Pipeline":
+         """Add an execute operation to the pipeline.
+
+         Args:
+             statement: SQL statement to execute
+             *parameters: Mixed positional args containing parameters and filters
+             **kwargs: Named parameters
+
+         Returns:
+             Self for fluent API
+         """
+         self._operations.append(
+             PipelineOperation(
+                 sql=SQL(statement, *parameters, _config=self.driver.config, **kwargs), operation_type="execute"
+             )
+         )
+
+         # Check for auto-flush
+         if len(self._operations) >= self.max_operations:
+             logger.warning("Pipeline auto-flushing at %s operations", len(self._operations))
+             self.process()
+
+         return self
+
+     def add_select(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "Pipeline":
+         """Add a select operation to the pipeline."""
+         self._operations.append(
+             PipelineOperation(
+                 sql=SQL(statement, *parameters, _config=self.driver.config, **kwargs), operation_type="select"
+             )
+         )
+         return self
+
+     def add_execute_many(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "Pipeline":
+         """Add batch execution preserving parameter types.
+
+         Args:
+             statement: SQL statement to execute multiple times
+             *parameters: First arg should be batch data (list of param sets),
+                 followed by optional StatementFilter instances
+             **kwargs: Not typically used for execute_many
+         """
+         # First parameter should be the batch data
+         if not parameters or not isinstance(parameters[0], (list, tuple)):
+             msg = "execute_many requires a sequence of parameter sets as first parameter"
+             raise ValueError(msg)
+
+         batch_params = parameters[0]
+         # Convert tuple to list if needed
+         if isinstance(batch_params, tuple):
+             batch_params = list(batch_params)
+         # Create SQL object and mark as many, passing remaining args as filters
+         sql_obj = SQL(statement, *parameters[1:], **kwargs).as_many(batch_params)
+
+         self._operations.append(PipelineOperation(sql=sql_obj, operation_type="execute_many"))
+         return self
+
+     def add_execute_script(self, script: "Union[str, SQL]", *filters: StatementFilter, **kwargs: Any) -> "Pipeline":
+         """Add a multi-statement script to the pipeline."""
+         if isinstance(script, SQL):
+             sql_obj = script.as_script()
+         else:
+             sql_obj = SQL(script, *filters, _config=self.driver.config, **kwargs).as_script()
+
+         self._operations.append(PipelineOperation(sql=sql_obj, operation_type="execute_script"))
+         return self
+
+     def process(self, filters: "Optional[list[StatementFilter]]" = None) -> "list[SQLResult]":
+         """Execute all queued operations.
+
+         Args:
+             filters: Global filters to apply to all operations
+
+         Returns:
+             List of results from all operations
+         """
+         if not self._operations:
+             return []
+
+         # Apply global filters
+         if filters:
+             self._apply_global_filters(filters)
+
+         # Check for native support
+         if hasattr(self.driver, "_execute_pipeline_native"):
+             results = self.driver._execute_pipeline_native(self._operations, **self.options)  # pyright: ignore
+         else:
+             results = self._execute_pipeline_simulated()
+
+         self._results = results
+         self._operations.clear()
+         return cast("list[SQLResult]", results)
+
+     def _execute_pipeline_simulated(self) -> "list[SQLResult]":
+         """Enhanced simulation with transaction support and error handling."""
+         results: list[SQLResult[Any]] = []
+         connection = None
+         auto_transaction = False
+
+         # Only log once per pipeline, not for each operation
+         if not self._simulation_logged:
+             logger.info(
+                 "%s using simulated pipeline. Native support: %s",
+                 self.driver.__class__.__name__,
+                 self._has_native_support(),
+             )
+             self._simulation_logged = True
+
+         try:
+             # Get a connection for the entire pipeline
+             connection = self.driver._connection()
+
+             # Start transaction if not already in one
+             if self.isolation_level:
+                 # Set isolation level if specified
+                 pass  # Driver-specific implementation
+
+             if hasattr(connection, "in_transaction") and not connection.in_transaction():
+                 if hasattr(connection, "begin"):
+                     connection.begin()
+                     auto_transaction = True
+
+             # Process each operation
+             for i, op in enumerate(self._operations):
+                 self._execute_single_operation(i, op, results, connection, auto_transaction)
+
+             # Commit if we started the transaction
+             if auto_transaction and hasattr(connection, "commit"):
+                 connection.commit()
+
+         except Exception as e:
+             if connection and auto_transaction and hasattr(connection, "rollback"):
+                 connection.rollback()
+             if not isinstance(e, PipelineExecutionError):
+                 msg = f"Pipeline execution failed: {e}"
+                 raise PipelineExecutionError(msg) from e
+             raise
+
+         return results
+
+     def _execute_single_operation(
+         self, i: int, op: PipelineOperation, results: "list[SQLResult[Any]]", connection: Any, auto_transaction: bool
+     ) -> None:
+         """Execute a single pipeline operation with error handling."""
+         try:
+             # Execute based on operation type
+             result: SQLResult[Any]
+             if op.operation_type == "execute_script":
+                 result = cast("SQLResult[Any]", self.driver.execute_script(op.sql, _connection=connection))
+             elif op.operation_type == "execute_many":
+                 result = cast("SQLResult[Any]", self.driver.execute_many(op.sql, _connection=connection))
+             else:
+                 result = cast("SQLResult[Any]", self.driver.execute(op.sql, _connection=connection))
+
+             # Add operation context to result
+             result.operation_index = i
+             result.pipeline_sql = op.sql
+             results.append(result)
+
+         except Exception as e:
+             if self.continue_on_error:
+                 # Create error result
+                 error_result = SQLResult(
+                     statement=op.sql, data=[], error=e, operation_index=i, parameters=op.sql.parameters
+                 )
+                 results.append(error_result)
+             else:
+                 if auto_transaction and hasattr(connection, "rollback"):
+                     connection.rollback()
+                 msg = f"Pipeline failed at operation {i}: {e}"
+                 raise PipelineExecutionError(
+                     msg, operation_index=i, partial_results=results, failed_operation=op
+                 ) from e
+
+     def _apply_global_filters(self, filters: "list[StatementFilter]") -> None:
+         """Apply global filters to all operations."""
+         for operation in self._operations:
+             # Add filters to each operation
+             if operation.filters is None:
+                 operation.filters = []
+             operation.filters.extend(filters)
+
+     def _apply_operation_filters(self, sql: SQL, filters: "list[StatementFilter]") -> SQL:
+         """Apply filters to a SQL object."""
+         result = sql
+         for filter_obj in filters:
+             if hasattr(filter_obj, "apply"):
+                 result = cast("Any", filter_obj).apply(result)
+         return result
+
+     def _has_native_support(self) -> bool:
+         """Check if driver has native pipeline support."""
+         return hasattr(self.driver, "_execute_pipeline_native")
+
+     def _process_parameters(self, params: tuple[Any, ...]) -> tuple["list[StatementFilter]", "Optional[Any]"]:
+         """Extract filters and parameters from mixed args.
+
+         Returns:
+             Tuple of (filters, parameters)
+         """
+         filters: list[StatementFilter] = []
+         parameters: list[Any] = []
+
+         for param in params:
+             if isinstance(param, StatementFilter):
+                 filters.append(param)
+             else:
+                 parameters.append(param)
+
+         # Return parameters based on count
+         if not parameters:
+             return filters, None
+         if len(parameters) == 1:
+             return filters, parameters[0]
+         return filters, parameters
+
+     @property
+     def operations(self) -> "list[PipelineOperation]":
+         """Get the current list of queued operations."""
+         return self._operations.copy()
+
+
+ class AsyncPipeline:
+     """Asynchronous pipeline with identical structure to Pipeline."""
+
+     def __init__(
+         self,
+         driver: "AsyncDriverAdapterProtocol[Any, Any]",
+         isolation_level: "Optional[str]" = None,
+         continue_on_error: bool = False,
+         max_operations: int = 1000,
+         options: "Optional[dict[str, Any]]" = None,
+     ) -> None:
+         self.driver = driver
+         self.isolation_level = isolation_level
+         self.continue_on_error = continue_on_error
+         self.max_operations = max_operations
+         self.options = options or {}
+         self._operations: list[PipelineOperation] = []
+         self._results: Optional[list[SQLResult[Any]]] = None
+         self._simulation_logged = False
+
+     async def add_execute(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "AsyncPipeline":
+         """Add an execute operation to the async pipeline."""
+         self._operations.append(
+             PipelineOperation(
+                 sql=SQL(statement, *parameters, _config=self.driver.config, **kwargs), operation_type="execute"
+             )
+         )
+
+         # Check for auto-flush
+         if len(self._operations) >= self.max_operations:
+             logger.warning("Async pipeline auto-flushing at %s operations", len(self._operations))
+             await self.process()
+
+         return self
+
+     async def add_select(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "AsyncPipeline":
+         """Add a select operation to the async pipeline."""
+         self._operations.append(
+             PipelineOperation(
+                 sql=SQL(statement, *parameters, _config=self.driver.config, **kwargs), operation_type="select"
+             )
+         )
+         return self
+
+     async def add_execute_many(
+         self, statement: "Union[str, SQL]", /, *parameters: "Union[StatementParameters, StatementFilter]", **kwargs: Any
+     ) -> "AsyncPipeline":
+         """Add batch execution to the async pipeline."""
+         # First parameter should be the batch data
+         if not parameters or not isinstance(parameters[0], (list, tuple)):
+             msg = "execute_many requires a sequence of parameter sets as first parameter"
+             raise ValueError(msg)
+
+         batch_params = parameters[0]
+         # Convert tuple to list if needed
+         if isinstance(batch_params, tuple):
+             batch_params = list(batch_params)
+         # Create SQL object and mark as many, passing remaining args as filters
+         sql_obj = SQL(statement, *parameters[1:], **kwargs).as_many(batch_params)
+
+         self._operations.append(PipelineOperation(sql=sql_obj, operation_type="execute_many"))
+         return self
+
+     async def add_execute_script(
+         self, script: "Union[str, SQL]", *filters: StatementFilter, **kwargs: Any
+     ) -> "AsyncPipeline":
+         """Add a script to the async pipeline."""
+         if isinstance(script, SQL):
+             sql_obj = script.as_script()
+         else:
+             sql_obj = SQL(script, *filters, _config=self.driver.config, **kwargs).as_script()
+
+         self._operations.append(PipelineOperation(sql=sql_obj, operation_type="execute_script"))
+         return self
+
+     async def process(self, filters: "Optional[list[StatementFilter]]" = None) -> "list[SQLResult]":
+         """Execute all queued operations asynchronously."""
+         if not self._operations:
+             return []
+
+         # Check for native support
+         if hasattr(self.driver, "_execute_pipeline_native"):
+             results = await cast("Any", self.driver)._execute_pipeline_native(self._operations, **self.options)
+         else:
+             results = await self._execute_pipeline_simulated()
+
+         self._results = results
+         self._operations.clear()
+         return cast("list[SQLResult]", results)
+
+     async def _execute_pipeline_simulated(self) -> "list[SQLResult]":
+         """Async version of simulated pipeline execution."""
+         results: list[SQLResult[Any]] = []
+         connection = None
+         auto_transaction = False
+
+         if not self._simulation_logged:
+             logger.info(
+                 "%s using simulated async pipeline. Native support: %s",
+                 self.driver.__class__.__name__,
+                 self._has_native_support(),
+             )
+             self._simulation_logged = True
+
+         try:
+             connection = self.driver._connection()
+
+             if hasattr(connection, "in_transaction") and not connection.in_transaction():
+                 if hasattr(connection, "begin"):
+                     await connection.begin()
+                     auto_transaction = True
+
+             # Process each operation
+             for i, op in enumerate(self._operations):
+                 await self._execute_single_operation_async(i, op, results, connection, auto_transaction)
+
+             if auto_transaction and hasattr(connection, "commit"):
+                 await connection.commit()
+
+         except Exception as e:
+             if connection and auto_transaction and hasattr(connection, "rollback"):
+                 await connection.rollback()
+             if not isinstance(e, PipelineExecutionError):
+                 msg = f"Async pipeline execution failed: {e}"
+                 raise PipelineExecutionError(msg) from e
+             raise
+
+         return results
+
+     async def _execute_single_operation_async(
+         self, i: int, op: PipelineOperation, results: "list[SQLResult[Any]]", connection: Any, auto_transaction: bool
+     ) -> None:
+         """Execute a single async pipeline operation with error handling."""
+         try:
+             result: SQLResult[Any]
+             if op.operation_type == "execute_script":
+                 result = await self.driver.execute_script(op.sql, _connection=connection)
+             elif op.operation_type == "execute_many":
+                 result = await self.driver.execute_many(op.sql, _connection=connection)
+             else:
+                 result = await self.driver.execute(op.sql, _connection=connection)
+
+             result.operation_index = i
+             result.pipeline_sql = op.sql
+             results.append(result)
+
+         except Exception as e:
+             if self.continue_on_error:
+                 error_result = SQLResult(
+                     statement=op.sql, data=[], error=e, operation_index=i, parameters=op.sql.parameters
+                 )
+                 results.append(error_result)
+             else:
+                 if auto_transaction and hasattr(connection, "rollback"):
+                     await connection.rollback()
+                 msg = f"Async pipeline failed at operation {i}: {e}"
+                 raise PipelineExecutionError(
+                     msg, operation_index=i, partial_results=results, failed_operation=op
+                 ) from e
+
+     def _has_native_support(self) -> bool:
+         """Check if driver has native pipeline support."""
+         return hasattr(self.driver, "_execute_pipeline_native")
+
+     @property
+     def operations(self) -> "list[PipelineOperation]":
+         """Get the current list of queued operations."""
+         return self._operations.copy()
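
Note: for orientation, here is a minimal usage sketch of the fluent API this file adds. It is a sketch under assumptions, not part of the diff: `driver` stands for an already-constructed sync driver whose class includes SyncPipelinedExecutionMixin and exposes a `config` attribute, and adapter-specific setup is omitted.

# Hedged sketch -- `driver` is assumed to exist (adapter-specific setup not shown).
pipeline = driver.pipeline(continue_on_error=True)
pipeline.add_execute("INSERT INTO users (name) VALUES (?)", "alice")
pipeline.add_execute_many("INSERT INTO events (kind) VALUES (?)", [("login",), ("logout",)])
pipeline.add_select("SELECT count(*) AS n FROM users")

results = pipeline.process()  # runs natively or via the simulated path shown above
for result in results:
    error = getattr(result, "error", None)  # set on failed operations when continue_on_error=True
    if error is not None:
        print("operation", result.operation_index, "failed:", error)
    else:
        print("operation", result.operation_index, "returned", result.data)
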
sqlspec/driver/mixins/_result_utils.py (new file)
@@ -0,0 +1,122 @@
+ """Result conversion utilities for unified storage architecture.
+
+ This module contains the result conversion functionality integrated with the unified
+ storage architecture.
+ """
+
+ import datetime
+ from collections.abc import Sequence
+ from enum import Enum
+ from functools import partial
+ from pathlib import Path, PurePath
+ from typing import Any, Callable, Optional, Union, cast, overload
+ from uuid import UUID
+
+ from sqlspec.exceptions import SQLSpecError, wrap_exceptions
+ from sqlspec.typing import (
+     ModelDTOT,
+     ModelT,
+     convert,
+     get_type_adapter,
+     is_dataclass,
+     is_msgspec_struct,
+     is_pydantic_model,
+ )
+
+ __all__ = ("_DEFAULT_TYPE_DECODERS", "ToSchemaMixin", "_default_msgspec_deserializer")
+
+
+ _DEFAULT_TYPE_DECODERS: list[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]] = [
+     (lambda x: x is UUID, lambda t, v: t(v.hex)),
+     (lambda x: x is datetime.datetime, lambda t, v: t(v.isoformat())),
+     (lambda x: x is datetime.date, lambda t, v: t(v.isoformat())),
+     (lambda x: x is datetime.time, lambda t, v: t(v.isoformat())),
+     (lambda x: x is Enum, lambda t, v: t(v.value)),
+ ]
+
+
+ def _default_msgspec_deserializer(
+     target_type: Any, value: Any, type_decoders: "Optional[Sequence[tuple[Any, Any]]]" = None
+ ) -> Any:
+     """Default msgspec deserializer with type conversion support."""
+     if type_decoders:
+         for predicate, decoder in type_decoders:
+             if predicate(target_type):
+                 return decoder(target_type, value)
+     if target_type is UUID and isinstance(value, UUID):
+         return value.hex
+     if target_type in {datetime.datetime, datetime.date, datetime.time}:
+         with wrap_exceptions(suppress=AttributeError):
+             return value.isoformat()
+     if isinstance(target_type, type) and issubclass(target_type, Enum) and isinstance(value, Enum):
+         return value.value
+     if isinstance(value, target_type):
+         return value
+     if issubclass(target_type, (Path, PurePath, UUID)):
+         return target_type(value)
+     return value
+
+
+ class ToSchemaMixin:
+     __slots__ = ()
+
+     @overload
+     @staticmethod
+     def to_schema(data: "ModelT", *, schema_type: None = None) -> "ModelT": ...
+     @overload
+     @staticmethod
+     def to_schema(data: "dict[str, Any]", *, schema_type: "type[ModelDTOT]") -> "ModelDTOT": ...
+     @overload
+     @staticmethod
+     def to_schema(data: "Sequence[ModelT]", *, schema_type: None = None) -> "Sequence[ModelT]": ...
+     @overload
+     @staticmethod
+     def to_schema(data: "Sequence[dict[str, Any]]", *, schema_type: "type[ModelDTOT]") -> "Sequence[ModelDTOT]": ...
+
+     @staticmethod
+     def to_schema(
+         data: "Union[ModelT, dict[str, Any], Sequence[ModelT], Sequence[dict[str, Any]]]",
+         *,
+         schema_type: "Optional[type[ModelDTOT]]" = None,
+     ) -> "Union[ModelT, ModelDTOT, Sequence[ModelT], Sequence[ModelDTOT]]":
+         """Convert data to a specified schema type."""
+         if schema_type is None:
+             if not isinstance(data, Sequence):
+                 return cast("ModelT", data)
+             return cast("Sequence[ModelT]", data)
+         if is_dataclass(schema_type):
+             if not isinstance(data, Sequence):
+                 return cast("ModelDTOT", schema_type(**data))  # type: ignore[operator]
+             return cast("Sequence[ModelDTOT]", [schema_type(**item) for item in data])  # type: ignore[operator]
+         if is_msgspec_struct(schema_type):
+             if not isinstance(data, Sequence):
+                 return cast(
+                     "ModelDTOT",
+                     convert(
+                         obj=data,
+                         type=schema_type,
+                         from_attributes=True,
+                         dec_hook=partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS),
+                     ),
+                 )
+             return cast(
+                 "Sequence[ModelDTOT]",
+                 convert(
+                     obj=data,
+                     type=list[schema_type],  # type: ignore[valid-type] # pyright: ignore
+                     from_attributes=True,
+                     dec_hook=partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS),
+                 ),
+             )
+         if schema_type is not None and is_pydantic_model(schema_type):
+             if not isinstance(data, Sequence):
+                 return cast(
+                     "ModelDTOT",
+                     get_type_adapter(schema_type).validate_python(data, from_attributes=True),  # pyright: ignore
+                 )
+             return cast(
+                 "Sequence[ModelDTOT]",
+                 get_type_adapter(list[schema_type]).validate_python(data, from_attributes=True),  # type: ignore[valid-type] # pyright: ignore
+             )
+         msg = "`schema_type` should be a valid Dataclass, Pydantic model or Msgspec struct"
+         raise SQLSpecError(msg)
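
Note: a quick sketch of the conversion path above, mapping plain row dictionaries onto a dataclass via ToSchemaMixin.to_schema. The `User` dataclass is an illustrative assumption, and in practice the mixin is inherited by result/driver classes rather than imported and called statically like this.

from dataclasses import dataclass

from sqlspec.driver.mixins._result_utils import ToSchemaMixin

@dataclass
class User:  # illustrative schema type, not part of sqlspec
    id: int
    name: str

rows = [{"id": 1, "name": "alice"}, {"id": 2, "name": "bob"}]

users = ToSchemaMixin.to_schema(rows, schema_type=User)     # list of dicts -> list of User
first = ToSchemaMixin.to_schema(rows[0], schema_type=User)  # single dict -> single User
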
sqlspec/driver/mixins/_sql_translator.py (new file)
@@ -0,0 +1,35 @@
+ from sqlglot import exp, parse_one
+ from sqlglot.dialects.dialect import DialectType
+
+ from sqlspec.exceptions import SQLConversionError
+ from sqlspec.statement.sql import SQL, Statement
+
+ __all__ = ("SQLTranslatorMixin",)
+
+
+ class SQLTranslatorMixin:
+     """Mixin for drivers supporting SQL translation."""
+
+     __slots__ = ()
+
+     def convert_to_dialect(self, statement: "Statement", to_dialect: DialectType = None, pretty: bool = True) -> str:
+         parsed_expression: exp.Expression
+         if statement is not None and isinstance(statement, SQL):
+             if statement.expression is None:
+                 msg = "Statement could not be parsed"
+                 raise SQLConversionError(msg)
+             parsed_expression = statement.expression
+         elif isinstance(statement, exp.Expression):
+             parsed_expression = statement
+         else:
+             try:
+                 parsed_expression = parse_one(statement, dialect=self.dialect)  # type: ignore[attr-defined]
+             except Exception as e:
+                 error_msg = f"Failed to parse SQL statement: {e!s}"
+                 raise SQLConversionError(error_msg) from e
+         target_dialect = to_dialect if to_dialect is not None else self.dialect  # type: ignore[attr-defined]
+         try:
+             return parsed_expression.sql(dialect=target_dialect, pretty=pretty)
+         except Exception as e:
+             error_msg = f"Failed to convert SQL expression to {target_dialect}: {e!s}"
+             raise SQLConversionError(error_msg) from e
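
Note: a small sketch of the translation mixin in isolation. Real drivers inherit SQLTranslatorMixin alongside their adapter base and supply their own `dialect`; the stand-in class below exists only to make the example self-contained.

from sqlspec.driver.mixins._sql_translator import SQLTranslatorMixin

class _DialectStub(SQLTranslatorMixin):  # stand-in; real drivers define `dialect` themselves
    dialect = "sqlite"

translator = _DialectStub()
# Parses with the source dialect, then re-emits in the requested sqlglot dialect (pretty-printed by default).
print(translator.convert_to_dialect("SELECT * FROM users LIMIT 5", to_dialect="duckdb"))
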