sqlspec 0.19.0__py3-none-any.whl → 0.21.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/adapters/adbc/driver.py +192 -28
- sqlspec/adapters/asyncmy/driver.py +72 -15
- sqlspec/adapters/asyncpg/config.py +23 -3
- sqlspec/adapters/asyncpg/driver.py +30 -14
- sqlspec/adapters/bigquery/driver.py +79 -9
- sqlspec/adapters/duckdb/driver.py +39 -56
- sqlspec/adapters/oracledb/driver.py +99 -52
- sqlspec/adapters/psqlpy/driver.py +89 -31
- sqlspec/adapters/psycopg/driver.py +11 -23
- sqlspec/adapters/sqlite/driver.py +77 -8
- sqlspec/base.py +11 -11
- sqlspec/builder/__init__.py +1 -1
- sqlspec/builder/_base.py +4 -5
- sqlspec/builder/_column.py +3 -3
- sqlspec/builder/_ddl.py +5 -1
- sqlspec/builder/_delete.py +5 -6
- sqlspec/builder/_insert.py +6 -7
- sqlspec/builder/_merge.py +5 -5
- sqlspec/builder/_parsing_utils.py +3 -3
- sqlspec/builder/_select.py +6 -5
- sqlspec/builder/_update.py +4 -5
- sqlspec/builder/mixins/_cte_and_set_ops.py +5 -1
- sqlspec/builder/mixins/_delete_operations.py +5 -1
- sqlspec/builder/mixins/_insert_operations.py +5 -1
- sqlspec/builder/mixins/_join_operations.py +5 -0
- sqlspec/builder/mixins/_merge_operations.py +5 -1
- sqlspec/builder/mixins/_order_limit_operations.py +5 -1
- sqlspec/builder/mixins/_pivot_operations.py +4 -1
- sqlspec/builder/mixins/_select_operations.py +5 -1
- sqlspec/builder/mixins/_update_operations.py +5 -1
- sqlspec/builder/mixins/_where_clause.py +5 -1
- sqlspec/config.py +15 -15
- sqlspec/core/compiler.py +11 -3
- sqlspec/core/filters.py +30 -9
- sqlspec/core/parameters.py +67 -67
- sqlspec/core/result.py +62 -31
- sqlspec/core/splitter.py +160 -34
- sqlspec/core/statement.py +95 -14
- sqlspec/driver/_common.py +12 -3
- sqlspec/driver/mixins/_result_tools.py +21 -4
- sqlspec/driver/mixins/_sql_translator.py +45 -7
- sqlspec/extensions/aiosql/adapter.py +1 -1
- sqlspec/extensions/litestar/_utils.py +1 -1
- sqlspec/extensions/litestar/config.py +186 -2
- sqlspec/extensions/litestar/handlers.py +21 -0
- sqlspec/extensions/litestar/plugin.py +237 -3
- sqlspec/loader.py +12 -12
- sqlspec/migrations/loaders.py +5 -2
- sqlspec/migrations/utils.py +2 -2
- sqlspec/storage/backends/obstore.py +1 -3
- sqlspec/storage/registry.py +1 -1
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/deprecation.py +6 -0
- sqlspec/utils/fixtures.py +239 -30
- sqlspec/utils/module_loader.py +5 -1
- sqlspec/utils/serializers.py +6 -0
- sqlspec/utils/singleton.py +6 -0
- sqlspec/utils/sync_tools.py +10 -1
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/METADATA +230 -44
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/RECORD +64 -64
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.19.0.dist-info → sqlspec-0.21.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/core/splitter.py
CHANGED

@@ -1,21 +1,16 @@
 """SQL statement splitter with caching and dialect support.
 
-This module provides
-multiple
+This module provides SQL script statement splitting functionality
+with support for multiple SQL dialects and caching for performance.
 
 Components:
-
-
-
-
-
-
-
-- Support for multiple SQL dialects (Oracle, T-SQL, PostgreSQL, MySQL, SQLite, DuckDB, BigQuery)
-- Cached pattern compilation
-- LRU caching for split results
-- Optimized tokenization
-- Complete preservation of split_sql_script function
+    StatementSplitter: Main SQL script splitter with caching
+    DialectConfig: Base class for dialect-specific configurations
+    Token/TokenType: Token representation and classification
+    Caching: Pattern and result caching for performance
+
+Supported dialects include Oracle PL/SQL, T-SQL, PostgreSQL,
+MySQL, SQLite, DuckDB, and BigQuery.
 """
 
 import re

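The rewritten docstring drops the old bullet list but keeps the same entry points. As a minimal usage sketch (not taken from the diff): the signature of split_sql_script shown in the last hunk of this file accepts a script string and an optional dialect name; the "sqlite" dialect string used here is an assumption.

# Minimal sketch, assuming split_sql_script(script, dialect=...) as suggested
# by the hunk header near the end of this file; "sqlite" is an assumed dialect name.
from sqlspec.core.splitter import split_sql_script

script = """
CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);
INSERT INTO users (name) VALUES ('alice');
SELECT * FROM users;
"""

# Returns one entry per statement; repeated calls for the same script can be
# served from the result cache described in the module docstring.
statements = split_sql_script(script, dialect="sqlite")
for stmt in statements:
    print(stmt)
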
@@ -166,7 +161,11 @@ class DialectConfig(ABC):
         return self._max_nesting_depth
 
     def get_all_token_patterns(self) -> list[tuple[TokenType, TokenPattern]]:
-        """
+        """Get the complete ordered list of token patterns for this dialect.
+
+        Returns:
+            List of tuples containing token types and their regex patterns
+        """
         patterns: list[tuple[TokenType, TokenPattern]] = [
             (TokenType.COMMENT_LINE, r"--[^\n]*"),
             (TokenType.COMMENT_BLOCK, r"/\*[\s\S]*?\*/"),

@@ -190,16 +189,36 @@ class DialectConfig(ABC):
         return patterns
 
     def _get_dialect_specific_patterns(self) -> list[tuple[TokenType, TokenPattern]]:
-        """
+        """Get dialect-specific token patterns.
+
+        Returns:
+            List of dialect-specific token patterns
+        """
         return []
 
     @staticmethod
     def is_real_block_ender(tokens: list[Token], current_pos: int) -> bool:  # noqa: ARG004
-        """Check if
+        """Check if END keyword represents an actual block terminator.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Position of END token
+
+        Returns:
+            True if END represents a block terminator, False otherwise
+        """
         return True
 
     def should_delay_semicolon_termination(self, tokens: list[Token], current_pos: int) -> bool:
-        """Check if semicolon termination should be delayed.
+        """Check if semicolon termination should be delayed.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Current position in token list
+
+        Returns:
+            True if termination should be delayed, False otherwise
+        """
         return False
 
 

@@ -237,7 +256,15 @@ class OracleDialectConfig(DialectConfig):
         return self._special_terminators
 
     def should_delay_semicolon_termination(self, tokens: list[Token], current_pos: int) -> bool:
-        """Check if
+        """Check if semicolon termination should be delayed for Oracle slash terminators.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Current position in token list
+
+        Returns:
+            True if termination should be delayed, False otherwise
+        """
         pos = current_pos - 1
         while pos >= 0:
             token = tokens[pos]

@@ -251,7 +278,15 @@ class OracleDialectConfig(DialectConfig):
         return False
 
     def _has_upcoming_slash(self, tokens: list[Token], current_pos: int) -> bool:
-        """Check if there's a
+        """Check if there's a slash terminator on its own line ahead.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Current position in token list
+
+        Returns:
+            True if slash terminator found on its own line, False otherwise
+        """
         pos = current_pos + 1
         found_newline = False
 

@@ -273,7 +308,15 @@ class OracleDialectConfig(DialectConfig):
 
     @staticmethod
     def is_real_block_ender(tokens: list[Token], current_pos: int) -> bool:
-        """Check if
+        """Check if END keyword represents a block terminator in Oracle PL/SQL.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Position of END token
+
+        Returns:
+            True if END represents a block terminator, False otherwise
+        """
         pos = current_pos + 1
         while pos < len(tokens):
             next_token = tokens[pos]

@@ -296,7 +339,18 @@ class OracleDialectConfig(DialectConfig):
 
     @staticmethod
     def _handle_slash_terminator(tokens: list[Token], current_pos: int) -> bool:
-        """
+        """Check if Oracle slash terminator is properly positioned.
+
+        Oracle slash terminators must be on their own line with only
+        whitespace or comments preceding them on the same line.
+
+        Args:
+            tokens: List of all tokens
+            current_pos: Position of slash token
+
+        Returns:
+            True if slash is properly positioned, False otherwise
+        """
         if current_pos == 0:
             return True
 

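These Oracle-specific hooks exist because a PL/SQL block keeps its internal semicolons and is only really terminated by a slash on its own line. A hedged sketch of the kind of script the checks above are written for; the "oracle" dialect string and the expected statement count are assumptions, not taken from the diff.

# Illustrative only: the BEGIN ... END; block should survive as one statement,
# because its inner semicolons must not split it and the lone "/" is the real
# terminator that the delayed-semicolon logic waits for.
from sqlspec.core.splitter import split_sql_script

plsql_script = """
BEGIN
    INSERT INTO logs (msg) VALUES ('first');
    INSERT INTO logs (msg) VALUES ('second');
END;
/
SELECT COUNT(*) FROM logs;
"""

statements = split_sql_script(plsql_script, dialect="oracle")
print(len(statements))  # expected shape (assumption): the whole block plus the SELECT
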
@@ -374,12 +428,29 @@ class PostgreSQLDialectConfig(DialectConfig):
         return self._statement_terminators
 
     def _get_dialect_specific_patterns(self) -> list[tuple[TokenType, TokenPattern]]:
-        """
+        """Get PostgreSQL-specific token patterns.
+
+        Returns:
+            List of dialect-specific token patterns
+        """
         return [(TokenType.STRING_LITERAL, self._handle_dollar_quoted_string)]
 
     @staticmethod
     def _handle_dollar_quoted_string(text: str, position: int, line: int, column: int) -> Optional[Token]:
-        """Handle PostgreSQL dollar-quoted
+        """Handle PostgreSQL dollar-quoted string literals.
+
+        Parses dollar-quoted strings in the format $tag$content$tag$
+        where tag is optional.
+
+        Args:
+            text: The full SQL text being tokenized
+            position: Current position in the text
+            line: Current line number
+            column: Current column number
+
+        Returns:
+            Token representing the dollar-quoted string, or None if no match
+        """
         start_match = re.match(r"\$([a-zA-Z_][a-zA-Z0-9_]*)?\$", text[position:])
         if not start_match:
             return None

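The opening-delimiter pattern shown above can be exercised on its own. This standalone snippet uses only the regex literally present in the diff and does not call into sqlspec.

import re

# Pattern copied from _handle_dollar_quoted_string: an optional tag between
# two dollar signs, e.g. $$ or $body$.
DOLLAR_QUOTE_START = re.compile(r"\$([a-zA-Z_][a-zA-Z0-9_]*)?\$")

sql = "CREATE FUNCTION f() RETURNS int AS $body$ SELECT 1; $body$ LANGUAGE sql;"

match = DOLLAR_QUOTE_START.search(sql)
print(match.group(0))  # "$body$"
print(match.group(1))  # "body" (None for anonymous $$ quoting)

# Why it matters for splitting: the semicolon inside $body$ ... $body$ is part
# of the string literal, so the splitter must not treat it as a terminator.
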
@@ -548,7 +619,11 @@ _cache_lock = threading.Lock()
 
 
 def _get_pattern_cache() -> UnifiedCache:
-    """Get or create the pattern compilation cache.
+    """Get or create the global pattern compilation cache.
+
+    Returns:
+        The pattern cache instance
+    """
     global _pattern_cache
     if _pattern_cache is None:
         with _cache_lock:

@@ -558,7 +633,11 @@ def _get_pattern_cache() -> UnifiedCache:
 
 
 def _get_result_cache() -> UnifiedCache:
-    """Get or create the result cache.
+    """Get or create the global result cache.
+
+    Returns:
+        The result cache instance
+    """
     global _result_cache
     if _result_cache is None:
         with _cache_lock:

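Both cache getters follow the same lazy, lock-guarded initialization shape. A generic, self-contained sketch of that pattern, with UnifiedCache swapped for a plain dict so it runs without sqlspec; the re-check after acquiring the lock is the usual completion of the visible prefix and is assumed here.

import threading
from typing import Optional

_cache: Optional[dict] = None
_cache_lock = threading.Lock()


def get_cache() -> dict:
    """Create the shared cache on first use; later calls return the same object."""
    global _cache
    if _cache is None:              # fast path, no lock taken
        with _cache_lock:           # slow path, serialize creation
            if _cache is None:      # re-check after acquiring the lock (assumed)
                _cache = {}
    return _cache
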
@@ -574,7 +653,12 @@ class StatementSplitter:
     __slots__ = SPLITTER_SLOTS
 
     def __init__(self, dialect: DialectConfig, strip_trailing_semicolon: bool = False) -> None:
-        """Initialize the splitter
+        """Initialize the statement splitter.
+
+        Args:
+            dialect: The SQL dialect configuration to use
+            strip_trailing_semicolon: Whether to remove trailing semicolons from statements
+        """
         self._dialect = dialect
         self._strip_trailing_semicolon = strip_trailing_semicolon
         self._token_patterns = dialect.get_all_token_patterns()

@@ -587,7 +671,11 @@ class StatementSplitter:
         self._compiled_patterns = self._get_or_compile_patterns()
 
     def _get_or_compile_patterns(self) -> list[tuple[TokenType, CompiledTokenPattern]]:
-        """Get compiled patterns from cache or compile and cache them.
+        """Get compiled regex patterns from cache or compile and cache them.
+
+        Returns:
+            List of compiled token patterns with their types
+        """
         cache_key = CacheKey(("pattern", self._pattern_cache_key))
 
         cached_patterns = self._pattern_cache.get(cache_key)

@@ -605,7 +693,14 @@ class StatementSplitter:
         return compiled
 
     def _tokenize(self, sql: str) -> Generator[Token, None, None]:
-        """Tokenize SQL string.
+        """Tokenize SQL string into Token objects.
+
+        Args:
+            sql: The SQL string to tokenize
+
+        Yields:
+            Token objects representing the lexical elements
+        """
         pos = 0
         line = 1
         line_start = 0

@@ -650,7 +745,14 @@ class StatementSplitter:
             pos += 1
 
     def split(self, sql: str) -> list[str]:
-        """Split SQL script
+        """Split SQL script into individual statements.
+
+        Args:
+            sql: The SQL script to split
+
+        Returns:
+            List of individual SQL statements
+        """
         script_hash = hash(sql)
         cache_key = CacheKey(("split", self._dialect.name, script_hash, self._strip_trailing_semicolon))
 

@@ -664,7 +766,14 @@ class StatementSplitter:
         return statements
 
     def _do_split(self, sql: str) -> list[str]:
-        """Perform SQL script splitting.
+        """Perform the actual SQL script splitting logic.
+
+        Args:
+            sql: The SQL script to split
+
+        Returns:
+            List of individual SQL statements
+        """
         statements = []
         current_statement_tokens = []
         current_statement_chars = []

@@ -738,14 +847,28 @@ class StatementSplitter:
 
     @staticmethod
     def _is_plsql_block(tokens: list[Token]) -> bool:
-        """Check if the token list represents a PL/SQL block.
+        """Check if the token list represents a PL/SQL block.
+
+        Args:
+            tokens: List of tokens to examine
+
+        Returns:
+            True if tokens represent a PL/SQL block, False otherwise
+        """
         for token in tokens:
             if token.type == TokenType.KEYWORD:
                 return token.value.upper() in {"BEGIN", "DECLARE"}
         return False
 
     def _contains_executable_content(self, statement: str) -> bool:
-        """Check if a statement contains
+        """Check if a statement contains executable content.
+
+        Args:
+            statement: The SQL statement to check
+
+        Returns:
+            True if statement contains non-whitespace/non-comment content
+        """
         tokens = list(self._tokenize(statement))
 
         for token in tokens:

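The PL/SQL check above is small enough to restate standalone. The sketch below re-implements only the visible logic with a stand-in token type, purely as an illustration; it is not the Token class sqlspec uses.

from typing import NamedTuple


class FakeToken(NamedTuple):
    """Stand-in for sqlspec's Token: only the fields the check reads."""
    type: str   # e.g. "KEYWORD"
    value: str


def is_plsql_block(tokens: list[FakeToken]) -> bool:
    # Mirrors the diff: the first KEYWORD token decides; BEGIN/DECLARE open a block.
    for token in tokens:
        if token.type == "KEYWORD":
            return token.value.upper() in {"BEGIN", "DECLARE"}
    return False


print(is_plsql_block([FakeToken("KEYWORD", "begin")]))   # True
print(is_plsql_block([FakeToken("KEYWORD", "SELECT")]))  # False
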
@@ -793,7 +916,10 @@ def split_sql_script(script: str, dialect: Optional[str] = None, strip_trailing_
 
 
 def clear_splitter_caches() -> None:
-    """Clear all splitter caches
+    """Clear all splitter caches.
+
+    Clears both pattern and result caches to free memory.
+    """
     pattern_cache = _get_pattern_cache()
     result_cache = _get_result_cache()
     pattern_cache.clear()

sqlspec/core/statement.py
CHANGED

@@ -10,6 +10,7 @@ from typing_extensions import TypeAlias
 
 from sqlspec.core.compiler import OperationType, SQLProcessor
 from sqlspec.core.parameters import ParameterConverter, ParameterStyle, ParameterStyleConfig, ParameterValidator
+from sqlspec.exceptions import SQLSpecError
 from sqlspec.typing import Empty, EmptyEnum
 from sqlspec.utils.logging import get_logger
 from sqlspec.utils.type_guards import is_statement_filter, supports_where

@@ -64,7 +65,11 @@ PROCESSED_STATE_SLOTS: Final = (
 
 @mypyc_attr(allow_interpreted_subclasses=False)
 class ProcessedState:
-    """Processing results for SQL statements.
+    """Processing results for SQL statements.
+
+    Contains the compiled SQL, execution parameters, parsed expression,
+    operation type, and validation errors for a processed SQL statement.
+    """
 
     __slots__ = PROCESSED_STATE_SLOTS
     operation_type: "OperationType"

@@ -91,7 +96,12 @@ class ProcessedState:
 
 @mypyc_attr(allow_interpreted_subclasses=False)
 class SQL:
-    """SQL statement with parameter and filter support.
+    """SQL statement with parameter and filter support.
+
+    Represents a SQL statement that can be compiled with parameters and filters.
+    Supports both positional and named parameters, statement filtering,
+    and various execution modes including batch operations.
+    """
 
     __slots__ = (
         "_dialect",

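A hedged usage sketch of the class this docstring now describes. The constructor shape (statement plus positional parameters) matches the SQL(...) call visible in copy() further down; the QMARK placeholder and the returns_rows expectation are assumptions for illustration.

from sqlspec.core.statement import SQL

# Positional parameter with the default QMARK style
# (see get_default_parameter_config() at the end of this file).
stmt = SQL("SELECT id, name FROM users WHERE id = ?", 1)

compiled_sql, execution_parameters = stmt.compile()
print(compiled_sql)
print(execution_parameters)

print(stmt.returns_rows())  # expected True for a SELECT (assumption)

# as_script() returns a copy marked for script execution, per its docstring below.
script = SQL("CREATE TABLE t (x int); INSERT INTO t VALUES (1);").as_script()
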
@@ -153,11 +163,27 @@ class SQL:
     def _create_auto_config(
         self, statement: "Union[str, exp.Expression, 'SQL']", parameters: tuple, kwargs: dict[str, Any]
     ) -> "StatementConfig":
-        """Create
+        """Create default StatementConfig when none provided.
+
+        Args:
+            statement: The SQL statement
+            parameters: Statement parameters
+            kwargs: Additional keyword arguments
+
+        Returns:
+            Default StatementConfig instance
+        """
         return get_default_config()
 
     def _normalize_dialect(self, dialect: "Optional[DialectType]") -> "Optional[str]":
-        """
+        """Convert dialect to string representation.
+
+        Args:
+            dialect: Dialect type or string
+
+        Returns:
+            String representation of the dialect or None
+        """
         if dialect is None:
             return None
         if isinstance(dialect, str):

@@ -165,7 +191,11 @@ class SQL:
             return dialect.__class__.__name__.lower()
 
     def _init_from_sql_object(self, sql_obj: "SQL") -> None:
-        """Initialize from existing SQL object.
+        """Initialize instance attributes from existing SQL object.
+
+        Args:
+            sql_obj: Existing SQL object to copy from
+        """
         self._raw_sql = sql_obj._raw_sql
         self._filters = sql_obj._filters.copy()
         self._named_parameters = sql_obj._named_parameters.copy()

@@ -176,7 +206,14 @@ class SQL:
         self._processed_state = sql_obj._processed_state
 
     def _should_auto_detect_many(self, parameters: tuple) -> bool:
-        """
+        """Detect execute_many mode from parameter structure.
+
+        Args:
+            parameters: Parameter tuple to analyze
+
+        Returns:
+            True if parameters indicate batch execution
+        """
         if len(parameters) == 1 and isinstance(parameters[0], list):
             param_list = parameters[0]
             if param_list and all(isinstance(item, (tuple, list)) for item in param_list):

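The detection rule above is self-contained enough to restate. This standalone helper mirrors only the condition visible in the hunk (the final return in the nested branch is inferred); it is an illustration, not the method itself.

from typing import Any


def should_auto_detect_many(parameters: tuple[Any, ...]) -> bool:
    """True when a single list of row tuples/lists was passed, e.g. ([(1,), (2,)],)."""
    if len(parameters) == 1 and isinstance(parameters[0], list):
        param_list = parameters[0]
        if param_list and all(isinstance(item, (tuple, list)) for item in param_list):
            return True  # inferred branch: everything else falls through to False
    return False


print(should_auto_detect_many(([(1, "a"), (2, "b")],)))  # True  -> batch execution
print(should_auto_detect_many((1, "a")))                 # False -> single execution
print(should_auto_detect_many(([1, 2, 3],)))             # False -> one list parameter
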
@@ -184,7 +221,13 @@ class SQL:
         return False
 
     def _process_parameters(self, *parameters: Any, dialect: Optional[str] = None, **kwargs: Any) -> None:
-        """Process parameters and filters.
+        """Process and organize parameters and filters.
+
+        Args:
+            *parameters: Variable parameters and filters
+            dialect: SQL dialect override
+            **kwargs: Additional named parameters
+        """
         if dialect is not None:
             self._dialect = self._normalize_dialect(dialect)
 

@@ -211,6 +254,9 @@ class SQL:
                 if self._is_many:
                     self._positional_parameters = list(param)
                 else:
+                    # For drivers with native list expansion support, each item in the tuple/list
+                    # should be treated as a separate parameter (but preserve inner lists/arrays)
+                    # This allows passing arrays/lists as single JSONB parameters
                     self._positional_parameters.extend(param)
             else:
                 self._positional_parameters.append(param)

@@ -288,7 +334,11 @@ class SQL:
         return len(self.validation_errors) > 0
 
     def returns_rows(self) -> bool:
-        """Check if statement returns rows.
+        """Check if statement returns rows.
+
+        Returns:
+            True if the SQL statement returns result rows
+        """
         if self._processed_state is Empty:
             self.compile()
         if self._processed_state is Empty:

@@ -324,7 +374,11 @@ class SQL:
         return False
 
     def compile(self) -> tuple[str, Any]:
-        """Compile
+        """Compile SQL statement with parameters.
+
+        Returns:
+            Tuple of compiled SQL string and execution parameters
+        """
         if self._processed_state is Empty:
             try:
                 config = self._statement_config

@@ -341,6 +395,8 @@ class SQL:
                     validation_errors=[],
                     is_many=self._is_many,
                 )
+            except SQLSpecError:
+                raise
             except Exception as e:
                 logger.warning("Processing failed, using fallback: %s", e)
                 self._processed_state = ProcessedState(

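The two added lines change error handling during compilation: sqlspec's own errors now propagate instead of being swallowed by the generic fallback. A generic, self-contained sketch of that ordering; the names other than the ordering itself are placeholders, not sqlspec APIs.

import logging

logger = logging.getLogger(__name__)


class DomainError(Exception):
    """Placeholder for a library-specific error such as SQLSpecError."""


def process(fn):
    try:
        return fn()
    except DomainError:
        # Known, intentional errors are re-raised for the caller to handle.
        raise
    except Exception as exc:  # noqa: BLE001
        # Anything unexpected falls back to a degraded result, as compile() does.
        logger.warning("Processing failed, using fallback: %s", exc)
        return None
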
@@ -353,7 +409,11 @@ class SQL:
         return self._processed_state.compiled_sql, self._processed_state.execution_parameters
 
     def as_script(self) -> "SQL":
-        """
+        """Create copy marked for script execution.
+
+        Returns:
+            New SQL instance configured for script execution
+        """
         original_params = self._original_parameters
         config = self._statement_config
         is_many = self._is_many

@@ -367,7 +427,16 @@ class SQL:
     def copy(
         self, statement: "Optional[Union[str, exp.Expression]]" = None, parameters: Optional[Any] = None, **kwargs: Any
     ) -> "SQL":
-        """Create copy with modifications.
+        """Create copy with modifications.
+
+        Args:
+            statement: New SQL statement to use
+            parameters: New parameters to use
+            **kwargs: Additional modifications
+
+        Returns:
+            New SQL instance with modifications applied
+        """
         new_sql = SQL(
             statement or self._raw_sql,
             *(parameters if parameters is not None else self._original_parameters),

@@ -484,7 +553,11 @@ class SQL:
 
 @mypyc_attr(allow_interpreted_subclasses=False)
 class StatementConfig:
-    """Configuration for SQL statement processing.
+    """Configuration for SQL statement processing.
+
+    Controls SQL parsing, validation, transformations, parameter handling,
+    and other processing options for SQL statements.
+    """
 
     __slots__ = SQL_CONFIG_SLOTS
 

@@ -653,12 +726,20 @@ class StatementConfig:
 
 
 def get_default_config() -> StatementConfig:
-    """Get default statement configuration.
+    """Get default statement configuration.
+
+    Returns:
+        StatementConfig with default settings
+    """
     return StatementConfig()
 
 
 def get_default_parameter_config() -> ParameterStyleConfig:
-    """Get default parameter configuration.
+    """Get default parameter configuration.
+
+    Returns:
+        ParameterStyleConfig with QMARK style as default
+    """
     return ParameterStyleConfig(
         default_parameter_style=ParameterStyle.QMARK, supported_parameter_styles={ParameterStyle.QMARK}
     )

sqlspec/driver/_common.py
CHANGED

@@ -190,6 +190,15 @@ class CommonDriverAttributesMixin:
         """Build SQL statement from various input types.
 
         Ensures dialect is set and preserves existing state when rebuilding SQL objects.
+
+        Args:
+            statement: SQL statement or QueryBuilder to prepare
+            parameters: Parameters for the SQL statement
+            statement_config: Statement configuration
+            kwargs: Additional keyword arguments
+
+        Returns:
+            Prepared SQL statement
         """
         kwargs = kwargs or {}
 

@@ -291,8 +300,8 @@ class CommonDriverAttributesMixin:
     def _format_parameter_set_for_many(self, parameters: Any, statement_config: "StatementConfig") -> Any:
         """Prepare a single parameter set for execute_many operations.
 
-
-
+        Handles parameter sets without converting the structure to array format,
+        applying type coercion to individual values while preserving structure.
 
         Args:
             parameters: Single parameter set (tuple, list, or dict)

@@ -394,7 +403,7 @@ class CommonDriverAttributesMixin:
 
         Args:
             statement: SQL statement to compile
-            statement_config:
+            statement_config: Statement configuration including parameter config and dialect
             flatten_single_parameters: If True, flatten single-element lists for scalar parameters
 
         Returns:

sqlspec/driver/mixins/_result_tools.py
CHANGED

@@ -1,3 +1,5 @@
+"""Result handling and schema conversion mixins for database drivers."""
+
 import datetime
 import logging
 from collections.abc import Sequence

@@ -41,7 +43,16 @@ _DEFAULT_TYPE_DECODERS: Final[list[tuple[Callable[[Any], bool], Callable[[Any, A
 def _default_msgspec_deserializer(
     target_type: Any, value: Any, type_decoders: "Optional[Sequence[tuple[Any, Any]]]" = None
 ) -> Any:
-    """
+    """Convert msgspec types with type decoder support.
+
+    Args:
+        target_type: Type to convert to
+        value: Value to convert
+        type_decoders: Optional sequence of (predicate, decoder) pairs
+
+    Returns:
+        Converted value or original value if conversion not applicable
+    """
     if type_decoders:
         for predicate, decoder in type_decoders:
             if predicate(target_type):

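The type_decoders argument documented above is a sequence of (predicate, decoder) pairs. The exact call the deserializer makes on each decoder is not fully shown in this hunk, so the snippet below only illustrates building such a pair and applying it by hand; the decoder signature used here follows the annotation in the hunk header but is still an assumption.

import datetime
from typing import Any, Callable


def _is_datetime(target_type: Any) -> bool:
    # Predicate: matches the target types this decoder should handle.
    return target_type is datetime.datetime


def _parse_iso(target_type: Any, value: Any) -> Any:
    # Decoder: converts the raw value for matched targets.
    return datetime.datetime.fromisoformat(value) if isinstance(value, str) else value


type_decoders: list[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]] = [
    (_is_datetime, _parse_iso)
]

# Hand-rolled application, mirroring the loop shape visible in the diff.
target, raw = datetime.datetime, "2024-01-01T12:30:00"
for predicate, decoder in type_decoders:
    if predicate(target):
        raw = decoder(target, raw)
print(raw)  # datetime.datetime(2024, 1, 1, 12, 30)
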
@@ -74,6 +85,8 @@ def _default_msgspec_deserializer(
 
 @trait
 class ToSchemaMixin:
+    """Mixin providing data transformation methods for various schema types."""
+
     __slots__ = ()
 
     @overload

@@ -114,11 +127,15 @@ class ToSchemaMixin:
     def to_schema(data: Any, *, schema_type: "Optional[type[ModelDTOT]]" = None) -> Any:
         """Convert data to a specified schema type.
 
-
-
+        Args:
+            data: Input data to convert
+            schema_type: Target schema type for conversion
 
         Returns:
-            Converted data in the specified schema type
+            Converted data in the specified schema type
+
+        Raises:
+            SQLSpecError: If schema_type is not a supported type
         """
         if schema_type is None:
             return data
