sqlspec-0.25.0-py3-none-any.whl → sqlspec-0.26.0-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- sqlspec/_serialization.py +223 -21
- sqlspec/_sql.py +12 -50
- sqlspec/_typing.py +9 -0
- sqlspec/adapters/adbc/config.py +8 -1
- sqlspec/adapters/adbc/data_dictionary.py +290 -0
- sqlspec/adapters/adbc/driver.py +127 -18
- sqlspec/adapters/adbc/type_converter.py +159 -0
- sqlspec/adapters/aiosqlite/config.py +3 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +117 -0
- sqlspec/adapters/aiosqlite/driver.py +17 -3
- sqlspec/adapters/asyncmy/_types.py +1 -1
- sqlspec/adapters/asyncmy/config.py +11 -8
- sqlspec/adapters/asyncmy/data_dictionary.py +122 -0
- sqlspec/adapters/asyncmy/driver.py +31 -7
- sqlspec/adapters/asyncpg/config.py +3 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +134 -0
- sqlspec/adapters/asyncpg/driver.py +19 -4
- sqlspec/adapters/bigquery/config.py +3 -0
- sqlspec/adapters/bigquery/data_dictionary.py +109 -0
- sqlspec/adapters/bigquery/driver.py +21 -3
- sqlspec/adapters/bigquery/type_converter.py +93 -0
- sqlspec/adapters/duckdb/_types.py +1 -1
- sqlspec/adapters/duckdb/config.py +2 -0
- sqlspec/adapters/duckdb/data_dictionary.py +124 -0
- sqlspec/adapters/duckdb/driver.py +32 -5
- sqlspec/adapters/duckdb/pool.py +1 -1
- sqlspec/adapters/duckdb/type_converter.py +103 -0
- sqlspec/adapters/oracledb/config.py +6 -0
- sqlspec/adapters/oracledb/data_dictionary.py +442 -0
- sqlspec/adapters/oracledb/driver.py +63 -9
- sqlspec/adapters/oracledb/migrations.py +51 -67
- sqlspec/adapters/oracledb/type_converter.py +132 -0
- sqlspec/adapters/psqlpy/config.py +3 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +133 -0
- sqlspec/adapters/psqlpy/driver.py +23 -179
- sqlspec/adapters/psqlpy/type_converter.py +73 -0
- sqlspec/adapters/psycopg/config.py +6 -0
- sqlspec/adapters/psycopg/data_dictionary.py +257 -0
- sqlspec/adapters/psycopg/driver.py +40 -5
- sqlspec/adapters/sqlite/config.py +3 -0
- sqlspec/adapters/sqlite/data_dictionary.py +117 -0
- sqlspec/adapters/sqlite/driver.py +18 -3
- sqlspec/adapters/sqlite/pool.py +13 -4
- sqlspec/builder/_base.py +82 -42
- sqlspec/builder/_column.py +57 -24
- sqlspec/builder/_ddl.py +84 -34
- sqlspec/builder/_insert.py +30 -52
- sqlspec/builder/_parsing_utils.py +104 -8
- sqlspec/builder/_select.py +147 -2
- sqlspec/builder/mixins/_cte_and_set_ops.py +1 -2
- sqlspec/builder/mixins/_join_operations.py +14 -30
- sqlspec/builder/mixins/_merge_operations.py +167 -61
- sqlspec/builder/mixins/_order_limit_operations.py +3 -10
- sqlspec/builder/mixins/_select_operations.py +3 -9
- sqlspec/builder/mixins/_update_operations.py +3 -22
- sqlspec/builder/mixins/_where_clause.py +4 -10
- sqlspec/cli.py +246 -140
- sqlspec/config.py +33 -19
- sqlspec/core/cache.py +2 -2
- sqlspec/core/compiler.py +56 -1
- sqlspec/core/parameters.py +7 -3
- sqlspec/core/statement.py +5 -0
- sqlspec/core/type_conversion.py +234 -0
- sqlspec/driver/__init__.py +6 -3
- sqlspec/driver/_async.py +106 -3
- sqlspec/driver/_common.py +156 -4
- sqlspec/driver/_sync.py +106 -3
- sqlspec/exceptions.py +5 -0
- sqlspec/migrations/__init__.py +4 -3
- sqlspec/migrations/base.py +153 -14
- sqlspec/migrations/commands.py +34 -96
- sqlspec/migrations/context.py +145 -0
- sqlspec/migrations/loaders.py +25 -8
- sqlspec/migrations/runner.py +352 -82
- sqlspec/typing.py +2 -0
- sqlspec/utils/config_resolver.py +153 -0
- sqlspec/utils/serializers.py +50 -2
- {sqlspec-0.25.0.dist-info → sqlspec-0.26.0.dist-info}/METADATA +1 -1
- sqlspec-0.26.0.dist-info/RECORD +157 -0
- sqlspec-0.25.0.dist-info/RECORD +0 -139
- {sqlspec-0.25.0.dist-info → sqlspec-0.26.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.26.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.26.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.26.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/_serialization.py
CHANGED
@@ -1,11 +1,28 @@
+"""Enhanced serialization module with byte-aware encoding and class-based architecture.
+
+Provides a Protocol-based serialization system that users can extend.
+Supports msgspec, orjson, and standard library JSON with automatic fallback.
+"""
+
+import contextlib
 import datetime
 import enum
-
+import json
+from abc import ABC, abstractmethod
+from typing import Any, Final, Literal, Optional, Protocol, Union, overload

-from sqlspec.typing import PYDANTIC_INSTALLED, BaseModel
+from sqlspec.typing import MSGSPEC_INSTALLED, ORJSON_INSTALLED, PYDANTIC_INSTALLED, BaseModel


 def _type_to_string(value: Any) -> str:  # pragma: no cover
+    """Convert special types to strings for JSON serialization.
+
+    Args:
+        value: Value to convert.
+
+    Returns:
+        String representation of the value.
+    """
     if isinstance(value, datetime.datetime):
         return convert_datetime_to_gmt_iso(value)
     if isinstance(value, datetime.date):
@@ -20,35 +37,206 @@ def _type_to_string(value: Any) -> str:  # pragma: no cover
         raise TypeError from exc


-
-
+class JSONSerializer(Protocol):
+    """Protocol for JSON serialization implementations.

-
-
+    Users can implement this protocol to create custom serializers.
+    """

-def
-
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data to JSON.

-
-
-
+        Args:
+            data: Data to encode.
+            as_bytes: Whether to return bytes instead of string.
+
+        Returns:
+            JSON string or bytes depending on as_bytes parameter.
+        """
+        ...
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode from JSON.
+
+        Args:
+            data: JSON string or bytes to decode.
+            decode_bytes: Whether to decode bytes input.
+
+        Returns:
+            Decoded Python object.
+        """
+        ...
+
+
+class BaseJSONSerializer(ABC):
+    """Base class for JSON serializers with common functionality."""
+
+    __slots__ = ()
+
+    @abstractmethod
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data to JSON."""
+        ...
+
+    @abstractmethod
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode from JSON."""
+        ...
+
+
+class MsgspecSerializer(BaseJSONSerializer):
+    """Msgspec-based JSON serializer for optimal performance."""
+
+    __slots__ = ("_decoder", "_encoder")
+
+    def __init__(self) -> None:
+        """Initialize msgspec encoder and decoder."""
+        from msgspec.json import Decoder, Encoder
+
+        self._encoder: Final[Encoder] = Encoder(enc_hook=_type_to_string)
+        self._decoder: Final[Decoder] = Decoder()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using msgspec."""
+        try:
+            if as_bytes:
+                return self._encoder.encode(data)
+            return self._encoder.encode(data).decode("utf-8")
+        except (TypeError, ValueError):
+            if ORJSON_INSTALLED:
+                return OrjsonSerializer().encode(data, as_bytes=as_bytes)
+            return StandardLibSerializer().encode(data, as_bytes=as_bytes)
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using msgspec."""
+        if isinstance(data, bytes):
+            if decode_bytes:
+                try:
+                    return self._decoder.decode(data)
+                except (TypeError, ValueError):
+                    if ORJSON_INSTALLED:
+                        return OrjsonSerializer().decode(data, decode_bytes=decode_bytes)
+                    return StandardLibSerializer().decode(data, decode_bytes=decode_bytes)
+            return data
+
+        try:
+            return self._decoder.decode(data.encode("utf-8"))
+        except (TypeError, ValueError):
+            if ORJSON_INSTALLED:
+                return OrjsonSerializer().decode(data, decode_bytes=decode_bytes)
+            return StandardLibSerializer().decode(data, decode_bytes=decode_bytes)
+
+
+class OrjsonSerializer(BaseJSONSerializer):
+    """Orjson-based JSON serializer with native datetime/UUID support."""
+
+    __slots__ = ()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using orjson."""
+        from orjson import (
             OPT_NAIVE_UTC,  # pyright: ignore[reportUnknownVariableType]
             OPT_SERIALIZE_NUMPY,  # pyright: ignore[reportUnknownVariableType]
             OPT_SERIALIZE_UUID,  # pyright: ignore[reportUnknownVariableType]
         )
-    from orjson import dumps as
-
+        from orjson import dumps as _orjson_dumps  # pyright: ignore[reportMissingImports]
+
+        result = _orjson_dumps(
+            data, default=_type_to_string, option=OPT_SERIALIZE_NUMPY | OPT_NAIVE_UTC | OPT_SERIALIZE_UUID
+        )
+        return result if as_bytes else result.decode("utf-8")
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using orjson."""
+        from orjson import loads as _orjson_loads  # pyright: ignore[reportMissingImports]
+
+        if isinstance(data, bytes):
+            if decode_bytes:
+                return _orjson_loads(data)
+            return data
+        return _orjson_loads(data)

-    def encode_json(data: Any) -> str:  # pragma: no cover
-        return _encode_json(
-            data, default=_type_to_string, option=OPT_SERIALIZE_NUMPY | OPT_NAIVE_UTC | OPT_SERIALIZE_UUID
-        ).decode("utf-8")

-
-
-    from json import loads as decode_json  # type: ignore[assignment]
+class StandardLibSerializer(BaseJSONSerializer):
+    """Standard library JSON serializer as fallback."""

-
+    __slots__ = ()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using standard library json."""
+        json_str = json.dumps(data, default=_type_to_string)
+        return json_str.encode("utf-8") if as_bytes else json_str
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using standard library json."""
+        if isinstance(data, bytes):
+            if decode_bytes:
+                return json.loads(data.decode("utf-8"))
+            return data
+        return json.loads(data)
+
+
+_default_serializer: Optional[JSONSerializer] = None
+
+
+def get_default_serializer() -> JSONSerializer:
+    """Get the default serializer based on available libraries.
+
+    Priority: msgspec > orjson > stdlib
+
+    Returns:
+        The best available JSON serializer.
+    """
+    global _default_serializer
+
+    if _default_serializer is None:
+        if MSGSPEC_INSTALLED:
+            with contextlib.suppress(ImportError):
+                _default_serializer = MsgspecSerializer()
+
+        if _default_serializer is None and ORJSON_INSTALLED:
+            with contextlib.suppress(ImportError):
+                _default_serializer = OrjsonSerializer()
+
+        if _default_serializer is None:
+            _default_serializer = StandardLibSerializer()
+
+    assert _default_serializer is not None
+    return _default_serializer
+
+
+@overload
+def encode_json(data: Any, *, as_bytes: Literal[False] = ...) -> str: ...  # pragma: no cover
+
+
+@overload
+def encode_json(data: Any, *, as_bytes: Literal[True]) -> bytes: ...  # pragma: no cover
+
+
+def encode_json(data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+    """Encode to JSON, optionally returning bytes for optimal performance.
+
+    Args:
+        data: The data to encode.
+        as_bytes: Whether to return bytes instead of string.
+
+    Returns:
+        JSON string or bytes depending on as_bytes parameter.
+    """
+    return get_default_serializer().encode(data, as_bytes=as_bytes)
+
+
+def decode_json(data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+    """Decode from JSON string or bytes efficiently.
+
+    Args:
+        data: JSON string or bytes to decode.
+        decode_bytes: Whether to decode bytes input.
+
+    Returns:
+        Decoded Python object.
+    """
+    return get_default_serializer().decode(data, decode_bytes=decode_bytes)


 def convert_datetime_to_gmt_iso(dt: datetime.datetime) -> str:  # pragma: no cover
@@ -75,3 +263,17 @@ def convert_date_to_iso(dt: datetime.date) -> str:  # pragma: no cover
         The ISO formatted date string.
     """
     return dt.isoformat()
+
+
+__all__ = (
+    "BaseJSONSerializer",
+    "JSONSerializer",
+    "MsgspecSerializer",
+    "OrjsonSerializer",
+    "StandardLibSerializer",
+    "convert_date_to_iso",
+    "convert_datetime_to_gmt_iso",
+    "decode_json",
+    "encode_json",
+    "get_default_serializer",
+)
sqlspec/_sql.py
CHANGED
@@ -40,6 +40,7 @@ from sqlspec.builder._expression_wrappers import (
     MathExpression,
     StringExpression,
 )
+from sqlspec.builder._parsing_utils import extract_expression, to_expression
 from sqlspec.builder.mixins._join_operations import JoinBuilder
 from sqlspec.builder.mixins._select_operations import Case, SubqueryBuilder, WindowFunctionBuilder
 from sqlspec.core.statement import SQL
@@ -746,7 +747,7 @@ class SQLFactory:
         if isinstance(column, str) and column == "*":
             expr = exp.Count(this=exp.Star(), distinct=distinct)
         else:
-            col_expr =
+            col_expr = extract_expression(column)
             expr = exp.Count(this=col_expr, distinct=distinct)
         return AggregateExpression(expr)

@@ -774,7 +775,7 @@ class SQLFactory:
         Returns:
             SUM expression.
         """
-        col_expr =
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Sum(this=col_expr, distinct=distinct))

     @staticmethod
@@ -787,7 +788,7 @@ class SQLFactory:
         Returns:
             AVG expression.
         """
-        col_expr =
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Avg(this=col_expr))

     @staticmethod
@@ -800,7 +801,7 @@ class SQLFactory:
         Returns:
             MAX expression.
         """
-        col_expr =
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Max(this=col_expr))

     @staticmethod
@@ -813,7 +814,7 @@ class SQLFactory:
         Returns:
             MIN expression.
         """
-        col_expr =
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Min(this=col_expr))

     @staticmethod
@@ -1034,45 +1035,6 @@ class SQLFactory:
             return FunctionExpression(value)
         return FunctionExpression(exp.convert(value))

-    @staticmethod
-    def _to_expression(value: Any) -> exp.Expression:
-        """Convert a Python value to a raw SQLGlot expression.
-
-        Args:
-            value: Python value or SQLGlot expression to convert.
-
-        Returns:
-            Raw SQLGlot expression.
-        """
-        if isinstance(value, exp.Expression):
-            return value
-        return exp.convert(value)
-
-    @staticmethod
-    def _extract_expression(value: Any) -> exp.Expression:
-        """Extract SQLGlot expression from value, handling our wrapper types.
-
-        Args:
-            value: String, SQLGlot expression, or our wrapper type.
-
-        Returns:
-            Raw SQLGlot expression.
-        """
-        from sqlspec.builder._expression_wrappers import ExpressionWrapper
-        from sqlspec.builder.mixins._select_operations import Case
-
-        if isinstance(value, str):
-            return exp.column(value)
-        if isinstance(value, Column):
-            return value.sqlglot_expression
-        if isinstance(value, ExpressionWrapper):
-            return value.expression
-        if isinstance(value, Case):
-            return exp.Case(ifs=value.conditions, default=value.default)
-        if isinstance(value, exp.Expression):
-            return value
-        return exp.convert(value)
-
     @staticmethod
     def decode(column: Union[str, exp.Expression], *args: Union[str, exp.Expression, Any]) -> FunctionExpression:
         """Create a DECODE expression (Oracle-style conditional logic).
@@ -1109,14 +1071,14 @@

         for i in range(0, len(args) - 1, 2):
             if i + 1 >= len(args):
-                default =
+                default = to_expression(args[i])
                 break

             search_val = args[i]
             result_val = args[i + 1]

-            search_expr =
-            result_expr =
+            search_expr = to_expression(search_val)
+            result_expr = to_expression(result_val)

             condition = exp.EQ(this=col_expr, expression=search_expr)
             conditions.append(exp.If(this=condition, true=result_expr))
@@ -1164,7 +1126,7 @@
             COALESCE expression equivalent to NVL.
         """
         col_expr = exp.column(column) if isinstance(column, str) else column
-        sub_expr =
+        sub_expr = to_expression(substitute_value)
         return ConversionExpression(exp.Coalesce(expressions=[col_expr, sub_expr]))

     @staticmethod
@@ -1192,8 +1154,8 @@
         ```
         """
         col_expr = exp.column(column) if isinstance(column, str) else column
-        not_null_expr =
-        null_expr =
+        not_null_expr = to_expression(value_if_not_null)
+        null_expr = to_expression(value_if_null)

         is_null = exp.Is(this=col_expr, expression=exp.Null())
         condition = exp.Not(this=is_null)
sqlspec/_typing.py
CHANGED
@@ -177,6 +177,14 @@ except ImportError:
     MSGSPEC_INSTALLED = False  # pyright: ignore[reportConstantRedefinition]


+try:
+    import orjson  # noqa: F401
+
+    ORJSON_INSTALLED = True  # pyright: ignore[reportConstantRedefinition]
+except ImportError:
+    ORJSON_INSTALLED = False  # pyright: ignore[reportConstantRedefinition]
+
+
 # Always define stub type for DTOData
 @runtime_checkable
 class DTODataStub(Protocol[T]):
@@ -621,6 +629,7 @@ __all__ = (
     "NUMPY_INSTALLED",
     "OBSTORE_INSTALLED",
     "OPENTELEMETRY_INSTALLED",
+    "ORJSON_INSTALLED",
     "PGVECTOR_INSTALLED",
     "PROMETHEUS_INSTALLED",
     "PYARROW_INSTALLED",
sqlspec/adapters/adbc/config.py
CHANGED
@@ -77,6 +77,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
         migration_config: Optional[dict[str, Any]] = None,
         statement_config: Optional[StatementConfig] = None,
         driver_features: Optional[dict[str, Any]] = None,
+        bind_key: Optional[str] = None,
     ) -> None:
         """Initialize configuration.

@@ -85,6 +86,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
             migration_config: Migration configuration
             statement_config: Default SQL statement configuration
             driver_features: Driver feature configuration
+            bind_key: Optional unique identifier for this configuration
         """
         if connection_config is None:
             connection_config = {}
@@ -104,6 +106,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
             migration_config=migration_config,
             statement_config=statement_config,
             driver_features=driver_features or {},
+            bind_key=bind_key,
         )

     def _resolve_driver_name(self) -> str:
@@ -174,7 +177,11 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
         try:
             connect_func = import_string(driver_path)
         except ImportError as e:
-
+            # Only add .dbapi.connect if it's not already there
+            if not driver_path.endswith(".dbapi.connect"):
+                driver_path_with_suffix = f"{driver_path}.dbapi.connect"
+            else:
+                driver_path_with_suffix = driver_path
             try:
                 connect_func = import_string(driver_path_with_suffix)
             except ImportError as e2: