sqlspec 0.24.1__py3-none-any.whl → 0.26.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sqlspec might be problematic.

Files changed (95)
  1. sqlspec/_serialization.py +223 -21
  2. sqlspec/_sql.py +20 -62
  3. sqlspec/_typing.py +11 -0
  4. sqlspec/adapters/adbc/config.py +8 -1
  5. sqlspec/adapters/adbc/data_dictionary.py +290 -0
  6. sqlspec/adapters/adbc/driver.py +129 -20
  7. sqlspec/adapters/adbc/type_converter.py +159 -0
  8. sqlspec/adapters/aiosqlite/config.py +3 -0
  9. sqlspec/adapters/aiosqlite/data_dictionary.py +117 -0
  10. sqlspec/adapters/aiosqlite/driver.py +17 -3
  11. sqlspec/adapters/asyncmy/_types.py +1 -1
  12. sqlspec/adapters/asyncmy/config.py +11 -8
  13. sqlspec/adapters/asyncmy/data_dictionary.py +122 -0
  14. sqlspec/adapters/asyncmy/driver.py +31 -7
  15. sqlspec/adapters/asyncpg/config.py +3 -0
  16. sqlspec/adapters/asyncpg/data_dictionary.py +134 -0
  17. sqlspec/adapters/asyncpg/driver.py +19 -4
  18. sqlspec/adapters/bigquery/config.py +3 -0
  19. sqlspec/adapters/bigquery/data_dictionary.py +109 -0
  20. sqlspec/adapters/bigquery/driver.py +21 -3
  21. sqlspec/adapters/bigquery/type_converter.py +93 -0
  22. sqlspec/adapters/duckdb/_types.py +1 -1
  23. sqlspec/adapters/duckdb/config.py +2 -0
  24. sqlspec/adapters/duckdb/data_dictionary.py +124 -0
  25. sqlspec/adapters/duckdb/driver.py +32 -5
  26. sqlspec/adapters/duckdb/pool.py +1 -1
  27. sqlspec/adapters/duckdb/type_converter.py +103 -0
  28. sqlspec/adapters/oracledb/config.py +6 -0
  29. sqlspec/adapters/oracledb/data_dictionary.py +442 -0
  30. sqlspec/adapters/oracledb/driver.py +68 -9
  31. sqlspec/adapters/oracledb/migrations.py +51 -67
  32. sqlspec/adapters/oracledb/type_converter.py +132 -0
  33. sqlspec/adapters/psqlpy/config.py +3 -0
  34. sqlspec/adapters/psqlpy/data_dictionary.py +133 -0
  35. sqlspec/adapters/psqlpy/driver.py +23 -179
  36. sqlspec/adapters/psqlpy/type_converter.py +73 -0
  37. sqlspec/adapters/psycopg/config.py +8 -4
  38. sqlspec/adapters/psycopg/data_dictionary.py +257 -0
  39. sqlspec/adapters/psycopg/driver.py +40 -5
  40. sqlspec/adapters/sqlite/config.py +3 -0
  41. sqlspec/adapters/sqlite/data_dictionary.py +117 -0
  42. sqlspec/adapters/sqlite/driver.py +18 -3
  43. sqlspec/adapters/sqlite/pool.py +13 -4
  44. sqlspec/base.py +3 -4
  45. sqlspec/builder/_base.py +130 -48
  46. sqlspec/builder/_column.py +66 -24
  47. sqlspec/builder/_ddl.py +91 -41
  48. sqlspec/builder/_insert.py +40 -58
  49. sqlspec/builder/_parsing_utils.py +127 -12
  50. sqlspec/builder/_select.py +147 -2
  51. sqlspec/builder/_update.py +1 -1
  52. sqlspec/builder/mixins/_cte_and_set_ops.py +31 -23
  53. sqlspec/builder/mixins/_delete_operations.py +12 -7
  54. sqlspec/builder/mixins/_insert_operations.py +50 -36
  55. sqlspec/builder/mixins/_join_operations.py +15 -30
  56. sqlspec/builder/mixins/_merge_operations.py +210 -78
  57. sqlspec/builder/mixins/_order_limit_operations.py +4 -10
  58. sqlspec/builder/mixins/_pivot_operations.py +1 -0
  59. sqlspec/builder/mixins/_select_operations.py +44 -22
  60. sqlspec/builder/mixins/_update_operations.py +30 -37
  61. sqlspec/builder/mixins/_where_clause.py +52 -70
  62. sqlspec/cli.py +246 -140
  63. sqlspec/config.py +33 -19
  64. sqlspec/core/__init__.py +3 -2
  65. sqlspec/core/cache.py +298 -352
  66. sqlspec/core/compiler.py +61 -4
  67. sqlspec/core/filters.py +246 -213
  68. sqlspec/core/hashing.py +9 -11
  69. sqlspec/core/parameters.py +27 -10
  70. sqlspec/core/statement.py +72 -12
  71. sqlspec/core/type_conversion.py +234 -0
  72. sqlspec/driver/__init__.py +6 -3
  73. sqlspec/driver/_async.py +108 -5
  74. sqlspec/driver/_common.py +186 -17
  75. sqlspec/driver/_sync.py +108 -5
  76. sqlspec/driver/mixins/_result_tools.py +60 -7
  77. sqlspec/exceptions.py +5 -0
  78. sqlspec/loader.py +8 -9
  79. sqlspec/migrations/__init__.py +4 -3
  80. sqlspec/migrations/base.py +153 -14
  81. sqlspec/migrations/commands.py +34 -96
  82. sqlspec/migrations/context.py +145 -0
  83. sqlspec/migrations/loaders.py +25 -8
  84. sqlspec/migrations/runner.py +352 -82
  85. sqlspec/storage/backends/fsspec.py +1 -0
  86. sqlspec/typing.py +4 -0
  87. sqlspec/utils/config_resolver.py +153 -0
  88. sqlspec/utils/serializers.py +50 -2
  89. {sqlspec-0.24.1.dist-info → sqlspec-0.26.0.dist-info}/METADATA +1 -1
  90. sqlspec-0.26.0.dist-info/RECORD +157 -0
  91. sqlspec-0.24.1.dist-info/RECORD +0 -139
  92. {sqlspec-0.24.1.dist-info → sqlspec-0.26.0.dist-info}/WHEEL +0 -0
  93. {sqlspec-0.24.1.dist-info → sqlspec-0.26.0.dist-info}/entry_points.txt +0 -0
  94. {sqlspec-0.24.1.dist-info → sqlspec-0.26.0.dist-info}/licenses/LICENSE +0 -0
  95. {sqlspec-0.24.1.dist-info → sqlspec-0.26.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/_serialization.py CHANGED
@@ -1,11 +1,28 @@
+"""Enhanced serialization module with byte-aware encoding and class-based architecture.
+
+Provides a Protocol-based serialization system that users can extend.
+Supports msgspec, orjson, and standard library JSON with automatic fallback.
+"""
+
+import contextlib
 import datetime
 import enum
-from typing import Any
+import json
+from abc import ABC, abstractmethod
+from typing import Any, Final, Literal, Optional, Protocol, Union, overload
 
-from sqlspec.typing import PYDANTIC_INSTALLED, BaseModel
+from sqlspec.typing import MSGSPEC_INSTALLED, ORJSON_INSTALLED, PYDANTIC_INSTALLED, BaseModel
 
 
 def _type_to_string(value: Any) -> str:  # pragma: no cover
+    """Convert special types to strings for JSON serialization.
+
+    Args:
+        value: Value to convert.
+
+    Returns:
+        String representation of the value.
+    """
     if isinstance(value, datetime.datetime):
         return convert_datetime_to_gmt_iso(value)
     if isinstance(value, datetime.date):
@@ -20,35 +37,206 @@ def _type_to_string(value: Any) -> str:  # pragma: no cover
     raise TypeError from exc
 
 
-try:
-    from msgspec.json import Decoder, Encoder
+class JSONSerializer(Protocol):
+    """Protocol for JSON serialization implementations.
 
-    encoder, decoder = Encoder(enc_hook=_type_to_string), Decoder()
-    decode_json = decoder.decode
+    Users can implement this protocol to create custom serializers.
+    """
 
-    def encode_json(data: Any) -> str:  # pragma: no cover
-        return encoder.encode(data).decode("utf-8")
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data to JSON.
 
-except ImportError:
-    try:
-        from orjson import (  # pyright: ignore[reportMissingImports]
+        Args:
+            data: Data to encode.
+            as_bytes: Whether to return bytes instead of string.
+
+        Returns:
+            JSON string or bytes depending on as_bytes parameter.
+        """
+        ...
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode from JSON.
+
+        Args:
+            data: JSON string or bytes to decode.
+            decode_bytes: Whether to decode bytes input.
+
+        Returns:
+            Decoded Python object.
+        """
+        ...
+
+
+class BaseJSONSerializer(ABC):
+    """Base class for JSON serializers with common functionality."""
+
+    __slots__ = ()
+
+    @abstractmethod
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data to JSON."""
+        ...
+
+    @abstractmethod
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode from JSON."""
+        ...
+
+
+class MsgspecSerializer(BaseJSONSerializer):
+    """Msgspec-based JSON serializer for optimal performance."""
+
+    __slots__ = ("_decoder", "_encoder")
+
+    def __init__(self) -> None:
+        """Initialize msgspec encoder and decoder."""
+        from msgspec.json import Decoder, Encoder
+
+        self._encoder: Final[Encoder] = Encoder(enc_hook=_type_to_string)
+        self._decoder: Final[Decoder] = Decoder()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using msgspec."""
+        try:
+            if as_bytes:
+                return self._encoder.encode(data)
+            return self._encoder.encode(data).decode("utf-8")
+        except (TypeError, ValueError):
+            if ORJSON_INSTALLED:
+                return OrjsonSerializer().encode(data, as_bytes=as_bytes)
+            return StandardLibSerializer().encode(data, as_bytes=as_bytes)
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using msgspec."""
+        if isinstance(data, bytes):
+            if decode_bytes:
+                try:
+                    return self._decoder.decode(data)
+                except (TypeError, ValueError):
+                    if ORJSON_INSTALLED:
+                        return OrjsonSerializer().decode(data, decode_bytes=decode_bytes)
+                    return StandardLibSerializer().decode(data, decode_bytes=decode_bytes)
+            return data
+
+        try:
+            return self._decoder.decode(data.encode("utf-8"))
+        except (TypeError, ValueError):
+            if ORJSON_INSTALLED:
+                return OrjsonSerializer().decode(data, decode_bytes=decode_bytes)
+            return StandardLibSerializer().decode(data, decode_bytes=decode_bytes)
+
+
+class OrjsonSerializer(BaseJSONSerializer):
+    """Orjson-based JSON serializer with native datetime/UUID support."""
+
+    __slots__ = ()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using orjson."""
+        from orjson import (
             OPT_NAIVE_UTC,  # pyright: ignore[reportUnknownVariableType]
             OPT_SERIALIZE_NUMPY,  # pyright: ignore[reportUnknownVariableType]
             OPT_SERIALIZE_UUID,  # pyright: ignore[reportUnknownVariableType]
         )
-        from orjson import dumps as _encode_json  # pyright: ignore[reportUnknownVariableType,reportMissingImports]
-        from orjson import loads as decode_json  # type: ignore[no-redef,assignment,unused-ignore]
+        from orjson import dumps as _orjson_dumps  # pyright: ignore[reportMissingImports]
+
+        result = _orjson_dumps(
+            data, default=_type_to_string, option=OPT_SERIALIZE_NUMPY | OPT_NAIVE_UTC | OPT_SERIALIZE_UUID
+        )
+        return result if as_bytes else result.decode("utf-8")
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using orjson."""
+        from orjson import loads as _orjson_loads  # pyright: ignore[reportMissingImports]
+
+        if isinstance(data, bytes):
+            if decode_bytes:
+                return _orjson_loads(data)
+            return data
+        return _orjson_loads(data)
 
-        def encode_json(data: Any) -> str:  # pragma: no cover
-            return _encode_json(
-                data, default=_type_to_string, option=OPT_SERIALIZE_NUMPY | OPT_NAIVE_UTC | OPT_SERIALIZE_UUID
-            ).decode("utf-8")
 
-    except ImportError:
-        from json import dumps as encode_json  # type: ignore[assignment]
-        from json import loads as decode_json  # type: ignore[assignment]
+class StandardLibSerializer(BaseJSONSerializer):
+    """Standard library JSON serializer as fallback."""
 
-__all__ = ("convert_date_to_iso", "convert_datetime_to_gmt_iso", "decode_json", "encode_json")
+    __slots__ = ()
+
+    def encode(self, data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+        """Encode data using standard library json."""
+        json_str = json.dumps(data, default=_type_to_string)
+        return json_str.encode("utf-8") if as_bytes else json_str
+
+    def decode(self, data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+        """Decode data using standard library json."""
+        if isinstance(data, bytes):
+            if decode_bytes:
+                return json.loads(data.decode("utf-8"))
+            return data
+        return json.loads(data)
+
+
+_default_serializer: Optional[JSONSerializer] = None
+
+
+def get_default_serializer() -> JSONSerializer:
+    """Get the default serializer based on available libraries.
+
+    Priority: msgspec > orjson > stdlib
+
+    Returns:
+        The best available JSON serializer.
+    """
+    global _default_serializer
+
+    if _default_serializer is None:
+        if MSGSPEC_INSTALLED:
+            with contextlib.suppress(ImportError):
+                _default_serializer = MsgspecSerializer()
+
+        if _default_serializer is None and ORJSON_INSTALLED:
+            with contextlib.suppress(ImportError):
+                _default_serializer = OrjsonSerializer()
+
+        if _default_serializer is None:
+            _default_serializer = StandardLibSerializer()
+
+    assert _default_serializer is not None
+    return _default_serializer
+
+
+@overload
+def encode_json(data: Any, *, as_bytes: Literal[False] = ...) -> str: ...  # pragma: no cover
+
+
+@overload
+def encode_json(data: Any, *, as_bytes: Literal[True]) -> bytes: ...  # pragma: no cover
+
+
+def encode_json(data: Any, *, as_bytes: bool = False) -> Union[str, bytes]:
+    """Encode to JSON, optionally returning bytes for optimal performance.
+
+    Args:
+        data: The data to encode.
+        as_bytes: Whether to return bytes instead of string.
+
+    Returns:
+        JSON string or bytes depending on as_bytes parameter.
+    """
+    return get_default_serializer().encode(data, as_bytes=as_bytes)
+
+
+def decode_json(data: Union[str, bytes], *, decode_bytes: bool = True) -> Any:
+    """Decode from JSON string or bytes efficiently.
+
+    Args:
+        data: JSON string or bytes to decode.
+        decode_bytes: Whether to decode bytes input.
+
+    Returns:
+        Decoded Python object.
+    """
+    return get_default_serializer().decode(data, decode_bytes=decode_bytes)
 
 
 def convert_datetime_to_gmt_iso(dt: datetime.datetime) -> str:  # pragma: no cover
@@ -75,3 +263,17 @@ def convert_date_to_iso(dt: datetime.date) -> str:  # pragma: no cover
         The ISO formatted date string.
     """
     return dt.isoformat()
+
+
+__all__ = (
+    "BaseJSONSerializer",
+    "JSONSerializer",
+    "MsgspecSerializer",
+    "OrjsonSerializer",
+    "StandardLibSerializer",
+    "convert_date_to_iso",
+    "convert_datetime_to_gmt_iso",
+    "decode_json",
+    "encode_json",
+    "get_default_serializer",
+)
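Taken together, the new classes keep encode_json() and decode_json() as the public entry points while letting the backend vary. A minimal usage sketch based only on the signatures shown above; the payload values are illustrative and the import path is the private sqlspec._serialization module from this diff:

import datetime

from sqlspec._serialization import decode_json, encode_json, get_default_serializer

payload = {"id": 1, "created_at": datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc)}

# encode_json() returns str by default; as_bytes=True skips the UTF-8 decode step.
as_text = encode_json(payload)
as_bytes = encode_json(payload, as_bytes=True)

# decode_json() accepts either str or bytes input and yields the same Python object.
assert decode_json(as_text) == decode_json(as_bytes)

# The backend is chosen once, following the documented priority: msgspec > orjson > stdlib.
print(type(get_default_serializer()).__name__)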
sqlspec/_sql.py CHANGED
@@ -40,6 +40,7 @@ from sqlspec.builder._expression_wrappers import (
     MathExpression,
     StringExpression,
 )
+from sqlspec.builder._parsing_utils import extract_expression, to_expression
 from sqlspec.builder.mixins._join_operations import JoinBuilder
 from sqlspec.builder.mixins._select_operations import Case, SubqueryBuilder, WindowFunctionBuilder
 from sqlspec.core.statement import SQL
@@ -170,7 +171,7 @@ class SQLFactory:
             actual_type_str == "WITH" and parsed_expr.this and isinstance(parsed_expr.this, exp.Select)
         ):
             builder = Select(dialect=dialect or self.dialect)
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         if actual_type_str in {"INSERT", "UPDATE", "DELETE"} and parsed_expr.args.get("returning") is not None:
@@ -451,7 +452,7 @@ class SQLFactory:
         parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect)
 
         if isinstance(parsed_expr, exp.Insert):
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         if isinstance(parsed_expr, exp.Select):
@@ -470,7 +471,7 @@ class SQLFactory:
         parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect)
 
         if isinstance(parsed_expr, exp.Select):
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         logger.warning("Cannot create SELECT from %s statement", type(parsed_expr).__name__)
@@ -485,7 +486,7 @@ class SQLFactory:
         parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect)
 
         if isinstance(parsed_expr, exp.Update):
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         logger.warning("Cannot create UPDATE from %s statement", type(parsed_expr).__name__)
@@ -500,7 +501,7 @@ class SQLFactory:
         parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect)
 
         if isinstance(parsed_expr, exp.Delete):
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         logger.warning("Cannot create DELETE from %s statement", type(parsed_expr).__name__)
@@ -515,7 +516,7 @@ class SQLFactory:
         parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect)
 
         if isinstance(parsed_expr, exp.Merge):
-            builder._expression = parsed_expr
+            builder.set_expression(parsed_expr)
             return builder
 
         logger.warning("Cannot create MERGE from %s statement", type(parsed_expr).__name__)
@@ -724,19 +725,15 @@ class SQLFactory:
         if not parameters:
             try:
                 parsed: exp.Expression = exp.maybe_parse(sql_fragment)
-                return parsed
-                if sql_fragment.strip().replace("_", "").replace(".", "").isalnum():
-                    return exp.to_identifier(sql_fragment)
-                return exp.Literal.string(sql_fragment)
             except Exception as e:
                 msg = f"Failed to parse raw SQL fragment '{sql_fragment}': {e}"
                 raise SQLBuilderError(msg) from e
+            return parsed
 
         return SQL(sql_fragment, parameters)
 
-    @staticmethod
     def count(
-        column: Union[str, exp.Expression, "ExpressionWrapper", "Case", "Column"] = "*", distinct: bool = False
+        self, column: Union[str, exp.Expression, "ExpressionWrapper", "Case", "Column"] = "*", distinct: bool = False
     ) -> AggregateExpression:
         """Create a COUNT expression.
 
@@ -750,7 +747,7 @@ class SQLFactory:
         if isinstance(column, str) and column == "*":
             expr = exp.Count(this=exp.Star(), distinct=distinct)
         else:
-            col_expr = SQLFactory._extract_expression(column)
+            col_expr = extract_expression(column)
             expr = exp.Count(this=col_expr, distinct=distinct)
         return AggregateExpression(expr)
 
@@ -778,7 +775,7 @@ class SQLFactory:
         Returns:
             SUM expression.
         """
-        col_expr = SQLFactory._extract_expression(column)
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Sum(this=col_expr, distinct=distinct))
 
     @staticmethod
@@ -791,7 +788,7 @@ class SQLFactory:
         Returns:
            AVG expression.
         """
-        col_expr = SQLFactory._extract_expression(column)
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Avg(this=col_expr))
 
     @staticmethod
@@ -804,7 +801,7 @@ class SQLFactory:
         Returns:
            MAX expression.
         """
-        col_expr = SQLFactory._extract_expression(column)
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Max(this=col_expr))
 
     @staticmethod
@@ -817,7 +814,7 @@ class SQLFactory:
         Returns:
            MIN expression.
         """
-        col_expr = SQLFactory._extract_expression(column)
+        col_expr = extract_expression(column)
         return AggregateExpression(exp.Min(this=col_expr))
 
     @staticmethod
@@ -1038,45 +1035,6 @@ class SQLFactory:
             return FunctionExpression(value)
         return FunctionExpression(exp.convert(value))
 
-    @staticmethod
-    def _to_expression(value: Any) -> exp.Expression:
-        """Convert a Python value to a raw SQLGlot expression.
-
-        Args:
-            value: Python value or SQLGlot expression to convert.
-
-        Returns:
-            Raw SQLGlot expression.
-        """
-        if isinstance(value, exp.Expression):
-            return value
-        return exp.convert(value)
-
-    @staticmethod
-    def _extract_expression(value: Any) -> exp.Expression:
-        """Extract SQLGlot expression from value, handling our wrapper types.
-
-        Args:
-            value: String, SQLGlot expression, or our wrapper type.
-
-        Returns:
-            Raw SQLGlot expression.
-        """
-        from sqlspec.builder._expression_wrappers import ExpressionWrapper
-        from sqlspec.builder.mixins._select_operations import Case
-
-        if isinstance(value, str):
-            return exp.column(value)
-        if isinstance(value, Column):
-            return value._expression
-        if isinstance(value, ExpressionWrapper):
-            return value.expression
-        if isinstance(value, Case):
-            return exp.Case(ifs=value._conditions, default=value._default)
-        if isinstance(value, exp.Expression):
-            return value
-        return exp.convert(value)
-
     @staticmethod
     def decode(column: Union[str, exp.Expression], *args: Union[str, exp.Expression, Any]) -> FunctionExpression:
         """Create a DECODE expression (Oracle-style conditional logic).
@@ -1113,14 +1071,14 @@ class SQLFactory:
 
         for i in range(0, len(args) - 1, 2):
             if i + 1 >= len(args):
-                default = SQLFactory._to_expression(args[i])
+                default = to_expression(args[i])
                 break
 
             search_val = args[i]
             result_val = args[i + 1]
 
-            search_expr = SQLFactory._to_expression(search_val)
-            result_expr = SQLFactory._to_expression(result_val)
+            search_expr = to_expression(search_val)
+            result_expr = to_expression(result_val)
 
             condition = exp.EQ(this=col_expr, expression=search_expr)
             conditions.append(exp.If(this=condition, true=result_expr))
@@ -1168,7 +1126,7 @@ class SQLFactory:
             COALESCE expression equivalent to NVL.
         """
         col_expr = exp.column(column) if isinstance(column, str) else column
-        sub_expr = SQLFactory._to_expression(substitute_value)
+        sub_expr = to_expression(substitute_value)
         return ConversionExpression(exp.Coalesce(expressions=[col_expr, sub_expr]))
 
     @staticmethod
@@ -1196,8 +1154,8 @@ class SQLFactory:
         ```
         """
         col_expr = exp.column(column) if isinstance(column, str) else column
-        not_null_expr = SQLFactory._to_expression(value_if_not_null)
-        null_expr = SQLFactory._to_expression(value_if_null)
+        not_null_expr = to_expression(value_if_not_null)
+        null_expr = to_expression(value_if_null)
 
         is_null = exp.Is(this=col_expr, expression=exp.Null())
         condition = exp.Not(this=is_null)
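Functionally the refactor above is transparent to callers: the private _to_expression/_extract_expression helpers move to sqlspec.builder._parsing_utils and count() becomes an instance method, but the factory methods keep their signatures. A short sketch of those methods, assuming the factory is exposed as a module-level sql object (e.g. from sqlspec import sql); column names and literal values are illustrative:

from sqlspec import sql  # assumed module-level SQLFactory instance

# Aggregate helpers return wrapper expressions built on SQLGlot nodes.
distinct_users = sql.count("user_id", distinct=True)  # COUNT(DISTINCT user_id)
total_revenue = sql.sum("amount")                     # SUM(amount)
average_price = sql.avg("price")                      # AVG(price)

# Oracle-style helpers are rewritten into portable COALESCE/CASE expressions.
region = sql.nvl("region", "unknown")
status = sql.decode("status_code", 1, "active", 0, "inactive")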
sqlspec/_typing.py CHANGED
@@ -177,6 +177,14 @@ except ImportError:
     MSGSPEC_INSTALLED = False  # pyright: ignore[reportConstantRedefinition]
 
 
+try:
+    import orjson  # noqa: F401
+
+    ORJSON_INSTALLED = True  # pyright: ignore[reportConstantRedefinition]
+except ImportError:
+    ORJSON_INSTALLED = False  # pyright: ignore[reportConstantRedefinition]
+
+
 # Always define stub type for DTOData
 @runtime_checkable
 class DTODataStub(Protocol[T]):
@@ -606,6 +614,7 @@ except ImportError:
 
 
 FSSPEC_INSTALLED = bool(find_spec("fsspec"))
+NUMPY_INSTALLED = bool(find_spec("numpy"))
 OBSTORE_INSTALLED = bool(find_spec("obstore"))
 PGVECTOR_INSTALLED = bool(find_spec("pgvector"))
 
@@ -617,8 +626,10 @@ __all__ = (
     "FSSPEC_INSTALLED",
     "LITESTAR_INSTALLED",
     "MSGSPEC_INSTALLED",
+    "NUMPY_INSTALLED",
     "OBSTORE_INSTALLED",
     "OPENTELEMETRY_INSTALLED",
+    "ORJSON_INSTALLED",
     "PGVECTOR_INSTALLED",
     "PROMETHEUS_INSTALLED",
     "PYARROW_INSTALLED",
sqlspec/adapters/adbc/config.py CHANGED
@@ -77,6 +77,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
         migration_config: Optional[dict[str, Any]] = None,
         statement_config: Optional[StatementConfig] = None,
         driver_features: Optional[dict[str, Any]] = None,
+        bind_key: Optional[str] = None,
     ) -> None:
         """Initialize configuration.
 
@@ -85,6 +86,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
             migration_config: Migration configuration
             statement_config: Default SQL statement configuration
             driver_features: Driver feature configuration
+            bind_key: Optional unique identifier for this configuration
         """
         if connection_config is None:
             connection_config = {}
@@ -104,6 +106,7 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
             migration_config=migration_config,
             statement_config=statement_config,
             driver_features=driver_features or {},
+            bind_key=bind_key,
         )
 
     def _resolve_driver_name(self) -> str:
@@ -174,7 +177,11 @@ class AdbcConfig(NoPoolSyncConfig[AdbcConnection, AdbcDriver]):
         try:
             connect_func = import_string(driver_path)
         except ImportError as e:
-            driver_path_with_suffix = f"{driver_path}.dbapi.connect"
+            # Only add .dbapi.connect if it's not already there
+            if not driver_path.endswith(".dbapi.connect"):
+                driver_path_with_suffix = f"{driver_path}.dbapi.connect"
+            else:
+                driver_path_with_suffix = driver_path
             try:
                 connect_func = import_string(driver_path_with_suffix)
             except ImportError as e2:
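Both AdbcConfig changes are additive: bind_key gives a configuration a stable identifier, and driver resolution no longer appends ".dbapi.connect" when the path already ends with it. A hypothetical configuration sketch; the connection_config keys (driver_name, uri) are illustrative assumptions, not taken from this diff:

from sqlspec.adapters.adbc.config import AdbcConfig

# bind_key identifies this configuration when several configs are registered side by side.
analytics = AdbcConfig(
    connection_config={"driver_name": "adbc_driver_sqlite", "uri": ":memory:"},
    bind_key="analytics",
)

# A driver_name that already names the connect callable is now left untouched
# instead of being rewritten to "...dbapi.connect.dbapi.connect".
explicit = AdbcConfig(
    connection_config={"driver_name": "adbc_driver_sqlite.dbapi.connect", "uri": ":memory:"},
    bind_key="explicit-driver",
)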