sqlspec 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.



Files changed (197)
  1. sqlspec/__init__.py +7 -15
  2. sqlspec/_serialization.py +55 -25
  3. sqlspec/_typing.py +62 -52
  4. sqlspec/adapters/adbc/_types.py +1 -1
  5. sqlspec/adapters/adbc/adk/__init__.py +5 -0
  6. sqlspec/adapters/adbc/adk/store.py +870 -0
  7. sqlspec/adapters/adbc/config.py +62 -12
  8. sqlspec/adapters/adbc/data_dictionary.py +52 -2
  9. sqlspec/adapters/adbc/driver.py +144 -45
  10. sqlspec/adapters/adbc/litestar/__init__.py +5 -0
  11. sqlspec/adapters/adbc/litestar/store.py +504 -0
  12. sqlspec/adapters/adbc/type_converter.py +44 -50
  13. sqlspec/adapters/aiosqlite/_types.py +1 -1
  14. sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
  15. sqlspec/adapters/aiosqlite/adk/store.py +527 -0
  16. sqlspec/adapters/aiosqlite/config.py +86 -16
  17. sqlspec/adapters/aiosqlite/data_dictionary.py +34 -2
  18. sqlspec/adapters/aiosqlite/driver.py +127 -38
  19. sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
  20. sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
  21. sqlspec/adapters/aiosqlite/pool.py +7 -7
  22. sqlspec/adapters/asyncmy/__init__.py +7 -1
  23. sqlspec/adapters/asyncmy/_types.py +1 -1
  24. sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
  25. sqlspec/adapters/asyncmy/adk/store.py +493 -0
  26. sqlspec/adapters/asyncmy/config.py +59 -17
  27. sqlspec/adapters/asyncmy/data_dictionary.py +41 -2
  28. sqlspec/adapters/asyncmy/driver.py +293 -62
  29. sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
  30. sqlspec/adapters/asyncmy/litestar/store.py +296 -0
  31. sqlspec/adapters/asyncpg/__init__.py +2 -1
  32. sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
  33. sqlspec/adapters/asyncpg/_types.py +11 -7
  34. sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
  35. sqlspec/adapters/asyncpg/adk/store.py +450 -0
  36. sqlspec/adapters/asyncpg/config.py +57 -36
  37. sqlspec/adapters/asyncpg/data_dictionary.py +41 -2
  38. sqlspec/adapters/asyncpg/driver.py +153 -23
  39. sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
  40. sqlspec/adapters/asyncpg/litestar/store.py +253 -0
  41. sqlspec/adapters/bigquery/_types.py +1 -1
  42. sqlspec/adapters/bigquery/adk/__init__.py +5 -0
  43. sqlspec/adapters/bigquery/adk/store.py +576 -0
  44. sqlspec/adapters/bigquery/config.py +25 -11
  45. sqlspec/adapters/bigquery/data_dictionary.py +42 -2
  46. sqlspec/adapters/bigquery/driver.py +352 -144
  47. sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
  48. sqlspec/adapters/bigquery/litestar/store.py +327 -0
  49. sqlspec/adapters/bigquery/type_converter.py +55 -23
  50. sqlspec/adapters/duckdb/_types.py +2 -2
  51. sqlspec/adapters/duckdb/adk/__init__.py +14 -0
  52. sqlspec/adapters/duckdb/adk/store.py +553 -0
  53. sqlspec/adapters/duckdb/config.py +79 -21
  54. sqlspec/adapters/duckdb/data_dictionary.py +41 -2
  55. sqlspec/adapters/duckdb/driver.py +138 -43
  56. sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
  57. sqlspec/adapters/duckdb/litestar/store.py +332 -0
  58. sqlspec/adapters/duckdb/pool.py +5 -5
  59. sqlspec/adapters/duckdb/type_converter.py +51 -21
  60. sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
  61. sqlspec/adapters/oracledb/_types.py +20 -2
  62. sqlspec/adapters/oracledb/adk/__init__.py +5 -0
  63. sqlspec/adapters/oracledb/adk/store.py +1745 -0
  64. sqlspec/adapters/oracledb/config.py +120 -36
  65. sqlspec/adapters/oracledb/data_dictionary.py +87 -20
  66. sqlspec/adapters/oracledb/driver.py +292 -84
  67. sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
  68. sqlspec/adapters/oracledb/litestar/store.py +767 -0
  69. sqlspec/adapters/oracledb/migrations.py +316 -25
  70. sqlspec/adapters/oracledb/type_converter.py +91 -16
  71. sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
  72. sqlspec/adapters/psqlpy/_types.py +2 -1
  73. sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
  74. sqlspec/adapters/psqlpy/adk/store.py +482 -0
  75. sqlspec/adapters/psqlpy/config.py +45 -19
  76. sqlspec/adapters/psqlpy/data_dictionary.py +41 -2
  77. sqlspec/adapters/psqlpy/driver.py +101 -31
  78. sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
  79. sqlspec/adapters/psqlpy/litestar/store.py +272 -0
  80. sqlspec/adapters/psqlpy/type_converter.py +40 -11
  81. sqlspec/adapters/psycopg/_type_handlers.py +80 -0
  82. sqlspec/adapters/psycopg/_types.py +2 -1
  83. sqlspec/adapters/psycopg/adk/__init__.py +5 -0
  84. sqlspec/adapters/psycopg/adk/store.py +944 -0
  85. sqlspec/adapters/psycopg/config.py +65 -37
  86. sqlspec/adapters/psycopg/data_dictionary.py +77 -3
  87. sqlspec/adapters/psycopg/driver.py +200 -78
  88. sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
  89. sqlspec/adapters/psycopg/litestar/store.py +554 -0
  90. sqlspec/adapters/sqlite/__init__.py +2 -1
  91. sqlspec/adapters/sqlite/_type_handlers.py +86 -0
  92. sqlspec/adapters/sqlite/_types.py +1 -1
  93. sqlspec/adapters/sqlite/adk/__init__.py +5 -0
  94. sqlspec/adapters/sqlite/adk/store.py +572 -0
  95. sqlspec/adapters/sqlite/config.py +85 -16
  96. sqlspec/adapters/sqlite/data_dictionary.py +34 -2
  97. sqlspec/adapters/sqlite/driver.py +120 -52
  98. sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
  99. sqlspec/adapters/sqlite/litestar/store.py +318 -0
  100. sqlspec/adapters/sqlite/pool.py +5 -5
  101. sqlspec/base.py +45 -26
  102. sqlspec/builder/__init__.py +73 -4
  103. sqlspec/builder/_base.py +91 -58
  104. sqlspec/builder/_column.py +5 -5
  105. sqlspec/builder/_ddl.py +98 -89
  106. sqlspec/builder/_delete.py +5 -4
  107. sqlspec/builder/_dml.py +388 -0
  108. sqlspec/{_sql.py → builder/_factory.py} +41 -44
  109. sqlspec/builder/_insert.py +5 -82
  110. sqlspec/builder/{mixins/_join_operations.py → _join.py} +145 -143
  111. sqlspec/builder/_merge.py +446 -11
  112. sqlspec/builder/_parsing_utils.py +9 -11
  113. sqlspec/builder/_select.py +1313 -25
  114. sqlspec/builder/_update.py +11 -42
  115. sqlspec/cli.py +76 -69
  116. sqlspec/config.py +231 -60
  117. sqlspec/core/__init__.py +5 -4
  118. sqlspec/core/cache.py +18 -18
  119. sqlspec/core/compiler.py +6 -8
  120. sqlspec/core/filters.py +37 -37
  121. sqlspec/core/hashing.py +9 -9
  122. sqlspec/core/parameters.py +76 -45
  123. sqlspec/core/result.py +102 -46
  124. sqlspec/core/splitter.py +16 -17
  125. sqlspec/core/statement.py +32 -31
  126. sqlspec/core/type_conversion.py +3 -2
  127. sqlspec/driver/__init__.py +1 -3
  128. sqlspec/driver/_async.py +95 -161
  129. sqlspec/driver/_common.py +133 -80
  130. sqlspec/driver/_sync.py +95 -162
  131. sqlspec/driver/mixins/_result_tools.py +20 -236
  132. sqlspec/driver/mixins/_sql_translator.py +4 -4
  133. sqlspec/exceptions.py +70 -7
  134. sqlspec/extensions/adk/__init__.py +53 -0
  135. sqlspec/extensions/adk/_types.py +51 -0
  136. sqlspec/extensions/adk/converters.py +172 -0
  137. sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
  138. sqlspec/extensions/adk/migrations/__init__.py +0 -0
  139. sqlspec/extensions/adk/service.py +181 -0
  140. sqlspec/extensions/adk/store.py +536 -0
  141. sqlspec/extensions/aiosql/adapter.py +73 -53
  142. sqlspec/extensions/litestar/__init__.py +21 -4
  143. sqlspec/extensions/litestar/cli.py +54 -10
  144. sqlspec/extensions/litestar/config.py +59 -266
  145. sqlspec/extensions/litestar/handlers.py +46 -17
  146. sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
  147. sqlspec/extensions/litestar/migrations/__init__.py +3 -0
  148. sqlspec/extensions/litestar/plugin.py +324 -223
  149. sqlspec/extensions/litestar/providers.py +25 -25
  150. sqlspec/extensions/litestar/store.py +265 -0
  151. sqlspec/loader.py +30 -49
  152. sqlspec/migrations/base.py +200 -76
  153. sqlspec/migrations/commands.py +591 -62
  154. sqlspec/migrations/context.py +6 -9
  155. sqlspec/migrations/fix.py +199 -0
  156. sqlspec/migrations/loaders.py +47 -19
  157. sqlspec/migrations/runner.py +241 -75
  158. sqlspec/migrations/tracker.py +237 -21
  159. sqlspec/migrations/utils.py +51 -3
  160. sqlspec/migrations/validation.py +177 -0
  161. sqlspec/protocols.py +66 -36
  162. sqlspec/storage/_utils.py +98 -0
  163. sqlspec/storage/backends/fsspec.py +134 -106
  164. sqlspec/storage/backends/local.py +78 -51
  165. sqlspec/storage/backends/obstore.py +278 -162
  166. sqlspec/storage/registry.py +75 -39
  167. sqlspec/typing.py +14 -84
  168. sqlspec/utils/config_resolver.py +6 -6
  169. sqlspec/utils/correlation.py +4 -5
  170. sqlspec/utils/data_transformation.py +3 -2
  171. sqlspec/utils/deprecation.py +9 -8
  172. sqlspec/utils/fixtures.py +4 -4
  173. sqlspec/utils/logging.py +46 -6
  174. sqlspec/utils/module_loader.py +2 -2
  175. sqlspec/utils/schema.py +288 -0
  176. sqlspec/utils/serializers.py +3 -3
  177. sqlspec/utils/sync_tools.py +21 -17
  178. sqlspec/utils/text.py +1 -2
  179. sqlspec/utils/type_guards.py +111 -20
  180. sqlspec/utils/version.py +433 -0
  181. {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/METADATA +40 -21
  182. sqlspec-0.27.0.dist-info/RECORD +207 -0
  183. sqlspec/builder/mixins/__init__.py +0 -55
  184. sqlspec/builder/mixins/_cte_and_set_ops.py +0 -253
  185. sqlspec/builder/mixins/_delete_operations.py +0 -50
  186. sqlspec/builder/mixins/_insert_operations.py +0 -282
  187. sqlspec/builder/mixins/_merge_operations.py +0 -698
  188. sqlspec/builder/mixins/_order_limit_operations.py +0 -145
  189. sqlspec/builder/mixins/_pivot_operations.py +0 -157
  190. sqlspec/builder/mixins/_select_operations.py +0 -930
  191. sqlspec/builder/mixins/_update_operations.py +0 -199
  192. sqlspec/builder/mixins/_where_clause.py +0 -1298
  193. sqlspec-0.26.0.dist-info/RECORD +0 -157
  194. sqlspec-0.26.0.dist-info/licenses/NOTICE +0 -29
  195. {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/WHEEL +0 -0
  196. {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/entry_points.txt +0 -0
  197. {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/licenses/LICENSE +0 -0
sqlspec/adapters/bigquery/driver.py

@@ -7,7 +7,7 @@ type coercion, error handling, and query job management.
  import datetime
  import logging
  from decimal import Decimal
- from typing import TYPE_CHECKING, Any, Optional, Union
+ from typing import TYPE_CHECKING, Any

  import sqlglot
  import sqlglot.expressions as exp
@@ -16,29 +16,41 @@ from google.cloud.exceptions import GoogleCloudError

  from sqlspec.adapters.bigquery._types import BigQueryConnection
  from sqlspec.adapters.bigquery.type_converter import BigQueryTypeConverter
- from sqlspec.core.cache import get_cache_config
- from sqlspec.core.parameters import ParameterStyle, ParameterStyleConfig
- from sqlspec.core.statement import StatementConfig
- from sqlspec.driver import SyncDriverAdapterBase
- from sqlspec.driver._common import ExecutionResult
- from sqlspec.exceptions import SQLParsingError, SQLSpecError
+ from sqlspec.core import ParameterStyle, ParameterStyleConfig, StatementConfig, get_cache_config
+ from sqlspec.driver import ExecutionResult, SyncDriverAdapterBase
+ from sqlspec.exceptions import (
+     DatabaseConnectionError,
+     DataError,
+     NotFoundError,
+     OperationalError,
+     SQLParsingError,
+     SQLSpecError,
+     UniqueViolationError,
+ )
  from sqlspec.utils.serializers import to_json

  if TYPE_CHECKING:
+     from collections.abc import Callable
      from contextlib import AbstractContextManager

-     from sqlspec.core.result import SQLResult
-     from sqlspec.core.statement import SQL
-     from sqlspec.driver._sync import SyncDataDictionaryBase
+     from sqlspec.core import SQL, SQLResult
+     from sqlspec.driver import SyncDataDictionaryBase

  logger = logging.getLogger(__name__)

  __all__ = ("BigQueryCursor", "BigQueryDriver", "BigQueryExceptionHandler", "bigquery_statement_config")

- _type_converter = BigQueryTypeConverter()
+ HTTP_CONFLICT = 409
+ HTTP_NOT_FOUND = 404
+ HTTP_BAD_REQUEST = 400
+ HTTP_FORBIDDEN = 403
+ HTTP_SERVER_ERROR = 500
+

+ _default_type_converter = BigQueryTypeConverter()

- _BQ_TYPE_MAP: dict[type, tuple[str, Optional[str]]] = {
+
+ _BQ_TYPE_MAP: dict[type, tuple[str, str | None]] = {
      bool: ("BOOL", None),
      int: ("INT64", None),
      float: ("FLOAT64", None),
@@ -51,7 +63,134 @@ _BQ_TYPE_MAP: dict[type, tuple[str, Optional[str]]] = {
  }


- def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
+ def _create_array_parameter(name: str, value: Any, array_type: str) -> ArrayQueryParameter:
+     """Create BigQuery ARRAY parameter.
+
+     Args:
+         name: Parameter name.
+         value: Array value (converted to list, empty list if None).
+         array_type: BigQuery array element type.
+
+     Returns:
+         ArrayQueryParameter instance.
+     """
+     return ArrayQueryParameter(name, array_type, [] if value is None else list(value))
+
+
+ def _create_json_parameter(name: str, value: Any, json_serializer: "Callable[[Any], str]") -> ScalarQueryParameter:
+     """Create BigQuery JSON parameter as STRING type.
+
+     Args:
+         name: Parameter name.
+         value: JSON-serializable value.
+         json_serializer: Function to serialize to JSON string.
+
+     Returns:
+         ScalarQueryParameter with STRING type.
+     """
+     return ScalarQueryParameter(name, "STRING", json_serializer(value))
+
+
+ def _create_scalar_parameter(name: str, value: Any, param_type: str) -> ScalarQueryParameter:
+     """Create BigQuery scalar parameter.
+
+     Args:
+         name: Parameter name.
+         value: Scalar value.
+         param_type: BigQuery parameter type (INT64, FLOAT64, etc.).
+
+     Returns:
+         ScalarQueryParameter instance.
+     """
+     return ScalarQueryParameter(name, param_type, value)
+
+
+ def _create_literal_node(value: Any, json_serializer: "Callable[[Any], str]") -> "exp.Expression":
+     """Create a SQLGlot literal expression from a Python value.
+
+     Args:
+         value: Python value to convert to SQLGlot literal.
+         json_serializer: Function to serialize dict/list to JSON string.
+
+     Returns:
+         SQLGlot expression representing the literal value.
+     """
+     if value is None:
+         return exp.Null()
+     if isinstance(value, bool):
+         return exp.Boolean(this=value)
+     if isinstance(value, (int, float)):
+         return exp.Literal.number(str(value))
+     if isinstance(value, str):
+         return exp.Literal.string(value)
+     if isinstance(value, (list, tuple)):
+         items = [_create_literal_node(item, json_serializer) for item in value]
+         return exp.Array(expressions=items)
+     if isinstance(value, dict):
+         json_str = json_serializer(value)
+         return exp.Literal.string(json_str)
+
+     return exp.Literal.string(str(value))
+
+
+ def _replace_placeholder_node(
+     node: "exp.Expression",
+     parameters: Any,
+     placeholder_counter: dict[str, int],
+     json_serializer: "Callable[[Any], str]",
+ ) -> "exp.Expression":
+     """Replace placeholder or parameter nodes with literal values.
+
+     Handles both positional placeholders (?) and named parameters (@name, :name).
+     Converts values to SQLGlot literal expressions for safe embedding in SQL.
+
+     Args:
+         node: SQLGlot expression node to check and potentially replace.
+         parameters: Parameter values (dict, list, or tuple).
+         placeholder_counter: Mutable counter dict for positional placeholders.
+         json_serializer: Function to serialize dict/list to JSON string.
+
+     Returns:
+         Literal expression if replacement made, otherwise original node.
+     """
+     if isinstance(node, exp.Placeholder):
+         if isinstance(parameters, (list, tuple)):
+             current_index = placeholder_counter["index"]
+             placeholder_counter["index"] += 1
+             if current_index < len(parameters):
+                 return _create_literal_node(parameters[current_index], json_serializer)
+         return node
+
+     if isinstance(node, exp.Parameter):
+         param_name = str(node.this) if hasattr(node.this, "__str__") else node.this
+
+         if isinstance(parameters, dict):
+             possible_names = [param_name, f"@{param_name}", f":{param_name}", f"param_{param_name}"]
+             for name in possible_names:
+                 if name in parameters:
+                     actual_value = getattr(parameters[name], "value", parameters[name])
+                     return _create_literal_node(actual_value, json_serializer)
+             return node
+
+         if isinstance(parameters, (list, tuple)):
+             try:
+                 if param_name.startswith("param_"):
+                     param_index = int(param_name[6:])
+                     if param_index < len(parameters):
+                         return _create_literal_node(parameters[param_index], json_serializer)
+
+                 if param_name.isdigit():
+                     param_index = int(param_name)
+                     if param_index < len(parameters):
+                         return _create_literal_node(parameters[param_index], json_serializer)
+             except (ValueError, IndexError, AttributeError):
+                 pass
+             return node
+
+     return node
+
+
+ def _get_bq_param_type(value: Any) -> tuple[str | None, str | None]:
      """Determine BigQuery parameter type from Python value.

      Args:
@@ -84,20 +223,30 @@ def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
      return None, None


- _BQ_PARAM_CREATOR_MAP: dict[str, Any] = {
-     "ARRAY": lambda name, value, array_type: ArrayQueryParameter(
-         name, array_type, [] if value is None else list(value)
-     ),
-     "JSON": lambda name, value, _: ScalarQueryParameter(name, "STRING", to_json(value)),
-     "SCALAR": lambda name, value, param_type: ScalarQueryParameter(name, param_type, value),
- }
+ def _get_bq_param_creator_map(json_serializer: "Callable[[Any], str]") -> dict[str, Any]:
+     """Get BigQuery parameter creator map with configurable JSON serializer.
+
+     Args:
+         json_serializer: Function to serialize dict/list to JSON string.
+
+     Returns:
+         Dictionary mapping parameter types to creator functions.
+     """
+     return {
+         "ARRAY": _create_array_parameter,
+         "JSON": lambda name, value, _: _create_json_parameter(name, value, json_serializer),
+         "SCALAR": _create_scalar_parameter,
+     }


- def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, ScalarQueryParameter]]":
+ def _create_bq_parameters(
+     parameters: Any, json_serializer: "Callable[[Any], str]"
+ ) -> "list[ArrayQueryParameter | ScalarQueryParameter]":
      """Create BigQuery QueryParameter objects from parameters.

      Args:
          parameters: Dict of named parameters or list of positional parameters
+         json_serializer: Function to serialize dict/list to JSON string

      Returns:
          List of BigQuery QueryParameter objects
@@ -105,7 +254,8 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
      if not parameters:
          return []

-     bq_parameters: list[Union[ArrayQueryParameter, ScalarQueryParameter]] = []
+     bq_parameters: list[ArrayQueryParameter | ScalarQueryParameter] = []
+     param_creator_map = _get_bq_param_creator_map(json_serializer)

      if isinstance(parameters, dict):
          for name, value in parameters.items():
@@ -114,13 +264,13 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
              param_type, array_element_type = _get_bq_param_type(actual_value)

              if param_type == "ARRAY" and array_element_type:
-                 creator = _BQ_PARAM_CREATOR_MAP["ARRAY"]
+                 creator = param_creator_map["ARRAY"]
                  bq_parameters.append(creator(param_name_for_bq, actual_value, array_element_type))
              elif param_type == "JSON":
-                 creator = _BQ_PARAM_CREATOR_MAP["JSON"]
+                 creator = param_creator_map["JSON"]
                  bq_parameters.append(creator(param_name_for_bq, actual_value, None))
              elif param_type:
-                 creator = _BQ_PARAM_CREATOR_MAP["SCALAR"]
+                 creator = param_creator_map["SCALAR"]
                  bq_parameters.append(creator(param_name_for_bq, actual_value, param_type))
              else:
                  msg = f"Unsupported BigQuery parameter type for value of param '{name}': {type(actual_value)}"
@@ -133,21 +283,33 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
      return bq_parameters


- bigquery_type_coercion_map = {
-     tuple: list,
-     bool: lambda x: x,
-     int: lambda x: x,
-     float: lambda x: x,
-     str: _type_converter.convert_if_detected,
-     bytes: lambda x: x,
-     datetime.datetime: lambda x: x,
-     datetime.date: lambda x: x,
-     datetime.time: lambda x: x,
-     Decimal: lambda x: x,
-     dict: lambda x: x,
-     list: lambda x: x,
-     type(None): lambda _: None,
- }
+ def _get_bigquery_type_coercion_map(type_converter: BigQueryTypeConverter) -> dict[type, Any]:
+     """Get BigQuery type coercion map with configurable type converter.
+
+     Args:
+         type_converter: BigQuery type converter instance
+
+     Returns:
+         Type coercion map for BigQuery
+     """
+     return {
+         tuple: list,
+         bool: lambda x: x,
+         int: lambda x: x,
+         float: lambda x: x,
+         str: type_converter.convert_if_detected,
+         bytes: lambda x: x,
+         datetime.datetime: lambda x: x,
+         datetime.date: lambda x: x,
+         datetime.time: lambda x: x,
+         Decimal: lambda x: x,
+         dict: lambda x: x,
+         list: lambda x: x,
+         type(None): lambda _: None,
+     }
+
+
+ bigquery_type_coercion_map = _get_bigquery_type_coercion_map(_default_type_converter)


  bigquery_statement_config = StatementConfig(
@@ -176,7 +338,7 @@ class BigQueryCursor:

      def __init__(self, connection: "BigQueryConnection") -> None:
          self.connection = connection
-         self.job: Optional[QueryJob] = None
+         self.job: QueryJob | None = None

      def __enter__(self) -> "BigQueryConnection":
          return self.connection
@@ -195,7 +357,11 @@


  class BigQueryExceptionHandler:
-     """Custom sync context manager for handling BigQuery database exceptions."""
+     """Context manager for handling BigQuery API exceptions.
+
+     Maps HTTP status codes and error reasons to specific SQLSpec exceptions
+     for better error handling in application code.
+     """

      __slots__ = ()

@@ -203,28 +369,82 @@ class BigQueryExceptionHandler:
          return None

      def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         _ = exc_tb
          if exc_type is None:
              return
-
          if issubclass(exc_type, GoogleCloudError):
-             e = exc_val
-             error_msg = str(e).lower()
-             if "syntax" in error_msg or "invalid" in error_msg:
-                 msg = f"BigQuery SQL syntax error: {e}"
-                 raise SQLParsingError(msg) from e
-             if "permission" in error_msg or "access" in error_msg:
-                 msg = f"BigQuery access error: {e}"
-                 raise SQLSpecError(msg) from e
-             msg = f"BigQuery cloud error: {e}"
-             raise SQLSpecError(msg) from e
-         if issubclass(exc_type, Exception):
-             e = exc_val
-             error_msg = str(e).lower()
-             if "parse" in error_msg or "syntax" in error_msg:
-                 msg = f"SQL parsing failed: {e}"
-                 raise SQLParsingError(msg) from e
-             msg = f"Unexpected BigQuery operation error: {e}"
-             raise SQLSpecError(msg) from e
+             self._map_bigquery_exception(exc_val)
+
+     def _map_bigquery_exception(self, e: Any) -> None:
+         """Map BigQuery exception to SQLSpec exception.
+
+         Args:
+             e: Google API exception instance
+         """
+         status_code = getattr(e, "code", None)
+         error_msg = str(e).lower()
+
+         if status_code == HTTP_CONFLICT or "already exists" in error_msg:
+             self._raise_unique_violation(e, status_code)
+         elif status_code == HTTP_NOT_FOUND or "not found" in error_msg:
+             self._raise_not_found_error(e, status_code)
+         elif status_code == HTTP_BAD_REQUEST:
+             self._handle_bad_request(e, status_code, error_msg)
+         elif status_code == HTTP_FORBIDDEN:
+             self._raise_connection_error(e, status_code)
+         elif status_code and status_code >= HTTP_SERVER_ERROR:
+             self._raise_operational_error(e, status_code)
+         else:
+             self._raise_generic_error(e, status_code)
+
+     def _handle_bad_request(self, e: Any, code: "int | None", error_msg: str) -> None:
+         """Handle 400 Bad Request errors.
+
+         Args:
+             e: Exception instance
+             code: HTTP status code
+             error_msg: Lowercase error message
+         """
+         if "syntax" in error_msg or "invalid query" in error_msg:
+             self._raise_parsing_error(e, code)
+         elif "type" in error_msg or "format" in error_msg:
+             self._raise_data_error(e, code)
+         else:
+             self._raise_generic_error(e, code)
+
+     def _raise_unique_violation(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery resource already exists {code_str}: {e}"
+         raise UniqueViolationError(msg) from e
+
+     def _raise_not_found_error(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery resource not found {code_str}: {e}"
+         raise NotFoundError(msg) from e
+
+     def _raise_parsing_error(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery query syntax error {code_str}: {e}"
+         raise SQLParsingError(msg) from e
+
+     def _raise_data_error(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery data error {code_str}: {e}"
+         raise DataError(msg) from e
+
+     def _raise_connection_error(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery permission denied {code_str}: {e}"
+         raise DatabaseConnectionError(msg) from e
+
+     def _raise_operational_error(self, e: Any, code: "int | None") -> None:
+         code_str = f"[HTTP {code}]" if code else ""
+         msg = f"BigQuery operational error {code_str}: {e}"
+         raise OperationalError(msg) from e
+
+     def _raise_generic_error(self, e: Any, code: "int | None") -> None:
+         msg = f"BigQuery error [HTTP {code}]: {e}" if code else f"BigQuery error: {e}"
+         raise SQLSpecError(msg) from e


  class BigQueryDriver(SyncDriverAdapterBase):
@@ -234,29 +454,53 @@ class BigQueryDriver(SyncDriverAdapterBase):
      type coercion, error handling, and query job management.
      """

-     __slots__ = ("_data_dictionary", "_default_query_job_config")
+     __slots__ = ("_data_dictionary", "_default_query_job_config", "_json_serializer", "_type_converter")
      dialect = "bigquery"

      def __init__(
          self,
          connection: BigQueryConnection,
-         statement_config: "Optional[StatementConfig]" = None,
-         driver_features: "Optional[dict[str, Any]]" = None,
+         statement_config: "StatementConfig | None" = None,
+         driver_features: "dict[str, Any] | None" = None,
      ) -> None:
+         features = driver_features or {}
+
+         json_serializer = features.get("json_serializer")
+         if json_serializer is None:
+             json_serializer = to_json
+
+         self._json_serializer: Callable[[Any], str] = json_serializer
+
+         enable_uuid_conversion = features.get("enable_uuid_conversion", True)
+         self._type_converter = BigQueryTypeConverter(enable_uuid_conversion=enable_uuid_conversion)
+
          if statement_config is None:
              cache_config = get_cache_config()
-             statement_config = bigquery_statement_config.replace(
-                 enable_caching=cache_config.compiled_cache_enabled,
+             type_coercion_map = _get_bigquery_type_coercion_map(self._type_converter)
+
+             param_config = ParameterStyleConfig(
+                 default_parameter_style=ParameterStyle.NAMED_AT,
+                 supported_parameter_styles={ParameterStyle.NAMED_AT, ParameterStyle.QMARK},
+                 default_execution_parameter_style=ParameterStyle.NAMED_AT,
+                 supported_execution_parameter_styles={ParameterStyle.NAMED_AT},
+                 type_coercion_map=type_coercion_map,
+                 has_native_list_expansion=True,
+                 needs_static_script_compilation=False,
+                 preserve_original_params_for_many=True,
+             )
+
+             statement_config = StatementConfig(
+                 dialect="bigquery",
+                 parameter_config=param_config,
                  enable_parsing=True,
                  enable_validation=True,
-                 dialect="bigquery",
+                 enable_caching=cache_config.compiled_cache_enabled,
+                 enable_parameter_type_wrapping=True,
              )

          super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
-         self._default_query_job_config: Optional[QueryJobConfig] = (driver_features or {}).get(
-             "default_query_job_config"
-         )
-         self._data_dictionary: Optional[SyncDataDictionaryBase] = None
+         self._default_query_job_config: QueryJobConfig | None = (driver_features or {}).get("default_query_job_config")
+         self._data_dictionary: SyncDataDictionaryBase | None = None

      def with_cursor(self, connection: "BigQueryConnection") -> "BigQueryCursor":
          """Create context manager for cursor management.
@@ -279,20 +523,39 @@
          """Handle database-specific exceptions and wrap them appropriately."""
          return BigQueryExceptionHandler()

+     def _should_copy_attribute(self, attr: str, source_config: QueryJobConfig) -> bool:
+         """Check if attribute should be copied between job configs.
+
+         Args:
+             attr: Attribute name to check.
+             source_config: Source configuration object.
+
+         Returns:
+             True if attribute should be copied, False otherwise.
+         """
+         if attr.startswith("_"):
+             return False
+
+         try:
+             value = getattr(source_config, attr)
+             return value is not None and not callable(value)
+         except (AttributeError, TypeError):
+             return False
+
      def _copy_job_config_attrs(self, source_config: QueryJobConfig, target_config: QueryJobConfig) -> None:
          """Copy non-private attributes from source config to target config.

          Args:
-             source_config: Configuration to copy attributes from
-             target_config: Configuration to copy attributes to
+             source_config: Configuration to copy attributes from.
+             target_config: Configuration to copy attributes to.
          """
          for attr in dir(source_config):
-             if attr.startswith("_"):
+             if not self._should_copy_attribute(attr, source_config):
                  continue
+
              try:
                  value = getattr(source_config, attr)
-                 if value is not None and not callable(value):
-                     setattr(target_config, attr, value)
+                 setattr(target_config, attr, value)
              except (AttributeError, TypeError):
                  continue

@@ -300,8 +563,8 @@
          self,
          sql_str: str,
          parameters: Any,
-         connection: Optional[BigQueryConnection] = None,
-         job_config: Optional[QueryJobConfig] = None,
+         connection: BigQueryConnection | None = None,
+         job_config: QueryJobConfig | None = None,
      ) -> QueryJob:
          """Execute a BigQuery job with configuration support.

@@ -324,7 +587,7 @@
          if job_config:
              self._copy_job_config_attrs(job_config, final_job_config)

-         bq_parameters = _create_bq_parameters(parameters)
+         bq_parameters = _create_bq_parameters(parameters, self._json_serializer)
          final_job_config.query_parameters = bq_parameters

          return conn.query(sql_str, job_config=final_job_config)
@@ -341,7 +604,7 @@
          """
          return [dict(row) for row in rows_iterator]

-     def _try_special_handling(self, cursor: "Any", statement: "SQL") -> "Optional[SQLResult]":
+     def _try_special_handling(self, cursor: "Any", statement: "SQL") -> "SQLResult | None":
          """Hook for BigQuery-specific special operations.

          BigQuery doesn't have complex special operations like PostgreSQL COPY,
@@ -360,12 +623,15 @@
      def _transform_ast_with_literals(self, sql: str, parameters: Any) -> str:
          """Transform SQL AST by replacing placeholders with literal values.

+         Used for BigQuery script execution and execute_many operations where
+         parameter binding is not supported. Safely embeds values as SQL literals.
+
          Args:
-             sql: SQL string to transform
-             parameters: Parameters to embed as literals
+             sql: SQL string to transform.
+             parameters: Parameters to embed as literals.

          Returns:
-             Transformed SQL string with literals embedded
+             Transformed SQL string with literals embedded.
          """
          if not parameters:
              return sql
@@ -377,70 +643,12 @@

          placeholder_counter = {"index": 0}

-         def replace_placeholder(node: exp.Expression) -> exp.Expression:
-             """Replace placeholder nodes with literal values."""
-             if isinstance(node, exp.Placeholder):
-                 if isinstance(parameters, (list, tuple)):
-                     current_index = placeholder_counter["index"]
-                     placeholder_counter["index"] += 1
-                     if current_index < len(parameters):
-                         return self._create_literal_node(parameters[current_index])
-                 return node
-             if isinstance(node, exp.Parameter):
-                 param_name = str(node.this) if hasattr(node.this, "__str__") else node.this
-                 if isinstance(parameters, dict):
-                     possible_names = [param_name, f"@{param_name}", f":{param_name}", f"param_{param_name}"]
-                     for name in possible_names:
-                         if name in parameters:
-                             actual_value = getattr(parameters[name], "value", parameters[name])
-                             return self._create_literal_node(actual_value)
-                     return node
-                 if isinstance(parameters, (list, tuple)):
-                     try:
-                         if param_name.startswith("param_"):
-                             param_index = int(param_name[6:])
-                             if param_index < len(parameters):
-                                 return self._create_literal_node(parameters[param_index])
-
-                         if param_name.isdigit():
-                             param_index = int(param_name)
-                             if param_index < len(parameters):
-                                 return self._create_literal_node(parameters[param_index])
-                     except (ValueError, IndexError, AttributeError):
-                         pass
-                     return node
-             return node
-
-         transformed_ast = ast.transform(replace_placeholder)
+         transformed_ast = ast.transform(
+             lambda node: _replace_placeholder_node(node, parameters, placeholder_counter, self._json_serializer)
+         )

          return transformed_ast.sql(dialect="bigquery")

-     def _create_literal_node(self, value: Any) -> "exp.Expression":
-         """Create a SQLGlot literal expression from a Python value.
-
-         Args:
-             value: Python value to convert to SQLGlot literal
-
-         Returns:
-             SQLGlot expression representing the literal value
-         """
-         if value is None:
-             return exp.Null()
-         if isinstance(value, bool):
-             return exp.Boolean(this=value)
-         if isinstance(value, (int, float)):
-             return exp.Literal.number(str(value))
-         if isinstance(value, str):
-             return exp.Literal.string(value)
-         if isinstance(value, (list, tuple)):
-             items = [self._create_literal_node(item) for item in value]
-             return exp.Array(expressions=items)
-         if isinstance(value, dict):
-             json_str = to_json(value)
-             return exp.Literal.string(json_str)
-
-         return exp.Literal.string(str(value))
-
      def _execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
          """Execute SQL script with statement splitting and parameter handling.

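The driver diff above adds a driver_features mapping read in BigQueryDriver.__init__ (json_serializer, enable_uuid_conversion, default_query_job_config) and maps BigQuery API errors onto specific sqlspec exceptions. The sketch below is illustrative only: the feature keys and exception classes come from the diff, while constructing the google.cloud.bigquery client directly, passing it as connection, the driver.execute() call, and the example project and table names are assumptions made for demonstration.

# Illustrative sketch only. The driver_features keys are the ones read in
# BigQueryDriver.__init__ above; the direct client construction, the example
# project/table names, and the driver.execute() call are assumptions.
import json

from google.cloud import bigquery
from google.cloud.bigquery import QueryJobConfig

from sqlspec.adapters.bigquery.driver import BigQueryDriver
from sqlspec.exceptions import NotFoundError, UniqueViolationError

client = bigquery.Client(project="example-project")  # hypothetical project id

driver = BigQueryDriver(
    connection=client,
    driver_features={
        # used for dict/list parameters and for embedding literals in scripts
        "json_serializer": lambda value: json.dumps(value, default=str),
        # skip UUID string detection during result conversion
        "enable_uuid_conversion": False,
        # attributes are copied onto the job config of every query the driver runs
        "default_query_job_config": QueryJobConfig(use_query_cache=False),
    },
)

try:
    # driver.execute() is assumed here purely to exercise the new exception mapping
    driver.execute(
        "INSERT INTO `example-project.demo.users` (id, name) VALUES (@id, @name)",
        {"id": 1, "name": "Ada"},
    )
except UniqueViolationError:
    pass  # HTTP 409 / "already exists" now surfaces as UniqueViolationError
except NotFoundError:
    pass  # HTTP 404 / missing dataset or table now surfaces as NotFoundError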
sqlspec/adapters/bigquery/litestar/__init__.py (new file)

@@ -0,0 +1,5 @@
+ """BigQuery Litestar integration."""
+
+ from sqlspec.adapters.bigquery.litestar.store import BigQueryStore
+
+ __all__ = ("BigQueryStore",)
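The new adapter-level Litestar package shown above only re-exports BigQueryStore from the adjacent store.py added in this release, so the two import paths in the sketch below refer to the same class; the store's constructor and methods are not part of this diff and are not shown.

# Both import paths resolve to the same class; the new package __init__ above
# only re-exports it. BigQueryStore's own API is not part of this diff.
from sqlspec.adapters.bigquery.litestar import BigQueryStore
from sqlspec.adapters.bigquery.litestar.store import BigQueryStore as BigQueryStoreDirect

assert BigQueryStore is BigQueryStoreDirect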