sqlspec 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (199)
  1. sqlspec/__init__.py +7 -15
  2. sqlspec/_serialization.py +256 -24
  3. sqlspec/_typing.py +71 -52
  4. sqlspec/adapters/adbc/_types.py +1 -1
  5. sqlspec/adapters/adbc/adk/__init__.py +5 -0
  6. sqlspec/adapters/adbc/adk/store.py +870 -0
  7. sqlspec/adapters/adbc/config.py +69 -12
  8. sqlspec/adapters/adbc/data_dictionary.py +340 -0
  9. sqlspec/adapters/adbc/driver.py +266 -58
  10. sqlspec/adapters/adbc/litestar/__init__.py +5 -0
  11. sqlspec/adapters/adbc/litestar/store.py +504 -0
  12. sqlspec/adapters/adbc/type_converter.py +153 -0
  13. sqlspec/adapters/aiosqlite/_types.py +1 -1
  14. sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
  15. sqlspec/adapters/aiosqlite/adk/store.py +527 -0
  16. sqlspec/adapters/aiosqlite/config.py +88 -15
  17. sqlspec/adapters/aiosqlite/data_dictionary.py +149 -0
  18. sqlspec/adapters/aiosqlite/driver.py +143 -40
  19. sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
  20. sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
  21. sqlspec/adapters/aiosqlite/pool.py +7 -7
  22. sqlspec/adapters/asyncmy/__init__.py +7 -1
  23. sqlspec/adapters/asyncmy/_types.py +2 -2
  24. sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
  25. sqlspec/adapters/asyncmy/adk/store.py +493 -0
  26. sqlspec/adapters/asyncmy/config.py +68 -23
  27. sqlspec/adapters/asyncmy/data_dictionary.py +161 -0
  28. sqlspec/adapters/asyncmy/driver.py +313 -58
  29. sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
  30. sqlspec/adapters/asyncmy/litestar/store.py +296 -0
  31. sqlspec/adapters/asyncpg/__init__.py +2 -1
  32. sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
  33. sqlspec/adapters/asyncpg/_types.py +11 -7
  34. sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
  35. sqlspec/adapters/asyncpg/adk/store.py +450 -0
  36. sqlspec/adapters/asyncpg/config.py +59 -35
  37. sqlspec/adapters/asyncpg/data_dictionary.py +173 -0
  38. sqlspec/adapters/asyncpg/driver.py +170 -25
  39. sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
  40. sqlspec/adapters/asyncpg/litestar/store.py +253 -0
  41. sqlspec/adapters/bigquery/_types.py +1 -1
  42. sqlspec/adapters/bigquery/adk/__init__.py +5 -0
  43. sqlspec/adapters/bigquery/adk/store.py +576 -0
  44. sqlspec/adapters/bigquery/config.py +27 -10
  45. sqlspec/adapters/bigquery/data_dictionary.py +149 -0
  46. sqlspec/adapters/bigquery/driver.py +368 -142
  47. sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
  48. sqlspec/adapters/bigquery/litestar/store.py +327 -0
  49. sqlspec/adapters/bigquery/type_converter.py +125 -0
  50. sqlspec/adapters/duckdb/_types.py +1 -1
  51. sqlspec/adapters/duckdb/adk/__init__.py +14 -0
  52. sqlspec/adapters/duckdb/adk/store.py +553 -0
  53. sqlspec/adapters/duckdb/config.py +80 -20
  54. sqlspec/adapters/duckdb/data_dictionary.py +163 -0
  55. sqlspec/adapters/duckdb/driver.py +167 -45
  56. sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
  57. sqlspec/adapters/duckdb/litestar/store.py +332 -0
  58. sqlspec/adapters/duckdb/pool.py +4 -4
  59. sqlspec/adapters/duckdb/type_converter.py +133 -0
  60. sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
  61. sqlspec/adapters/oracledb/_types.py +20 -2
  62. sqlspec/adapters/oracledb/adk/__init__.py +5 -0
  63. sqlspec/adapters/oracledb/adk/store.py +1745 -0
  64. sqlspec/adapters/oracledb/config.py +122 -32
  65. sqlspec/adapters/oracledb/data_dictionary.py +509 -0
  66. sqlspec/adapters/oracledb/driver.py +353 -91
  67. sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
  68. sqlspec/adapters/oracledb/litestar/store.py +767 -0
  69. sqlspec/adapters/oracledb/migrations.py +348 -73
  70. sqlspec/adapters/oracledb/type_converter.py +207 -0
  71. sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
  72. sqlspec/adapters/psqlpy/_types.py +2 -1
  73. sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
  74. sqlspec/adapters/psqlpy/adk/store.py +482 -0
  75. sqlspec/adapters/psqlpy/config.py +46 -17
  76. sqlspec/adapters/psqlpy/data_dictionary.py +172 -0
  77. sqlspec/adapters/psqlpy/driver.py +123 -209
  78. sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
  79. sqlspec/adapters/psqlpy/litestar/store.py +272 -0
  80. sqlspec/adapters/psqlpy/type_converter.py +102 -0
  81. sqlspec/adapters/psycopg/_type_handlers.py +80 -0
  82. sqlspec/adapters/psycopg/_types.py +2 -1
  83. sqlspec/adapters/psycopg/adk/__init__.py +5 -0
  84. sqlspec/adapters/psycopg/adk/store.py +944 -0
  85. sqlspec/adapters/psycopg/config.py +69 -35
  86. sqlspec/adapters/psycopg/data_dictionary.py +331 -0
  87. sqlspec/adapters/psycopg/driver.py +238 -81
  88. sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
  89. sqlspec/adapters/psycopg/litestar/store.py +554 -0
  90. sqlspec/adapters/sqlite/__init__.py +2 -1
  91. sqlspec/adapters/sqlite/_type_handlers.py +86 -0
  92. sqlspec/adapters/sqlite/_types.py +1 -1
  93. sqlspec/adapters/sqlite/adk/__init__.py +5 -0
  94. sqlspec/adapters/sqlite/adk/store.py +572 -0
  95. sqlspec/adapters/sqlite/config.py +87 -15
  96. sqlspec/adapters/sqlite/data_dictionary.py +149 -0
  97. sqlspec/adapters/sqlite/driver.py +137 -54
  98. sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
  99. sqlspec/adapters/sqlite/litestar/store.py +318 -0
  100. sqlspec/adapters/sqlite/pool.py +18 -9
  101. sqlspec/base.py +45 -26
  102. sqlspec/builder/__init__.py +73 -4
  103. sqlspec/builder/_base.py +162 -89
  104. sqlspec/builder/_column.py +62 -29
  105. sqlspec/builder/_ddl.py +180 -121
  106. sqlspec/builder/_delete.py +5 -4
  107. sqlspec/builder/_dml.py +388 -0
  108. sqlspec/{_sql.py → builder/_factory.py} +53 -94
  109. sqlspec/builder/_insert.py +32 -131
  110. sqlspec/builder/_join.py +375 -0
  111. sqlspec/builder/_merge.py +446 -11
  112. sqlspec/builder/_parsing_utils.py +111 -17
  113. sqlspec/builder/_select.py +1457 -24
  114. sqlspec/builder/_update.py +11 -42
  115. sqlspec/cli.py +307 -194
  116. sqlspec/config.py +252 -67
  117. sqlspec/core/__init__.py +5 -4
  118. sqlspec/core/cache.py +17 -17
  119. sqlspec/core/compiler.py +62 -9
  120. sqlspec/core/filters.py +37 -37
  121. sqlspec/core/hashing.py +9 -9
  122. sqlspec/core/parameters.py +83 -48
  123. sqlspec/core/result.py +102 -46
  124. sqlspec/core/splitter.py +16 -17
  125. sqlspec/core/statement.py +36 -30
  126. sqlspec/core/type_conversion.py +235 -0
  127. sqlspec/driver/__init__.py +7 -6
  128. sqlspec/driver/_async.py +188 -151
  129. sqlspec/driver/_common.py +285 -80
  130. sqlspec/driver/_sync.py +188 -152
  131. sqlspec/driver/mixins/_result_tools.py +20 -236
  132. sqlspec/driver/mixins/_sql_translator.py +4 -4
  133. sqlspec/exceptions.py +75 -7
  134. sqlspec/extensions/adk/__init__.py +53 -0
  135. sqlspec/extensions/adk/_types.py +51 -0
  136. sqlspec/extensions/adk/converters.py +172 -0
  137. sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
  138. sqlspec/extensions/adk/migrations/__init__.py +0 -0
  139. sqlspec/extensions/adk/service.py +181 -0
  140. sqlspec/extensions/adk/store.py +536 -0
  141. sqlspec/extensions/aiosql/adapter.py +73 -53
  142. sqlspec/extensions/litestar/__init__.py +21 -4
  143. sqlspec/extensions/litestar/cli.py +54 -10
  144. sqlspec/extensions/litestar/config.py +59 -266
  145. sqlspec/extensions/litestar/handlers.py +46 -17
  146. sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
  147. sqlspec/extensions/litestar/migrations/__init__.py +3 -0
  148. sqlspec/extensions/litestar/plugin.py +324 -223
  149. sqlspec/extensions/litestar/providers.py +25 -25
  150. sqlspec/extensions/litestar/store.py +265 -0
  151. sqlspec/loader.py +30 -49
  152. sqlspec/migrations/__init__.py +4 -3
  153. sqlspec/migrations/base.py +302 -39
  154. sqlspec/migrations/commands.py +611 -144
  155. sqlspec/migrations/context.py +142 -0
  156. sqlspec/migrations/fix.py +199 -0
  157. sqlspec/migrations/loaders.py +68 -23
  158. sqlspec/migrations/runner.py +543 -107
  159. sqlspec/migrations/tracker.py +237 -21
  160. sqlspec/migrations/utils.py +51 -3
  161. sqlspec/migrations/validation.py +177 -0
  162. sqlspec/protocols.py +66 -36
  163. sqlspec/storage/_utils.py +98 -0
  164. sqlspec/storage/backends/fsspec.py +134 -106
  165. sqlspec/storage/backends/local.py +78 -51
  166. sqlspec/storage/backends/obstore.py +278 -162
  167. sqlspec/storage/registry.py +75 -39
  168. sqlspec/typing.py +16 -84
  169. sqlspec/utils/config_resolver.py +153 -0
  170. sqlspec/utils/correlation.py +4 -5
  171. sqlspec/utils/data_transformation.py +3 -2
  172. sqlspec/utils/deprecation.py +9 -8
  173. sqlspec/utils/fixtures.py +4 -4
  174. sqlspec/utils/logging.py +46 -6
  175. sqlspec/utils/module_loader.py +2 -2
  176. sqlspec/utils/schema.py +288 -0
  177. sqlspec/utils/serializers.py +50 -2
  178. sqlspec/utils/sync_tools.py +21 -17
  179. sqlspec/utils/text.py +1 -2
  180. sqlspec/utils/type_guards.py +111 -20
  181. sqlspec/utils/version.py +433 -0
  182. {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/METADATA +40 -21
  183. sqlspec-0.27.0.dist-info/RECORD +207 -0
  184. sqlspec/builder/mixins/__init__.py +0 -55
  185. sqlspec/builder/mixins/_cte_and_set_ops.py +0 -254
  186. sqlspec/builder/mixins/_delete_operations.py +0 -50
  187. sqlspec/builder/mixins/_insert_operations.py +0 -282
  188. sqlspec/builder/mixins/_join_operations.py +0 -389
  189. sqlspec/builder/mixins/_merge_operations.py +0 -592
  190. sqlspec/builder/mixins/_order_limit_operations.py +0 -152
  191. sqlspec/builder/mixins/_pivot_operations.py +0 -157
  192. sqlspec/builder/mixins/_select_operations.py +0 -936
  193. sqlspec/builder/mixins/_update_operations.py +0 -218
  194. sqlspec/builder/mixins/_where_clause.py +0 -1304
  195. sqlspec-0.25.0.dist-info/RECORD +0 -139
  196. sqlspec-0.25.0.dist-info/licenses/NOTICE +0 -29
  197. {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/WHEEL +0 -0
  198. {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/entry_points.txt +0 -0
  199. {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/licenses/LICENSE +0 -0
@@ -7,7 +7,7 @@ type coercion, error handling, and query job management.
 import datetime
 import logging
 from decimal import Decimal
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any

 import sqlglot
 import sqlglot.expressions as exp
@@ -15,26 +15,42 @@ from google.cloud.bigquery import ArrayQueryParameter, QueryJob, QueryJobConfig,
 from google.cloud.exceptions import GoogleCloudError

 from sqlspec.adapters.bigquery._types import BigQueryConnection
-from sqlspec.core.cache import get_cache_config
-from sqlspec.core.parameters import ParameterStyle, ParameterStyleConfig
-from sqlspec.core.statement import StatementConfig
-from sqlspec.driver import SyncDriverAdapterBase
-from sqlspec.driver._common import ExecutionResult
-from sqlspec.exceptions import SQLParsingError, SQLSpecError
+from sqlspec.adapters.bigquery.type_converter import BigQueryTypeConverter
+from sqlspec.core import ParameterStyle, ParameterStyleConfig, StatementConfig, get_cache_config
+from sqlspec.driver import ExecutionResult, SyncDriverAdapterBase
+from sqlspec.exceptions import (
+    DatabaseConnectionError,
+    DataError,
+    NotFoundError,
+    OperationalError,
+    SQLParsingError,
+    SQLSpecError,
+    UniqueViolationError,
+)
 from sqlspec.utils.serializers import to_json

 if TYPE_CHECKING:
+    from collections.abc import Callable
     from contextlib import AbstractContextManager

-    from sqlspec.core.result import SQLResult
-    from sqlspec.core.statement import SQL
+    from sqlspec.core import SQL, SQLResult
+    from sqlspec.driver import SyncDataDictionaryBase

 logger = logging.getLogger(__name__)

 __all__ = ("BigQueryCursor", "BigQueryDriver", "BigQueryExceptionHandler", "bigquery_statement_config")

+HTTP_CONFLICT = 409
+HTTP_NOT_FOUND = 404
+HTTP_BAD_REQUEST = 400
+HTTP_FORBIDDEN = 403
+HTTP_SERVER_ERROR = 500
+
+
+_default_type_converter = BigQueryTypeConverter()
+

-_BQ_TYPE_MAP: dict[type, tuple[str, Optional[str]]] = {
+_BQ_TYPE_MAP: dict[type, tuple[str, str | None]] = {
     bool: ("BOOL", None),
     int: ("INT64", None),
     float: ("FLOAT64", None),
@@ -47,7 +63,134 @@ _BQ_TYPE_MAP: dict[type, tuple[str, Optional[str]]] = {
 }


-def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
+def _create_array_parameter(name: str, value: Any, array_type: str) -> ArrayQueryParameter:
+    """Create BigQuery ARRAY parameter.
+
+    Args:
+        name: Parameter name.
+        value: Array value (converted to list, empty list if None).
+        array_type: BigQuery array element type.
+
+    Returns:
+        ArrayQueryParameter instance.
+    """
+    return ArrayQueryParameter(name, array_type, [] if value is None else list(value))
+
+
+def _create_json_parameter(name: str, value: Any, json_serializer: "Callable[[Any], str]") -> ScalarQueryParameter:
+    """Create BigQuery JSON parameter as STRING type.
+
+    Args:
+        name: Parameter name.
+        value: JSON-serializable value.
+        json_serializer: Function to serialize to JSON string.
+
+    Returns:
+        ScalarQueryParameter with STRING type.
+    """
+    return ScalarQueryParameter(name, "STRING", json_serializer(value))
+
+
+def _create_scalar_parameter(name: str, value: Any, param_type: str) -> ScalarQueryParameter:
+    """Create BigQuery scalar parameter.
+
+    Args:
+        name: Parameter name.
+        value: Scalar value.
+        param_type: BigQuery parameter type (INT64, FLOAT64, etc.).
+
+    Returns:
+        ScalarQueryParameter instance.
+    """
+    return ScalarQueryParameter(name, param_type, value)
+
+
+def _create_literal_node(value: Any, json_serializer: "Callable[[Any], str]") -> "exp.Expression":
+    """Create a SQLGlot literal expression from a Python value.
+
+    Args:
+        value: Python value to convert to SQLGlot literal.
+        json_serializer: Function to serialize dict/list to JSON string.
+
+    Returns:
+        SQLGlot expression representing the literal value.
+    """
+    if value is None:
+        return exp.Null()
+    if isinstance(value, bool):
+        return exp.Boolean(this=value)
+    if isinstance(value, (int, float)):
+        return exp.Literal.number(str(value))
+    if isinstance(value, str):
+        return exp.Literal.string(value)
+    if isinstance(value, (list, tuple)):
+        items = [_create_literal_node(item, json_serializer) for item in value]
+        return exp.Array(expressions=items)
+    if isinstance(value, dict):
+        json_str = json_serializer(value)
+        return exp.Literal.string(json_str)
+
+    return exp.Literal.string(str(value))
+
+
+def _replace_placeholder_node(
+    node: "exp.Expression",
+    parameters: Any,
+    placeholder_counter: dict[str, int],
+    json_serializer: "Callable[[Any], str]",
+) -> "exp.Expression":
+    """Replace placeholder or parameter nodes with literal values.
+
+    Handles both positional placeholders (?) and named parameters (@name, :name).
+    Converts values to SQLGlot literal expressions for safe embedding in SQL.
+
+    Args:
+        node: SQLGlot expression node to check and potentially replace.
+        parameters: Parameter values (dict, list, or tuple).
+        placeholder_counter: Mutable counter dict for positional placeholders.
+        json_serializer: Function to serialize dict/list to JSON string.
+
+    Returns:
+        Literal expression if replacement made, otherwise original node.
+    """
+    if isinstance(node, exp.Placeholder):
+        if isinstance(parameters, (list, tuple)):
+            current_index = placeholder_counter["index"]
+            placeholder_counter["index"] += 1
+            if current_index < len(parameters):
+                return _create_literal_node(parameters[current_index], json_serializer)
+        return node
+
+    if isinstance(node, exp.Parameter):
+        param_name = str(node.this) if hasattr(node.this, "__str__") else node.this
+
+        if isinstance(parameters, dict):
+            possible_names = [param_name, f"@{param_name}", f":{param_name}", f"param_{param_name}"]
+            for name in possible_names:
+                if name in parameters:
+                    actual_value = getattr(parameters[name], "value", parameters[name])
+                    return _create_literal_node(actual_value, json_serializer)
+            return node
+
+        if isinstance(parameters, (list, tuple)):
+            try:
+                if param_name.startswith("param_"):
+                    param_index = int(param_name[6:])
+                    if param_index < len(parameters):
+                        return _create_literal_node(parameters[param_index], json_serializer)
+
+                if param_name.isdigit():
+                    param_index = int(param_name)
+                    if param_index < len(parameters):
+                        return _create_literal_node(parameters[param_index], json_serializer)
+            except (ValueError, IndexError, AttributeError):
+                pass
+        return node
+
+    return node
+
+
+def _get_bq_param_type(value: Any) -> tuple[str | None, str | None]:
     """Determine BigQuery parameter type from Python value.

     Args:
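
The placeholder-inlining logic that used to live in a nested closure is now exposed as the module-level `_create_literal_node` and `_replace_placeholder_node` helpers above. A minimal standalone sketch of the same sqlglot transform pattern, for orientation only (`inline_positional_params` is a hypothetical name, not part of sqlspec):

import sqlglot
import sqlglot.expressions as exp

def inline_positional_params(sql: str, params: list) -> str:
    # Walk the parsed AST and swap each ? placeholder for a literal,
    # mirroring what the helpers above do for BigQuery script execution.
    counter = {"index": 0}

    def replace(node: exp.Expression) -> exp.Expression:
        if isinstance(node, exp.Placeholder):
            value = params[counter["index"]]
            counter["index"] += 1
            if value is None:
                return exp.Null()
            if isinstance(value, bool):
                return exp.Boolean(this=value)
            if isinstance(value, (int, float)):
                return exp.Literal.number(str(value))
            return exp.Literal.string(str(value))
        return node

    ast = sqlglot.parse_one(sql, dialect="bigquery")
    return ast.transform(replace).sql(dialect="bigquery")

# inline_positional_params("SELECT * FROM t WHERE id = ? AND name = ?", [42, "abc"])
# returns roughly: SELECT * FROM t WHERE id = 42 AND name = 'abc'
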
@@ -80,20 +223,30 @@ def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
     return None, None


-_BQ_PARAM_CREATOR_MAP: dict[str, Any] = {
-    "ARRAY": lambda name, value, array_type: ArrayQueryParameter(
-        name, array_type, [] if value is None else list(value)
-    ),
-    "JSON": lambda name, value, _: ScalarQueryParameter(name, "STRING", to_json(value)),
-    "SCALAR": lambda name, value, param_type: ScalarQueryParameter(name, param_type, value),
-}
+def _get_bq_param_creator_map(json_serializer: "Callable[[Any], str]") -> dict[str, Any]:
+    """Get BigQuery parameter creator map with configurable JSON serializer.
+
+    Args:
+        json_serializer: Function to serialize dict/list to JSON string.

+    Returns:
+        Dictionary mapping parameter types to creator functions.
+    """
+    return {
+        "ARRAY": _create_array_parameter,
+        "JSON": lambda name, value, _: _create_json_parameter(name, value, json_serializer),
+        "SCALAR": _create_scalar_parameter,
+    }

-def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, ScalarQueryParameter]]":
+
+def _create_bq_parameters(
+    parameters: Any, json_serializer: "Callable[[Any], str]"
+) -> "list[ArrayQueryParameter | ScalarQueryParameter]":
     """Create BigQuery QueryParameter objects from parameters.

     Args:
         parameters: Dict of named parameters or list of positional parameters
+        json_serializer: Function to serialize dict/list to JSON string

     Returns:
         List of BigQuery QueryParameter objects
@@ -101,7 +254,8 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
     if not parameters:
         return []

-    bq_parameters: list[Union[ArrayQueryParameter, ScalarQueryParameter]] = []
+    bq_parameters: list[ArrayQueryParameter | ScalarQueryParameter] = []
+    param_creator_map = _get_bq_param_creator_map(json_serializer)

     if isinstance(parameters, dict):
         for name, value in parameters.items():
@@ -110,13 +264,13 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
             param_type, array_element_type = _get_bq_param_type(actual_value)

             if param_type == "ARRAY" and array_element_type:
-                creator = _BQ_PARAM_CREATOR_MAP["ARRAY"]
+                creator = param_creator_map["ARRAY"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, array_element_type))
             elif param_type == "JSON":
-                creator = _BQ_PARAM_CREATOR_MAP["JSON"]
+                creator = param_creator_map["JSON"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, None))
             elif param_type:
-                creator = _BQ_PARAM_CREATOR_MAP["SCALAR"]
+                creator = param_creator_map["SCALAR"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, param_type))
             else:
                 msg = f"Unsupported BigQuery parameter type for value of param '{name}': {type(actual_value)}"
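
For orientation, the objects these creator helpers produce are ordinary google-cloud-bigquery query parameters attached to a job config. A hand-rolled equivalent for two named parameters might look like this (sketch only, independent of sqlspec):

from google.cloud.bigquery import ArrayQueryParameter, QueryJobConfig, ScalarQueryParameter

# Referenced in SQL as @user_id and @tags, e.g.
#   SELECT * FROM dataset.users WHERE id = @user_id AND tag IN UNNEST(@tags)
query_parameters = [
    ScalarQueryParameter("user_id", "INT64", 42),
    ArrayQueryParameter("tags", "STRING", ["admin", "beta"]),
]
job_config = QueryJobConfig(query_parameters=query_parameters)
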
@@ -129,21 +283,33 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, S
     return bq_parameters


-bigquery_type_coercion_map = {
-    tuple: list,
-    bool: lambda x: x,
-    int: lambda x: x,
-    float: lambda x: x,
-    str: lambda x: x,
-    bytes: lambda x: x,
-    datetime.datetime: lambda x: x,
-    datetime.date: lambda x: x,
-    datetime.time: lambda x: x,
-    Decimal: lambda x: x,
-    dict: lambda x: x,
-    list: lambda x: x,
-    type(None): lambda _: None,
-}
+def _get_bigquery_type_coercion_map(type_converter: BigQueryTypeConverter) -> dict[type, Any]:
+    """Get BigQuery type coercion map with configurable type converter.
+
+    Args:
+        type_converter: BigQuery type converter instance
+
+    Returns:
+        Type coercion map for BigQuery
+    """
+    return {
+        tuple: list,
+        bool: lambda x: x,
+        int: lambda x: x,
+        float: lambda x: x,
+        str: type_converter.convert_if_detected,
+        bytes: lambda x: x,
+        datetime.datetime: lambda x: x,
+        datetime.date: lambda x: x,
+        datetime.time: lambda x: x,
+        Decimal: lambda x: x,
+        dict: lambda x: x,
+        list: lambda x: x,
+        type(None): lambda _: None,
+    }
+
+
+bigquery_type_coercion_map = _get_bigquery_type_coercion_map(_default_type_converter)


 bigquery_statement_config = StatementConfig(
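
The coercion map is a per-type lookup table consulted for each bound value before parameters are built; with this change, string values are additionally routed through the type converter (presumably to detect values such as UUID strings, given the `enable_uuid_conversion` feature flag below) instead of passing through unchanged. Roughly, the lookup works like the sketch below; the exact call site lives in sqlspec's parameter-processing core, not in this diff:

from typing import Any

def coerce_value(value: Any, coercion_map: "dict[type, Any]") -> Any:
    # Illustrative only: apply the handler registered for the value's type, if any.
    handler = coercion_map.get(type(value))
    return handler(value) if handler is not None else value
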
@@ -172,12 +338,12 @@ class BigQueryCursor:

     def __init__(self, connection: "BigQueryConnection") -> None:
         self.connection = connection
-        self.job: Optional[QueryJob] = None
+        self.job: QueryJob | None = None

     def __enter__(self) -> "BigQueryConnection":
         return self.connection

-    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+    def __exit__(self, *_: Any) -> None:
         """Clean up cursor resources including active QueryJobs."""
         if self.job is not None:
             try:
@@ -191,7 +357,11 @@ class BigQueryCursor:


 class BigQueryExceptionHandler:
-    """Custom sync context manager for handling BigQuery database exceptions."""
+    """Context manager for handling BigQuery API exceptions.
+
+    Maps HTTP status codes and error reasons to specific SQLSpec exceptions
+    for better error handling in application code.
+    """

     __slots__ = ()

@@ -199,28 +369,82 @@ class BigQueryExceptionHandler:
         return None

     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+        _ = exc_tb
         if exc_type is None:
             return
-
         if issubclass(exc_type, GoogleCloudError):
-            e = exc_val
-            error_msg = str(e).lower()
-            if "syntax" in error_msg or "invalid" in error_msg:
-                msg = f"BigQuery SQL syntax error: {e}"
-                raise SQLParsingError(msg) from e
-            if "permission" in error_msg or "access" in error_msg:
-                msg = f"BigQuery access error: {e}"
-                raise SQLSpecError(msg) from e
-            msg = f"BigQuery cloud error: {e}"
-            raise SQLSpecError(msg) from e
-        if issubclass(exc_type, Exception):
-            e = exc_val
-            error_msg = str(e).lower()
-            if "parse" in error_msg or "syntax" in error_msg:
-                msg = f"SQL parsing failed: {e}"
-                raise SQLParsingError(msg) from e
-            msg = f"Unexpected BigQuery operation error: {e}"
-            raise SQLSpecError(msg) from e
+            self._map_bigquery_exception(exc_val)
+
+    def _map_bigquery_exception(self, e: Any) -> None:
+        """Map BigQuery exception to SQLSpec exception.
+
+        Args:
+            e: Google API exception instance
+        """
+        status_code = getattr(e, "code", None)
+        error_msg = str(e).lower()
+
+        if status_code == HTTP_CONFLICT or "already exists" in error_msg:
+            self._raise_unique_violation(e, status_code)
+        elif status_code == HTTP_NOT_FOUND or "not found" in error_msg:
+            self._raise_not_found_error(e, status_code)
+        elif status_code == HTTP_BAD_REQUEST:
+            self._handle_bad_request(e, status_code, error_msg)
+        elif status_code == HTTP_FORBIDDEN:
+            self._raise_connection_error(e, status_code)
+        elif status_code and status_code >= HTTP_SERVER_ERROR:
+            self._raise_operational_error(e, status_code)
+        else:
+            self._raise_generic_error(e, status_code)
+
+    def _handle_bad_request(self, e: Any, code: "int | None", error_msg: str) -> None:
+        """Handle 400 Bad Request errors.
+
+        Args:
+            e: Exception instance
+            code: HTTP status code
+            error_msg: Lowercase error message
+        """
+        if "syntax" in error_msg or "invalid query" in error_msg:
+            self._raise_parsing_error(e, code)
+        elif "type" in error_msg or "format" in error_msg:
+            self._raise_data_error(e, code)
+        else:
+            self._raise_generic_error(e, code)
+
+    def _raise_unique_violation(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery resource already exists {code_str}: {e}"
+        raise UniqueViolationError(msg) from e
+
+    def _raise_not_found_error(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery resource not found {code_str}: {e}"
+        raise NotFoundError(msg) from e
+
+    def _raise_parsing_error(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery query syntax error {code_str}: {e}"
+        raise SQLParsingError(msg) from e
+
+    def _raise_data_error(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery data error {code_str}: {e}"
+        raise DataError(msg) from e
+
+    def _raise_connection_error(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery permission denied {code_str}: {e}"
+        raise DatabaseConnectionError(msg) from e
+
+    def _raise_operational_error(self, e: Any, code: "int | None") -> None:
+        code_str = f"[HTTP {code}]" if code else ""
+        msg = f"BigQuery operational error {code_str}: {e}"
+        raise OperationalError(msg) from e
+
+    def _raise_generic_error(self, e: Any, code: "int | None") -> None:
+        msg = f"BigQuery error [HTTP {code}]: {e}" if code else f"BigQuery error: {e}"
+        raise SQLSpecError(msg) from e


 class BigQueryDriver(SyncDriverAdapterBase):
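
With this mapping in place, callers can catch narrower exception types rather than a blanket SQLSpecError. A hedged usage sketch (the exception classes are the ones imported above; the `driver.execute(...)` call is only illustrative):

from sqlspec.exceptions import NotFoundError, SQLParsingError, UniqueViolationError

try:
    driver.execute("SELECT * FROM analytics.missing_table")  # hypothetical call on a BigQueryDriver
except NotFoundError:
    ...  # HTTP 404: dataset or table does not exist
except UniqueViolationError:
    ...  # HTTP 409: resource already exists
except SQLParsingError:
    ...  # HTTP 400 reporting a query syntax error
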
@@ -230,28 +454,53 @@ class BigQueryDriver(SyncDriverAdapterBase):
     type coercion, error handling, and query job management.
     """

-    __slots__ = ("_default_query_job_config",)
+    __slots__ = ("_data_dictionary", "_default_query_job_config", "_json_serializer", "_type_converter")
     dialect = "bigquery"

     def __init__(
         self,
         connection: BigQueryConnection,
-        statement_config: "Optional[StatementConfig]" = None,
-        driver_features: "Optional[dict[str, Any]]" = None,
+        statement_config: "StatementConfig | None" = None,
+        driver_features: "dict[str, Any] | None" = None,
     ) -> None:
+        features = driver_features or {}
+
+        json_serializer = features.get("json_serializer")
+        if json_serializer is None:
+            json_serializer = to_json
+
+        self._json_serializer: Callable[[Any], str] = json_serializer
+
+        enable_uuid_conversion = features.get("enable_uuid_conversion", True)
+        self._type_converter = BigQueryTypeConverter(enable_uuid_conversion=enable_uuid_conversion)
+
         if statement_config is None:
             cache_config = get_cache_config()
-            statement_config = bigquery_statement_config.replace(
-                enable_caching=cache_config.compiled_cache_enabled,
+            type_coercion_map = _get_bigquery_type_coercion_map(self._type_converter)
+
+            param_config = ParameterStyleConfig(
+                default_parameter_style=ParameterStyle.NAMED_AT,
+                supported_parameter_styles={ParameterStyle.NAMED_AT, ParameterStyle.QMARK},
+                default_execution_parameter_style=ParameterStyle.NAMED_AT,
+                supported_execution_parameter_styles={ParameterStyle.NAMED_AT},
+                type_coercion_map=type_coercion_map,
+                has_native_list_expansion=True,
+                needs_static_script_compilation=False,
+                preserve_original_params_for_many=True,
+            )
+
+            statement_config = StatementConfig(
+                dialect="bigquery",
+                parameter_config=param_config,
                 enable_parsing=True,
                 enable_validation=True,
-                dialect="bigquery",
+                enable_caching=cache_config.compiled_cache_enabled,
+                enable_parameter_type_wrapping=True,
             )

         super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
-        self._default_query_job_config: Optional[QueryJobConfig] = (driver_features or {}).get(
-            "default_query_job_config"
-        )
+        self._default_query_job_config: QueryJobConfig | None = (driver_features or {}).get("default_query_job_config")
+        self._data_dictionary: SyncDataDictionaryBase | None = None

     def with_cursor(self, connection: "BigQueryConnection") -> "BigQueryCursor":
         """Create context manager for cursor management.
@@ -274,20 +523,39 @@ class BigQueryDriver(SyncDriverAdapterBase):
         """Handle database-specific exceptions and wrap them appropriately."""
         return BigQueryExceptionHandler()

+    def _should_copy_attribute(self, attr: str, source_config: QueryJobConfig) -> bool:
+        """Check if attribute should be copied between job configs.
+
+        Args:
+            attr: Attribute name to check.
+            source_config: Source configuration object.
+
+        Returns:
+            True if attribute should be copied, False otherwise.
+        """
+        if attr.startswith("_"):
+            return False
+
+        try:
+            value = getattr(source_config, attr)
+            return value is not None and not callable(value)
+        except (AttributeError, TypeError):
+            return False
+
     def _copy_job_config_attrs(self, source_config: QueryJobConfig, target_config: QueryJobConfig) -> None:
         """Copy non-private attributes from source config to target config.

         Args:
-            source_config: Configuration to copy attributes from
-            target_config: Configuration to copy attributes to
+            source_config: Configuration to copy attributes from.
+            target_config: Configuration to copy attributes to.
         """
         for attr in dir(source_config):
-            if attr.startswith("_"):
+            if not self._should_copy_attribute(attr, source_config):
                 continue
+
             try:
                 value = getattr(source_config, attr)
-                if value is not None and not callable(value):
-                    setattr(target_config, attr, value)
+                setattr(target_config, attr, value)
             except (AttributeError, TypeError):
                 continue

@@ -295,8 +563,8 @@ class BigQueryDriver(SyncDriverAdapterBase):
         self,
         sql_str: str,
         parameters: Any,
-        connection: Optional[BigQueryConnection] = None,
-        job_config: Optional[QueryJobConfig] = None,
+        connection: BigQueryConnection | None = None,
+        job_config: QueryJobConfig | None = None,
     ) -> QueryJob:
         """Execute a BigQuery job with configuration support.

@@ -319,7 +587,7 @@ class BigQueryDriver(SyncDriverAdapterBase):
         if job_config:
             self._copy_job_config_attrs(job_config, final_job_config)

-        bq_parameters = _create_bq_parameters(parameters)
+        bq_parameters = _create_bq_parameters(parameters, self._json_serializer)
         final_job_config.query_parameters = bq_parameters

         return conn.query(sql_str, job_config=final_job_config)
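
Both the custom `json_serializer` used here and the `default_query_job_config` merged above are supplied through `driver_features` when the driver is constructed (see the `__init__` hunk earlier). A sketch, assuming a `google.cloud.bigquery.Client` as the connection and `orjson` as the serializer; both are illustrative choices rather than requirements:

import orjson
from google.cloud import bigquery

from sqlspec.adapters.bigquery.driver import BigQueryDriver

client = bigquery.Client(project="my-project")
driver = BigQueryDriver(
    connection=client,
    driver_features={
        "json_serializer": lambda value: orjson.dumps(value).decode(),
        "enable_uuid_conversion": False,
        "default_query_job_config": bigquery.QueryJobConfig(use_query_cache=True),
    },
)
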
@@ -336,7 +604,7 @@ class BigQueryDriver(SyncDriverAdapterBase):
         """
         return [dict(row) for row in rows_iterator]

-    def _try_special_handling(self, cursor: "Any", statement: "SQL") -> "Optional[SQLResult]":
+    def _try_special_handling(self, cursor: "Any", statement: "SQL") -> "SQLResult | None":
         """Hook for BigQuery-specific special operations.

         BigQuery doesn't have complex special operations like PostgreSQL COPY,
@@ -355,12 +623,15 @@ class BigQueryDriver(SyncDriverAdapterBase):
     def _transform_ast_with_literals(self, sql: str, parameters: Any) -> str:
         """Transform SQL AST by replacing placeholders with literal values.

+        Used for BigQuery script execution and execute_many operations where
+        parameter binding is not supported. Safely embeds values as SQL literals.
+
         Args:
-            sql: SQL string to transform
-            parameters: Parameters to embed as literals
+            sql: SQL string to transform.
+            parameters: Parameters to embed as literals.

         Returns:
-            Transformed SQL string with literals embedded
+            Transformed SQL string with literals embedded.
         """
         if not parameters:
             return sql
@@ -372,70 +643,12 @@ class BigQueryDriver(SyncDriverAdapterBase):

         placeholder_counter = {"index": 0}

-        def replace_placeholder(node: exp.Expression) -> exp.Expression:
-            """Replace placeholder nodes with literal values."""
-            if isinstance(node, exp.Placeholder):
-                if isinstance(parameters, (list, tuple)):
-                    current_index = placeholder_counter["index"]
-                    placeholder_counter["index"] += 1
-                    if current_index < len(parameters):
-                        return self._create_literal_node(parameters[current_index])
-                return node
-            if isinstance(node, exp.Parameter):
-                param_name = str(node.this) if hasattr(node.this, "__str__") else node.this
-                if isinstance(parameters, dict):
-                    possible_names = [param_name, f"@{param_name}", f":{param_name}", f"param_{param_name}"]
-                    for name in possible_names:
-                        if name in parameters:
-                            actual_value = getattr(parameters[name], "value", parameters[name])
-                            return self._create_literal_node(actual_value)
-                    return node
-                if isinstance(parameters, (list, tuple)):
-                    try:
-                        if param_name.startswith("param_"):
-                            param_index = int(param_name[6:])
-                            if param_index < len(parameters):
-                                return self._create_literal_node(parameters[param_index])
-
-                        if param_name.isdigit():
-                            param_index = int(param_name)
-                            if param_index < len(parameters):
-                                return self._create_literal_node(parameters[param_index])
-                    except (ValueError, IndexError, AttributeError):
-                        pass
-                return node
-            return node
-
-        transformed_ast = ast.transform(replace_placeholder)
+        transformed_ast = ast.transform(
+            lambda node: _replace_placeholder_node(node, parameters, placeholder_counter, self._json_serializer)
+        )

         return transformed_ast.sql(dialect="bigquery")

-    def _create_literal_node(self, value: Any) -> "exp.Expression":
-        """Create a SQLGlot literal expression from a Python value.
-
-        Args:
-            value: Python value to convert to SQLGlot literal
-
-        Returns:
-            SQLGlot expression representing the literal value
-        """
-        if value is None:
-            return exp.Null()
-        if isinstance(value, bool):
-            return exp.Boolean(this=value)
-        if isinstance(value, (int, float)):
-            return exp.Literal.number(str(value))
-        if isinstance(value, str):
-            return exp.Literal.string(value)
-        if isinstance(value, (list, tuple)):
-            items = [self._create_literal_node(item) for item in value]
-            return exp.Array(expressions=items)
-        if isinstance(value, dict):
-            json_str = to_json(value)
-            return exp.Literal.string(json_str)
-
-        return exp.Literal.string(str(value))
-
     def _execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
         """Execute SQL script with statement splitting and parameter handling.

@@ -532,3 +745,16 @@
         cursor.job.result()
         affected_rows = cursor.job.num_dml_affected_rows or 0
         return self.create_execution_result(cursor, rowcount_override=affected_rows)
+
+    @property
+    def data_dictionary(self) -> "SyncDataDictionaryBase":
+        """Get the data dictionary for this driver.
+
+        Returns:
+            Data dictionary instance for metadata queries
+        """
+        if self._data_dictionary is None:
+            from sqlspec.adapters.bigquery.data_dictionary import BigQuerySyncDataDictionary
+
+            self._data_dictionary = BigQuerySyncDataDictionary()
+        return self._data_dictionary