sqlspec 0.17.1__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlspec might be problematic. See the package registry's advisory page for more details.

Files changed (77)
  1. sqlspec/__init__.py +1 -1
  2. sqlspec/_sql.py +54 -159
  3. sqlspec/adapters/adbc/config.py +24 -30
  4. sqlspec/adapters/adbc/driver.py +42 -61
  5. sqlspec/adapters/aiosqlite/config.py +5 -10
  6. sqlspec/adapters/aiosqlite/driver.py +9 -25
  7. sqlspec/adapters/aiosqlite/pool.py +43 -35
  8. sqlspec/adapters/asyncmy/config.py +10 -7
  9. sqlspec/adapters/asyncmy/driver.py +18 -39
  10. sqlspec/adapters/asyncpg/config.py +4 -0
  11. sqlspec/adapters/asyncpg/driver.py +32 -79
  12. sqlspec/adapters/bigquery/config.py +12 -65
  13. sqlspec/adapters/bigquery/driver.py +39 -133
  14. sqlspec/adapters/duckdb/config.py +11 -15
  15. sqlspec/adapters/duckdb/driver.py +61 -85
  16. sqlspec/adapters/duckdb/pool.py +2 -5
  17. sqlspec/adapters/oracledb/_types.py +8 -1
  18. sqlspec/adapters/oracledb/config.py +55 -38
  19. sqlspec/adapters/oracledb/driver.py +35 -92
  20. sqlspec/adapters/oracledb/migrations.py +257 -0
  21. sqlspec/adapters/psqlpy/config.py +13 -9
  22. sqlspec/adapters/psqlpy/driver.py +28 -103
  23. sqlspec/adapters/psycopg/config.py +9 -5
  24. sqlspec/adapters/psycopg/driver.py +107 -175
  25. sqlspec/adapters/sqlite/config.py +7 -5
  26. sqlspec/adapters/sqlite/driver.py +37 -73
  27. sqlspec/adapters/sqlite/pool.py +3 -12
  28. sqlspec/base.py +19 -22
  29. sqlspec/builder/__init__.py +1 -1
  30. sqlspec/builder/_base.py +34 -20
  31. sqlspec/builder/_ddl.py +407 -183
  32. sqlspec/builder/_insert.py +1 -1
  33. sqlspec/builder/mixins/_insert_operations.py +26 -6
  34. sqlspec/builder/mixins/_merge_operations.py +1 -1
  35. sqlspec/builder/mixins/_select_operations.py +1 -5
  36. sqlspec/cli.py +281 -33
  37. sqlspec/config.py +183 -14
  38. sqlspec/core/__init__.py +89 -14
  39. sqlspec/core/cache.py +57 -104
  40. sqlspec/core/compiler.py +57 -112
  41. sqlspec/core/filters.py +1 -21
  42. sqlspec/core/hashing.py +13 -47
  43. sqlspec/core/parameters.py +272 -261
  44. sqlspec/core/result.py +12 -27
  45. sqlspec/core/splitter.py +17 -21
  46. sqlspec/core/statement.py +150 -159
  47. sqlspec/driver/_async.py +2 -15
  48. sqlspec/driver/_common.py +16 -95
  49. sqlspec/driver/_sync.py +2 -15
  50. sqlspec/driver/mixins/_result_tools.py +8 -29
  51. sqlspec/driver/mixins/_sql_translator.py +6 -8
  52. sqlspec/exceptions.py +1 -2
  53. sqlspec/extensions/litestar/plugin.py +15 -8
  54. sqlspec/loader.py +43 -115
  55. sqlspec/migrations/__init__.py +1 -1
  56. sqlspec/migrations/base.py +34 -45
  57. sqlspec/migrations/commands.py +34 -15
  58. sqlspec/migrations/loaders.py +1 -1
  59. sqlspec/migrations/runner.py +104 -19
  60. sqlspec/migrations/tracker.py +49 -2
  61. sqlspec/protocols.py +3 -6
  62. sqlspec/storage/__init__.py +4 -4
  63. sqlspec/storage/backends/fsspec.py +5 -6
  64. sqlspec/storage/backends/obstore.py +7 -8
  65. sqlspec/storage/registry.py +3 -3
  66. sqlspec/utils/__init__.py +2 -2
  67. sqlspec/utils/logging.py +6 -10
  68. sqlspec/utils/sync_tools.py +27 -4
  69. sqlspec/utils/text.py +6 -1
  70. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/METADATA +1 -1
  71. sqlspec-0.19.0.dist-info/RECORD +138 -0
  72. sqlspec/builder/_ddl_utils.py +0 -103
  73. sqlspec-0.17.1.dist-info/RECORD +0 -138
  74. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/WHEEL +0 -0
  75. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/entry_points.txt +0 -0
  76. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/licenses/LICENSE +0 -0
  77. {sqlspec-0.17.1.dist-info → sqlspec-0.19.0.dist-info}/licenses/NOTICE +0 -0
@@ -31,7 +31,7 @@ __all__ = ("AsyncpgCursor", "AsyncpgDriver", "AsyncpgExceptionHandler", "asyncpg
31
31
 
32
32
  logger = get_logger("adapters.asyncpg")
33
33
 
34
- # Enhanced AsyncPG statement configuration using core modules with performance optimizations
34
+
35
35
  asyncpg_statement_config = StatementConfig(
36
36
  dialect="postgres",
37
37
  parameter_config=ParameterStyleConfig(
@@ -44,20 +44,19 @@ asyncpg_statement_config = StatementConfig(
44
44
  needs_static_script_compilation=False,
45
45
  preserve_parameter_format=True,
46
46
  ),
47
- # Core processing features enabled for performance
48
47
  enable_parsing=True,
49
48
  enable_validation=True,
50
49
  enable_caching=True,
51
50
  enable_parameter_type_wrapping=True,
52
51
  )
53
52
 
54
- # PostgreSQL status parsing constants for row count extraction
53
+
55
54
  ASYNC_PG_STATUS_REGEX: Final[re.Pattern[str]] = re.compile(r"^([A-Z]+)(?:\s+(\d+))?\s+(\d+)$", re.IGNORECASE)
56
55
  EXPECTED_REGEX_GROUPS: Final[int] = 3
57
56
 
58
57
 
59
58
  class AsyncpgCursor:
60
- """Context manager for AsyncPG cursor management with enhanced error handling."""
59
+ """Context manager for AsyncPG cursor management."""
61
60
 
62
61
  __slots__ = ("connection",)
63
62
 
@@ -68,12 +67,11 @@ class AsyncpgCursor:
68
67
  return self.connection
69
68
 
70
69
  async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
71
- _ = (exc_type, exc_val, exc_tb) # Mark as intentionally unused
72
- # AsyncPG connections don't need explicit cursor cleanup
70
+ _ = (exc_type, exc_val, exc_tb)
73
71
 
74
72
 
75
73
  class AsyncpgExceptionHandler:
76
- """Custom async context manager for handling AsyncPG database exceptions."""
74
+ """Async context manager for handling AsyncPG database exceptions."""
77
75
 
78
76
  __slots__ = ()
79
77
 
@@ -102,34 +100,15 @@ class AsyncpgExceptionHandler:
102
100
 
103
101
 
104
102
  class AsyncpgDriver(AsyncDriverAdapterBase):
105
- """Enhanced AsyncPG PostgreSQL driver with CORE_ROUND_3 architecture integration.
106
-
107
- This driver leverages the complete core module system for maximum performance:
108
-
109
- Performance Improvements:
110
- - 5-10x faster SQL compilation through single-pass processing
111
- - 40-60% memory reduction through __slots__ optimization
112
- - Enhanced caching for repeated statement execution
113
- - Zero-copy parameter processing where possible
114
- - Async-optimized resource management
115
-
116
- Core Integration Features:
117
- - sqlspec.core.statement for enhanced SQL processing
118
- - sqlspec.core.parameters for optimized parameter handling
119
- - sqlspec.core.cache for unified statement caching
120
- - sqlspec.core.config for centralized configuration management
121
-
122
- PostgreSQL Features:
123
- - Advanced COPY operation support
124
- - Numeric parameter style optimization
125
- - PostgreSQL-specific exception handling
126
- - Transaction management with async patterns
127
-
128
- Compatibility:
129
- - 100% backward compatibility with existing AsyncPG driver interface
130
- - All existing async tests pass without modification
131
- - Complete StatementConfig API compatibility
132
- - Preserved async patterns and exception handling
103
+ """AsyncPG PostgreSQL driver for async database operations.
104
+
105
+ Features:
106
+ - COPY operation support
107
+ - Numeric parameter style handling
108
+ - PostgreSQL exception handling
109
+ - Transaction management
110
+ - SQL statement compilation and caching
111
+ - Parameter processing and type coercion
133
112
  """
134
113
 
135
114
  __slots__ = ()
@@ -141,25 +120,23 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
141
120
  statement_config: "Optional[StatementConfig]" = None,
142
121
  driver_features: "Optional[dict[str, Any]]" = None,
143
122
  ) -> None:
144
- # Enhanced configuration with global settings integration
145
123
  if statement_config is None:
146
124
  cache_config = get_cache_config()
147
- enhanced_config = asyncpg_statement_config.replace(
125
+ statement_config = asyncpg_statement_config.replace(
148
126
  enable_caching=cache_config.compiled_cache_enabled,
149
- enable_parsing=True, # Default to enabled
150
- enable_validation=True, # Default to enabled
151
- dialect="postgres", # Use adapter-specific dialect
127
+ enable_parsing=True,
128
+ enable_validation=True,
129
+ dialect="postgres",
152
130
  )
153
- statement_config = enhanced_config
154
131
 
155
132
  super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
156
133
 
157
134
  def with_cursor(self, connection: "AsyncpgConnection") -> "AsyncpgCursor":
158
- """Create context manager for AsyncPG cursor with enhanced resource management."""
135
+ """Create context manager for AsyncPG cursor."""
159
136
  return AsyncpgCursor(connection)
160
137
 
161
138
  def handle_database_exceptions(self) -> "AbstractAsyncContextManager[None]":
162
- """Enhanced async exception handling with detailed error categorization."""
139
+ """Handle database exceptions with PostgreSQL error codes."""
163
140
  return AsyncpgExceptionHandler()
164
141
 
165
142
  async def _try_special_handling(self, cursor: "AsyncpgConnection", statement: "SQL") -> "Optional[SQLResult]":
@@ -179,23 +156,21 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
179
156
  return None
180
157
 
181
158
  async def _handle_copy_operation(self, cursor: "AsyncpgConnection", statement: "SQL") -> None:
182
- """Handle PostgreSQL COPY operations with enhanced data processing.
159
+ """Handle PostgreSQL COPY operations.
183
160
 
184
- Supports both COPY FROM STDIN and COPY TO STDOUT operations
185
- with proper data format handling and error management.
161
+ Supports both COPY FROM STDIN and COPY TO STDOUT operations.
186
162
 
187
163
  Args:
188
164
  cursor: AsyncPG connection object
189
165
  statement: SQL statement with COPY operation
190
166
  """
191
- # Get metadata for copy operation data if available
167
+
192
168
  metadata: dict[str, Any] = getattr(statement, "metadata", {})
193
169
  sql_text = statement.sql
194
170
 
195
171
  copy_data = metadata.get("postgres_copy_data")
196
172
 
197
173
  if copy_data:
198
- # Process different data formats for COPY operations
199
174
  if isinstance(copy_data, dict):
200
175
  data_str = (
201
176
  str(next(iter(copy_data.values())))
@@ -207,25 +182,18 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
207
182
  else:
208
183
  data_str = str(copy_data)
209
184
 
210
- # Handle COPY FROM STDIN operations with binary data support
211
185
  if "FROM STDIN" in sql_text.upper():
212
186
  from io import BytesIO
213
187
 
214
188
  data_io = BytesIO(data_str.encode("utf-8"))
215
189
  await cursor.copy_from_query(sql_text, output=data_io)
216
190
  else:
217
- # Standard COPY operation
218
191
  await cursor.execute(sql_text)
219
192
  else:
220
- # COPY without additional data - execute directly
221
193
  await cursor.execute(sql_text)
222
194
 
223
195
  async def _execute_script(self, cursor: "AsyncpgConnection", statement: "SQL") -> "ExecutionResult":
224
- """Execute SQL script using enhanced statement splitting and parameter handling.
225
-
226
- Uses core module optimization for statement parsing and parameter processing.
227
- Handles PostgreSQL-specific script execution requirements.
228
- """
196
+ """Execute SQL script with statement splitting and parameter handling."""
229
197
  sql, _ = self._get_compiled_sql(statement, self.statement_config)
230
198
  statements = self.split_script_statements(sql, statement.statement_config, strip_trailing_semicolon=True)
231
199
 
@@ -233,8 +201,6 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
233
201
  last_result = None
234
202
 
235
203
  for stmt in statements:
236
- # Execute each statement individually
237
- # If parameters were embedded (static style), prepared_parameters will be None/empty
238
204
  result = await cursor.execute(stmt)
239
205
  last_result = result
240
206
  successful_count += 1
@@ -244,38 +210,28 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
244
210
  )
245
211
 
246
212
  async def _execute_many(self, cursor: "AsyncpgConnection", statement: "SQL") -> "ExecutionResult":
247
- """Execute SQL with multiple parameter sets using optimized batch processing.
248
-
249
- Leverages AsyncPG's executemany for efficient batch operations with
250
- core parameter processing for enhanced type handling and validation.
251
- """
213
+ """Execute SQL with multiple parameter sets using AsyncPG's executemany."""
252
214
  sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
253
215
 
254
216
  if prepared_parameters:
255
- # Use AsyncPG's efficient executemany for batch operations
256
217
  await cursor.executemany(sql, prepared_parameters)
257
- # Calculate affected rows (AsyncPG doesn't provide direct rowcount for executemany)
218
+
258
219
  affected_rows = len(prepared_parameters)
259
220
  else:
260
- # Handle empty parameter case - no operations to execute
261
221
  affected_rows = 0
262
222
 
263
223
  return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)
264
224
 
265
225
  async def _execute_statement(self, cursor: "AsyncpgConnection", statement: "SQL") -> "ExecutionResult":
266
- """Execute single SQL statement with enhanced data handling and performance optimization.
226
+ """Execute single SQL statement.
267
227
 
268
- Uses core processing for optimal parameter handling and result processing.
269
- Handles both SELECT queries and non-SELECT operations efficiently.
228
+ Handles both SELECT queries and non-SELECT operations.
270
229
  """
271
230
  sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
272
231
 
273
- # Enhanced SELECT result processing
274
232
  if statement.returns_rows():
275
- # Use AsyncPG's fetch for SELECT operations
276
233
  records = await cursor.fetch(sql, *prepared_parameters) if prepared_parameters else await cursor.fetch(sql)
277
234
 
278
- # Efficient data conversion from asyncpg Records to dicts
279
235
  data = [dict(record) for record in records]
280
236
  column_names = list(records[0].keys()) if records else []
281
237
 
@@ -283,10 +239,8 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
283
239
  cursor, selected_data=data, column_names=column_names, data_row_count=len(data), is_select_result=True
284
240
  )
285
241
 
286
- # Enhanced non-SELECT result processing
287
242
  result = await cursor.execute(sql, *prepared_parameters) if prepared_parameters else await cursor.execute(sql)
288
243
 
289
- # Parse AsyncPG status string for affected rows
290
244
  affected_rows = self._parse_asyncpg_status(result) if isinstance(result, str) else 0
291
245
 
292
246
  return self.create_execution_result(cursor, rowcount_override=affected_rows)
@@ -312,15 +266,14 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
312
266
  groups = match.groups()
313
267
  if len(groups) >= EXPECTED_REGEX_GROUPS:
314
268
  try:
315
- return int(groups[-1]) # Last group contains the row count
269
+ return int(groups[-1])
316
270
  except (ValueError, IndexError):
317
271
  pass
318
272
 
319
273
  return 0
320
274
 
321
- # Async transaction management with enhanced error handling
322
275
  async def begin(self) -> None:
323
- """Begin a database transaction with enhanced error handling."""
276
+ """Begin a database transaction."""
324
277
  try:
325
278
  await self.connection.execute("BEGIN")
326
279
  except asyncpg.PostgresError as e:
@@ -328,7 +281,7 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
328
281
  raise SQLSpecError(msg) from e
329
282
 
330
283
  async def rollback(self) -> None:
331
- """Rollback the current transaction with enhanced error handling."""
284
+ """Rollback the current transaction."""
332
285
  try:
333
286
  await self.connection.execute("ROLLBACK")
334
287
  except asyncpg.PostgresError as e:
@@ -336,7 +289,7 @@ class AsyncpgDriver(AsyncDriverAdapterBase):
336
289
  raise SQLSpecError(msg) from e
337
290
 
338
291
  async def commit(self) -> None:
339
- """Commit the current transaction with enhanced error handling."""
292
+ """Commit the current transaction."""
340
293
  try:
341
294
  await self.connection.execute("COMMIT")
342
295
  except asyncpg.PostgresError as e:
@@ -1,4 +1,4 @@
1
- """BigQuery database configuration with direct field-based configuration."""
1
+ """BigQuery database configuration."""
2
2
 
3
3
  import contextlib
4
4
  import logging
@@ -32,14 +32,12 @@ class BigQueryConnectionParams(TypedDict, total=False):
32
32
  Includes both official BigQuery client parameters and BigQuery-specific configuration options.
33
33
  """
34
34
 
35
- # Official BigQuery client constructor parameters
36
35
  project: NotRequired[str]
37
36
  location: NotRequired[str]
38
37
  credentials: NotRequired["Credentials"]
39
38
  client_options: NotRequired["ClientOptions"]
40
39
  client_info: NotRequired["ClientInfo"]
41
40
 
42
- # BigQuery-specific configuration options
43
41
  default_query_job_config: NotRequired[QueryJobConfig]
44
42
  default_load_job_config: NotRequired[LoadJobConfig]
45
43
  dataset_id: NotRequired[str]
@@ -71,6 +69,7 @@ class BigQueryDriverFeatures(TypedDict, total=False):
71
69
  Only non-standard BigQuery client parameters that are SQLSpec-specific extensions.
72
70
  """
73
71
 
72
+ connection_instance: NotRequired["BigQueryConnection"]
74
73
  on_job_start: NotRequired["Callable[[str], None]"]
75
74
  on_job_complete: NotRequired["Callable[[str, Any], None]"]
76
75
  on_connection_create: NotRequired["Callable[[Any], None]"]
@@ -80,11 +79,9 @@ __all__ = ("BigQueryConfig", "BigQueryConnectionParams", "BigQueryDriverFeatures
80
79
 
81
80
 
82
81
  class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
83
- """Enhanced BigQuery configuration with comprehensive feature support.
82
+ """BigQuery configuration.
84
83
 
85
- BigQuery is Google Cloud's serverless, highly scalable data warehouse with
86
- advanced analytics, machine learning, and AI capabilities. This configuration
87
- supports all BigQuery features including:
84
+ Configuration for Google Cloud BigQuery connections.
88
85
  """
89
86
 
90
87
  driver_type: ClassVar[type[BigQueryDriver]] = BigQueryDriver
@@ -93,69 +90,29 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
93
90
  def __init__(
94
91
  self,
95
92
  *,
96
- connection_instance: "Optional[BigQueryConnection]" = None,
97
93
  connection_config: "Optional[Union[BigQueryConnectionParams, dict[str, Any]]]" = None,
98
94
  migration_config: Optional[dict[str, Any]] = None,
99
95
  statement_config: "Optional[StatementConfig]" = None,
100
96
  driver_features: "Optional[Union[BigQueryDriverFeatures, dict[str, Any]]]" = None,
101
97
  ) -> None:
102
- """Initialize BigQuery configuration with comprehensive feature support.
98
+ """Initialize BigQuery configuration.
103
99
 
104
100
  Args:
105
- connection_config: Standard connection configuration parameters
106
- connection_instance: Existing connection instance to use
101
+ connection_config: Connection configuration parameters
107
102
  migration_config: Migration configuration
108
103
  statement_config: Statement configuration override
109
- driver_features: BigQuery-specific driver features and configurations
110
-
111
- Example:
112
- >>> # Basic BigQuery connection
113
- >>> config = BigQueryConfig(
114
- ... connection_config={
115
- ... "project": "my-project",
116
- ... "location": "US",
117
- ... }
118
- ... )
119
-
120
- >>> # Advanced configuration with ML and AI features
121
- >>> config = BigQueryConfig(
122
- ... connection_config={
123
- ... "project": "my-project",
124
- ... "location": "US",
125
- ... "enable_bigquery_ml": True,
126
- ... "enable_gemini_integration": True,
127
- ... "enable_dataframes": True,
128
- ... "enable_vector_search": True,
129
- ... "maximum_bytes_billed": 1000000000, # 1GB limit
130
- ... }
131
- ... )
132
-
133
- >>> # Enterprise configuration with reservations
134
- >>> config = BigQueryConfig(
135
- ... connection_config={
136
- ... "project": "my-project",
137
- ... "location": "US",
138
- ... "edition": "Enterprise Plus",
139
- ... "reservation_id": "my-reservation",
140
- ... "enable_continuous_queries": True,
141
- ... "enable_cross_cloud": True,
142
- ... }
143
- ... )
104
+ driver_features: BigQuery-specific driver features
144
105
  """
145
106
 
146
- # Store connection instance
147
- self._connection_instance = connection_instance
148
-
149
- # Setup configuration following DuckDB pattern
150
107
  self.connection_config: dict[str, Any] = dict(connection_config) if connection_config else {}
151
108
  if "extra" in self.connection_config:
152
109
  extras = self.connection_config.pop("extra")
153
110
  self.connection_config.update(extras)
154
111
 
155
- # Setup driver features
156
112
  self.driver_features: dict[str, Any] = dict(driver_features) if driver_features else {}
157
113
 
158
- # Setup default job config if not provided
114
+ self._connection_instance: Optional[BigQueryConnection] = self.driver_features.get("connection_instance")
115
+
159
116
  if "default_query_job_config" not in self.connection_config:
160
117
  self._setup_default_job_config()
161
118
 
@@ -170,8 +127,8 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
170
127
  )
171
128
 
172
129
  def _setup_default_job_config(self) -> None:
173
- """Set up default job configuration based on connection config."""
174
- # Check if already provided in connection_config
130
+ """Set up default job configuration."""
131
+
175
132
  if self.connection_config.get("default_query_job_config") is not None:
176
133
  return
177
134
 
@@ -186,14 +143,12 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
186
143
  if use_query_cache is not None:
187
144
  job_config.use_query_cache = use_query_cache
188
145
  else:
189
- job_config.use_query_cache = True # Default to True
146
+ job_config.use_query_cache = True
190
147
 
191
- # Configure cost controls
192
148
  maximum_bytes_billed = self.connection_config.get("maximum_bytes_billed")
193
149
  if maximum_bytes_billed is not None:
194
150
  job_config.maximum_bytes_billed = maximum_bytes_billed
195
151
 
196
- # Configure timeouts
197
152
  query_timeout_ms = self.connection_config.get("query_timeout_ms")
198
153
  if query_timeout_ms is not None:
199
154
  job_config.job_timeout_ms = query_timeout_ms
@@ -214,7 +169,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
214
169
  return self._connection_instance
215
170
 
216
171
  try:
217
- # Filter out extra fields and keep only official BigQuery client constructor fields
218
172
  client_fields = {"project", "location", "credentials", "client_options", "client_info"}
219
173
  config_dict: dict[str, Any] = {
220
174
  field: value
@@ -223,7 +177,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
223
177
  }
224
178
  connection = self.connection_type(**config_dict)
225
179
 
226
- # Store BigQuery-specific config in driver_features for driver access
227
180
  default_query_job_config = self.connection_config.get("default_query_job_config")
228
181
  if default_query_job_config is not None:
229
182
  self.driver_features["default_query_job_config"] = default_query_job_config
@@ -232,13 +185,11 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
232
185
  if default_load_job_config is not None:
233
186
  self.driver_features["default_load_job_config"] = default_load_job_config
234
187
 
235
- # Call connection create callback from driver features
236
188
  on_connection_create = self.driver_features.get("on_connection_create")
237
189
  if on_connection_create:
238
190
  on_connection_create(connection)
239
191
 
240
192
  self._connection_instance = connection
241
-
242
193
  except Exception as e:
243
194
  project = self.connection_config.get("project", "Unknown")
244
195
  msg = f"Could not configure BigQuery connection for project '{project}'. Error: {e}"
@@ -275,7 +226,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
275
226
  """
276
227
 
277
228
  with self.provide_connection(*_args, **_kwargs) as connection:
278
- # Use shared config or user-provided config or instance default
279
229
  final_statement_config = statement_config or self.statement_config
280
230
 
281
231
  driver = self.driver_type(
@@ -286,9 +236,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
286
236
  def get_signature_namespace(self) -> "dict[str, type[Any]]":
287
237
  """Get the signature namespace for BigQuery types.
288
238
 
289
- This provides all BigQuery-specific types that Litestar needs to recognize
290
- to avoid serialization attempts.
291
-
292
239
  Returns:
293
240
  Dictionary mapping type names to types.
294
241
  """