sqlspec 0.17.1__py3-none-any.whl → 0.18.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sqlspec might be problematic.

Files changed (75)
  1. sqlspec/__init__.py +1 -1
  2. sqlspec/_sql.py +54 -159
  3. sqlspec/adapters/adbc/config.py +24 -30
  4. sqlspec/adapters/adbc/driver.py +42 -61
  5. sqlspec/adapters/aiosqlite/config.py +5 -10
  6. sqlspec/adapters/aiosqlite/driver.py +9 -25
  7. sqlspec/adapters/aiosqlite/pool.py +43 -35
  8. sqlspec/adapters/asyncmy/config.py +10 -7
  9. sqlspec/adapters/asyncmy/driver.py +18 -39
  10. sqlspec/adapters/asyncpg/config.py +4 -0
  11. sqlspec/adapters/asyncpg/driver.py +32 -79
  12. sqlspec/adapters/bigquery/config.py +12 -65
  13. sqlspec/adapters/bigquery/driver.py +39 -133
  14. sqlspec/adapters/duckdb/config.py +11 -15
  15. sqlspec/adapters/duckdb/driver.py +61 -85
  16. sqlspec/adapters/duckdb/pool.py +2 -5
  17. sqlspec/adapters/oracledb/_types.py +8 -1
  18. sqlspec/adapters/oracledb/config.py +55 -38
  19. sqlspec/adapters/oracledb/driver.py +35 -92
  20. sqlspec/adapters/oracledb/migrations.py +257 -0
  21. sqlspec/adapters/psqlpy/config.py +13 -9
  22. sqlspec/adapters/psqlpy/driver.py +28 -103
  23. sqlspec/adapters/psycopg/config.py +9 -5
  24. sqlspec/adapters/psycopg/driver.py +107 -175
  25. sqlspec/adapters/sqlite/config.py +7 -5
  26. sqlspec/adapters/sqlite/driver.py +37 -73
  27. sqlspec/adapters/sqlite/pool.py +3 -12
  28. sqlspec/base.py +1 -8
  29. sqlspec/builder/__init__.py +1 -1
  30. sqlspec/builder/_base.py +34 -20
  31. sqlspec/builder/_ddl.py +407 -183
  32. sqlspec/builder/_insert.py +1 -1
  33. sqlspec/builder/mixins/_insert_operations.py +26 -6
  34. sqlspec/builder/mixins/_merge_operations.py +1 -1
  35. sqlspec/builder/mixins/_select_operations.py +1 -5
  36. sqlspec/config.py +32 -13
  37. sqlspec/core/__init__.py +89 -14
  38. sqlspec/core/cache.py +57 -104
  39. sqlspec/core/compiler.py +57 -112
  40. sqlspec/core/filters.py +1 -21
  41. sqlspec/core/hashing.py +13 -47
  42. sqlspec/core/parameters.py +272 -261
  43. sqlspec/core/result.py +12 -27
  44. sqlspec/core/splitter.py +17 -21
  45. sqlspec/core/statement.py +150 -159
  46. sqlspec/driver/_async.py +2 -15
  47. sqlspec/driver/_common.py +16 -95
  48. sqlspec/driver/_sync.py +2 -15
  49. sqlspec/driver/mixins/_result_tools.py +8 -29
  50. sqlspec/driver/mixins/_sql_translator.py +6 -8
  51. sqlspec/exceptions.py +1 -2
  52. sqlspec/loader.py +43 -115
  53. sqlspec/migrations/__init__.py +1 -1
  54. sqlspec/migrations/base.py +34 -45
  55. sqlspec/migrations/commands.py +34 -15
  56. sqlspec/migrations/loaders.py +1 -1
  57. sqlspec/migrations/runner.py +104 -19
  58. sqlspec/migrations/tracker.py +49 -2
  59. sqlspec/protocols.py +3 -6
  60. sqlspec/storage/__init__.py +4 -4
  61. sqlspec/storage/backends/fsspec.py +5 -6
  62. sqlspec/storage/backends/obstore.py +7 -8
  63. sqlspec/storage/registry.py +3 -3
  64. sqlspec/utils/__init__.py +2 -2
  65. sqlspec/utils/logging.py +6 -10
  66. sqlspec/utils/sync_tools.py +27 -4
  67. sqlspec/utils/text.py +6 -1
  68. {sqlspec-0.17.1.dist-info → sqlspec-0.18.0.dist-info}/METADATA +1 -1
  69. sqlspec-0.18.0.dist-info/RECORD +138 -0
  70. sqlspec/builder/_ddl_utils.py +0 -103
  71. sqlspec-0.17.1.dist-info/RECORD +0 -138
  72. {sqlspec-0.17.1.dist-info → sqlspec-0.18.0.dist-info}/WHEEL +0 -0
  73. {sqlspec-0.17.1.dist-info → sqlspec-0.18.0.dist-info}/entry_points.txt +0 -0
  74. {sqlspec-0.17.1.dist-info → sqlspec-0.18.0.dist-info}/licenses/LICENSE +0 -0
  75. {sqlspec-0.17.1.dist-info → sqlspec-0.18.0.dist-info}/licenses/NOTICE +0 -0
sqlspec/adapters/bigquery/driver.py

@@ -1,25 +1,7 @@
-"""Enhanced BigQuery driver with CORE_ROUND_3 architecture integration.
-
-This driver implements the complete CORE_ROUND_3 architecture for BigQuery connections:
-- 5-10x faster SQL compilation through single-pass processing
-- 40-60% memory reduction through __slots__ optimization
-- Enhanced caching for repeated statement execution
-- Complete backward compatibility with existing BigQuery functionality
-
-Architecture Features:
-- Direct integration with sqlspec.core modules
-- Enhanced BigQuery parameter processing with NAMED_AT conversion
-- Thread-safe unified caching system
-- MyPyC-optimized performance patterns
-- Zero-copy data access where possible
-- AST-based literal embedding for execute_many operations
-
-BigQuery Features:
-- Parameter style conversion (QMARK to NAMED_AT)
-- BigQuery-specific type coercion and data handling
-- Enhanced error categorization for BigQuery/Google Cloud errors
-- Support for QueryJobConfig and job management
-- Optimized query execution with proper BigQuery parameter handling
+"""BigQuery driver implementation.
+
+Provides Google Cloud BigQuery connectivity with parameter style conversion,
+type coercion, error handling, and query job management.
 """
 
 import datetime
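For context on the parameter-style conversion the new docstring mentions: BigQuery rejects positional `?` placeholders, so qmark-style SQL must be rewritten to named `@` parameters before execution. A rough sketch of the rewrite (names such as `param_0` follow the `@param_0` convention visible later in this diff; the actual conversion lives in sqlspec's core parameter machinery, not shown here):

```python
# Sketch of the QMARK -> NAMED_AT rewrite, for illustration only.
sql_in = "SELECT * FROM users WHERE age >= ? AND city = ?"
params_in = [21, "Berlin"]

# BigQuery only accepts named parameters, so both the SQL text and the
# parameter container are rewritten before reaching the client:
sql_out = "SELECT * FROM users WHERE age >= @param_0 AND city = @param_1"
params_out = {"param_0": 21, "param_1": "Berlin"}
```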
@@ -66,25 +48,21 @@ _BQ_TYPE_MAP: dict[type, tuple[str, Optional[str]]] = {
 
 
 def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
-    """Determine BigQuery parameter type from Python value using hash map dispatch.
+    """Determine BigQuery parameter type from Python value.
 
-    Uses O(1) hash map lookup for common types, with special handling for
-    datetime and array types.
+    Returns the appropriate BigQuery type for common Python types.
     """
     if value is None:
         return ("STRING", None)
 
     value_type = type(value)
 
-    # Special case for datetime (needs timezone check)
     if value_type is datetime.datetime:
         return ("TIMESTAMP" if value.tzinfo else "DATETIME", None)
 
-    # Use hash map for O(1) type lookup
     if value_type in _BQ_TYPE_MAP:
         return _BQ_TYPE_MAP[value_type]
 
-    # Handle array types
     if isinstance(value, (list, tuple)):
         if not value:
             msg = "Cannot determine BigQuery ARRAY type for empty sequence."
@@ -98,7 +76,6 @@ def _get_bq_param_type(value: Any) -> tuple[Optional[str], Optional[str]]:
     return None, None
 
 
-# Hash map for BigQuery parameter type creation
 _BQ_PARAM_CREATOR_MAP: dict[str, Any] = {
     "ARRAY": lambda name, value, array_type: ArrayQueryParameter(
         name, array_type, [] if value is None else list(value)
@@ -109,17 +86,15 @@ _BQ_PARAM_CREATOR_MAP: dict[str, Any] = {
 
 
 def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, ScalarQueryParameter]]":
-    """Create BigQuery QueryParameter objects from parameters using hash map dispatch.
+    """Create BigQuery QueryParameter objects from parameters.
 
     Handles both dict-style (named) and list-style (positional) parameters.
-    Uses O(1) hash map lookup for parameter type creation.
     """
     if not parameters:
         return []
 
     bq_parameters: list[Union[ArrayQueryParameter, ScalarQueryParameter]] = []
 
-    # Handle dict-style parameters (named parameters like @param1, @param2)
     if isinstance(parameters, dict):
         for name, value in parameters.items():
             param_name_for_bq = name.lstrip("@")
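`_create_bq_parameters` ultimately produces the standard google-cloud-bigquery parameter objects. For reference, this is roughly what the client library accepts directly (a minimal sketch of the public `google.cloud.bigquery` API; table and parameter names are invented, and `Client()` needs GCP credentials at runtime):

```python
from google.cloud import bigquery

client = bigquery.Client()

job_config = bigquery.QueryJobConfig(
    query_parameters=[
        # Scalar parameter: (name, BigQuery type, value)
        bigquery.ScalarQueryParameter("min_age", "INT64", 21),
        # Array parameter: (name, element type, values)
        bigquery.ArrayQueryParameter("cities", "STRING", ["Berlin", "Oslo"]),
    ]
)

job = client.query(
    "SELECT name FROM `project.dataset.users` "
    "WHERE age >= @min_age AND city IN UNNEST(@cities)",
    job_config=job_config,
)
for row in job.result():
    print(row["name"])
```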
@@ -127,37 +102,27 @@ def _create_bq_parameters(parameters: Any) -> "list[Union[ArrayQueryParameter, ScalarQueryParameter]]":
             param_type, array_element_type = _get_bq_param_type(actual_value)
 
             if param_type == "ARRAY" and array_element_type:
-                # Use hash map for array parameter creation
                 creator = _BQ_PARAM_CREATOR_MAP["ARRAY"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, array_element_type))
             elif param_type == "JSON":
-                # Use hash map for JSON parameter creation
                 creator = _BQ_PARAM_CREATOR_MAP["JSON"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, None))
             elif param_type:
-                # Use hash map for scalar parameter creation
                 creator = _BQ_PARAM_CREATOR_MAP["SCALAR"]
                 bq_parameters.append(creator(param_name_for_bq, actual_value, param_type))
             else:
                 msg = f"Unsupported BigQuery parameter type for value of param '{name}': {type(actual_value)}"
                 raise SQLSpecError(msg)
 
-    # Handle list-style parameters (positional parameters that should have been converted to named)
     elif isinstance(parameters, (list, tuple)):
-        # This shouldn't happen if the core parameter system is working correctly
-        # BigQuery requires named parameters, so positional should be converted
        logger.warning("BigQuery received positional parameters instead of named parameters")
        return []
 
     return bq_parameters
 
 
-# Enhanced BigQuery type coercion with core optimization
-# This map is used by the core parameter system to coerce types before BigQuery sees them
 bigquery_type_coercion_map = {
-    # Convert tuples to lists for BigQuery array compatibility
     tuple: list,
-    # Keep other types as-is (BigQuery handles them natively)
     bool: lambda x: x,
     int: lambda x: x,
     float: lambda x: x,
@@ -167,12 +132,12 @@ bigquery_type_coercion_map = {
     datetime.date: lambda x: x,
     datetime.time: lambda x: x,
     Decimal: lambda x: x,
-    dict: lambda x: x,  # BigQuery handles JSON natively
+    dict: lambda x: x,
     list: lambda x: x,
     type(None): lambda _: None,
 }
 
-# Enhanced BigQuery statement configuration using core modules with performance optimizations
+
 bigquery_statement_config = StatementConfig(
     dialect="bigquery",
     parameter_config=ParameterStyleConfig(
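Aside from `tuple: list`, every entry in `bigquery_type_coercion_map` is an identity function; the map exists so the core parameter system has a uniform per-type hook. The one real coercion, sketched:

```python
# tuple -> list is the only substantive coercion: the BigQuery client expects
# list values for ARRAY parameters, so tuples are converted before binding.
coercion = {tuple: list, int: lambda x: x}  # abbreviated from the map above

value = (1, 2, 3)
coerced = coercion[type(value)](value)
print(coerced)  # [1, 2, 3] -- acceptable as an ARRAY parameter value
```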
@@ -182,10 +147,9 @@ bigquery_statement_config = StatementConfig(
         supported_execution_parameter_styles={ParameterStyle.NAMED_AT},
         type_coercion_map=bigquery_type_coercion_map,
         has_native_list_expansion=True,
-        needs_static_script_compilation=False,  # Use proper parameter binding for complex types
-        preserve_original_params_for_many=True,  # BigQuery needs original list of tuples for execute_many
+        needs_static_script_compilation=False,
+        preserve_original_params_for_many=True,
     ),
-    # Core processing features enabled for performance
     enable_parsing=True,
     enable_validation=True,
     enable_caching=True,
@@ -194,7 +158,7 @@
 
 
 class BigQueryCursor:
-    """BigQuery cursor with enhanced resource management and error handling."""
+    """BigQuery cursor with resource management."""
 
     __slots__ = ("connection", "job")
 
@@ -206,8 +170,7 @@ class BigQueryCursor:
 
         return self.connection
     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
-        _ = (exc_type, exc_val, exc_tb)  # Mark as intentionally unused
-        # BigQuery doesn't need explicit cursor cleanup
+        _ = (exc_type, exc_val, exc_tb)
 
 
 class BigQueryExceptionHandler:
@@ -244,37 +207,10 @@
 
 
 class BigQueryDriver(SyncDriverAdapterBase):
-    """Enhanced BigQuery driver with CORE_ROUND_3 architecture integration.
-
-    This driver leverages the complete core module system for maximum BigQuery performance:
-
-    Performance Improvements:
-    - 5-10x faster SQL compilation through single-pass processing
-    - 40-60% memory reduction through __slots__ optimization
-    - Enhanced caching for repeated statement execution
-    - Zero-copy parameter processing where possible
-    - Optimized BigQuery parameter style conversion (QMARK -> NAMED_AT)
-    - AST-based literal embedding for execute_many operations
-
-    BigQuery Features:
-    - Parameter style conversion (QMARK to NAMED_AT)
-    - BigQuery-specific type coercion and data handling
-    - Enhanced error categorization for BigQuery/Google Cloud errors
-    - QueryJobConfig support with comprehensive configuration merging
-    - Optimized query execution with proper BigQuery parameter handling
-    - Script execution with AST-based parameter embedding
-
-    Core Integration Features:
-    - sqlspec.core.statement for enhanced SQL processing
-    - sqlspec.core.parameters for optimized parameter handling
-    - sqlspec.core.cache for unified statement caching
-    - sqlspec.core.config for centralized configuration management
-
-    Compatibility:
-    - 100% backward compatibility with existing BigQuery driver interface
-    - All existing BigQuery tests pass without modification
-    - Complete StatementConfig API compatibility
-    - Preserved QueryJobConfig and job management patterns
+    """BigQuery driver implementation.
+
+    Provides Google Cloud BigQuery connectivity with parameter style conversion,
+    type coercion, error handling, and query job management.
     """
 
     __slots__ = ("_default_query_job_config",)
@@ -286,16 +222,14 @@ class BigQueryDriver(SyncDriverAdapterBase):
         statement_config: "Optional[StatementConfig]" = None,
         driver_features: "Optional[dict[str, Any]]" = None,
     ) -> None:
-        # Enhanced configuration with global settings integration
         if statement_config is None:
             cache_config = get_cache_config()
-            enhanced_config = bigquery_statement_config.replace(
+            statement_config = bigquery_statement_config.replace(
                 enable_caching=cache_config.compiled_cache_enabled,
-                enable_parsing=True,  # Default to enabled
-                enable_validation=True,  # Default to enabled
-                dialect="bigquery",  # Use adapter-specific dialect
+                enable_parsing=True,
+                enable_validation=True,
+                dialect="bigquery",
             )
-            statement_config = enhanced_config
 
         super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
         self._default_query_job_config: Optional[QueryJobConfig] = (driver_features or {}).get(
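Since the constructor only builds a default `StatementConfig` when none is supplied, callers can still pass their own. A hedged usage sketch (the `replace` call mirrors the one in the constructor above; the import path is inferred from the package layout, and `client` stands for a `google.cloud.bigquery.Client`):

```python
from sqlspec.adapters.bigquery.driver import (  # import path assumed
    BigQueryDriver,
    bigquery_statement_config,
)

# Override one default; all other settings carry over from the module config.
custom_config = bigquery_statement_config.replace(enable_caching=False)

driver = BigQueryDriver(
    connection=client,  # a google.cloud.bigquery.Client
    statement_config=custom_config,
)
```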
@@ -303,7 +237,7 @@ class BigQueryDriver(SyncDriverAdapterBase):
        )
 
     def with_cursor(self, connection: "BigQueryConnection") -> "BigQueryCursor":
-        """Create and return a context manager for cursor acquisition and cleanup with enhanced resource management.
+        """Create context manager for cursor management.
 
         Returns:
             BigQueryCursor: Cursor object for query execution
@@ -324,7 +258,7 @@
         return BigQueryExceptionHandler()
 
     def _copy_job_config_attrs(self, source_config: QueryJobConfig, target_config: QueryJobConfig) -> None:
-        """Copy non-private attributes from source config to target config with enhanced validation."""
+        """Copy non-private attributes from source config to target config."""
         for attr in dir(source_config):
             if attr.startswith("_"):
                 continue
@@ -333,7 +267,6 @@
                 if value is not None and not callable(value):
                     setattr(target_config, attr, value)
             except (AttributeError, TypeError):
-                # Skip attributes that can't be copied
                 continue
 
     def _run_query_job(
@@ -343,19 +276,17 @@
         connection: Optional[BigQueryConnection] = None,
         job_config: Optional[QueryJobConfig] = None,
     ) -> QueryJob:
-        """Execute a BigQuery job with comprehensive configuration support and enhanced error handling."""
+        """Execute a BigQuery job with configuration support."""
         conn = connection or self.connection
 
         final_job_config = QueryJobConfig()
 
-        # Merge configurations in priority order: default -> provided -> parameters
         if self._default_query_job_config:
             self._copy_job_config_attrs(self._default_query_job_config, final_job_config)
 
         if job_config:
             self._copy_job_config_attrs(job_config, final_job_config)
 
-        # Convert parameters to BigQuery QueryParameter objects using enhanced processing
         bq_parameters = _create_bq_parameters(parameters)
         final_job_config.query_parameters = bq_parameters
 
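`_run_query_job` merges job configuration in priority order: the driver's default config first, then the per-call config, then the bound parameters. Because `_copy_job_config_attrs` copies any attribute that is not None, a later config overwrites earlier values, including explicit False values. A small illustration with real `google.cloud.bigquery.QueryJobConfig` attributes (a fixed attribute list stands in for the `dir()` scan, for brevity):

```python
from google.cloud.bigquery import QueryJobConfig

default = QueryJobConfig()
default.use_query_cache = True
default.dry_run = False

per_call = QueryJobConfig()
per_call.dry_run = True  # the only attribute this caller sets

final = QueryJobConfig()
# Mirror of the merge above: defaults first, then per-call overrides.
for source in (default, per_call):
    for attr in ("use_query_cache", "dry_run"):
        value = getattr(source, attr, None)
        if value is not None and not callable(value):
            setattr(final, attr, value)

print(final.use_query_cache, final.dry_run)  # True True
```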
@@ -363,7 +294,7 @@
 
     @staticmethod
     def _rows_to_results(rows_iterator: Any) -> list[dict[str, Any]]:
-        """Convert BigQuery rows to dictionary format with enhanced type handling."""
+        """Convert BigQuery rows to dictionary format."""
         return [dict(row) for row in rows_iterator]
 
     def _try_special_handling(self, cursor: "Any", statement: "SQL") -> "Optional[SQLResult]":
@@ -379,44 +310,36 @@
         Returns:
             None - always proceeds with standard execution for BigQuery
         """
-        _ = (cursor, statement)  # Mark as intentionally unused
+        _ = (cursor, statement)
         return None
 
     def _transform_ast_with_literals(self, sql: str, parameters: Any) -> str:
-        """Transform SQL AST by replacing placeholders with literal values using enhanced core processing.
+        """Transform SQL AST by replacing placeholders with literal values.
 
-        This approach maintains the single-parse architecture by using proper
-        AST transformation instead of string manipulation, with core optimization.
+        Uses AST transformation instead of string manipulation.
         """
         if not parameters:
             return sql
 
-        # Parse the SQL once using core optimization
         try:
             ast = sqlglot.parse_one(sql, dialect="bigquery")
         except sqlglot.ParseError:
-            # If we can't parse, fall back to original SQL
             return sql
 
-        # Track placeholder index for positional parameters
         placeholder_counter = {"index": 0}
 
         def replace_placeholder(node: exp.Expression) -> exp.Expression:
-            """Replace placeholder nodes with literal values using enhanced type handling."""
+            """Replace placeholder nodes with literal values."""
             if isinstance(node, exp.Placeholder):
-                # Handle positional parameters (?, :1, etc.)
                 if isinstance(parameters, (list, tuple)):
-                    # Use the current placeholder index
                     current_index = placeholder_counter["index"]
                     placeholder_counter["index"] += 1
                     if current_index < len(parameters):
                         return self._create_literal_node(parameters[current_index])
                 return node
             if isinstance(node, exp.Parameter):
-                # Handle named parameters (@param1, :name, etc.)
                 param_name = str(node.this) if hasattr(node.this, "__str__") else node.this
                 if isinstance(parameters, dict):
-                    # Try different parameter name formats
                     possible_names = [param_name, f"@{param_name}", f":{param_name}", f"param_{param_name}"]
                     for name in possible_names:
                         if name in parameters:
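The `replace_placeholder` visitor above runs under sqlglot's `ast.transform`, which rebuilds the tree node by node. A standalone sketch of the same technique (query text, parameter name, and value are invented):

```python
import sqlglot
from sqlglot import exp

ast = sqlglot.parse_one("SELECT * FROM users WHERE id = @id", dialect="bigquery")

def embed_literal(node: exp.Expression) -> exp.Expression:
    # Replace the named parameter @id with a literal, as the driver does
    # when compiling scripts and execute_many batches.
    if isinstance(node, exp.Parameter) and str(node.this) == "id":
        return exp.Literal.number(42)
    return node

print(ast.transform(embed_literal).sql(dialect="bigquery"))
# SELECT * FROM users WHERE id = 42
```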
@@ -424,14 +347,12 @@
                             return self._create_literal_node(actual_value)
                 return node
             if isinstance(parameters, (list, tuple)):
-                # For named parameters with positional values (e.g., @param_0, @param_1)
                 try:
-                    # Try to extract numeric index from parameter name
                     if param_name.startswith("param_"):
-                        param_index = int(param_name[6:])  # Remove "param_" prefix
+                        param_index = int(param_name[6:])
                         if param_index < len(parameters):
                             return self._create_literal_node(parameters[param_index])
-                    # Also try simple numeric parameters like @0, @1
+
                     if param_name.isdigit():
                         param_index = int(param_name)
                         if param_index < len(parameters):
@@ -441,14 +362,12 @@
                 return node
             return node
 
-        # Transform the AST by replacing placeholders with literals
         transformed_ast = ast.transform(replace_placeholder)
 
-        # Generate SQL from the transformed AST
         return transformed_ast.sql(dialect="bigquery")
 
     def _create_literal_node(self, value: Any) -> "exp.Expression":
-        """Create a SQLGlot literal expression from a Python value with enhanced type handling."""
+        """Create a SQLGlot literal expression from a Python value."""
         if value is None:
             return exp.Null()
         if isinstance(value, bool):
@@ -458,20 +377,17 @@
         if isinstance(value, str):
             return exp.Literal.string(value)
         if isinstance(value, (list, tuple)):
-            # Create an array literal
             items = [self._create_literal_node(item) for item in value]
             return exp.Array(expressions=items)
         if isinstance(value, dict):
-            # For dict, convert to JSON string using enhanced serialization
            json_str = to_json(value)
            return exp.Literal.string(json_str)
-        # Fallback to string representation
+
         return exp.Literal.string(str(value))
 
     def _execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
-        """Execute SQL script using enhanced statement splitting and parameter handling.
+        """Execute SQL script with statement splitting and parameter handling.
 
-        Uses core module optimization for statement parsing and parameter processing.
         Parameters are embedded as static values for script execution compatibility.
         """
         sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
@@ -482,11 +398,10 @@
 
         for stmt in statements:
             job = self._run_query_job(stmt, prepared_parameters or {}, connection=cursor)
-            job.result()  # Wait for completion
+            job.result()
             last_job = job
             successful_count += 1
 
-        # Store the last job for result extraction
         cursor.job = last_job
 
         return self.create_execution_result(
@@ -500,45 +415,37 @@
         Instead, we generate a script with multiple INSERT statements using
         AST transformation to embed literals safely.
         """
-        # Get parameters from statement (will be original list due to preserve_original_params_for_many flag)
+
         parameters_list = statement.parameters
 
-        # Check if we have parameters for execute_many
         if not parameters_list or not isinstance(parameters_list, (list, tuple)):
             return self.create_execution_result(cursor, rowcount_override=0, is_many_result=True)
 
-        # Get the base SQL from statement
         base_sql = statement.sql
 
-        # Build a script with all statements using AST transformation
         script_statements = []
         for param_set in parameters_list:
-            # Use AST transformation to embed literals safely
             transformed_sql = self._transform_ast_with_literals(base_sql, param_set)
             script_statements.append(transformed_sql)
 
-        # Combine into a single script
         script_sql = ";\n".join(script_statements)
 
-        # Execute the script as a single job
         cursor.job = self._run_query_job(script_sql, None, connection=cursor)
-        cursor.job.result()  # Wait for completion
+        cursor.job.result()
 
-        # Get the actual affected row count from the job
         affected_rows = (
             cursor.job.num_dml_affected_rows if cursor.job.num_dml_affected_rows is not None else len(parameters_list)
         )
         return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)
 
     def _execute_statement(self, cursor: Any, statement: "SQL") -> ExecutionResult:
-        """Execute single SQL statement with enhanced BigQuery data handling and performance optimization.
+        """Execute single SQL statement with BigQuery data handling.
 
-        Uses core processing for optimal parameter handling and BigQuery result processing.
+        Handles parameter processing and BigQuery result processing.
         """
         sql, parameters = self._get_compiled_sql(statement, self.statement_config)
         cursor.job = self._run_query_job(sql, parameters, connection=cursor)
 
-        # Enhanced SELECT result processing for BigQuery
         if statement.returns_rows():
             job_result = cursor.job.result()
             rows_list = self._rows_to_results(iter(job_result))
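BigQuery's client API has no executemany, so `_execute_many` compiles each parameter set into a literal-embedded INSERT and joins them into one script, as the docstring above describes. Roughly, with invented data (the real code embeds values through the sqlglot AST transform shown earlier, which is injection-safe; the f-string here only shows the shape of the output):

```python
base_sql = "INSERT INTO `project.dataset.users` (id, name) VALUES (@param_0, @param_1)"
parameters_list = [(1, "Ada"), (2, "Grace")]

# What the literal-embedded script ends up looking like after
# _transform_ast_with_literals runs over each parameter set:
script_sql = ";\n".join(
    f"INSERT INTO `project.dataset.users` (id, name) VALUES ({pid}, '{name}')"
    for pid, name in parameters_list
)
print(script_sql)
# INSERT INTO `project.dataset.users` (id, name) VALUES (1, 'Ada');
# INSERT INTO `project.dataset.users` (id, name) VALUES (2, 'Grace')
```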
@@ -552,7 +459,6 @@
                 is_select_result=True,
             )
 
-        # Enhanced non-SELECT result processing for BigQuery
         cursor.job.result()
         affected_rows = cursor.job.num_dml_affected_rows or 0
         return self.create_execution_result(cursor, rowcount_override=affected_rows)
sqlspec/adapters/duckdb/config.py

@@ -71,7 +71,6 @@ class DuckDBPoolParams(DuckDBConnectionParams, total=False):
     Combines standardized pool parameters with DuckDB-specific connection parameters.
     """
 
-    # Standardized pool parameters (consistent across ALL adapters)
     pool_min_size: NotRequired[int]
     pool_max_size: NotRequired[int]
     pool_timeout: NotRequired[float]
@@ -122,24 +121,21 @@ class DuckDBDriverFeatures(TypedDict, total=False):
 
 
 class DuckDBConfig(SyncDatabaseConfig[DuckDBConnection, DuckDBConnectionPool, DuckDBDriver]):
-    """Enhanced DuckDB configuration with connection pooling and intelligent features.
+    """DuckDB configuration with connection pooling.
 
-    This configuration supports all of DuckDB's unique features including:
+    This configuration supports DuckDB's features including:
 
-    - Connection pooling optimized for DuckDB's architecture
-    - Extension auto-management and installation
+    - Connection pooling
+    - Extension management and installation
     - Secret management for API integrations
-    - Intelligent auto configuration settings
-    - High-performance Arrow integration
+    - Auto configuration settings
+    - Arrow integration
     - Direct file querying capabilities
-    - Performance optimizations for analytics workloads
 
-    DuckDB Connection Pool Best Practices:
-    - DuckDB performs best with long-lived connections that maintain cache
-    - Default pool size is 1-4 connections (DuckDB is optimized for single connection)
+    DuckDB Connection Pool Configuration:
+    - Default pool size is 1-4 connections (DuckDB uses single connection by default)
     - Connection recycling is set to 24 hours by default (set to 0 to disable)
     - Shared memory databases use `:memory:shared_db` for proper concurrency
-    - Health checks are minimized to reduce overhead
     """
 
     driver_type: "ClassVar[type[DuckDBDriver]]" = DuckDBDriver
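A usage sketch of the configuration described above. The pool keys come from `DuckDBPoolParams` earlier in this diff; the import path is assumed from the package layout (`sqlspec/adapters/duckdb/`) and the values are illustrative:

```python
from sqlspec.adapters.duckdb import DuckDBConfig  # import path assumed

# ":memory:shared_db" gives pooled connections a shared in-memory database,
# per the concurrency note in the docstring above.
config = DuckDBConfig(
    pool_config={
        "database": ":memory:shared_db",
        "pool_min_size": 1,
        "pool_max_size": 4,  # matches the 1-4 connection default described above
    }
)
```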
@@ -149,12 +145,12 @@ class DuckDBConfig(SyncDatabaseConfig[DuckDBConnection, DuckDBConnectionPool, DuckDBDriver]):
         self,
         *,
         pool_config: "Optional[Union[DuckDBPoolParams, dict[str, Any]]]" = None,
-        migration_config: Optional[dict[str, Any]] = None,
         pool_instance: "Optional[DuckDBConnectionPool]" = None,
+        migration_config: Optional[dict[str, Any]] = None,
         statement_config: "Optional[StatementConfig]" = None,
         driver_features: "Optional[Union[DuckDBDriverFeatures, dict[str, Any]]]" = None,
     ) -> None:
-        """Initialize DuckDB configuration with intelligent features."""
+        """Initialize DuckDB configuration."""
         if pool_config is None:
             pool_config = {}
         if "database" not in pool_config:
@@ -204,7 +200,7 @@ class DuckDBConfig(SyncDatabaseConfig[DuckDBConnection, DuckDBConnectionPool, DuckDBDriver]):
             extensions=extensions_dicts,
             secrets=secrets_dicts,
             on_connection_create=pool_callback,
-            **self.pool_config,  # Pass all pool_config as kwargs to be filtered by the pool
+            **self.pool_config,
         )
 
     def _close_pool(self) -> None: