sqlspec-0.13.0-py3-none-any.whl → sqlspec-0.14.0-py3-none-any.whl
- sqlspec/__init__.py +39 -1
- sqlspec/adapters/adbc/config.py +4 -40
- sqlspec/adapters/adbc/driver.py +29 -16
- sqlspec/adapters/aiosqlite/config.py +15 -20
- sqlspec/adapters/aiosqlite/driver.py +36 -18
- sqlspec/adapters/asyncmy/config.py +16 -33
- sqlspec/adapters/asyncmy/driver.py +23 -16
- sqlspec/adapters/asyncpg/config.py +19 -61
- sqlspec/adapters/asyncpg/driver.py +41 -18
- sqlspec/adapters/bigquery/config.py +2 -43
- sqlspec/adapters/bigquery/driver.py +26 -14
- sqlspec/adapters/duckdb/config.py +2 -49
- sqlspec/adapters/duckdb/driver.py +35 -16
- sqlspec/adapters/oracledb/config.py +30 -83
- sqlspec/adapters/oracledb/driver.py +54 -27
- sqlspec/adapters/psqlpy/config.py +17 -57
- sqlspec/adapters/psqlpy/driver.py +28 -8
- sqlspec/adapters/psycopg/config.py +30 -73
- sqlspec/adapters/psycopg/driver.py +69 -24
- sqlspec/adapters/sqlite/config.py +3 -21
- sqlspec/adapters/sqlite/driver.py +50 -26
- sqlspec/cli.py +248 -0
- sqlspec/config.py +18 -20
- sqlspec/driver/_async.py +28 -10
- sqlspec/driver/_common.py +5 -4
- sqlspec/driver/_sync.py +28 -10
- sqlspec/driver/mixins/__init__.py +6 -0
- sqlspec/driver/mixins/_cache.py +114 -0
- sqlspec/driver/mixins/_pipeline.py +0 -4
- sqlspec/{service/base.py → driver/mixins/_query_tools.py} +86 -421
- sqlspec/driver/mixins/_result_utils.py +0 -2
- sqlspec/driver/mixins/_sql_translator.py +0 -2
- sqlspec/driver/mixins/_storage.py +4 -18
- sqlspec/driver/mixins/_type_coercion.py +0 -2
- sqlspec/driver/parameters.py +4 -4
- sqlspec/extensions/aiosql/adapter.py +4 -4
- sqlspec/extensions/litestar/__init__.py +2 -1
- sqlspec/extensions/litestar/cli.py +48 -0
- sqlspec/extensions/litestar/plugin.py +3 -0
- sqlspec/loader.py +1 -1
- sqlspec/migrations/__init__.py +23 -0
- sqlspec/migrations/base.py +390 -0
- sqlspec/migrations/commands.py +525 -0
- sqlspec/migrations/runner.py +215 -0
- sqlspec/migrations/tracker.py +153 -0
- sqlspec/migrations/utils.py +89 -0
- sqlspec/protocols.py +37 -3
- sqlspec/statement/builder/__init__.py +8 -8
- sqlspec/statement/builder/{column.py → _column.py} +82 -52
- sqlspec/statement/builder/{ddl.py → _ddl.py} +5 -5
- sqlspec/statement/builder/_ddl_utils.py +1 -1
- sqlspec/statement/builder/{delete.py → _delete.py} +1 -1
- sqlspec/statement/builder/{insert.py → _insert.py} +1 -1
- sqlspec/statement/builder/{merge.py → _merge.py} +1 -1
- sqlspec/statement/builder/_parsing_utils.py +5 -3
- sqlspec/statement/builder/{select.py → _select.py} +59 -61
- sqlspec/statement/builder/{update.py → _update.py} +2 -2
- sqlspec/statement/builder/mixins/__init__.py +24 -30
- sqlspec/statement/builder/mixins/{_set_ops.py → _cte_and_set_ops.py} +86 -2
- sqlspec/statement/builder/mixins/{_delete_from.py → _delete_operations.py} +2 -0
- sqlspec/statement/builder/mixins/{_insert_values.py → _insert_operations.py} +70 -1
- sqlspec/statement/builder/mixins/{_merge_clauses.py → _merge_operations.py} +2 -0
- sqlspec/statement/builder/mixins/_order_limit_operations.py +123 -0
- sqlspec/statement/builder/mixins/{_pivot.py → _pivot_operations.py} +71 -2
- sqlspec/statement/builder/mixins/_select_operations.py +612 -0
- sqlspec/statement/builder/mixins/{_update_set.py → _update_operations.py} +73 -2
- sqlspec/statement/builder/mixins/_where_clause.py +536 -0
- sqlspec/statement/cache.py +50 -0
- sqlspec/statement/filters.py +37 -8
- sqlspec/statement/parameters.py +154 -25
- sqlspec/statement/pipelines/__init__.py +1 -1
- sqlspec/statement/pipelines/context.py +4 -4
- sqlspec/statement/pipelines/transformers/_expression_simplifier.py +3 -3
- sqlspec/statement/pipelines/validators/_parameter_style.py +22 -22
- sqlspec/statement/pipelines/validators/_performance.py +1 -5
- sqlspec/statement/sql.py +246 -176
- sqlspec/utils/__init__.py +2 -1
- sqlspec/utils/statement_hashing.py +203 -0
- sqlspec/utils/type_guards.py +32 -0
- {sqlspec-0.13.0.dist-info → sqlspec-0.14.0.dist-info}/METADATA +1 -1
- sqlspec-0.14.0.dist-info/RECORD +143 -0
- sqlspec-0.14.0.dist-info/entry_points.txt +2 -0
- sqlspec/service/__init__.py +0 -4
- sqlspec/service/_util.py +0 -147
- sqlspec/service/pagination.py +0 -26
- sqlspec/statement/builder/mixins/_aggregate_functions.py +0 -250
- sqlspec/statement/builder/mixins/_case_builder.py +0 -91
- sqlspec/statement/builder/mixins/_common_table_expr.py +0 -90
- sqlspec/statement/builder/mixins/_from.py +0 -63
- sqlspec/statement/builder/mixins/_group_by.py +0 -118
- sqlspec/statement/builder/mixins/_having.py +0 -35
- sqlspec/statement/builder/mixins/_insert_from_select.py +0 -47
- sqlspec/statement/builder/mixins/_insert_into.py +0 -36
- sqlspec/statement/builder/mixins/_limit_offset.py +0 -53
- sqlspec/statement/builder/mixins/_order_by.py +0 -46
- sqlspec/statement/builder/mixins/_returning.py +0 -37
- sqlspec/statement/builder/mixins/_select_columns.py +0 -61
- sqlspec/statement/builder/mixins/_unpivot.py +0 -77
- sqlspec/statement/builder/mixins/_update_from.py +0 -55
- sqlspec/statement/builder/mixins/_update_table.py +0 -29
- sqlspec/statement/builder/mixins/_where.py +0 -401
- sqlspec/statement/builder/mixins/_window_functions.py +0 -86
- sqlspec/statement/parameter_manager.py +0 -220
- sqlspec/statement/sql_compiler.py +0 -140
- sqlspec-0.13.0.dist-info/RECORD +0 -150
- /sqlspec/statement/builder/{base.py → _base.py} +0 -0
- /sqlspec/statement/builder/mixins/{_join.py → _join_operations.py} +0 -0
- {sqlspec-0.13.0.dist-info → sqlspec-0.14.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.13.0.dist-info → sqlspec-0.14.0.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.13.0.dist-info → sqlspec-0.14.0.dist-info}/licenses/NOTICE +0 -0
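The per-adapter diffs below repeatedly replace inline compilation with `self._get_compiled_sql(statement, target_style)`, supplied by the new `SyncAdapterCacheMixin`/`AsyncAdapterCacheMixin` (`sqlspec/driver/mixins/_cache.py`) and backed by the new `statement/cache.py` and `utils/statement_hashing.py`. The mixin's body is not part of this diff, so the following is only a minimal sketch of the call-site contract; every name other than `_get_compiled_sql` is an assumption:

```python
from typing import Any


class AdapterCacheMixinSketch:
    """Hypothetical sketch: cache compiled SQL per (statement hash, target parameter style)."""

    def __init__(self) -> None:
        self._compiled_sql_cache: dict[tuple[int, str], tuple[str, Any]] = {}

    def _compile(self, statement: Any, target_style: Any) -> "tuple[str, Any]":
        # Stand-in for the real compilation step; this diff only shows the
        # _get_compiled_sql call sites, not what they delegate to.
        raise NotImplementedError

    def _get_compiled_sql(self, statement: Any, target_style: Any) -> "tuple[str, Any]":
        key = (hash(statement), str(target_style))
        if key not in self._compiled_sql_cache:
            self._compiled_sql_cache[key] = self._compile(statement, target_style)
        return self._compiled_sql_cache[key]
```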
sqlspec/adapters/asyncmy/driver.py

@@ -1,7 +1,7 @@
 import logging
 from collections.abc import AsyncGenerator, Sequence
 from contextlib import asynccontextmanager
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Optional, Union

 from asyncmy import Connection
 from typing_extensions import TypeAlias
@@ -9,13 +9,14 @@ from typing_extensions import TypeAlias
 from sqlspec.driver import AsyncDriverAdapterProtocol
 from sqlspec.driver.connection import managed_transaction_async
 from sqlspec.driver.mixins import (
+    AsyncAdapterCacheMixin,
     AsyncPipelinedExecutionMixin,
     AsyncStorageMixin,
     SQLTranslatorMixin,
     ToSchemaMixin,
     TypeCoercionMixin,
 )
-from sqlspec.driver.parameters import
+from sqlspec.driver.parameters import convert_parameter_sequence
 from sqlspec.statement.parameters import ParameterStyle, ParameterValidator
 from sqlspec.statement.result import SQLResult
 from sqlspec.statement.sql import SQL, SQLConfig
@@ -34,6 +35,7 @@ AsyncmyConnection: TypeAlias = Connection

 class AsyncmyDriver(
     AsyncDriverAdapterProtocol[AsyncmyConnection, RowT],
+    AsyncAdapterCacheMixin,
     SQLTranslatorMixin,
     TypeCoercionMixin,
     AsyncStorageMixin,
@@ -45,9 +47,6 @@ class AsyncmyDriver(
     dialect: "DialectType" = "mysql"
     supported_parameter_styles: "tuple[ParameterStyle, ...]" = (ParameterStyle.POSITIONAL_PYFORMAT,)
     default_parameter_style: ParameterStyle = ParameterStyle.POSITIONAL_PYFORMAT
-    __supports_arrow__: ClassVar[bool] = True
-    __supports_parquet__: ClassVar[bool] = False
-    __slots__ = ()

     def __init__(
         self,
@@ -72,7 +71,7 @@ class AsyncmyDriver(
         self, statement: SQL, connection: "Optional[AsyncmyConnection]" = None, **kwargs: Any
     ) -> SQLResult[RowT]:
         if statement.is_script:
-            sql, _ =
+            sql, _ = self._get_compiled_sql(statement, ParameterStyle.STATIC)
             return await self._execute_script(sql, connection=connection, **kwargs)

         # Detect parameter styles in the SQL
@@ -99,7 +98,7 @@ class AsyncmyDriver(
                 break

         # Compile with the determined style
-        sql, params =
+        sql, params = self._get_compiled_sql(statement, target_style)

         if statement.is_many:
             params = self._process_parameters(params)
@@ -115,12 +114,10 @@ class AsyncmyDriver(
         conn = connection if connection is not None else self._connection(None)

         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
-            #
-
+            # Convert parameters using consolidated utility
+            converted_params = convert_parameter_sequence(parameters)
             # AsyncMy doesn't like empty lists/tuples, convert to None
-            final_params = (
-                normalized_params[0] if normalized_params and len(normalized_params) == 1 else normalized_params
-            )
+            final_params = converted_params[0] if converted_params and len(converted_params) == 1 else converted_params
             if not final_params:
                 final_params = None

@@ -157,11 +154,11 @@ class AsyncmyDriver(

         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
             # Normalize parameter list using consolidated utility
-
+            converted_param_list = convert_parameter_sequence(param_list)

             params_list: list[Union[list[Any], tuple[Any, ...]]] = []
-            if
-                for param_set in
+            if converted_param_list and isinstance(converted_param_list, Sequence):
+                for param_set in converted_param_list:
                     if isinstance(param_set, (list, tuple)):
                         params_list.append(param_set)
                     elif param_set is None:
@@ -188,18 +185,28 @@ class AsyncmyDriver(
         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
             # AsyncMy may not support multi-statement scripts without CLIENT_MULTI_STATEMENTS flag
             statements = self._split_script_statements(script)
+            suppress_warnings = kwargs.get("_suppress_warnings", False)
             statements_executed = 0
+            total_rows = 0

             async with self._get_cursor(txn_conn) as cursor:
                 for statement_str in statements:
                     if statement_str:
+                        # Validate each statement unless warnings suppressed
+                        if not suppress_warnings:
+                            # Run validation through pipeline
+                            temp_sql = SQL(statement_str, config=self.config)
+                            temp_sql._ensure_processed()
+                            # Validation errors are logged as warnings by default
+
                         await cursor.execute(statement_str)
                         statements_executed += 1
+                        total_rows += cursor.rowcount if cursor.rowcount is not None else 0

             return SQLResult(
                 statement=SQL(script, _dialect=self.dialect).as_script(),
                 data=[],
-                rows_affected=
+                rows_affected=total_rows,
                 operation_type="SCRIPT",
                 metadata={"status_message": "SCRIPT EXECUTED"},
                 total_statements=statements_executed,
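Each driver in this release funnels raw parameters through the shared `convert_parameter_sequence` helper from `sqlspec/driver/parameters.py` instead of its own normalization. The helper's body is not part of this diff, only its call sites; a rough sketch consistent with those call sites (single-element unwrapping, dict-vs-sequence handling) might look like the following, with the exact behavior an assumption:

```python
from collections.abc import Mapping, Sequence
from typing import Any, Optional


def convert_parameter_sequence_sketch(parameters: Any) -> Optional[list[Any]]:
    """Hypothetical: normalize whatever the caller passed into a flat list of parameter sets."""
    if parameters is None:
        return None
    if isinstance(parameters, Mapping):
        # A single mapping of named parameters stays intact as one parameter set.
        return [parameters]
    if isinstance(parameters, Sequence) and not isinstance(parameters, (str, bytes)):
        return list(parameters)
    # Scalars become a one-element list; callers unwrap single elements themselves.
    return [parameters]
```

The asyncmy hunk above then unwraps a single-element result and maps empty parameters to None before handing them to the cursor.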
sqlspec/adapters/asyncpg/config.py

@@ -5,8 +5,10 @@ from collections.abc import AsyncGenerator, Awaitable, Callable
 from contextlib import asynccontextmanager
 from typing import TYPE_CHECKING, Any, ClassVar, TypedDict

-from asyncpg import Record
+from asyncpg import Connection, Record
 from asyncpg import create_pool as asyncpg_create_pool
+from asyncpg.connection import ConnectionMeta
+from asyncpg.pool import Pool, PoolConnectionProxy, PoolConnectionProxyMeta
 from typing_extensions import NotRequired, Unpack

 from sqlspec.adapters.asyncpg.driver import AsyncpgConnection, AsyncpgDriver
@@ -18,9 +20,6 @@ from sqlspec.utils.serializers import from_json, to_json
 if TYPE_CHECKING:
     from asyncio.events import AbstractEventLoop

-    from asyncpg.pool import Pool
-    from sqlglot.dialects.dialect import DialectType
-

 __all__ = ("CONNECTION_FIELDS", "POOL_FIELDS", "AsyncpgConfig")

@@ -107,44 +106,10 @@ POOL_FIELDS = CONNECTION_FIELDS.union(
 class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, "Pool[Record]", AsyncpgDriver]):
     """Configuration for AsyncPG database connections using TypedDict."""

-    __slots__ = (
-        "_dialect",
-        "command_timeout",
-        "connect_timeout",
-        "connection_class",
-        "database",
-        "default_row_type",
-        "direct_tls",
-        "dsn",
-        "extras",
-        "host",
-        "init",
-        "json_deserializer",
-        "json_serializer",
-        "loop",
-        "max_cacheable_statement_size",
-        "max_cached_statement_lifetime",
-        "max_inactive_connection_lifetime",
-        "max_queries",
-        "max_size",
-        "min_size",
-        "passfile",
-        "password",
-        "pool_instance",
-        "port",
-        "record_class",
-        "server_settings",
-        "setup",
-        "ssl",
-        "statement_cache_size",
-        "statement_config",
-        "user",
-    )
-
     driver_type: type[AsyncpgDriver] = AsyncpgDriver
     connection_type: type[AsyncpgConnection] = type(AsyncpgConnection) # type: ignore[assignment]
     supported_parameter_styles: ClassVar[tuple[str, ...]] = ("numeric",)
-
+    default_parameter_style: ClassVar[str] = "numeric"

     def __init__(self, **kwargs: "Unpack[DriverParameters]") -> None:
         """Initialize AsyncPG configuration."""
@@ -219,10 +184,12 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, "Pool[Record]", Async
         self.json_serializer = kwargs.get("json_serializer", to_json)
         self.json_deserializer = kwargs.get("json_deserializer", from_json)
         pool_instance_from_kwargs = kwargs.get("pool_instance")
-        self._dialect: DialectType = None

         super().__init__()

+        # Override prepared statements to True for PostgreSQL since it supports them well
+        self.enable_prepared_statements = kwargs.get("enable_prepared_statements", True) # type: ignore[assignment]
+
         if pool_instance_from_kwargs is not None:
             self.pool_instance = pool_instance_from_kwargs

@@ -323,7 +290,7 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, "Pool[Record]", Async
             statement_config = replace(
                 statement_config,
                 allowed_parameter_styles=self.supported_parameter_styles,
-
+                default_parameter_style=self.default_parameter_style,
             )
             yield self.driver_type(connection=connection, config=statement_config)

@@ -347,24 +314,15 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, "Pool[Record]", Async
             Dictionary mapping type names to types.
         """
         namespace = super().get_signature_namespace()
-
-
-
-
-
-
-
-
-
-
-
-                    "PoolConnectionProxyMeta": PoolConnectionProxyMeta,
-                    "ConnectionMeta": ConnectionMeta,
-                    "Record": Record,
-                    "AsyncpgConnection": type(AsyncpgConnection), # The Union type alias
-                }
-            )
-        except ImportError:
-            logger.warning("Failed to import AsyncPG types for signature namespace")
-
+        namespace.update(
+            {
+                "Connection": Connection,
+                "Pool": Pool,
+                "PoolConnectionProxy": PoolConnectionProxy,
+                "PoolConnectionProxyMeta": PoolConnectionProxyMeta,
+                "ConnectionMeta": ConnectionMeta,
+                "Record": Record,
+                "AsyncpgConnection": type(AsyncpgConnection),
+            }
+        )
         return namespace
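Two behavioural notes fall out of the config hunks above: the driver's `default_parameter_style` is now pushed into the statement config, and `enable_prepared_statements` defaults to True for PostgreSQL. A hedged usage sketch follows; the keyword names are taken from this diff, but the full `DriverParameters` TypedDict is not shown here, so treat the exact set of accepted fields as an assumption:

```python
from sqlspec.adapters.asyncpg.config import AsyncpgConfig

config = AsyncpgConfig(
    dsn="postgresql://app:secret@localhost:5432/appdb",
    min_size=1,
    max_size=10,
    # 0.14.0 turns prepared statements on by default for PostgreSQL;
    # pass False here to opt out.
    enable_prepared_statements=True,
)
```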
sqlspec/adapters/asyncpg/driver.py

@@ -8,13 +8,14 @@ from typing_extensions import TypeAlias
 from sqlspec.driver import AsyncDriverAdapterProtocol
 from sqlspec.driver.connection import managed_transaction_async
 from sqlspec.driver.mixins import (
+    AsyncAdapterCacheMixin,
     AsyncPipelinedExecutionMixin,
     AsyncStorageMixin,
     SQLTranslatorMixin,
     ToSchemaMixin,
     TypeCoercionMixin,
 )
-from sqlspec.driver.parameters import
+from sqlspec.driver.parameters import convert_parameter_sequence
 from sqlspec.statement.parameters import ParameterStyle, ParameterValidator
 from sqlspec.statement.result import SQLResult
 from sqlspec.statement.sql import SQL, SQLConfig
@@ -50,6 +51,7 @@ class AsyncpgDriver(
     TypeCoercionMixin,
     AsyncStorageMixin,
     AsyncPipelinedExecutionMixin,
+    AsyncAdapterCacheMixin,
     ToSchemaMixin,
 ):
     """AsyncPG PostgreSQL Driver Adapter. Modern protocol implementation."""
@@ -57,7 +59,6 @@ class AsyncpgDriver(
     dialect: "DialectType" = "postgres"
     supported_parameter_styles: "tuple[ParameterStyle, ...]" = (ParameterStyle.NUMERIC,)
     default_parameter_style: ParameterStyle = ParameterStyle.NUMERIC
-    __slots__ = ()

     def __init__(
         self,
@@ -91,7 +92,7 @@ class AsyncpgDriver(
         self, statement: SQL, connection: Optional[AsyncpgConnection] = None, **kwargs: Any
     ) -> SQLResult[RowT]:
         if statement.is_script:
-            sql, _ =
+            sql, _ = self._get_compiled_sql(statement, ParameterStyle.STATIC)
             return await self._execute_script(sql, connection=connection, **kwargs)

         detected_styles = set()
@@ -112,10 +113,10 @@
                 break

         if statement.is_many:
-            sql, params =
+            sql, params = self._get_compiled_sql(statement, target_style)
             return await self._execute_many(sql, params, connection=connection, **kwargs)

-        sql, params =
+        sql, params = self._get_compiled_sql(statement, target_style)
         return await self._execute(sql, params, statement, connection=connection, **kwargs)

     async def _execute(
@@ -129,13 +130,13 @@
             return await self._execute_many(sql, parameters, connection=connection, **kwargs)

         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
-            #
-
+            # Convert parameters using consolidated utility
+            converted_params = convert_parameter_sequence(parameters)
             # AsyncPG expects parameters as *args, not a single list
             args_for_driver: list[Any] = []
-            if
-                #
-                args_for_driver =
+            if converted_params:
+                # converted_params is already a list, just use it directly
+                args_for_driver = converted_params

             if self.returns_rows(statement.expression):
                 records = await txn_conn.fetch(sql, *args_for_driver)
@@ -174,12 +175,12 @@

         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
             # Normalize parameter list using consolidated utility
-
+            converted_param_list = convert_parameter_sequence(param_list)

             params_list: list[tuple[Any, ...]] = []
             rows_affected = 0
-            if
-                for param_set in
+            if converted_param_list:
+                for param_set in converted_param_list:
                     if isinstance(param_set, (list, tuple)):
                         params_list.append(tuple(param_set))
                     elif param_set is None:
@@ -205,17 +206,39 @@
     ) -> SQLResult[RowT]:
         # Use provided connection or driver's default connection
         conn = connection if connection is not None else self._connection(None)
+
         async with managed_transaction_async(conn, auto_commit=True) as txn_conn:
-
+            # Split script into individual statements for validation
+            statements = self._split_script_statements(script)
+            suppress_warnings = kwargs.get("_suppress_warnings", False)
+
+            executed_count = 0
+            total_rows = 0
+            last_status = None
+
+            # Execute each statement individually for better control and validation
+            for statement in statements:
+                if statement.strip():
+                    # Validate each statement unless warnings suppressed
+                    if not suppress_warnings:
+                        # Run validation through pipeline
+                        temp_sql = SQL(statement, config=self.config)
+                        temp_sql._ensure_processed()
+                        # Validation errors are logged as warnings by default
+
+                    status = await txn_conn.execute(statement)
+                    executed_count += 1
+                    last_status = status
+                    # AsyncPG doesn't provide row count from execute()

             return SQLResult(
                 statement=SQL(script, _dialect=self.dialect).as_script(),
                 data=[],
-                rows_affected=
+                rows_affected=total_rows,
                 operation_type="SCRIPT",
-                metadata={"status_message":
-                total_statements=
-                successful_statements=
+                metadata={"status_message": last_status or "SCRIPT EXECUTED"},
+                total_statements=executed_count,
+                successful_statements=executed_count,
             )

     def _connection(self, connection: Optional[AsyncpgConnection] = None) -> AsyncpgConnection:
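The `_execute_script` rewrites in this release (asyncmy and asyncpg above, BigQuery below) all follow the same shape: split the script, optionally validate each statement through the SQL pipeline unless `_suppress_warnings` is set, execute statements one at a time, and accumulate counts for the `SQLResult`. A generic, library-agnostic sketch of that shape, where the naive semicolon split stands in for sqlspec's `_split_script_statements`:

```python
import sqlite3


def run_script_per_statement(conn: sqlite3.Connection, script: str, validate: bool = True) -> tuple[int, int]:
    """Execute a multi-statement script one statement at a time, returning (executed, rows_affected)."""
    executed = 0
    total_rows = 0
    for statement in (part.strip() for part in script.split(";")):  # naive split, for illustration only
        if not statement:
            continue
        if validate:
            # Placeholder for validation; sqlspec instead builds SQL(statement, config=...)
            # and runs its processing pipeline, logging findings as warnings.
            pass
        cursor = conn.execute(statement)
        executed += 1
        total_rows += cursor.rowcount if cursor.rowcount > 0 else 0
    return executed, total_rows


conn = sqlite3.connect(":memory:")
script = "CREATE TABLE t (x INTEGER); INSERT INTO t VALUES (1); INSERT INTO t VALUES (2)"
print(run_script_per_statement(conn, script))  # (3, 2)
```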
sqlspec/adapters/bigquery/config.py

@@ -19,7 +19,6 @@ if TYPE_CHECKING:
     from google.api_core.client_info import ClientInfo
     from google.api_core.client_options import ClientOptions
     from google.auth.credentials import Credentials
-    from sqlglot.dialects.dialect import DialectType

 logger = logging.getLogger(__name__)

@@ -76,45 +75,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
     - Parquet and Arrow format optimization
     """

-    __slots__ = (
-        "_connection_instance",
-        "_dialect",
-        "client_info",
-        "client_options",
-        "credentials",
-        "credentials_path",
-        "dataframes_backend",
-        "dataset_id",
-        "default_load_job_config",
-        "default_query_job_config",
-        "default_row_type",
-        "edition",
-        "enable_bigquery_ml",
-        "enable_bigquery_omni",
-        "enable_column_level_security",
-        "enable_continuous_queries",
-        "enable_cross_cloud",
-        "enable_dataframes",
-        "enable_gemini_integration",
-        "enable_row_level_security",
-        "enable_vector_search",
-        "extras",
-        "job_timeout_ms",
-        "location",
-        "maximum_bytes_billed",
-        "on_connection_create",
-        "on_job_complete",
-        "on_job_start",
-        "parquet_enable_list_inference",
-        "pool_instance",
-        "project",
-        "query_timeout_ms",
-        "reservation_id",
-        "statement_config",
-        "use_avro_logical_types",
-        "use_query_cache",
-    )
-
     is_async: ClassVar[bool] = False
     supports_connection_pooling: ClassVar[bool] = False
@@ -125,7 +85,7 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
     supported_parameter_styles: ClassVar[tuple[str, ...]] = ("named_at",)
     """BigQuery only supports @name (named_at) parameter style."""

-
+    default_parameter_style: ClassVar[str] = "named_at"
     """BigQuery's native parameter style is @name (named_at)."""

     def __init__(
@@ -283,7 +243,6 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):

         # Store connection instance for reuse (BigQuery doesn't support traditional pooling)
         self._connection_instance: Optional[BigQueryConnection] = None
-        self._dialect: DialectType = None

         super().__init__()

@@ -391,7 +350,7 @@ class BigQueryConfig(NoPoolSyncConfig[BigQueryConnection, BigQueryDriver]):
         statement_config = replace(
             statement_config,
             allowed_parameter_styles=self.supported_parameter_styles,
-
+            default_parameter_style=self.default_parameter_style,
         )
         driver = self.driver_type(
             connection=connection,
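For context on the `named_at` style the config above pins as the default: BigQuery itself only accepts `@name` placeholders, bound through query parameters. Standard `google-cloud-bigquery` usage, independent of sqlspec (the table and parameter names here are made up):

```python
from google.cloud import bigquery

client = bigquery.Client()
job_config = bigquery.QueryJobConfig(
    query_parameters=[bigquery.ScalarQueryParameter("min_age", "INT64", 21)]
)
query = "SELECT name FROM `my_project.my_dataset.users` WHERE age >= @min_age"
for row in client.query(query, job_config=job_config).result():
    print(row["name"])
```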
sqlspec/adapters/bigquery/driver.py

@@ -24,12 +24,13 @@ from sqlspec.driver import SyncDriverAdapterProtocol
 from sqlspec.driver.connection import managed_transaction_sync
 from sqlspec.driver.mixins import (
     SQLTranslatorMixin,
+    SyncAdapterCacheMixin,
     SyncPipelinedExecutionMixin,
     SyncStorageMixin,
     ToSchemaMixin,
     TypeCoercionMixin,
 )
-from sqlspec.driver.parameters import
+from sqlspec.driver.parameters import convert_parameter_sequence
 from sqlspec.exceptions import SQLSpecError
 from sqlspec.statement.parameters import ParameterStyle, ParameterValidator
 from sqlspec.statement.result import ArrowResult, SQLResult
@@ -57,6 +58,7 @@ TIMESTAMP_ERROR_MSG_LENGTH = 189 # Length check for timestamp parsing error

 class BigQueryDriver(
     SyncDriverAdapterProtocol["BigQueryConnection", RowT],
+    SyncAdapterCacheMixin,
     SQLTranslatorMixin,
     TypeCoercionMixin,
     SyncStorageMixin,
@@ -71,8 +73,6 @@ class BigQueryDriver(
     - execute_script() - Multi-statement scripts and DDL operations
     """

-    __slots__ = ("_default_query_job_config", "on_job_complete", "on_job_start")
-
     dialect: "DialectType" = "bigquery"
     supported_parameter_styles: "tuple[ParameterStyle, ...]" = (ParameterStyle.NAMED_AT,)
     default_parameter_style: ParameterStyle = ParameterStyle.NAMED_AT
@@ -342,7 +342,7 @@
         This is now just a pass-through since the core parameter generation
         has been fixed to generate BigQuery-compatible parameter names.
         """
-        return
+        return self._get_compiled_sql(statement, target_style)

     def _execute_statement(
         self, statement: SQL, connection: Optional[BigQueryConnection] = None, **kwargs: Any
@@ -387,14 +387,14 @@ class BigQueryDriver(
         # BigQuery doesn't have traditional transactions, but we'll use the pattern for consistency
         # The managed_transaction_sync will just pass through for BigQuery Client objects
         with managed_transaction_sync(conn, auto_commit=True) as txn_conn:
-            #
-
+            # Convert parameters using consolidated utility
+            converted_params = convert_parameter_sequence(parameters)
             param_dict: dict[str, Any] = {}
-            if
-                if isinstance(
-                    param_dict =
+            if converted_params:
+                if isinstance(converted_params[0], dict):
+                    param_dict = converted_params[0]
                 else:
-                    param_dict = {f"param_{i}": val for i, val in enumerate(
+                    param_dict = {f"param_{i}": val for i, val in enumerate(converted_params)}

             bq_params = self._prepare_bq_query_parameters(param_dict)

@@ -413,14 +413,14 @@ class BigQueryDriver(

         with managed_transaction_sync(conn, auto_commit=True) as txn_conn:
             # Normalize parameter list using consolidated utility
-
+            converted_param_list = convert_parameter_sequence(param_list)

             # Use a multi-statement script for batch execution
             script_parts = []
             all_params: dict[str, Any] = {}
             param_counter = 0

-            for params in
+            for params in converted_param_list or []:
                 if isinstance(params, dict):
                     param_dict = params
                 elif isinstance(params, (list, tuple)):
@@ -470,20 +470,32 @@ class BigQueryDriver(
         with managed_transaction_sync(conn, auto_commit=True) as txn_conn:
             # BigQuery does not support multi-statement scripts in a single job
             statements = self._split_script_statements(script)
+            suppress_warnings = kwargs.get("_suppress_warnings", False)
+            successful = 0
+            total_rows = 0

             for statement in statements:
                 if statement:
+                    # Validate each statement unless warnings suppressed
+                    if not suppress_warnings:
+                        # Run validation through pipeline
+                        temp_sql = SQL(statement, config=self.config)
+                        temp_sql._ensure_processed()
+                        # Validation errors are logged as warnings by default
+
                     query_job = self._run_query_job(statement, [], connection=txn_conn)
                     query_job.result(timeout=kwargs.get("bq_job_timeout"))
+                    successful += 1
+                    total_rows += query_job.num_dml_affected_rows or 0

             return SQLResult(
                 statement=SQL(script, _dialect=self.dialect).as_script(),
                 data=[],
-                rows_affected=
+                rows_affected=total_rows,
                 operation_type="SCRIPT",
                 metadata={"status_message": "SCRIPT EXECUTED"},
                 total_statements=len(statements),
-                successful_statements=
+                successful_statements=successful,
             )

     def _connection(self, connection: "Optional[Client]" = None) -> "Client":
sqlspec/adapters/duckdb/config.py

@@ -16,8 +16,6 @@ if TYPE_CHECKING:
     from collections.abc import Generator, Sequence
     from contextlib import AbstractContextManager

-    from sqlglot.dialects.dialect import DialectType
-

 logger = logging.getLogger(__name__)

@@ -109,50 +107,6 @@ class DuckDBConfig(NoPoolSyncConfig[DuckDBConnection, DuckDBDriver]):
     - Performance optimizations for analytics workloads
     """

-    __slots__ = (
-        "_dialect",
-        "allow_community_extensions",
-        "allow_persistent_secrets",
-        "allow_unsigned_extensions",
-        "arrow_large_buffer_size",
-        "autoinstall_extension_repository",
-        "autoinstall_known_extensions",
-        "autoload_known_extensions",
-        "binary_as_string",
-        "checkpoint_threshold",
-        "config",
-        "custom_extension_repository",
-        "database",
-        "default_null_order",
-        "default_order",
-        "default_row_type",
-        "enable_external_access",
-        "enable_external_file_cache",
-        "enable_logging",
-        "enable_object_cache",
-        "enable_progress_bar",
-        "errors_as_json",
-        "extension_directory",
-        "extensions",
-        "extras",
-        "ieee_floating_point_ops",
-        "log_query_path",
-        "logging_level",
-        "max_temp_directory_size",
-        "memory_limit",
-        "on_connection_create",
-        "parquet_metadata_cache",
-        "pool_instance",
-        "preserve_insertion_order",
-        "progress_bar_time",
-        "read_only",
-        "secret_directory",
-        "secrets",
-        "statement_config",
-        "temp_directory",
-        "threads",
-    )
-
     is_async: ClassVar[bool] = False
     supports_connection_pooling: ClassVar[bool] = False
@@ -162,7 +116,7 @@ class DuckDBConfig(NoPoolSyncConfig[DuckDBConnection, DuckDBDriver]):
     supported_parameter_styles: ClassVar[tuple[str, ...]] = ("qmark", "numeric")
     """DuckDB supports ? (qmark) and $1, $2 (numeric) parameter styles."""

-
+    default_parameter_style: ClassVar[str] = "qmark"
     """DuckDB's native parameter style is ? (qmark)."""

     def __init__(
@@ -325,7 +279,6 @@ class DuckDBConfig(NoPoolSyncConfig[DuckDBConnection, DuckDBDriver]):
         self.extensions = extensions or []
         self.secrets = secrets or []
         self.on_connection_create = on_connection_create
-        self._dialect: DialectType = None

         super().__init__()

@@ -479,7 +432,7 @@ class DuckDBConfig(NoPoolSyncConfig[DuckDBConnection, DuckDBDriver]):
             statement_config = replace(
                 statement_config,
                 allowed_parameter_styles=self.supported_parameter_styles,
-
+                default_parameter_style=self.default_parameter_style,
             )
             driver = self.driver_type(connection=connection, config=statement_config)
             yield driver