sqlspec 0.11.1__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release.

Files changed (155)
  1. sqlspec/__init__.py +16 -3
  2. sqlspec/_serialization.py +3 -10
  3. sqlspec/_sql.py +1147 -0
  4. sqlspec/_typing.py +343 -41
  5. sqlspec/adapters/adbc/__init__.py +2 -6
  6. sqlspec/adapters/adbc/config.py +474 -149
  7. sqlspec/adapters/adbc/driver.py +330 -621
  8. sqlspec/adapters/aiosqlite/__init__.py +2 -6
  9. sqlspec/adapters/aiosqlite/config.py +143 -57
  10. sqlspec/adapters/aiosqlite/driver.py +269 -431
  11. sqlspec/adapters/asyncmy/__init__.py +3 -8
  12. sqlspec/adapters/asyncmy/config.py +247 -202
  13. sqlspec/adapters/asyncmy/driver.py +218 -436
  14. sqlspec/adapters/asyncpg/__init__.py +4 -7
  15. sqlspec/adapters/asyncpg/config.py +329 -176
  16. sqlspec/adapters/asyncpg/driver.py +417 -487
  17. sqlspec/adapters/bigquery/__init__.py +2 -2
  18. sqlspec/adapters/bigquery/config.py +407 -0
  19. sqlspec/adapters/bigquery/driver.py +600 -553
  20. sqlspec/adapters/duckdb/__init__.py +4 -1
  21. sqlspec/adapters/duckdb/config.py +432 -321
  22. sqlspec/adapters/duckdb/driver.py +392 -406
  23. sqlspec/adapters/oracledb/__init__.py +3 -8
  24. sqlspec/adapters/oracledb/config.py +625 -0
  25. sqlspec/adapters/oracledb/driver.py +548 -921
  26. sqlspec/adapters/psqlpy/__init__.py +4 -7
  27. sqlspec/adapters/psqlpy/config.py +372 -203
  28. sqlspec/adapters/psqlpy/driver.py +197 -533
  29. sqlspec/adapters/psycopg/__init__.py +3 -8
  30. sqlspec/adapters/psycopg/config.py +741 -0
  31. sqlspec/adapters/psycopg/driver.py +734 -694
  32. sqlspec/adapters/sqlite/__init__.py +2 -6
  33. sqlspec/adapters/sqlite/config.py +146 -81
  34. sqlspec/adapters/sqlite/driver.py +242 -405
  35. sqlspec/base.py +220 -784
  36. sqlspec/config.py +354 -0
  37. sqlspec/driver/__init__.py +22 -0
  38. sqlspec/driver/_async.py +252 -0
  39. sqlspec/driver/_common.py +338 -0
  40. sqlspec/driver/_sync.py +261 -0
  41. sqlspec/driver/mixins/__init__.py +17 -0
  42. sqlspec/driver/mixins/_pipeline.py +523 -0
  43. sqlspec/driver/mixins/_result_utils.py +122 -0
  44. sqlspec/driver/mixins/_sql_translator.py +35 -0
  45. sqlspec/driver/mixins/_storage.py +993 -0
  46. sqlspec/driver/mixins/_type_coercion.py +131 -0
  47. sqlspec/exceptions.py +299 -7
  48. sqlspec/extensions/aiosql/__init__.py +10 -0
  49. sqlspec/extensions/aiosql/adapter.py +474 -0
  50. sqlspec/extensions/litestar/__init__.py +1 -6
  51. sqlspec/extensions/litestar/_utils.py +1 -5
  52. sqlspec/extensions/litestar/config.py +5 -6
  53. sqlspec/extensions/litestar/handlers.py +13 -12
  54. sqlspec/extensions/litestar/plugin.py +22 -24
  55. sqlspec/extensions/litestar/providers.py +37 -55
  56. sqlspec/loader.py +528 -0
  57. sqlspec/service/__init__.py +3 -0
  58. sqlspec/service/base.py +24 -0
  59. sqlspec/service/pagination.py +26 -0
  60. sqlspec/statement/__init__.py +21 -0
  61. sqlspec/statement/builder/__init__.py +54 -0
  62. sqlspec/statement/builder/_ddl_utils.py +119 -0
  63. sqlspec/statement/builder/_parsing_utils.py +135 -0
  64. sqlspec/statement/builder/base.py +328 -0
  65. sqlspec/statement/builder/ddl.py +1379 -0
  66. sqlspec/statement/builder/delete.py +80 -0
  67. sqlspec/statement/builder/insert.py +274 -0
  68. sqlspec/statement/builder/merge.py +95 -0
  69. sqlspec/statement/builder/mixins/__init__.py +65 -0
  70. sqlspec/statement/builder/mixins/_aggregate_functions.py +151 -0
  71. sqlspec/statement/builder/mixins/_case_builder.py +91 -0
  72. sqlspec/statement/builder/mixins/_common_table_expr.py +91 -0
  73. sqlspec/statement/builder/mixins/_delete_from.py +34 -0
  74. sqlspec/statement/builder/mixins/_from.py +61 -0
  75. sqlspec/statement/builder/mixins/_group_by.py +119 -0
  76. sqlspec/statement/builder/mixins/_having.py +35 -0
  77. sqlspec/statement/builder/mixins/_insert_from_select.py +48 -0
  78. sqlspec/statement/builder/mixins/_insert_into.py +36 -0
  79. sqlspec/statement/builder/mixins/_insert_values.py +69 -0
  80. sqlspec/statement/builder/mixins/_join.py +110 -0
  81. sqlspec/statement/builder/mixins/_limit_offset.py +53 -0
  82. sqlspec/statement/builder/mixins/_merge_clauses.py +405 -0
  83. sqlspec/statement/builder/mixins/_order_by.py +46 -0
  84. sqlspec/statement/builder/mixins/_pivot.py +82 -0
  85. sqlspec/statement/builder/mixins/_returning.py +37 -0
  86. sqlspec/statement/builder/mixins/_select_columns.py +60 -0
  87. sqlspec/statement/builder/mixins/_set_ops.py +122 -0
  88. sqlspec/statement/builder/mixins/_unpivot.py +80 -0
  89. sqlspec/statement/builder/mixins/_update_from.py +54 -0
  90. sqlspec/statement/builder/mixins/_update_set.py +91 -0
  91. sqlspec/statement/builder/mixins/_update_table.py +29 -0
  92. sqlspec/statement/builder/mixins/_where.py +374 -0
  93. sqlspec/statement/builder/mixins/_window_functions.py +86 -0
  94. sqlspec/statement/builder/protocols.py +20 -0
  95. sqlspec/statement/builder/select.py +206 -0
  96. sqlspec/statement/builder/update.py +178 -0
  97. sqlspec/statement/filters.py +571 -0
  98. sqlspec/statement/parameters.py +736 -0
  99. sqlspec/statement/pipelines/__init__.py +67 -0
  100. sqlspec/statement/pipelines/analyzers/__init__.py +9 -0
  101. sqlspec/statement/pipelines/analyzers/_analyzer.py +649 -0
  102. sqlspec/statement/pipelines/base.py +315 -0
  103. sqlspec/statement/pipelines/context.py +119 -0
  104. sqlspec/statement/pipelines/result_types.py +41 -0
  105. sqlspec/statement/pipelines/transformers/__init__.py +8 -0
  106. sqlspec/statement/pipelines/transformers/_expression_simplifier.py +256 -0
  107. sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +623 -0
  108. sqlspec/statement/pipelines/transformers/_remove_comments.py +66 -0
  109. sqlspec/statement/pipelines/transformers/_remove_hints.py +81 -0
  110. sqlspec/statement/pipelines/validators/__init__.py +23 -0
  111. sqlspec/statement/pipelines/validators/_dml_safety.py +275 -0
  112. sqlspec/statement/pipelines/validators/_parameter_style.py +297 -0
  113. sqlspec/statement/pipelines/validators/_performance.py +703 -0
  114. sqlspec/statement/pipelines/validators/_security.py +990 -0
  115. sqlspec/statement/pipelines/validators/base.py +67 -0
  116. sqlspec/statement/result.py +527 -0
  117. sqlspec/statement/splitter.py +701 -0
  118. sqlspec/statement/sql.py +1198 -0
  119. sqlspec/storage/__init__.py +15 -0
  120. sqlspec/storage/backends/__init__.py +0 -0
  121. sqlspec/storage/backends/base.py +166 -0
  122. sqlspec/storage/backends/fsspec.py +315 -0
  123. sqlspec/storage/backends/obstore.py +464 -0
  124. sqlspec/storage/protocol.py +170 -0
  125. sqlspec/storage/registry.py +315 -0
  126. sqlspec/typing.py +157 -36
  127. sqlspec/utils/correlation.py +155 -0
  128. sqlspec/utils/deprecation.py +3 -6
  129. sqlspec/utils/fixtures.py +6 -11
  130. sqlspec/utils/logging.py +135 -0
  131. sqlspec/utils/module_loader.py +45 -43
  132. sqlspec/utils/serializers.py +4 -0
  133. sqlspec/utils/singleton.py +6 -8
  134. sqlspec/utils/sync_tools.py +15 -27
  135. sqlspec/utils/text.py +58 -26
  136. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/METADATA +97 -26
  137. sqlspec-0.12.0.dist-info/RECORD +145 -0
  138. sqlspec/adapters/bigquery/config/__init__.py +0 -3
  139. sqlspec/adapters/bigquery/config/_common.py +0 -40
  140. sqlspec/adapters/bigquery/config/_sync.py +0 -87
  141. sqlspec/adapters/oracledb/config/__init__.py +0 -9
  142. sqlspec/adapters/oracledb/config/_asyncio.py +0 -186
  143. sqlspec/adapters/oracledb/config/_common.py +0 -131
  144. sqlspec/adapters/oracledb/config/_sync.py +0 -186
  145. sqlspec/adapters/psycopg/config/__init__.py +0 -19
  146. sqlspec/adapters/psycopg/config/_async.py +0 -169
  147. sqlspec/adapters/psycopg/config/_common.py +0 -56
  148. sqlspec/adapters/psycopg/config/_sync.py +0 -168
  149. sqlspec/filters.py +0 -331
  150. sqlspec/mixins.py +0 -305
  151. sqlspec/statement.py +0 -378
  152. sqlspec-0.11.1.dist-info/RECORD +0 -69
  153. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/WHEEL +0 -0
  154. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/licenses/LICENSE +0 -0
  155. {sqlspec-0.11.1.dist-info → sqlspec-0.12.0.dist-info}/licenses/NOTICE +0 -0
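
The hunk below appears to correspond to sqlspec/adapters/psycopg/driver.py (entry 31 above): the overload-heavy select/select_one/insert_update_delete API is removed in favor of driver classes built on the new protocol and mixin stack. As a rough orientation only, here is a minimal sketch of constructing the refactored sync driver, assuming the import path shown in the file list; the connection string is illustrative, and the optional config and default_row_type arguments are left at their defaults (see __init__ in the hunk below):

    import psycopg
    from psycopg.rows import dict_row

    from sqlspec.adapters.psycopg.driver import PsycopgSyncDriver

    # The driver wraps an existing psycopg connection; PsycopgSyncConnection is
    # Connection[PsycopgDictRow], so a dict_row factory matches the expected row type.
    conn = psycopg.connect("dbname=example", row_factory=dict_row)  # hypothetical DSN
    driver = PsycopgSyncDriver(connection=conn)
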
@@ -1,749 +1,789 @@
1
- import logging
2
- import re
1
+ import io
2
+ from collections.abc import AsyncGenerator, Generator
3
3
  from contextlib import asynccontextmanager, contextmanager
4
- from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload
4
+ from typing import TYPE_CHECKING, Any, Optional, Union, cast
5
5
 
6
- from psycopg import AsyncConnection, Connection
7
- from psycopg.rows import dict_row
6
+ if TYPE_CHECKING:
7
+ from psycopg.abc import Query
8
8
 
9
- from sqlspec.base import AsyncDriverAdapterProtocol, SyncDriverAdapterProtocol
10
- from sqlspec.exceptions import ParameterStyleMismatchError
11
- from sqlspec.filters import StatementFilter
12
- from sqlspec.mixins import ResultConverter, SQLTranslatorMixin
13
- from sqlspec.statement import SQLStatement
14
- from sqlspec.typing import is_dict
9
+ from psycopg import AsyncConnection, Connection
10
+ from psycopg.rows import DictRow as PsycopgDictRow
11
+ from sqlglot.dialects.dialect import DialectType
12
+
13
+ from sqlspec.driver import AsyncDriverAdapterProtocol, SyncDriverAdapterProtocol
14
+ from sqlspec.driver.mixins import (
15
+ AsyncPipelinedExecutionMixin,
16
+ AsyncStorageMixin,
17
+ SQLTranslatorMixin,
18
+ SyncPipelinedExecutionMixin,
19
+ SyncStorageMixin,
20
+ ToSchemaMixin,
21
+ TypeCoercionMixin,
22
+ )
23
+ from sqlspec.statement.parameters import ParameterStyle
24
+ from sqlspec.statement.result import ArrowResult, DMLResultDict, ScriptResultDict, SelectResultDict, SQLResult
25
+ from sqlspec.statement.splitter import split_sql_script
26
+ from sqlspec.statement.sql import SQL, SQLConfig
27
+ from sqlspec.typing import DictRow, ModelDTOT, RowT, is_dict_with_field
28
+ from sqlspec.utils.logging import get_logger
15
29
 
16
30
  if TYPE_CHECKING:
17
- from collections.abc import AsyncGenerator, Generator, Mapping, Sequence
31
+ from sqlglot.dialects.dialect import DialectType
18
32
 
19
- from sqlspec.typing import ModelDTOT, StatementParameterType, T
20
-
21
- logger = logging.getLogger("sqlspec")
33
+ logger = get_logger("adapters.psycopg")
22
34
 
23
35
  __all__ = ("PsycopgAsyncConnection", "PsycopgAsyncDriver", "PsycopgSyncConnection", "PsycopgSyncDriver")
24
36
 
37
+ PsycopgSyncConnection = Connection[PsycopgDictRow]
38
+ PsycopgAsyncConnection = AsyncConnection[PsycopgDictRow]
25
39
 
26
- NAMED_PARAMS_PATTERN = re.compile(r"(?<!:):([a-zA-Z0-9_]+)")
27
- # Pattern matches %(name)s format while trying to avoid matches in string literals and comments
28
- PSYCOPG_PARAMS_PATTERN = re.compile(r"(?<!'|\"|\w)%\(([a-zA-Z0-9_]+)\)s(?!'|\")")
29
-
30
- PsycopgSyncConnection = Connection
31
- PsycopgAsyncConnection = AsyncConnection
32
40
 
41
+ class PsycopgSyncDriver(
42
+ SyncDriverAdapterProtocol[PsycopgSyncConnection, RowT],
43
+ SQLTranslatorMixin,
44
+ TypeCoercionMixin,
45
+ SyncStorageMixin,
46
+ SyncPipelinedExecutionMixin,
47
+ ToSchemaMixin,
48
+ ):
49
+ """Psycopg Sync Driver Adapter. Refactored for new protocol."""
33
50
 
34
- class PsycopgDriverBase:
35
- dialect: str = "postgres"
51
+ dialect: "DialectType" = "postgres" # pyright: ignore[reportInvalidTypeForm]
52
+ supported_parameter_styles: "tuple[ParameterStyle, ...]" = (
53
+ ParameterStyle.POSITIONAL_PYFORMAT,
54
+ ParameterStyle.NAMED_PYFORMAT,
55
+ )
56
+ default_parameter_style: ParameterStyle = ParameterStyle.POSITIONAL_PYFORMAT
57
+ __slots__ = ()
36
58
 
37
- def _process_sql_params(
59
+ def __init__(
38
60
  self,
39
- sql: str,
40
- parameters: "Optional[StatementParameterType]" = None,
41
- *filters: "StatementFilter",
42
- **kwargs: Any,
43
- ) -> "tuple[str, Optional[Union[tuple[Any, ...], list[Any], dict[str, Any]]]]":
44
- """Process SQL and parameters using SQLStatement with dialect support.
61
+ connection: PsycopgSyncConnection,
62
+ config: "Optional[SQLConfig]" = None,
63
+ default_row_type: "type[DictRow]" = dict,
64
+ ) -> None:
65
+ super().__init__(connection=connection, config=config, default_row_type=default_row_type)
45
66
 
46
- Args:
47
- sql: The SQL statement to process.
48
- parameters: The parameters to bind to the statement.
49
- *filters: Statement filters to apply.
50
- **kwargs: Additional keyword arguments.
67
+ @staticmethod
68
+ @contextmanager
69
+ def _get_cursor(connection: PsycopgSyncConnection) -> Generator[Any, None, None]:
70
+ with connection.cursor() as cursor:
71
+ yield cursor
72
+
73
+ def _execute_statement(
74
+ self, statement: SQL, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
75
+ ) -> Union[SelectResultDict, DMLResultDict, ScriptResultDict]:
76
+ if statement.is_script:
77
+ sql, _ = statement.compile(placeholder_style=ParameterStyle.STATIC)
78
+ return self._execute_script(sql, connection=connection, **kwargs)
79
+
80
+ detected_styles = {p.style for p in statement.parameter_info}
81
+ target_style = self.default_parameter_style
82
+ unsupported_styles = detected_styles - set(self.supported_parameter_styles)
83
+ if unsupported_styles:
84
+ target_style = self.default_parameter_style
85
+ elif detected_styles:
86
+ for style in detected_styles:
87
+ if style in self.supported_parameter_styles:
88
+ target_style = style
89
+ break
90
+
91
+ if statement.is_many:
92
+ sql, params = statement.compile(placeholder_style=target_style)
93
+ # For execute_many, check if parameters were passed via kwargs (legacy support)
94
+ # Otherwise use the parameters from the SQL object
95
+ kwargs_params = kwargs.get("parameters")
96
+ if kwargs_params is not None:
97
+ params = kwargs_params
98
+ if params is not None:
99
+ processed_params = [self._process_parameters(param_set) for param_set in params]
100
+ params = processed_params
101
+ return self._execute_many(sql, params, connection=connection, **kwargs)
102
+
103
+ sql, params = statement.compile(placeholder_style=target_style)
104
+ params = self._process_parameters(params)
105
+ return self._execute(sql, params, statement, connection=connection, **kwargs)
106
+
107
+ def _execute(
108
+ self,
109
+ sql: str,
110
+ parameters: Any,
111
+ statement: SQL,
112
+ connection: Optional[PsycopgSyncConnection] = None,
113
+ **kwargs: Any,
114
+ ) -> Union[SelectResultDict, DMLResultDict]:
115
+ conn = self._connection(connection)
116
+ with conn.cursor() as cursor:
117
+ cursor.execute(cast("Query", sql), parameters)
118
+ # Check if the statement returns rows by checking cursor.description
119
+ # This is more reliable than parsing when parsing is disabled
120
+ if cursor.description is not None:
121
+ fetched_data = cursor.fetchall()
122
+ column_names = [col.name for col in cursor.description]
123
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
124
+ return {"rows_affected": cursor.rowcount, "status_message": cursor.statusmessage or "OK"}
125
+
126
+ def _execute_many(
127
+ self, sql: str, param_list: Any, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
128
+ ) -> DMLResultDict:
129
+ conn = self._connection(connection)
130
+ with self._get_cursor(conn) as cursor:
131
+ cursor.executemany(sql, param_list or [])
132
+ # psycopg's executemany might return -1 or 0 for rowcount
133
+ # In that case, use the length of param_list for DML operations
134
+ rows_affected = cursor.rowcount
135
+ if rows_affected <= 0 and param_list:
136
+ rows_affected = len(param_list)
137
+ result: DMLResultDict = {"rows_affected": rows_affected, "status_message": cursor.statusmessage or "OK"}
138
+ return result
139
+
140
+ def _execute_script(
141
+ self, script: str, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
142
+ ) -> ScriptResultDict:
143
+ conn = self._connection(connection)
144
+ with self._get_cursor(conn) as cursor:
145
+ cursor.execute(script)
146
+ result: ScriptResultDict = {
147
+ "statements_executed": -1,
148
+ "status_message": cursor.statusmessage or "SCRIPT EXECUTED",
149
+ }
150
+ return result
151
+
152
+ def _ingest_arrow_table(self, table: "Any", table_name: str, mode: str = "append", **options: Any) -> int:
153
+ self._ensure_pyarrow_installed()
154
+ import pyarrow.csv as pacsv
155
+
156
+ conn = self._connection(None)
157
+ with self._get_cursor(conn) as cursor:
158
+ if mode == "replace":
159
+ cursor.execute(f"TRUNCATE TABLE {table_name}")
160
+ elif mode == "create":
161
+ msg = "'create' mode is not supported for psycopg ingestion."
162
+ raise NotImplementedError(msg)
163
+
164
+ buffer = io.StringIO()
165
+ pacsv.write_csv(table, buffer)
166
+ buffer.seek(0)
167
+
168
+ with cursor.copy(f"COPY {table_name} FROM STDIN WITH (FORMAT CSV, HEADER)") as copy:
169
+ copy.write(buffer.read())
170
+
171
+ return cursor.rowcount if cursor.rowcount is not None else -1
172
+
173
+ def _wrap_select_result(
174
+ self, statement: SQL, result: SelectResultDict, schema_type: Optional[type[ModelDTOT]] = None, **kwargs: Any
175
+ ) -> Union[SQLResult[ModelDTOT], SQLResult[RowT]]:
176
+ rows_as_dicts: list[dict[str, Any]] = [dict(row) for row in result["data"]]
177
+
178
+ if schema_type:
179
+ return SQLResult[ModelDTOT](
180
+ statement=statement,
181
+ data=list(self.to_schema(data=result["data"], schema_type=schema_type)),
182
+ column_names=result["column_names"],
183
+ rows_affected=result["rows_affected"],
184
+ operation_type="SELECT",
185
+ )
186
+ return SQLResult[RowT](
187
+ statement=statement,
188
+ data=rows_as_dicts,
189
+ column_names=result["column_names"],
190
+ rows_affected=result["rows_affected"],
191
+ operation_type="SELECT",
192
+ )
193
+
194
+ def _wrap_execute_result(
195
+ self, statement: SQL, result: Union[DMLResultDict, ScriptResultDict], **kwargs: Any
196
+ ) -> SQLResult[RowT]:
197
+ operation_type = "UNKNOWN"
198
+ if statement.expression:
199
+ operation_type = str(statement.expression.key).upper()
200
+
201
+ # Handle case where we got a SelectResultDict but it was routed here due to parsing being disabled
202
+ if is_dict_with_field(result, "data") and is_dict_with_field(result, "column_names"):
203
+ # This is actually a SELECT result, wrap it properly
204
+ return self._wrap_select_result(statement, cast("SelectResultDict", result), **kwargs)
205
+
206
+ if is_dict_with_field(result, "statements_executed"):
207
+ return SQLResult[RowT](
208
+ statement=statement,
209
+ data=[],
210
+ rows_affected=0,
211
+ operation_type="SCRIPT",
212
+ metadata={"status_message": result.get("status_message", "")},
213
+ )
214
+
215
+ if is_dict_with_field(result, "rows_affected"):
216
+ return SQLResult[RowT](
217
+ statement=statement,
218
+ data=[],
219
+ rows_affected=cast("int", result.get("rows_affected", -1)),
220
+ operation_type=operation_type,
221
+ metadata={"status_message": result.get("status_message", "")},
222
+ )
223
+
224
+ # This shouldn't happen with TypedDict approach
225
+ msg = f"Unexpected result type: {type(result)}"
226
+ raise ValueError(msg)
227
+
228
+ def _connection(self, connection: Optional[PsycopgSyncConnection] = None) -> PsycopgSyncConnection:
229
+ """Get the connection to use for the operation."""
230
+ return connection or self.connection
231
+
232
+ def _execute_pipeline_native(self, operations: "list[Any]", **options: Any) -> "list[SQLResult[RowT]]":
233
+ """Native pipeline execution using Psycopg's pipeline support.
234
+
235
+ Psycopg has built-in pipeline support through the connection.pipeline() context manager.
236
+ This provides significant performance benefits for batch operations.
51
237
 
52
- Raises:
53
- ParameterStyleMismatchError: If the parameter style is mismatched.
238
+ Args:
239
+ operations: List of PipelineOperation objects
240
+ **options: Pipeline configuration options
54
241
 
55
242
  Returns:
56
- A tuple of (sql, parameters) ready for execution.
243
+ List of SQLResult objects from all operations
57
244
  """
58
- data_params_for_statement: Optional[Union[Mapping[str, Any], Sequence[Any]]] = None
59
- combined_filters_list: list[StatementFilter] = list(filters)
60
-
61
- if parameters is not None:
62
- if isinstance(parameters, StatementFilter):
63
- combined_filters_list.insert(0, parameters)
64
- else:
65
- data_params_for_statement = parameters
66
- if data_params_for_statement is not None and not isinstance(data_params_for_statement, (list, tuple, dict)):
67
- data_params_for_statement = (data_params_for_statement,)
68
- statement = SQLStatement(sql, data_params_for_statement, kwargs=kwargs, dialect=self.dialect)
69
-
70
- # Apply all statement filters
71
- for filter_obj in combined_filters_list:
72
- statement = statement.apply_filter(filter_obj)
73
-
74
- processed_sql, processed_params, _ = statement.process()
75
-
76
- if is_dict(processed_params):
77
- named_params = NAMED_PARAMS_PATTERN.findall(processed_sql)
78
-
79
- if not named_params:
80
- if PSYCOPG_PARAMS_PATTERN.search(processed_sql):
81
- return processed_sql, processed_params
82
-
83
- if processed_params:
84
- msg = "psycopg: Dictionary parameters provided, but no named placeholders found in SQL."
85
- raise ParameterStyleMismatchError(msg)
86
- return processed_sql, None
87
-
88
- # Convert named parameters to psycopg's preferred format
89
- return NAMED_PARAMS_PATTERN.sub("%s", processed_sql), tuple(processed_params[name] for name in named_params)
90
-
91
- # For sequence parameters, ensure they're a tuple
92
- if isinstance(processed_params, (list, tuple)):
93
- return processed_sql, tuple(processed_params)
94
-
95
- # For scalar parameter or None
96
- if processed_params is not None:
97
- return processed_sql, (processed_params,)
245
+ from sqlspec.exceptions import PipelineExecutionError
98
246
 
99
- return processed_sql, None
247
+ results = []
248
+ connection = self._connection()
100
249
 
250
+ try:
251
+ with connection.pipeline():
252
+ for i, op in enumerate(operations):
253
+ result = self._execute_pipeline_operation(i, op, connection, options)
254
+ results.append(result)
101
255
 
102
- class PsycopgSyncDriver(
103
- PsycopgDriverBase,
104
- SQLTranslatorMixin["PsycopgSyncConnection"],
105
- SyncDriverAdapterProtocol["PsycopgSyncConnection"],
106
- ResultConverter,
107
- ):
108
- """Psycopg Sync Driver Adapter."""
256
+ except Exception as e:
257
+ if not isinstance(e, PipelineExecutionError):
258
+ msg = f"Psycopg pipeline execution failed: {e}"
259
+ raise PipelineExecutionError(msg) from e
260
+ raise
109
261
 
110
- connection: "PsycopgSyncConnection"
262
+ return results
111
263
 
112
- def __init__(self, connection: "PsycopgSyncConnection") -> None:
113
- self.connection = connection
264
+ def _execute_pipeline_operation(
265
+ self, index: int, operation: Any, connection: Any, options: dict
266
+ ) -> "SQLResult[RowT]":
267
+ """Execute a single pipeline operation with error handling."""
268
+ from sqlspec.exceptions import PipelineExecutionError
114
269
 
115
- @staticmethod
116
- @contextmanager
117
- def _with_cursor(connection: "PsycopgSyncConnection") -> "Generator[Any, None, None]":
118
- cursor = connection.cursor(row_factory=dict_row)
119
270
  try:
120
- yield cursor
121
- finally:
122
- cursor.close()
271
+ # Prepare SQL and parameters
272
+ filtered_sql = self._apply_operation_filters(operation.sql, operation.filters)
273
+ sql_str = filtered_sql.to_sql(placeholder_style=self.default_parameter_style)
274
+ params = self._convert_psycopg_params(filtered_sql.parameters)
275
+
276
+ # Execute based on operation type
277
+ result = self._dispatch_pipeline_operation(operation, sql_str, params, connection)
278
+
279
+ except Exception as e:
280
+ if options.get("continue_on_error"):
281
+ return SQLResult[RowT](
282
+ statement=operation.sql,
283
+ data=cast("list[RowT]", []),
284
+ error=e,
285
+ operation_index=index,
286
+ parameters=operation.original_params,
287
+ )
288
+ msg = f"Psycopg pipeline failed at operation {index}: {e}"
289
+ raise PipelineExecutionError(
290
+ msg, operation_index=index, partial_results=[], failed_operation=operation
291
+ ) from e
292
+ else:
293
+ result.operation_index = index
294
+ result.pipeline_sql = operation.sql
295
+ return result
296
+
297
+ def _dispatch_pipeline_operation(
298
+ self, operation: Any, sql_str: str, params: Any, connection: Any
299
+ ) -> "SQLResult[RowT]":
300
+ """Dispatch to appropriate handler based on operation type."""
301
+ handlers = {
302
+ "execute_many": self._handle_pipeline_execute_many,
303
+ "select": self._handle_pipeline_select,
304
+ "execute_script": self._handle_pipeline_execute_script,
305
+ }
306
+
307
+ handler = handlers.get(operation.operation_type, self._handle_pipeline_execute)
308
+ return handler(operation.sql, sql_str, params, connection)
309
+
310
+ def _handle_pipeline_execute_many(
311
+ self, sql: "SQL", sql_str: str, params: Any, connection: Any
312
+ ) -> "SQLResult[RowT]":
313
+ """Handle execute_many operation in pipeline."""
314
+ with connection.cursor() as cursor:
315
+ cursor.executemany(sql_str, params)
316
+ return SQLResult[RowT](
317
+ statement=sql,
318
+ data=cast("list[RowT]", []),
319
+ rows_affected=cursor.rowcount,
320
+ operation_type="execute_many",
321
+ metadata={"status_message": "OK"},
322
+ )
323
+
324
+ def _handle_pipeline_select(self, sql: "SQL", sql_str: str, params: Any, connection: Any) -> "SQLResult[RowT]":
325
+ """Handle select operation in pipeline."""
326
+ with connection.cursor() as cursor:
327
+ cursor.execute(sql_str, params)
328
+ fetched_data = cursor.fetchall()
329
+ column_names = [col.name for col in cursor.description or []]
330
+ data = [dict(record) for record in fetched_data] if fetched_data else []
331
+ return SQLResult[RowT](
332
+ statement=sql,
333
+ data=cast("list[RowT]", data),
334
+ rows_affected=len(data),
335
+ operation_type="select",
336
+ metadata={"column_names": column_names},
337
+ )
338
+
339
+ def _handle_pipeline_execute_script(
340
+ self, sql: "SQL", sql_str: str, params: Any, connection: Any
341
+ ) -> "SQLResult[RowT]":
342
+ """Handle execute_script operation in pipeline."""
343
+ script_statements = self._split_script_statements(sql_str)
344
+ total_affected = 0
345
+
346
+ with connection.cursor() as cursor:
347
+ for stmt in script_statements:
348
+ if stmt.strip():
349
+ cursor.execute(stmt)
350
+ total_affected += cursor.rowcount or 0
351
+
352
+ return SQLResult[RowT](
353
+ statement=sql,
354
+ data=cast("list[RowT]", []),
355
+ rows_affected=total_affected,
356
+ operation_type="execute_script",
357
+ metadata={"status_message": "SCRIPT EXECUTED", "statements_executed": len(script_statements)},
358
+ )
359
+
360
+ def _handle_pipeline_execute(self, sql: "SQL", sql_str: str, params: Any, connection: Any) -> "SQLResult[RowT]":
361
+ """Handle regular execute operation in pipeline."""
362
+ with connection.cursor() as cursor:
363
+ cursor.execute(sql_str, params)
364
+ return SQLResult[RowT](
365
+ statement=sql,
366
+ data=cast("list[RowT]", []),
367
+ rows_affected=cursor.rowcount or 0,
368
+ operation_type="execute",
369
+ metadata={"status_message": "OK"},
370
+ )
371
+
372
+ def _convert_psycopg_params(self, params: Any) -> Any:
373
+ """Convert parameters to Psycopg-compatible format.
374
+
375
+ Psycopg supports both named (%(name)s) and positional (%s) parameters.
123
376
 
124
- # --- Public API Methods --- #
125
- @overload
126
- def select(
127
- self,
128
- sql: str,
129
- parameters: "Optional[StatementParameterType]" = None,
130
- *filters: "StatementFilter",
131
- connection: "Optional[PsycopgSyncConnection]" = None,
132
- schema_type: None = None,
133
- **kwargs: Any,
134
- ) -> "Sequence[dict[str, Any]]": ...
135
- @overload
136
- def select(
137
- self,
138
- sql: str,
139
- parameters: "Optional[StatementParameterType]" = None,
140
- *filters: "StatementFilter",
141
- connection: "Optional[PsycopgSyncConnection]" = None,
142
- schema_type: "type[ModelDTOT]",
143
- **kwargs: Any,
144
- ) -> "Sequence[ModelDTOT]": ...
145
- def select(
146
- self,
147
- sql: str,
148
- parameters: "Optional[StatementParameterType]" = None,
149
- *filters: "StatementFilter",
150
- schema_type: "Optional[type[ModelDTOT]]" = None,
151
- connection: "Optional[PsycopgSyncConnection]" = None,
152
- **kwargs: Any,
153
- ) -> "Sequence[Union[ModelDTOT, dict[str, Any]]]":
154
- """Fetch data from the database.
377
+ Args:
378
+ params: Parameters in various formats
155
379
 
156
380
  Returns:
157
- List of row data as either model instances or dictionaries.
381
+ Parameters in Psycopg-compatible format
158
382
  """
159
- connection = self._connection(connection)
160
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
161
- with self._with_cursor(connection) as cursor:
162
- cursor.execute(sql, parameters)
163
- results = cursor.fetchall()
164
- if not results:
165
- return []
166
-
167
- return self.to_schema(cast("Sequence[dict[str, Any]]", results), schema_type=schema_type)
168
-
169
- @overload
170
- def select_one(
171
- self,
172
- sql: str,
173
- parameters: "Optional[StatementParameterType]" = None,
174
- *filters: "StatementFilter",
175
- connection: "Optional[PsycopgSyncConnection]" = None,
176
- schema_type: None = None,
177
- **kwargs: Any,
178
- ) -> "dict[str, Any]": ...
179
- @overload
180
- def select_one(
181
- self,
182
- sql: str,
183
- parameters: "Optional[StatementParameterType]" = None,
184
- *filters: "StatementFilter",
185
- connection: "Optional[PsycopgSyncConnection]" = None,
186
- schema_type: "type[ModelDTOT]",
187
- **kwargs: Any,
188
- ) -> "ModelDTOT": ...
189
- def select_one(
190
- self,
191
- sql: str,
192
- parameters: "Optional[StatementParameterType]" = None,
193
- *filters: "StatementFilter",
194
- connection: "Optional[PsycopgSyncConnection]" = None,
195
- schema_type: "Optional[type[ModelDTOT]]" = None,
196
- **kwargs: Any,
197
- ) -> "Union[ModelDTOT, dict[str, Any]]":
198
- """Fetch one row from the database.
383
+ if params is None:
384
+ return None
385
+ if isinstance(params, dict):
386
+ # Psycopg handles dict parameters directly for named placeholders
387
+ return params
388
+ if isinstance(params, (list, tuple)):
389
+ # Convert to tuple for positional parameters
390
+ return tuple(params)
391
+ # Single parameter
392
+ return (params,)
199
393
 
200
- Returns:
201
- The first row of the query results.
202
- """
203
- connection = self._connection(connection)
204
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
205
- with self._with_cursor(connection) as cursor:
206
- cursor.execute(sql, parameters)
207
- result = cursor.fetchone()
208
- result = self.check_not_found(result)
209
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
210
-
211
- @overload
212
- def select_one_or_none(
213
- self,
214
- sql: str,
215
- parameters: "Optional[StatementParameterType]" = None,
216
- *filters: "StatementFilter",
217
- connection: "Optional[PsycopgSyncConnection]" = None,
218
- schema_type: None = None,
219
- **kwargs: Any,
220
- ) -> "Optional[dict[str, Any]]": ...
221
- @overload
222
- def select_one_or_none(
223
- self,
224
- sql: str,
225
- parameters: "Optional[StatementParameterType]" = None,
226
- *filters: "StatementFilter",
227
- connection: "Optional[PsycopgSyncConnection]" = None,
228
- schema_type: "type[ModelDTOT]",
229
- **kwargs: Any,
230
- ) -> "Optional[ModelDTOT]": ...
231
- def select_one_or_none(
232
- self,
233
- sql: str,
234
- parameters: "Optional[StatementParameterType]" = None,
235
- *filters: "StatementFilter",
236
- connection: "Optional[PsycopgSyncConnection]" = None,
237
- schema_type: "Optional[type[ModelDTOT]]" = None,
238
- **kwargs: Any,
239
- ) -> "Optional[Union[ModelDTOT, dict[str, Any]]]":
240
- """Fetch one row from the database.
394
+ def _apply_operation_filters(self, sql: "SQL", filters: "list[Any]") -> "SQL":
395
+ """Apply filters to a SQL object for pipeline operations."""
396
+ if not filters:
397
+ return sql
241
398
 
242
- Returns:
243
- The first row of the query results, or None if no results.
244
- """
245
- connection = self._connection(connection)
246
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
247
- with self._with_cursor(connection) as cursor:
248
- cursor.execute(sql, parameters)
249
- result = cursor.fetchone()
250
- if result is None:
251
- return None
252
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
253
-
254
- @overload
255
- def select_value(
256
- self,
257
- sql: str,
258
- parameters: "Optional[StatementParameterType]" = None,
259
- *filters: "StatementFilter",
260
- connection: "Optional[PsycopgSyncConnection]" = None,
261
- schema_type: None = None,
262
- **kwargs: Any,
263
- ) -> "Any": ...
264
- @overload
265
- def select_value(
266
- self,
267
- sql: str,
268
- parameters: "Optional[StatementParameterType]" = None,
269
- *filters: "StatementFilter",
270
- connection: "Optional[PsycopgSyncConnection]" = None,
271
- schema_type: "type[T]",
272
- **kwargs: Any,
273
- ) -> "T": ...
274
- def select_value(
275
- self,
276
- sql: str,
277
- parameters: "Optional[StatementParameterType]" = None,
278
- *filters: "StatementFilter",
279
- connection: "Optional[PsycopgSyncConnection]" = None,
280
- schema_type: "Optional[type[T]]" = None,
281
- **kwargs: Any,
282
- ) -> "Union[T, Any]":
283
- """Fetch a single value from the database.
399
+ result_sql = sql
400
+ for filter_obj in filters:
401
+ if hasattr(filter_obj, "apply"):
402
+ result_sql = filter_obj.apply(result_sql)
284
403
 
285
- Returns:
286
- The first value from the first row of results.
287
- """
288
- connection = self._connection(connection)
289
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
290
- with self._with_cursor(connection) as cursor:
291
- cursor.execute(sql, parameters)
292
- result = cursor.fetchone()
293
- result = self.check_not_found(result)
294
-
295
- value = next(iter(result.values())) # Get the first value from the row
296
- if schema_type is None:
297
- return value
298
- return schema_type(value) # type: ignore[call-arg]
299
-
300
- @overload
301
- def select_value_or_none(
302
- self,
303
- sql: str,
304
- parameters: "Optional[StatementParameterType]" = None,
305
- *filters: "StatementFilter",
306
- connection: "Optional[PsycopgSyncConnection]" = None,
307
- schema_type: None = None,
308
- **kwargs: Any,
309
- ) -> "Optional[Any]": ...
310
- @overload
311
- def select_value_or_none(
312
- self,
313
- sql: str,
314
- parameters: "Optional[StatementParameterType]" = None,
315
- *filters: "StatementFilter",
316
- connection: "Optional[PsycopgSyncConnection]" = None,
317
- schema_type: "type[T]",
318
- **kwargs: Any,
319
- ) -> "Optional[T]": ...
320
- def select_value_or_none(
321
- self,
322
- sql: str,
323
- parameters: "Optional[StatementParameterType]" = None,
324
- *filters: "StatementFilter",
325
- connection: "Optional[PsycopgSyncConnection]" = None,
326
- schema_type: "Optional[type[T]]" = None,
327
- **kwargs: Any,
328
- ) -> "Optional[Union[T, Any]]":
329
- """Fetch a single value from the database.
404
+ return result_sql
330
405
 
331
- Returns:
332
- The first value from the first row of results, or None if no results.
333
- """
334
- connection = self._connection(connection)
335
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
336
- with self._with_cursor(connection) as cursor:
337
- cursor.execute(sql, parameters)
338
- result = cursor.fetchone()
339
- if result is None:
340
- return None
341
-
342
- value = next(iter(result.values())) # Get the first value from the row
343
- if schema_type is None:
344
- return value
345
- return schema_type(value) # type: ignore[call-arg]
346
-
347
- def insert_update_delete(
348
- self,
349
- sql: str,
350
- parameters: "Optional[StatementParameterType]" = None,
351
- *filters: "StatementFilter",
352
- connection: "Optional[PsycopgSyncConnection]" = None,
353
- **kwargs: Any,
354
- ) -> int:
355
- """Insert, update, or delete data from the database.
406
+ def _split_script_statements(self, script: str, strip_trailing_semicolon: bool = False) -> "list[str]":
407
+ """Split a SQL script into individual statements."""
356
408
 
357
- Returns:
358
- Row count affected by the operation.
359
- """
360
- connection = self._connection(connection)
361
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
362
- with self._with_cursor(connection) as cursor:
363
- cursor.execute(sql, parameters)
364
- return getattr(cursor, "rowcount", -1) # pyright: ignore[reportUnknownMemberType]
365
-
366
- @overload
367
- def insert_update_delete_returning(
368
- self,
369
- sql: str,
370
- parameters: "Optional[StatementParameterType]" = None,
371
- *filters: "StatementFilter",
372
- connection: "Optional[PsycopgSyncConnection]" = None,
373
- schema_type: None = None,
374
- **kwargs: Any,
375
- ) -> "dict[str, Any]": ...
376
- @overload
377
- def insert_update_delete_returning(
378
- self,
379
- sql: str,
380
- parameters: "Optional[StatementParameterType]" = None,
381
- *filters: "StatementFilter",
382
- connection: "Optional[PsycopgSyncConnection]" = None,
383
- schema_type: "type[ModelDTOT]",
384
- **kwargs: Any,
385
- ) -> "ModelDTOT": ...
386
- def insert_update_delete_returning(
387
- self,
388
- sql: str,
389
- parameters: "Optional[StatementParameterType]" = None,
390
- *filters: "StatementFilter",
391
- connection: "Optional[PsycopgSyncConnection]" = None,
392
- schema_type: "Optional[type[ModelDTOT]]" = None,
393
- **kwargs: Any,
394
- ) -> "Union[ModelDTOT, dict[str, Any]]":
395
- """Insert, update, or delete data with RETURNING clause.
396
-
397
- Returns:
398
- The returned row data, as either a model instance or dictionary.
399
- """
400
- connection = self._connection(connection)
401
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
402
- with self._with_cursor(connection) as cursor:
403
- cursor.execute(sql, parameters)
404
- result = cursor.fetchone()
405
- result = self.check_not_found(result)
406
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
407
-
408
- def execute_script(
409
- self,
410
- sql: str,
411
- parameters: "Optional[StatementParameterType]" = None,
412
- connection: "Optional[PsycopgSyncConnection]" = None,
413
- **kwargs: Any,
414
- ) -> str:
415
- """Execute a script.
416
-
417
- Returns:
418
- Status message for the operation.
419
- """
420
- connection = self._connection(connection)
421
- sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
422
- with self._with_cursor(connection) as cursor:
423
- cursor.execute(sql, parameters)
424
- return str(cursor.statusmessage) if cursor.statusmessage is not None else "DONE"
409
+ # Use the sophisticated splitter with PostgreSQL dialect
410
+ return split_sql_script(script=script, dialect="postgresql", strip_trailing_semicolon=strip_trailing_semicolon)
425
411
 
426
412
 
427
413
  class PsycopgAsyncDriver(
428
- PsycopgDriverBase,
429
- SQLTranslatorMixin["PsycopgAsyncConnection"],
430
- AsyncDriverAdapterProtocol["PsycopgAsyncConnection"],
431
- ResultConverter,
414
+ AsyncDriverAdapterProtocol[PsycopgAsyncConnection, RowT],
415
+ SQLTranslatorMixin,
416
+ TypeCoercionMixin,
417
+ AsyncStorageMixin,
418
+ AsyncPipelinedExecutionMixin,
419
+ ToSchemaMixin,
432
420
  ):
433
- """Psycopg Async Driver Adapter."""
421
+ """Psycopg Async Driver Adapter. Refactored for new protocol."""
434
422
 
435
- connection: "PsycopgAsyncConnection"
423
+ dialect: "DialectType" = "postgres" # pyright: ignore[reportInvalidTypeForm]
424
+ supported_parameter_styles: "tuple[ParameterStyle, ...]" = (
425
+ ParameterStyle.POSITIONAL_PYFORMAT,
426
+ ParameterStyle.NAMED_PYFORMAT,
427
+ )
428
+ default_parameter_style: ParameterStyle = ParameterStyle.POSITIONAL_PYFORMAT
429
+ __slots__ = ()
436
430
 
437
- def __init__(self, connection: "PsycopgAsyncConnection") -> None:
438
- self.connection = connection
431
+ def __init__(
432
+ self,
433
+ connection: PsycopgAsyncConnection,
434
+ config: Optional[SQLConfig] = None,
435
+ default_row_type: "type[DictRow]" = dict,
436
+ ) -> None:
437
+ super().__init__(connection=connection, config=config, default_row_type=default_row_type)
439
438
 
440
439
  @staticmethod
441
440
  @asynccontextmanager
442
- async def _with_cursor(connection: "PsycopgAsyncConnection") -> "AsyncGenerator[Any, None]":
443
- cursor = connection.cursor(row_factory=dict_row)
444
- try:
441
+ async def _get_cursor(connection: PsycopgAsyncConnection) -> AsyncGenerator[Any, None]:
442
+ async with connection.cursor() as cursor:
445
443
  yield cursor
446
- finally:
447
- await cursor.close()
448
444
 
449
- # --- Public API Methods --- #
450
- @overload
451
- async def select(
452
- self,
453
- sql: str,
454
- parameters: "Optional[StatementParameterType]" = None,
455
- *filters: "StatementFilter",
456
- connection: "Optional[PsycopgAsyncConnection]" = None,
457
- schema_type: None = None,
458
- **kwargs: Any,
459
- ) -> "Sequence[dict[str, Any]]": ...
460
- @overload
461
- async def select(
462
- self,
463
- sql: str,
464
- parameters: "Optional[StatementParameterType]" = None,
465
- *filters: "StatementFilter",
466
- connection: "Optional[PsycopgAsyncConnection]" = None,
467
- schema_type: "type[ModelDTOT]",
468
- **kwargs: Any,
469
- ) -> "Sequence[ModelDTOT]": ...
470
- async def select(
471
- self,
472
- sql: str,
473
- parameters: "Optional[StatementParameterType]" = None,
474
- *filters: "StatementFilter",
475
- schema_type: "Optional[type[ModelDTOT]]" = None,
476
- connection: "Optional[PsycopgAsyncConnection]" = None,
477
- **kwargs: Any,
478
- ) -> "Sequence[Union[ModelDTOT, dict[str, Any]]]":
479
- """Fetch data from the database.
445
+ async def _execute_statement(
446
+ self, statement: SQL, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
447
+ ) -> Union[SelectResultDict, DMLResultDict, ScriptResultDict]:
448
+ if statement.is_script:
449
+ sql, _ = statement.compile(placeholder_style=ParameterStyle.STATIC)
450
+ return await self._execute_script(sql, connection=connection, **kwargs)
451
+
452
+ # Determine if we need to convert parameter style
453
+ detected_styles = {p.style for p in statement.parameter_info}
454
+ target_style = self.default_parameter_style
455
+
456
+ # Check if any detected style is not supported
457
+ unsupported_styles = detected_styles - set(self.supported_parameter_styles)
458
+ if unsupported_styles:
459
+ # Convert to default style if we have unsupported styles
460
+ target_style = self.default_parameter_style
461
+ elif detected_styles:
462
+ # Use the first detected style if all are supported
463
+ # Prefer the first supported style found
464
+ for style in detected_styles:
465
+ if style in self.supported_parameter_styles:
466
+ target_style = style
467
+ break
468
+
469
+ if statement.is_many:
470
+ sql, _ = statement.compile(placeholder_style=target_style)
471
+ # For execute_many, use the parameters passed via kwargs
472
+ params = kwargs.get("parameters")
473
+ if params is not None:
474
+ # Process each parameter set individually
475
+ processed_params = [self._process_parameters(param_set) for param_set in params]
476
+ params = processed_params
477
+ return await self._execute_many(sql, params, connection=connection, **kwargs)
478
+
479
+ sql, params = statement.compile(placeholder_style=target_style)
480
+ params = self._process_parameters(params)
481
+ return await self._execute(sql, params, statement, connection=connection, **kwargs)
482
+
483
+ async def _execute(
484
+ self,
485
+ sql: str,
486
+ parameters: Any,
487
+ statement: SQL,
488
+ connection: Optional[PsycopgAsyncConnection] = None,
489
+ **kwargs: Any,
490
+ ) -> Union[SelectResultDict, DMLResultDict]:
491
+ conn = self._connection(connection)
492
+ async with conn.cursor() as cursor:
493
+ await cursor.execute(cast("Query", sql), parameters)
494
+
495
+ # When parsing is disabled, expression will be None, so check SQL directly
496
+ if statement.expression and self.returns_rows(statement.expression):
497
+ # For SELECT statements, extract data while cursor is open
498
+ fetched_data = await cursor.fetchall()
499
+ column_names = [col.name for col in cursor.description or []]
500
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
501
+ if not statement.expression and sql.strip().upper().startswith("SELECT"):
502
+ # For SELECT statements when parsing is disabled
503
+ fetched_data = await cursor.fetchall()
504
+ column_names = [col.name for col in cursor.description or []]
505
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
506
+ # For DML statements
507
+ dml_result: DMLResultDict = {
508
+ "rows_affected": cursor.rowcount,
509
+ "status_message": cursor.statusmessage or "OK",
510
+ }
511
+ return dml_result
512
+
513
+ async def _execute_many(
514
+ self, sql: str, param_list: Any, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
515
+ ) -> DMLResultDict:
516
+ conn = self._connection(connection)
517
+ async with conn.cursor() as cursor:
518
+ await cursor.executemany(cast("Query", sql), param_list or [])
519
+ return {"rows_affected": cursor.rowcount, "status_message": cursor.statusmessage or "OK"}
520
+
521
+ async def _execute_script(
522
+ self, script: str, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
523
+ ) -> ScriptResultDict:
524
+ conn = self._connection(connection)
525
+ async with conn.cursor() as cursor:
526
+ await cursor.execute(cast("Query", script))
527
+ # For scripts, return script result format
528
+ return {
529
+ "statements_executed": -1, # Psycopg doesn't provide this info
530
+ "status_message": cursor.statusmessage or "SCRIPT EXECUTED",
531
+ }
532
+
533
+ async def _fetch_arrow_table(self, sql: SQL, connection: "Optional[Any]" = None, **kwargs: Any) -> "ArrowResult":
534
+ self._ensure_pyarrow_installed()
535
+ conn = self._connection(connection)
536
+
537
+ async with conn.cursor() as cursor:
538
+ await cursor.execute(
539
+ cast("Query", sql.to_sql(placeholder_style=self.default_parameter_style)),
540
+ sql.get_parameters(style=self.default_parameter_style) or [],
541
+ )
542
+ arrow_table = await cursor.fetch_arrow_table() # type: ignore[attr-defined]
543
+ return ArrowResult(statement=sql, data=arrow_table)
544
+
545
+ async def _ingest_arrow_table(self, table: "Any", table_name: str, mode: str = "append", **options: Any) -> int:
546
+ self._ensure_pyarrow_installed()
547
+ import pyarrow.csv as pacsv
548
+
549
+ conn = self._connection(None)
550
+ async with conn.cursor() as cursor:
551
+ if mode == "replace":
552
+ await cursor.execute(cast("Query", f"TRUNCATE TABLE {table_name}"))
553
+ elif mode == "create":
554
+ msg = "'create' mode is not supported for psycopg ingestion."
555
+ raise NotImplementedError(msg)
556
+
557
+ buffer = io.StringIO()
558
+ pacsv.write_csv(table, buffer)
559
+ buffer.seek(0)
560
+
561
+ async with cursor.copy(cast("Query", f"COPY {table_name} FROM STDIN WITH (FORMAT CSV, HEADER)")) as copy:
562
+ await copy.write(buffer.read())
563
+
564
+ return cursor.rowcount if cursor.rowcount is not None else -1
565
+
566
+ async def _wrap_select_result(
567
+ self, statement: SQL, result: SelectResultDict, schema_type: Optional[type[ModelDTOT]] = None, **kwargs: Any
568
+ ) -> Union[SQLResult[ModelDTOT], SQLResult[RowT]]:
569
+ # result must be a dict with keys: data, column_names, rows_affected
570
+ fetched_data = result["data"]
571
+ column_names = result["column_names"]
572
+ rows_affected = result["rows_affected"]
573
+ rows_as_dicts: list[dict[str, Any]] = [dict(row) for row in fetched_data]
574
+
575
+ if schema_type:
576
+ return SQLResult[ModelDTOT](
577
+ statement=statement,
578
+ data=list(self.to_schema(data=fetched_data, schema_type=schema_type)),
579
+ column_names=column_names,
580
+ rows_affected=rows_affected,
581
+ operation_type="SELECT",
582
+ )
583
+ return SQLResult[RowT](
584
+ statement=statement,
585
+ data=rows_as_dicts,
586
+ column_names=column_names,
587
+ rows_affected=rows_affected,
588
+ operation_type="SELECT",
589
+ )
590
+
591
+ async def _wrap_execute_result(
592
+ self, statement: SQL, result: Union[DMLResultDict, ScriptResultDict], **kwargs: Any
593
+ ) -> SQLResult[RowT]:
594
+ operation_type = "UNKNOWN"
595
+ if statement.expression:
596
+ operation_type = str(statement.expression.key).upper()
597
+
598
+ if is_dict_with_field(result, "statements_executed"):
599
+ return SQLResult[RowT](
600
+ statement=statement,
601
+ data=[],
602
+ rows_affected=0,
603
+ operation_type="SCRIPT",
604
+ metadata={"status_message": result.get("status_message", "")},
605
+ )
606
+
607
+ if is_dict_with_field(result, "rows_affected"):
608
+ return SQLResult[RowT](
609
+ statement=statement,
610
+ data=[],
611
+ rows_affected=cast("int", result.get("rows_affected", -1)),
612
+ operation_type=operation_type,
613
+ metadata={"status_message": result.get("status_message", "")},
614
+ )
615
+ # This shouldn't happen with TypedDict approach
616
+ msg = f"Unexpected result type: {type(result)}"
617
+ raise ValueError(msg)
618
+
619
+ def _connection(self, connection: Optional[PsycopgAsyncConnection] = None) -> PsycopgAsyncConnection:
620
+ """Get the connection to use for the operation."""
621
+ return connection or self.connection
622
+
623
+ async def _execute_pipeline_native(self, operations: "list[Any]", **options: Any) -> "list[SQLResult[RowT]]":
624
+ """Native async pipeline execution using Psycopg's pipeline support."""
625
+ from sqlspec.exceptions import PipelineExecutionError
626
+
627
+ results = []
628
+ connection = self._connection()
480
629
 
481
- Returns:
482
- List of row data as either model instances or dictionaries.
483
- """
484
- connection = self._connection(connection)
485
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
486
- async with self._with_cursor(connection) as cursor:
487
- await cursor.execute(sql, parameters)
488
- results = await cursor.fetchall()
489
- if not results:
490
- return []
491
-
492
- return self.to_schema(cast("Sequence[dict[str, Any]]", results), schema_type=schema_type)
493
-
494
- @overload
495
- async def select_one(
496
- self,
497
- sql: str,
498
- parameters: "Optional[StatementParameterType]" = None,
499
- *filters: "StatementFilter",
500
- connection: "Optional[PsycopgAsyncConnection]" = None,
501
- schema_type: None = None,
502
- **kwargs: Any,
503
- ) -> "dict[str, Any]": ...
504
- @overload
505
- async def select_one(
506
- self,
507
- sql: str,
508
- parameters: "Optional[StatementParameterType]" = None,
509
- *filters: "StatementFilter",
510
- connection: "Optional[PsycopgAsyncConnection]" = None,
511
- schema_type: "type[ModelDTOT]",
512
- **kwargs: Any,
513
- ) -> "ModelDTOT": ...
514
- async def select_one(
515
- self,
516
- sql: str,
517
- parameters: "Optional[StatementParameterType]" = None,
518
- *filters: "StatementFilter",
519
- connection: "Optional[PsycopgAsyncConnection]" = None,
520
- schema_type: "Optional[type[ModelDTOT]]" = None,
521
- **kwargs: Any,
522
- ) -> "Union[ModelDTOT, dict[str, Any]]":
523
- """Fetch one row from the database.
524
-
525
- Returns:
526
- The first row of the query results.
527
- """
528
- connection = self._connection(connection)
529
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
530
- async with self._with_cursor(connection) as cursor:
531
- await cursor.execute(sql, parameters)
532
- result = await cursor.fetchone()
533
- result = self.check_not_found(result)
534
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
535
-
536
- @overload
537
- async def select_one_or_none(
538
- self,
539
- sql: str,
540
- parameters: "Optional[StatementParameterType]" = None,
541
- *filters: "StatementFilter",
542
- connection: "Optional[PsycopgAsyncConnection]" = None,
543
- schema_type: None = None,
544
- **kwargs: Any,
545
- ) -> "Optional[dict[str, Any]]": ...
546
- @overload
547
- async def select_one_or_none(
548
- self,
549
- sql: str,
550
- parameters: "Optional[StatementParameterType]" = None,
551
- *filters: "StatementFilter",
552
- connection: "Optional[PsycopgAsyncConnection]" = None,
553
- schema_type: "type[ModelDTOT]",
554
- **kwargs: Any,
555
- ) -> "Optional[ModelDTOT]": ...
556
- async def select_one_or_none(
557
- self,
558
- sql: str,
559
- parameters: "Optional[StatementParameterType]" = None,
560
- *filters: "StatementFilter",
561
- schema_type: "Optional[type[ModelDTOT]]" = None,
562
- connection: "Optional[PsycopgAsyncConnection]" = None,
563
- **kwargs: Any,
564
- ) -> "Optional[Union[ModelDTOT, dict[str, Any]]]":
565
- """Fetch one row from the database.
630
+ try:
631
+ async with connection.pipeline():
632
+ for i, op in enumerate(operations):
633
+ result = await self._execute_pipeline_operation_async(i, op, connection, options)
634
+ results.append(result)
566
635
 
567
- Returns:
568
- The first row of the query results, or None if no results.
569
- """
570
- connection = self._connection(connection)
571
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
572
- async with self._with_cursor(connection) as cursor:
573
- await cursor.execute(sql, parameters)
574
- result = await cursor.fetchone()
575
- if result is None:
576
- return None
577
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
578
-
579
- @overload
580
- async def select_value(
581
- self,
582
- sql: str,
583
- parameters: "Optional[StatementParameterType]" = None,
584
- *filters: "StatementFilter",
585
- connection: "Optional[PsycopgAsyncConnection]" = None,
586
- schema_type: None = None,
587
- **kwargs: Any,
588
- ) -> "Any": ...
589
- @overload
590
- async def select_value(
591
- self,
592
- sql: str,
593
- parameters: "Optional[StatementParameterType]" = None,
594
- *filters: "StatementFilter",
595
- connection: "Optional[PsycopgAsyncConnection]" = None,
596
- schema_type: "type[T]",
597
- **kwargs: Any,
598
- ) -> "T": ...
599
- async def select_value(
600
- self,
601
- sql: str,
602
- parameters: "Optional[StatementParameterType]" = None,
603
- *filters: "StatementFilter",
604
- connection: "Optional[PsycopgAsyncConnection]" = None,
605
- schema_type: "Optional[type[T]]" = None,
606
- **kwargs: Any,
607
- ) -> "Union[T, Any]":
608
- """Fetch a single value from the database.
636
+ except Exception as e:
637
+ if not isinstance(e, PipelineExecutionError):
638
+ msg = f"Psycopg async pipeline execution failed: {e}"
639
+ raise PipelineExecutionError(msg) from e
640
+ raise
 
-        Returns:
-            The first value from the first row of results.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            result = await cursor.fetchone()
-            result = self.check_not_found(result)
-
-            value = next(iter(result.values()))  # Get the first value from the row
-            if schema_type is None:
-                return value
-            return schema_type(value)  # type: ignore[call-arg]
-
-    @overload
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: None = None,
-        **kwargs: Any,
-    ) -> "Optional[Any]": ...
-    @overload
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "type[T]",
-        **kwargs: Any,
-    ) -> "Optional[T]": ...
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "Optional[type[T]]" = None,
-        **kwargs: Any,
-    ) -> "Optional[Union[T, Any]]":
-        """Fetch a single value from the database.
+        return results
 
-        Returns:
-            The first value from the first row of results, or None if no results.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            result = await cursor.fetchone()
-            if result is None:
-                return None
-
-            value = next(iter(result.values()))  # Get the first value from the row
-            if schema_type is None:
-                return value
-            return schema_type(value)  # type: ignore[call-arg]
-
-    async def insert_update_delete(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        **kwargs: Any,
-    ) -> int:
-        """Insert, update, or delete data from the database.
+    async def _execute_pipeline_operation_async(
+        self, index: int, operation: Any, connection: Any, options: dict
+    ) -> "SQLResult[RowT]":
+        """Execute a single async pipeline operation with error handling."""
+        from sqlspec.exceptions import PipelineExecutionError
 
-        Returns:
-            Row count affected by the operation.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            return getattr(cursor, "rowcount", -1)  # pyright: ignore[reportUnknownMemberType]
-
-    @overload
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: None = None,
-        **kwargs: Any,
-    ) -> "dict[str, Any]": ...
-    @overload
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "type[ModelDTOT]",
-        **kwargs: Any,
-    ) -> "ModelDTOT": ...
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "Optional[type[ModelDTOT]]" = None,
-        **kwargs: Any,
-    ) -> "Union[ModelDTOT, dict[str, Any]]":
-        """Insert, update, or delete data with RETURNING clause.
+        try:
+            # Prepare SQL and parameters
+            filtered_sql = self._apply_operation_filters(operation.sql, operation.filters)
+            sql_str = filtered_sql.to_sql(placeholder_style=self.default_parameter_style)
+            params = self._convert_psycopg_params(filtered_sql.parameters)
+
+            # Execute based on operation type
+            result = await self._dispatch_pipeline_operation_async(operation, sql_str, params, connection)
+
+        except Exception as e:
+            if options.get("continue_on_error"):
+                return SQLResult[RowT](
+                    statement=operation.sql,
+                    data=cast("list[RowT]", []),
+                    error=e,
+                    operation_index=index,
+                    parameters=operation.original_params,
+                )
+            msg = f"Psycopg async pipeline failed at operation {index}: {e}"
+            raise PipelineExecutionError(
+                msg, operation_index=index, partial_results=[], failed_operation=operation
+            ) from e
+        else:
+            # Add pipeline context
+            result.operation_index = index
+            result.pipeline_sql = operation.sql
+            return result
+
+    async def _dispatch_pipeline_operation_async(
+        self, operation: Any, sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Dispatch to appropriate async handler based on operation type."""
+        handlers = {
+            "execute_many": self._handle_pipeline_execute_many_async,
+            "select": self._handle_pipeline_select_async,
+            "execute_script": self._handle_pipeline_execute_script_async,
+        }
+
+        handler = handlers.get(operation.operation_type, self._handle_pipeline_execute_async)
+        return await handler(operation.sql, sql_str, params, connection)
+
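The `handlers` dict above routes each operation by its `operation_type` string, with `dict.get()` supplying the plain-execute handler as the fallback. A self-contained sketch of the same lookup pattern (handler names and signatures here are illustrative, not sqlspec APIs):

```python
import asyncio
from typing import Awaitable, Callable


async def handle_select(sql: str) -> str:
    return f"select handler ran: {sql}"


async def handle_default(sql: str) -> str:
    return f"default handler ran: {sql}"


HANDLERS: dict[str, Callable[[str], Awaitable[str]]] = {"select": handle_select}


async def dispatch(operation_type: str, sql: str) -> str:
    # Unknown operation types fall back to the default handler via dict.get().
    handler = HANDLERS.get(operation_type, handle_default)
    return await handler(sql)


print(asyncio.run(dispatch("execute", "UPDATE t SET x = 1")))
```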
+    async def _handle_pipeline_execute_many_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async execute_many operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.executemany(sql_str, params)
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", []),
+                rows_affected=cursor.rowcount,
+                operation_type="execute_many",
+                metadata={"status_message": "OK"},
+            )
+
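The batch handler leans on psycopg's `cursor.executemany()`, which runs the statement once per parameter set. A minimal standalone usage sketch, assuming a placeholder DSN and an existing `items` table:

```python
import asyncio

import psycopg


async def main() -> None:
    # Placeholder DSN; assumes an existing "items(name text, qty int)" table.
    async with await psycopg.AsyncConnection.connect("dbname=example") as conn:
        async with conn.cursor() as cur:
            await cur.executemany(
                "INSERT INTO items (name, qty) VALUES (%s, %s)",
                [("apple", 3), ("pear", 5)],
            )
            print(cur.rowcount)  # affected-row reporting for batches is driver-dependent


asyncio.run(main())
```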
+    async def _handle_pipeline_select_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async select operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.execute(sql_str, params)
+            fetched_data = await cursor.fetchall()
+            column_names = [col.name for col in cursor.description or []]
+            data = [dict(record) for record in fetched_data] if fetched_data else []
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", data),
+                rows_affected=len(data),
+                operation_type="select",
+                metadata={"column_names": column_names},
+            )
+
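`dict(record)` only works when the cursor yields mapping-like rows, so this handler implicitly assumes the connection uses a dict-producing row factory (configured elsewhere in the adapter; an assumption here, not shown in this hunk). The equivalent standalone psycopg setup looks like this, with a placeholder DSN:

```python
import asyncio

import psycopg
from psycopg.rows import dict_row


async def main() -> None:
    # Placeholder DSN; dict_row makes every fetched row a plain dict.
    async with await psycopg.AsyncConnection.connect("dbname=example", row_factory=dict_row) as conn:
        async with conn.cursor() as cur:
            await cur.execute("SELECT 1 AS answer")
            rows = await cur.fetchall()
            print(rows)  # [{'answer': 1}]
            print([col.name for col in cur.description or []])  # ['answer']


asyncio.run(main())
```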
+    async def _handle_pipeline_execute_script_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async execute_script operation in pipeline."""
+        script_statements = self._split_script_statements(sql_str)
+        total_affected = 0
+
+        async with connection.cursor() as cursor:
+            for stmt in script_statements:
+                if stmt.strip():
+                    await cursor.execute(stmt)
+                    total_affected += cursor.rowcount or 0
+
+        return SQLResult[RowT](
+            statement=sql,
+            data=cast("list[RowT]", []),
+            rows_affected=total_affected,
+            operation_type="execute_script",
+            metadata={"status_message": "SCRIPT EXECUTED", "statements_executed": len(script_statements)},
+        )
+
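`_split_script_statements()` comes from a shared driver mixin that is not part of this hunk; it breaks a multi-statement script into individual statements so each can be executed and counted separately. A deliberately naive sketch of the idea, assuming semicolon-terminated statements with no semicolons inside string literals or dollar-quoted bodies:

```python
def split_sql_script(script: str) -> list[str]:
    """Naive splitter: semicolon-terminated statements, blanks dropped.

    A real splitter must also handle string literals, comments, and
    dollar-quoted function bodies, which this sketch ignores.
    """
    return [stmt.strip() for stmt in script.split(";") if stmt.strip()]


print(split_sql_script("CREATE TABLE t (id int); INSERT INTO t VALUES (1);"))
# ['CREATE TABLE t (id int)', 'INSERT INTO t VALUES (1)']
```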
+    async def _handle_pipeline_execute_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async regular execute operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.execute(sql_str, params)
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", []),
+                rows_affected=cursor.rowcount or 0,
+                operation_type="execute",
+                metadata={"status_message": "OK"},
+            )
+
+    def _convert_psycopg_params(self, params: Any) -> Any:
+        """Convert parameters to Psycopg-compatible format.
+
+        Psycopg supports both named (%(name)s) and positional (%s) placeholders.
 
-        Returns:
-            The returned row data, as either a model instance or dictionary.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            result = await cursor.fetchone()
-            result = self.check_not_found(result)
-            return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
-
-    async def execute_script(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        **kwargs: Any,
-    ) -> str:
-        """Execute a script.
+        Args:
+            params: Parameters in various formats
 
         Returns:
-            Status message for the operation.
+            Parameters in Psycopg-compatible format
         """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            return str(cursor.statusmessage) if cursor.statusmessage is not None else "DONE"
+        if params is None:
+            return None
+        if isinstance(params, dict):
+            # Psycopg handles dict parameters directly for named placeholders
+            return params
+        if isinstance(params, (list, tuple)):
+            # Convert to tuple for positional parameters
+            return tuple(params)
+        # Single parameter
+        return (params,)
+
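To make the three conversion rules concrete, here is the same logic restated as a standalone function with example inputs (illustration only, not an additional sqlspec API):

```python
from typing import Any


def convert_params(params: Any) -> Any:
    """Mirror of the conversion rules shown above, for illustration only."""
    if params is None:
        return None
    if isinstance(params, dict):
        return params          # named placeholders: %(name)s
    if isinstance(params, (list, tuple)):
        return tuple(params)   # positional placeholders: %s
    return (params,)           # single scalar becomes a 1-tuple


print(convert_params({"name": "ada"}))  # {'name': 'ada'}
print(convert_params([1, 2, 3]))        # (1, 2, 3)
print(convert_params(42))               # (42,)
```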
+    def _apply_operation_filters(self, sql: "SQL", filters: "list[Any]") -> "SQL":
+        """Apply filters to a SQL object for pipeline operations."""
+        if not filters:
+            return sql
+
+        result_sql = sql
+        for filter_obj in filters:
+            if hasattr(filter_obj, "apply"):
+                result_sql = filter_obj.apply(result_sql)
+
+        return result_sql
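The filter protocol here is duck-typed: any object exposing an `apply(sql)` method that returns a new statement can participate. A self-contained sketch of that shape, using a hypothetical `LimitFilter` and plain strings in place of sqlspec's `SQL` objects:

```python
from dataclasses import dataclass


@dataclass
class LimitFilter:
    """Illustrative filter: appends a LIMIT clause to a SQL string."""

    limit: int

    def apply(self, sql: str) -> str:
        return f"{sql} LIMIT {self.limit}"


def apply_filters(sql: str, filters: list) -> str:
    # Same duck-typed loop as _apply_operation_filters, but on plain strings.
    for f in filters:
        if hasattr(f, "apply"):
            sql = f.apply(sql)
    return sql


print(apply_filters("SELECT * FROM users", [LimitFilter(10)]))
# SELECT * FROM users LIMIT 10
```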