sqlspec 0.11.0__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlspec might be problematic. Click here for more details.

Files changed (155) hide show
  1. sqlspec/__init__.py +16 -3
  2. sqlspec/_serialization.py +3 -10
  3. sqlspec/_sql.py +1147 -0
  4. sqlspec/_typing.py +343 -41
  5. sqlspec/adapters/adbc/__init__.py +2 -6
  6. sqlspec/adapters/adbc/config.py +474 -149
  7. sqlspec/adapters/adbc/driver.py +330 -644
  8. sqlspec/adapters/aiosqlite/__init__.py +2 -6
  9. sqlspec/adapters/aiosqlite/config.py +143 -57
  10. sqlspec/adapters/aiosqlite/driver.py +269 -462
  11. sqlspec/adapters/asyncmy/__init__.py +3 -8
  12. sqlspec/adapters/asyncmy/config.py +247 -202
  13. sqlspec/adapters/asyncmy/driver.py +217 -451
  14. sqlspec/adapters/asyncpg/__init__.py +4 -7
  15. sqlspec/adapters/asyncpg/config.py +329 -176
  16. sqlspec/adapters/asyncpg/driver.py +418 -498
  17. sqlspec/adapters/bigquery/__init__.py +2 -2
  18. sqlspec/adapters/bigquery/config.py +407 -0
  19. sqlspec/adapters/bigquery/driver.py +592 -634
  20. sqlspec/adapters/duckdb/__init__.py +4 -1
  21. sqlspec/adapters/duckdb/config.py +432 -321
  22. sqlspec/adapters/duckdb/driver.py +393 -436
  23. sqlspec/adapters/oracledb/__init__.py +3 -8
  24. sqlspec/adapters/oracledb/config.py +625 -0
  25. sqlspec/adapters/oracledb/driver.py +549 -942
  26. sqlspec/adapters/psqlpy/__init__.py +4 -7
  27. sqlspec/adapters/psqlpy/config.py +372 -203
  28. sqlspec/adapters/psqlpy/driver.py +197 -550
  29. sqlspec/adapters/psycopg/__init__.py +3 -8
  30. sqlspec/adapters/psycopg/config.py +741 -0
  31. sqlspec/adapters/psycopg/driver.py +732 -733
  32. sqlspec/adapters/sqlite/__init__.py +2 -6
  33. sqlspec/adapters/sqlite/config.py +146 -81
  34. sqlspec/adapters/sqlite/driver.py +243 -426
  35. sqlspec/base.py +220 -825
  36. sqlspec/config.py +354 -0
  37. sqlspec/driver/__init__.py +22 -0
  38. sqlspec/driver/_async.py +252 -0
  39. sqlspec/driver/_common.py +338 -0
  40. sqlspec/driver/_sync.py +261 -0
  41. sqlspec/driver/mixins/__init__.py +17 -0
  42. sqlspec/driver/mixins/_pipeline.py +523 -0
  43. sqlspec/driver/mixins/_result_utils.py +122 -0
  44. sqlspec/driver/mixins/_sql_translator.py +35 -0
  45. sqlspec/driver/mixins/_storage.py +993 -0
  46. sqlspec/driver/mixins/_type_coercion.py +131 -0
  47. sqlspec/exceptions.py +299 -7
  48. sqlspec/extensions/aiosql/__init__.py +10 -0
  49. sqlspec/extensions/aiosql/adapter.py +474 -0
  50. sqlspec/extensions/litestar/__init__.py +1 -6
  51. sqlspec/extensions/litestar/_utils.py +1 -5
  52. sqlspec/extensions/litestar/config.py +5 -6
  53. sqlspec/extensions/litestar/handlers.py +13 -12
  54. sqlspec/extensions/litestar/plugin.py +22 -24
  55. sqlspec/extensions/litestar/providers.py +37 -55
  56. sqlspec/loader.py +528 -0
  57. sqlspec/service/__init__.py +3 -0
  58. sqlspec/service/base.py +24 -0
  59. sqlspec/service/pagination.py +26 -0
  60. sqlspec/statement/__init__.py +21 -0
  61. sqlspec/statement/builder/__init__.py +54 -0
  62. sqlspec/statement/builder/_ddl_utils.py +119 -0
  63. sqlspec/statement/builder/_parsing_utils.py +135 -0
  64. sqlspec/statement/builder/base.py +328 -0
  65. sqlspec/statement/builder/ddl.py +1379 -0
  66. sqlspec/statement/builder/delete.py +80 -0
  67. sqlspec/statement/builder/insert.py +274 -0
  68. sqlspec/statement/builder/merge.py +95 -0
  69. sqlspec/statement/builder/mixins/__init__.py +65 -0
  70. sqlspec/statement/builder/mixins/_aggregate_functions.py +151 -0
  71. sqlspec/statement/builder/mixins/_case_builder.py +91 -0
  72. sqlspec/statement/builder/mixins/_common_table_expr.py +91 -0
  73. sqlspec/statement/builder/mixins/_delete_from.py +34 -0
  74. sqlspec/statement/builder/mixins/_from.py +61 -0
  75. sqlspec/statement/builder/mixins/_group_by.py +119 -0
  76. sqlspec/statement/builder/mixins/_having.py +35 -0
  77. sqlspec/statement/builder/mixins/_insert_from_select.py +48 -0
  78. sqlspec/statement/builder/mixins/_insert_into.py +36 -0
  79. sqlspec/statement/builder/mixins/_insert_values.py +69 -0
  80. sqlspec/statement/builder/mixins/_join.py +110 -0
  81. sqlspec/statement/builder/mixins/_limit_offset.py +53 -0
  82. sqlspec/statement/builder/mixins/_merge_clauses.py +405 -0
  83. sqlspec/statement/builder/mixins/_order_by.py +46 -0
  84. sqlspec/statement/builder/mixins/_pivot.py +82 -0
  85. sqlspec/statement/builder/mixins/_returning.py +37 -0
  86. sqlspec/statement/builder/mixins/_select_columns.py +60 -0
  87. sqlspec/statement/builder/mixins/_set_ops.py +122 -0
  88. sqlspec/statement/builder/mixins/_unpivot.py +80 -0
  89. sqlspec/statement/builder/mixins/_update_from.py +54 -0
  90. sqlspec/statement/builder/mixins/_update_set.py +91 -0
  91. sqlspec/statement/builder/mixins/_update_table.py +29 -0
  92. sqlspec/statement/builder/mixins/_where.py +374 -0
  93. sqlspec/statement/builder/mixins/_window_functions.py +86 -0
  94. sqlspec/statement/builder/protocols.py +20 -0
  95. sqlspec/statement/builder/select.py +206 -0
  96. sqlspec/statement/builder/update.py +178 -0
  97. sqlspec/statement/filters.py +571 -0
  98. sqlspec/statement/parameters.py +736 -0
  99. sqlspec/statement/pipelines/__init__.py +67 -0
  100. sqlspec/statement/pipelines/analyzers/__init__.py +9 -0
  101. sqlspec/statement/pipelines/analyzers/_analyzer.py +649 -0
  102. sqlspec/statement/pipelines/base.py +315 -0
  103. sqlspec/statement/pipelines/context.py +119 -0
  104. sqlspec/statement/pipelines/result_types.py +41 -0
  105. sqlspec/statement/pipelines/transformers/__init__.py +8 -0
  106. sqlspec/statement/pipelines/transformers/_expression_simplifier.py +256 -0
  107. sqlspec/statement/pipelines/transformers/_literal_parameterizer.py +623 -0
  108. sqlspec/statement/pipelines/transformers/_remove_comments.py +66 -0
  109. sqlspec/statement/pipelines/transformers/_remove_hints.py +81 -0
  110. sqlspec/statement/pipelines/validators/__init__.py +23 -0
  111. sqlspec/statement/pipelines/validators/_dml_safety.py +275 -0
  112. sqlspec/statement/pipelines/validators/_parameter_style.py +297 -0
  113. sqlspec/statement/pipelines/validators/_performance.py +703 -0
  114. sqlspec/statement/pipelines/validators/_security.py +990 -0
  115. sqlspec/statement/pipelines/validators/base.py +67 -0
  116. sqlspec/statement/result.py +527 -0
  117. sqlspec/statement/splitter.py +701 -0
  118. sqlspec/statement/sql.py +1198 -0
  119. sqlspec/storage/__init__.py +15 -0
  120. sqlspec/storage/backends/__init__.py +0 -0
  121. sqlspec/storage/backends/base.py +166 -0
  122. sqlspec/storage/backends/fsspec.py +315 -0
  123. sqlspec/storage/backends/obstore.py +464 -0
  124. sqlspec/storage/protocol.py +170 -0
  125. sqlspec/storage/registry.py +315 -0
  126. sqlspec/typing.py +157 -36
  127. sqlspec/utils/correlation.py +155 -0
  128. sqlspec/utils/deprecation.py +3 -6
  129. sqlspec/utils/fixtures.py +6 -11
  130. sqlspec/utils/logging.py +135 -0
  131. sqlspec/utils/module_loader.py +45 -43
  132. sqlspec/utils/serializers.py +4 -0
  133. sqlspec/utils/singleton.py +6 -8
  134. sqlspec/utils/sync_tools.py +15 -27
  135. sqlspec/utils/text.py +58 -26
  136. {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/METADATA +100 -26
  137. sqlspec-0.12.0.dist-info/RECORD +145 -0
  138. sqlspec/adapters/bigquery/config/__init__.py +0 -3
  139. sqlspec/adapters/bigquery/config/_common.py +0 -40
  140. sqlspec/adapters/bigquery/config/_sync.py +0 -87
  141. sqlspec/adapters/oracledb/config/__init__.py +0 -9
  142. sqlspec/adapters/oracledb/config/_asyncio.py +0 -186
  143. sqlspec/adapters/oracledb/config/_common.py +0 -131
  144. sqlspec/adapters/oracledb/config/_sync.py +0 -186
  145. sqlspec/adapters/psycopg/config/__init__.py +0 -19
  146. sqlspec/adapters/psycopg/config/_async.py +0 -169
  147. sqlspec/adapters/psycopg/config/_common.py +0 -56
  148. sqlspec/adapters/psycopg/config/_sync.py +0 -168
  149. sqlspec/filters.py +0 -330
  150. sqlspec/mixins.py +0 -306
  151. sqlspec/statement.py +0 -378
  152. sqlspec-0.11.0.dist-info/RECORD +0 -69
  153. {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/WHEEL +0 -0
  154. {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/licenses/LICENSE +0 -0
  155. {sqlspec-0.11.0.dist-info → sqlspec-0.12.0.dist-info}/licenses/NOTICE +0 -0
@@ -1,790 +1,789 @@
1
- import logging
2
- import re
1
+ import io
2
+ from collections.abc import AsyncGenerator, Generator
3
3
  from contextlib import asynccontextmanager, contextmanager
4
- from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload
4
+ from typing import TYPE_CHECKING, Any, Optional, Union, cast
5
5
 
6
- from psycopg import AsyncConnection, Connection
7
- from psycopg.rows import dict_row
6
+ if TYPE_CHECKING:
7
+ from psycopg.abc import Query
8
8
 
9
- from sqlspec.base import AsyncDriverAdapterProtocol, SyncDriverAdapterProtocol
10
- from sqlspec.exceptions import ParameterStyleMismatchError
11
- from sqlspec.mixins import ResultConverter, SQLTranslatorMixin
12
- from sqlspec.statement import SQLStatement
13
- from sqlspec.typing import is_dict
9
+ from psycopg import AsyncConnection, Connection
10
+ from psycopg.rows import DictRow as PsycopgDictRow
11
+ from sqlglot.dialects.dialect import DialectType
12
+
13
+ from sqlspec.driver import AsyncDriverAdapterProtocol, SyncDriverAdapterProtocol
14
+ from sqlspec.driver.mixins import (
15
+ AsyncPipelinedExecutionMixin,
16
+ AsyncStorageMixin,
17
+ SQLTranslatorMixin,
18
+ SyncPipelinedExecutionMixin,
19
+ SyncStorageMixin,
20
+ ToSchemaMixin,
21
+ TypeCoercionMixin,
22
+ )
23
+ from sqlspec.statement.parameters import ParameterStyle
24
+ from sqlspec.statement.result import ArrowResult, DMLResultDict, ScriptResultDict, SelectResultDict, SQLResult
25
+ from sqlspec.statement.splitter import split_sql_script
26
+ from sqlspec.statement.sql import SQL, SQLConfig
27
+ from sqlspec.typing import DictRow, ModelDTOT, RowT, is_dict_with_field
28
+ from sqlspec.utils.logging import get_logger
14
29
 
15
30
  if TYPE_CHECKING:
16
- from collections.abc import AsyncGenerator, Generator, Sequence
17
-
18
- from sqlspec.filters import StatementFilter
19
- from sqlspec.typing import ModelDTOT, StatementParameterType, T
31
+ from sqlglot.dialects.dialect import DialectType
20
32
 
21
- logger = logging.getLogger("sqlspec")
33
+ logger = get_logger("adapters.psycopg")
22
34
 
23
35
  __all__ = ("PsycopgAsyncConnection", "PsycopgAsyncDriver", "PsycopgSyncConnection", "PsycopgSyncDriver")
24
36
 
25
-
26
- NAMED_PARAMS_PATTERN = re.compile(r"(?<!:):([a-zA-Z0-9_]+)")
27
- # Pattern matches %(name)s format while trying to avoid matches in string literals and comments
28
- PSYCOPG_PARAMS_PATTERN = re.compile(r"(?<!'|\"|\w)%\(([a-zA-Z0-9_]+)\)s(?!'|\")")
29
-
30
- PsycopgSyncConnection = Connection
31
- PsycopgAsyncConnection = AsyncConnection
32
-
33
-
34
- class PsycopgDriverBase:
35
- dialect: str = "postgres"
36
-
37
- def _process_sql_params(
38
- self,
39
- sql: str,
40
- parameters: "Optional[StatementParameterType]" = None,
41
- /,
42
- *filters: "StatementFilter",
43
- **kwargs: Any,
44
- ) -> "tuple[str, Optional[Union[tuple[Any, ...], list[Any], dict[str, Any]]]]":
45
- """Process SQL and parameters using SQLStatement with dialect support.
46
-
47
- Args:
48
- sql: The SQL statement to process.
49
- parameters: The parameters to bind to the statement.
50
- *filters: Statement filters to apply.
51
- **kwargs: Additional keyword arguments.
52
-
53
- Raises:
54
- ParameterStyleMismatchError: If the parameter style is mismatched.
55
-
56
- Returns:
57
- A tuple of (sql, parameters) ready for execution.
58
- """
59
- statement = SQLStatement(sql, parameters, kwargs=kwargs, dialect=self.dialect)
60
-
61
- # Apply all statement filters
62
- for filter_obj in filters:
63
- statement = statement.apply_filter(filter_obj)
64
-
65
- processed_sql, processed_params, _ = statement.process()
66
-
67
- if is_dict(processed_params):
68
- named_params = NAMED_PARAMS_PATTERN.findall(processed_sql)
69
-
70
- if not named_params:
71
- if PSYCOPG_PARAMS_PATTERN.search(processed_sql):
72
- return processed_sql, processed_params
73
-
74
- if processed_params:
75
- msg = "psycopg: Dictionary parameters provided, but no named placeholders found in SQL."
76
- raise ParameterStyleMismatchError(msg)
77
- return processed_sql, None
78
-
79
- # Convert named parameters to psycopg's preferred format
80
- return NAMED_PARAMS_PATTERN.sub("%s", processed_sql), tuple(processed_params[name] for name in named_params)
81
-
82
- # For sequence parameters, ensure they're a tuple
83
- if isinstance(processed_params, (list, tuple)):
84
- return processed_sql, tuple(processed_params)
85
-
86
- # For scalar parameter or None
87
- if processed_params is not None:
88
- return processed_sql, (processed_params,)
89
-
90
- return processed_sql, None
37
+ PsycopgSyncConnection = Connection[PsycopgDictRow]
38
+ PsycopgAsyncConnection = AsyncConnection[PsycopgDictRow]
91
39
 
92
40
 
93
41
  class PsycopgSyncDriver(
94
- PsycopgDriverBase,
95
- SQLTranslatorMixin["PsycopgSyncConnection"],
96
- SyncDriverAdapterProtocol["PsycopgSyncConnection"],
97
- ResultConverter,
42
+ SyncDriverAdapterProtocol[PsycopgSyncConnection, RowT],
43
+ SQLTranslatorMixin,
44
+ TypeCoercionMixin,
45
+ SyncStorageMixin,
46
+ SyncPipelinedExecutionMixin,
47
+ ToSchemaMixin,
98
48
  ):
99
- """Psycopg Sync Driver Adapter."""
49
+ """Psycopg Sync Driver Adapter. Refactored for new protocol."""
100
50
 
101
- connection: "PsycopgSyncConnection"
51
+ dialect: "DialectType" = "postgres" # pyright: ignore[reportInvalidTypeForm]
52
+ supported_parameter_styles: "tuple[ParameterStyle, ...]" = (
53
+ ParameterStyle.POSITIONAL_PYFORMAT,
54
+ ParameterStyle.NAMED_PYFORMAT,
55
+ )
56
+ default_parameter_style: ParameterStyle = ParameterStyle.POSITIONAL_PYFORMAT
57
+ __slots__ = ()
102
58
 
103
- def __init__(self, connection: "PsycopgSyncConnection") -> None:
104
- self.connection = connection
59
+ def __init__(
60
+ self,
61
+ connection: PsycopgSyncConnection,
62
+ config: "Optional[SQLConfig]" = None,
63
+ default_row_type: "type[DictRow]" = dict,
64
+ ) -> None:
65
+ super().__init__(connection=connection, config=config, default_row_type=default_row_type)
105
66
 
106
67
  @staticmethod
107
68
  @contextmanager
108
- def _with_cursor(connection: "PsycopgSyncConnection") -> "Generator[Any, None, None]":
109
- cursor = connection.cursor(row_factory=dict_row)
110
- try:
69
+ def _get_cursor(connection: PsycopgSyncConnection) -> Generator[Any, None, None]:
70
+ with connection.cursor() as cursor:
111
71
  yield cursor
112
- finally:
113
- cursor.close()
114
72
 
115
- # --- Public API Methods --- #
116
- @overload
117
- def select(
118
- self,
119
- sql: str,
120
- parameters: "Optional[StatementParameterType]" = None,
121
- /,
122
- *filters: "StatementFilter",
123
- connection: "Optional[PsycopgSyncConnection]" = None,
124
- schema_type: None = None,
125
- **kwargs: Any,
126
- ) -> "Sequence[dict[str, Any]]": ...
127
- @overload
128
- def select(
129
- self,
130
- sql: str,
131
- parameters: "Optional[StatementParameterType]" = None,
132
- /,
133
- *filters: "StatementFilter",
134
- connection: "Optional[PsycopgSyncConnection]" = None,
135
- schema_type: "type[ModelDTOT]",
136
- **kwargs: Any,
137
- ) -> "Sequence[ModelDTOT]": ...
138
- def select(
139
- self,
140
- sql: str,
141
- parameters: "Optional[StatementParameterType]" = None,
142
- /,
143
- *filters: "StatementFilter",
144
- schema_type: "Optional[type[ModelDTOT]]" = None,
145
- connection: "Optional[PsycopgSyncConnection]" = None,
146
- **kwargs: Any,
147
- ) -> "Sequence[Union[ModelDTOT, dict[str, Any]]]":
148
- """Fetch data from the database.
73
+ def _execute_statement(
74
+ self, statement: SQL, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
75
+ ) -> Union[SelectResultDict, DMLResultDict, ScriptResultDict]:
76
+ if statement.is_script:
77
+ sql, _ = statement.compile(placeholder_style=ParameterStyle.STATIC)
78
+ return self._execute_script(sql, connection=connection, **kwargs)
79
+
80
+ detected_styles = {p.style for p in statement.parameter_info}
81
+ target_style = self.default_parameter_style
82
+ unsupported_styles = detected_styles - set(self.supported_parameter_styles)
83
+ if unsupported_styles:
84
+ target_style = self.default_parameter_style
85
+ elif detected_styles:
86
+ for style in detected_styles:
87
+ if style in self.supported_parameter_styles:
88
+ target_style = style
89
+ break
90
+
91
+ if statement.is_many:
92
+ sql, params = statement.compile(placeholder_style=target_style)
93
+ # For execute_many, check if parameters were passed via kwargs (legacy support)
94
+ # Otherwise use the parameters from the SQL object
95
+ kwargs_params = kwargs.get("parameters")
96
+ if kwargs_params is not None:
97
+ params = kwargs_params
98
+ if params is not None:
99
+ processed_params = [self._process_parameters(param_set) for param_set in params]
100
+ params = processed_params
101
+ return self._execute_many(sql, params, connection=connection, **kwargs)
102
+
103
+ sql, params = statement.compile(placeholder_style=target_style)
104
+ params = self._process_parameters(params)
105
+ return self._execute(sql, params, statement, connection=connection, **kwargs)
106
+
107
+ def _execute(
108
+ self,
109
+ sql: str,
110
+ parameters: Any,
111
+ statement: SQL,
112
+ connection: Optional[PsycopgSyncConnection] = None,
113
+ **kwargs: Any,
114
+ ) -> Union[SelectResultDict, DMLResultDict]:
115
+ conn = self._connection(connection)
116
+ with conn.cursor() as cursor:
117
+ cursor.execute(cast("Query", sql), parameters)
118
+ # Check if the statement returns rows by checking cursor.description
119
+ # This is more reliable than parsing when parsing is disabled
120
+ if cursor.description is not None:
121
+ fetched_data = cursor.fetchall()
122
+ column_names = [col.name for col in cursor.description]
123
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
124
+ return {"rows_affected": cursor.rowcount, "status_message": cursor.statusmessage or "OK"}
125
+
126
+ def _execute_many(
127
+ self, sql: str, param_list: Any, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
128
+ ) -> DMLResultDict:
129
+ conn = self._connection(connection)
130
+ with self._get_cursor(conn) as cursor:
131
+ cursor.executemany(sql, param_list or [])
132
+ # psycopg's executemany might return -1 or 0 for rowcount
133
+ # In that case, use the length of param_list for DML operations
134
+ rows_affected = cursor.rowcount
135
+ if rows_affected <= 0 and param_list:
136
+ rows_affected = len(param_list)
137
+ result: DMLResultDict = {"rows_affected": rows_affected, "status_message": cursor.statusmessage or "OK"}
138
+ return result
139
+
140
+ def _execute_script(
141
+ self, script: str, connection: Optional[PsycopgSyncConnection] = None, **kwargs: Any
142
+ ) -> ScriptResultDict:
143
+ conn = self._connection(connection)
144
+ with self._get_cursor(conn) as cursor:
145
+ cursor.execute(script)
146
+ result: ScriptResultDict = {
147
+ "statements_executed": -1,
148
+ "status_message": cursor.statusmessage or "SCRIPT EXECUTED",
149
+ }
150
+ return result
151
+
152
+ def _ingest_arrow_table(self, table: "Any", table_name: str, mode: str = "append", **options: Any) -> int:
153
+ self._ensure_pyarrow_installed()
154
+ import pyarrow.csv as pacsv
155
+
156
+ conn = self._connection(None)
157
+ with self._get_cursor(conn) as cursor:
158
+ if mode == "replace":
159
+ cursor.execute(f"TRUNCATE TABLE {table_name}")
160
+ elif mode == "create":
161
+ msg = "'create' mode is not supported for psycopg ingestion."
162
+ raise NotImplementedError(msg)
163
+
164
+ buffer = io.StringIO()
165
+ pacsv.write_csv(table, buffer)
166
+ buffer.seek(0)
167
+
168
+ with cursor.copy(f"COPY {table_name} FROM STDIN WITH (FORMAT CSV, HEADER)") as copy:
169
+ copy.write(buffer.read())
170
+
171
+ return cursor.rowcount if cursor.rowcount is not None else -1
172
+
173
+ def _wrap_select_result(
174
+ self, statement: SQL, result: SelectResultDict, schema_type: Optional[type[ModelDTOT]] = None, **kwargs: Any
175
+ ) -> Union[SQLResult[ModelDTOT], SQLResult[RowT]]:
176
+ rows_as_dicts: list[dict[str, Any]] = [dict(row) for row in result["data"]]
177
+
178
+ if schema_type:
179
+ return SQLResult[ModelDTOT](
180
+ statement=statement,
181
+ data=list(self.to_schema(data=result["data"], schema_type=schema_type)),
182
+ column_names=result["column_names"],
183
+ rows_affected=result["rows_affected"],
184
+ operation_type="SELECT",
185
+ )
186
+ return SQLResult[RowT](
187
+ statement=statement,
188
+ data=rows_as_dicts,
189
+ column_names=result["column_names"],
190
+ rows_affected=result["rows_affected"],
191
+ operation_type="SELECT",
192
+ )
193
+
194
+ def _wrap_execute_result(
195
+ self, statement: SQL, result: Union[DMLResultDict, ScriptResultDict], **kwargs: Any
196
+ ) -> SQLResult[RowT]:
197
+ operation_type = "UNKNOWN"
198
+ if statement.expression:
199
+ operation_type = str(statement.expression.key).upper()
200
+
201
+ # Handle case where we got a SelectResultDict but it was routed here due to parsing being disabled
202
+ if is_dict_with_field(result, "data") and is_dict_with_field(result, "column_names"):
203
+ # This is actually a SELECT result, wrap it properly
204
+ return self._wrap_select_result(statement, cast("SelectResultDict", result), **kwargs)
205
+
206
+ if is_dict_with_field(result, "statements_executed"):
207
+ return SQLResult[RowT](
208
+ statement=statement,
209
+ data=[],
210
+ rows_affected=0,
211
+ operation_type="SCRIPT",
212
+ metadata={"status_message": result.get("status_message", "")},
213
+ )
214
+
215
+ if is_dict_with_field(result, "rows_affected"):
216
+ return SQLResult[RowT](
217
+ statement=statement,
218
+ data=[],
219
+ rows_affected=cast("int", result.get("rows_affected", -1)),
220
+ operation_type=operation_type,
221
+ metadata={"status_message": result.get("status_message", "")},
222
+ )
223
+
224
+ # This shouldn't happen with TypedDict approach
225
+ msg = f"Unexpected result type: {type(result)}"
226
+ raise ValueError(msg)
227
+
228
+ def _connection(self, connection: Optional[PsycopgSyncConnection] = None) -> PsycopgSyncConnection:
229
+ """Get the connection to use for the operation."""
230
+ return connection or self.connection
231
+
232
+ def _execute_pipeline_native(self, operations: "list[Any]", **options: Any) -> "list[SQLResult[RowT]]":
233
+ """Native pipeline execution using Psycopg's pipeline support.
234
+
235
+ Psycopg has built-in pipeline support through the connection.pipeline() context manager.
236
+ This provides significant performance benefits for batch operations.
149
237
 
150
- Returns:
151
- List of row data as either model instances or dictionaries.
152
- """
153
- connection = self._connection(connection)
154
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
155
- with self._with_cursor(connection) as cursor:
156
- cursor.execute(sql, parameters)
157
- results = cursor.fetchall()
158
- if not results:
159
- return []
160
-
161
- return self.to_schema(cast("Sequence[dict[str, Any]]", results), schema_type=schema_type)
162
-
163
- @overload
164
- def select_one(
165
- self,
166
- sql: str,
167
- parameters: "Optional[StatementParameterType]" = None,
168
- /,
169
- *filters: "StatementFilter",
170
- connection: "Optional[PsycopgSyncConnection]" = None,
171
- schema_type: None = None,
172
- **kwargs: Any,
173
- ) -> "dict[str, Any]": ...
174
- @overload
175
- def select_one(
176
- self,
177
- sql: str,
178
- parameters: "Optional[StatementParameterType]" = None,
179
- /,
180
- *filters: "StatementFilter",
181
- connection: "Optional[PsycopgSyncConnection]" = None,
182
- schema_type: "type[ModelDTOT]",
183
- **kwargs: Any,
184
- ) -> "ModelDTOT": ...
185
- def select_one(
186
- self,
187
- sql: str,
188
- parameters: "Optional[StatementParameterType]" = None,
189
- /,
190
- *filters: "StatementFilter",
191
- connection: "Optional[PsycopgSyncConnection]" = None,
192
- schema_type: "Optional[type[ModelDTOT]]" = None,
193
- **kwargs: Any,
194
- ) -> "Union[ModelDTOT, dict[str, Any]]":
195
- """Fetch one row from the database.
238
+ Args:
239
+ operations: List of PipelineOperation objects
240
+ **options: Pipeline configuration options
196
241
 
197
242
  Returns:
198
- The first row of the query results.
243
+ List of SQLResult objects from all operations
199
244
  """
200
- connection = self._connection(connection)
201
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
202
- with self._with_cursor(connection) as cursor:
203
- cursor.execute(sql, parameters)
204
- row = cursor.fetchone()
205
- row = self.check_not_found(row)
245
+ from sqlspec.exceptions import PipelineExecutionError
206
246
 
207
- return self.to_schema(cast("dict[str, Any]", row), schema_type=schema_type)
247
+ results = []
248
+ connection = self._connection()
208
249
 
209
- @overload
210
- def select_one_or_none(
211
- self,
212
- sql: str,
213
- parameters: "Optional[StatementParameterType]" = None,
214
- /,
215
- *filters: "StatementFilter",
216
- connection: "Optional[PsycopgSyncConnection]" = None,
217
- schema_type: None = None,
218
- **kwargs: Any,
219
- ) -> "Optional[dict[str, Any]]": ...
220
- @overload
221
- def select_one_or_none(
222
- self,
223
- sql: str,
224
- parameters: "Optional[StatementParameterType]" = None,
225
- /,
226
- *filters: "StatementFilter",
227
- connection: "Optional[PsycopgSyncConnection]" = None,
228
- schema_type: "type[ModelDTOT]",
229
- **kwargs: Any,
230
- ) -> "Optional[ModelDTOT]": ...
231
- def select_one_or_none(
232
- self,
233
- sql: str,
234
- parameters: "Optional[StatementParameterType]" = None,
235
- /,
236
- *filters: "StatementFilter",
237
- connection: "Optional[PsycopgSyncConnection]" = None,
238
- schema_type: "Optional[type[ModelDTOT]]" = None,
239
- **kwargs: Any,
240
- ) -> "Optional[Union[ModelDTOT, dict[str, Any]]]":
241
- """Fetch one row from the database.
250
+ try:
251
+ with connection.pipeline():
252
+ for i, op in enumerate(operations):
253
+ result = self._execute_pipeline_operation(i, op, connection, options)
254
+ results.append(result)
242
255
 
243
- Returns:
244
- The first row of the query results.
245
- """
246
- connection = self._connection(connection)
247
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
248
- with self._with_cursor(connection) as cursor:
249
- cursor.execute(sql, parameters)
250
- row = cursor.fetchone()
251
- if row is None:
252
- return None
253
- return self.to_schema(cast("dict[str, Any]", row), schema_type=schema_type)
254
-
255
- @overload
256
- def select_value(
257
- self,
258
- sql: str,
259
- parameters: "Optional[StatementParameterType]" = None,
260
- /,
261
- *filters: "StatementFilter",
262
- connection: "Optional[PsycopgSyncConnection]" = None,
263
- schema_type: None = None,
264
- **kwargs: Any,
265
- ) -> "Any": ...
266
- @overload
267
- def select_value(
268
- self,
269
- sql: str,
270
- parameters: "Optional[StatementParameterType]" = None,
271
- /,
272
- *filters: "StatementFilter",
273
- connection: "Optional[PsycopgSyncConnection]" = None,
274
- schema_type: "type[T]",
275
- **kwargs: Any,
276
- ) -> "T": ...
277
- def select_value(
278
- self,
279
- sql: str,
280
- parameters: "Optional[StatementParameterType]" = None,
281
- /,
282
- *filters: "StatementFilter",
283
- connection: "Optional[PsycopgSyncConnection]" = None,
284
- schema_type: "Optional[type[T]]" = None,
285
- **kwargs: Any,
286
- ) -> "Union[T, Any]":
287
- """Fetch a single value from the database.
256
+ except Exception as e:
257
+ if not isinstance(e, PipelineExecutionError):
258
+ msg = f"Psycopg pipeline execution failed: {e}"
259
+ raise PipelineExecutionError(msg) from e
260
+ raise
288
261
 
289
- Returns:
290
- The first value from the first row of results, or None if no results.
291
- """
292
- connection = self._connection(connection)
293
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
294
- with self._with_cursor(connection) as cursor:
295
- cursor.execute(sql, parameters)
296
- row = cursor.fetchone()
297
- row = self.check_not_found(row)
298
- val = next(iter(row.values())) if row else None
299
- val = self.check_not_found(val)
300
- if schema_type is not None:
301
- return schema_type(val) # type: ignore[call-arg]
302
- return val
303
-
304
- @overload
305
- def select_value_or_none(
306
- self,
307
- sql: str,
308
- parameters: "Optional[StatementParameterType]" = None,
309
- /,
310
- *filters: "StatementFilter",
311
- connection: "Optional[PsycopgSyncConnection]" = None,
312
- schema_type: None = None,
313
- **kwargs: Any,
314
- ) -> "Optional[Any]": ...
315
- @overload
316
- def select_value_or_none(
317
- self,
318
- sql: str,
319
- parameters: "Optional[StatementParameterType]" = None,
320
- /,
321
- *filters: "StatementFilter",
322
- connection: "Optional[PsycopgSyncConnection]" = None,
323
- schema_type: "type[T]",
324
- **kwargs: Any,
325
- ) -> "Optional[T]": ...
326
- def select_value_or_none(
327
- self,
328
- sql: str,
329
- parameters: "Optional[StatementParameterType]" = None,
330
- /,
331
- *filters: "StatementFilter",
332
- connection: "Optional[PsycopgSyncConnection]" = None,
333
- schema_type: "Optional[type[T]]" = None,
334
- **kwargs: Any,
335
- ) -> "Optional[Union[T, Any]]":
336
- """Fetch a single value from the database.
262
+ return results
337
263
 
338
- Returns:
339
- The first value from the first row of results, or None if no results.
340
- """
341
- connection = self._connection(connection)
342
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
343
- with self._with_cursor(connection) as cursor:
344
- cursor.execute(sql, parameters)
345
- row = cursor.fetchone()
346
- if row is None:
347
- return None
348
- val = next(iter(row.values())) if row else None
349
- if val is None:
350
- return None
351
- if schema_type is not None:
352
- return schema_type(val) # type: ignore[call-arg]
353
- return val
354
-
355
- def insert_update_delete(
356
- self,
357
- sql: str,
358
- parameters: "Optional[StatementParameterType]" = None,
359
- /,
360
- *filters: "StatementFilter",
361
- connection: "Optional[PsycopgSyncConnection]" = None,
362
- **kwargs: Any,
363
- ) -> int:
364
- """Execute an INSERT, UPDATE, or DELETE query and return the number of affected rows.
264
+ def _execute_pipeline_operation(
265
+ self, index: int, operation: Any, connection: Any, options: dict
266
+ ) -> "SQLResult[RowT]":
267
+ """Execute a single pipeline operation with error handling."""
268
+ from sqlspec.exceptions import PipelineExecutionError
365
269
 
366
- Returns:
367
- The number of rows affected by the operation.
368
- """
369
- connection = self._connection(connection)
370
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
371
- with self._with_cursor(connection) as cursor:
372
- cursor.execute(sql, parameters)
373
- return getattr(cursor, "rowcount", -1) # pyright: ignore[reportUnknownMemberType]
374
-
375
- @overload
376
- def insert_update_delete_returning(
377
- self,
378
- sql: str,
379
- parameters: "Optional[StatementParameterType]" = None,
380
- /,
381
- *filters: "StatementFilter",
382
- connection: "Optional[PsycopgSyncConnection]" = None,
383
- schema_type: None = None,
384
- **kwargs: Any,
385
- ) -> "dict[str, Any]": ...
386
- @overload
387
- def insert_update_delete_returning(
388
- self,
389
- sql: str,
390
- parameters: "Optional[StatementParameterType]" = None,
391
- /,
392
- *filters: "StatementFilter",
393
- connection: "Optional[PsycopgSyncConnection]" = None,
394
- schema_type: "type[ModelDTOT]",
395
- **kwargs: Any,
396
- ) -> "ModelDTOT": ...
397
- def insert_update_delete_returning(
398
- self,
399
- sql: str,
400
- parameters: "Optional[StatementParameterType]" = None,
401
- /,
402
- *filters: "StatementFilter",
403
- connection: "Optional[PsycopgSyncConnection]" = None,
404
- schema_type: "Optional[type[ModelDTOT]]" = None,
405
- **kwargs: Any,
406
- ) -> "Optional[Union[dict[str, Any], ModelDTOT]]":
407
- """Insert, update, or delete data from the database and return result.
270
+ try:
271
+ # Prepare SQL and parameters
272
+ filtered_sql = self._apply_operation_filters(operation.sql, operation.filters)
273
+ sql_str = filtered_sql.to_sql(placeholder_style=self.default_parameter_style)
274
+ params = self._convert_psycopg_params(filtered_sql.parameters)
275
+
276
+ # Execute based on operation type
277
+ result = self._dispatch_pipeline_operation(operation, sql_str, params, connection)
278
+
279
+ except Exception as e:
280
+ if options.get("continue_on_error"):
281
+ return SQLResult[RowT](
282
+ statement=operation.sql,
283
+ data=cast("list[RowT]", []),
284
+ error=e,
285
+ operation_index=index,
286
+ parameters=operation.original_params,
287
+ )
288
+ msg = f"Psycopg pipeline failed at operation {index}: {e}"
289
+ raise PipelineExecutionError(
290
+ msg, operation_index=index, partial_results=[], failed_operation=operation
291
+ ) from e
292
+ else:
293
+ result.operation_index = index
294
+ result.pipeline_sql = operation.sql
295
+ return result
296
+
297
+ def _dispatch_pipeline_operation(
298
+ self, operation: Any, sql_str: str, params: Any, connection: Any
299
+ ) -> "SQLResult[RowT]":
300
+ """Dispatch to appropriate handler based on operation type."""
301
+ handlers = {
302
+ "execute_many": self._handle_pipeline_execute_many,
303
+ "select": self._handle_pipeline_select,
304
+ "execute_script": self._handle_pipeline_execute_script,
305
+ }
306
+
307
+ handler = handlers.get(operation.operation_type, self._handle_pipeline_execute)
308
+ return handler(operation.sql, sql_str, params, connection)
309
+
310
+ def _handle_pipeline_execute_many(
311
+ self, sql: "SQL", sql_str: str, params: Any, connection: Any
312
+ ) -> "SQLResult[RowT]":
313
+ """Handle execute_many operation in pipeline."""
314
+ with connection.cursor() as cursor:
315
+ cursor.executemany(sql_str, params)
316
+ return SQLResult[RowT](
317
+ statement=sql,
318
+ data=cast("list[RowT]", []),
319
+ rows_affected=cursor.rowcount,
320
+ operation_type="execute_many",
321
+ metadata={"status_message": "OK"},
322
+ )
323
+
324
+ def _handle_pipeline_select(self, sql: "SQL", sql_str: str, params: Any, connection: Any) -> "SQLResult[RowT]":
325
+ """Handle select operation in pipeline."""
326
+ with connection.cursor() as cursor:
327
+ cursor.execute(sql_str, params)
328
+ fetched_data = cursor.fetchall()
329
+ column_names = [col.name for col in cursor.description or []]
330
+ data = [dict(record) for record in fetched_data] if fetched_data else []
331
+ return SQLResult[RowT](
332
+ statement=sql,
333
+ data=cast("list[RowT]", data),
334
+ rows_affected=len(data),
335
+ operation_type="select",
336
+ metadata={"column_names": column_names},
337
+ )
338
+
339
+ def _handle_pipeline_execute_script(
340
+ self, sql: "SQL", sql_str: str, params: Any, connection: Any
341
+ ) -> "SQLResult[RowT]":
342
+ """Handle execute_script operation in pipeline."""
343
+ script_statements = self._split_script_statements(sql_str)
344
+ total_affected = 0
345
+
346
+ with connection.cursor() as cursor:
347
+ for stmt in script_statements:
348
+ if stmt.strip():
349
+ cursor.execute(stmt)
350
+ total_affected += cursor.rowcount or 0
351
+
352
+ return SQLResult[RowT](
353
+ statement=sql,
354
+ data=cast("list[RowT]", []),
355
+ rows_affected=total_affected,
356
+ operation_type="execute_script",
357
+ metadata={"status_message": "SCRIPT EXECUTED", "statements_executed": len(script_statements)},
358
+ )
359
+
360
+ def _handle_pipeline_execute(self, sql: "SQL", sql_str: str, params: Any, connection: Any) -> "SQLResult[RowT]":
361
+ """Handle regular execute operation in pipeline."""
362
+ with connection.cursor() as cursor:
363
+ cursor.execute(sql_str, params)
364
+ return SQLResult[RowT](
365
+ statement=sql,
366
+ data=cast("list[RowT]", []),
367
+ rows_affected=cursor.rowcount or 0,
368
+ operation_type="execute",
369
+ metadata={"status_message": "OK"},
370
+ )
371
+
372
+ def _convert_psycopg_params(self, params: Any) -> Any:
373
+ """Convert parameters to Psycopg-compatible format.
374
+
375
+ Psycopg supports both named (%s, %(name)s) and positional (%s) parameters.
376
+
377
+ Args:
378
+ params: Parameters in various formats
408
379
 
409
380
  Returns:
410
- The first row of results.
381
+ Parameters in Psycopg-compatible format
411
382
  """
412
- connection = self._connection(connection)
413
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
414
- with self._with_cursor(connection) as cursor:
415
- cursor.execute(sql, parameters)
416
- result = cursor.fetchone()
417
-
418
- if result is None:
419
- return None
383
+ if params is None:
384
+ return None
385
+ if isinstance(params, dict):
386
+ # Psycopg handles dict parameters directly for named placeholders
387
+ return params
388
+ if isinstance(params, (list, tuple)):
389
+ # Convert to tuple for positional parameters
390
+ return tuple(params)
391
+ # Single parameter
392
+ return (params,)
393
+
394
+ def _apply_operation_filters(self, sql: "SQL", filters: "list[Any]") -> "SQL":
395
+ """Apply filters to a SQL object for pipeline operations."""
396
+ if not filters:
397
+ return sql
398
+
399
+ result_sql = sql
400
+ for filter_obj in filters:
401
+ if hasattr(filter_obj, "apply"):
402
+ result_sql = filter_obj.apply(result_sql)
420
403
 
421
- return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
404
+ return result_sql
422
405
 
423
- def execute_script(
424
- self,
425
- sql: str,
426
- parameters: "Optional[StatementParameterType]" = None,
427
- /,
428
- connection: "Optional[PsycopgSyncConnection]" = None,
429
- **kwargs: Any,
430
- ) -> str:
431
- """Execute a script.
406
+ def _split_script_statements(self, script: str, strip_trailing_semicolon: bool = False) -> "list[str]":
407
+ """Split a SQL script into individual statements."""
432
408
 
433
- Returns:
434
- Status message for the operation.
435
- """
436
- connection = self._connection(connection)
437
- sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
438
- with self._with_cursor(connection) as cursor:
439
- cursor.execute(sql, parameters)
440
- return str(cursor.statusmessage) if cursor.statusmessage is not None else "DONE"
409
+ # Use the sophisticated splitter with PostgreSQL dialect
410
+ return split_sql_script(script=script, dialect="postgresql", strip_trailing_semicolon=strip_trailing_semicolon)
441
411
 
442
412
 
443
413
  class PsycopgAsyncDriver(
444
- PsycopgDriverBase,
445
- SQLTranslatorMixin["PsycopgAsyncConnection"],
446
- AsyncDriverAdapterProtocol["PsycopgAsyncConnection"],
447
- ResultConverter,
414
+ AsyncDriverAdapterProtocol[PsycopgAsyncConnection, RowT],
415
+ SQLTranslatorMixin,
416
+ TypeCoercionMixin,
417
+ AsyncStorageMixin,
418
+ AsyncPipelinedExecutionMixin,
419
+ ToSchemaMixin,
448
420
  ):
449
- """Psycopg Async Driver Adapter."""
421
+ """Psycopg Async Driver Adapter. Refactored for new protocol."""
450
422
 
451
- connection: "PsycopgAsyncConnection"
423
+ dialect: "DialectType" = "postgres" # pyright: ignore[reportInvalidTypeForm]
424
+ supported_parameter_styles: "tuple[ParameterStyle, ...]" = (
425
+ ParameterStyle.POSITIONAL_PYFORMAT,
426
+ ParameterStyle.NAMED_PYFORMAT,
427
+ )
428
+ default_parameter_style: ParameterStyle = ParameterStyle.POSITIONAL_PYFORMAT
429
+ __slots__ = ()
452
430
 
453
- def __init__(self, connection: "PsycopgAsyncConnection") -> None:
454
- self.connection = connection
431
+ def __init__(
432
+ self,
433
+ connection: PsycopgAsyncConnection,
434
+ config: Optional[SQLConfig] = None,
435
+ default_row_type: "type[DictRow]" = dict,
436
+ ) -> None:
437
+ super().__init__(connection=connection, config=config, default_row_type=default_row_type)
455
438
 
456
439
  @staticmethod
457
440
  @asynccontextmanager
458
- async def _with_cursor(connection: "PsycopgAsyncConnection") -> "AsyncGenerator[Any, None]":
459
- cursor = connection.cursor(row_factory=dict_row)
460
- try:
441
+ async def _get_cursor(connection: PsycopgAsyncConnection) -> AsyncGenerator[Any, None]:
442
+ async with connection.cursor() as cursor:
461
443
  yield cursor
462
- finally:
463
- await cursor.close()
464
-
465
- # --- Public API Methods --- #
466
- @overload
467
- async def select(
468
- self,
469
- sql: str,
470
- parameters: "Optional[StatementParameterType]" = None,
471
- /,
472
- *filters: "StatementFilter",
473
- connection: "Optional[PsycopgAsyncConnection]" = None,
474
- schema_type: None = None,
475
- **kwargs: Any,
476
- ) -> "Sequence[dict[str, Any]]": ...
477
- @overload
478
- async def select(
479
- self,
480
- sql: str,
481
- parameters: "Optional[StatementParameterType]" = None,
482
- /,
483
- *filters: "StatementFilter",
484
- connection: "Optional[PsycopgAsyncConnection]" = None,
485
- schema_type: "type[ModelDTOT]",
486
- **kwargs: Any,
487
- ) -> "Sequence[ModelDTOT]": ...
488
- async def select(
489
- self,
490
- sql: str,
491
- parameters: "Optional[StatementParameterType]" = None,
492
- /,
493
- *filters: "StatementFilter",
494
- schema_type: "Optional[type[ModelDTOT]]" = None,
495
- connection: "Optional[PsycopgAsyncConnection]" = None,
496
- **kwargs: Any,
497
- ) -> "Sequence[Union[ModelDTOT, dict[str, Any]]]":
498
- """Fetch data from the database.
499
444
 
500
- Returns:
501
- List of row data as either model instances or dictionaries.
502
- """
503
- connection = self._connection(connection)
504
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
505
- async with self._with_cursor(connection) as cursor:
506
- await cursor.execute(sql, parameters)
507
- results = await cursor.fetchall()
508
- if not results:
509
- return []
510
- return self.to_schema(cast("Sequence[dict[str, Any]]", results), schema_type=schema_type)
511
-
512
- @overload
513
- async def select_one(
514
- self,
515
- sql: str,
516
- parameters: "Optional[StatementParameterType]" = None,
517
- /,
518
- *filters: "StatementFilter",
519
- connection: "Optional[PsycopgAsyncConnection]" = None,
520
- schema_type: None = None,
521
- **kwargs: Any,
522
- ) -> "dict[str, Any]": ...
523
- @overload
524
- async def select_one(
525
- self,
526
- sql: str,
527
- parameters: "Optional[StatementParameterType]" = None,
528
- /,
529
- *filters: "StatementFilter",
530
- connection: "Optional[PsycopgAsyncConnection]" = None,
531
- schema_type: "type[ModelDTOT]",
532
- **kwargs: Any,
533
- ) -> "ModelDTOT": ...
534
- async def select_one(
535
- self,
536
- sql: str,
537
- parameters: "Optional[StatementParameterType]" = None,
538
- /,
539
- *filters: "StatementFilter",
540
- connection: "Optional[PsycopgAsyncConnection]" = None,
541
- schema_type: "Optional[type[ModelDTOT]]" = None,
542
- **kwargs: Any,
543
- ) -> "Union[ModelDTOT, dict[str, Any]]":
544
- """Fetch one row from the database.
545
-
546
- Returns:
547
- The first row of the query results.
548
- """
549
- connection = self._connection(connection)
550
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
551
- async with self._with_cursor(connection) as cursor:
552
- await cursor.execute(sql, parameters)
553
- row = await cursor.fetchone()
554
- row = self.check_not_found(row)
445
+ async def _execute_statement(
446
+ self, statement: SQL, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
447
+ ) -> Union[SelectResultDict, DMLResultDict, ScriptResultDict]:
448
+ if statement.is_script:
449
+ sql, _ = statement.compile(placeholder_style=ParameterStyle.STATIC)
450
+ return await self._execute_script(sql, connection=connection, **kwargs)
451
+
452
+ # Determine if we need to convert parameter style
453
+ detected_styles = {p.style for p in statement.parameter_info}
454
+ target_style = self.default_parameter_style
455
+
456
+ # Check if any detected style is not supported
457
+ unsupported_styles = detected_styles - set(self.supported_parameter_styles)
458
+ if unsupported_styles:
459
+ # Convert to default style if we have unsupported styles
460
+ target_style = self.default_parameter_style
461
+ elif detected_styles:
462
+ # Use the first detected style if all are supported
463
+ # Prefer the first supported style found
464
+ for style in detected_styles:
465
+ if style in self.supported_parameter_styles:
466
+ target_style = style
467
+ break
468
+
469
+ if statement.is_many:
470
+ sql, _ = statement.compile(placeholder_style=target_style)
471
+ # For execute_many, use the parameters passed via kwargs
472
+ params = kwargs.get("parameters")
473
+ if params is not None:
474
+ # Process each parameter set individually
475
+ processed_params = [self._process_parameters(param_set) for param_set in params]
476
+ params = processed_params
477
+ return await self._execute_many(sql, params, connection=connection, **kwargs)
478
+
479
+ sql, params = statement.compile(placeholder_style=target_style)
480
+ params = self._process_parameters(params)
481
+ return await self._execute(sql, params, statement, connection=connection, **kwargs)
482
+
483
+ async def _execute(
484
+ self,
485
+ sql: str,
486
+ parameters: Any,
487
+ statement: SQL,
488
+ connection: Optional[PsycopgAsyncConnection] = None,
489
+ **kwargs: Any,
490
+ ) -> Union[SelectResultDict, DMLResultDict]:
491
+ conn = self._connection(connection)
492
+ async with conn.cursor() as cursor:
493
+ await cursor.execute(cast("Query", sql), parameters)
494
+
495
+ # When parsing is disabled, expression will be None, so check SQL directly
496
+ if statement.expression and self.returns_rows(statement.expression):
497
+ # For SELECT statements, extract data while cursor is open
498
+ fetched_data = await cursor.fetchall()
499
+ column_names = [col.name for col in cursor.description or []]
500
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
501
+ if not statement.expression and sql.strip().upper().startswith("SELECT"):
502
+ # For SELECT statements when parsing is disabled
503
+ fetched_data = await cursor.fetchall()
504
+ column_names = [col.name for col in cursor.description or []]
505
+ return {"data": fetched_data, "column_names": column_names, "rows_affected": len(fetched_data)}
506
+ # For DML statements
507
+ dml_result: DMLResultDict = {
508
+ "rows_affected": cursor.rowcount,
509
+ "status_message": cursor.statusmessage or "OK",
510
+ }
511
+ return dml_result
512
+
513
+ async def _execute_many(
514
+ self, sql: str, param_list: Any, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
515
+ ) -> DMLResultDict:
516
+ conn = self._connection(connection)
517
+ async with conn.cursor() as cursor:
518
+ await cursor.executemany(cast("Query", sql), param_list or [])
519
+ return {"rows_affected": cursor.rowcount, "status_message": cursor.statusmessage or "OK"}
520
+
521
+ async def _execute_script(
522
+ self, script: str, connection: Optional[PsycopgAsyncConnection] = None, **kwargs: Any
523
+ ) -> ScriptResultDict:
524
+ conn = self._connection(connection)
525
+ async with conn.cursor() as cursor:
526
+ await cursor.execute(cast("Query", script))
527
+ # For scripts, return script result format
528
+ return {
529
+ "statements_executed": -1, # Psycopg doesn't provide this info
530
+ "status_message": cursor.statusmessage or "SCRIPT EXECUTED",
531
+ }
532
+
533
+ async def _fetch_arrow_table(self, sql: SQL, connection: "Optional[Any]" = None, **kwargs: Any) -> "ArrowResult":
534
+ self._ensure_pyarrow_installed()
535
+ conn = self._connection(connection)
536
+
537
+ async with conn.cursor() as cursor:
538
+ await cursor.execute(
539
+ cast("Query", sql.to_sql(placeholder_style=self.default_parameter_style)),
540
+ sql.get_parameters(style=self.default_parameter_style) or [],
541
+ )
542
+ arrow_table = await cursor.fetch_arrow_table() # type: ignore[attr-defined]
543
+ return ArrowResult(statement=sql, data=arrow_table)
544
+
545
+ async def _ingest_arrow_table(self, table: "Any", table_name: str, mode: str = "append", **options: Any) -> int:
546
+ self._ensure_pyarrow_installed()
547
+ import pyarrow.csv as pacsv
548
+
549
+ conn = self._connection(None)
550
+ async with conn.cursor() as cursor:
551
+ if mode == "replace":
552
+ await cursor.execute(cast("Query", f"TRUNCATE TABLE {table_name}"))
553
+ elif mode == "create":
554
+ msg = "'create' mode is not supported for psycopg ingestion."
555
+ raise NotImplementedError(msg)
556
+
557
+ buffer = io.StringIO()
558
+ pacsv.write_csv(table, buffer)
559
+ buffer.seek(0)
560
+
561
+ async with cursor.copy(cast("Query", f"COPY {table_name} FROM STDIN WITH (FORMAT CSV, HEADER)")) as copy:
562
+ await copy.write(buffer.read())
563
+
564
+ return cursor.rowcount if cursor.rowcount is not None else -1
565
+
566
+ async def _wrap_select_result(
567
+ self, statement: SQL, result: SelectResultDict, schema_type: Optional[type[ModelDTOT]] = None, **kwargs: Any
568
+ ) -> Union[SQLResult[ModelDTOT], SQLResult[RowT]]:
569
+ # result must be a dict with keys: data, column_names, rows_affected
570
+ fetched_data = result["data"]
571
+ column_names = result["column_names"]
572
+ rows_affected = result["rows_affected"]
573
+ rows_as_dicts: list[dict[str, Any]] = [dict(row) for row in fetched_data]
574
+
575
+ if schema_type:
576
+ return SQLResult[ModelDTOT](
577
+ statement=statement,
578
+ data=list(self.to_schema(data=fetched_data, schema_type=schema_type)),
579
+ column_names=column_names,
580
+ rows_affected=rows_affected,
581
+ operation_type="SELECT",
582
+ )
583
+ return SQLResult[RowT](
584
+ statement=statement,
585
+ data=rows_as_dicts,
586
+ column_names=column_names,
587
+ rows_affected=rows_affected,
588
+ operation_type="SELECT",
589
+ )
590
+
591
+ async def _wrap_execute_result(
592
+ self, statement: SQL, result: Union[DMLResultDict, ScriptResultDict], **kwargs: Any
593
+ ) -> SQLResult[RowT]:
594
+ operation_type = "UNKNOWN"
595
+ if statement.expression:
596
+ operation_type = str(statement.expression.key).upper()
597
+
598
+ if is_dict_with_field(result, "statements_executed"):
599
+ return SQLResult[RowT](
600
+ statement=statement,
601
+ data=[],
602
+ rows_affected=0,
603
+ operation_type="SCRIPT",
604
+ metadata={"status_message": result.get("status_message", "")},
605
+ )
606
+
607
+ if is_dict_with_field(result, "rows_affected"):
608
+ return SQLResult[RowT](
609
+ statement=statement,
610
+ data=[],
611
+ rows_affected=cast("int", result.get("rows_affected", -1)),
612
+ operation_type=operation_type,
613
+ metadata={"status_message": result.get("status_message", "")},
614
+ )
615
+ # This shouldn't happen with TypedDict approach
616
+ msg = f"Unexpected result type: {type(result)}"
617
+ raise ValueError(msg)
618
+
619
+ def _connection(self, connection: Optional[PsycopgAsyncConnection] = None) -> PsycopgAsyncConnection:
620
+ """Get the connection to use for the operation."""
621
+ return connection or self.connection
622
+
623
+ async def _execute_pipeline_native(self, operations: "list[Any]", **options: Any) -> "list[SQLResult[RowT]]":
624
+ """Native async pipeline execution using Psycopg's pipeline support."""
625
+ from sqlspec.exceptions import PipelineExecutionError
626
+
627
+ results = []
628
+ connection = self._connection()
555
629
 
556
- return self.to_schema(cast("dict[str, Any]", row), schema_type=schema_type)
630
+ try:
631
+ async with connection.pipeline():
632
+ for i, op in enumerate(operations):
633
+ result = await self._execute_pipeline_operation_async(i, op, connection, options)
634
+ results.append(result)
557
635
 
558
- @overload
559
- async def select_one_or_none(
560
- self,
561
- sql: str,
562
- parameters: "Optional[StatementParameterType]" = None,
563
- /,
564
- *filters: "StatementFilter",
565
- connection: "Optional[PsycopgAsyncConnection]" = None,
566
- schema_type: None = None,
567
- **kwargs: Any,
568
- ) -> "Optional[dict[str, Any]]": ...
569
- @overload
570
- async def select_one_or_none(
571
- self,
572
- sql: str,
573
- parameters: "Optional[StatementParameterType]" = None,
574
- /,
575
- *filters: "StatementFilter",
576
- connection: "Optional[PsycopgAsyncConnection]" = None,
577
- schema_type: "type[ModelDTOT]",
578
- **kwargs: Any,
579
- ) -> "Optional[ModelDTOT]": ...
580
- async def select_one_or_none(
581
- self,
582
- sql: str,
583
- parameters: "Optional[StatementParameterType]" = None,
584
- /,
585
- *filters: "StatementFilter",
586
- schema_type: "Optional[type[ModelDTOT]]" = None,
587
- connection: "Optional[PsycopgAsyncConnection]" = None,
588
- **kwargs: Any,
589
- ) -> "Optional[Union[ModelDTOT, dict[str, Any]]]":
590
- """Fetch one row from the database.
636
+ except Exception as e:
637
+ if not isinstance(e, PipelineExecutionError):
638
+ msg = f"Psycopg async pipeline execution failed: {e}"
639
+ raise PipelineExecutionError(msg) from e
640
+ raise
591
641
 
592
- Returns:
593
- The first row of the query results.
594
- """
595
- connection = self._connection(connection)
596
- sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
597
- async with self._with_cursor(connection) as cursor:
598
- await cursor.execute(sql, parameters)
599
- row = await cursor.fetchone()
600
- if row is None:
601
- return None
602
-
603
- # Use self.to_schema from ResultConverter mixin
604
- return self.to_schema(cast("dict[str, Any]", row), schema_type=schema_type)
605
-
606
- @overload
607
- async def select_value(
608
- self,
609
- sql: str,
610
- parameters: "Optional[StatementParameterType]" = None,
611
- /,
612
- *filters: "StatementFilter",
613
- connection: "Optional[PsycopgAsyncConnection]" = None,
614
- schema_type: None = None,
615
- **kwargs: Any,
616
- ) -> "Any": ...
617
- @overload
618
- async def select_value(
619
- self,
620
- sql: str,
621
- parameters: "Optional[StatementParameterType]" = None,
622
- /,
623
- *filters: "StatementFilter",
624
- connection: "Optional[PsycopgAsyncConnection]" = None,
625
- schema_type: "type[T]",
626
- **kwargs: Any,
627
- ) -> "T": ...
628
- async def select_value(
629
- self,
630
- sql: str,
631
- parameters: "Optional[StatementParameterType]" = None,
632
- /,
633
- *filters: "StatementFilter",
634
- connection: "Optional[PsycopgAsyncConnection]" = None,
635
- schema_type: "Optional[type[T]]" = None,
636
- **kwargs: Any,
637
- ) -> "Union[T, Any]":
638
- """Fetch a single value from the database.
642
+ return results
 
-        Returns:
-            The first value from the first row of results, or None if no results.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            row = await cursor.fetchone()
-            row = self.check_not_found(row)
-            val = next(iter(row.values())) if row else None
-            val = self.check_not_found(val)
-            if schema_type is not None:
-                return schema_type(val)  # type: ignore[call-arg]
-            return val
-
-    @overload
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: None = None,
-        **kwargs: Any,
-    ) -> "Optional[Any]": ...
-    @overload
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "type[T]",
-        **kwargs: Any,
-    ) -> "Optional[T]": ...
-    async def select_value_or_none(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "Optional[type[T]]" = None,
-        **kwargs: Any,
-    ) -> "Optional[Union[T, Any]]":
-        """Fetch a single value from the database.
+    async def _execute_pipeline_operation_async(
+        self, index: int, operation: Any, connection: Any, options: dict
+    ) -> "SQLResult[RowT]":
+        """Execute a single async pipeline operation with error handling."""
+        from sqlspec.exceptions import PipelineExecutionError
 
-        Returns:
-            The first value from the first row of results, or None if no results.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            row = await cursor.fetchone()
-            if row is None:
-                return None
-            val = next(iter(row.values())) if row else None
-            if val is None:
-                return None
-            if schema_type is not None:
-                return schema_type(val)  # type: ignore[call-arg]
-            return val
-
-    async def insert_update_delete(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        **kwargs: Any,
-    ) -> int:
-        """Execute an INSERT, UPDATE, or DELETE query and return the number of affected rows.
+        try:
+            # Prepare SQL and parameters
+            filtered_sql = self._apply_operation_filters(operation.sql, operation.filters)
+            sql_str = filtered_sql.to_sql(placeholder_style=self.default_parameter_style)
+            params = self._convert_psycopg_params(filtered_sql.parameters)
+
+            # Execute based on operation type
+            result = await self._dispatch_pipeline_operation_async(operation, sql_str, params, connection)
+
+        except Exception as e:
+            if options.get("continue_on_error"):
+                return SQLResult[RowT](
+                    statement=operation.sql,
+                    data=cast("list[RowT]", []),
+                    error=e,
+                    operation_index=index,
+                    parameters=operation.original_params,
+                )
+            msg = f"Psycopg async pipeline failed at operation {index}: {e}"
+            raise PipelineExecutionError(
+                msg, operation_index=index, partial_results=[], failed_operation=operation
+            ) from e
+        else:
+            # Add pipeline context
+            result.operation_index = index
+            result.pipeline_sql = operation.sql
+            return result
+
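The try/except/else above implements the continue_on_error contract: a failing operation either becomes an error-carrying result or aborts the whole pipeline. A rough, self-contained sketch of that decision; Result and run_operation are simplified stand-ins for SQLResult and the real dispatch step, not sqlspec types:

from dataclasses import dataclass, field
from typing import Callable, Optional


@dataclass
class Result:
    data: list = field(default_factory=list)
    error: Optional[Exception] = None
    operation_index: Optional[int] = None


def run_operation(index: int, op: Callable[[], Result], options: dict) -> Result:
    try:
        result = op()
    except Exception as exc:
        if options.get("continue_on_error"):
            # Record the failure and let the pipeline keep going.
            return Result(error=exc, operation_index=index)
        raise RuntimeError(f"pipeline failed at operation {index}: {exc}") from exc
    result.operation_index = index
    return result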
+    async def _dispatch_pipeline_operation_async(
+        self, operation: Any, sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Dispatch to appropriate async handler based on operation type."""
+        handlers = {
+            "execute_many": self._handle_pipeline_execute_many_async,
+            "select": self._handle_pipeline_select_async,
+            "execute_script": self._handle_pipeline_execute_script_async,
+        }
+
+        handler = handlers.get(operation.operation_type, self._handle_pipeline_execute_async)
+        return await handler(operation.sql, sql_str, params, connection)
+
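Dispatch here is a plain dictionary keyed on operation_type, with the generic execute handler supplied as the dict.get() fallback. The same pattern in a tiny standalone form; the handler names below are illustrative, not the driver's methods:

from typing import Any, Callable


def make_dispatcher(default: Callable[[str], Any], **handlers: Callable[[str], Any]):
    def dispatch(operation_type: str, payload: str) -> Any:
        # dict.get() supplies the fallback handler for unknown operation types.
        handler = handlers.get(operation_type, default)
        return handler(payload)

    return dispatch


dispatch = make_dispatcher(
    default=lambda sql: f"execute: {sql}",
    select=lambda sql: f"select: {sql}",
    execute_many=lambda sql: f"execute_many: {sql}",
)
assert dispatch("select", "SELECT 1") == "select: SELECT 1"
assert dispatch("unknown", "VACUUM") == "execute: VACUUM"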
+    async def _handle_pipeline_execute_many_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async execute_many operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.executemany(sql_str, params)
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", []),
+                rows_affected=cursor.rowcount,
+                operation_type="execute_many",
+                metadata={"status_message": "OK"},
+            )
+
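executemany runs one parameterized statement once per parameter set, and the handler above records the cursor's cumulative rowcount as rows_affected. The same DB-API shape shown with the standard-library sqlite3 driver for illustration only; rowcount reporting can vary by driver, and psycopg's async cursor is awaited as in the diff:

import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE t (id INTEGER, name TEXT)")
cur.executemany("INSERT INTO t (id, name) VALUES (?, ?)", [(1, "a"), (2, "b"), (3, "c")])
print(cur.rowcount)  # 3 rows affected across the whole batch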
+    async def _handle_pipeline_select_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async select operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.execute(sql_str, params)
+            fetched_data = await cursor.fetchall()
+            column_names = [col.name for col in cursor.description or []]
+            data = [dict(record) for record in fetched_data] if fetched_data else []
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", data),
+                rows_affected=len(data),
+                operation_type="select",
+                metadata={"column_names": column_names},
+            )
+
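The select handler derives column names from cursor.description (psycopg's Column objects expose .name) and returns rows as dictionaries; dict(record) assumes a mapping-style row factory is in effect. A database-free sketch of the same row shaping, using plain DB-API description tuples where the column name is the first element:

from typing import Any, Sequence


def shape_rows(description: Sequence[Sequence[Any]], rows: Sequence[Sequence[Any]]) -> list[dict[str, Any]]:
    # DB-API description: one 7-tuple per column, name first.
    column_names = [col[0] for col in description]
    return [dict(zip(column_names, row)) for row in rows]


rows = shape_rows(
    [("id", None, None, None, None, None, None), ("name", None, None, None, None, None, None)],
    [(1, "ada"), (2, "grace")],
)
assert rows == [{"id": 1, "name": "ada"}, {"id": 2, "name": "grace"}]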
+    async def _handle_pipeline_execute_script_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async execute_script operation in pipeline."""
+        script_statements = self._split_script_statements(sql_str)
+        total_affected = 0
+
+        async with connection.cursor() as cursor:
+            for stmt in script_statements:
+                if stmt.strip():
+                    await cursor.execute(stmt)
+                    total_affected += cursor.rowcount or 0
+
+        return SQLResult[RowT](
+            statement=sql,
+            data=cast("list[RowT]", []),
+            rows_affected=total_affected,
+            operation_type="execute_script",
+            metadata={"status_message": "SCRIPT EXECUTED", "statements_executed": len(script_statements)},
+        )
+
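The script handler defers to the driver's _split_script_statements helper (defined elsewhere in the mixin hierarchy) and executes each non-empty statement in turn, summing rowcounts. A naive splitter conveys the idea, though the real helper presumably also has to cope with semicolons inside string literals and dollar-quoted blocks:

def split_script(script: str) -> list[str]:
    # Naive split: adequate only for simple scripts without ';' inside literals.
    return [stmt.strip() for stmt in script.split(";") if stmt.strip()]


statements = split_script(
    """
    CREATE TABLE audit (id INTEGER);
    INSERT INTO audit (id) VALUES (1);
    INSERT INTO audit (id) VALUES (2);
    """
)
assert len(statements) == 3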
+    async def _handle_pipeline_execute_async(
+        self, sql: "SQL", sql_str: str, params: Any, connection: Any
+    ) -> "SQLResult[RowT]":
+        """Handle async regular execute operation in pipeline."""
+        async with connection.cursor() as cursor:
+            await cursor.execute(sql_str, params)
+            return SQLResult[RowT](
+                statement=sql,
+                data=cast("list[RowT]", []),
+                rows_affected=cursor.rowcount or 0,
+                operation_type="execute",
+                metadata={"status_message": "OK"},
+            )
+
+    def _convert_psycopg_params(self, params: Any) -> Any:
+        """Convert parameters to Psycopg-compatible format.
+
+        Psycopg supports both named (%(name)s) and positional (%s) parameters.
 
-        Returns:
-            The number of rows affected by the operation.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            return getattr(cursor, "rowcount", -1)  # pyright: ignore[reportUnknownMemberType]
-
-    @overload
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: None = None,
-        **kwargs: Any,
-    ) -> "dict[str, Any]": ...
-    @overload
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "type[ModelDTOT]",
-        **kwargs: Any,
-    ) -> "ModelDTOT": ...
-    async def insert_update_delete_returning(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        *filters: "StatementFilter",
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        schema_type: "Optional[type[ModelDTOT]]" = None,
-        **kwargs: Any,
-    ) -> "Optional[Union[dict[str, Any], ModelDTOT]]":
-        """Insert, update, or delete data from the database and return result.
+        Args:
+            params: Parameters in various formats
 
         Returns:
-            The first row of results.
+            Parameters in Psycopg-compatible format
         """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, *filters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            result = await cursor.fetchone()
-            if result is None:
-                return None
-
-            return self.to_schema(cast("dict[str, Any]", result), schema_type=schema_type)
-
-    async def execute_script(
-        self,
-        sql: str,
-        parameters: "Optional[StatementParameterType]" = None,
-        /,
-        connection: "Optional[PsycopgAsyncConnection]" = None,
-        **kwargs: Any,
-    ) -> str:
-        """Execute a script.
+        if params is None:
+            return None
+        if isinstance(params, dict):
+            # Psycopg handles dict parameters directly for named placeholders
+            return params
+        if isinstance(params, (list, tuple)):
+            # Convert to tuple for positional parameters
+            return tuple(params)
+        # Single parameter
+        return (params,)
+
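The conversion rules above are small enough to restate as cases: None passes through, mappings go to psycopg unchanged for named placeholders, sequences become positional tuples, and a bare scalar becomes a one-element tuple. A standalone sketch of the same normalization, with normalize_params as a hypothetical free-function equivalent:

from typing import Any, Optional


def normalize_params(params: Any) -> Optional[Any]:
    if params is None:
        return None
    if isinstance(params, dict):
        return params  # named placeholders such as %(name)s
    if isinstance(params, (list, tuple)):
        return tuple(params)  # positional %s placeholders
    return (params,)  # single scalar


assert normalize_params(None) is None
assert normalize_params({"id": 1}) == {"id": 1}
assert normalize_params([1, 2]) == (1, 2)
assert normalize_params("x") == ("x",)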
+    def _apply_operation_filters(self, sql: "SQL", filters: "list[Any]") -> "SQL":
+        """Apply filters to a SQL object for pipeline operations."""
+        if not filters:
+            return sql
+
+        result_sql = sql
+        for filter_obj in filters:
+            if hasattr(filter_obj, "apply"):
+                result_sql = filter_obj.apply(result_sql)
 
-        Returns:
-            Status message for the operation.
-        """
-        connection = self._connection(connection)
-        sql, parameters = self._process_sql_params(sql, parameters, **kwargs)
-        async with self._with_cursor(connection) as cursor:
-            await cursor.execute(sql, parameters)
-            return str(cursor.statusmessage) if cursor.statusmessage is not None else "DONE"
+        return result_sql
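_apply_operation_filters folds a list of filter objects over a statement, calling each filter's apply() hook when it exists. A minimal standalone sketch of that fold on plain strings; the Limit class below is a made-up example, not a sqlspec StatementFilter:

class Limit:
    """Hypothetical filter: appends a LIMIT clause via an apply() hook."""

    def __init__(self, n: int) -> None:
        self.n = n

    def apply(self, sql: str) -> str:
        return f"{sql} LIMIT {self.n}"


def apply_filters(sql: str, filters: list) -> str:
    result = sql
    for f in filters:
        if hasattr(f, "apply"):
            result = f.apply(result)
    return result


assert apply_filters("SELECT * FROM users", [Limit(10)]) == "SELECT * FROM users LIMIT 10"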