sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,975 @@
|
|
|
1
|
+
"""PostgreSQL psycopg driver implementation."""
|
|
2
|
+
|
|
3
|
+
from contextlib import AsyncExitStack, ExitStack
|
|
4
|
+
from typing import TYPE_CHECKING, Any, cast
|
|
5
|
+
|
|
6
|
+
import psycopg
|
|
7
|
+
|
|
8
|
+
from sqlspec.adapters.psycopg._typing import (
|
|
9
|
+
PsycopgAsyncConnection,
|
|
10
|
+
PsycopgAsyncSessionContext,
|
|
11
|
+
PsycopgSyncConnection,
|
|
12
|
+
PsycopgSyncSessionContext,
|
|
13
|
+
)
|
|
14
|
+
from sqlspec.adapters.psycopg.core import (
|
|
15
|
+
TRANSACTION_STATUS_IDLE,
|
|
16
|
+
PipelineCursorEntry,
|
|
17
|
+
PreparedStackOperation,
|
|
18
|
+
build_async_pipeline_execution_result,
|
|
19
|
+
build_copy_from_command,
|
|
20
|
+
build_pipeline_execution_result,
|
|
21
|
+
build_truncate_command,
|
|
22
|
+
collect_rows,
|
|
23
|
+
create_mapped_exception,
|
|
24
|
+
default_statement_config,
|
|
25
|
+
driver_profile,
|
|
26
|
+
execute_with_optional_parameters,
|
|
27
|
+
execute_with_optional_parameters_async,
|
|
28
|
+
executemany_or_skip,
|
|
29
|
+
executemany_or_skip_async,
|
|
30
|
+
pipeline_supported,
|
|
31
|
+
resolve_rowcount,
|
|
32
|
+
)
|
|
33
|
+
from sqlspec.adapters.psycopg.data_dictionary import PsycopgAsyncDataDictionary, PsycopgSyncDataDictionary
|
|
34
|
+
from sqlspec.core import (
|
|
35
|
+
SQL,
|
|
36
|
+
SQLResult,
|
|
37
|
+
StackResult,
|
|
38
|
+
StatementConfig,
|
|
39
|
+
StatementStack,
|
|
40
|
+
get_cache_config,
|
|
41
|
+
is_copy_from_operation,
|
|
42
|
+
is_copy_operation,
|
|
43
|
+
is_copy_to_operation,
|
|
44
|
+
register_driver_profile,
|
|
45
|
+
)
|
|
46
|
+
from sqlspec.driver import (
|
|
47
|
+
AsyncDriverAdapterBase,
|
|
48
|
+
StackExecutionObserver,
|
|
49
|
+
SyncDriverAdapterBase,
|
|
50
|
+
describe_stack_statement,
|
|
51
|
+
)
|
|
52
|
+
from sqlspec.exceptions import SQLSpecError, StackExecutionError
|
|
53
|
+
from sqlspec.utils.logging import get_logger
|
|
54
|
+
from sqlspec.utils.type_guards import is_readable
|
|
55
|
+
|
|
56
|
+
if TYPE_CHECKING:
|
|
57
|
+
from sqlspec.adapters.psycopg._typing import PsycopgPipelineDriver
|
|
58
|
+
from sqlspec.core import ArrowResult
|
|
59
|
+
from sqlspec.driver import ExecutionResult
|
|
60
|
+
from sqlspec.storage import StorageBridgeJob, StorageDestination, StorageFormat, StorageTelemetry
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# Public surface of this module: cursor context managers, exception handlers,
# drivers, and session contexts for both the sync and async psycopg paths.
__all__ = (
    "PsycopgAsyncCursor",
    "PsycopgAsyncDriver",
    "PsycopgAsyncExceptionHandler",
    "PsycopgAsyncSessionContext",
    "PsycopgSyncCursor",
    "PsycopgSyncDriver",
    "PsycopgSyncExceptionHandler",
    "PsycopgSyncSessionContext",
)

# Module-level logger named after the adapter's dotted package path.
logger = get_logger("sqlspec.adapters.psycopg")
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
class PsycopgPipelineMixin:
    """Shared helpers for psycopg sync/async pipeline execution.

    Provides the planning step that turns a ``StatementStack`` into a list of
    ``PreparedStackOperation`` entries ready for psycopg pipeline mode, or
    ``None`` when the stack contains anything pipeline mode cannot run.
    """

    __slots__ = ()

    def _prepare_pipeline_operations(self, stack: "StatementStack") -> "list[PreparedStackOperation] | None":
        """Compile every operation in *stack* for pipeline execution.

        Returns:
            The prepared operations in stack order, or ``None`` when any
            operation is not a plain ``execute`` or compiles to a script/many
            statement — the caller then falls back to sequential execution.
        """
        driver = cast("PsycopgPipelineDriver", self)
        plan: list[PreparedStackOperation] = []

        for position, operation in enumerate(stack.operations):
            if operation.method != "execute":
                return None

            keyword_args = dict(operation.keyword_arguments) if operation.keyword_arguments else {}
            # A per-operation statement_config overrides the driver default.
            explicit_config = keyword_args.pop("statement_config", None)
            effective_config = explicit_config or driver.statement_config

            statement = driver.prepare_statement(
                operation.statement, operation.arguments, statement_config=effective_config, kwargs=keyword_args
            )
            if statement.is_script or statement.is_many:
                return None

            compiled_sql, compiled_parameters = driver._get_compiled_sql(  # pyright: ignore[reportPrivateUsage]
                statement, effective_config
            )
            plan.append(
                PreparedStackOperation(
                    operation_index=position,
                    operation=operation,
                    statement=statement,
                    sql=compiled_sql,
                    parameters=compiled_parameters,
                )
            )

        return plan
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class PsycopgSyncCursor:
    """Context manager that opens a psycopg cursor and closes it on exit."""

    __slots__ = ("connection", "cursor")

    def __init__(self, connection: PsycopgSyncConnection) -> None:
        self.cursor: Any | None = None
        self.connection = connection

    def __enter__(self) -> Any:
        opened = self.connection.cursor()
        self.cursor = opened
        return opened

    def __exit__(self, *_: Any) -> None:
        opened = self.cursor
        if opened is None:
            return
        opened.close()
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
class PsycopgSyncExceptionHandler:
    """Context manager that converts psycopg errors into SQLSpec exceptions.

    PostgreSQL SQLSTATE error codes are mapped to specific SQLSpec exception
    types so application code can handle failures precisely.

    The mapped exception is stored in ``pending_exception`` rather than being
    raised out of ``__exit__`` (deferred pattern required for mypyc: raising
    across the compiled ABI boundary is unsafe).
    """

    __slots__ = ("pending_exception",)

    def __init__(self) -> None:
        self.pending_exception: Exception | None = None

    def __enter__(self) -> "PsycopgSyncExceptionHandler":
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool:
        if exc_type is None:
            return False
        if not issubclass(exc_type, psycopg.Error):
            return False
        self.pending_exception = create_mapped_exception(exc_val)
        return True
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class PsycopgSyncDriver(PsycopgPipelineMixin, SyncDriverAdapterBase):
    """PostgreSQL psycopg synchronous driver.

    Provides synchronous database operations for PostgreSQL using psycopg3.
    Supports SQL statement execution with parameter binding, transaction
    management, result processing with column metadata, parameter style
    conversion, PostgreSQL arrays and JSON handling, COPY operations for
    bulk data transfer, and PostgreSQL-specific error handling.
    """

    __slots__ = ("_data_dictionary",)
    dialect = "postgres"

    def __init__(
        self,
        connection: PsycopgSyncConnection,
        statement_config: "StatementConfig | None" = None,
        driver_features: "dict[str, Any] | None" = None,
    ) -> None:
        if statement_config is None:
            # Default config honors the process-wide compiled-statement cache setting.
            statement_config = default_statement_config.replace(
                enable_caching=get_cache_config().compiled_cache_enabled
            )

        super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
        # Lazily created by the ``data_dictionary`` property.
        self._data_dictionary: PsycopgSyncDataDictionary | None = None
|
|
187
|
+
|
|
188
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
189
|
+
# CORE DISPATCH METHODS
|
|
190
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
191
|
+
|
|
192
|
+
def dispatch_execute(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
193
|
+
"""Execute single SQL statement.
|
|
194
|
+
|
|
195
|
+
Args:
|
|
196
|
+
cursor: Database cursor
|
|
197
|
+
statement: SQL statement to execute
|
|
198
|
+
|
|
199
|
+
Returns:
|
|
200
|
+
ExecutionResult with statement execution details
|
|
201
|
+
"""
|
|
202
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
203
|
+
|
|
204
|
+
execute_with_optional_parameters(cursor, sql, prepared_parameters)
|
|
205
|
+
|
|
206
|
+
if statement.returns_rows():
|
|
207
|
+
fetched_data = cursor.fetchall()
|
|
208
|
+
fetched_data, column_names = collect_rows(cast("list[Any] | None", fetched_data), cursor.description)
|
|
209
|
+
|
|
210
|
+
return self.create_execution_result(
|
|
211
|
+
cursor,
|
|
212
|
+
selected_data=fetched_data,
|
|
213
|
+
column_names=column_names,
|
|
214
|
+
data_row_count=len(fetched_data),
|
|
215
|
+
is_select_result=True,
|
|
216
|
+
)
|
|
217
|
+
|
|
218
|
+
affected_rows = resolve_rowcount(cursor)
|
|
219
|
+
return self.create_execution_result(cursor, rowcount_override=affected_rows)
|
|
220
|
+
|
|
221
|
+
def dispatch_execute_many(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
222
|
+
"""Execute SQL with multiple parameter sets.
|
|
223
|
+
|
|
224
|
+
Args:
|
|
225
|
+
cursor: Database cursor
|
|
226
|
+
statement: SQL statement with parameter list
|
|
227
|
+
|
|
228
|
+
Returns:
|
|
229
|
+
ExecutionResult with batch execution details
|
|
230
|
+
"""
|
|
231
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
232
|
+
|
|
233
|
+
if not executemany_or_skip(cursor, sql, prepared_parameters):
|
|
234
|
+
return self.create_execution_result(cursor, rowcount_override=0, is_many_result=True)
|
|
235
|
+
|
|
236
|
+
affected_rows = resolve_rowcount(cursor)
|
|
237
|
+
|
|
238
|
+
return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)
|
|
239
|
+
|
|
240
|
+
def dispatch_execute_script(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
241
|
+
"""Execute SQL script with multiple statements.
|
|
242
|
+
|
|
243
|
+
Args:
|
|
244
|
+
cursor: Database cursor
|
|
245
|
+
statement: SQL statement containing multiple commands
|
|
246
|
+
|
|
247
|
+
Returns:
|
|
248
|
+
ExecutionResult with script execution details
|
|
249
|
+
"""
|
|
250
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
251
|
+
statements = self.split_script_statements(sql, statement.statement_config, strip_trailing_semicolon=True)
|
|
252
|
+
|
|
253
|
+
successful_count = 0
|
|
254
|
+
last_cursor = cursor
|
|
255
|
+
|
|
256
|
+
for stmt in statements:
|
|
257
|
+
execute_with_optional_parameters(cursor, stmt, prepared_parameters)
|
|
258
|
+
successful_count += 1
|
|
259
|
+
|
|
260
|
+
return self.create_execution_result(
|
|
261
|
+
last_cursor, statement_count=len(statements), successful_statements=successful_count, is_script_result=True
|
|
262
|
+
)
|
|
263
|
+
|
|
264
|
+
def dispatch_special_handling(self, cursor: Any, statement: "SQL") -> "SQLResult | None":
|
|
265
|
+
"""Hook for PostgreSQL-specific special operations.
|
|
266
|
+
|
|
267
|
+
Args:
|
|
268
|
+
cursor: Psycopg cursor object
|
|
269
|
+
statement: SQL statement to analyze
|
|
270
|
+
|
|
271
|
+
Returns:
|
|
272
|
+
SQLResult if special handling was applied, None otherwise
|
|
273
|
+
"""
|
|
274
|
+
|
|
275
|
+
if not is_copy_operation(statement.operation_type):
|
|
276
|
+
return None
|
|
277
|
+
|
|
278
|
+
sql, _ = self._get_compiled_sql(statement, statement.statement_config)
|
|
279
|
+
operation_type = statement.operation_type
|
|
280
|
+
copy_data = statement.parameters
|
|
281
|
+
if isinstance(copy_data, list) and len(copy_data) == 1:
|
|
282
|
+
copy_data = copy_data[0]
|
|
283
|
+
|
|
284
|
+
if is_copy_from_operation(operation_type):
|
|
285
|
+
if isinstance(copy_data, (str, bytes)):
|
|
286
|
+
data_to_write = copy_data
|
|
287
|
+
elif is_readable(copy_data):
|
|
288
|
+
data_to_write = copy_data.read()
|
|
289
|
+
else:
|
|
290
|
+
data_to_write = str(copy_data)
|
|
291
|
+
|
|
292
|
+
if isinstance(data_to_write, str):
|
|
293
|
+
data_to_write = data_to_write.encode()
|
|
294
|
+
|
|
295
|
+
with cursor.copy(sql) as copy_ctx:
|
|
296
|
+
copy_ctx.write(data_to_write)
|
|
297
|
+
|
|
298
|
+
rows_affected = max(cursor.rowcount, 0)
|
|
299
|
+
|
|
300
|
+
return SQLResult(
|
|
301
|
+
data=None, rows_affected=rows_affected, statement=statement, metadata={"copy_operation": "FROM_STDIN"}
|
|
302
|
+
)
|
|
303
|
+
|
|
304
|
+
if is_copy_to_operation(operation_type):
|
|
305
|
+
output_data: list[str] = []
|
|
306
|
+
with cursor.copy(sql) as copy_ctx:
|
|
307
|
+
output_data.extend(row.decode() if isinstance(row, bytes) else str(row) for row in copy_ctx)
|
|
308
|
+
|
|
309
|
+
exported_data = "".join(output_data)
|
|
310
|
+
|
|
311
|
+
return SQLResult(
|
|
312
|
+
data=[{"copy_output": exported_data}],
|
|
313
|
+
rows_affected=0,
|
|
314
|
+
statement=statement,
|
|
315
|
+
metadata={"copy_operation": "TO_STDOUT"},
|
|
316
|
+
)
|
|
317
|
+
|
|
318
|
+
cursor.execute(sql)
|
|
319
|
+
rows_affected = max(cursor.rowcount, 0)
|
|
320
|
+
|
|
321
|
+
return SQLResult(
|
|
322
|
+
data=None, rows_affected=rows_affected, statement=statement, metadata={"copy_operation": "FILE"}
|
|
323
|
+
)
|
|
324
|
+
|
|
325
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
326
|
+
# TRANSACTION MANAGEMENT
|
|
327
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
328
|
+
|
|
329
|
+
def begin(self) -> None:
|
|
330
|
+
"""Begin a database transaction on the current connection."""
|
|
331
|
+
try:
|
|
332
|
+
if self.connection.autocommit:
|
|
333
|
+
self.connection.autocommit = False
|
|
334
|
+
except Exception as e:
|
|
335
|
+
msg = f"Failed to begin transaction: {e}"
|
|
336
|
+
raise SQLSpecError(msg) from e
|
|
337
|
+
|
|
338
|
+
def commit(self) -> None:
|
|
339
|
+
"""Commit the current transaction on the current connection."""
|
|
340
|
+
try:
|
|
341
|
+
self.connection.commit()
|
|
342
|
+
except Exception as e:
|
|
343
|
+
msg = f"Failed to commit transaction: {e}"
|
|
344
|
+
raise SQLSpecError(msg) from e
|
|
345
|
+
|
|
346
|
+
def rollback(self) -> None:
|
|
347
|
+
"""Rollback the current transaction on the current connection."""
|
|
348
|
+
try:
|
|
349
|
+
self.connection.rollback()
|
|
350
|
+
except Exception as e:
|
|
351
|
+
msg = f"Failed to rollback transaction: {e}"
|
|
352
|
+
raise SQLSpecError(msg) from e
|
|
353
|
+
|
|
354
|
+
def with_cursor(self, connection: PsycopgSyncConnection) -> PsycopgSyncCursor:
|
|
355
|
+
"""Create context manager for PostgreSQL cursor."""
|
|
356
|
+
return PsycopgSyncCursor(connection)
|
|
357
|
+
|
|
358
|
+
def handle_database_exceptions(self) -> "PsycopgSyncExceptionHandler":
|
|
359
|
+
"""Handle database-specific exceptions and wrap them appropriately."""
|
|
360
|
+
return PsycopgSyncExceptionHandler()
|
|
361
|
+
|
|
362
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
363
|
+
# STACK EXECUTION METHODS
|
|
364
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
365
|
+
|
|
366
|
+
def execute_stack(self, stack: "StatementStack", *, continue_on_error: bool = False) -> "tuple[StackResult, ...]":
|
|
367
|
+
"""Execute a StatementStack using psycopg pipeline mode when supported."""
|
|
368
|
+
|
|
369
|
+
if (
|
|
370
|
+
not isinstance(stack, StatementStack)
|
|
371
|
+
or not stack
|
|
372
|
+
or self.stack_native_disabled
|
|
373
|
+
or not pipeline_supported()
|
|
374
|
+
or continue_on_error
|
|
375
|
+
):
|
|
376
|
+
return super().execute_stack(stack, continue_on_error=continue_on_error)
|
|
377
|
+
|
|
378
|
+
prepared_ops = self._prepare_pipeline_operations(stack)
|
|
379
|
+
if prepared_ops is None:
|
|
380
|
+
return super().execute_stack(stack, continue_on_error=continue_on_error)
|
|
381
|
+
|
|
382
|
+
return self._execute_stack_pipeline(stack, prepared_ops)
|
|
383
|
+
|
|
384
|
+
    def _execute_stack_pipeline(
        self, stack: "StatementStack", prepared_ops: "list[PreparedStackOperation]"
    ) -> "tuple[StackResult, ...]":
        """Run pre-compiled stack operations through a psycopg pipeline.

        Opens a transaction if one is not already active, queues each
        operation on its own cursor inside ``connection.pipeline()``, then
        syncs the pipeline and collects per-operation results in order.
        Fail-fast: the first error aborts the stack and rolls back any
        transaction this method itself started.
        """
        results: list[StackResult] = []
        started_transaction = False

        with StackExecutionObserver(self, stack, continue_on_error=False, native_pipeline=True):
            try:
                if not self._connection_in_transaction():
                    self.begin()
                    started_transaction = True

                with ExitStack() as resource_stack:
                    pipeline = resource_stack.enter_context(self.connection.pipeline())
                    pending: list[PipelineCursorEntry] = []

                    for prepared in prepared_ops:
                        exception_ctx = self.handle_database_exceptions()
                        resource_stack.enter_context(exception_ctx)
                        cursor = resource_stack.enter_context(self.with_cursor(self.connection))

                        try:
                            if prepared.parameters:
                                cursor.execute(prepared.sql, prepared.parameters)
                            else:
                                cursor.execute(prepared.sql)
                        except Exception as exc:
                            # Wrap with positional context so callers know which
                            # stack entry failed.
                            stack_error = StackExecutionError(
                                prepared.operation_index,
                                describe_stack_statement(prepared.operation.statement),
                                exc,
                                adapter=type(self).__name__,
                                mode="fail-fast",
                            )
                            raise stack_error from exc

                        pending.append(PipelineCursorEntry(prepared=prepared, cursor=cursor))

                    # Flush queued commands; results only become readable afterwards.
                    pipeline.sync()
                    for entry in pending:
                        statement = entry.prepared.statement
                        cursor = entry.cursor

                        execution_result = build_pipeline_execution_result(statement, cursor)
                        sql_result = self.build_statement_result(statement, execution_result)
                        results.append(StackResult.from_sql_result(sql_result))

                if started_transaction:
                    self.commit()
            except Exception:
                # Only roll back a transaction this method itself opened.
                if started_transaction:
                    try:
                        self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after psycopg pipeline failure failed: %s", rollback_error)
                raise

        return tuple(results)
|
|
442
|
+
|
|
443
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
444
|
+
# STORAGE API METHODS
|
|
445
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
446
|
+
|
|
447
|
+
def select_to_storage(
|
|
448
|
+
self,
|
|
449
|
+
statement: "SQL | str",
|
|
450
|
+
destination: "StorageDestination",
|
|
451
|
+
/,
|
|
452
|
+
*parameters: Any,
|
|
453
|
+
statement_config: "StatementConfig | None" = None,
|
|
454
|
+
partitioner: "dict[str, object] | None" = None,
|
|
455
|
+
format_hint: "StorageFormat | None" = None,
|
|
456
|
+
telemetry: "StorageTelemetry | None" = None,
|
|
457
|
+
**kwargs: Any,
|
|
458
|
+
) -> "StorageBridgeJob":
|
|
459
|
+
"""Execute a query and stream Arrow results to storage (sync)."""
|
|
460
|
+
|
|
461
|
+
self._require_capability("arrow_export_enabled")
|
|
462
|
+
arrow_result = self.select_to_arrow(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
463
|
+
sync_pipeline = self._storage_pipeline()
|
|
464
|
+
telemetry_payload = self._write_result_to_storage_sync(
|
|
465
|
+
arrow_result, destination, format_hint=format_hint, pipeline=sync_pipeline
|
|
466
|
+
)
|
|
467
|
+
self._attach_partition_telemetry(telemetry_payload, partitioner)
|
|
468
|
+
return self._create_storage_job(telemetry_payload, telemetry)
|
|
469
|
+
|
|
470
|
+
    def load_from_arrow(
        self,
        table: str,
        source: "ArrowResult | Any",
        *,
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
        telemetry: "StorageTelemetry | None" = None,
    ) -> "StorageBridgeJob":
        """Load Arrow data into PostgreSQL using COPY.

        Args:
            table: Target table name.
            source: ArrowResult or Arrow-table-like object to ingest.
            partitioner: Optional partition metadata attached to telemetry.
            overwrite: When True, TRUNCATE the target table before loading.
            telemetry: Optional inbound telemetry merged into the job.

        Returns:
            StorageBridgeJob describing the completed ingest.
        """

        self._require_capability("arrow_import_enabled")
        arrow_table = self._coerce_arrow_table(source)
        if overwrite:
            truncate_sql = build_truncate_command(table)
            # NOTE(review): the handler's pending_exception is not re-raised
            # here; confirm the deferred-exception pattern is completed by a
            # caller, otherwise TRUNCATE failures are silently swallowed.
            with self.with_cursor(self.connection) as cursor, self.handle_database_exceptions():
                cursor.execute(truncate_sql)
        columns, records = self._arrow_table_to_rows(arrow_table)
        if records:
            copy_sql = build_copy_from_command(table, columns)
            with ExitStack() as stack:
                stack.enter_context(self.handle_database_exceptions())
                cursor = stack.enter_context(self.with_cursor(self.connection))
                copy_ctx = stack.enter_context(cursor.copy(copy_sql))
                # Stream rows through COPY ... FROM STDIN for bulk throughput.
                for record in records:
                    copy_ctx.write_row(record)
        telemetry_payload = self._build_ingest_telemetry(arrow_table)
        telemetry_payload["destination"] = table
        self._attach_partition_telemetry(telemetry_payload, partitioner)
        return self._create_storage_job(telemetry_payload, telemetry)
|
|
500
|
+
|
|
501
|
+
def load_from_storage(
|
|
502
|
+
self,
|
|
503
|
+
table: str,
|
|
504
|
+
source: "StorageDestination",
|
|
505
|
+
*,
|
|
506
|
+
file_format: "StorageFormat",
|
|
507
|
+
partitioner: "dict[str, object] | None" = None,
|
|
508
|
+
overwrite: bool = False,
|
|
509
|
+
) -> "StorageBridgeJob":
|
|
510
|
+
"""Load staged artifacts into PostgreSQL via COPY."""
|
|
511
|
+
|
|
512
|
+
arrow_table, inbound = self._read_arrow_from_storage_sync(source, file_format=file_format)
|
|
513
|
+
return self.load_from_arrow(table, arrow_table, partitioner=partitioner, overwrite=overwrite, telemetry=inbound)
|
|
514
|
+
|
|
515
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
516
|
+
# UTILITY METHODS
|
|
517
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
518
|
+
|
|
519
|
+
@property
|
|
520
|
+
def data_dictionary(self) -> "PsycopgSyncDataDictionary":
|
|
521
|
+
"""Get the data dictionary for this driver.
|
|
522
|
+
|
|
523
|
+
Returns:
|
|
524
|
+
Data dictionary instance for metadata queries
|
|
525
|
+
"""
|
|
526
|
+
if self._data_dictionary is None:
|
|
527
|
+
self._data_dictionary = PsycopgSyncDataDictionary()
|
|
528
|
+
return self._data_dictionary
|
|
529
|
+
|
|
530
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
531
|
+
# PRIVATE / INTERNAL METHODS
|
|
532
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
533
|
+
|
|
534
|
+
def _connection_in_transaction(self) -> bool:
|
|
535
|
+
"""Check if connection is in transaction."""
|
|
536
|
+
return bool(self.connection.info.transaction_status != TRANSACTION_STATUS_IDLE)
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
class PsycopgAsyncCursor:
    """Async context manager that opens a psycopg cursor and closes it on exit."""

    __slots__ = ("connection", "cursor")

    def __init__(self, connection: "PsycopgAsyncConnection") -> None:
        self.cursor: Any | None = None
        self.connection = connection

    async def __aenter__(self) -> Any:
        opened = self.connection.cursor()
        self.cursor = opened
        return opened

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        _ = (exc_type, exc_val, exc_tb)
        opened = self.cursor
        if opened is not None:
            await opened.close()
|
|
556
|
+
|
|
557
|
+
|
|
558
|
+
class PsycopgAsyncExceptionHandler:
    """Async context manager that converts psycopg errors into SQLSpec exceptions.

    PostgreSQL SQLSTATE error codes are mapped to specific SQLSpec exception
    types so application code can handle failures precisely.

    The mapped exception is stored in ``pending_exception`` rather than being
    raised out of ``__aexit__`` (deferred pattern required for mypyc: raising
    across the compiled ABI boundary is unsafe).
    """

    __slots__ = ("pending_exception",)

    def __init__(self) -> None:
        self.pending_exception: Exception | None = None

    async def __aenter__(self) -> "PsycopgAsyncExceptionHandler":
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool:
        if exc_type is None:
            return False
        if not issubclass(exc_type, psycopg.Error):
            return False
        self.pending_exception = create_mapped_exception(exc_val)
        return True
|
|
584
|
+
|
|
585
|
+
|
|
586
|
+
class PsycopgAsyncDriver(PsycopgPipelineMixin, AsyncDriverAdapterBase):
    """PostgreSQL psycopg asynchronous driver.

    Provides asynchronous database operations for PostgreSQL using psycopg3.
    Supports async SQL statement execution with parameter binding, async
    transaction management, async result processing with column metadata,
    parameter style conversion, PostgreSQL arrays and JSON handling, COPY
    operations for bulk data transfer, PostgreSQL-specific error handling,
    and async pub/sub support.
    """

    __slots__ = ("_data_dictionary",)
    dialect = "postgres"

    def __init__(
        self,
        connection: "PsycopgAsyncConnection",
        statement_config: "StatementConfig | None" = None,
        driver_features: "dict[str, Any] | None" = None,
    ) -> None:
        if statement_config is None:
            # Default config honors the process-wide compiled-statement cache setting.
            statement_config = default_statement_config.replace(
                enable_caching=get_cache_config().compiled_cache_enabled
            )

        super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
        # Lazily created by the ``data_dictionary`` property.
        self._data_dictionary: PsycopgAsyncDataDictionary | None = None
|
|
613
|
+
|
|
614
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
615
|
+
# CORE DISPATCH METHODS
|
|
616
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
617
|
+
|
|
618
|
+
async def dispatch_execute(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
619
|
+
"""Execute single SQL statement (async).
|
|
620
|
+
|
|
621
|
+
Args:
|
|
622
|
+
cursor: Database cursor
|
|
623
|
+
statement: SQL statement to execute
|
|
624
|
+
|
|
625
|
+
Returns:
|
|
626
|
+
ExecutionResult with statement execution details
|
|
627
|
+
"""
|
|
628
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
629
|
+
|
|
630
|
+
await execute_with_optional_parameters_async(cursor, sql, prepared_parameters)
|
|
631
|
+
|
|
632
|
+
if statement.returns_rows():
|
|
633
|
+
fetched_data = await cursor.fetchall()
|
|
634
|
+
fetched_data, column_names = collect_rows(cast("list[Any] | None", fetched_data), cursor.description)
|
|
635
|
+
|
|
636
|
+
return self.create_execution_result(
|
|
637
|
+
cursor,
|
|
638
|
+
selected_data=fetched_data,
|
|
639
|
+
column_names=column_names,
|
|
640
|
+
data_row_count=len(fetched_data),
|
|
641
|
+
is_select_result=True,
|
|
642
|
+
)
|
|
643
|
+
|
|
644
|
+
affected_rows = resolve_rowcount(cursor)
|
|
645
|
+
return self.create_execution_result(cursor, rowcount_override=affected_rows)
|
|
646
|
+
|
|
647
|
+
async def dispatch_execute_many(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
648
|
+
"""Execute SQL with multiple parameter sets (async).
|
|
649
|
+
|
|
650
|
+
Args:
|
|
651
|
+
cursor: Database cursor
|
|
652
|
+
statement: SQL statement with parameter list
|
|
653
|
+
|
|
654
|
+
Returns:
|
|
655
|
+
ExecutionResult with batch execution details
|
|
656
|
+
"""
|
|
657
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
658
|
+
|
|
659
|
+
if not await executemany_or_skip_async(cursor, sql, prepared_parameters):
|
|
660
|
+
return self.create_execution_result(cursor, rowcount_override=0, is_many_result=True)
|
|
661
|
+
|
|
662
|
+
affected_rows = resolve_rowcount(cursor)
|
|
663
|
+
|
|
664
|
+
return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)
|
|
665
|
+
|
|
666
|
+
async def dispatch_execute_script(self, cursor: Any, statement: "SQL") -> "ExecutionResult":
|
|
667
|
+
"""Execute SQL script with multiple statements (async).
|
|
668
|
+
|
|
669
|
+
Args:
|
|
670
|
+
cursor: Database cursor
|
|
671
|
+
statement: SQL statement containing multiple commands
|
|
672
|
+
|
|
673
|
+
Returns:
|
|
674
|
+
ExecutionResult with script execution details
|
|
675
|
+
"""
|
|
676
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
677
|
+
statements = self.split_script_statements(sql, statement.statement_config, strip_trailing_semicolon=True)
|
|
678
|
+
|
|
679
|
+
successful_count = 0
|
|
680
|
+
last_cursor = cursor
|
|
681
|
+
|
|
682
|
+
for stmt in statements:
|
|
683
|
+
await execute_with_optional_parameters_async(cursor, stmt, prepared_parameters)
|
|
684
|
+
successful_count += 1
|
|
685
|
+
|
|
686
|
+
return self.create_execution_result(
|
|
687
|
+
last_cursor, statement_count=len(statements), successful_statements=successful_count, is_script_result=True
|
|
688
|
+
)
|
|
689
|
+
|
|
690
|
+
    async def dispatch_special_handling(self, cursor: Any, statement: "SQL") -> "SQLResult | None":
        """Hook for PostgreSQL-specific special operations.

        Handles COPY statements: ``COPY ... FROM STDIN`` streams the statement
        parameters to the server, ``COPY ... TO STDOUT`` collects the exported
        rows, and file-based COPY is executed directly on the server.

        Args:
            cursor: Psycopg async cursor object
            statement: SQL statement to analyze

        Returns:
            SQLResult if special handling was applied, None otherwise
        """

        if not is_copy_operation(statement.operation_type):
            return None

        sql, _ = self._get_compiled_sql(statement, statement.statement_config)
        sql_upper = sql.upper()
        operation_type = statement.operation_type
        copy_data = statement.parameters
        # Unwrap a single-element parameter list to the payload itself.
        if isinstance(copy_data, list) and len(copy_data) == 1:
            copy_data = copy_data[0]

        if is_copy_from_operation(operation_type) and "FROM STDIN" in sql_upper:
            if isinstance(copy_data, (str, bytes)):
                data_to_write = copy_data
            elif is_readable(copy_data):
                data_to_write = copy_data.read()
            else:
                data_to_write = str(copy_data)

            # COPY payloads must be bytes on the wire.
            if isinstance(data_to_write, str):
                data_to_write = data_to_write.encode()

            async with cursor.copy(sql) as copy_ctx:
                await copy_ctx.write(data_to_write)

            rows_affected = max(cursor.rowcount, 0)

            return SQLResult(
                data=None, rows_affected=rows_affected, statement=statement, metadata={"copy_operation": "FROM_STDIN"}
            )

        if is_copy_to_operation(operation_type) and "TO STDOUT" in sql_upper:
            output_data: list[str] = []
            async with cursor.copy(sql) as copy_ctx:
                output_data.extend([row.decode() if isinstance(row, bytes) else str(row) async for row in copy_ctx])

            exported_data = "".join(output_data)

            return SQLResult(
                data=[{"copy_output": exported_data}],
                rows_affected=0,
                statement=statement,
                metadata={"copy_operation": "TO_STDOUT"},
            )

        # File-based COPY (COPY ... FROM/TO 'path'): the server handles the file.
        await cursor.execute(sql)
        rows_affected = max(cursor.rowcount, 0)

        return SQLResult(
            data=None, rows_affected=rows_affected, statement=statement, metadata={"copy_operation": "FILE"}
        )
|
|
751
|
+
|
|
752
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
753
|
+
# TRANSACTION MANAGEMENT
|
|
754
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
755
|
+
|
|
756
|
+
    async def begin(self) -> None:
        """Begin a database transaction on the current connection."""
        try:
            try:
                # Probe defensively: some connection wrappers may not expose
                # ``autocommit`` as an attribute.
                autocommit_flag = self.connection.autocommit
            except AttributeError:
                autocommit_flag = None
            if isinstance(autocommit_flag, bool) and not autocommit_flag:
                # Autocommit already off: psycopg opens transactions implicitly.
                return
            await self.connection.set_autocommit(False)
        except Exception as e:
            msg = f"Failed to begin transaction: {e}"
            raise SQLSpecError(msg) from e
|
|
769
|
+
|
|
770
|
+
async def commit(self) -> None:
|
|
771
|
+
"""Commit the current transaction on the current connection."""
|
|
772
|
+
try:
|
|
773
|
+
await self.connection.commit()
|
|
774
|
+
except Exception as e:
|
|
775
|
+
msg = f"Failed to commit transaction: {e}"
|
|
776
|
+
raise SQLSpecError(msg) from e
|
|
777
|
+
|
|
778
|
+
async def rollback(self) -> None:
|
|
779
|
+
"""Rollback the current transaction on the current connection."""
|
|
780
|
+
try:
|
|
781
|
+
await self.connection.rollback()
|
|
782
|
+
except Exception as e:
|
|
783
|
+
msg = f"Failed to rollback transaction: {e}"
|
|
784
|
+
raise SQLSpecError(msg) from e
|
|
785
|
+
|
|
786
|
+
def with_cursor(self, connection: "PsycopgAsyncConnection") -> "PsycopgAsyncCursor":
|
|
787
|
+
"""Create async context manager for PostgreSQL cursor."""
|
|
788
|
+
return PsycopgAsyncCursor(connection)
|
|
789
|
+
|
|
790
|
+
def handle_database_exceptions(self) -> "PsycopgAsyncExceptionHandler":
|
|
791
|
+
"""Handle database-specific exceptions and wrap them appropriately."""
|
|
792
|
+
return PsycopgAsyncExceptionHandler()
|
|
793
|
+
|
|
794
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
795
|
+
# STACK EXECUTION METHODS
|
|
796
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
797
|
+
|
|
798
|
+
async def execute_stack(
|
|
799
|
+
self, stack: "StatementStack", *, continue_on_error: bool = False
|
|
800
|
+
) -> "tuple[StackResult, ...]":
|
|
801
|
+
"""Execute a StatementStack using psycopg async pipeline when supported."""
|
|
802
|
+
|
|
803
|
+
if (
|
|
804
|
+
not isinstance(stack, StatementStack)
|
|
805
|
+
or not stack
|
|
806
|
+
or self.stack_native_disabled
|
|
807
|
+
or not pipeline_supported()
|
|
808
|
+
or continue_on_error
|
|
809
|
+
):
|
|
810
|
+
return await super().execute_stack(stack, continue_on_error=continue_on_error)
|
|
811
|
+
|
|
812
|
+
prepared_ops = self._prepare_pipeline_operations(stack)
|
|
813
|
+
if prepared_ops is None:
|
|
814
|
+
return await super().execute_stack(stack, continue_on_error=continue_on_error)
|
|
815
|
+
|
|
816
|
+
return await self._execute_stack_pipeline(stack, prepared_ops)
|
|
817
|
+
|
|
818
|
+
    async def _execute_stack_pipeline(
        self, stack: "StatementStack", prepared_ops: "list[PreparedStackOperation]"
    ) -> "tuple[StackResult, ...]":
        """Run pre-compiled stack operations through a psycopg async pipeline.

        Opens a transaction if one is not already active, queues each
        operation on its own cursor inside ``connection.pipeline()``, then
        syncs the pipeline and collects per-operation results in order.
        Fail-fast: the first error aborts the stack and rolls back any
        transaction this method itself started.
        """
        results: list[StackResult] = []
        started_transaction = False

        with StackExecutionObserver(self, stack, continue_on_error=False, native_pipeline=True):
            try:
                if not self._connection_in_transaction():
                    await self.begin()
                    started_transaction = True

                async with AsyncExitStack() as resource_stack:
                    pipeline = await resource_stack.enter_async_context(self.connection.pipeline())
                    pending: list[PipelineCursorEntry] = []

                    for prepared in prepared_ops:
                        exception_ctx = self.handle_database_exceptions()
                        await resource_stack.enter_async_context(exception_ctx)
                        cursor = await resource_stack.enter_async_context(self.with_cursor(self.connection))

                        try:
                            if prepared.parameters:
                                await cursor.execute(prepared.sql, prepared.parameters)
                            else:
                                await cursor.execute(prepared.sql)
                        except Exception as exc:
                            # Wrap with positional context so callers know which
                            # stack entry failed.
                            stack_error = StackExecutionError(
                                prepared.operation_index,
                                describe_stack_statement(prepared.operation.statement),
                                exc,
                                adapter=type(self).__name__,
                                mode="fail-fast",
                            )
                            raise stack_error from exc

                        pending.append(PipelineCursorEntry(prepared=prepared, cursor=cursor))

                    # Flush queued commands; results only become readable afterwards.
                    await pipeline.sync()
                    for entry in pending:
                        statement = entry.prepared.statement
                        cursor = entry.cursor

                        execution_result = await build_async_pipeline_execution_result(statement, cursor)
                        sql_result = self.build_statement_result(statement, execution_result)
                        results.append(StackResult.from_sql_result(sql_result))

                if started_transaction:
                    await self.commit()
            except Exception:
                # Only roll back a transaction this method itself opened.
                if started_transaction:
                    try:
                        await self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after psycopg pipeline failure failed: %s", rollback_error)
                raise

        return tuple(results)
|
|
876
|
+
|
|
877
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
878
|
+
# STORAGE API METHODS
|
|
879
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
880
|
+
|
|
881
|
+
async def select_to_storage(
|
|
882
|
+
self,
|
|
883
|
+
statement: "SQL | str",
|
|
884
|
+
destination: "StorageDestination",
|
|
885
|
+
/,
|
|
886
|
+
*parameters: Any,
|
|
887
|
+
statement_config: "StatementConfig | None" = None,
|
|
888
|
+
partitioner: "dict[str, object] | None" = None,
|
|
889
|
+
format_hint: "StorageFormat | None" = None,
|
|
890
|
+
telemetry: "StorageTelemetry | None" = None,
|
|
891
|
+
**kwargs: Any,
|
|
892
|
+
) -> "StorageBridgeJob":
|
|
893
|
+
"""Execute a query and stream Arrow data to storage asynchronously."""
|
|
894
|
+
|
|
895
|
+
self._require_capability("arrow_export_enabled")
|
|
896
|
+
arrow_result = await self.select_to_arrow(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
897
|
+
async_pipeline = self._storage_pipeline()
|
|
898
|
+
telemetry_payload = await self._write_result_to_storage_async(
|
|
899
|
+
arrow_result, destination, format_hint=format_hint, pipeline=async_pipeline
|
|
900
|
+
)
|
|
901
|
+
self._attach_partition_telemetry(telemetry_payload, partitioner)
|
|
902
|
+
return self._create_storage_job(telemetry_payload, telemetry)
|
|
903
|
+
|
|
904
|
+
async def load_from_arrow(
|
|
905
|
+
self,
|
|
906
|
+
table: str,
|
|
907
|
+
source: "ArrowResult | Any",
|
|
908
|
+
*,
|
|
909
|
+
partitioner: "dict[str, object] | None" = None,
|
|
910
|
+
overwrite: bool = False,
|
|
911
|
+
telemetry: "StorageTelemetry | None" = None,
|
|
912
|
+
) -> "StorageBridgeJob":
|
|
913
|
+
"""Load Arrow data into PostgreSQL asynchronously via COPY."""
|
|
914
|
+
|
|
915
|
+
self._require_capability("arrow_import_enabled")
|
|
916
|
+
arrow_table = self._coerce_arrow_table(source)
|
|
917
|
+
if overwrite:
|
|
918
|
+
truncate_sql = build_truncate_command(table)
|
|
919
|
+
async with self.with_cursor(self.connection) as cursor, self.handle_database_exceptions():
|
|
920
|
+
await cursor.execute(truncate_sql)
|
|
921
|
+
columns, records = self._arrow_table_to_rows(arrow_table)
|
|
922
|
+
if records:
|
|
923
|
+
copy_sql = build_copy_from_command(table, columns)
|
|
924
|
+
async with AsyncExitStack() as stack:
|
|
925
|
+
await stack.enter_async_context(self.handle_database_exceptions())
|
|
926
|
+
cursor = await stack.enter_async_context(self.with_cursor(self.connection))
|
|
927
|
+
copy_ctx = await stack.enter_async_context(cursor.copy(copy_sql))
|
|
928
|
+
for record in records:
|
|
929
|
+
await copy_ctx.write_row(record)
|
|
930
|
+
telemetry_payload = self._build_ingest_telemetry(arrow_table)
|
|
931
|
+
telemetry_payload["destination"] = table
|
|
932
|
+
self._attach_partition_telemetry(telemetry_payload, partitioner)
|
|
933
|
+
return self._create_storage_job(telemetry_payload, telemetry)
|
|
934
|
+
|
|
935
|
+
async def load_from_storage(
|
|
936
|
+
self,
|
|
937
|
+
table: str,
|
|
938
|
+
source: "StorageDestination",
|
|
939
|
+
*,
|
|
940
|
+
file_format: "StorageFormat",
|
|
941
|
+
partitioner: "dict[str, object] | None" = None,
|
|
942
|
+
overwrite: bool = False,
|
|
943
|
+
) -> "StorageBridgeJob":
|
|
944
|
+
"""Load staged artifacts asynchronously."""
|
|
945
|
+
|
|
946
|
+
arrow_table, inbound = await self._read_arrow_from_storage_async(source, file_format=file_format)
|
|
947
|
+
return await self.load_from_arrow(
|
|
948
|
+
table, arrow_table, partitioner=partitioner, overwrite=overwrite, telemetry=inbound
|
|
949
|
+
)
|
|
950
|
+
|
|
951
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
952
|
+
# UTILITY METHODS
|
|
953
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
954
|
+
|
|
955
|
+
@property
|
|
956
|
+
def data_dictionary(self) -> "PsycopgAsyncDataDictionary":
|
|
957
|
+
"""Get the data dictionary for this driver.
|
|
958
|
+
|
|
959
|
+
Returns:
|
|
960
|
+
Data dictionary instance for metadata queries
|
|
961
|
+
"""
|
|
962
|
+
if self._data_dictionary is None:
|
|
963
|
+
self._data_dictionary = PsycopgAsyncDataDictionary()
|
|
964
|
+
return self._data_dictionary
|
|
965
|
+
|
|
966
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
967
|
+
# PRIVATE / INTERNAL METHODS
|
|
968
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
969
|
+
|
|
970
|
+
def _connection_in_transaction(self) -> bool:
|
|
971
|
+
"""Check if connection is in transaction."""
|
|
972
|
+
return bool(self.connection.info.transaction_status != TRANSACTION_STATUS_IDLE)
|
|
973
|
+
|
|
974
|
+
|
|
975
|
+
# Register this adapter's profile so the driver registry can resolve "psycopg".
register_driver_profile("psycopg", driver_profile)
|