sqlspec-0.36.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
sqlspec/driver/_async.py
ADDED
@@ -0,0 +1,1737 @@
+"""Asynchronous driver protocol implementation."""
+
+import graphlib
+import logging
+import re
+from abc import abstractmethod
+from contextlib import suppress
+from time import perf_counter
+from typing import TYPE_CHECKING, Any, ClassVar, Final, cast, final, overload
+
+from mypy_extensions import mypyc_attr
+
+from sqlspec.core import SQL, ProcessedState, StackResult, Statement, create_arrow_result
+from sqlspec.core.stack import StackOperation, StatementStack
+from sqlspec.data_dictionary._loader import get_data_dictionary_loader
+from sqlspec.data_dictionary._registry import get_dialect_config
+from sqlspec.driver._common import (
+    VERSION_GROUPS_MIN_FOR_MINOR,
+    VERSION_GROUPS_MIN_FOR_PATCH,
+    AsyncExceptionHandler,
+    CommonDriverAttributesMixin,
+    ExecutionResult,
+    StackExecutionObserver,
+    describe_stack_statement,
+    handle_single_row_error,
+    resolve_db_system,
+)
+from sqlspec.driver._sql_helpers import DEFAULT_PRETTY
+from sqlspec.driver._sql_helpers import convert_to_dialect as _convert_to_dialect_impl
+from sqlspec.driver._storage_helpers import (
+    arrow_table_to_rows,
+    attach_partition_telemetry,
+    build_ingest_telemetry,
+    coerce_arrow_table,
+    create_storage_job,
+    stringify_storage_target,
+)
+from sqlspec.exceptions import ImproperConfigurationError, SQLFileNotFoundError, StackExecutionError
+from sqlspec.storage import AsyncStoragePipeline, StorageBridgeJob, StorageDestination, StorageFormat, StorageTelemetry
+from sqlspec.typing import VersionInfo
+from sqlspec.utils.arrow_helpers import convert_dict_to_arrow_with_schema
+from sqlspec.utils.logging import get_logger, log_with_context
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+    from sqlglot.dialects.dialect import DialectType
+
+    from sqlspec.builder import QueryBuilder
+    from sqlspec.core import ArrowResult, SQLResult, StatementConfig, StatementFilter
+    from sqlspec.data_dictionary._types import DialectConfig
+    from sqlspec.protocols import HasDataProtocol, HasExecuteProtocol
+    from sqlspec.typing import (
+        ArrowReturnFormat,
+        ArrowTable,
+        ColumnMetadata,
+        ForeignKeyMetadata,
+        IndexMetadata,
+        SchemaT,
+        StatementParameters,
+        TableMetadata,
+    )
+
+
+__all__ = ("AsyncDataDictionaryBase", "AsyncDriverAdapterBase")
+
+
+EMPTY_FILTERS: Final["list[StatementFilter]"] = []
+_LOGGER_NAME: Final[str] = "sqlspec"
+logger = get_logger(_LOGGER_NAME)
+
+
+@mypyc_attr(allow_interpreted_subclasses=True)
+class AsyncDriverAdapterBase(CommonDriverAttributesMixin):
+    """Base class for asynchronous database drivers.
+
+    This class includes flattened storage and SQL translation methods that were
+    previously in StorageDriverMixin and SQLTranslatorMixin. The flattening
+    eliminates cross-trait attribute access that caused mypyc segmentation faults.
+
+    Method Organization:
+    1. Core dispatch methods (the execution engine)
+    2. Transaction management (abstract methods)
+    3. Public API - execution methods
+    4. Public API - query methods (select/fetch variants)
+    5. Arrow API methods
+    6. Stack execution
+    7. Storage API methods
+    8. Utility methods
+    9. Private/internal methods
+
+    """
+
+    __slots__ = ()
+
+    dialect: "DialectType | None" = None
+
+    @property
+    def is_async(self) -> bool:
+        """Return whether the driver executes asynchronously.
+
+        Returns:
+            True for async drivers.
+
+        """
+        return True
+
+    @property
+    @abstractmethod
+    def data_dictionary(self) -> "AsyncDataDictionaryBase":
+        """Get the data dictionary for this driver.
+
+        Returns:
+            Data dictionary instance for metadata queries
+
+        """
+
+    # ─────────────────────────────────────────────────────────────────────────────
+    # CORE DISPATCH METHODS - The Execution Engine
+    # ─────────────────────────────────────────────────────────────────────────────
+
+    @final
+    async def dispatch_statement_execution(self, statement: "SQL", connection: "Any") -> "SQLResult":
+        """Central execution dispatcher using the Template Method Pattern.
+
+        Args:
+            statement: The SQL statement to execute
+            connection: The database connection to use
+
+        Returns:
+            The result of the SQL execution
+
+        """
+        runtime = self.observability
+        compiled_sql, execution_parameters = statement.compile()
+        _ = cast("ProcessedState", statement.get_processed_state())
+        operation = statement.operation_type
+        query_context = {
+            "sql": compiled_sql,
+            "parameters": execution_parameters,
+            "driver": type(self).__name__,
+            "operation": operation,
+            "is_many": statement.is_many,
+            "is_script": statement.is_script,
+        }
+        runtime.emit_query_start(**query_context)
+        span = runtime.start_query_span(compiled_sql, operation, type(self).__name__)
+        started = perf_counter()
+
+        result: SQLResult | None = None
+        exc_handler = self.handle_database_exceptions()
+        cursor_manager = self.with_cursor(connection)
+        cursor: Any | None = None
+        exc: Exception | None = None
+        exc_handler_entered = False
+        cursor_entered = False
+
+        try:
+            await exc_handler.__aenter__()
+            exc_handler_entered = True
+            cursor = await cursor_manager.__aenter__()
+            cursor_entered = True
+            special_result = await self.dispatch_special_handling(cursor, statement)
+            if special_result is not None:
+                result = special_result
+            elif statement.is_script:
+                execution_result = await self.dispatch_execute_script(cursor, statement)
+                result = self.build_statement_result(statement, execution_result)
+            elif statement.is_many:
+                execution_result = await self.dispatch_execute_many(cursor, statement)
+                result = self.build_statement_result(statement, execution_result)
+            else:
+                execution_result = await self.dispatch_execute(cursor, statement)
+                result = self.build_statement_result(statement, execution_result)
+        except Exception as err:
+            exc = err
+        finally:
+            if cursor_entered:
+                if exc is None:
+                    await cursor_manager.__aexit__(None, None, None)
+                else:
+                    await cursor_manager.__aexit__(type(exc), exc, exc.__traceback__)
+            if exc_handler_entered:
+                if exc is None:
+                    await exc_handler.__aexit__(None, None, None)
+                else:
+                    await exc_handler.__aexit__(type(exc), exc, exc.__traceback__)
+
+        if exc is not None:
+            mapped_exc = exc_handler.pending_exception or exc
+            runtime.span_manager.end_span(span, error=mapped_exc)
+            runtime.emit_error(mapped_exc, **query_context)
+            if exc_handler.pending_exception is not None:
+                raise mapped_exc from exc
+            raise exc
+
+        if exc_handler.pending_exception is not None:
+            mapped_exc = exc_handler.pending_exception
+            runtime.span_manager.end_span(span, error=mapped_exc)
+            runtime.emit_error(mapped_exc, **query_context)
+            raise mapped_exc from None
+
+        assert result is not None  # Guaranteed: no exception means result was assigned
+
+        runtime.span_manager.end_span(span)
+        duration = perf_counter() - started
+        runtime.emit_query_complete(**{**query_context, "rows_affected": result.rows_affected})
+        runtime.emit_statement_event(
+            sql=compiled_sql,
+            parameters=execution_parameters,
+            driver=type(self).__name__,
+            operation=operation,
+            execution_mode=self.statement_config.execution_mode,
+            is_many=statement.is_many,
+            is_script=statement.is_script,
+            rows_affected=result.rows_affected,
+            duration_s=duration,
+            storage_backend=(result.metadata or {}).get("storage_backend"),
+            started_at=started,
+        )
+        return result
+
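The unrolled __aenter__/__aexit__ calls in the dispatcher are the expanded form of two nested async with blocks. A sketch of the equivalent happy-path shape, not the shipped code: the real method unrolls the context managers so the mapped exception is raised by the dispatcher itself rather than from __aexit__, which matters across the mypyc ABI boundary.

async def dispatch_equivalent(driver, statement, connection):
    # Happy-path shape of dispatch_statement_execution, minus observability
    # and the deferred-exception swap (hypothetical illustration only).
    async with driver.handle_database_exceptions():
        async with driver.with_cursor(connection) as cursor:
            execution_result = await driver.dispatch_execute(cursor, statement)
            return driver.build_statement_result(statement, execution_result)
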
+    @abstractmethod
+    async def dispatch_execute(self, cursor: Any, statement: "SQL") -> ExecutionResult:
+        """Execute a single SQL statement.
+
+        Must be implemented by each driver for database-specific execution logic.
+
+        Args:
+            cursor: Database cursor/connection object
+            statement: SQL statement object with all necessary data and configuration
+
+        Returns:
+            ExecutionResult with execution data
+
+        """
+
+    @abstractmethod
+    async def dispatch_execute_many(self, cursor: Any, statement: "SQL") -> ExecutionResult:
+        """Execute SQL with multiple parameter sets (executemany).
+
+        Must be implemented by each driver for database-specific executemany logic.
+
+        Args:
+            cursor: Database cursor/connection object
+            statement: SQL statement object with all necessary data and configuration
+
+        Returns:
+            ExecutionResult with execution data for the many operation
+
+        """
+
+    async def dispatch_execute_script(self, cursor: Any, statement: "SQL") -> ExecutionResult:
+        """Execute a SQL script containing multiple statements.
+
+        Default implementation splits the script and executes statements individually.
+        Drivers can override for database-specific script execution methods.
+
+        Args:
+            cursor: Database cursor/connection object
+            statement: SQL statement object with all necessary data and configuration
+
+        Returns:
+            ExecutionResult with script execution data including statement counts
+
+        """
+        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
+        statements = self.split_script_statements(sql, self.statement_config, strip_trailing_semicolon=True)
+
+        statement_count: int = len(statements)
+        successful_count: int = 0
+
+        for stmt in statements:
+            single_stmt = statement.copy(statement=stmt, parameters=prepared_parameters)
+            await self.dispatch_execute(cursor, single_stmt)
+            successful_count += 1
+
+        return self.create_execution_result(
+            cursor, statement_count=statement_count, successful_statements=successful_count, is_script_result=True
+        )
+
+    async def dispatch_special_handling(self, cursor: Any, statement: "SQL") -> "SQLResult | None":
+        """Hook for database-specific special operations (e.g., PostgreSQL COPY, bulk operations).
+
+        This method is called first in dispatch_statement_execution() to allow drivers to handle
+        special operations that don't follow the standard SQL execution pattern.
+
+        Args:
+            cursor: Database cursor/connection object
+            statement: SQL statement to analyze
+
+        Returns:
+            SQLResult if the special operation was handled and completed,
+            None if standard execution should proceed
+
+        """
+        _ = (cursor, statement)
+        return None
+
+    # ─────────────────────────────────────────────────────────────────────────────
+    # TRANSACTION MANAGEMENT - Required Abstract Methods
+    # ─────────────────────────────────────────────────────────────────────────────
+
+    @abstractmethod
+    async def begin(self) -> None:
+        """Begin a database transaction on the current connection."""
+
+    @abstractmethod
+    async def commit(self) -> None:
+        """Commit the current transaction on the current connection."""
+
+    @abstractmethod
+    async def rollback(self) -> None:
+        """Rollback the current transaction on the current connection."""
+
+    @abstractmethod
+    def with_cursor(self, connection: Any) -> Any:
+        """Create and return an async context manager for cursor acquisition and cleanup.
+
+        Returns an async context manager that yields a cursor for database operations.
+        Concrete implementations handle database-specific cursor creation and cleanup.
+        """
+
+    @abstractmethod
+    def handle_database_exceptions(self) -> "AsyncExceptionHandler":
+        """Handle database-specific exceptions and wrap them appropriately.
+
+        Returns:
+            Exception handler with deferred exception pattern for mypyc compatibility.
+            The handler stores mapped exceptions in pending_exception rather than
+            raising from __aexit__ to avoid ABI boundary violations.
+
+        """
+
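Everything database-specific enters through the abstract hooks above; the dispatcher supplies the observability, cursor lifecycle, and error-mapping choreography around them. A minimal sketch of a concrete adapter, assuming a DB-API-flavored async connection object: the cursor protocol, the RuntimeError mapping, and the create_execution_result call shape are illustrative assumptions, not code from this wheel. Subclassing the compiled base from interpreted Python is what allow_interpreted_subclasses=True permits.

from contextlib import asynccontextmanager
from typing import Any

from sqlspec.driver._async import AsyncDriverAdapterBase


class SketchExceptionHandler:
    """Deferred-exception handler: stores the mapped error instead of raising
    from __aexit__, matching the pattern described in the docstring above."""

    def __init__(self) -> None:
        self.pending_exception: Exception | None = None

    async def __aenter__(self) -> "SketchExceptionHandler":
        return self

    async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> bool:
        if isinstance(exc, Exception):
            # Map the raw driver error; the dispatcher re-raises pending_exception.
            self.pending_exception = RuntimeError(f"database error: {exc}")
        return False  # let the original propagate; the dispatcher swaps it


class SketchDriver(AsyncDriverAdapterBase):
    """Minimal concrete adapter sketch; data_dictionary and dispatch_execute_many
    are omitted for brevity, so this class stays abstract as written."""

    async def begin(self) -> None:
        await self.connection.execute("BEGIN")

    async def commit(self) -> None:
        await self.connection.commit()

    async def rollback(self) -> None:
        await self.connection.rollback()

    def with_cursor(self, connection: Any) -> Any:
        @asynccontextmanager
        async def _managed_cursor():
            cursor = await connection.cursor()  # assumed async DB-API-like API
            try:
                yield cursor
            finally:
                await cursor.close()

        return _managed_cursor()

    def handle_database_exceptions(self) -> Any:
        return SketchExceptionHandler()

    async def dispatch_execute(self, cursor: Any, statement: Any) -> Any:
        sql, parameters = statement.compile()
        await cursor.execute(sql, parameters)
        return self.create_execution_result(cursor)  # call shape assumed
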
+    # ─────────────────────────────────────────────────────────────────────────────
+    # PUBLIC API - Core Execution Methods
+    # ─────────────────────────────────────────────────────────────────────────────
+
+    async def execute(
+        self,
+        statement: "SQL | Statement | QueryBuilder",
+        /,
+        *parameters: "StatementParameters | StatementFilter",
+        statement_config: "StatementConfig | None" = None,
+        **kwargs: Any,
+    ) -> "SQLResult":
+        """Execute a statement with parameter handling."""
+        sql_statement = self.prepare_statement(
+            statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
+        )
+        return await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)
+
+    async def execute_many(
+        self,
+        statement: "SQL | Statement | QueryBuilder",
+        /,
+        parameters: "Sequence[StatementParameters]",
+        *filters: "StatementParameters | StatementFilter",
+        statement_config: "StatementConfig | None" = None,
+        **kwargs: Any,
+    ) -> "SQLResult":
+        """Execute statement multiple times with different parameters.
+
+        Parameters passed will be used as the batch execution sequence.
+        """
+        config = statement_config or self.statement_config
+
+        if isinstance(statement, SQL):
+            statement_seed = statement.raw_expression or statement.raw_sql
+            sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs)
+        else:
+            base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs)
+            statement_seed = base_statement.raw_expression or base_statement.raw_sql
+            sql_statement = SQL(statement_seed, parameters, statement_config=config, is_many=True, **kwargs)
+
+        return await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection)
+
+    async def execute_script(
+        self,
+        statement: "str | SQL",
+        /,
+        *parameters: "StatementParameters | StatementFilter",
+        statement_config: "StatementConfig | None" = None,
+        **kwargs: Any,
+    ) -> "SQLResult":
+        """Execute a multi-statement script.
+
+        By default, validates each statement and logs warnings for dangerous
+        operations. Use suppress_warnings=True for migrations and admin scripts.
+        """
+        config = statement_config or self.statement_config
+        sql_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs)
+
+        return await self.dispatch_statement_execution(statement=sql_statement.as_script(), connection=self.connection)
+
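execute(), execute_many(), and execute_script() above all normalize their input into a single SQL object and hand it to dispatch_statement_execution(). A hypothetical usage sketch, assuming a driver for a SQLite-style `?` placeholder dialect and tables invented for this example:

async def seed_users(driver: "AsyncDriverAdapterBase") -> None:
    # Hypothetical usage sketch; names and placeholder style are assumptions.
    await driver.begin()
    # Single statement: positional parameters follow the statement.
    result = await driver.execute("INSERT INTO users (name) VALUES (?)", "alice")
    print(result.rows_affected)

    # Batch: one statement, executed once per parameter set (is_many=True).
    await driver.execute_many(
        "INSERT INTO users (name) VALUES (?)",
        [("bob",), ("carol",)],
    )

    # Script: split into individual statements and executed one by one.
    await driver.execute_script("DELETE FROM sessions; DELETE FROM audit_log;")
    await driver.commit()
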
396
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
397
|
+
# PUBLIC API - Query Methods (select/fetch variants)
|
|
398
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
399
|
+
|
|
400
|
+
@overload
|
|
401
|
+
async def select(
|
|
402
|
+
self,
|
|
403
|
+
statement: "Statement | QueryBuilder",
|
|
404
|
+
/,
|
|
405
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
406
|
+
schema_type: "type[SchemaT]",
|
|
407
|
+
statement_config: "StatementConfig | None" = None,
|
|
408
|
+
**kwargs: Any,
|
|
409
|
+
) -> "list[SchemaT]": ...
|
|
410
|
+
|
|
411
|
+
@overload
|
|
412
|
+
async def select(
|
|
413
|
+
self,
|
|
414
|
+
statement: "Statement | QueryBuilder",
|
|
415
|
+
/,
|
|
416
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
417
|
+
schema_type: None = None,
|
|
418
|
+
statement_config: "StatementConfig | None" = None,
|
|
419
|
+
**kwargs: Any,
|
|
420
|
+
) -> "list[dict[str, Any]]": ...
|
|
421
|
+
|
|
422
|
+
async def select(
|
|
423
|
+
self,
|
|
424
|
+
statement: "Statement | QueryBuilder",
|
|
425
|
+
/,
|
|
426
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
427
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
428
|
+
statement_config: "StatementConfig | None" = None,
|
|
429
|
+
**kwargs: Any,
|
|
430
|
+
) -> "list[SchemaT] | list[dict[str, Any]]":
|
|
431
|
+
"""Execute a select statement and return all rows."""
|
|
432
|
+
result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
433
|
+
return result.get_data(schema_type=schema_type)
|
|
434
|
+
|
|
435
|
+
@overload
|
|
436
|
+
async def fetch(
|
|
437
|
+
self,
|
|
438
|
+
statement: "Statement | QueryBuilder",
|
|
439
|
+
/,
|
|
440
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
441
|
+
schema_type: "type[SchemaT]",
|
|
442
|
+
statement_config: "StatementConfig | None" = None,
|
|
443
|
+
**kwargs: Any,
|
|
444
|
+
) -> "list[SchemaT]": ...
|
|
445
|
+
|
|
446
|
+
@overload
|
|
447
|
+
async def fetch(
|
|
448
|
+
self,
|
|
449
|
+
statement: "Statement | QueryBuilder",
|
|
450
|
+
/,
|
|
451
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
452
|
+
schema_type: None = None,
|
|
453
|
+
statement_config: "StatementConfig | None" = None,
|
|
454
|
+
**kwargs: Any,
|
|
455
|
+
) -> "list[dict[str, Any]]": ...
|
|
456
|
+
|
|
457
|
+
async def fetch(
|
|
458
|
+
self,
|
|
459
|
+
statement: "Statement | QueryBuilder",
|
|
460
|
+
/,
|
|
461
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
462
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
463
|
+
statement_config: "StatementConfig | None" = None,
|
|
464
|
+
**kwargs: Any,
|
|
465
|
+
) -> "list[SchemaT] | list[dict[str, Any]]":
|
|
466
|
+
"""Execute a select statement and return all rows.
|
|
467
|
+
|
|
468
|
+
This is an alias for :meth:`select` provided for users familiar
|
|
469
|
+
with asyncpg's fetch() naming convention.
|
|
470
|
+
|
|
471
|
+
See Also:
|
|
472
|
+
select(): Primary method with identical behavior
|
|
473
|
+
|
|
474
|
+
"""
|
|
475
|
+
return await self.select(
|
|
476
|
+
statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
|
|
477
|
+
)
|
|
478
|
+
|
|
479
|
+
@overload
|
|
480
|
+
async def select_one(
|
|
481
|
+
self,
|
|
482
|
+
statement: "Statement | QueryBuilder",
|
|
483
|
+
/,
|
|
484
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
485
|
+
schema_type: "type[SchemaT]",
|
|
486
|
+
statement_config: "StatementConfig | None" = None,
|
|
487
|
+
**kwargs: Any,
|
|
488
|
+
) -> "SchemaT": ...
|
|
489
|
+
|
|
490
|
+
@overload
|
|
491
|
+
async def select_one(
|
|
492
|
+
self,
|
|
493
|
+
statement: "Statement | QueryBuilder",
|
|
494
|
+
/,
|
|
495
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
496
|
+
schema_type: None = None,
|
|
497
|
+
statement_config: "StatementConfig | None" = None,
|
|
498
|
+
**kwargs: Any,
|
|
499
|
+
) -> "dict[str, Any]": ...
|
|
500
|
+
|
|
501
|
+
async def select_one(
|
|
502
|
+
self,
|
|
503
|
+
statement: "Statement | QueryBuilder",
|
|
504
|
+
/,
|
|
505
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
506
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
507
|
+
statement_config: "StatementConfig | None" = None,
|
|
508
|
+
**kwargs: Any,
|
|
509
|
+
) -> "SchemaT | dict[str, Any]":
|
|
510
|
+
"""Execute a select statement and return exactly one row.
|
|
511
|
+
|
|
512
|
+
Raises an exception if no rows or more than one row is returned.
|
|
513
|
+
"""
|
|
514
|
+
result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
515
|
+
try:
|
|
516
|
+
return result.one(schema_type=schema_type)
|
|
517
|
+
except ValueError as error:
|
|
518
|
+
handle_single_row_error(error)
|
|
519
|
+
|
|
520
|
+
@overload
|
|
521
|
+
async def fetch_one(
|
|
522
|
+
self,
|
|
523
|
+
statement: "Statement | QueryBuilder",
|
|
524
|
+
/,
|
|
525
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
526
|
+
schema_type: "type[SchemaT]",
|
|
527
|
+
statement_config: "StatementConfig | None" = None,
|
|
528
|
+
**kwargs: Any,
|
|
529
|
+
) -> "SchemaT": ...
|
|
530
|
+
|
|
531
|
+
@overload
|
|
532
|
+
async def fetch_one(
|
|
533
|
+
self,
|
|
534
|
+
statement: "Statement | QueryBuilder",
|
|
535
|
+
/,
|
|
536
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
537
|
+
schema_type: None = None,
|
|
538
|
+
statement_config: "StatementConfig | None" = None,
|
|
539
|
+
**kwargs: Any,
|
|
540
|
+
) -> "dict[str, Any]": ...
|
|
541
|
+
|
|
542
|
+
async def fetch_one(
|
|
543
|
+
self,
|
|
544
|
+
statement: "Statement | QueryBuilder",
|
|
545
|
+
/,
|
|
546
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
547
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
548
|
+
statement_config: "StatementConfig | None" = None,
|
|
549
|
+
**kwargs: Any,
|
|
550
|
+
) -> "SchemaT | dict[str, Any]":
|
|
551
|
+
"""Execute a select statement and return exactly one row.
|
|
552
|
+
|
|
553
|
+
This is an alias for :meth:`select_one` provided for users familiar
|
|
554
|
+
with asyncpg's fetch_one() naming convention.
|
|
555
|
+
|
|
556
|
+
Raises an exception if no rows or more than one row is returned.
|
|
557
|
+
|
|
558
|
+
See Also:
|
|
559
|
+
select_one(): Primary method with identical behavior
|
|
560
|
+
|
|
561
|
+
"""
|
|
562
|
+
return await self.select_one(
|
|
563
|
+
statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
|
|
564
|
+
)
|
|
565
|
+
|
|
566
|
+
@overload
|
|
567
|
+
async def select_one_or_none(
|
|
568
|
+
self,
|
|
569
|
+
statement: "Statement | QueryBuilder",
|
|
570
|
+
/,
|
|
571
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
572
|
+
schema_type: "type[SchemaT]",
|
|
573
|
+
statement_config: "StatementConfig | None" = None,
|
|
574
|
+
**kwargs: Any,
|
|
575
|
+
) -> "SchemaT | None": ...
|
|
576
|
+
|
|
577
|
+
@overload
|
|
578
|
+
async def select_one_or_none(
|
|
579
|
+
self,
|
|
580
|
+
statement: "Statement | QueryBuilder",
|
|
581
|
+
/,
|
|
582
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
583
|
+
schema_type: None = None,
|
|
584
|
+
statement_config: "StatementConfig | None" = None,
|
|
585
|
+
**kwargs: Any,
|
|
586
|
+
) -> "dict[str, Any] | None": ...
|
|
587
|
+
|
|
588
|
+
async def select_one_or_none(
|
|
589
|
+
self,
|
|
590
|
+
statement: "Statement | QueryBuilder",
|
|
591
|
+
/,
|
|
592
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
593
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
594
|
+
statement_config: "StatementConfig | None" = None,
|
|
595
|
+
**kwargs: Any,
|
|
596
|
+
) -> "SchemaT | dict[str, Any] | None":
|
|
597
|
+
"""Execute a select statement and return at most one row.
|
|
598
|
+
|
|
599
|
+
Returns None if no rows are found. Raises ``ValueError`` if more than one
|
|
600
|
+
row is returned. Any database or SQL execution errors raised by the driver
|
|
601
|
+
are propagated unchanged.
|
|
602
|
+
"""
|
|
603
|
+
result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
604
|
+
return result.one_or_none(schema_type=schema_type)
|
|
605
|
+
|
|
606
|
+
@overload
|
|
607
|
+
async def fetch_one_or_none(
|
|
608
|
+
self,
|
|
609
|
+
statement: "Statement | QueryBuilder",
|
|
610
|
+
/,
|
|
611
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
612
|
+
schema_type: "type[SchemaT]",
|
|
613
|
+
statement_config: "StatementConfig | None" = None,
|
|
614
|
+
**kwargs: Any,
|
|
615
|
+
) -> "SchemaT | None": ...
|
|
616
|
+
|
|
617
|
+
@overload
|
|
618
|
+
async def fetch_one_or_none(
|
|
619
|
+
self,
|
|
620
|
+
statement: "Statement | QueryBuilder",
|
|
621
|
+
/,
|
|
622
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
623
|
+
schema_type: None = None,
|
|
624
|
+
statement_config: "StatementConfig | None" = None,
|
|
625
|
+
**kwargs: Any,
|
|
626
|
+
) -> "dict[str, Any] | None": ...
|
|
627
|
+
|
|
628
|
+
async def fetch_one_or_none(
|
|
629
|
+
self,
|
|
630
|
+
statement: "Statement | QueryBuilder",
|
|
631
|
+
/,
|
|
632
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
633
|
+
schema_type: "type[SchemaT] | None" = None,
|
|
634
|
+
statement_config: "StatementConfig | None" = None,
|
|
635
|
+
**kwargs: Any,
|
|
636
|
+
) -> "SchemaT | dict[str, Any] | None":
|
|
637
|
+
"""Execute a select statement and return at most one row.
|
|
638
|
+
|
|
639
|
+
This is an alias for :meth:`select_one_or_none` provided for users familiar
|
|
640
|
+
with asyncpg's fetch_one_or_none() naming convention.
|
|
641
|
+
|
|
642
|
+
Returns None if no rows are found.
|
|
643
|
+
Raises an exception if more than one row is returned.
|
|
644
|
+
|
|
645
|
+
See Also:
|
|
646
|
+
select_one_or_none(): Primary method with identical behavior
|
|
647
|
+
|
|
648
|
+
"""
|
|
649
|
+
return await self.select_one_or_none(
|
|
650
|
+
statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
|
|
651
|
+
)
|
|
652
|
+
|
|
653
|
+
async def select_value(
|
|
654
|
+
self,
|
|
655
|
+
statement: "Statement | QueryBuilder",
|
|
656
|
+
/,
|
|
657
|
+
*parameters: "StatementParameters | StatementFilter",
|
|
658
|
+
statement_config: "StatementConfig | None" = None,
|
|
659
|
+
**kwargs: Any,
|
|
660
|
+
) -> Any:
|
|
661
|
+
"""Execute a select statement and return a single scalar value.
|
|
662
|
+
|
|
663
|
+
Expects exactly one row with one column.
|
|
664
|
+
Raises an exception if no rows or more than one row/column is returned.
|
|
665
|
+
"""
|
|
666
|
+
result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
|
|
667
|
+
try:
|
|
668
|
+
return result.scalar()
|
|
669
|
+
except ValueError as error:
|
|
670
|
+
handle_single_row_error(error)
|
|
671
|
+

    async def fetch_value(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> Any:
        """Execute a select statement and return a single scalar value.

        This is an alias for :meth:`select_value` provided for users who
        prefer asyncpg-style ``fetch*`` naming.

        Expects exactly one row with one column.
        Raises an exception if no rows or more than one row/column is returned.

        See Also:
            select_value(): Primary method with identical behavior

        """
        return await self.select_value(statement, *parameters, statement_config=statement_config, **kwargs)

    async def select_value_or_none(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> Any:
        """Execute a select statement and return a single scalar value or None.

        Returns None if no rows are found.
        Expects at most one row with one column.
        Raises an exception if more than one row is returned.
        """
        result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)
        return result.scalar_or_none()
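
    # Usage sketch (editor's note; `db` is a configured async driver):
    #
    #     email = await db.select_value_or_none(
    #         "SELECT email FROM users WHERE id = ?", 42
    #     )
    #     # -> the scalar value, or None when no row matches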

    async def fetch_value_or_none(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> Any:
        """Execute a select statement and return a single scalar value or None.

        This is an alias for :meth:`select_value_or_none` provided for users who
        prefer asyncpg-style ``fetch*`` naming.

        Returns None if no rows are found.
        Expects at most one row with one column.
        Raises an exception if more than one row is returned.

        See Also:
            select_value_or_none(): Primary method with identical behavior

        """
        return await self.select_value_or_none(statement, *parameters, statement_config=statement_config, **kwargs)

    @overload
    async def select_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: "type[SchemaT]",
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[SchemaT], int]": ...

    @overload
    async def select_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: None = None,
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[dict[str, Any]], int]": ...

    async def select_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: "type[SchemaT] | None" = None,
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[SchemaT] | list[dict[str, Any]], int]":
        """Execute a select statement and return both the data and total count.

        This method is designed for pagination scenarios where you need both
        the current page of data and the total number of rows that match the query.

        Args:
            statement: The SQL statement, QueryBuilder, or raw SQL string
            *parameters: Parameters for the SQL statement
            schema_type: Optional schema type for data transformation
            statement_config: Optional SQL configuration
            **kwargs: Additional keyword arguments

        Returns:
            A tuple containing:
            - List of data rows (transformed by schema_type if provided)
            - Total count of rows matching the query (ignoring LIMIT/OFFSET)

        """
        sql_statement = self.prepare_statement(
            statement, parameters, statement_config=statement_config or self.statement_config, kwargs=kwargs
        )
        count_result = await self.dispatch_statement_execution(self._create_count_query(sql_statement), self.connection)
        select_result = await self.execute(sql_statement)

        return (select_result.get_data(schema_type=schema_type), count_result.scalar())
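
    # Pagination sketch (editor's note; `db` is a configured async driver and
    # LIMIT/OFFSET is written inline for brevity):
    #
    #     rows, total = await db.select_with_total(
    #         "SELECT * FROM users ORDER BY id LIMIT 20 OFFSET 40"
    #     )
    #     # `rows` is the requested page; `total` counts all matching rows
    #     # (LIMIT/OFFSET ignored), so the page count is ceil(total / 20)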

    @overload
    async def fetch_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: "type[SchemaT]",
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[SchemaT], int]": ...

    @overload
    async def fetch_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: None = None,
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[dict[str, Any]], int]": ...

    async def fetch_with_total(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        schema_type: "type[SchemaT] | None" = None,
        statement_config: "StatementConfig | None" = None,
        **kwargs: Any,
    ) -> "tuple[list[SchemaT] | list[dict[str, Any]], int]":
        """Execute a select statement and return both the data and total count.

        This is an alias for :meth:`select_with_total` provided for users who
        prefer asyncpg-style ``fetch*`` naming.

        This method is designed for pagination scenarios where you need both
        the current page of data and the total number of rows that match the query.

        See Also:
            select_with_total(): Primary method with identical behavior and full documentation

        """
        return await self.select_with_total(
            statement, *parameters, schema_type=schema_type, statement_config=statement_config, **kwargs
        )

    # ─────────────────────────────────────────────────────────────────────────────
    # ARROW API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def select_to_arrow(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        return_format: "ArrowReturnFormat" = "table",
        native_only: bool = False,
        batch_size: int | None = None,
        arrow_schema: Any = None,
        **kwargs: Any,
    ) -> "ArrowResult":
        """Execute query and return results as Apache Arrow format (async).

        This base implementation uses the conversion path: execute() → dict → Arrow.
        Adapters with native Arrow support (ADBC, DuckDB, BigQuery) override this
        method to use zero-copy native paths for 5-10x performance improvement.

        Args:
            statement: SQL query string, Statement, or QueryBuilder
            *parameters: Query parameters (same format as execute()/select())
            statement_config: Optional statement configuration override
            return_format: "table" for pyarrow.Table (default), "batch" for single RecordBatch,
                "batches" for iterator of RecordBatches, "reader" for RecordBatchReader
            native_only: If True, raise error if native Arrow unavailable (default: False)
            batch_size: Rows per batch for "batch"/"batches" format (default: None = all rows)
            arrow_schema: Optional pyarrow.Schema for type casting
            **kwargs: Additional keyword arguments

        Returns:
            ArrowResult containing pyarrow.Table, RecordBatchReader, or RecordBatches

        Raises:
            ImproperConfigurationError: If native_only=True and adapter doesn't support native Arrow

        Examples:
            >>> result = await driver.select_to_arrow(
            ...     "SELECT * FROM users WHERE age > ?", 18
            ... )
            >>> df = result.to_pandas()
            >>> print(df.head())

            >>> # Force native Arrow path (raises error if unavailable)
            >>> result = await driver.select_to_arrow(
            ...     "SELECT * FROM users", native_only=True
            ... )

        """
        if native_only:
            msg = (
                f"Adapter '{self.__class__.__name__}' does not support native Arrow results. "
                f"Use native_only=False to allow conversion path, or switch to an adapter "
                f"with native Arrow support (ADBC, DuckDB, BigQuery)."
            )
            raise ImproperConfigurationError(msg)

        result = await self.execute(statement, *parameters, statement_config=statement_config, **kwargs)

        arrow_data = convert_dict_to_arrow_with_schema(
            result.data, return_format=return_format, batch_size=batch_size, arrow_schema=arrow_schema
        )
        return create_arrow_result(
            statement=result.statement,
            data=arrow_data,
            rows_affected=result.rows_affected,
            last_inserted_id=result.last_inserted_id,
            execution_time=result.execution_time,
            metadata=result.metadata,
        )
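
    # Batched-export sketch (editor's note; `db` is a configured async driver).
    # With return_format="batches" the ArrowResult wraps RecordBatches of at most
    # batch_size rows instead of one large table (consumption API not shown here):
    #
    #     result = await db.select_to_arrow(
    #         "SELECT * FROM events", return_format="batches", batch_size=10_000
    #     )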

    async def fetch_to_arrow(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        return_format: "ArrowReturnFormat" = "table",
        native_only: bool = False,
        batch_size: int | None = None,
        arrow_schema: Any = None,
        **kwargs: Any,
    ) -> "ArrowResult":
        """Execute query and return results as Apache Arrow format (async).

        This is an alias for :meth:`select_to_arrow` provided for users who
        prefer asyncpg-style ``fetch*`` naming.

        See Also:
            select_to_arrow(): Primary method with identical behavior and full documentation

        """
        return await self.select_to_arrow(
            statement,
            *parameters,
            statement_config=statement_config,
            return_format=return_format,
            native_only=native_only,
            batch_size=batch_size,
            arrow_schema=arrow_schema,
            **kwargs,
        )

    # ─────────────────────────────────────────────────────────────────────────────
    # STACK EXECUTION
    # ─────────────────────────────────────────────────────────────────────────────

    async def execute_stack(
        self, stack: "StatementStack", *, continue_on_error: bool = False
    ) -> "tuple[StackResult, ...]":
        """Execute a StatementStack sequentially using the adapter's primitives."""
        if not isinstance(stack, StatementStack):
            msg = "execute_stack expects a StatementStack instance"
            raise TypeError(msg)
        if not stack:
            msg = "Cannot execute an empty StatementStack"
            raise ValueError(msg)

        results: list[StackResult] = []
        single_transaction = not continue_on_error

        with StackExecutionObserver(self, stack, continue_on_error, native_pipeline=False) as observer:
            started_transaction = False

            try:
                if single_transaction and not self._connection_in_transaction():
                    await self.begin()
                    started_transaction = True

                for index, operation in enumerate(stack.operations):
                    try:
                        result = await self._execute_stack_operation(operation)
                    except Exception as exc:  # pragma: no cover - exercised via tests
                        stack_error = StackExecutionError(
                            index,
                            describe_stack_statement(operation.statement),
                            exc,
                            adapter=type(self).__name__,
                            mode="continue-on-error" if continue_on_error else "fail-fast",
                        )

                        if started_transaction and not continue_on_error:
                            try:
                                await self.rollback()
                            except Exception as rollback_error:  # pragma: no cover - diagnostics only
                                logger.debug("Rollback after stack failure failed: %s", rollback_error)
                            started_transaction = False

                        if continue_on_error:
                            await self._rollback_after_stack_error_async()
                            observer.record_operation_error(stack_error)
                            results.append(StackResult.from_error(stack_error))
                            continue

                        raise stack_error from exc

                    results.append(StackResult(result=result))

                    if continue_on_error:
                        await self._commit_after_stack_operation_async()

                if started_transaction:
                    await self.commit()
            except Exception:
                if started_transaction:
                    try:
                        await self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after stack failure failed: %s", rollback_error)
                raise

        return tuple(results)
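
    # Semantics sketch (editor's note). With the default continue_on_error=False
    # the stack runs inside a single transaction and the first failure raises a
    # StackExecutionError after rolling back; with continue_on_error=True each
    # operation commits or rolls back individually and failures are returned as
    # StackResult entries built via StackResult.from_error(...):
    #
    #     results = await db.execute_stack(stack, continue_on_error=True)
    #     # inspect `results` for per-operation outcomes instead of catching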

    # ─────────────────────────────────────────────────────────────────────────────
    # STORAGE API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def select_to_storage(
        self,
        statement: "SQL | str",
        destination: "StorageDestination",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        partitioner: "dict[str, object] | None" = None,
        format_hint: "StorageFormat | None" = None,
        telemetry: "StorageTelemetry | None" = None,
    ) -> "StorageBridgeJob":
        """Stream a SELECT statement directly into storage.

        Args:
            statement: SQL statement to execute.
            destination: Storage destination path.
            *parameters: Query parameters.
            statement_config: Optional statement configuration.
            partitioner: Optional partitioner configuration.
            format_hint: Optional format hint for storage.
            telemetry: Optional telemetry dict to merge.

        Returns:
            StorageBridgeJob with execution telemetry.

        """
        self._raise_storage_not_implemented("select_to_storage")
        raise NotImplementedError

    async def load_from_arrow(
        self,
        table: str,
        source: "ArrowResult | Any",
        *,
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
    ) -> "StorageBridgeJob":
        """Load Arrow data into the target table.

        Args:
            table: Target table name.
            source: Arrow data source.
            partitioner: Optional partitioner configuration.
            overwrite: Whether to overwrite existing data.

        Returns:
            StorageBridgeJob with execution telemetry.

        Raises:
            NotImplementedError: If the adapter does not implement Arrow ingestion.

        """
        self._raise_storage_not_implemented("load_from_arrow")
        raise NotImplementedError

    async def load_from_storage(
        self,
        table: str,
        source: "StorageDestination",
        *,
        file_format: "StorageFormat",
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
    ) -> "StorageBridgeJob":
        """Load artifacts from storage into the target table.

        Args:
            table: Target table name.
            source: Storage source path.
            file_format: File format of source.
            partitioner: Optional partitioner configuration.
            overwrite: Whether to overwrite existing data.

        Returns:
            StorageBridgeJob with execution telemetry.

        """
        self._raise_storage_not_implemented("load_from_storage")
        raise NotImplementedError

    def stage_artifact(self, request: "dict[str, Any]") -> "dict[str, Any]":
        """Provision staging metadata for adapters that require remote URIs.

        Args:
            request: Staging request configuration.

        Returns:
            Staging metadata dict.

        """
        self._raise_storage_not_implemented("stage_artifact")
        raise NotImplementedError

    def flush_staging_artifacts(self, artifacts: "list[dict[str, Any]]", *, error: Exception | None = None) -> None:
        """Clean up staged artifacts after a job completes.

        Args:
            artifacts: List of staging artifacts to clean up.
            error: Optional error that triggered cleanup.

        """
        if artifacts:
            self._raise_storage_not_implemented("flush_staging_artifacts")

    def get_storage_job(self, job_id: str) -> "StorageBridgeJob | None":
        """Fetch a previously created job handle.

        Args:
            job_id: Job identifier.

        Returns:
            StorageBridgeJob if found, None otherwise.

        """
        return None

    # ─────────────────────────────────────────────────────────────────────────────
    # UTILITY METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def convert_to_dialect(
        self, statement: "Statement", to_dialect: "DialectType | None" = None, pretty: bool = DEFAULT_PRETTY
    ) -> str:
        """Convert a statement to a target SQL dialect.

        Args:
            statement: SQL statement to convert.
            to_dialect: Target dialect (defaults to current dialect).
            pretty: Whether to format the output SQL.

        Returns:
            SQL string in target dialect.

        """
        return _convert_to_dialect_impl(statement, self.dialect, to_dialect, pretty)
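
    # Usage sketch (editor's note; `db` is a configured async driver and
    # "postgres" is assumed to be a recognized dialect identifier):
    #
    #     sql = db.convert_to_dialect(
    #         "SELECT TOP 5 * FROM users", to_dialect="postgres", pretty=True
    #     )
    #     # -> the statement re-rendered for PostgreSQL (e.g. using LIMIT)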

    # ─────────────────────────────────────────────────────────────────────────────
    # PRIVATE/INTERNAL METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def _connection_in_transaction(self) -> bool:
        """Check if the connection is inside a transaction.

        Each adapter MUST override this method with direct attribute access
        for optimal mypyc performance. Do not use getattr chains.

        Raises:
            NotImplementedError: Always - subclasses must override.

        """
        msg = "Adapters must override _connection_in_transaction()"
        raise NotImplementedError(msg)

    async def _execute_stack_operation(self, operation: "StackOperation") -> "SQLResult | ArrowResult | None":
        kwargs = dict(operation.keyword_arguments) if operation.keyword_arguments else {}

        if operation.method == "execute":
            return await self.execute(operation.statement, *operation.arguments, **kwargs)

        if operation.method == "execute_many":
            if not operation.arguments:
                msg = "execute_many stack operation requires parameter sets"
                raise ValueError(msg)
            parameter_sets = operation.arguments[0]
            filters = operation.arguments[1:]
            return await self.execute_many(operation.statement, parameter_sets, *filters, **kwargs)

        if operation.method == "execute_script":
            return await self.execute_script(operation.statement, *operation.arguments, **kwargs)

        if operation.method == "execute_arrow":
            return await self.select_to_arrow(operation.statement, *operation.arguments, **kwargs)

        msg = f"Unsupported stack operation method: {operation.method}"
        raise ValueError(msg)

    async def _rollback_after_stack_error_async(self) -> None:
        """Attempt to rollback after a stack operation error (async)."""
        try:
            await self.rollback()
        except Exception as rollback_error:  # pragma: no cover - driver-specific cleanup
            logger.debug("Rollback after stack error failed: %s", rollback_error)

    async def _commit_after_stack_operation_async(self) -> None:
        """Attempt to commit after a successful stack operation when not batching (async)."""
        try:
            await self.commit()
        except Exception as commit_error:  # pragma: no cover - driver-specific cleanup
            logger.debug("Commit after stack operation failed: %s", commit_error)

    def _storage_pipeline(self) -> "AsyncStoragePipeline":
        """Get or create an async storage pipeline.

        Returns:
            AsyncStoragePipeline instance.

        """
        factory = self.storage_pipeline_factory
        if factory is None:
            return AsyncStoragePipeline()
        return cast("AsyncStoragePipeline", factory())

    async def _write_result_to_storage_async(
        self,
        result: "ArrowResult",
        destination: "StorageDestination",
        *,
        format_hint: "StorageFormat | None" = None,
        storage_options: "dict[str, Any] | None" = None,
        pipeline: "AsyncStoragePipeline | None" = None,
    ) -> "StorageTelemetry":
        """Write Arrow result to storage with telemetry.

        Args:
            result: Arrow result to write.
            destination: Storage destination.
            format_hint: Optional format hint.
            storage_options: Optional storage options.
            pipeline: Optional storage pipeline.

        Returns:
            StorageTelemetry with write metrics.

        """
        runtime = self.observability
        span = runtime.start_storage_span(
            "write", destination=stringify_storage_target(destination), format_label=format_hint
        )
        try:
            telemetry = await result.write_to_storage_async(
                destination, format_hint=format_hint, storage_options=storage_options, pipeline=pipeline
            )
        except Exception as exc:
            runtime.end_storage_span(span, error=exc)
            raise
        telemetry = runtime.annotate_storage_telemetry(telemetry)
        runtime.end_storage_span(span, telemetry=telemetry)
        return telemetry

    async def _read_arrow_from_storage_async(
        self,
        source: "StorageDestination",
        *,
        file_format: "StorageFormat",
        storage_options: "dict[str, Any] | None" = None,
    ) -> "tuple[ArrowTable, StorageTelemetry]":
        """Read Arrow table from storage with telemetry.

        Args:
            source: Storage source path.
            file_format: File format to read.
            storage_options: Optional storage options.

        Returns:
            Tuple of (ArrowTable, StorageTelemetry).

        """
        runtime = self.observability
        span = runtime.start_storage_span(
            "read", destination=stringify_storage_target(source), format_label=file_format
        )
        pipeline = self._storage_pipeline()
        try:
            table, telemetry = await pipeline.read_arrow_async(
                source, file_format=file_format, storage_options=storage_options
            )
        except Exception as exc:
            runtime.end_storage_span(span, error=exc)
            raise
        telemetry = runtime.annotate_storage_telemetry(telemetry)
        runtime.end_storage_span(span, telemetry=telemetry)
        return table, telemetry

    def _coerce_arrow_table(self, source: "ArrowResult | Any") -> "ArrowTable":
        """Coerce various sources to a PyArrow Table.

        Args:
            source: ArrowResult, PyArrow Table, RecordBatch, or iterable of dicts.

        Returns:
            PyArrow Table.

        """
        return coerce_arrow_table(source)

    @staticmethod
    def _arrow_table_to_rows(
        table: "ArrowTable", columns: "list[str] | None" = None
    ) -> "tuple[list[str], list[tuple[Any, ...]]]":
        """Convert Arrow table to column names and row tuples.

        Args:
            table: Arrow table to convert.
            columns: Optional list of columns to extract.

        Returns:
            Tuple of (column_names, list of row tuples).

        """
        return arrow_table_to_rows(table, columns)

    @staticmethod
    def _build_ingest_telemetry(table: "ArrowTable", *, format_label: str = "arrow") -> "StorageTelemetry":
        """Build telemetry dict from Arrow table statistics.

        Args:
            table: Arrow table to extract statistics from.
            format_label: Format label for telemetry.

        Returns:
            StorageTelemetry dict with row/byte counts.

        """
        return build_ingest_telemetry(table, format_label=format_label)

    def _attach_partition_telemetry(
        self, telemetry: "StorageTelemetry", partitioner: "dict[str, object] | None"
    ) -> None:
        """Attach partitioner info to telemetry dict.

        Args:
            telemetry: Telemetry dict to update.
            partitioner: Partitioner configuration or None.

        """
        attach_partition_telemetry(telemetry, partitioner)

    def _create_storage_job(
        self, produced: "StorageTelemetry", provided: "StorageTelemetry | None" = None, *, status: str = "completed"
    ) -> "StorageBridgeJob":
        """Create a StorageBridgeJob from telemetry data.

        Args:
            produced: Telemetry from the production side of the operation.
            provided: Optional telemetry from the source side.
            status: Job status string.

        Returns:
            StorageBridgeJob instance.

        """
        return create_storage_job(produced, provided, status=status)


@mypyc_attr(allow_interpreted_subclasses=True, native_class=False)
class AsyncDataDictionaryBase:
    """Base class for asynchronous data dictionary implementations.

    Uses Python-compatible class layouts for cross-module inheritance.
    Child classes define dialect as a class attribute.
    """

    _version_cache: "dict[int, VersionInfo | None]"
    _version_fetch_attempted: "set[int]"

    dialect: "ClassVar[str]"
    """Dialect identifier. Must be defined by subclasses as a class attribute."""

    def __init__(self) -> None:
        self._version_cache = {}
        self._version_fetch_attempted = set()

    # ─────────────────────────────────────────────────────────────────────────────
    # DIALECT SQL METHODS (merged from DialectSQLMixin)
    # ─────────────────────────────────────────────────────────────────────────────

    def get_dialect_config(self) -> "DialectConfig":
        """Return the dialect configuration for this data dictionary."""
        return get_dialect_config(type(self).dialect)

    def get_query(self, name: str) -> "SQL":
        """Return a named SQL query for this dialect."""
        loader = get_data_dictionary_loader()
        return loader.get_query(type(self).dialect, name)

    def get_query_text(self, name: str) -> str:
        """Return raw SQL text for a named query for this dialect."""
        loader = get_data_dictionary_loader()
        return loader.get_query_text(type(self).dialect, name)

    def get_query_text_or_none(self, name: str) -> "str | None":
        """Return raw SQL text for a named query or None if missing."""
        try:
            return self.get_query_text(name)
        except SQLFileNotFoundError:
            return None

    def resolve_schema(self, schema: "str | None") -> "str | None":
        """Return a schema name using dialect defaults when missing."""
        if schema is not None:
            return schema
        config = self.get_dialect_config()
        return config.default_schema

    def resolve_feature_flag(self, feature: str, version: "VersionInfo | None") -> bool:
        """Resolve a feature flag using dialect config and version info."""
        config = self.get_dialect_config()
        flag = config.get_feature_flag(feature)
        if flag is not None:
            return flag
        required_version = config.get_feature_version(feature)
        if required_version is None or version is None:
            return False
        return bool(version >= required_version)

    # ─────────────────────────────────────────────────────────────────────────────
    # VERSION CACHING METHODS (inlined from DataDictionaryMixin)
    # ─────────────────────────────────────────────────────────────────────────────

    def get_cached_version(self, driver_id: int) -> object:
        """Get cached version info for a driver.

        Args:
            driver_id: The id() of the driver instance.

        Returns:
            Tuple of (was_cached, version_info). If was_cached is False,
            the caller should fetch the version and call cache_version().
        """
        if driver_id in self._version_fetch_attempted:
            return True, self._version_cache.get(driver_id)
        return False, None

    def cache_version(self, driver_id: int, version: "VersionInfo | None") -> None:
        """Cache version info for a driver.

        Args:
            driver_id: The id() of the driver instance.
            version: The version info to cache (can be None if detection failed).
        """
        self._version_fetch_attempted.add(driver_id)
        if version is not None:
            self._version_cache[driver_id] = version
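
    # Intended call pattern (editor's note; `dd` is a concrete data dictionary,
    # `driver` an async driver instance):
    #
    #     was_cached, version = dd.get_cached_version(id(driver))
    #     if not was_cached:
    #         version = await dd.get_version(driver)  # may return None
    #         dd.cache_version(id(driver), version)
    #     # failed detections are cached too, so they are not retried on every call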

    def parse_version_string(self, version_str: str) -> "VersionInfo | None":
        """Parse version string into VersionInfo.

        Args:
            version_str: Raw version string from database

        Returns:
            VersionInfo instance or None if parsing fails
        """
        patterns = [r"(\d+)\.(\d+)\.(\d+)", r"(\d+)\.(\d+)", r"(\d+)"]
        for pattern in patterns:
            match = re.search(pattern, version_str)
            if match:
                groups = match.groups()
                major = int(groups[0])
                minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR else 0
                patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH else 0
                return VersionInfo(major, minor, patch)
        return None
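
    # Behavior sketch (editor's note): the three patterns are tried from most to
    # least specific, so for example:
    #
    #     parse_version_string("PostgreSQL 16.2 on x86_64")  # -> VersionInfo(16, 2, 0)
    #     parse_version_string("8.0.36-mysql")               # -> VersionInfo(8, 0, 36)
    #     parse_version_string("v12")                        # -> VersionInfo(12, 0, 0)
    #     parse_version_string("unknown")                    # -> None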

    def parse_version_with_pattern(self, pattern: "re.Pattern[str]", version_str: str) -> "VersionInfo | None":
        """Parse version string using a specific regex pattern.

        Args:
            pattern: Compiled regex pattern for the version format
            version_str: Raw version string from database

        Returns:
            VersionInfo instance or None if parsing fails
        """
        match = pattern.search(version_str)
        if not match:
            return None
        groups = match.groups()
        if not groups:
            return None
        major = int(groups[0])
        minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR and groups[1] else 0
        patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH and groups[2] else 0
        return VersionInfo(major, minor, patch)

    def _resolve_log_adapter(self) -> str:
        """Resolve adapter identifier for logging."""
        return str(type(self).dialect)

    def _log_version_detected(self, adapter: str, version: "VersionInfo") -> None:
        """Log detected database version with db.system context."""
        logger.debug(
            "Detected database version", extra={"db.system": resolve_db_system(adapter), "db.version": str(version)}
        )

    def _log_version_unavailable(self, adapter: str, reason: str) -> None:
        """Log that database version could not be determined."""
        logger.debug("Database version unavailable", extra={"db.system": resolve_db_system(adapter), "reason": reason})

    def _log_schema_introspect(
        self, driver: Any, *, schema_name: "str | None", table_name: "str | None", operation: str
    ) -> None:
        """Log schema-level introspection activity."""
        log_with_context(
            logger,
            logging.DEBUG,
            "schema.introspect",
            db_system=resolve_db_system(type(driver).__name__),
            schema_name=schema_name,
            table_name=table_name,
            operation=operation,
        )

    def _log_table_describe(self, driver: Any, *, schema_name: "str | None", table_name: str, operation: str) -> None:
        """Log table-level introspection activity."""
        log_with_context(
            logger,
            logging.DEBUG,
            "table.describe",
            db_system=resolve_db_system(type(driver).__name__),
            schema_name=schema_name,
            table_name=table_name,
            operation=operation,
        )

    def detect_version_with_queries(self, driver: "HasExecuteProtocol", queries: "list[str]") -> "VersionInfo | None":
        """Try multiple version queries to detect database version.

        Args:
            driver: Database driver with execute support
            queries: List of SQL queries to try

        Returns:
            Version information or None if detection fails
        """
        for query in queries:
            with suppress(Exception):
                result: HasDataProtocol = driver.execute(query)
                result_data = result.data
                if result_data:
                    first_row = result_data[0]
                    version_str = str(first_row)
                    if isinstance(first_row, dict):
                        version_str = str(next(iter(first_row.values())))
                    elif isinstance(first_row, (list, tuple)):
                        version_str = str(first_row[0])

                    parsed_version = self.parse_version_string(version_str)
                    if parsed_version:
                        self._log_version_detected(self._resolve_log_adapter(), parsed_version)
                        return parsed_version

        self._log_version_unavailable(self._resolve_log_adapter(), "queries_exhausted")
        return None

    def get_default_type_mapping(self) -> "dict[str, str]":
        """Get default type mappings for common categories.

        Returns:
            Dictionary mapping type categories to generic SQL types
        """
        return {
            "json": "TEXT",
            "uuid": "VARCHAR(36)",
            "boolean": "INTEGER",
            "timestamp": "TIMESTAMP",
            "text": "TEXT",
            "blob": "BLOB",
        }

    def get_default_features(self) -> "list[str]":
        """Get default feature flags supported by most databases.

        Returns:
            List of commonly supported feature names
        """
        return ["supports_transactions", "supports_prepared_statements"]

    def sort_tables_topologically(self, tables: "list[str]", foreign_keys: "list[ForeignKeyMetadata]") -> "list[str]":
        """Sort tables topologically based on foreign key dependencies.

        Args:
            tables: List of table names.
            foreign_keys: List of foreign key metadata.

        Returns:
            List of table names in topological order (dependencies first).
        """
        sorter: graphlib.TopologicalSorter[str] = graphlib.TopologicalSorter()
        for table in tables:
            sorter.add(table)
        for fk in foreign_keys:
            if fk.table_name == fk.referenced_table:
                continue
            sorter.add(fk.table_name, fk.referenced_table)
        return list(sorter.static_order())
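
    # Ordering sketch (editor's note): with foreign keys orders -> customers and
    # order_items -> orders, referenced tables sort first and self-references
    # are skipped so they cannot introduce a cycle:
    #
    #     dd.sort_tables_topologically(
    #         ["order_items", "orders", "customers"], fks
    #     )  # fks: ForeignKeyMetadata entries for the two relationships above
    #     # -> ["customers", "orders", "order_items"]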

    def get_cached_version_for_driver(self, driver: Any) -> object:
        """Get cached version info for a driver instance.

        Args:
            driver: Async database driver instance.

        Returns:
            Tuple of (was_cached, version_info).

        """
        return self.get_cached_version(id(driver))

    def cache_version_for_driver(self, driver: Any, version: "VersionInfo | None") -> None:
        """Cache version info for a driver instance.

        Args:
            driver: Async database driver instance.
            version: Parsed version info or None.

        """
        self.cache_version(id(driver), version)

    @abstractmethod
    async def get_version(self, driver: Any) -> "VersionInfo | None":
        """Get database version information.

        Args:
            driver: Async database driver instance

        Returns:
            Version information or None if detection fails

        """

    @abstractmethod
    async def get_feature_flag(self, driver: Any, feature: str) -> bool:
        """Check if database supports a specific feature.

        Args:
            driver: Async database driver instance
            feature: Feature name to check

        Returns:
            True if feature is supported, False otherwise

        """

    @abstractmethod
    async def get_optimal_type(self, driver: Any, type_category: str) -> str:
        """Get optimal database type for a category.

        Args:
            driver: Async database driver instance
            type_category: Type category (e.g., 'json', 'uuid', 'boolean')

        Returns:
            Database-specific type name

        """

    @abstractmethod
    async def get_tables(self, driver: Any, schema: "str | None" = None) -> "list[TableMetadata]":
        """Get list of tables in schema.

        Args:
            driver: Async database driver instance
            schema: Schema name (None for default)

        Returns:
            List of table metadata dictionaries

        """

    @abstractmethod
    async def get_columns(
        self, driver: Any, table: "str | None" = None, schema: "str | None" = None
    ) -> "list[ColumnMetadata]":
        """Get column information for a table or schema.

        Args:
            driver: Async database driver instance
            table: Table name (None to fetch columns for all tables in schema)
            schema: Schema name (None for default)

        Returns:
            List of column metadata dictionaries

        """

    @abstractmethod
    async def get_indexes(
        self, driver: Any, table: "str | None" = None, schema: "str | None" = None
    ) -> "list[IndexMetadata]":
        """Get index information for a table or schema.

        Args:
            driver: Async database driver instance
            table: Table name (None to fetch indexes for all tables in schema)
            schema: Schema name (None for default)

        Returns:
            List of index metadata dictionaries

        """

    @abstractmethod
    async def get_foreign_keys(
        self, driver: Any, table: "str | None" = None, schema: "str | None" = None
    ) -> "list[ForeignKeyMetadata]":
        """Get foreign key metadata.

        Args:
            driver: Async database driver instance
            table: Optional table name filter
            schema: Optional schema name filter

        Returns:
            List of foreign key metadata

        """

    def list_available_features(self) -> "list[str]":
        """List all features that can be checked via get_feature_flag.

        Returns:
            List of feature names this data dictionary supports

        """
        config = self.get_dialect_config()
        features = set(self.get_default_features())
        features.update(config.feature_flags.keys())
        features.update(config.feature_versions.keys())
        return sorted(features)